struct PlaylistVideo
  def to_json(locale, config, kemal_config, json : JSON::Builder)
    json.object do
      json.field "title", self.title
      json.field "videoId", self.id

      json.field "author", self.author
      json.field "authorId", self.ucid
      json.field "authorUrl", "/channel/#{self.ucid}"

      json.field "videoThumbnails" do
        generate_thumbnails(json, self.id, config, kemal_config)
      end

      json.field "index", self.index
      json.field "lengthSeconds", self.length_seconds
    end
  end

  def to_json(locale, config, kemal_config, json : JSON::Builder | Nil = nil)
    if json
      to_json(locale, config, kemal_config, json)
    else
      JSON.build do |json|
        to_json(locale, config, kemal_config, json)
      end
    end
  end

  db_mapping({
    title:          String,
    id:             String,
    author:         String,
    ucid:           String,
    length_seconds: Int32,
    published:      Time,
    plid:           String,
    index:          Int32,
    live_now:       Bool,
  })
end
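
# Rough shape of the JSON emitted by `PlaylistVideo#to_json` (field values are
# illustrative; `locale`, `config` and `kemal_config` are whatever the caller
# already has in scope, e.g. `Kemal.config` for `kemal_config`):
#
#   video.to_json(locale, config, Kemal.config)
#   # => {"title": "...", "videoId": "VIDEOID1234", "author": "...", "authorId": "UC...",
#   #     "authorUrl": "/channel/UC...", "videoThumbnails": [...], "index": 0, "lengthSeconds": 253}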

struct Playlist
  db_mapping({
    title:            String,
    id:               String,
    author:           String,
    author_thumbnail: String,
    ucid:             String,
    description_html: String,
    video_count:      Int32,
    views:            Int64,
    updated:          Time,
  })
end

def fetch_playlist_videos(plid, page, video_count, continuation = nil, locale = nil)
  client = make_client(YT_URL)

  if continuation
    # The continuation is a video ID: its 1-based position in the playlist is read
    # from the watch page and converted to a 0-based index
    html = client.get("/watch?v=#{continuation}&list=#{plid}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
    html = XML.parse_html(html.body)

    index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
    if index
      index -= 1
    end
    index ||= 0
  else
    # Without a continuation, pages are simply 100 videos long
    index = (page - 1) * 100
  end

  if video_count > 100
    # More than one page: fetch the requested slice through the /browse_ajax continuation endpoint
    url = produce_playlist_url(plid, index)

    response = client.get(url)
    response = JSON.parse(response.body)
    if !response["content_html"]? || response["content_html"].as_s.empty?
      raise translate(locale, "Empty playlist")
    end

    document = XML.parse_html(response["content_html"].as_s)
    nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))
    videos = extract_playlist(plid, nodeset, index)
  else
    # Playlist has less than one page of videos, so subsequent pages will be empty
    if page > 1
      videos = [] of PlaylistVideo
    else
      # Extract first page of videos
      response = client.get("/playlist?list=#{plid}&gl=US&hl=en&disable_polymer=1")
      document = XML.parse_html(response.body)
      nodeset = document.xpath_nodes(%q(.//tr[contains(@class, "pl-video")]))

      videos = extract_playlist(plid, nodeset, 0)

      if continuation
        # Drop leading entries until the requested video is at the front
        until videos[0].id == continuation
          videos.shift
        end
      end
    end
  end

  return videos
end
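
# Usage sketch: pages are 100 videos long (see the `(page - 1) * 100` offset above),
# and passing a video ID as `continuation` starts the listing at that video instead
# of at the top of the page:
#
#   videos  = fetch_playlist_videos(plid, 1, video_count, locale: locale)
#   resumed = fetch_playlist_videos(plid, 1, video_count, continuation: "VIDEOID1234", locale: locale)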

def extract_playlist(plid, nodeset, index)
  videos = [] of PlaylistVideo

  nodeset.each_with_index do |video, offset|
    anchor = video.xpath_node(%q(.//td[@class="pl-video-title"]))
    if !anchor
      next
    end

    title = anchor.xpath_node(%q(.//a)).not_nil!.content.strip(" \n")
    id = anchor.xpath_node(%q(.//a)).not_nil!["href"].lchop("/watch?v=")[0, 11]

    anchor = anchor.xpath_node(%q(.//div[@class="pl-video-owner"]/a))
    if anchor
      author = anchor.content
      ucid = anchor["href"].split("/")[2]
    else
      author = ""
      ucid = ""
    end

    anchor = video.xpath_node(%q(.//td[@class="pl-video-time"]/div/div[1]))
    if anchor && !anchor.content.empty?
      length_seconds = decode_length_seconds(anchor.content)
      live_now = false
    else
      length_seconds = 0
      live_now = true
    end

    videos << PlaylistVideo.new(
      title: title,
      id: id,
      author: author,
      ucid: ucid,
      length_seconds: length_seconds,
      published: Time.utc,
      plid: plid,
      index: index + offset,
      live_now: live_now
    )
  end

  return videos
end
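
# Illustrative shape of one desktop playlist row matched by the XPath queries above
# (heavily simplified; the real markup carries many more attributes). A row whose
# time cell is empty is treated as a live stream:
#
#   <tr class="pl-video">
#     <td class="pl-video-title">
#       <a href="/watch?v=VIDEOID1234&amp;list=PL...">Some title</a>
#       <div class="pl-video-owner"><a href="/channel/UC...">Some channel</a></div>
#     </td>
#     <td class="pl-video-time"><div><div>4:13</div></div></td>
#   </tr>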

def produce_playlist_url(id, index)
  # A channel ID ("UC...") is turned into its uploads playlist ("UU..."), and the
  # playlist ID is prefixed with "VL" for the browse request
  if id.starts_with? "UC"
    id = "UU" + id.lchop("UC")
  end
  ucid = "VL" + id

  # Innermost blob: a varint-encoded start index, base64-encoded and prefixed "PT:"
  meta = IO::Memory.new
  meta.write(Bytes[0x08])
  meta.write(write_var_int(index))

  meta.rewind
  meta = Base64.urlsafe_encode(meta.to_slice, false)
  meta = "PT:#{meta}"

  continuation = IO::Memory.new
  continuation.write(Bytes[0x7a, meta.size])
  continuation.print(meta)

  continuation.rewind
  meta = Base64.urlsafe_encode(continuation.to_slice)
  meta = URI.escape(meta)

  # Middle message: the "VL..." playlist ID and the escaped index blob, each as a
  # length-prefixed, protobuf-style field
  continuation = IO::Memory.new
  continuation.write(Bytes[0x12, ucid.size])
  continuation.print(ucid)
  continuation.write(Bytes[0x1a, meta.size])
  continuation.print(meta)

  # Outer envelope, base64- and URL-encoded into the continuation parameter
  wrapper = IO::Memory.new
  wrapper.write(Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02, continuation.size])
  wrapper.print(continuation)
  wrapper.rewind

  wrapper = Base64.urlsafe_encode(wrapper.to_slice)
  wrapper = URI.escape(wrapper)

  url = "/browse_ajax?continuation=#{wrapper}&gl=US&hl=en"

  return url
end
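
# Example (the token itself is illustrative; it is just the protobuf-style fields
# above, base64- and URL-encoded):
#
#   produce_playlist_url("PLabc123", 200)
#   # => "/browse_ajax?continuation=4qmFsg...&gl=US&hl=en"
#
# `fetch_playlist_videos` GETs this path and reads the next batch of playlist rows
# from the "content_html" field of the JSON response.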

def fetch_playlist(plid, locale)
  client = make_client(YT_URL)

  if plid.starts_with? "UC"
    plid = "UU#{plid.lchop("UC")}"
  end

  response = client.get("/playlist?list=#{plid}&hl=en&disable_polymer=1")
  if response.status_code != 200
    raise translate(locale, "Not a playlist.")
  end

  body = response.body.gsub(/<button[^>]+><span[^>]+>\s*less\s*<img[^>]+>\n<\/span><\/button>/, "")
  document = XML.parse_html(body)

  title = document.xpath_node(%q(//h1[@class="pl-header-title"]))
  if !title
    raise translate(locale, "Playlist does not exist.")
  end
  title = title.content.strip(" \n")

  description_html = document.xpath_node(%q(//span[@class="pl-header-description-text"]/div/div[1])).try &.to_s ||
                     document.xpath_node(%q(//span[@class="pl-header-description-text"])).try &.to_s || ""

  # YouTube allows anonymous playlists, so most of this can be empty or optional
  anchor = document.xpath_node(%q(//ul[@class="pl-header-details"]))
  author = anchor.try &.xpath_node(%q(.//li[1]/a)).try &.content
  author ||= ""
  author_thumbnail = document.xpath_node(%q(//img[@class="channel-header-profile-image"])).try &.["src"]
  author_thumbnail ||= ""
  ucid = anchor.try &.xpath_node(%q(.//li[1]/a)).try &.["href"].split("/")[-1]
  ucid ||= ""

  video_count = anchor.try &.xpath_node(%q(.//li[2])).try &.content.gsub(/\D/, "").to_i?
  video_count ||= 0
  views = anchor.try &.xpath_node(%q(.//li[3])).try &.content.delete("No views, ").to_i64?
  views ||= 0_i64

  updated = anchor.try &.xpath_node(%q(.//li[4])).try &.content.lchop("Last updated on ").lchop("Updated ")
  if updated
    updated = decode_date(updated)
  else
    updated = Time.utc
  end

  playlist = Playlist.new(
    title: title,
    id: plid,
    author: author,
    author_thumbnail: author_thumbnail,
    ucid: ucid,
    description_html: description_html,
    video_count: video_count,
    views: views,
    updated: updated
  )

  return playlist
end
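
# Typical flow (a sketch; `plid` and `locale` come from the calling request handler):
#
#   playlist = fetch_playlist(plid, locale)
#   videos   = fetch_playlist_videos(plid, 1, playlist.video_count, locale: locale)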

def template_playlist(playlist)
  html = <<-END_HTML
  <h3>
    <a href="/playlist?list=#{playlist["playlistId"]}">
      #{playlist["title"]}
    </a>
  </h3>
  <div class="pure-menu pure-menu-scrollable playlist-restricted">
    <ol class="pure-menu-list">
  END_HTML

  playlist["videos"].as_a.each do |video|
    html += <<-END_HTML
      <li class="pure-menu-item">
        <a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}">
          <div class="thumbnail">
            <img class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg">
            <p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
          </div>
          <p style="width:100%">#{video["title"]}</p>
          <p>
            <b style="width:100%">#{video["author"]}</b>
          </p>
        </a>
      </li>
    END_HTML
  end

  html += <<-END_HTML
    </ol>
  </div>
  <hr>
  END_HTML

  html
end
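
# `template_playlist` works on a JSON-like object (e.g. `JSON::Any`) with the
# camelCase keys used above, not on a `Playlist` struct. A minimal input
# (illustrative values) would be:
#
#   playlist = JSON.parse(%({
#     "playlistId": "PLabc123",
#     "title": "Some playlist",
#     "videos": [
#       {"videoId": "VIDEOID1234", "title": "Some video", "author": "Some channel", "lengthSeconds": 253}
#     ]
#   }))
#   html = template_playlist(playlist)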