2018-03-07 23:58:33 +00:00
# Declares a model class from a NamedTuple of column definitions:
# generates an initializer taking one argument per column, a #to_a that
# serializes the instance in column order (used to build SQL arg lists),
# and the DB.mapping for reading rows back.
macro add_mapping(mapping)
  def initialize({{*mapping.keys.map { |id| "@#{id}".id }}})
  end

  def to_a
    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
  end

  DB.mapping({{mapping}})
end
# Renders the named ECR view wrapped in the shared site layout.
macro templated(filename)
  render "src/views/#{{{filename}}}.ecr", "src/views/layout.ecr"
end
2018-03-09 18:42:23 +00:00
# Application configuration loaded from YAML: connection-pool sizing,
# worker thread count, the Postgres connection parameters, and an
# optional DetectLanguage API key.
class Config
  YAML.mapping({
    pool_size: Int32,
    threads:   Int32,
    db: NamedTuple(
      user: String,
      password: String,
      host: String,
      port: Int32,
      dbname: String,
    ),
    dl_api_key: String?,
  })
end
2018-01-28 02:09:27 +00:00
# A cached YouTube video row. `info` holds the raw get_video_info
# response as URL-encoded parameters; `language` is nil until detected.
class Video
  # Converts the `info` column (stored as a URL-encoded string) back
  # into HTTP::Params when a row is read from the database.
  module HTTPParamConverter
    def self.from_rs(rs)
      HTTP::Params.parse(rs.read(String))
    end
  end

  add_mapping({
    id:   String,
    info: {
      type:      HTTP::Params,
      default:   HTTP::Params.parse(""),
      converter: Video::HTTPParamConverter,
    },
    updated:      Time,
    title:        String,
    views:        Int64,
    likes:        Int32,
    dislikes:     Int32,
    wilson_score: Float64,
    published:    Time,
    description:  String,
    language:     String?,
  })
end
2018-03-25 03:38:35 +00:00
# A subscribed channel row: id (ucid), display author name, and the
# last time its feed was refreshed.
class InvidiousChannel
  # Parses an XML column back into a document node when a row is read
  # from the database. (Declared here; not referenced by the mapping.)
  module XMLConverter
    def self.from_rs(rs)
      XML.parse_html(rs.read(String))
    end
  end

  add_mapping({
    id:      String,
    author:  String,
    updated: Time,
  })
end
# One entry of a channel's RSS feed, stored per-channel so new uploads
# can be pushed into subscribers' notification lists.
class ChannelVideo
  add_mapping({
    id:        String,
    title:     String,
    published: Time,
    updated:   Time,
    ucid:      String,
    author:    String,
  })
end
2018-03-30 02:41:05 +00:00
# A signed-in user: Google session id, pending notification video ids,
# subscribed channel ucids, and account email.
class User
  add_mapping({
    id:            String,
    updated:       Time,
    notifications: Array(String),
    subscriptions: Array(String),
    email:         String,
  })
end
2018-03-03 21:06:14 +00:00
# Top-level payload of Reddit's /search.json response.
class RedditSubmit
  JSON.mapping({
    data: RedditSubmitData,
  })
end
# Listing body of a Reddit search response: the matching threads.
class RedditSubmitData
  JSON.mapping({
    children: Array(RedditThread),
  })
end
# Wrapper around a single Reddit thread ("t3" item).
class RedditThread
  JSON.mapping({
    data: RedditThreadData,
  })
end
# The fields of a Reddit thread this app actually uses.
class RedditThreadData
  JSON.mapping({
    subreddit:    String,
    id:           String,
    num_comments: Int32,
    score:        Int32,
    author:       String,
    permalink:    String,
    title:        String,
  })
end
2018-01-21 00:19:12 +00:00
# Lower bound of the Wilson score confidence interval for a Bernoulli
# parameter. Ranking by this instead of the raw like ratio keeps items
# with only a handful of votes from outranking well-established ones.
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
#   pos : number of positive ratings
#   n   : total number of ratings
def ci_lower_bound(pos, n)
  # No ratings at all: rank at the very bottom.
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0 * pos / n

  (phat + z * z / (2 * n) - z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)) / (1 + z * z / n)
end
# Human-readable duration for request logging: milliseconds when the
# span is at least 1ms, microseconds otherwise.
def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds
  return "#{millis.round(2)}ms" if millis >= 1

  "#{(millis * 1000).round(2)}µs"
end
2018-01-28 02:09:27 +00:00
# Borrows an HTTP client from the shared pool, busy-waiting with a small
# random backoff until one is available. Callers are responsible for
# pushing the client back onto the pool when done.
def get_client(pool)
  until pool.size > 0
    sleep rand(0..10).milliseconds
  end

  pool.shift
end
2018-01-28 02:09:27 +00:00
# Scrapes one video's metadata from YouTube: title/view count from the
# get_video_info endpoint, and likes, dislikes, description and publish
# date from the /watch page. Returns a Video record (not persisted;
# language is left nil and detected lazily elsewhere). Raises when
# YouTube reports a failure "reason" even after a retry, or when the
# watch page lacks the watch-time-text element.
def fetch_video ( id , client )
2018-03-04 14:54:19 +00:00
info = client . get ( " /get_video_info?video_id= #{ id } &el=detailpage&ps=default&eurl=&gl=US&hl=en " ) . body
2018-03-09 16:55:14 +00:00
# NOTE(review): bpctr set 2000s in the future appears intended to skip
# the content-warning interstitial on gated videos -- confirm.
html = client . get ( " /watch?v= #{ id } &bpctr= #{ Time . new . epoch + 2000 } " ) . body
2018-01-21 00:19:12 +00:00
html = XML . parse_html ( html )
2018-01-28 02:09:27 +00:00
info = HTTP :: Params . parse ( info )
2018-01-21 00:19:12 +00:00
# A "reason" param means the detailpage request failed; retry once
# without el=detailpage before giving up.
if info [ " reason " ]?
2018-02-03 04:04:34 +00:00
info = client . get ( " /get_video_info?video_id= #{ id } &ps=default&eurl=&gl=US&hl=en " ) . body
info = HTTP :: Params . parse ( info )
if info [ " reason " ]?
raise info [ " reason " ]
end
2018-01-21 00:19:12 +00:00
end
2018-01-28 02:09:27 +00:00
title = info [ " title " ]
views = info [ " view_count " ] . to_i64
2018-01-21 00:19:12 +00:00
2018-01-28 02:09:27 +00:00
# Like/dislike counts come from the rating buttons; thousands
# separators are stripped, and a missing node defaults to 0.
likes = html . xpath_node ( % q ( / / button [ @title = " I like this " ] / span ) )
2018-03-14 23:06:21 +00:00
likes = likes . try & . content . delete ( " , " ) . try & . to_i
likes || = 0
2018-01-28 02:09:27 +00:00
dislikes = html . xpath_node ( % q ( / / button [ @title = " I dislike this " ] / span ) )
2018-03-14 23:06:21 +00:00
dislikes = dislikes . try & . content . delete ( " , " ) . try & . to_i
dislikes || = 0
2018-01-28 02:09:27 +00:00
2018-02-27 00:58:45 +00:00
# Description is stored as raw markup; empty string when absent.
description = html . xpath_node ( % q ( / / p [ @id = " eow-description " ] ) )
description = description ? description . to_xml : " "
2018-01-28 02:09:27 +00:00
wilson_score = ci_lower_bound ( likes , likes + dislikes )
2018-02-05 01:42:13 +00:00
published = html . xpath_node ( % q ( / /s trong [ contains ( @class , " watch-time-text " ) ] ) )
2018-02-03 03:44:10 +00:00
if published
published = published . content
2018-02-05 01:42:13 +00:00
else
raise " Could not find date published "
end
# Strip every known label prefix so only the date text remains.
published = published . lchop ( " Published " )
published = published . lchop ( " Started streaming " )
2018-03-17 03:34:26 +00:00
published = published . lchop ( " Streamed live " )
published = published . lchop ( " Uploaded " )
2018-02-05 01:42:13 +00:00
published = published . lchop ( " on " )
published = published . lchop ( " Scheduled for " )
2018-02-05 23:57:03 +00:00
# Absolute dates parse directly; relative dates ("ago") only support
# minutes and hours -- anything else raises.
if ! published . includes? ( " ago " )
published = Time . parse ( published , " %b %-d, %Y " )
else
# Time matches format "20 hours ago", "40 minutes ago"...
2018-02-05 01:42:13 +00:00
delta = published . split ( " " ) [ 0 ] . to_i
case published
when . includes? " minute "
published = Time . now - delta . minutes
when . includes? " hour "
published = Time . now - delta . hours
2018-02-05 23:57:03 +00:00
else
2018-02-05 01:42:13 +00:00
raise " Could not parse #{ published } "
2018-02-05 23:57:03 +00:00
end
2018-02-03 03:44:10 +00:00
end
2018-03-17 00:45:37 +00:00
# Final nil argument is the not-yet-detected language.
video = Video . new ( id , info , Time . now , title , views , likes , dislikes , wilson_score , published , description , nil )
2018-01-21 00:19:12 +00:00
return video
end
2018-01-28 02:09:27 +00:00
# Returns the Video for `id`, serving from the `videos` table when
# possible. A cached row older than one hour is re-fetched (unless
# `refresh` is false) and updated in place; if that re-fetch fails the
# stale row is deleted and the stale record is returned as-is. A video
# not in the cache is always fetched and inserted.
def get_video ( id , client , db , refresh = true )
if db . query_one? ( " SELECT EXISTS (SELECT true FROM videos WHERE id = $1) " , id , as : Bool )
video = db . query_one ( " SELECT * FROM videos WHERE id = $1 " , id , as : Video )
2018-01-21 00:19:12 +00:00
2018-01-28 02:09:27 +00:00
# If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
2018-03-30 00:21:44 +00:00
if refresh && Time . now - video . updated > 1 . hour
2018-03-29 03:29:54 +00:00
begin
2018-03-31 14:51:44 +00:00
video = fetch_video ( id , client )
2018-03-30 00:03:00 +00:00
# Skip the first column (id) and number placeholders from $2, since
# $1 is taken by the WHERE clause.
video_array = video . to_a
args = arg_array ( video_array [ 1 .. - 1 ] , 2 )
2018-03-29 03:29:54 +00:00
2018-03-30 00:03:00 +00:00
db . exec ( " UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,published,description,language) \
= ( #{args}) WHERE id = $1", video_array)
2018-03-29 03:29:54 +00:00
rescue ex
# Refresh failed: drop the cached row so the next request re-fetches.
# `video` still holds the stale record, which is what gets returned.
db . exec ( " DELETE FROM videos * WHERE id = $1 " , id )
2018-03-31 14:51:44 +00:00
end
2018-01-21 00:19:12 +00:00
end
else
2018-01-28 02:09:27 +00:00
video = fetch_video ( id , client )
2018-03-04 14:54:19 +00:00
args = arg_array ( video . to_a )
db . exec ( " INSERT INTO videos VALUES ( #{ args } ) " , video . to_a )
2018-01-21 00:19:12 +00:00
end
return video
end
2018-01-21 23:49:27 +00:00
2018-01-28 02:09:27 +00:00
# Scrapes YouTube's results page for `query` and yields each video id
# found in the listing. NOTE(review): the sp=EgIQAVAU parameter appears
# to be the encoded "videos only" search filter -- confirm.
def search ( query , client )
2018-03-04 14:54:19 +00:00
html = client . get ( " https://www.youtube.com/results?q= #{ query } &sp=EgIQAVAU " ) . body
2018-01-21 23:49:27 +00:00
html = XML . parse_html ( html )
html . xpath_nodes ( % q ( / / ol [ @class = " item-section " ] / li ) ) . each do | item |
root = item . xpath_node ( % q ( div [ contains ( @class , " yt-lockup-video " ) ] / div ) )
if root
link = root . xpath_node ( % q ( div [ contains ( @class , " yt-lockup-thumbnail " ) ] / a / @href ) )
if link
# href is "/watch?v=<id>"; the id is everything after the "=".
yield link . content . split ( " = " ) [ 1 ]
end
end
end
end
2018-02-03 20:41:59 +00:00
2018-02-22 19:01:37 +00:00
# Swaps the first element of `a` with the element at index `b % a.size`,
# mutating and returning `a`. One step of YouTube's signature scramble.
def splice(a, b)
  i = b % a.size
  a[0], a[i] = a[i], a[0]
  a
end
2018-02-22 19:01:37 +00:00
# Unscrambles a YouTube stream signature. The exact sequence of
# reversals, drops and swaps mirrors the cipher in the player JS current
# as of this commit and must be updated whenever YouTube rotates it.
def decrypt_signature(a)
  chars = a.split("")

  chars.reverse!
  chars.shift(3)
  chars.reverse!
  chars.shift(3)

  # splice(chars, 38): swap the first char with the one at 38 mod size.
  i = 38 % chars.size
  chars[0], chars[i] = chars[i], chars[0]
  chars.shift(1)

  # splice(chars, 64)
  j = 64 % chars.size
  chars[0], chars[j] = chars[j], chars[0]

  chars.reverse!
  chars.shift(2)

  chars.join("")
end
2018-03-17 00:36:49 +00:00
# Returns up to `n` video ids ranked "hottest first": Wilson score
# decayed exponentially by age, over the 1000 most recent videos with
# more than 5000 views. With `filter` set, only English-language videos
# are returned, detecting (and persisting) the language on first sight.
def rank_videos ( db , n , pool , filter )
2018-02-05 23:56:40 +00:00
top = [ ] of { Float64 , String }
2018-03-17 04:57:31 +00:00
db . query ( " SELECT id, wilson_score, published FROM videos WHERE views > 5000 ORDER BY published DESC LIMIT 1000 " ) do | rs |
2018-02-05 23:56:40 +00:00
rs . each do
id = rs . read ( String )
wilson_score = rs . read ( Float64 )
published = rs . read ( Time )
# Exponential decay, older videos tend to rank lower
2018-02-10 16:06:37 +00:00
temperature = wilson_score * Math . exp ( - 0.000005 * ( ( Time . now - published ) . total_minutes ) )
2018-02-05 23:56:40 +00:00
top << { temperature , id }
end
end
top . sort!
# Make hottest come first
top . reverse!
# Keep only the ids; temperatures were just the sort key.
top = top . map { | a , b | b }
2018-03-17 00:36:49 +00:00
if filter
language_list = [ ] of String
top . each do | id |
if language_list . size == n
break
else
client = get_client ( pool )
2018-03-19 17:35:35 +00:00
begin
video = get_video ( id , client , db )
rescue ex
# NOTE(review): `next` here skips `pool << client` below, so the
# borrowed client is leaked on every failed fetch -- confirm/fix.
next
end
2018-03-17 00:36:49 +00:00
pool << client
2018-03-17 00:45:37 +00:00
if video . language
language = video . language
else
# No stored language: detect from title + description (capped at
# 10000 chars) and persist the result.
description = XML . parse ( video . description )
content = [ video . title , description . content ] . join ( " " )
2018-03-19 17:35:35 +00:00
content = content [ 0 , 10000 ]
2018-03-17 00:36:49 +00:00
2018-03-17 00:45:37 +00:00
results = DetectLanguage . detect ( content )
language = results [ 0 ] . language
2018-03-17 00:36:49 +00:00
2018-03-17 00:45:37 +00:00
db . exec ( " UPDATE videos SET language = $1 WHERE id = $2 " , language , id )
end
if language == " en "
2018-03-17 00:36:49 +00:00
language_list << id
end
end
end
return language_list
else
return top [ 0 .. n - 1 ]
end
2018-02-05 23:56:40 +00:00
end
2018-02-06 01:07:49 +00:00
2018-03-05 04:25:03 +00:00
# Builds an HTTPS client for `url` with SSLv2/SSLv3 disabled and
# 10-second read/connect timeouts, suitable for scraping.
def make_client(url)
  context = OpenSSL::SSL::Context::Client.new
  context.add_options(
    OpenSSL::SSL::Options::ALL |
    OpenSSL::SSL::Options::NO_SSL_V2 |
    OpenSSL::SSL::Options::NO_SSL_V3
  )

  client = HTTP::Client.new(url, context)
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds

  client
end
2018-03-03 21:06:14 +00:00
2018-03-04 16:59:03 +00:00
# Finds Reddit discussion of a YouTube video: searches reddit.com for
# submissions linking to `id`, takes the highest-scored thread, and
# fetches its top 100 comments. A 302 from the search endpoint is
# followed manually and parsed as a thread listing. Returns the
# comments JSON and the thread record; raises on other status codes.
def get_reddit_comments ( id , client , headers )
2018-03-04 15:31:26 +00:00
query = " (url:3D #{ id } %20OR%20url: #{ id } )%20(site:youtube.com%20OR%20site:youtu.be) "
2018-03-04 16:59:03 +00:00
search_results = client . get ( " /search.json?q= #{ query } " , headers )
2018-03-09 16:55:14 +00:00
2018-03-04 16:59:03 +00:00
if search_results . status_code == 200
2018-03-04 01:10:25 +00:00
search_results = RedditSubmit . from_json ( search_results . body )
2018-03-09 16:55:14 +00:00
2018-03-04 01:10:25 +00:00
# sort_by is ascending, so [-1] is the highest-scored thread.
thread = search_results . data . children . sort_by { | child | child . data . score } [ - 1 ]
2018-03-09 16:47:50 +00:00
result = client . get ( " /r/ #{ thread . data . subreddit } /comments/ #{ thread . data . id } ?limit=100&sort=top " , headers ) . body
2018-03-04 01:10:25 +00:00
result = JSON . parse ( result )
2018-03-04 16:59:03 +00:00
elsif search_results . status_code == 302
# Redirected straight to a thread: follow it and read the thread
# record out of the listing.
search_results = client . get ( search_results . headers [ " Location " ] , headers ) . body
result = JSON . parse ( search_results )
thread = RedditThread . from_json ( result [ 0 ] [ " data " ] [ " children " ] [ 0 ] . to_json )
else
raise " Got error code #{ search_results . status_code } "
2018-03-04 01:10:25 +00:00
end
2018-03-04 16:59:03 +00:00
# result[0] is the thread itself, result[1] the comment listing.
comments = result [ 1 ] [ " data " ] [ " children " ]
2018-03-04 01:10:25 +00:00
return comments , thread
2018-03-03 21:06:14 +00:00
end
# Recursively renders a Reddit comment listing to HTML. Each comment
# gets a collapse toggle, score and author header, its body (with links
# rewritten to point back at reddit.com), and its replies nested inside;
# replies are indented via the pure.css grid. Returns the HTML string.
def template_comments ( root )
html = " "
root . each do | child |
# Entries without body_html (e.g. "more" stubs) are skipped.
if child [ " data " ] [ " body_html " ]?
author = child [ " data " ] [ " author " ]
score = child [ " data " ] [ " score " ]
body_html = HTML . unescape ( child [ " data " ] [ " body_html " ] . as_s )
2018-03-07 04:00:35 +00:00
# Replace local links with links back to Reddit
body_html = fill_links ( body_html , " https " , " www.reddit.com " )
2018-03-03 21:06:14 +00:00
# Leaf comments have replies == ""; otherwise recurse.
replies_html = " "
if child [ " data " ] [ " replies " ] != " "
replies_html = template_comments ( child [ " data " ] [ " replies " ] [ " data " ] [ " children " ] )
end
content = <<-END_HTML
< p >
2018-03-07 04:03:45 +00:00
< a href = " javascript:void(0) " onclick = " toggle(this) " > [ - ] < / a> #{ score } <b> #{ author } < / b >
2018-03-03 21:06:14 +00:00
< / p>
2018-03-04 04:36:18 +00:00
< div >
#{body_html}
2018-03-03 21:06:14 +00:00
#{replies_html}
2018-03-04 04:36:18 +00:00
< / div>
2018-03-03 21:06:14 +00:00
END_HTML
# Nested comments are shifted right by one grid column.
if child [ " data " ] [ " depth " ] . as_i > 0
html += <<-END_HTML
< div class = " pure-g " >
< div class = " pure-u-1-24 " > < / div>
< div class = " pure-u-23-24 " >
#{content}
< / div>
< / div>
END_HTML
else
html += <<-END_HTML
< div class = " pure-g " >
< div class = " pure-u-1 " >
#{content}
< / div>
< / div>
END_HTML
end
end
end
return html
end
2018-03-03 21:10:56 +00:00
# Formats a number with thousands separators, e.g. 1234567 -> "1,234,567".
def number_with_separator(number)
  reversed = number.to_s.reverse
  reversed.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
2018-03-04 14:54:19 +00:00
2018-03-30 00:03:00 +00:00
# Builds the Postgres placeholder list for a prepared statement: one
# "($i)" per element of `array`, numbered consecutively from `start`.
#   arg_array([a, b, c])    => "($1),($2),($3)"
#   arg_array([a, b], 2)    => "($2),($3)"
def arg_array(array, start = 1)
  (start..array.size + start - 1).map { |i| "($#{i})" }.join(",")
end
2018-03-07 04:00:35 +00:00
# Appends a link icon after every anchor in `html` that points to a
# YouTube video, linking to this site's own /watch page instead.
# Insertion positions are collected first and applied in reverse so
# earlier insertions don't shift later offsets. Returns the new HTML.
def add_alt_links ( html )
alt_links = [ ] of { Int32 , String }
# This is painful but is likely the only way to accomplish this in Crystal,
# as Crystigiri and others are not able to insert XML Nodes into a document.
# The goal here is to use as little regex as possible
html . scan ( / <a[^>]*>([^<]+)< \/ a> / ) do | match |
anchor = XML . parse_html ( match [ 0 ] )
anchor = anchor . xpath_node ( " //a " ) . not_nil!
url = URI . parse ( HTML . unescape ( anchor [ " href " ] ) )
2018-03-09 20:06:35 +00:00
# Full watch URLs keep their query string; youtu.be short links carry
# the id in the path instead.
if [ " www.youtube.com " , " m.youtube.com " ] . includes? ( url . host ) && url . path == " /watch "
2018-03-07 04:00:35 +00:00
alt_link = <<-END_HTML
2018-03-07 04:03:45 +00:00
< a href = " #{ url . full_path } " >
2018-03-07 04:00:35 +00:00
< i class = " fa fa-link " aria - hidden = " true " > < / i>
< / a>
END_HTML
2018-03-09 20:06:35 +00:00
elsif url . host == " youtu.be "
alt_link = <<-END_HTML
< a href = " /watch?v= #{ url . full_path . lchop ( " / " ) } " >
< i class = " fa fa-link " aria - hidden = " true " > < / i>
< / a>
END_HTML
else
alt_link = " "
2018-03-07 04:00:35 +00:00
end
2018-03-09 20:06:35 +00:00
# Record where the matched anchor ends; the icon goes right after it.
alt_links << { match . end . not_nil! , alt_link }
2018-03-07 04:00:35 +00:00
end
alt_links . reverse!
alt_links . each do | position , alt_link |
html = html . insert ( position , alt_link )
end
return html
end
# Rewrites host-less anchors in an HTML fragment to absolute URLs on the
# given scheme and host (Reddit comment bodies link to bare "/r/..."
# paths). Returns the document serialized back to markup.
def fill_links(html, scheme, host)
  doc = XML.parse_html(html)

  doc.xpath_nodes("//a").each do |anchor|
    url = URI.parse(anchor["href"])

    # Reddit links don't have host; absolute links are left untouched.
    next if url.host

    url.scheme = scheme
    url.host = host
    anchor["href"] = url
  end

  doc.to_xml
end
2018-03-16 16:40:29 +00:00
# Builds the URL-encoded body for Google's account sign-in request:
# the static GlifWebSignIn parameters plus the challenge response
# `f_req`, merged with the hidden fields scraped from the login form
# (form values win on key collisions).
def login_req(login_form, f_req)
  data = {
    "pstMsg"          => "1",
    "checkConnection" => "youtube",
    "checkedDomains"  => "youtube",
    "hl"              => "en",
    "deviceinfo"      => %q([null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]),
    "f.req"           => f_req,
    "flowName"        => "GlifWebSignIn",
    "flowEntry"       => "ServiceLogin",
  }

  HTTP::Params.encode(data.merge(login_form))
end
2018-03-25 03:38:35 +00:00
# Returns the InvidiousChannel for `id`, serving from the `channels`
# table when possible. A cached row older than one minute is re-fetched
# and upserted (the ON CONFLICT update touches only `updated`, i.e. the
# third column of the record); an unknown channel is fetched and
# inserted outright.
def get_channel ( id , client , db )
if db . query_one? ( " SELECT EXISTS (SELECT true FROM channels WHERE id = $1) " , id , as : Bool )
channel = db . query_one ( " SELECT * FROM channels WHERE id = $1 " , id , as : InvidiousChannel )
2018-03-26 03:18:29 +00:00
if Time . now - channel . updated > 1 . minutes
2018-03-29 03:29:54 +00:00
channel = fetch_channel ( id , client , db )
2018-03-29 04:05:20 +00:00
channel_array = channel . to_a
2018-03-26 03:18:29 +00:00
args = arg_array ( channel_array )
2018-03-29 04:05:20 +00:00
db . exec ( " INSERT INTO channels VALUES ( #{ args } ) \
2018-03-29 23:50:24 +00:00
ON CONFLICT ( id ) DO UPDATE SET updated = $3 " , channel_array)
2018-03-25 03:38:35 +00:00
end
else
2018-03-29 03:29:54 +00:00
channel = fetch_channel ( id , client , db )
2018-03-25 03:38:35 +00:00
args = arg_array ( channel . to_a )
db . exec ( " INSERT INTO channels VALUES ( #{ args } ) " , channel . to_a )
end
return channel
end
2018-03-29 03:29:54 +00:00
# Refreshes a channel from its RSS feed: wipes its rows from
# channel_videos, re-inserts one row per feed entry, and appends each
# video id to the notifications of subscribed users who haven't been
# refreshed since the video was published. Returns a new
# InvidiousChannel record (not persisted here; callers insert it).
def fetch_channel ( id , client , db )
2018-03-25 03:38:35 +00:00
rss = client . get ( " /feeds/videos.xml?channel_id= #{ id } " ) . body
rss = XML . parse_html ( rss )
2018-03-31 21:33:01 +00:00
db . exec ( " DELETE FROM channel_videos * WHERE ucid = $1 " , id )
2018-03-29 03:29:54 +00:00
rss . xpath_nodes ( " //feed/entry " ) . each do | entry |
video_id = entry . xpath_node ( " videoid " ) . not_nil! . content
title = entry . xpath_node ( " title " ) . not_nil! . content
published = Time . parse ( entry . xpath_node ( " published " ) . not_nil! . content , " %FT%X%z " )
updated = Time . parse ( entry . xpath_node ( " updated " ) . not_nil! . content , " %FT%X%z " )
author = entry . xpath_node ( " author/name " ) . not_nil! . content
ucid = entry . xpath_node ( " channelid " ) . not_nil! . content
video = ChannelVideo . new ( video_id , title , published , updated , ucid , author )
2018-03-31 14:51:44 +00:00
video_array = video . to_a
2018-03-29 03:29:54 +00:00
args = arg_array ( video_array )
2018-04-01 00:09:27 +00:00
# Queue a notification for subscribers who haven't seen it yet
# ($1 <> ALL guards against duplicate entries).
db . exec ( " UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY ( subscriptions ) AND $1 < > ALL ( notifications ) " , video_id, published, ucid)
# UPDATE users SET notifications = notifications || ARRAY['Os9Rypn2rEQ'] WHERE updated < '2018-03-24 20:48:46' AND 'UCSc16oMxxlcJSb9SXkjwMjA' = ANY(subscriptions) AND 'Os9Rypn2rEQ' <> ALL (notifications);
2018-03-29 03:29:54 +00:00
# TODO: Update record on conflict
2018-03-31 14:51:44 +00:00
db . exec ( " INSERT INTO channel_videos VALUES ( #{ args } ) \
ON CONFLICT ( id ) DO NOTHING " , video_array)
2018-03-29 03:29:54 +00:00
end
2018-03-25 03:38:35 +00:00
author = rss . xpath_node ( " //feed/author/name " ) . not_nil! . content
2018-03-29 03:29:54 +00:00
channel = InvidiousChannel . new ( id , author , Time . now )
2018-03-25 03:38:35 +00:00
return channel
end
2018-03-30 02:41:05 +00:00
# Returns the User for session `sid`, serving from the `users` table
# when possible. A cached row older than one minute is re-fetched and
# upserted -- note the refresh branch resets `notifications` to an
# empty array while the first-insert branch does not. Unknown sessions
# are fetched and inserted.
def get_user ( sid , client , headers , db )
if db . query_one? ( " SELECT EXISTS (SELECT true FROM users WHERE id = $1) " , sid , as : Bool )
user = db . query_one ( " SELECT * FROM users WHERE id = $1 " , sid , as : User )
if Time . now - user . updated > 1 . minutes
user = fetch_user ( sid , client , headers )
user_array = user . to_a
args = arg_array ( user_array )
2018-03-31 14:51:44 +00:00
2018-03-30 02:41:05 +00:00
db . exec ( " INSERT INTO users VALUES ( #{ args } ) \
2018-04-01 00:09:27 +00:00
ON CONFLICT ( email ) DO UPDATE SET id = $1 , updated = $2 , notifications = ARRAY [ ] :: text [ ] , subscriptions = $4 " , user_array)
2018-03-30 02:41:05 +00:00
end
else
user = fetch_user ( sid , client , headers )
2018-03-31 15:30:17 +00:00
user_array = user . to_a
2018-03-30 02:41:05 +00:00
# NOTE(review): user_array is already computed above; this re-serializes
# the same record.
args = arg_array ( user . to_a )
2018-03-31 15:30:17 +00:00
db . exec ( " INSERT INTO users VALUES ( #{ args } ) \
ON CONFLICT ( email ) DO UPDATE SET id = $1 , updated = $2 , subscriptions = $4 " , user_array)
2018-03-30 02:41:05 +00:00
end
return user
end
# Builds a fresh User record by scraping YouTube's subscription manager:
# collects subscribed channel ids (priming each channel's cache via
# get_channel) and the account email from the masthead; notifications
# start empty. The record is not persisted here.
def fetch_user ( sid , client , headers )
2018-03-31 15:30:17 +00:00
feed = client . get ( " /subscription_manager?disable_polymer=1 " , headers ) . body
2018-03-30 02:41:05 +00:00
channels = [ ] of String
feed = XML . parse_html ( feed )
2018-03-31 15:30:17 +00:00
feed . xpath_nodes ( % q ( / / a [ @class = " subscription-title yt-uix-sessionlink " ] / @href ) ) . each do | channel |
# NOTE(review): lstrip(String) strips any leading characters from that
# set, not the literal prefix "/channel/" -- a ucid starting with one
# of those characters would be mangled. Confirm intended behavior.
channel_id = channel . content . lstrip ( " /channel/ " ) . not_nil!
get_channel ( channel_id , client , PG_DB )
channels << channel_id
end
email = feed . xpath_node ( % q ( / / a [ @class = " yt-masthead-picker-header yt-masthead-picker-active-account " ] ) )
if email
email = email . content . lstrip . rstrip
else
email = " "
2018-03-30 02:41:05 +00:00
end
2018-04-01 00:09:27 +00:00
user = User . new ( sid , Time . now , [ ] of String , channels , email )
2018-03-30 02:41:05 +00:00
return user
end