# "Invidious" (which is what YouTube should be)
# Copyright (C) 2018 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

require "detect_language"
require "kemal"
require "option_parser"
require "pg"
require "xml"
require "yaml"

require "./invidious/*"

CONFIG = Config.from_yaml(File.read("config/config.yml"))
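
# The exact schema of config/config.yml comes from Config in ./invidious/*;
# based on the fields accessed below, a minimal sketch might look like:
#
#   crawl_threads: 1
#   channel_threads: 1
#   video_threads: 1
#   db:
#     user: invidious
#     password: ""
#     host: localhost
#     port: 5432
#     dbname: invidious
#   dl_api_key: ~  # optional DetectLanguage API key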

crawl_threads = CONFIG.crawl_threads
channel_threads = CONFIG.channel_threads
video_threads = CONFIG.video_threads

Kemal.config.extra_options do |parser|
  parser.banner = "Usage: invidious [arguments]"

  parser.on("-t THREADS", "--crawl-threads=THREADS", "Number of threads for crawling (default: #{crawl_threads})") do |number|
    begin
      crawl_threads = number.to_i
    rescue ex
      puts "THREADS must be an integer"
      exit
    end
  end

  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{channel_threads})") do |number|
    begin
      channel_threads = number.to_i
    rescue ex
      puts "THREADS must be an integer"
      exit
    end
  end

  parser.on("-v THREADS", "--video-threads=THREADS", "Number of threads for refreshing videos (default: #{video_threads})") do |number|
    begin
      video_threads = number.to_i
    rescue ex
      puts "THREADS must be an integer"
      exit
    end
  end
end

Kemal::CLI.new
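
# Illustrative invocation (thread flags defined above; Kemal's own options,
# e.g. -p/--port, are also accepted):
#
#   ./invidious -t 2 -c 4 -v 4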

PG_URL = URI.new(
  scheme: "postgres",
  user: CONFIG.db[:user],
  password: CONFIG.db[:password],
  host: CONFIG.db[:host],
  port: CONFIG.db[:port],
  path: CONFIG.db[:dbname],
)

PG_DB      = DB.open PG_URL
YT_URL     = URI.parse("https://www.youtube.com")
REDDIT_URL = URI.parse("https://api.reddit.com")
LOGIN_URL  = URI.parse("https://accounts.google.com")
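
# Each crawler fiber seeds a work queue from a random search, then walks
# YouTube's related-video ("rvs") graph, caching every video it visits in
# Postgres via get_video.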
crawl_threads.times do
  spawn do
    ids = Deque(String).new
    random = Random.new

    client = make_client(YT_URL)
    search(random.base64(3), client) do |id|
      ids << id
    end

    loop do
      if ids.empty?
        search(random.base64(3), client) do |id|
          ids << id
        end
      end

      begin
        id = ids[0]
        video = get_video(id, client, PG_DB)
      rescue ex
        STDOUT << id << " : " << ex.message << "\n"
        client = make_client(YT_URL)
        next
      ensure
        ids.delete(id)
      end

      rvs = [] of Hash(String, String)
      if video.info.has_key?("rvs")
        video.info["rvs"].split(",").each do |rv|
          rvs << HTTP::Params.parse(rv).to_h
        end
      end

      rvs.each do |rv|
        if rv.has_key?("id") && !PG_DB.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", rv["id"], as: Bool)
          ids.delete(id)
          ids << rv["id"]

          if ids.size == 150
            ids.shift
          end
        end
      end

      Fiber.yield
    end
  end
end
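
# Each channel-refresh fiber repeatedly claims an equal slice of the channels
# table, oldest first, and re-fetches those channels.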
channel_threads.times do |i|
  spawn do
    loop do
      query = "SELECT id FROM channels ORDER BY updated \
      LIMIT (SELECT count(*)/$2 FROM channels) \
      OFFSET (SELECT count(*)*$1/$2 FROM channels)"
      PG_DB.query(query, i, channel_threads) do |rs|
        rs.each do
          client = make_client(YT_URL)

          begin
            id = rs.read(String)
            channel = get_channel(id, client, PG_DB)
          rescue ex
            STDOUT << id << " : " << ex.message << "\n"
            client = make_client(YT_URL)
            next
          end
        end
      end

      Fiber.yield
    end
  end
end
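
# Video-refresh fibers work the same way over the videos table.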
video_threads.times do |i|
  spawn do
    loop do
      query = "SELECT id FROM videos ORDER BY updated \
      LIMIT (SELECT count(*)/$2 FROM videos) \
      OFFSET (SELECT count(*)*$1/$2 FROM videos)"
      PG_DB.query(query, i, video_threads) do |rs|
        rs.each do
          client = make_client(YT_URL)

          begin
            id = rs.read(String)
            video = get_video(id, client, PG_DB)
          rescue ex
            STDOUT << id << " : " << ex.message << "\n"
            client = make_client(YT_URL)
            next
          end
        end
      end

      Fiber.yield
    end
  end
end
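
# Periodically rank the most popular cached videos for the front page; when a
# DetectLanguage API key is configured, the `filter` flag is passed to
# rank_videos, which presumably uses it to filter results by detected
# language.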
top_videos = [] of Video

spawn do
  if CONFIG.dl_api_key
    DetectLanguage.configure do |config|
      config.api_key = CONFIG.dl_api_key.not_nil!
    end

    filter = true
  end
  filter ||= false

  loop do
    begin
      top = rank_videos(PG_DB, 40, filter, YT_URL)
    rescue ex
      next
    end

    if top.size > 0
      args = arg_array(top)
    else
      next
    end

    videos = [] of Video

    top.each do |id|
      client = make_client(YT_URL)

      begin
        videos << get_video(id, client, PG_DB)
      rescue ex
        next
      end
    end

    top_videos = videos
    Fiber.yield
  end
end

before_all do |env|
  if env.request.cookies.has_key?("SID")
    env.set "authorized", true

    sid = env.request.cookies["SID"].value
    env.set "sid", sid

    subscriptions = PG_DB.query_one?("SELECT subscriptions FROM users WHERE id = $1", sid, as: Array(String))
    subscriptions ||= [] of String
    env.set "subscriptions", subscriptions

    notifications = PG_DB.query_one?("SELECT cardinality(notifications) FROM users WHERE id = $1", sid, as: Int32)
    notifications ||= 0
    env.set "notifications", notifications
  end

  if env.request.cookies.has_key?("darktheme") && env.request.cookies["darktheme"].value == "true"
    env.set "darktheme", true
  end
end

get "/" do |env|
  templated "index"
end

get "/watch" do |env|
  authorized = env.get? "authorized"
  if authorized
    subscriptions = env.get("subscriptions").as(Array(String))
  end
  subscriptions ||= [] of String

  if env.params.query["v"]?
    id = env.params.query["v"]
  else
    next env.redirect "/"
  end

  if env.params.query["start"]?
    video_start = decode_time(env.params.query["start"])
  end

  if env.params.query["t"]?
    video_start = decode_time(env.params.query["t"])
  end
  video_start ||= 0

  if env.params.query["end"]?
    video_end = decode_time(env.params.query["end"])
  end
  video_end ||= -1

  if env.params.query["listen"]? && env.params.query["listen"] == "true"
    listen = true
    env.params.query.delete_all("listen")
  end
  listen ||= false

  client = make_client(YT_URL)
  begin
    video = get_video(id, client, PG_DB)
  rescue ex
    error_message = ex.message
    next templated "error"
  end

  fmt_stream = [] of HTTP::Params
  video.info["url_encoded_fmt_stream_map"].split(",") do |string|
    if !string.empty?
      fmt_stream << HTTP::Params.parse(string)
    end
  end

  fmt_stream.each { |s| s.add("label", "#{s["quality"]} - #{s["type"].split(";")[0].split("/")[1]}") }
  fmt_stream = fmt_stream.uniq { |s| s["label"] }

  adaptive_fmts = [] of HTTP::Params
  if video.info.has_key?("adaptive_fmts")
    video.info["adaptive_fmts"].split(",") do |string|
      adaptive_fmts << HTTP::Params.parse(string)
    end
  end

  if adaptive_fmts[0]? && adaptive_fmts[0]["s"]?
    adaptive_fmts.each do |fmt|
      fmt["url"] += "&signature=" + decrypt_signature(fmt["s"])
    end

    fmt_stream.each do |fmt|
      fmt["url"] += "&signature=" + decrypt_signature(fmt["s"])
    end
  end

  audio_streams = adaptive_fmts.compact_map { |s| s["type"].starts_with?("audio") ? s : nil }
  audio_streams.sort_by! { |s| s["bitrate"].to_i }.reverse!
  audio_streams.each do |stream|
    stream["bitrate"] = (stream["bitrate"].to_f64/1000).to_i.to_s
  end

  rvs = [] of Hash(String, String)
  if video.info.has_key?("rvs")
    video.info["rvs"].split(",").each do |rv|
      rvs << HTTP::Params.parse(rv).to_h
    end
  end

  rating = video.info["avg_rating"].to_f64
  engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)

  if video.likes > 0 || video.dislikes > 0
    calculated_rating = (video.likes.to_f/(video.likes.to_f + video.dislikes.to_f) * 4 + 1)
  end
  calculated_rating ||= 0.0

  if video.info["ad_slots"]?
    ad_slots = video.info["ad_slots"].split(",")
    ad_slots = ad_slots.join(", ")
  end

  if video.info["enabled_engage_types"]?
    engage_types = video.info["enabled_engage_types"].split(",")
    engage_types = engage_types.join(", ")
  end

  if video.info["ad_tag"]?
    ad_tag = URI.parse(video.info["ad_tag"])
    ad_query = HTTP::Params.parse(ad_tag.query.not_nil!)

    ad_category = URI.unescape(ad_query["iu"])
    # lchop drops the literal "/4061/" prefix (lstrip would strip any run of
    # those characters, mangling the category)
    ad_category = ad_category.lchop("/4061/").split(".")[-1]

    ad_query = HTTP::Params.parse(ad_query["scp"])

    k2 = URI.unescape(ad_query["k2"]).split(",")
    k2 = k2.join(", ")
  end

  reddit_client = make_client(REDDIT_URL)
  headers = HTTP::Headers{"User-Agent" => "web:invidio.us:v0.1.0 (by /u/omarroth)"}

  begin
    reddit_comments, reddit_thread = get_reddit_comments(id, reddit_client, headers)

    reddit_html = template_comments(reddit_comments)
    reddit_html = fill_links(reddit_html, "https", "www.reddit.com")
    reddit_html = add_alt_links(reddit_html)
  rescue ex
    reddit_thread = nil
    reddit_html = ""
  end

  video.description = fill_links(video.description, "https", "www.youtube.com")
  video.description = add_alt_links(video.description)

  thumbnail = "https://i.ytimg.com/vi/#{id}/mqdefault.jpg"

  templated "watch"
end
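
# /embed/:id repeats the stream-extraction logic of /watch above, but renders
# the bare embed template.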
get "/embed/:id" do |env|
  if env.params.url["id"]?
    id = env.params.url["id"]
  else
    next env.redirect "/"
  end

  if env.params.query["start"]?
    video_start = decode_time(env.params.query["start"])
  end

  if env.params.query["t"]?
    video_start = decode_time(env.params.query["t"])
  end
  video_start ||= 0

  if env.params.query["end"]?
    video_end = decode_time(env.params.query["end"])
  end
  video_end ||= -1

  if env.params.query["listen"]? && env.params.query["listen"] == "true"
    listen = true
    env.params.query.delete_all("listen")
  end
  listen ||= false

  client = make_client(YT_URL)
  begin
    video = get_video(id, client, PG_DB)
  rescue ex
    error_message = ex.message
    next templated "error"
  end

  fmt_stream = [] of HTTP::Params
  video.info["url_encoded_fmt_stream_map"].split(",") do |string|
    if !string.empty?
      fmt_stream << HTTP::Params.parse(string)
    end
  end

  fmt_stream.each { |s| s.add("label", "#{s["quality"]} - #{s["type"].split(";")[0].split("/")[1]}") }
  fmt_stream = fmt_stream.uniq { |s| s["label"] }

  adaptive_fmts = [] of HTTP::Params
  if video.info.has_key?("adaptive_fmts")
    video.info["adaptive_fmts"].split(",") do |string|
      adaptive_fmts << HTTP::Params.parse(string)
    end
  end

  if adaptive_fmts[0]? && adaptive_fmts[0]["s"]?
    adaptive_fmts.each do |fmt|
      fmt["url"] += "&signature=" + decrypt_signature(fmt["s"])
    end

    fmt_stream.each do |fmt|
      fmt["url"] += "&signature=" + decrypt_signature(fmt["s"])
    end
  end

  audio_streams = adaptive_fmts.compact_map { |s| s["type"].starts_with?("audio") ? s : nil }
  audio_streams.sort_by! { |s| s["bitrate"].to_i }.reverse!
  audio_streams.each do |stream|
    stream["bitrate"] = (stream["bitrate"].to_f64/1000).to_i.to_s
  end

  thumbnail = "https://i.ytimg.com/vi/#{id}/mqdefault.jpg"

  rendered "embed"
end

get "/search" do |env|
  if env.params.query["q"]?
    query = env.params.query["q"]
  else
    next env.redirect "/"
  end

  page = env.params.query["page"]?.try &.to_i
  page ||= 1

  client = make_client(YT_URL)

  # "sp" is an opaque YouTube search-filter blob (here it appears to restrict
  # results to videos)
  html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=EgIQAVAU").body
  html = XML.parse_html(html)

  videos = [] of Video

  html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
    root = item.xpath_node(%q(div[contains(@class,"yt-lockup-video")]/div))
    if root
      id = root.xpath_node(%q(div[contains(@class,"yt-lockup-thumbnail")]/a/@href))
      if id
        id = id.content.lchop("/watch?v=")
      end
      id ||= ""

      title = root.xpath_node(%q(div[@class="yt-lockup-content"]/h3/a))
      if title
        title = title.content
      end
      title ||= ""

      author = root.xpath_node(%q(div[@class="yt-lockup-content"]/div/a))
      if author
        ucid = author["href"].rpartition("/")[-1]
        author = author.content
      end
      author ||= ""
      ucid ||= ""

      video = Video.new(id, HTTP::Params.parse(""), Time.now, title, 0_i64, 0, 0, 0.0, Time.now, "", nil, author, ucid)
      videos << video
    end
  end

  templated "search"
end

get "/login" do |env|
  referer = env.request.headers["referer"]?
  referer ||= "/feed/subscriptions"

  tfa = env.params.query["tfa"]?
  tfa ||= false

  if referer.ends_with? "/login"
    referer = "/feed/subscriptions"
  end

  if referer.size > 32
    referer = "/feed/subscriptions"
  end

  templated "login"
end

# See https://github.com/rg3/youtube-dl/blob/master/youtube_dl/extractor/youtube.py#L79
post "/login" do |env|
  referer = env.params.query["referer"]?
  referer ||= "/feed/subscriptions"

  email = env.params.body["email"]?
  password = env.params.body["password"]?
  tfa_code = env.params.body["tfa"]?.try &.lchop("G-")

  begin
    client = make_client(LOGIN_URL)
    headers = HTTP::Headers.new
    headers["Content-Type"] = "application/x-www-form-urlencoded;charset=utf-8"
    headers["Google-Accounts-XSRF"] = "1"

    login_page = client.get("/ServiceLogin")
    headers = login_page.cookies.add_request_headers(headers)

    login_page = XML.parse_html(login_page.body)

    inputs = {} of String => String
    login_page.xpath_nodes(%q(//input[@type="submit"])).each do |node|
      name = node["id"]? || node["name"]?
      name ||= ""
      value = node["value"]?
      value ||= ""

      if name != "" && value != ""
        inputs[name] = value
      end
    end

    login_page.xpath_nodes(%q(//input[@type="hidden"])).each do |node|
      name = node["id"]? || node["name"]?
      name ||= ""
      value = node["value"]?
      value ||= ""

      if name != "" && value != ""
        inputs[name] = value
      end
    end

    lookup_req = %(["#{email}",null,[],null,"US",null,null,2,false,true,[null,null,[2,1,null,1,"https://accounts.google.com/ServiceLogin?passive=1209600&continue=https%3A%2F%2Faccounts.google.com%2FManageAccount&followup=https%3A%2F%2Faccounts.google.com%2FManageAccount",null,[],4,[]],1,[null,null,[]],null,null,null,true],"#{email}"])

    lookup_results = client.post("/_/signin/sl/lookup", headers, login_req(inputs, lookup_req))
    headers = lookup_results.cookies.add_request_headers(headers)

    # Responses carry Google's anti-XSSI prefix ()]}' plus a newline), so
    # strip the first five characters before parsing
    lookup_results = lookup_results.body
    lookup_results = lookup_results[5..-1]
    lookup_results = JSON.parse(lookup_results)

    user_hash = lookup_results[0][2]

    challenge_req = %(["#{user_hash}",null,1,null,[1,null,null,null,["#{password}",null,true]],[null,null,[2,1,null,1,"https://accounts.google.com/ServiceLogin?passive=1209600&continue=https%3A%2F%2Faccounts.google.com%2FManageAccount&followup=https%3A%2F%2Faccounts.google.com%2FManageAccount",null,[],4,[]],1,[null,null,[]],null,null,null,true]])

    challenge_results = client.post("/_/signin/sl/challenge", headers, login_req(inputs, challenge_req))
    headers = challenge_results.cookies.add_request_headers(headers)

    challenge_results = challenge_results.body
    challenge_results = challenge_results[5..-1]
    challenge_results = JSON.parse(challenge_results)

    headers["Cookie"] = URI.unescape(headers["Cookie"])

    if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
      error_message = "Incorrect password"
      next templated "error"
    end

    if challenge_results[0][-1][0].as_a?
      tfa = challenge_results[0][-1][0][0]

      if tfa[2] == "TWO_STEP_VERIFICATION"
        if tfa[5] == "QUOTA_EXCEEDED"
          error_message = "Quota exceeded, try again in a few hours"
          next templated "error"
        end

        if !tfa_code
          next env.redirect "/login?tfa=true"
        end

        tl = challenge_results[1][2]

        request_type = tfa[8]
        case request_type
        when 6
          # Authenticator app
          tfa_req = %(["#{user_hash}",null,2,null,[6,null,null,null,null,["#{tfa_code}",false]]])
        when 9
          # Voice or text message
          tfa_req = %(["#{user_hash}",null,2,null,[9,null,null,null,null,null,null,null,[null,"#{tfa_code}",false,2]]])
        else
          error_message = "Unable to log in, make sure two-factor authentication (Authenticator or SMS) is enabled."
          next templated "error"
        end

        challenge_results = client.post("/_/signin/challenge?hl=en&TL=#{tl}", headers, login_req(inputs, tfa_req))
        headers = challenge_results.cookies.add_request_headers(headers)

        challenge_results = challenge_results.body
        challenge_results = challenge_results[5..-1]
        challenge_results = JSON.parse(challenge_results)

        if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
          error_message = "Invalid TFA code"
          next templated "error"
        end
      end
    end

    login_res = challenge_results[0][13][2].to_s

    login = client.get(login_res, headers)
    headers = login.cookies.add_request_headers(headers)

    login = client.get(login.headers["Location"], headers)

    headers = HTTP::Headers.new
    headers = login.cookies.add_request_headers(headers)

    sid = login.cookies["SID"].value

    client = make_client(YT_URL)
    user = get_user(sid, client, headers, PG_DB)

    # We are now logged in

    host = URI.parse(env.request.headers["Host"]).host

    login.cookies.each do |cookie|
      cookie.secure = false
      cookie.extension = cookie.extension.not_nil!.gsub(".youtube.com", host)
      cookie.extension = cookie.extension.not_nil!.gsub("Secure; ", "")
    end

    login.cookies.add_response_headers(env.response.headers)

    env.redirect referer
  rescue ex
    error_message = "Login failed. This may be because two-factor authentication is not enabled on your account."
    next templated "error"
  end
end

get "/signout" do |env|
  referer = env.request.headers["referer"]?
  referer ||= "/"

  env.request.cookies.each do |cookie|
    cookie.expires = Time.new(1990, 1, 1)
  end

  env.request.cookies.add_response_headers(env.response.headers)
  env.redirect referer
end

get "/redirect" do |env|
  if env.params.query["q"]?
    env.redirect env.params.query["q"]
  else
    env.redirect "/"
  end
end
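
# Serve a DASH manifest (MPD) for the given video so players can stream
# adaptively. With ?local=true, stream URLs are rewritten to point back at
# this server instead of googlevideo.com.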
get "/api/manifest/dash/id/:id" do |env|
  env.response.headers.add("Access-Control-Allow-Origin", "*")
  env.response.content_type = "application/dash+xml"

  local = env.params.query["local"]?.try &.== "true"
  id = env.params.url["id"]

  client = make_client(YT_URL)
  begin
    video = get_video(id, client, PG_DB)
  rescue ex
    halt env, status_code: 403
  end

  if video.info["dashmpd"]?
    manifest = client.get(video.info["dashmpd"]).body

    manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
      url = baseurl.lchop("<BaseURL>")
      url = url.rchop("</BaseURL>")

      if local
        if Kemal.config.ssl
          scheme = "https://"
        end
        scheme ||= "http://"

        url = scheme + env.request.headers["Host"] + URI.parse(url).full_path
      end

      "<BaseURL>#{url}</BaseURL>"
    end

    next manifest
  end

  adaptive_fmts = [] of HTTP::Params
  if video.info.has_key?("adaptive_fmts")
    video.info["adaptive_fmts"].split(",") do |string|
      adaptive_fmts << HTTP::Params.parse(string)
    end
  else
    halt env, status_code: 403
  end

  if local
    adaptive_fmts.each do |fmt|
      if Kemal.config.ssl
        scheme = "https://"
      end
      scheme ||= "http://"

      fmt["url"] = scheme + env.request.headers["Host"] + URI.parse(fmt["url"]).full_path
    end
  end

  if adaptive_fmts[0]? && adaptive_fmts[0]["s"]?
    adaptive_fmts.each do |fmt|
      fmt["url"] += "&signature=" + decrypt_signature(fmt["s"])
    end
  end

  video_streams = adaptive_fmts.compact_map { |s| s["type"].starts_with?("video/mp4") ? s : nil }
  audio_streams = adaptive_fmts.compact_map { |s| s["type"].starts_with?("audio/mp4") ? s : nil }
  audio_streams.sort_by! { |s| s["bitrate"].to_i }.reverse!
  audio_streams.each do |fmt|
    fmt["bitrate"] = (fmt["bitrate"].to_f64/1000).to_i.to_s
  end

  manifest = XML.build(indent: "  ", encoding: "UTF-8") do |xml|
    xml.element("MPD", "xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance", "xmlns": "urn:mpeg:DASH:schema:MPD:2011",
      "xmlns:yt": "http://youtube.com/yt/2012/10/10", "xsi:schemaLocation": "urn:mpeg:DASH:schema:MPD:2011 DASH-MPD.xsd",
      minBufferTime: "PT1.5S", profiles: "urn:mpeg:dash:profile:isoff-main:2011", type: "static",
      mediaPresentationDuration: "PT#{video.info["length_seconds"]}S") do
      xml.element("Period") do
        xml.element("AdaptationSet", id: 0, mimeType: "audio/mp4", subsegmentAlignment: true) do
          xml.element("Role", schemeIdUri: "urn:mpeg:DASH:role:2011", value: "main")
          audio_streams.each do |fmt|
            mimetype, codecs = fmt["type"].split(";")
            codecs = codecs[9..-2]
            fmt_type = mimetype.split("/")[0]
            bandwidth = fmt["bitrate"]
            itag = fmt["itag"]
            url = fmt["url"]
            # Flatten the URL so it survives as a path under /videoplayback/
            url = url.gsub("?", "/")
            url = url.gsub("&", "/")
            url = url.gsub("=", "/")

            xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
              xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011", value: "2")
              xml.element("BaseURL") { xml.text url }
              # "index" is the byte range of the sidx box; "init" is the range
              # of the initialization segment
              xml.element("SegmentBase", indexRange: fmt["index"]) do
                xml.element("Initialization", range: fmt["init"])
              end
            end
          end
        end

        xml.element("AdaptationSet", id: 1, mimeType: "video/mp4", subsegmentAlignment: true) do
          xml.element("Role", schemeIdUri: "urn:mpeg:DASH:role:2011", value: "main")
          video_streams.each do |fmt|
            mimetype, codecs = fmt["type"].split(";")
            codecs = codecs[9..-2]
            bandwidth = fmt["bitrate"]
            itag = fmt["itag"]
            url = fmt["url"]
            url = url.gsub("?", "/")
            url = url.gsub("&", "/")
            url = url.gsub("=", "/")

            # "size" is "WIDTHxHEIGHT"
            width, height = fmt["size"].split("x")

            xml.element("Representation", id: itag, codecs: codecs, width: width, startWithSAP: "1", maxPlayoutRate: "1",
              height: height, bandwidth: bandwidth, frameRate: fmt["fps"]) do
              xml.element("BaseURL") { xml.text url }
              xml.element("SegmentBase", indexRange: fmt["index"]) do
                xml.element("Initialization", range: fmt["init"])
              end
            end
          end
        end
      end
    end
  end

  # Work around stray characters that XML.build has been observed to append
  # to the encoding attribute
  manifest = manifest.gsub(%(<?xml version="1.0" encoding="UTF-8U"?>), %(<?xml version="1.0" encoding="UTF-8"?>))
  manifest = manifest.gsub(%(<?xml version="1.0" encoding="UTF-8V"?>), %(<?xml version="1.0" encoding="UTF-8"?>))

  manifest
end

# Get subscriptions for authorized user
get "/feed/subscriptions" do |env|
  authorized = env.get? "authorized"

  if authorized
    max_results = env.params.query["maxResults"]?.try &.to_i || 40

    page = env.params.query["page"]?.try &.to_i
    page ||= 1

    if max_results < 0
      limit = nil
      offset = (page - 1) * 1
    else
      limit = max_results
      offset = (page - 1) * max_results
    end

    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]

    sid = env.get("sid").as(String)

    client = make_client(YT_URL)
    user = get_user(sid, client, headers, PG_DB)

    args = arg_array(user.subscriptions, 3)
    videos = PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid IN (#{args}) \
    ORDER BY published DESC LIMIT $1 OFFSET $2", [limit, offset] + user.subscriptions, as: ChannelVideo)

    notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email, as: Array(String))
    notifications = videos.select { |v| notifications.includes? v.id }
    videos = videos - notifications

    if !limit
      videos = videos[0..max_results]
    end

    PG_DB.exec("UPDATE users SET notifications = $1 WHERE id = $2", [] of String, sid)
    env.set "notifications", 0

    templated "subscriptions"
  else
    env.redirect "/"
  end
end

# Helper route for channels that don't have the "bell" dinged. Request
# parameters are fairly self-explanatory: receive_all_updates = true and
# receive_post_updates = true will ding all channels, and calling
# /modify_notifications without any arguments will request all notifications
# from all channels.
# /modify_notifications?receive_all_updates=false&receive_no_updates=false
# will "unding" all subscriptions.
get "/modify_notifications" do |env|
  authorized = env.get? "authorized"

  referer = env.request.headers["referer"]?
  referer ||= "/"

  if authorized
    channel_req = {} of String => String

    channel_req["receive_all_updates"] = env.params.query["receive_all_updates"]? || "true"
    channel_req["receive_no_updates"] = env.params.query["receive_no_updates"]? || ""
    channel_req["receive_post_updates"] = env.params.query["receive_post_updates"]? || "true"

    channel_req.reject! { |k, v| v != "true" && v != "false" }

    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]

    client = make_client(YT_URL)
    subs = client.get("/subscription_manager?disable_polymer=1", headers)
    headers["Cookie"] += "; " + subs.cookies.add_request_headers(headers)["Cookie"]
    match = subs.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
    if match
      session_token = match["session_token"]
    else
      next env.redirect referer
    end

    channel_req["session_token"] = session_token

    headers["content-type"] = "application/x-www-form-urlencoded"
    subs = XML.parse_html(subs.body)
    subs.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
      channel_id = channel.content.lchop("/channel/")
      channel_req["channel_id"] = channel_id

      client.post("/subscription_ajax?action_update_subscription_preferences=1", headers, HTTP::Params.encode(channel_req)).body
    end
  end

  env.redirect referer
end

get "/subscription_manager" do |env|
  authorized = env.get? "authorized"
  if !authorized
    next env.redirect "/"
  end

  subscriptions = env.get?("subscriptions").as(Array(String))
  subscriptions ||= [] of String

  client = make_client(YT_URL)
  subscriptions = subscriptions.map do |ucid|
    get_channel(ucid, client, PG_DB, false)
  end

  subscriptions.sort_by! { |channel| channel.author.downcase }

  templated "subscription_manager"
end

get "/subscription_ajax" do |env|
  authorized = env.get? "authorized"

  referer = env.request.headers["referer"]?
  referer ||= "/"

  if authorized
    if env.params.query["action_create_subscription_to_channel"]?
      action = "action_create_subscription_to_channel"
    elsif env.params.query["action_remove_subscriptions"]?
      action = "action_remove_subscriptions"
    else
      next env.redirect referer
    end

    channel_id = env.params.query["c"]?
    channel_id ||= ""

    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]

    client = make_client(YT_URL)
    subs = client.get("/subscription_manager?disable_polymer=1", headers)
    headers["Cookie"] += "; " + subs.cookies.add_request_headers(headers)["Cookie"]
    match = subs.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
    if match
      session_token = match["session_token"]
    else
      next env.redirect "/"
    end

    headers["content-type"] = "application/x-www-form-urlencoded"

    post_req = {
      "session_token" => session_token,
    }
    post_req = HTTP::Params.encode(post_req)
    post_url = "/subscription_ajax?#{action}=1&c=#{channel_id}"

    # Update user
    if client.post(post_url, headers, post_req).status_code == 200
      sid = env.get("sid").as(String)

      case action
      when .starts_with? "action_create"
        PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE id = $2", channel_id, sid)
      when .starts_with? "action_remove"
        PG_DB.exec("UPDATE users SET subscriptions = array_remove(subscriptions,$1) WHERE id = $2", channel_id, sid)
      end
    end
  end

  env.redirect referer
end

get "/modify_theme" do |env|
  referer = env.request.headers["referer"]?
  referer ||= "/"

  if env.params.query["dark"]?
    env.response.cookies["darktheme"] = "true"
  elsif env.params.query["light"]?
    env.request.cookies["darktheme"].expires = Time.new(1990, 1, 1)
    env.request.cookies.add_response_headers(env.response.headers)
  end

  env.redirect referer
end
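
# Proxy for googlevideo.com streams. Accepts both the usual query-string form
# (/videoplayback?...) and the slash-delimited form emitted by the DASH
# manifest above (/videoplayback/key/value/...).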
get "/videoplayback*" do |env|
  path = env.request.path
  if path != "/videoplayback"
    path = path.lchop("/videoplayback/")
    path = path.rchop("/")

    path = path.split("/")

    raw_params = {} of String => Array(String)
    path.each_slice(2) do |pair|
      key, value = pair
      value = URI.unescape(value)

      if raw_params[key]?
        raw_params[key] << value
      else
        raw_params[key] = [value]
      end
    end

    query_params = HTTP::Params.new(raw_params)
  else
    query_params = env.params.query
  end

  fvip = query_params["fvip"]
  mn = query_params["mn"].split(",")[0]
  host = "https://r#{fvip}---#{mn}.googlevideo.com"
  url = "/videoplayback?#{query_params.to_s}"

  client = make_client(URI.parse(host))
  response = client.head(url)

  headers = env.request.headers
  headers.delete("Host")
  headers.delete("Cookie")
  headers.delete("User-Agent")
  headers.delete("Referer")

  client.get(url, headers) do |response|
    if response.headers["Location"]?
      url = URI.parse(response.headers["Location"])
      env.redirect url.full_path
    else
      env.response.status_code = response.status_code

      response.headers.each do |key, value|
        env.response.headers[key] = value
      end

      env.response.headers["Access-Control-Allow-Origin"] = "*"

      # Bytes[8] would be a single byte with value 8; allocate a real buffer,
      # write only the bytes actually read, and stop at EOF
      buffer = Bytes.new(4096)
      loop do
        count = response.body_io.read(buffer)
        break if count == 0

        begin
          env.response.write(buffer[0, count])
          env.response.flush
        rescue ex
          break
        end
      end
    end
  end
end

get "/user/:user" do |env|
  user = env.params.url["user"]
  env.redirect "/channel/#{user}"
end

get "/channel/:ucid" do |env|
  authorized = env.get? "authorized"
  if authorized
    sid = env.get("sid").as(String)

    subscriptions = PG_DB.query_one?("SELECT subscriptions FROM users WHERE id = $1", sid, as: Array(String))
  end
  subscriptions ||= [] of String

  ucid = env.params.url["ucid"]

  page = env.params.query["page"]?.try &.to_i
  page ||= 1

  client = make_client(YT_URL)

  if !ucid.starts_with? "UC"
    rss = client.get("/feeds/videos.xml?user=#{ucid}").body
    rss = XML.parse_html(rss)

    ucid = rss.xpath_node("//feed/channelid").not_nil!.content
    # Stop here so the rest of the handler isn't run with the old identifier
    next env.redirect "/channel/#{ucid}"
  end

  url = produce_playlist_url(ucid, (page - 1) * 100)
  response = client.get(url)

  json = JSON.parse(response.body)
  document = XML.parse_html(json["content_html"].as_s)
  author = document.xpath_node(%q(//div[@class="pl-video-owner"]/a)).not_nil!.content

  videos = [] of ChannelVideo
  document.xpath_nodes(%q(//a[contains(@class,"pl-video-title-link")])).each do |item|
    href = URI.parse(item["href"])
    id = HTTP::Params.parse(href.query.not_nil!)["v"]
    title = item.content

    videos << ChannelVideo.new(id, title, Time.now, Time.now, ucid, author)
  end

  templated "channel"
end

options "/videoplayback*" do |env|
  env.response.headers["Access-Control-Allow-Origin"] = "*"
  env.response.headers["Access-Control-Allow-Methods"] = "GET"
  env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, range"
end

error 404 do |env|
  error_message = "404 Page not found"
  templated "error"
end

error 500 do |env|
  error_message = "500 Server error"
  templated "error"
end

# Add redirect if SSL is enabled
if Kemal.config.ssl
  spawn do
    server = HTTP::Server.new do |context|
      redirect_url = "https://#{context.request.host}#{context.request.path}"
      if context.request.query
        redirect_url += "?#{context.request.query}"
      end

      context.response.headers.add("Location", redirect_url)
      context.response.status_code = 301
    end

    server.bind_tcp "0.0.0.0", 80
    server.listen
  end

  before_all do |env|
    env.response.headers.add("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload")
  end
end

static_headers do |response, filepath, filestat|
  response.headers.add("Cache-Control", "max-age=86400")
end

public_folder "assets"

add_handler FilteredCompressHandler.new
add_context_storage_type(Array(String))

Kemal.run