Add /feed/trending

Omar Roth 2018-11-20 11:18:12 -06:00
parent aeaeacbf8d
commit 2e99642173
3 changed files with 79 additions and 5 deletions

@@ -1590,6 +1590,20 @@ end
 
 # Feeds
 
+get "/feed/trending" do |env|
+  trending_type = env.params.query["type"]?
+  region = env.params.query["region"]?
+
+  begin
+    trending = fetch_trending(trending_type, proxies, region)
+  rescue ex
+    error_message = "#{ex.message}"
+    next templated "error"
+  end
+
+  templated "trending"
+end
+
 get "/feed/subscriptions" do |env|
   user = env.get? "user"
   referer = get_referer(env)
@@ -2467,14 +2481,19 @@ get "/api/v1/videos/:id" do |env|
 end
 
 get "/api/v1/trending" do |env|
-  client = make_client(YT_URL)
-  trending = client.get("/feed/trending?disable_polymer=1").body
+  region = env.params.query["region"]?
+  trending_type = env.params.query["type"]?
 
-  trending = XML.parse_html(trending)
+  begin
+    trending = fetch_trending(trending_type, proxies, region)
+  rescue ex
+    error_message = {"error" => ex.message}.to_json
+    halt env, status_code: 500, response: error_message
+  end
+
   videos = JSON.build do |json|
     json.array do
-      nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
-      extract_videos(nodeset).each do |video|
+      trending.each do |video|
         json.object do
           json.field "title", video.title
           json.field "videoId", video.id
@@ -2493,6 +2512,9 @@ get "/api/v1/trending" do |env|
           json.field "publishedText", "#{recode_date(video.published)} ago"
           json.field "description", video.description
           json.field "descriptionHtml", video.description_html
+          json.field "liveNow", video.live_now
+          json.field "paid", video.paid
+          json.field "premium", video.premium
         end
       end
     end
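
For reference, a rough client-side sketch of calling the updated JSON endpoint (not part of the commit; the instance URL, port, and parameter values are placeholders):

require "http/client"
require "json"

# Query the trending feed of a hypothetical local instance and print
# the fields added/kept by this commit.
response = HTTP::Client.get("http://localhost:3000/api/v1/trending?type=Gaming&region=DE")
JSON.parse(response.body).as_a.each do |video|
  puts "#{video["title"]} (#{video["videoId"]}), live now: #{video["liveNow"]}"
end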

src/invidious/trending.cr (new file, 41 lines)

@@ -0,0 +1,41 @@
+def fetch_trending(trending_type, proxies, region)
+  client = make_client(YT_URL)
+  headers = HTTP::Headers.new
+  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
+
+  region ||= "US"
+  region = region.upcase
+
+  trending = ""
+  if trending_type
+    trending_type = trending_type.downcase.capitalize
+
+    response = client.get("/feed/trending?gl=#{region}&hl=en", headers).body
+
+    yt_data = response.match(/window\["ytInitialData"\] = (?<data>.*);/)
+    if yt_data
+      yt_data = JSON.parse(yt_data["data"].rchop(";"))
+    else
+      raise "Could not pull trending pages."
+    end
+
+    tabs = yt_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a
+    url = tabs.select { |tab| tab["channelListSubMenuAvatarRenderer"]["title"]["simpleText"] == trending_type }[0]?
+
+    if url
+      url = url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
+      url += "&disable_polymer=1&gl=#{region}&hl=en"
+      trending = client.get(url).body
+    else
+      trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+    end
+  else
+    trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+  end
+
+  trending = XML.parse_html(trending)
+  nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
+  trending = extract_videos(nodeset)
+
+  return trending
+end
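
A minimal usage sketch of the helper above (not part of the diff); `proxies` is whatever collection the rest of the codebase already passes to fetch routines, and the type/region values are examples:

# fetch_trending normalizes its arguments ("music" -> "Music", "gb" -> "GB")
# and returns the array of videos produced by extract_videos.
videos = fetch_trending("music", proxies, "gb")
videos.each do |video|
  puts "#{video.title} (#{video.id})"
end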

@@ -0,0 +1,11 @@
+<% content_for "header" do %>
+<title>Trending - Invidious</title>
+<% end %>
+
+<div class="pure-g">
+  <% trending.each_slice(4) do |slice| %>
+    <% slice.each do |item| %>
+      <%= rendered "components/item" %>
+    <% end %>
+  <% end %>
+</div>
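
The template's `each_slice(4)` simply chunks the trending videos into rows of four for the pure-g grid. A tiny illustration of the pattern, using hypothetical titles rather than real data:

titles = ["a", "b", "c", "d", "e", "f"]
titles.each_slice(4) do |row|
  puts row.join(" | ")  # => "a | b | c | d", then "e | f"
end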