diff --git a/src/invidious.cr b/src/invidious.cr
index 913fb5f4..3129f6d2 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -2242,6 +2242,8 @@ get "/channel/:ucid" do |env|
   page = env.params.query["page"]?.try &.to_i?
   page ||= 1
 
+  continuation = env.params.query["continuation"]?
+
   sort_by = env.params.query["sort_by"]?.try &.downcase
   sort_by ||= "newest"
 
@@ -2260,8 +2262,14 @@ get "/channel/:ucid" do |env|
     end
   end
 
-  videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
-  videos.select! { |video| !video.paid }
+  if auto_generated
+    items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
+    items.select! { |item| item.is_a?(SearchPlaylist) && !item.videos.empty? }
+    items = items.map { |item| item.as(SearchPlaylist) }
+  else
+    items, count = get_60_videos(ucid, page, auto_generated, sort_by)
+    items.select! { |item| !item.paid }
+  end
 
   templated "channel"
 end
@@ -2899,11 +2907,16 @@ get "/api/v1/channels/:ucid" do |env|
   end
 
   page = 1
-  begin
-    videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
-  rescue ex
-    error_message = {"error" => ex.message}.to_json
-    halt env, status_code: 500, response: error_message
+  if auto_generated
+    videos = [] of SearchVideo
+    count = 0
+  else
+    begin
+      videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
+    rescue ex
+      error_message = {"error" => ex.message}.to_json
+      halt env, status_code: 500, response: error_message
+    end
   end
 
   client = make_client(YT_URL)
@@ -3006,6 +3019,7 @@ get "/api/v1/channels/:ucid" do |env|
       json.field "joined", joined.to_unix
       json.field "paid", paid
 
+      json.field "autoGenerated", auto_generated
      json.field "isFamilyFriendly", is_family_friendly
       json.field "description", description
       json.field "descriptionHtml", description_html
@@ -3225,71 +3239,7 @@ end
     halt env, status_code: 500, response: error_message
   end
 
-  client = make_client(YT_URL)
-
-  if continuation
-    url = produce_channel_playlists_url(ucid, continuation, sort_by, auto_generated)
-
-    response = client.get(url)
-    json = JSON.parse(response.body)
-
-    if json["load_more_widget_html"].as_s.empty?
-      response = {
-        "playlists"    => [] of String,
-        "continuation" => nil,
-      }
-
-      if env.params.query["pretty"]? && env.params.query["pretty"] == "1"
-        response = response.to_pretty_json
-      else
-        response = response.to_json
-      end
-
-      halt env, status_code: 200, response: response
-    end
-
-    continuation = XML.parse_html(json["load_more_widget_html"].as_s)
-    continuation = continuation.xpath_node(%q(//button[@data-uix-load-more-href]))
-    if continuation
-      continuation = extract_channel_playlists_cursor(continuation["data-uix-load-more-href"], auto_generated)
-    end
-
-    html = XML.parse_html(json["content_html"].as_s)
-    nodeset = html.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-  else
-    url = "/channel/#{ucid}/playlists?disable_polymer=1&flow=list"
-
-    if auto_generated
-      url += "&view=50"
-    else
-      url += "&view=1"
-    end
-
-    case sort_by
-    when "last", "last_added"
-      #
-    when "oldest", "oldest_created"
-      url += "&sort=da"
-    when "newest", "newest_created"
-      url += "&sort=dd"
-    end
-
-    response = client.get(url)
-    html = XML.parse_html(response.body)
-
-    continuation = html.xpath_node(%q(//button[@data-uix-load-more-href]))
-    if continuation
-      continuation = extract_channel_playlists_cursor(continuation["data-uix-load-more-href"], auto_generated)
-    end
-
-    nodeset = html.xpath_nodes(%q(//ul[@id="browse-items-primary"]/li[contains(@class, "feed-item-container")]))
-  end
-
-  if auto_generated
-    items = extract_shelf_items(nodeset, ucid, author)
-  else
-    items = extract_items(nodeset, ucid, author)
-  end
+  items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
 
   response = JSON.build do |json|
     json.object do
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index d852e517..25a7ce77 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -50,13 +50,11 @@ def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, ma
 end
 
 def get_channel(id, db, refresh = true, pull_all_videos = true)
-  client = make_client(YT_URL)
-
   if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
     channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
 
     if refresh && Time.now - channel.updated > 10.minutes
-      channel = fetch_channel(id, client, db, pull_all_videos: pull_all_videos)
+      channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
       channel_array = channel.to_a
       args = arg_array(channel_array)
 
@@ -64,7 +62,7 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
         ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", channel_array)
     end
   else
-    channel = fetch_channel(id, client, db, pull_all_videos: pull_all_videos)
+    channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
     channel_array = channel.to_a
     args = arg_array(channel_array)
 
@@ -74,7 +72,9 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
   return channel
 end
 
-def fetch_channel(ucid, client, db, pull_all_videos = true, locale = nil)
+def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+  client = make_client(YT_URL)
+
   rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
   rss = XML.parse_html(rss)
 
@@ -193,6 +193,65 @@ def fetch_channel(ucid, client, db, pull_all_videos = true, locale = nil)
   return channel
 end
 
+def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
+  client = make_client(YT_URL)
+
+  if continuation
+    url = produce_channel_playlists_url(ucid, continuation, sort_by, auto_generated)
+
+    response = client.get(url)
+    json = JSON.parse(response.body)
+
+    if json["load_more_widget_html"].as_s.empty?
+      return [] of SearchItem, nil
+    end
+
+    continuation = XML.parse_html(json["load_more_widget_html"].as_s)
+    continuation = continuation.xpath_node(%q(//button[@data-uix-load-more-href]))
+    if continuation
+      continuation = extract_channel_playlists_cursor(continuation["data-uix-load-more-href"], auto_generated)
+    end
+
+    html = XML.parse_html(json["content_html"].as_s)
+    nodeset = html.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
+  else
+    url = "/channel/#{ucid}/playlists?disable_polymer=1&flow=list"
+
+    if auto_generated
+      url += "&view=50"
+    else
+      url += "&view=1"
+    end
+
+    case sort_by
+    when "last", "last_added"
+      #
+    when "oldest", "oldest_created"
+      url += "&sort=da"
+    when "newest", "newest_created"
+      url += "&sort=dd"
+    end
+
+    response = client.get(url)
+    html = XML.parse_html(response.body)
+
+    continuation = html.xpath_node(%q(//button[@data-uix-load-more-href]))
+    if continuation
+      continuation = extract_channel_playlists_cursor(continuation["data-uix-load-more-href"], auto_generated)
+    end
+
+    nodeset = html.xpath_nodes(%q(//ul[@id="browse-items-primary"]/li[contains(@class, "feed-item-container")]))
+  end
+
+  if auto_generated
+    items = extract_shelf_items(nodeset, ucid, author)
+  else
+    items = extract_items(nodeset, ucid, author)
+  end
+
+  return items, continuation
+end
+
 def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest")
   if auto_generated
     seed = Time.unix(1525757349)
diff --git a/src/invidious/jobs.cr b/src/invidious/jobs.cr
index 52bf143d..721edd54 100644
--- a/src/invidious/jobs.cr
+++ b/src/invidious/jobs.cr
@@ -68,8 +68,7 @@ def refresh_channels(db, logger, max_threads = 1, full_refresh = false)
           active_threads += 1
           spawn do
             begin
-              client = make_client(YT_URL)
-              channel = fetch_channel(id, client, db, full_refresh)
+              channel = fetch_channel(id, db, full_refresh)
 
               db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.now, channel.author, id)
             rescue ex
diff --git a/src/invidious/views/channel.ecr b/src/invidious/views/channel.ecr
index 53b71b6f..65c289c5 100644
--- a/src/invidious/views/channel.ecr
+++ b/src/invidious/views/channel.ecr
@@ -46,7 +46,7 @@
-<% videos.each_slice(4) do |slice| %>
+<% items.each_slice(4) do |slice| %>
     <% slice.each do |item| %>
         <%= rendered "components/item" %>
     <% end %>