+ END_HTML
+ else
+ html << <<-END_HTML
+
+
+ END_HTML
+ end
+
+ html << <<-END_HTML
+
+ [ − ]
+ #{child.author}
+ #{translate_count(locale, "comments_points_count", child.score, NumberFormatting::Separator)}
+ #{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}
+ #{translate(locale, "permalink")}
+
+
+ #{body_html}
+ #{replies_html}
+
+
+
+ END_HTML
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/frontend/comments_youtube.cr b/src/invidious/frontend/comments_youtube.cr
new file mode 100644
index 00000000..a0e1d783
--- /dev/null
+++ b/src/invidious/frontend/comments_youtube.cr
@@ -0,0 +1,208 @@
+module Invidious::Frontend::Comments
+ extend self
+
+ def template_youtube(comments, locale, thin_mode, is_replies = false)
+ String.build do |html|
+ root = comments["comments"].as_a
+ root.each do |child|
+ if child["replies"]?
+ replies_count_text = translate_count(locale,
+ "comments_view_x_replies",
+ child["replies"]["replyCount"].as_i64 || 0,
+ NumberFormatting::Separator
+ )
+
+ replies_html = <<-END_HTML
+
+ END_HTML
+ elsif comments["authorId"]? && !comments["singlePost"]?
+ # for posts we should display a link to the post
+ replies_count_text = translate_count(locale,
+ "comments_view_x_replies",
+ child["replyCount"].as_i64 || 0,
+ NumberFormatting::Separator
+ )
+
+ replies_html = <<-END_HTML
+
+ END_HTML
+ end
+
+ if !thin_mode
+ author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
+ else
+ author_thumbnail = ""
+ end
+
+ author_name = HTML.escape(child["author"].as_s)
+ sponsor_icon = ""
+ if child["verified"]?.try &.as_bool && child["authorIsChannelOwner"]?.try &.as_bool
+ author_name += "
"
+ elsif child["verified"]?.try &.as_bool
+ author_name += "
"
+ end
+
+ if child["isSponsor"]?.try &.as_bool
+ sponsor_icon = String.build do |str|
+ str << %(
)
+ end
+ end
+ html << <<-END_HTML
+
+
+
+
+
+
+
+ #{author_name}
+
+ #{sponsor_icon}
+
#{child["contentHtml"]}
+ END_HTML
+
+ if child["attachment"]?
+ attachment = child["attachment"]
+
+ case attachment["type"]
+ when "image"
+ attachment = attachment["imageThumbnails"][1]
+
+ html << <<-END_HTML
+
+
+
+
+
+ END_HTML
+ when "video"
+ if attachment["error"]?
+ html << <<-END_HTML
+
+
#{attachment["error"]}
+
+ END_HTML
+ else
+ html << <<-END_HTML
+
+
+
+ END_HTML
+ end
+ when "multiImage"
+ html << <<-END_HTML
+
+ #{translate(locale, "carousel_skip")}
+
+ END_HTML
+ image_array = attachment["images"].as_a
+
+ image_array.each_index do |i|
+ html << <<-END_HTML
+
(i + 1).to_s, "total" => image_array.size.to_s})}" tabindex="0">
+
+
+ END_HTML
+ end
+
+ html << <<-END_HTML
+
+
+ END_HTML
+ attachment["images"].as_a.each_index do |i|
+ html << <<-END_HTML
+
#{i + 1}
+ END_HTML
+ end
+ html << <<-END_HTML
+
+
+
+ END_HTML
+ else nil # Ignore
+ end
+ end
+
+ html << <<-END_HTML
+
+ #{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}
+ |
+ END_HTML
+
+ if comments["videoId"]?
+ html << <<-END_HTML
+ [YT]
+ |
+ END_HTML
+ elsif comments["authorId"]?
+ html << <<-END_HTML
+ [YT]
+ |
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ #{number_with_separator(child["likeCount"])}
+ END_HTML
+
+ if child["creatorHeart"]?
+ if !thin_mode
+ creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
+ else
+ creator_thumbnail = ""
+ end
+
+ html << <<-END_HTML
+
+
+
+
+
+
+
+
+
+ END_HTML
+ end
+
+ html << <<-END_HTML
+
+ #{replies_html}
+
+
+ END_HTML
+ end
+
+ if comments["continuation"]?
+ html << <<-END_HTML
+
+ END_HTML
+ end
+ end
+ end
+end
diff --git a/src/invidious/frontend/misc.cr b/src/invidious/frontend/misc.cr
new file mode 100644
index 00000000..7a6cf79d
--- /dev/null
+++ b/src/invidious/frontend/misc.cr
@@ -0,0 +1,14 @@
+module Invidious::Frontend::Misc
+ extend self
+
+ def redirect_url(env : HTTP::Server::Context)
+ prefs = env.get("preferences").as(Preferences)
+
+ if prefs.automatic_instance_redirect
+ current_page = env.get?("current_page").as(String)
+ return "/redirect?referer=#{current_page}"
+ else
+ return "https://redirect.invidious.io#{env.request.resource}"
+ end
+ end
+end
diff --git a/src/invidious/frontend/pagination.cr b/src/invidious/frontend/pagination.cr
new file mode 100644
index 00000000..a29f5936
--- /dev/null
+++ b/src/invidious/frontend/pagination.cr
@@ -0,0 +1,121 @@
+require "uri"
+
+module Invidious::Frontend::Pagination
+ extend self
+
+ private def first_page(str : String::Builder, locale : String?, url : String)
+ str << %(
)
+
+ if locale_is_rtl?(locale)
+ # Inverted arrow ("first" points to the right)
+ str << translate(locale, "First page")
+ str << " "
+ str << %( )
+ else
+ # Regular arrow ("first" points to the left)
+ str << %( )
+ str << " "
+ str << translate(locale, "First page")
+ end
+
+ str << " "
+ end
+
+ private def previous_page(str : String::Builder, locale : String?, url : String)
+ # Link
+ str << %(
)
+
+ if locale_is_rtl?(locale)
+ # Inverted arrow ("previous" points to the right)
+ str << translate(locale, "Previous page")
+ str << " "
+ str << %( )
+ else
+ # Regular arrow ("previous" points to the left)
+ str << %( )
+ str << " "
+ str << translate(locale, "Previous page")
+ end
+
+ str << " "
+ end
+
+ private def next_page(str : String::Builder, locale : String?, url : String)
+ # Link
+ str << %(
)
+
+ if locale_is_rtl?(locale)
+ # Inverted arrow ("next" points to the left)
+ str << %( )
+ str << " "
+ str << translate(locale, "Next page")
+ else
+ # Regular arrow ("next" points to the right)
+ str << translate(locale, "Next page")
+ str << " "
+ str << %( )
+ end
+
+ str << " "
+ end
+
+ def nav_numeric(locale : String?, *, base_url : String | URI, current_page : Int, show_next : Bool = true)
+ return String.build do |str|
+ str << %(
\n)
+ str << %(
\n)
+
+ str << %(
)
+
+ if current_page > 1
+ params_prev = URI::Params{"page" => (current_page - 1).to_s}
+ url_prev = HttpServer::Utils.add_params_to_url(base_url, params_prev)
+
+ self.previous_page(str, locale, url_prev.to_s)
+ end
+
+ str << %(
\n)
+ str << %(
)
+
+ if show_next
+ params_next = URI::Params{"page" => (current_page + 1).to_s}
+ url_next = HttpServer::Utils.add_params_to_url(base_url, params_next)
+
+ self.next_page(str, locale, url_next.to_s)
+ end
+
+ str << %(
\n)
+
+ str << %(
\n)
+ str << %(
\n\n)
+ end
+ end
+
+ def nav_ctoken(locale : String?, *, base_url : String | URI, ctoken : String?, first_page : Bool, params : URI::Params)
+ return String.build do |str|
+ str << %(
\n)
+ str << %(
\n)
+
+ str << %(
)
+
+ if !first_page
+ self.first_page(str, locale, base_url.to_s)
+ end
+
+ str << %(
\n)
+
+ str << %(
)
+
+ if !ctoken.nil?
+ params["continuation"] = ctoken
+ url_next = HttpServer::Utils.add_params_to_url(base_url, params)
+
+ self.next_page(str, locale, url_next.to_s)
+ end
+
+ str << %(
\n)
+
+ str << %(
\n)
+ str << %(
\n\n)
+ end
+ end
+end
diff --git a/src/invidious/frontend/search_filters.cr b/src/invidious/frontend/search_filters.cr
new file mode 100644
index 00000000..8ac0af2e
--- /dev/null
+++ b/src/invidious/frontend/search_filters.cr
@@ -0,0 +1,135 @@
+module Invidious::Frontend::SearchFilters
+ extend self
+
+ # Generate the search filters collapsable widget.
+ def generate(filters : Search::Filters, query : String, page : Int, locale : String) : String
+ return String.build(8000) do |str|
+ str << "
\n"
+ str << "\t
"
+ str << "\t\t" << translate(locale, "search_filters_title") << " \n"
+
+ str << "\t\t\n"
+
+ str << "\t \n"
+ str << "
\n"
+ end
+ end
+
+ # Generate wrapper HTML (`
`, filter name, etc...) around the
+ # `
` elements of a search filter
+ macro filter_wrapper(name)
+ str << "\t\t\t\t
\n"
+
+ str << "\t\t\t\t\t"
+ str << translate(locale, "search_filters_{{name}}_label")
+ str << "
\n"
+
+ str << "\t\t\t\t\t\n"
+ make_{{name}}_filter_options(str, filters.{{name}}, locale)
+ str << "\t\t\t\t\t
"
+
+ str << "\t\t\t\t \n"
+ end
+
+ # Generates the HTML for the list of radio buttons of the "date" search filter
+ def make_date_filter_options(str : String::Builder, value : Search::Filters::Date, locale : String)
+ {% for value in Invidious::Search::Filters::Date.constants %}
+ {% date = value.underscore %}
+
+ str << "\t\t\t\t\t\t
"
+ str << " '
+
+ str << ""
+ str << translate(locale, "search_filters_date_option_{{date}}")
+ str << "
\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "type" search filter
+ def make_type_filter_options(str : String::Builder, value : Search::Filters::Type, locale : String)
+ {% for value in Invidious::Search::Filters::Type.constants %}
+ {% type = value.underscore %}
+
+ str << "\t\t\t\t\t\t
"
+ str << " '
+
+ str << ""
+ str << translate(locale, "search_filters_type_option_{{type}}")
+ str << "
\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "duration" search filter
+ def make_duration_filter_options(str : String::Builder, value : Search::Filters::Duration, locale : String)
+ {% for value in Invidious::Search::Filters::Duration.constants %}
+ {% duration = value.underscore %}
+
+ str << "\t\t\t\t\t\t
"
+ str << " '
+
+ str << ""
+ str << translate(locale, "search_filters_duration_option_{{duration}}")
+ str << "
\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of checkboxes of the "features" search filter
+ def make_features_filter_options(str : String::Builder, value : Search::Filters::Features, locale : String)
+ {% for value in Invidious::Search::Filters::Features.constants %}
+ {% if value.stringify != "All" && value.stringify != "None" %}
+ {% feature = value.underscore %}
+
+ str << "\t\t\t\t\t\t
"
+ str << " '
+
+ str << ""
+ str << translate(locale, "search_filters_features_option_{{feature}}")
+ str << "
\n"
+ {% end %}
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "sort" search filter
+ def make_sort_filter_options(str : String::Builder, value : Search::Filters::Sort, locale : String)
+ {% for value in Invidious::Search::Filters::Sort.constants %}
+ {% sort = value.underscore %}
+
+ str << "\t\t\t\t\t\t
"
+ str << " '
+
+ str << ""
+ str << translate(locale, "search_filters_sort_option_{{sort}}")
+ str << "
\n"
+ {% end %}
+ end
+end
diff --git a/src/invidious/frontend/watch_page.cr b/src/invidious/frontend/watch_page.cr
index 80b67641..15d925e3 100644
--- a/src/invidious/frontend/watch_page.cr
+++ b/src/invidious/frontend/watch_page.cr
@@ -7,26 +7,32 @@ module Invidious::Frontend::WatchPage
getter full_videos : Array(Hash(String, JSON::Any))
getter video_streams : Array(Hash(String, JSON::Any))
getter audio_streams : Array(Hash(String, JSON::Any))
- getter captions : Array(Caption)
+ getter captions : Array(Invidious::Videos::Captions::Metadata)
def initialize(
@full_videos,
@video_streams,
@audio_streams,
- @captions
+ @captions,
)
end
end
def download_widget(locale : String, video : Video, video_assets : VideoAssets) : String
if CONFIG.disabled?("downloads")
- return "
#{translate(locale, "Download is disabled.")}
"
+ return "
#{translate(locale, "Download is disabled")}
"
+ end
+
+ url = "/download"
+ if (CONFIG.invidious_companion.present?)
+ invidious_companion = CONFIG.invidious_companion.sample
+ url = "#{invidious_companion.public_url}/download?check=#{invidious_companion_encrypt(video.id)}"
end
return String.build(4000) do |str|
str << "
END_HTML
@@ -103,7 +106,7 @@ end
# -------------------
macro error_atom(*args)
- error_atom_helper(env, {{*args}})
+ error_atom_helper(env, {{args.splat}})
end
def error_atom_helper(env : HTTP::Server::Context, status_code : Int32, exception : Exception)
@@ -129,14 +132,14 @@ end
# -------------------
macro error_json(*args)
- error_json_helper(env, {{*args}})
+ error_json_helper(env, {{args.splat}})
end
def error_json_helper(
env : HTTP::Server::Context,
status_code : Int32,
exception : Exception,
- additional_fields : Hash(String, Object) | Nil = nil
+ additional_fields : Hash(String, Object) | Nil = nil,
)
if exception.is_a?(InfoException)
return error_json_helper(env, status_code, exception.message || "", additional_fields)
@@ -158,7 +161,7 @@ def error_json_helper(
env : HTTP::Server::Context,
status_code : Int32,
message : String,
- additional_fields : Hash(String, Object) | Nil = nil
+ additional_fields : Hash(String, Object) | Nil = nil,
)
env.response.content_type = "application/json"
env.response.status_code = status_code
@@ -198,7 +201,7 @@ def error_redirect_helper(env : HTTP::Server::Context)
#{switch_instance}
- #{go_to_youtube}
+ #{go_to_youtube}
END_HTML
diff --git a/src/invidious/helpers/handlers.cr b/src/invidious/helpers/handlers.cr
index d140a858..13ea9fe9 100644
--- a/src/invidious/helpers/handlers.cr
+++ b/src/invidious/helpers/handlers.cr
@@ -27,6 +27,7 @@ class Kemal::RouteHandler
# Processes the route if it's a match. Otherwise renders 404.
private def process_request(context)
raise Kemal::Exceptions::RouteNotFound.new(context) unless context.route_found?
+ return if context.response.closed?
content = context.route.handler.call(context)
if !Kemal.config.error_handlers.empty? && Kemal.config.error_handlers.has_key?(context.response.status_code) && exclude_match?(context)
@@ -97,7 +98,7 @@ class AuthHandler < Kemal::Handler
if token = env.request.headers["Authorization"]?
token = JSON.parse(URI.decode_www_form(token.lchop("Bearer ")))
session = URI.decode_www_form(token["session"].as_s)
- scopes, expire, signature = validate_request(token, session, env.request, HMAC_KEY, nil)
+ scopes, _, _ = validate_request(token, session, env.request, HMAC_KEY, nil)
if email = Invidious::Database::SessionIDs.select_email(session)
user = Invidious::Database::Users.select!(email: email)
@@ -142,63 +143,8 @@ class APIHandler < Kemal::Handler
exclude ["/api/v1/auth/notifications"], "POST"
def call(env)
- return call_next env unless only_match? env
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- # Since /api/v1/notifications is an event-stream, we don't want
- # to wrap the response
- return call_next env if exclude_match? env
-
- # Here we swap out the socket IO so we can modify the response as needed
- output = env.response.output
- env.response.output = IO::Memory.new
-
- begin
- call_next env
-
- env.response.output.rewind
-
- if env.response.output.as(IO::Memory).size != 0 &&
- env.response.headers.includes_word?("Content-Type", "application/json")
- response = JSON.parse(env.response.output)
-
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
- if env.params.query["pretty"]?.try &.== "1"
- response = response.to_pretty_json
- else
- response = response.to_json
- end
- else
- response = env.response.output.gets_to_end
- end
- rescue ex
- env.response.content_type = "application/json" if env.response.headers.includes_word?("Content-Type", "text/html")
- env.response.status_code = 500
-
- if env.response.headers.includes_word?("Content-Type", "application/json")
- response = {"error" => ex.message || "Unspecified error"}
-
- if env.params.query["pretty"]?.try &.== "1"
- response = response.to_pretty_json
- else
- response = response.to_json
- end
- end
- ensure
- env.response.output = output
- env.response.print response
-
- env.response.flush
- end
+ env.response.headers["Access-Control-Allow-Origin"] = "*" if only_match?(env)
+ call_next env
end
end
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index c3b53339..6add0237 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -22,31 +22,6 @@ struct Annotation
property annotations : String
end
-def login_req(f_req)
- data = {
- # Unfortunately there's not much information available on `bgRequest`; part of Google's BotGuard
- # Generally this is much longer (>1250 characters), see also
- # https://github.com/ytdl-org/youtube-dl/commit/baf67a604d912722b0fe03a40e9dc5349a2208cb .
- # For now this can be empty.
- "bgRequest" => %|["identifier",""]|,
- "pstMsg" => "1",
- "checkConnection" => "youtube",
- "checkedDomains" => "youtube",
- "hl" => "en",
- "deviceinfo" => %|[null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]|,
- "f.req" => f_req,
- "flowName" => "GlifWebSignIn",
- "flowEntry" => "ServiceLogin",
- # "cookiesDisabled" => "false",
- # "gmscoreversion" => "undefined",
- # "continue" => "https://accounts.google.com/ManageAccount",
- # "azt" => "",
- # "bgHash" => "",
- }
-
- return HTTP::Params.encode(data)
-end
-
def html_to_content(description_html : String)
description = description_html.gsub(/(
)|(
)/, {
"
": "\n",
@@ -103,15 +78,6 @@ def create_notification_stream(env, topics, connection_channel)
video.published = published
response = JSON.parse(video.to_json(locale, nil))
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
@@ -138,15 +104,6 @@ def create_notification_stream(env, topics, connection_channel)
Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
response = JSON.parse(video.to_json(locale))
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
@@ -180,15 +137,6 @@ def create_notification_stream(env, topics, connection_channel)
video.published = Time.unix(published)
response = JSON.parse(video.to_json(locale, nil))
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
@@ -233,3 +181,20 @@ def proxy_file(response, env)
IO.copy response.body_io, env.response
end
end
+
+# Fetch the playback requests tracker from the statistics endpoint.
+#
+# Creates a new tracker when unavailable.
+def get_playback_statistic
+ if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
+ tracker = {
+ "totalRequests" => 0_i64,
+ "successfulRequests" => 0_i64,
+ "ratio" => 0_f64,
+ }
+
+ Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
+ end
+
+ return tracker.as(Hash(String, Int64 | Float64))
+end
diff --git a/src/invidious/helpers/i18n.cr b/src/invidious/helpers/i18n.cr
index 39e183f2..bca2edda 100644
--- a/src/invidious/helpers/i18n.cr
+++ b/src/invidious/helpers/i18n.cr
@@ -1,19 +1,35 @@
-# "bn_BD" => load_locale("bn_BD"), # Bengali (Bangladesh) [Incomplete]
-# "eu" => load_locale("eu"), # Basque [Incomplete]
-# "sk" => load_locale("sk"), # Slovak [Incomplete]
+# Languages requiring a better level of translation (at least 20%)
+# to be added to the list below:
+#
+# "af" => "", # Afrikaans
+# "az" => "", # Azerbaijani
+# "be" => "", # Belarusian
+# "bn_BD" => "", # Bengali (Bangladesh)
+# "ia" => "", # Interlingua
+# "or" => "", # Odia
+# "tk" => "", # Turkmen
+# "tok" => "", # Toki Pona
+#
LOCALES_LIST = {
"ar" => "العربية", # Arabic
+ "bg" => "български", # Bulgarian
+ "bn" => "বাংলা", # Bengali
+ "ca" => "Català", # Catalan
"cs" => "Čeština", # Czech
+ "cy" => "Cymraeg", # Welsh
"da" => "Dansk", # Danish
"de" => "Deutsch", # German
"el" => "Ελληνικά", # Greek
"en-US" => "English", # English
"eo" => "Esperanto", # Esperanto
"es" => "Español", # Spanish
+ "et" => "Eesti keel", # Estonian
+ "eu" => "Euskara", # Basque
"fa" => "فارسی", # Persian
"fi" => "Suomi", # Finnish
"fr" => "Français", # French
"he" => "עברית", # Hebrew
+ "hi" => "हिन्दी", # Hindi
"hr" => "Hrvatski", # Croatian
"hu-HU" => "Magyar Nyelv", # Hungarian
"id" => "Bahasa Indonesia", # Indonesian
@@ -21,6 +37,7 @@ LOCALES_LIST = {
"it" => "Italiano", # Italian
"ja" => "日本語", # Japanese
"ko" => "한국어", # Korean
+ "lmo" => "Lombard", # Lombard
"lt" => "Lietuvių", # Lithuanian
"nb-NO" => "Norsk bokmål", # Norwegian Bokmål
"nl" => "Nederlands", # Dutch
@@ -29,11 +46,15 @@ LOCALES_LIST = {
"pt-BR" => "Português Brasileiro", # Portuguese (Brazil)
"pt-PT" => "Português de Portugal", # Portuguese (Portugal)
"ro" => "Română", # Romanian
- "ru" => "русский", # Russian
+ "ru" => "Русский", # Russian
+ "si" => "සිංහල", # Sinhala
+ "sk" => "Slovenčina", # Slovak
+ "sl" => "Slovenščina", # Slovenian
"sq" => "Shqip", # Albanian
- "sr" => "srpski (latinica)", # Serbian (Latin)
- "sr_Cyrl" => "српски (ћирилица)", # Serbian (Cyrillic)
+ "sr" => "Srpski (latinica)", # Serbian (Latin)
+ "sr_Cyrl" => "Српски (ћирилица)", # Serbian (Cyrillic)
"sv-SE" => "Svenska", # Swedish
+ "ta" => "தமிழ்", # Tamil
"tr" => "Türkçe", # Turkish
"uk" => "Українська", # Ukrainian
"vi" => "Tiếng Việt", # Vietnamese
@@ -73,7 +94,7 @@ def load_all_locales
return locales
end
-def translate(locale : String?, key : String, text : String | Nil = nil) : String
+def translate(locale : String?, key : String, text : String | Hash(String, String) | Nil = nil) : String
# Log a warning if "key" doesn't exist in en-US locale and return
# that key as the text, so this is more or less transparent to the user.
if !LOCALES["en-US"].has_key?(key)
@@ -96,10 +117,12 @@ def translate(locale : String?, key : String, text : String | Nil = nil) : Strin
match_length = 0
raw_data.as_h.each do |hash_key, value|
- if md = text.try &.match(/#{hash_key}/)
- if md[0].size >= match_length
- translation = value.as_s
- match_length = md[0].size
+ if text.is_a?(String)
+ if md = text.try &.match(/#{hash_key}/)
+ if md[0].size >= match_length
+ translation = value.as_s
+ match_length = md[0].size
+ end
end
end
end
@@ -109,8 +132,13 @@ def translate(locale : String?, key : String, text : String | Nil = nil) : Strin
raise "Invalid translation \"#{raw_data}\""
end
- if text
+ if text.is_a?(String)
translation = translation.gsub("`x`", text)
+ elsif text.is_a?(Hash(String, String))
+ # adds support for multi string interpolation. Based on i18next https://www.i18next.com/translation-function/interpolation#basic
+ text.each_key do |hash_key|
+ translation = translation.gsub("{{#{hash_key}}}", text[hash_key])
+ end
end
return translation
@@ -160,3 +188,12 @@ def translate_bool(locale : String?, translation : Bool)
return translate(locale, "No")
end
end
+
+def locale_is_rtl?(locale : String?)
+ # Fallback to en-US
+ return false if locale.nil?
+
+ # Arabic, Persian, Hebrew
+ # See https://en.wikipedia.org/wiki/Right-to-left_script#List_of_RTL_scripts
+ return {"ar", "fa", "he"}.includes? locale
+end
diff --git a/src/invidious/helpers/i18next.cr b/src/invidious/helpers/i18next.cr
index e84f88fb..684e6d14 100644
--- a/src/invidious/helpers/i18next.cr
+++ b/src/invidious/helpers/i18next.cr
@@ -35,27 +35,35 @@ module I18next::Plurals
Special_Slovenian = 21
Special_Hebrew = 22
Special_Odia = 23
+
+ # Mixed v3/v4 rules in Weblate
+ # `es`, `pt` and `pt-PT` doesn't seem to have been refreshed
+ # by weblate yet, but I suspect it will happen one day.
+ # See: https://github.com/translate/translate/issues/4873
+ Special_French_Portuguese
+ Special_Hungarian_Serbian
+ Special_Spanish_Italian
end
private PLURAL_SETS = {
PluralForms::Single_gt_one => [
- "ach", "ak", "am", "arn", "br", "fil", "fr", "gun", "ln", "mfe", "mg",
- "mi", "oc", "pt", "pt-BR", "tg", "tl", "ti", "tr", "uz", "wa",
+ "ach", "ak", "am", "arn", "br", "fa", "fil", "gun", "ln", "mfe", "mg",
+ "mi", "oc", "pt-PT", "tg", "tl", "ti", "tr", "uz", "wa",
],
PluralForms::Single_not_one => [
"af", "an", "ast", "az", "bg", "bn", "ca", "da", "de", "dev", "el", "en",
- "eo", "es", "et", "eu", "fi", "fo", "fur", "fy", "gl", "gu", "ha", "hi",
- "hu", "hy", "ia", "it", "kk", "kn", "ku", "lb", "mai", "ml", "mn", "mr",
+ "eo", "et", "eu", "fi", "fo", "fur", "fy", "gl", "gu", "ha", "hi",
+ "hu", "hy", "ia", "kk", "kn", "ku", "lb", "mai", "ml", "mn", "mr",
"nah", "nap", "nb", "ne", "nl", "nn", "no", "nso", "pa", "pap", "pms",
- "ps", "pt-PT", "rm", "sco", "se", "si", "so", "son", "sq", "sv", "sw",
+ "ps", "rm", "sco", "se", "si", "so", "son", "sq", "sv", "sw",
"ta", "te", "tk", "ur", "yo",
],
PluralForms::None => [
- "ay", "bo", "cgg", "fa", "ht", "id", "ja", "jbo", "ka", "km", "ko", "ky",
+ "ay", "bo", "cgg", "ht", "id", "ja", "jbo", "ka", "km", "ko", "ky",
"lo", "ms", "sah", "su", "th", "tt", "ug", "vi", "wo", "zh",
],
PluralForms::Dual_Slavic => [
- "be", "bs", "cnr", "dz", "hr", "ru", "sr", "uk",
+ "be", "bs", "cnr", "dz", "ru", "uk",
],
}
@@ -81,6 +89,13 @@ module I18next::Plurals
"ro" => PluralForms::Special_Romanian,
"sk" => PluralForms::Special_Czech_Slovak,
"sl" => PluralForms::Special_Slovenian,
+ # Mixed v3/v4 rules
+ "es" => PluralForms::Special_Spanish_Italian,
+ "fr" => PluralForms::Special_French_Portuguese,
+ "hr" => PluralForms::Special_Hungarian_Serbian,
+ "it" => PluralForms::Special_Spanish_Italian,
+ "pt" => PluralForms::Special_French_Portuguese,
+ "sr" => PluralForms::Special_Hungarian_Serbian,
}
# These are the v1 and v2 compatible suffixes.
@@ -150,9 +165,8 @@ module I18next::Plurals
end
def get_plural_form(locale : String) : PluralForms
- # Extract the ISO 639-1 or 639-2 code from an RFC 5646 language code,
- # except for pt-BR and pt-PT which needs to be kept as-is.
- if !locale.matches?(/^pt-(BR|PT)$/)
+ # Extract the ISO 639-1 or 639-2 code from an RFC 5646 language code
+ if !locale.matches?(/^pt-PT$/)
locale = locale.split('-')[0]
end
@@ -174,7 +188,7 @@ module I18next::Plurals
# Emulate the `rule.numbers.size == 2 && rule.numbers[0] == 1` check
# from original i18next code
- private def is_simple_plural(form : PluralForms) : Bool
+ private def simple_plural?(form : PluralForms) : Bool
case form
when .single_gt_one? then return true
when .single_not_one? then return true
@@ -196,7 +210,7 @@ module I18next::Plurals
idx = SuffixIndex.get_index(plural_form, count)
# Simple plurals are handled differently in all versions (but v4)
- if @simplify_plural_suffix && is_simple_plural(plural_form)
+ if @simplify_plural_suffix && simple_plural?(plural_form)
return (idx == 1) ? "_plural" : ""
end
@@ -246,6 +260,10 @@ module I18next::Plurals
when .special_slovenian? then return special_slovenian(count)
when .special_hebrew? then return special_hebrew(count)
when .special_odia? then return special_odia(count)
+ # Mixed v3/v4 forms
+ when .special_spanish_italian? then return special_cldr_spanish_italian(count)
+ when .special_french_portuguese? then return special_cldr_french_portuguese(count)
+ when .special_hungarian_serbian? then return special_cldr_hungarian_serbian(count)
else
# default, if nothing matched above
return 0_u8
@@ -507,5 +525,42 @@ module I18next::Plurals
def self.special_odia(count : Int) : UInt8
return (count == 1) ? 0_u8 : 1_u8
end
+
+ # -------------------
+ # "v3.5" rules
+ # -------------------
+
+ # Plural form for Spanish & Italian languages
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_spanish_italian(count : Int) : UInt8
+ return 0_u8 if (count == 1) # one
+ return 1_u8 if (count != 0 && count % 1_000_000 == 0) # many
+ return 2_u8 # other
+ end
+
+ # Plural form for French and Portuguese
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_french_portuguese(count : Int) : UInt8
+ return 0_u8 if (count == 0 || count == 1) # one
+ return 1_u8 if (count % 1_000_000 == 0) # many
+ return 2_u8 # other
+ end
+
+ # Plural form for Hungarian and Serbian
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_hungarian_serbian(count : Int) : UInt8
+ n_mod_10 = count % 10
+ n_mod_100 = count % 100
+
+ return 0_u8 if (n_mod_10 == 1 && n_mod_100 != 11) # one
+ return 1_u8 if (2 <= n_mod_10 <= 4 && (n_mod_100 < 12 || 14 < n_mod_100)) # few
+ return 2_u8 # other
+ end
end
end
diff --git a/src/invidious/helpers/json_filter.cr b/src/invidious/helpers/json_filter.cr
deleted file mode 100644
index b8e8f96d..00000000
--- a/src/invidious/helpers/json_filter.cr
+++ /dev/null
@@ -1,248 +0,0 @@
-module JSONFilter
- alias BracketIndex = Hash(Int64, Int64)
-
- alias GroupedFieldsValue = String | Array(GroupedFieldsValue)
- alias GroupedFieldsList = Array(GroupedFieldsValue)
-
- class FieldsParser
- class ParseError < Exception
- end
-
- # Returns the `Regex` pattern used to match nest groups
- def self.nest_group_pattern : Regex
- # uses a '.' character to match json keys as they are allowed
- # to contain any unicode codepoint
- /(?:|,)(?
[^,\n]*?)\(/
- end
-
- # Returns the `Regex` pattern used to check if there are any empty nest groups
- def self.unnamed_nest_group_pattern : Regex
- /^\(|\(\(|\/\(/
- end
-
- def self.parse_fields(fields_text : String) : Nil
- if fields_text.empty?
- raise FieldsParser::ParseError.new "Fields is empty"
- end
-
- opening_bracket_count = fields_text.count('(')
- closing_bracket_count = fields_text.count(')')
-
- if opening_bracket_count != closing_bracket_count
- bracket_type = opening_bracket_count > closing_bracket_count ? "opening" : "closing"
- raise FieldsParser::ParseError.new "There are too many #{bracket_type} brackets (#{opening_bracket_count}:#{closing_bracket_count})"
- elsif match_result = unnamed_nest_group_pattern.match(fields_text)
- raise FieldsParser::ParseError.new "Unnamed nest group at position #{match_result.begin}"
- end
-
- # first, handle top-level single nested properties: items/id, playlistItems/snippet, etc
- parse_single_nests(fields_text) { |nest_list| yield nest_list }
-
- # next, handle nest groups: items(id, etag, etc)
- parse_nest_groups(fields_text) { |nest_list| yield nest_list }
- end
-
- def self.parse_single_nests(fields_text : String) : Nil
- single_nests = remove_nest_groups(fields_text)
-
- if !single_nests.empty?
- property_nests = single_nests.split(',')
-
- property_nests.each do |nest|
- nest_list = nest.split('/')
- if nest_list.includes? ""
- raise FieldsParser::ParseError.new "Empty key in nest list: #{nest_list}"
- end
- yield nest_list
- end
- # else
- # raise FieldsParser::ParseError.new "Empty key in nest list 22: #{fields_text} | #{single_nests}"
- end
- end
-
- def self.parse_nest_groups(fields_text : String) : Nil
- nest_stack = [] of NamedTuple(group_name: String, closing_bracket_index: Int64)
- bracket_pairs = get_bracket_pairs(fields_text, true)
-
- text_index = 0
- regex_index = 0
-
- while regex_result = self.nest_group_pattern.match(fields_text, regex_index)
- raw_match = regex_result[0]
- group_name = regex_result["groupname"]
-
- text_index = regex_result.begin
- regex_index = regex_result.end
-
- if text_index.nil? || regex_index.nil?
- raise FieldsParser::ParseError.new "Received invalid index while parsing nest groups: text_index: #{text_index} | regex_index: #{regex_index}"
- end
-
- offset = raw_match.starts_with?(',') ? 1 : 0
-
- opening_bracket_index = (text_index + group_name.size) + offset
- closing_bracket_index = bracket_pairs[opening_bracket_index]
- content_start = opening_bracket_index + 1
-
- content = fields_text[content_start...closing_bracket_index]
-
- if content.empty?
- raise FieldsParser::ParseError.new "Empty nest group at position #{content_start}"
- else
- content = remove_nest_groups(content)
- end
-
- while nest_stack.size > 0 && closing_bracket_index > nest_stack[nest_stack.size - 1][:closing_bracket_index]
- if nest_stack.size
- nest_stack.pop
- end
- end
-
- group_name.split('/').each do |name|
- nest_stack.push({
- group_name: name,
- closing_bracket_index: closing_bracket_index,
- })
- end
-
- if !content.empty?
- properties = content.split(',')
-
- properties.each do |prop|
- nest_list = nest_stack.map { |nest_prop| nest_prop[:group_name] }
-
- if !prop.empty?
- if prop.includes?('/')
- parse_single_nests(prop) { |list| nest_list += list }
- else
- nest_list.push prop
- end
- else
- raise FieldsParser::ParseError.new "Empty key in nest list: #{nest_list << prop}"
- end
-
- yield nest_list
- end
- end
- end
- end
-
- def self.remove_nest_groups(text : String) : String
- content_bracket_pairs = get_bracket_pairs(text, false)
-
- content_bracket_pairs.each_key.to_a.reverse.each do |opening_bracket|
- closing_bracket = content_bracket_pairs[opening_bracket]
- last_comma = text.rindex(',', opening_bracket) || 0
-
- text = text[0...last_comma] + text[closing_bracket + 1...text.size]
- end
-
- return text.starts_with?(',') ? text[1...text.size] : text
- end
-
- def self.get_bracket_pairs(text : String, recursive = true) : BracketIndex
- istart = [] of Int64
- bracket_index = BracketIndex.new
-
- text.each_char_with_index do |char, index|
- if char == '('
- istart.push(index.to_i64)
- end
-
- if char == ')'
- begin
- opening = istart.pop
- if recursive || (!recursive && istart.size == 0)
- bracket_index[opening] = index.to_i64
- end
- rescue
- raise FieldsParser::ParseError.new "No matching opening parenthesis at: #{index}"
- end
- end
- end
-
- if istart.size != 0
- idx = istart.pop
- raise FieldsParser::ParseError.new "No matching closing parenthesis at: #{idx}"
- end
-
- return bracket_index
- end
- end
-
- class FieldsGrouper
- alias SkeletonValue = Hash(String, SkeletonValue)
-
- def self.create_json_skeleton(fields_text : String) : SkeletonValue
- root_hash = {} of String => SkeletonValue
-
- FieldsParser.parse_fields(fields_text) do |nest_list|
- current_item = root_hash
- nest_list.each do |key|
- if current_item[key]?
- current_item = current_item[key]
- else
- current_item[key] = {} of String => SkeletonValue
- current_item = current_item[key]
- end
- end
- end
- root_hash
- end
-
- def self.create_grouped_fields_list(json_skeleton : SkeletonValue) : GroupedFieldsList
- grouped_fields_list = GroupedFieldsList.new
- json_skeleton.each do |key, value|
- grouped_fields_list.push key
-
- nested_keys = create_grouped_fields_list(value)
- grouped_fields_list.push nested_keys unless nested_keys.empty?
- end
- return grouped_fields_list
- end
- end
-
- class FilterError < Exception
- end
-
- def self.filter(item : JSON::Any, fields_text : String, in_place : Bool = true)
- skeleton = FieldsGrouper.create_json_skeleton(fields_text)
- grouped_fields_list = FieldsGrouper.create_grouped_fields_list(skeleton)
- filter(item, grouped_fields_list, in_place)
- end
-
- def self.filter(item : JSON::Any, grouped_fields_list : GroupedFieldsList, in_place : Bool = true) : JSON::Any
- item = item.clone unless in_place
-
- if !item.as_h? && !item.as_a?
- raise FilterError.new "Can't filter '#{item}' by #{grouped_fields_list}"
- end
-
- top_level_keys = Array(String).new
- grouped_fields_list.each do |value|
- if value.is_a? String
- top_level_keys.push value
- elsif value.is_a? Array
- if !top_level_keys.empty?
- key_to_filter = top_level_keys.last
-
- if item.as_h?
- filter(item[key_to_filter], value, in_place: true)
- elsif item.as_a?
- item.as_a.each { |arr_item| filter(arr_item[key_to_filter], value, in_place: true) }
- end
- else
- raise FilterError.new "Tried to filter while top level keys list is empty"
- end
- end
- end
-
- if item.as_h?
- item.as_h.select! top_level_keys
- elsif item.as_a?
- item.as_a.map { |value| filter(value, top_level_keys, in_place: true) }
- end
-
- item
- end
-end
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index e2e50905..03349595 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,3 +1,5 @@
+require "colorize"
+
enum LogLevel
All = 0
Trace = 1
@@ -10,7 +12,9 @@ enum LogLevel
end
class Invidious::LogHandler < Kemal::BaseLogHandler
- def initialize(@io : IO = STDOUT, @level = LogLevel::Debug)
+ def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
+ Colorize.enabled = use_color
+ Colorize.on_tty_only!
end
def call(context : HTTP::Server::Context)
@@ -34,28 +38,27 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
context
end
- def puts(message : String)
- @io << message << '\n'
- @io.flush
- end
-
def write(message : String)
@io << message
@io.flush
end
- def set_log_level(level : String)
- @level = LogLevel.parse(level)
- end
-
- def set_log_level(level : LogLevel)
- @level = level
+ def color(level)
+ case level
+ when LogLevel::Trace then :cyan
+ when LogLevel::Debug then :green
+ when LogLevel::Info then :white
+ when LogLevel::Warn then :yellow
+ when LogLevel::Error then :red
+ when LogLevel::Fatal then :magenta
+ else :default
+ end
end
{% for level in %w(trace debug info warn error fatal) %}
def {{level.id}}(message : String)
if LogLevel::{{level.id.capitalize}} >= @level
- puts("#{Time.utc} [{{level.id}}] #{message}")
+ puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
end
end
{% end %}
diff --git a/src/invidious/helpers/macros.cr b/src/invidious/helpers/macros.cr
index 75df1612..84847321 100644
--- a/src/invidious/helpers/macros.cr
+++ b/src/invidious/helpers/macros.cr
@@ -48,13 +48,18 @@ module JSON::Serializable
end
end
-macro templated(filename, template = "template", navbar_search = true)
+macro templated(_filename, template = "template", navbar_search = true)
navbar_search = {{navbar_search}}
- render "src/invidious/views/#{{{filename}}}.ecr", "src/invidious/views/#{{{template}}}.ecr"
+
+ {{ filename = "src/invidious/views/" + _filename + ".ecr" }}
+ {{ layout = "src/invidious/views/" + template + ".ecr" }}
+
+ __content_filename__ = {{filename}}
+ render {{filename}}, {{layout}}
end
macro rendered(filename)
- render "src/invidious/views/#{{{filename}}}.ecr"
+ render("src/invidious/views/#{{{filename}}}.ecr")
end
# Similar to Kemals halt method but works in a
diff --git a/src/invidious/helpers/serialized_yt_data.cr b/src/invidious/helpers/serialized_yt_data.cr
index bfbc237c..2796a8dc 100644
--- a/src/invidious/helpers/serialized_yt_data.cr
+++ b/src/invidious/helpers/serialized_yt_data.cr
@@ -1,3 +1,16 @@
+@[Flags]
+enum VideoBadges
+ LiveNow
+ Premium
+ ThreeD
+ FourK
+ New
+ EightK
+ VR180
+ VR360
+ ClosedCaptions
+end
+
struct SearchVideo
include DB::Serializable
@@ -9,9 +22,10 @@ struct SearchVideo
property views : Int64
property description_html : String
property length_seconds : Int32
- property live_now : Bool
- property premium : Bool
property premiere_timestamp : Time?
+ property author_verified : Bool
+ property author_thumbnail : String?
+ property badges : VideoBadges
def to_xml(auto_generated, query_params, xml : XML::Builder)
query_params["v"] = self.id
@@ -73,25 +87,52 @@ struct SearchVideo
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "authorVerified", self.author_verified
+
+ author_thumbnail = self.author_thumbnail
+
+ if author_thumbnail
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+ end
json.field "videoThumbnails" do
- generate_thumbnails(json, self.id)
+ Invidious::JSONify::APIv1.thumbnails(json, self.id)
end
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
json.field "viewCount", self.views
+ json.field "viewCountText", translate_count(locale, "generic_views_count", self.views, NumberFormatting::Short)
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "lengthSeconds", self.length_seconds
- json.field "liveNow", self.live_now
- json.field "premium", self.premium
- json.field "isUpcoming", self.is_upcoming
+ json.field "liveNow", self.badges.live_now?
+ json.field "premium", self.badges.premium?
+ json.field "isUpcoming", self.upcoming?
if self.premiere_timestamp
json.field "premiereTimestamp", self.premiere_timestamp.try &.to_unix
end
+ json.field "isNew", self.badges.new?
+ json.field "is4k", self.badges.four_k?
+ json.field "is8k", self.badges.eight_k?
+ json.field "isVr180", self.badges.vr180?
+ json.field "isVr360", self.badges.vr360?
+ json.field "is3d", self.badges.three_d?
+ json.field "hasCaptions", self.badges.closed_captions?
end
end
@@ -106,7 +147,7 @@ struct SearchVideo
to_json(nil, json)
end
- def is_upcoming
+ def upcoming?
premiere_timestamp ? true : false
end
end
@@ -129,6 +170,7 @@ struct SearchPlaylist
property video_count : Int32
property videos : Array(SearchPlaylistVideo)
property thumbnail : String?
+ property author_verified : Bool
def to_json(locale : String?, json : JSON::Builder)
json.object do
@@ -141,6 +183,8 @@ struct SearchPlaylist
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "authorVerified", self.author_verified
+
json.field "videoCount", self.video_count
json.field "videos" do
json.array do
@@ -151,7 +195,7 @@ struct SearchPlaylist
json.field "lengthSeconds", video.length_seconds
json.field "videoThumbnails" do
- generate_thumbnails(json, video.id)
+ Invidious::JSONify::APIv1.thumbnails(json, video.id)
end
end
end
@@ -180,8 +224,10 @@ struct SearchChannel
property author_thumbnail : String
property subscriber_count : Int32
property video_count : Int32
+ property channel_handle : String?
property description_html : String
property auto_generated : Bool
+ property author_verified : Bool
def to_json(locale : String?, json : JSON::Builder)
json.object do
@@ -189,14 +235,14 @@ struct SearchChannel
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
-
+ json.field "authorVerified", self.author_verified
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
- json.field "url", self.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
+ json.field "url", self.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
json.field "width", quality
json.field "height", quality
end
@@ -207,6 +253,7 @@ struct SearchChannel
json.field "autoGenerated", self.auto_generated
json.field "subCount", self.subscriber_count
json.field "videoCount", self.video_count
+ json.field "channelHandle", self.channel_handle
json.field "description", html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html
@@ -225,6 +272,74 @@ struct SearchChannel
end
end
+struct SearchHashtag
+ include DB::Serializable
+
+ property title : String
+ property url : String
+ property video_count : Int64
+ property channel_count : Int64
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "hashtag"
+ json.field "title", self.title
+ json.field "url", self.url
+ json.field "videoCount", self.video_count
+ json.field "channelCount", self.channel_count
+ end
+ end
+end
+
+# A `ProblematicTimelineItem` is a `SearchItem` created by Invidious that
+# represents an item that caused an exception during parsing.
+#
+# This is not a parsed object from YouTube but rather an Invidious-only type
+# created to gracefully communicate parse errors without throwing away
+# the rest of the (hopefully) successfully parsed item on a page.
+struct ProblematicTimelineItem
+ property parse_exception : Exception
+ property id : String
+
+ def initialize(@parse_exception)
+ @id = Random.new.hex(8)
+ end
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "parse-error"
+ json.field "errorMessage", @parse_exception.message
+ json.field "errorBacktrace", @parse_exception.inspect_with_backtrace
+ end
+ end
+
+ # Provides compatibility with PlaylistVideo
+ def to_json(json : JSON::Builder, *args, **kwargs)
+ return to_json("", json)
+ end
+
+ def to_xml(env, locale, xml : XML::Builder)
+ xml.element("entry") do
+ xml.element("id") { xml.text "iv-err-#{@id}" }
+ xml.element("title") { xml.text "Parse Error: This item has failed to parse" }
+ xml.element("updated") { xml.text Time.utc.to_rfc3339 }
+
+ xml.element("content", type: "xhtml") do
+ xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
+ xml.element("div") do
+ xml.element("h4") { translate(locale, "timeline_parse_error_placeholder_heading") }
+ xml.element("p") { translate(locale, "timeline_parse_error_placeholder_message") }
+ end
+
+ xml.element("pre") do
+ get_issue_template(env, @parse_exception)
+ end
+ end
+ end
+ end
+ end
+end
+
class Category
include DB::Serializable
@@ -260,4 +375,11 @@ class Category
end
end
-alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist | Category
+struct Continuation
+ getter token
+
+ def initialize(@token : String)
+ end
+end
+
+alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist | SearchHashtag | Category | ProblematicTimelineItem
diff --git a/src/invidious/helpers/sig_helper.cr b/src/invidious/helpers/sig_helper.cr
new file mode 100644
index 00000000..6d198a42
--- /dev/null
+++ b/src/invidious/helpers/sig_helper.cr
@@ -0,0 +1,349 @@
+require "uri"
+require "socket"
+require "socket/tcp_socket"
+require "socket/unix_socket"
+
+{% if flag?(:advanced_debug) %}
+ require "io/hexdump"
+{% end %}
+
+private alias NetworkEndian = IO::ByteFormat::NetworkEndian
+
+module Invidious::SigHelper
+ enum UpdateStatus
+ Updated
+ UpdateNotRequired
+ Error
+ end
+
+ # -------------------
+ # Payload types
+ # -------------------
+
+ abstract struct Payload
+ end
+
+ struct StringPayload < Payload
+ getter string : String
+
+ def initialize(str : String)
+ raise Exception.new("SigHelper: String can't be empty") if str.empty?
+ @string = str
+ end
+
+ def self.from_bytes(slice : Bytes)
+ size = IO::ByteFormat::NetworkEndian.decode(UInt16, slice)
+ if size == 0 # Error code
+ raise Exception.new("SigHelper: Server encountered an error")
+ end
+
+ if (slice.bytesize - 2) != size
+ raise Exception.new("SigHelper: String size mismatch")
+ end
+
+ if str = String.new(slice[2..])
+ return self.new(str)
+ else
+ raise Exception.new("SigHelper: Can't read string from socket")
+ end
+ end
+
+ def to_io(io)
+ # `.to_u16` raises if there is an overflow during the conversion
+ io.write_bytes(@string.bytesize.to_u16, NetworkEndian)
+ io.write(@string.to_slice)
+ end
+ end
+
+ private enum Opcode
+ FORCE_UPDATE = 0
+ DECRYPT_N_SIGNATURE = 1
+ DECRYPT_SIGNATURE = 2
+ GET_SIGNATURE_TIMESTAMP = 3
+ GET_PLAYER_STATUS = 4
+ PLAYER_UPDATE_TIMESTAMP = 5
+ end
+
+ private record Request,
+ opcode : Opcode,
+ payload : Payload?
+
+ # ----------------------
+ # High-level functions
+ # ----------------------
+
+ class Client
+ @mux : Multiplexor
+
+ def initialize(uri_or_path)
+ @mux = Multiplexor.new(uri_or_path)
+ end
+
+ # Forces the server to re-fetch the YouTube player, and extract the necessary
+ # components from it (nsig function code, sig function code, signature timestamp).
+ def force_update : UpdateStatus
+ request = Request.new(Opcode::FORCE_UPDATE, nil)
+
+ value = send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt16, bytes)
+ end
+
+ case value
+ when 0x0000 then return UpdateStatus::Error
+ when 0xFFFF then return UpdateStatus::UpdateNotRequired
+ when 0xF44F then return UpdateStatus::Updated
+ else
+ code = value.nil? ? "nil" : value.to_s(base: 16)
+ raise Exception.new("SigHelper: Invalid status code received #{code}")
+ end
+ end
+
+ # Decrypt a provided n signature using the server's current nsig function
+ # code, and return the result (or an error).
+ def decrypt_n_param(n : String) : String?
+ request = Request.new(Opcode::DECRYPT_N_SIGNATURE, StringPayload.new(n))
+
+ n_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return n_dec
+ end
+
+ # Decrypt a provided s signature using the server's current sig function
+ # code, and return the result (or an error).
+ def decrypt_sig(sig : String) : String?
+ request = Request.new(Opcode::DECRYPT_SIGNATURE, StringPayload.new(sig))
+
+ sig_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return sig_dec
+ end
+
+ # Return the signature timestamp from the server's current player
+ def get_signature_timestamp : UInt64?
+ request = Request.new(Opcode::GET_SIGNATURE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ # Return the current player's version
+ def get_player : UInt32?
+ request = Request.new(Opcode::GET_PLAYER_STATUS, nil)
+
+ return self.send_request(request) do |bytes|
+ has_player = (bytes[0] == 0xFF)
+ player_version = IO::ByteFormat::NetworkEndian.decode(UInt32, bytes[1..4])
+ has_player ? player_version : nil
+ end
+ end
+
+ # Return when the player was last updated
+ def get_player_timestamp : UInt64?
+ request = Request.new(Opcode::PLAYER_UPDATE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ private def send_request(request : Request, &)
+ channel = @mux.send(request)
+ slice = channel.receive
+ return yield slice
+ rescue ex
+ LOGGER.debug("SigHelper: Error when sending a request")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
+ end
+
+ # ---------------------
+ # Low level functions
+ # ---------------------
+
+ class Multiplexor
+ alias TransactionID = UInt32
+ record Transaction, channel = ::Channel(Bytes).new
+
+ @prng = Random.new
+ @mutex = Mutex.new
+ @queue = {} of TransactionID => Transaction
+
+ @conn : Connection
+ @uri_or_path : String
+
+ def initialize(@uri_or_path)
+ @conn = Connection.new(uri_or_path)
+ listen
+ end
+
+ def listen : Nil
+ raise "Socket is closed" if @conn.closed?
+
+ LOGGER.debug("SigHelper: Multiplexor listening")
+
+ spawn do
+ loop do
+ begin
+ receive_data
+ rescue ex
+ LOGGER.info("SigHelper: Connection to helper died with '#{ex.message}' trying to reconnect...")
+ # We close the socket because for some reason is not closed.
+ @conn.close
+ loop do
+ begin
+ @conn = Connection.new(@uri_or_path)
+ LOGGER.info("SigHelper: Reconnected to SigHelper!")
+ rescue ex
+ LOGGER.debug("SigHelper: Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying")
+ sleep 500.milliseconds
+ next
+ end
+ break if !@conn.closed?
+ end
+ end
+ Fiber.yield
+ end
+ end
+ end
+
+ def send(request : Request)
+ transaction = Transaction.new
+ transaction_id = @prng.rand(TransactionID)
+
+ # Add transaction to queue
+ @mutex.synchronize do
+ # On a 32-bits random integer, this should never happen. Though, just in case, ...
+ if @queue[transaction_id]?
+ raise Exception.new("SigHelper: Duplicate transaction ID! You got a shiny pokemon!")
+ end
+
+ @queue[transaction_id] = transaction
+ end
+
+ write_packet(transaction_id, request)
+
+ return transaction.channel
+ end
+
+ def receive_data
+ transaction_id, slice = read_packet
+
+ @mutex.synchronize do
+ if transaction = @queue.delete(transaction_id)
+ # Remove transaction from queue and send data to the channel
+ transaction.channel.send(slice)
+ LOGGER.trace("SigHelper: Transaction unqueued and data sent to channel")
+ else
+ raise Exception.new("SigHelper: Received transaction was not in queue")
+ end
+ end
+ end
+
+ # Read a single packet from the socket
+ private def read_packet : {TransactionID, Bytes}
+ # Header
+ transaction_id = @conn.read_bytes(UInt32, NetworkEndian)
+ length = @conn.read_bytes(UInt32, NetworkEndian)
+
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}")
+
+ if length > 67_000
+ raise Exception.new("SigHelper: Packet longer than expected (#{length})")
+ end
+
+ # Payload
+ slice = Bytes.new(length)
+ @conn.read(slice) if length > 0
+
+ LOGGER.trace("SigHelper: payload = #{slice}")
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+
+ return transaction_id, slice
+ end
+
+ # Write a single packet to the socket
+ private def write_packet(transaction_id : TransactionID, request : Request)
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}")
+
+ io = IO::Memory.new(1024)
+ io.write_bytes(request.opcode.to_u8, NetworkEndian)
+ io.write_bytes(transaction_id, NetworkEndian)
+
+ if payload = request.payload
+ payload.to_io(io)
+ end
+
+ @conn.send(io)
+ @conn.flush
+
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+ end
+ end
+
+ class Connection
+ @socket : UNIXSocket | TCPSocket
+
+ {% if flag?(:advanced_debug) %}
+ @io : IO::Hexdump
+ {% end %}
+
+ def initialize(host_or_path : String)
+ case host_or_path
+ when .starts_with?('/')
+ # Make sure that the file exists
+ if File.exists?(host_or_path)
+ @socket = UNIXSocket.new(host_or_path)
+ else
+ raise Exception.new("SigHelper: '#{host_or_path}' no such file")
+ end
+ when .starts_with?("tcp://")
+ uri = URI.parse(host_or_path)
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ else
+ uri = URI.parse("tcp://#{host_or_path}")
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ end
+ LOGGER.info("SigHelper: Using helper at '#{host_or_path}'")
+
+ {% if flag?(:advanced_debug) %}
+ @io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true)
+ {% end %}
+
+ @socket.sync = false
+ @socket.blocking = false
+ end
+
+ def closed? : Bool
+ return @socket.closed?
+ end
+
+ def close : Nil
+ @socket.close if !@socket.closed?
+ end
+
+ def flush(*args, **options)
+ @socket.flush(*args, **options)
+ end
+
+ def send(*args, **options)
+ @socket.send(*args, **options)
+ end
+
+ # Wrap IO functions, with added debug tooling if needed
+ {% for function in %w(read read_bytes write write_bytes) %}
+ def {{function.id}}(*args, **options)
+ {% if flag?(:advanced_debug) %}
+ @io.{{function.id}}(*args, **options)
+ {% else %}
+ @socket.{{function.id}}(*args, **options)
+ {% end %}
+ end
+ {% end %}
+ end
+end
diff --git a/src/invidious/helpers/signatures.cr b/src/invidious/helpers/signatures.cr
index ee09415b..82a28fc0 100644
--- a/src/invidious/helpers/signatures.cr
+++ b/src/invidious/helpers/signatures.cr
@@ -1,73 +1,53 @@
-alias SigProc = Proc(Array(String), Int32, Array(String))
+require "http/params"
+require "./sig_helper"
-struct DecryptFunction
- @decrypt_function = [] of {SigProc, Int32}
- @decrypt_time = Time.monotonic
+class Invidious::DecryptFunction
+ @last_update : Time = Time.utc - 42.days
- def initialize(@use_polling = true)
+ def initialize(uri_or_path)
+ @client = SigHelper::Client.new(uri_or_path)
+ self.check_update
end
- def update_decrypt_function
- @decrypt_function = fetch_decrypt_function
+ def check_update
+ # If we have updated in the last 5 minutes, do nothing
+ return if (Time.utc - @last_update) < 5.minutes
+
+ # Get the amount of time elapsed since when the player was updated, in the
+ # event where multiple invidious processes are run in parallel.
+ update_time_elapsed = (@client.get_player_timestamp || 301).seconds
+
+ if update_time_elapsed > 5.minutes
+ LOGGER.debug("Signature: Player might be outdated, updating")
+ @client.force_update
+ @last_update = Time.utc
+ end
end
- private def fetch_decrypt_function(id = "CvFH_6DNRCY")
- document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
- url = document.match(/src="(?\/s\/player\/[^\/]+\/player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
- player = YT_POOL.client &.get(url).body
-
- function_name = player.match(/^(?[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]
- function_body = player.match(/^#{Regex.escape(function_name)}=function\(\w\){(?[^}]+)}/m).not_nil!["body"]
- function_body = function_body.split(";")[1..-2]
-
- var_name = function_body[0][0, 2]
- var_body = player.delete("\n").match(/var #{Regex.escape(var_name)}={(?(.*?))};/).not_nil!["body"]
-
- operations = {} of String => SigProc
- var_body.split("},").each do |operation|
- op_name = operation.match(/^[^:]+/).not_nil![0]
- op_body = operation.match(/\{[^}]+/).not_nil![0]
-
- case op_body
- when "{a.reverse()"
- operations[op_name] = ->(a : Array(String), _b : Int32) { a.reverse }
- when "{a.splice(0,b)"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
- else
- operations[op_name] = ->(a : Array(String), b : Int32) { c = a[0]; a[0] = a[b % a.size]; a[b % a.size] = c; a }
- end
- end
-
- decrypt_function = [] of {SigProc, Int32}
- function_body.each do |function|
- function = function.lchop(var_name).delete("[].")
-
- op_name = function.match(/[^\(]+/).not_nil![0]
- value = function.match(/\(\w,(?[\d]+)\)/).not_nil!["value"].to_i
-
- decrypt_function << {operations[op_name], value}
- end
-
- return decrypt_function
+ def decrypt_nsig(n : String) : String?
+ self.check_update
+ return @client.decrypt_n_param(n)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
end
- def decrypt_signature(fmt : Hash(String, JSON::Any))
- return "" if !fmt["s"]? || !fmt["sp"]?
+ def decrypt_signature(str : String) : String?
+ self.check_update
+ return @client.decrypt_sig(str)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
- sp = fmt["sp"].as_s
- sig = fmt["s"].as_s.split("")
- if !@use_polling
- now = Time.monotonic
- if now - @decrypt_time > 60.seconds || @decrypt_function.size == 0
- @decrypt_function = fetch_decrypt_function
- @decrypt_time = Time.monotonic
- end
- end
-
- @decrypt_function.each do |proc, value|
- sig = proc.call(sig, value)
- end
-
- return "{sp}=#{sig.join("")}"
+ def get_sts : UInt64?
+ self.check_update
+ return @client.get_signature_timestamp
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
end
end
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index c1dc17db..5637e533 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -52,9 +52,9 @@ def recode_length_seconds(time)
end
def decode_interval(string : String) : Time::Span
- rawMinutes = string.try &.to_i32?
+ raw_minutes = string.try &.to_i32?
- if !rawMinutes
+ if !raw_minutes
hours = /(?\d+)h/.match(string).try &.["hours"].try &.to_i32
hours ||= 0
@@ -63,7 +63,7 @@ def decode_interval(string : String) : Time::Span
time = Time::Span.new(hours: hours, minutes: minutes)
else
- time = Time::Span.new(minutes: rawMinutes)
+ time = Time::Span.new(minutes: raw_minutes)
end
return time
@@ -111,24 +111,27 @@ def decode_date(string : String)
else nil # Continue
end
- # String matches format "20 hours ago", "4 months ago"...
- date = string.split(" ")[-3, 3]
- delta = date[0].to_i
+ # String matches format "20 hours ago", "4 months ago", "20s ago", "15min ago"...
+ match = string.match(/(?\d+) ?(?[smhdwy]\w*) ago/)
- case date[1]
- when .includes? "second"
+ raise "Could not parse #{string}" if match.nil?
+
+ delta = match["count"].to_i
+
+ case match["span"]
+ when .starts_with? "s" # second(s)
delta = delta.seconds
- when .includes? "minute"
+ when .starts_with? "mi" # minute(s)
delta = delta.minutes
- when .includes? "hour"
+ when .starts_with? "h" # hour(s)
delta = delta.hours
- when .includes? "day"
+ when .starts_with? "d" # day(s)
delta = delta.days
- when .includes? "week"
+ when .starts_with? "w" # week(s)
delta = delta.weeks
- when .includes? "month"
+ when .starts_with? "mo" # month(s)
delta = delta.months
- when .includes? "year"
+ when .starts_with? "y" # year(s)
delta = delta.years
else
raise "Could not parse #{string}"
@@ -161,21 +164,19 @@ def number_with_separator(number)
number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
-def short_text_to_number(short_text : String) : Int32
- case short_text
- when .ends_with? "M"
- number = short_text.rstrip(" mM").to_f
- number *= 1000000
- when .ends_with? "K"
- number = short_text.rstrip(" kK").to_f
- number *= 1000
- else
- number = short_text.rstrip(" ")
+def short_text_to_number(short_text : String) : Int64
+ matches = /(?\d+(\.\d+)?)\s?(?[mMkKbB]?)/.match(short_text)
+ number = matches.try &.["number"].to_f || 0.0
+
+ case matches.try &.["suffix"].downcase
+ when "k" then number *= 1_000
+ when "m" then number *= 1_000_000
+ when "b" then number *= 1_000_000_000
end
- number = number.to_i
-
- return number
+ return number.to_i64
+rescue ex
+ return 0_i64
end
def number_to_short_text(number)
@@ -261,7 +262,7 @@ def get_referer(env, fallback = "/", unroll = true)
end
referer = referer.request_target
- referer = "/" + referer.gsub(/[^\/?@&%=\-_.0-9a-zA-Z]/, "").lstrip("/\\")
+ referer = "/" + referer.gsub(/[^\/?@&%=\-_.:,*0-9a-zA-Z+]/, "").lstrip("/\\")
if referer == env.request.path
referer = fallback
@@ -322,64 +323,82 @@ def parse_range(range)
return 0_i64, nil
end
-def fetch_random_instance
- begin
- instance_api_client = make_client(URI.parse("https://api.invidious.io"))
-
- # Timeouts
- instance_api_client.connect_timeout = 10.seconds
- instance_api_client.dns_timeout = 10.seconds
-
- instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
- instance_api_client.close
- rescue Socket::ConnectError | IO::TimeoutError | JSON::ParseException
- instance_list = [] of JSON::Any
+def reduce_uri(uri : URI | String, max_length : Int32 = 50, suffix : String = "…") : String
+ str = uri.to_s.sub(/^https?:\/\//, "")
+ if str.size > max_length
+ str = "#{str[0, max_length]}#{suffix}"
end
+ return str
+end
- filtered_instance_list = [] of String
+# Get the html link from a NavigationEndpoint or an innertubeCommand
+def parse_link_endpoint(endpoint : JSON::Any, text : String, video_id : String)
+ if url = endpoint.dig?("urlEndpoint", "url").try &.as_s
+ url = URI.parse(url)
+ displayed_url = text
- instance_list.each do |data|
- # TODO Check if current URL is onion instance and use .onion types if so.
- if data[1]["type"] == "https"
- # Instances can have statistics disabled, which is an requirement of version validation.
- # as_nil? doesn't exist. Thus we'll have to handle the error raised if as_nil fails.
- begin
- data[1]["stats"].as_nil
- next
- rescue TypeCastError
- end
-
- # stats endpoint could also lack the software dict.
- next if data[1]["stats"]["software"]?.nil?
-
- # Makes sure the instance isn't too outdated.
- if remote_version = data[1]["stats"]?.try &.["software"]?.try &.["version"]
- remote_commit_date = remote_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
- next if !remote_commit_date
-
- remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
- local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)
-
- next if (remote_commit_date - local_commit_date).abs.days > 30
-
- begin
- data[1]["monitor"].as_nil
- health = data[1]["monitor"].as_h["dailyRatios"][0].as_h["ratio"]
- filtered_instance_list << data[0].as_s if health.to_s.to_f > 90
- rescue TypeCastError
- # We can't check the health if the monitoring is broken. Thus we'll just add it to the list
- # and move on. Ideally we'll ignore any instance that has broken health monitoring but due to the fact that
- # it's an error that often occurs with all the instances at the same time, we have to just skip the check.
- filtered_instance_list << data[0].as_s
- end
+ if url.host == "youtu.be"
+ url = "/watch?v=#{url.request_target.lstrip('/')}"
+ elsif url.host.nil? || url.host.not_nil!.ends_with?("youtube.com")
+ if url.path == "/redirect"
+ # Sometimes, links can be corrupted (why?) so make sure to fall back
+ # nicely. See https://github.com/iv-org/invidious/issues/2682
+ url = url.query_params["q"]? || ""
+ displayed_url = url
+ else
+ url = url.request_target
+ displayed_url = "youtube.com#{url}"
end
end
- end
- # If for some reason no instances managed to get fetched successfully then we'll just redirect to redirect.invidious.io
- if filtered_instance_list.size == 0
- return "redirect.invidious.io"
- end
+ text = %(#{reduce_uri(displayed_url)} )
+ elsif watch_endpoint = endpoint.dig?("watchEndpoint")
+ start_time = watch_endpoint["startTimeSeconds"]?.try &.as_i
+ link_video_id = watch_endpoint["videoId"].as_s
- return filtered_instance_list.sample(1)[0]
+ url = "/watch?v=#{link_video_id}"
+ url += "&t=#{start_time}" if !start_time.nil?
+
+ # If the current video ID (passed through from the caller function)
+ # is the same as the video ID in the link, add HTML attributes for
+ # the JS handler function that bypasses page reload.
+ #
+ # See: https://github.com/iv-org/invidious/issues/3063
+ if link_video_id == video_id
+ start_time ||= 0
+ text = %(#{reduce_uri(text)} )
+ else
+ text = %(#{text} )
+ end
+ elsif url = endpoint.dig?("commandMetadata", "webCommandMetadata", "url").try &.as_s
+ if text.starts_with?(/\s?[@#]/)
+ # Handle "pings" in comments and hashtags differently
+ # See:
+ # - https://github.com/iv-org/invidious/issues/3038
+ # - https://github.com/iv-org/invidious/issues/3062
+ text = %(#{text} )
+ else
+ text = %(#{reduce_uri(text)} )
+ end
+ end
+ return text
+end
+
+def encrypt_ecb_without_salt(data, key)
+ cipher = OpenSSL::Cipher.new("aes-128-ecb")
+ cipher.encrypt
+ cipher.key = key
+
+ io = IO::Memory.new
+ io.write(cipher.update(data))
+ io.write(cipher.final)
+ io.rewind
+
+ return io
+end
+
+def invidious_companion_encrypt(data)
+ timestamp = Time.utc.to_unix
+ encrypted_data = encrypt_ecb_without_salt("#{timestamp}|#{data}", CONFIG.invidious_companion_key)
+ return Base64.urlsafe_encode(encrypted_data)
end
diff --git a/src/invidious/helpers/webvtt.cr b/src/invidious/helpers/webvtt.cr
new file mode 100644
index 00000000..260d250f
--- /dev/null
+++ b/src/invidious/helpers/webvtt.cr
@@ -0,0 +1,81 @@
+# Namespace for logic relating to generating WebVTT files
+#
+# Probably not compliant with WebVTT's specs but it is enough for Invidious.
+module WebVTT
+ # A WebVTT builder generates WebVTT files
+ private class Builder
+ # See https://developer.mozilla.org/en-US/docs/Web/API/WebVTT_API#cue_payload
+ private ESCAPE_SUBSTITUTIONS = {
+ '&' => "&",
+ '<' => "<",
+ '>' => ">",
+ '\u200E' => "",
+ '\u200F' => "",
+ '\u00A0' => " ",
+ }
+
+ def initialize(@io : IO)
+ end
+
+ # Writes a vtt cue with the specified timestamp and contents
+ def cue(start_time : Time::Span, end_time : Time::Span, text : String)
+ timestamp(start_time, end_time)
+ @io << self.escape(text)
+ @io << "\n\n"
+ end
+
+ private def timestamp(start_time : Time::Span, end_time : Time::Span)
+ timestamp_component(start_time)
+ @io << " --> "
+ timestamp_component(end_time)
+
+ @io << '\n'
+ end
+
+ private def timestamp_component(timestamp : Time::Span)
+ @io << timestamp.hours.to_s.rjust(2, '0')
+ @io << ':' << timestamp.minutes.to_s.rjust(2, '0')
+ @io << ':' << timestamp.seconds.to_s.rjust(2, '0')
+ @io << '.' << timestamp.milliseconds.to_s.rjust(3, '0')
+ end
+
+ private def escape(text : String) : String
+ return text.gsub(ESCAPE_SUBSTITUTIONS)
+ end
+
+ def document(setting_fields : Hash(String, String)? = nil, &)
+ @io << "WEBVTT\n"
+
+ if setting_fields
+ setting_fields.each do |name, value|
+ @io << name << ": " << value << '\n'
+ end
+ end
+
+ @io << '\n'
+
+ yield
+ end
+ end
+
+ # Returns the resulting `String` of writing WebVTT to the yielded `WebVTT::Builder`
+ #
+ # ```
+ # string = WebVTT.build do |vtt|
+ # vtt.cue(Time::Span.new(seconds: 1), Time::Span.new(seconds: 2), "Line 1")
+ # vtt.cue(Time::Span.new(seconds: 2), Time::Span.new(seconds: 3), "Line 2")
+ # end
+ #
+ # string # => "WEBVTT\n\n00:00:01.000 --> 00:00:02.000\nLine 1\n\n00:00:02.000 --> 00:00:03.000\nLine 2\n\n"
+ # ```
+ #
+ # Accepts an optional settings fields hash to add settings attribute to the resulting vtt file.
+ def self.build(setting_fields : Hash(String, String)? = nil, &)
+ String.build do |str|
+ builder = Builder.new(str)
+ builder.document(setting_fields) do
+ yield builder
+ end
+ end
+ end
+end
diff --git a/src/invidious/http_server/utils.cr b/src/invidious/http_server/utils.cr
new file mode 100644
index 00000000..623a9177
--- /dev/null
+++ b/src/invidious/http_server/utils.cr
@@ -0,0 +1,41 @@
+require "uri"
+
+module Invidious::HttpServer
+ module Utils
+ extend self
+
+ def proxy_video_url(raw_url : String, *, region : String? = nil, absolute : Bool = false)
+ url = URI.parse(raw_url)
+
+ # Add some URL parameters
+ params = url.query_params
+ params["host"] = url.host.not_nil! # Should never be nil, in theory
+ params["region"] = region if !region.nil?
+ url.query_params = params
+
+ if absolute
+ return "#{HOST_URL}#{url.request_target}"
+ else
+ return url.request_target
+ end
+ end
+
+ def add_params_to_url(url : String | URI, params : URI::Params) : URI
+ url = URI.parse(url) if url.is_a?(String)
+
+ url_query = url.query || ""
+
+ # Append the parameters
+ url.query = String.build do |str|
+ if !url_query.empty?
+ str << url_query
+ str << '&'
+ end
+
+ str << params
+ end
+
+ return url
+ end
+ end
+end
diff --git a/src/invidious/jobs.cr b/src/invidious/jobs.cr
index ec0cad64..b6b673f7 100644
--- a/src/invidious/jobs.cr
+++ b/src/invidious/jobs.cr
@@ -1,12 +1,39 @@
module Invidious::Jobs
JOBS = [] of BaseJob
+ # Automatically generate a structure that wraps the various
+ # jobs' configs, so that the following YAML config can be used:
+ #
+ # jobs:
+ # job_name:
+ # enabled: true
+ # some_property: "value"
+ #
+ macro finished
+ struct JobsConfig
+ include YAML::Serializable
+
+ {% for sc in BaseJob.subclasses %}
+ # Voodoo macro to transform `Some::Module::CustomJob` to `custom`
+ {% class_name = sc.id.split("::").last.id.gsub(/Job$/, "").underscore %}
+
+ getter {{ class_name }} = {{ sc.name }}::Config.new
+ {% end %}
+
+ def initialize
+ end
+ end
+ end
+
def self.register(job : BaseJob)
JOBS << job
end
def self.start_all
JOBS.each do |job|
+ # Don't run the main routine if the job is disabled by config
+ next if job.disabled?
+
spawn { job.begin }
end
end
diff --git a/src/invidious/jobs/base_job.cr b/src/invidious/jobs/base_job.cr
index 47e75864..f90f0bfe 100644
--- a/src/invidious/jobs/base_job.cr
+++ b/src/invidious/jobs/base_job.cr
@@ -1,3 +1,33 @@
abstract class Invidious::Jobs::BaseJob
abstract def begin
+
+ # When this base job class is inherited, make sure to define
+ # a basic "Config" structure, that contains the "enable" property,
+ # and to create the associated instance property.
+ #
+ macro inherited
+ macro finished
+ # This config structure can be expanded as required.
+ struct Config
+ include YAML::Serializable
+
+ property enable = true
+
+ def initialize
+ end
+ end
+
+ property cfg = Config.new
+
+ # Return true if job is enabled by config
+ protected def enabled? : Bool
+ return (@cfg.enable == true)
+ end
+
+ # Return true if job is disabled by config
+ protected def disabled? : Bool
+ return (@cfg.enable == false)
+ end
+ end
+ end
end
diff --git a/src/invidious/jobs/bypass_captcha_job.cr b/src/invidious/jobs/bypass_captcha_job.cr
deleted file mode 100644
index 71f8a938..00000000
--- a/src/invidious/jobs/bypass_captcha_job.cr
+++ /dev/null
@@ -1,135 +0,0 @@
-class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
- def begin
- loop do
- begin
- random_video = PG_DB.query_one?("select id, ucid from (select id, ucid from channel_videos limit 1000) as s ORDER BY RANDOM() LIMIT 1", as: {id: String, ucid: String})
- if !random_video
- random_video = {id: "zj82_v2R6ts", ucid: "UCK87Lox575O_HCHBWaBSyGA"}
- end
- {"/watch?v=#{random_video["id"]}&gl=US&hl=en&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: random_video["ucid"])}.each do |path|
- response = YT_POOL.client &.get(path)
- if response.body.includes?("To continue with your YouTube experience, please fill out the form below.")
- html = XML.parse_html(response.body)
- form = html.xpath_node(%(//form[@action="/das_captcha"])).not_nil!
- site_key = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-sitekey"]
- s_value = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-s"]
-
- inputs = {} of String => String
- form.xpath_nodes(%(.//input[@name])).map do |node|
- inputs[node["name"]] = node["value"]
- end
-
- headers = response.cookies.add_request_headers(HTTP::Headers.new)
-
- response = JSON.parse(HTTP::Client.post(CONFIG.captcha_api_url + "/createTask",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "task" => {
- "type" => "NoCaptchaTaskProxyless",
- "websiteURL" => "https://www.youtube.com#{path}",
- "websiteKey" => site_key,
- "recaptchaDataSValue" => s_value,
- },
- }.to_json).body)
-
- raise response["error"].as_s if response["error"]?
- task_id = response["taskId"].as_i
-
- loop do
- sleep 10.seconds
-
- response = JSON.parse(HTTP::Client.post(CONFIG.captcha_api_url + "/getTaskResult",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "taskId" => task_id,
- }.to_json).body)
-
- if response["status"]?.try &.== "ready"
- break
- elsif response["errorId"]?.try &.as_i != 0
- raise response["errorDescription"].as_s
- end
- end
-
- inputs["g-recaptcha-response"] = response["solution"]["gRecaptchaResponse"].as_s
- headers["Cookies"] = response["solution"]["cookies"].as_h?.try &.map { |k, v| "#{k}=#{v}" }.join("; ") || ""
- response = YT_POOL.client &.post("/das_captcha", headers, form: inputs)
-
- response.cookies
- .select { |cookie| cookie.name != "PREF" }
- .each { |cookie| CONFIG.cookies << cookie }
-
- # Persist cookies between runs
- File.write("config/config.yml", CONFIG.to_yaml)
- elsif response.headers["Location"]?.try &.includes?("/sorry/index")
- location = response.headers["Location"].try { |u| URI.parse(u) }
- headers = HTTP::Headers{":authority" => location.host.not_nil!}
- response = YT_POOL.client &.get(location.request_target, headers)
-
- html = XML.parse_html(response.body)
- form = html.xpath_node(%(//form[@action="index"])).not_nil!
- site_key = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-sitekey"]
- s_value = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-s"]
-
- inputs = {} of String => String
- form.xpath_nodes(%(.//input[@name])).map do |node|
- inputs[node["name"]] = node["value"]
- end
-
- captcha_client = HTTPClient.new(URI.parse(CONFIG.captcha_api_url))
- captcha_client.family = CONFIG.force_resolve || Socket::Family::INET
- response = JSON.parse(captcha_client.post("/createTask",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "task" => {
- "type" => "NoCaptchaTaskProxyless",
- "websiteURL" => location.to_s,
- "websiteKey" => site_key,
- "recaptchaDataSValue" => s_value,
- },
- }.to_json).body)
-
- captcha_client.close
-
- raise response["error"].as_s if response["error"]?
- task_id = response["taskId"].as_i
-
- loop do
- sleep 10.seconds
-
- response = JSON.parse(captcha_client.post("/getTaskResult",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "taskId" => task_id,
- }.to_json).body)
-
- if response["status"]?.try &.== "ready"
- break
- elsif response["errorId"]?.try &.as_i != 0
- raise response["errorDescription"].as_s
- end
- end
-
- inputs["g-recaptcha-response"] = response["solution"]["gRecaptchaResponse"].as_s
- headers["Cookies"] = response["solution"]["cookies"].as_h?.try &.map { |k, v| "#{k}=#{v}" }.join("; ") || ""
- response = YT_POOL.client &.post("/sorry/index", headers: headers, form: inputs)
- headers = HTTP::Headers{
- "Cookie" => URI.parse(response.headers["location"]).query_params["google_abuse"].split(";")[0],
- }
- cookies = HTTP::Cookies.from_client_headers(headers)
-
- cookies.each { |cookie| CONFIG.cookies << cookie }
-
- # Persist cookies between runs
- File.write("config/config.yml", CONFIG.to_yaml)
- end
- end
- rescue ex
- LOGGER.error("BypassCaptchaJob: #{ex.message}")
- ensure
- sleep 1.minute
- Fiber.yield
- end
- end
- end
-end
diff --git a/src/invidious/jobs/clear_expired_items_job.cr b/src/invidious/jobs/clear_expired_items_job.cr
new file mode 100644
index 00000000..17191aac
--- /dev/null
+++ b/src/invidious/jobs/clear_expired_items_job.cr
@@ -0,0 +1,27 @@
+class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
+ # Remove items (videos, nonces, etc..) whose cache is outdated every hour.
+ # Removes the need for a cron job.
+ def begin
+ loop do
+ failed = false
+
+ LOGGER.info("jobs: running ClearExpiredItems job")
+
+ begin
+ Invidious::Database::Videos.delete_expired
+ Invidious::Database::Nonces.delete_expired
+ rescue DB::Error
+ failed = true
+ end
+
+ # Retry earlier than scheduled on DB error
+ if failed
+ LOGGER.info("jobs: ClearExpiredItems failed. Retrying in 10 minutes.")
+ sleep 10.minutes
+ else
+ LOGGER.info("jobs: ClearExpiredItems done.")
+ sleep 1.hour
+ end
+ end
+ end
+end
diff --git a/src/invidious/jobs/instance_refresh_job.cr b/src/invidious/jobs/instance_refresh_job.cr
new file mode 100644
index 00000000..cb4280b9
--- /dev/null
+++ b/src/invidious/jobs/instance_refresh_job.cr
@@ -0,0 +1,97 @@
+class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
+ # We update the internals of a constant so it can be accessed from anywhere
+ # within the codebase
+ #
+ # "INSTANCES" => Array(Tuple(String, String)) # region, instance
+
+ INSTANCES = {"INSTANCES" => [] of Tuple(String, String)}
+
+ def initialize
+ end
+
+ def begin
+ loop do
+ refresh_instances
+ LOGGER.info("InstanceListRefreshJob: Done, sleeping for 30 minutes")
+ sleep 30.minute
+ Fiber.yield
+ end
+ end
+
+ # Refreshes the list of instances used for redirects.
+ #
+ # Does the following three checks for each instance
+ # - Is it a clear-net instance?
+ # - Is it an instance with a good uptime?
+ # - Is it an updated instance?
+ private def refresh_instances
+ raw_instance_list = self.fetch_instances
+ filtered_instance_list = [] of Tuple(String, String)
+
+ raw_instance_list.each do |instance_data|
+ # TODO allow Tor hidden service instances when the current instance
+ # is also a hidden service. Same for i2p and any other non-clearnet instances.
+ begin
+ domain = instance_data[0]
+ info = instance_data[1]
+ stats = info["stats"]
+
+ next unless info["type"] == "https"
+ next if bad_uptime?(info["monitor"])
+ next if outdated?(stats["software"]["version"])
+
+ filtered_instance_list << {info["region"].as_s, domain.as_s}
+ rescue ex
+ if domain
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ else
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ end
+ end
+ end
+
+ if !filtered_instance_list.empty?
+ INSTANCES["INSTANCES"] = filtered_instance_list
+ end
+ end
+
+ # Fetches information regarding instances from api.invidious.io or an otherwise configured URL
+ private def fetch_instances : Array(JSON::Any)
+ begin
+ # We directly call the stdlib HTTP::Client here as it allows us to negate the effects
+ # of the force_resolve config option. This is needed as api.invidious.io does not support ipv6
+ # and as such the following request raises if we were to use force_resolve with the ipv6 value.
+ instance_api_client = HTTP::Client.new(URI.parse("https://api.invidious.io"))
+
+ # Timeouts
+ instance_api_client.connect_timeout = 10.seconds
+ instance_api_client.dns_timeout = 10.seconds
+
+ raw_instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
+ instance_api_client.close
+ rescue ex : Socket::ConnectError | IO::TimeoutError | JSON::ParseException
+ raw_instance_list = [] of JSON::Any
+ end
+
+ return raw_instance_list
+ end
+
+ # Checks if the given target instance is outdated
+ private def outdated?(target_instance_version) : Bool
+ remote_commit_date = target_instance_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
+ return false if !remote_commit_date
+
+ remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
+ local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)
+
+ return (remote_commit_date - local_commit_date).abs.days > 30
+ end
+
+ # Checks if the uptime of the target instance is greater than 90% over a 30 day period
+ private def bad_uptime?(target_instance_health_monitor) : Bool
+ return true if !target_instance_health_monitor["down"].as_bool == false
+ return true if target_instance_health_monitor["uptime"].as_f < 90
+
+ return false
+ end
+end
diff --git a/src/invidious/jobs/notification_job.cr b/src/invidious/jobs/notification_job.cr
index 2f525e08..968ee47f 100644
--- a/src/invidious/jobs/notification_job.cr
+++ b/src/invidious/jobs/notification_job.cr
@@ -1,15 +1,103 @@
+struct VideoNotification
+ getter video_id : String
+ getter channel_id : String
+ getter published : Time
+
+ def_hash @channel_id, @video_id
+
+ def ==(other)
+ video_id == other.video_id
+ end
+
+ def self.from_video(video : ChannelVideo) : self
+ VideoNotification.new(video.id, video.ucid, video.published)
+ end
+
+ def initialize(@video_id, @channel_id, @published)
+ end
+
+ def clone : VideoNotification
+ VideoNotification.new(video_id.clone, channel_id.clone, published.clone)
+ end
+end
+
class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
- private getter connection_channel : Channel({Bool, Channel(PQ::Notification)})
+ private getter notification_channel : ::Channel(VideoNotification)
+ private getter connection_channel : ::Channel({Bool, ::Channel(PQ::Notification)})
private getter pg_url : URI
- def initialize(@connection_channel, @pg_url)
+ def initialize(@notification_channel, @connection_channel, @pg_url)
end
def begin
- connections = [] of Channel(PQ::Notification)
+ connections = [] of ::Channel(PQ::Notification)
PG.connect_listen(pg_url, "notifications") { |event| connections.each(&.send(event)) }
+ # hash of channels to their videos (id+published) that need notifying
+ to_notify = Hash(String, Set(VideoNotification)).new(
+ ->(hash : Hash(String, Set(VideoNotification)), key : String) {
+ hash[key] = Set(VideoNotification).new
+ }
+ )
+ notify_mutex = Mutex.new
+
+ # fiber to locally cache all incoming notifications (from pubsub webhooks and refresh channels job)
+ spawn do
+ begin
+ loop do
+ notification = notification_channel.receive
+ notify_mutex.synchronize do
+ to_notify[notification.channel_id] << notification
+ end
+ end
+ end
+ end
+ # fiber to regularly persist all cached notifications
+ spawn do
+ loop do
+ begin
+ LOGGER.debug("NotificationJob: waking up")
+ cloned = {} of String => Set(VideoNotification)
+ notify_mutex.synchronize do
+ cloned = to_notify.clone
+ to_notify.clear
+ end
+
+ cloned.each do |channel_id, notifications|
+ if notifications.empty?
+ next
+ end
+
+ LOGGER.info("NotificationJob: updating channel #{channel_id} with #{notifications.size} notifications")
+ if CONFIG.enable_user_notifications
+ video_ids = notifications.map(&.video_id)
+ Invidious::Database::Users.add_multiple_notifications(channel_id, video_ids)
+ PG_DB.using_connection do |conn|
+ notifications.each do |n|
+ # Deliver notifications to `/api/v1/auth/notifications`
+ payload = {
+ "topic" => n.channel_id,
+ "videoId" => n.video_id,
+ "published" => n.published.to_unix,
+ }.to_json
+ conn.exec("NOTIFY notifications, E'#{payload}'")
+ end
+ end
+ else
+ Invidious::Database::Users.feed_needs_update(channel_id)
+ end
+ end
+
+ LOGGER.trace("NotificationJob: Done, sleeping")
+ rescue ex
+ LOGGER.error("NotificationJob: #{ex.message}")
+ end
+ sleep 1.minute
+ Fiber.yield
+ end
+ end
+
loop do
action, connection = connection_channel.receive
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index 92681408..80812a63 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -8,7 +8,7 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
max_fibers = CONFIG.channel_threads
lim_fibers = max_fibers
active_fibers = 0
- active_channel = Channel(Bool).new
+ active_channel = ::Channel(Bool).new
backoff = 2.minutes
loop do
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index 4b52c959..4f8130df 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -7,7 +7,7 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
def begin
max_fibers = CONFIG.feed_threads
active_fibers = 0
- active_channel = Channel(Bool).new
+ active_channel = ::Channel(Bool).new
loop do
db.query("SELECT email FROM users WHERE feed_needs_update = true OR feed_needs_update IS NULL") do |rs|
diff --git a/src/invidious/jobs/statistics_refresh_job.cr b/src/invidious/jobs/statistics_refresh_job.cr
index a113bd77..66c91ad5 100644
--- a/src/invidious/jobs/statistics_refresh_job.cr
+++ b/src/invidious/jobs/statistics_refresh_job.cr
@@ -18,6 +18,13 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
"updatedAt" => Time.utc.to_unix,
"lastChannelRefreshedAt" => 0_i64,
},
+
+ #
+ # "totalRequests" => 0_i64,
+ # "successfulRequests" => 0_i64
+ # "ratio" => 0_i64
+ #
+ "playback" => {} of String => Int64 | Float64,
}
private getter db : DB::Database
@@ -30,7 +37,7 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
loop do
refresh_stats
- sleep 1.minute
+ sleep 10.minute
Fiber.yield
end
end
@@ -49,12 +56,15 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
users = STATISTICS.dig("usage", "users").as(Hash(String, Int64))
users["total"] = Invidious::Database::Statistics.count_users_total
- users["activeHalfyear"] = Invidious::Database::Statistics.count_users_active_1m
- users["activeMonth"] = Invidious::Database::Statistics.count_users_active_6m
+ users["activeHalfyear"] = Invidious::Database::Statistics.count_users_active_6m
+ users["activeMonth"] = Invidious::Database::Statistics.count_users_active_1m
STATISTICS["metadata"] = {
"updatedAt" => Time.utc.to_unix,
"lastChannelRefreshedAt" => Invidious::Database::Statistics.channel_last_update.try &.to_unix || 0_i64,
}
+
+ # Reset playback requests tracker
+ STATISTICS["playback"] = {} of String => Int64 | Float64
end
end
diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr
index a431a48a..8584fb9c 100644
--- a/src/invidious/jobs/subscribe_to_feeds_job.cr
+++ b/src/invidious/jobs/subscribe_to_feeds_job.cr
@@ -12,7 +12,7 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
end
active_fibers = 0
- active_channel = Channel(Bool).new
+ active_channel = ::Channel(Bool).new
loop do
db.query_all("SELECT id FROM channels WHERE CURRENT_TIMESTAMP - subscribed > interval '4 days' OR subscribed IS NULL") do |rs|
diff --git a/src/invidious/jobs/update_decrypt_function_job.cr b/src/invidious/jobs/update_decrypt_function_job.cr
deleted file mode 100644
index 6fa0ae1b..00000000
--- a/src/invidious/jobs/update_decrypt_function_job.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
- def begin
- loop do
- begin
- DECRYPT_FUNCTION.update_decrypt_function
- rescue ex
- LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
- ensure
- sleep 1.minute
- Fiber.yield
- end
- end
- end
-end
diff --git a/src/invidious/jsonify/api_v1/common.cr b/src/invidious/jsonify/api_v1/common.cr
new file mode 100644
index 00000000..64b06465
--- /dev/null
+++ b/src/invidious/jsonify/api_v1/common.cr
@@ -0,0 +1,18 @@
+require "json"
+
+module Invidious::JSONify::APIv1
+ extend self
+
+ def thumbnails(json : JSON::Builder, id : String)
+ json.array do
+ build_thumbnails(id).each do |thumbnail|
+ json.object do
+ json.field "quality", thumbnail[:name]
+ json.field "url", "#{thumbnail[:host]}/vi/#{id}/#{thumbnail["url"]}.jpg"
+ json.field "width", thumbnail[:width]
+ json.field "height", thumbnail[:height]
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/jsonify/api_v1/video_json.cr b/src/invidious/jsonify/api_v1/video_json.cr
new file mode 100644
index 00000000..58805af2
--- /dev/null
+++ b/src/invidious/jsonify/api_v1/video_json.cr
@@ -0,0 +1,301 @@
+require "json"
+
+module Invidious::JSONify::APIv1
+ extend self
+
+ def video(video : Video, json : JSON::Builder, *, locale : String?, proxy : Bool = false)
+ json.object do
+ json.field "type", video.video_type
+
+ json.field "title", video.title
+ json.field "videoId", video.id
+
+ json.field "error", video.info["reason"] if video.info["reason"]?
+
+ json.field "videoThumbnails" do
+ self.thumbnails(json, video.id)
+ end
+ json.field "storyboards" do
+ self.storyboards(json, video.id, video.storyboards)
+ end
+
+ json.field "description", video.description
+ json.field "descriptionHtml", video.description_html
+ json.field "published", video.published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
+ json.field "keywords", video.keywords
+
+ json.field "viewCount", video.views
+ json.field "likeCount", video.likes
+ json.field "dislikeCount", 0_i64
+
+ json.field "paid", video.paid
+ json.field "premium", video.premium
+ json.field "isFamilyFriendly", video.is_family_friendly
+ json.field "allowedRegions", video.allowed_regions
+ json.field "genre", video.genre
+ json.field "genreUrl", video.genre_url
+
+ json.field "author", video.author
+ json.field "authorId", video.ucid
+ json.field "authorUrl", "/channel/#{video.ucid}"
+ json.field "authorVerified", video.author_verified
+
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", video.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ json.field "subCountText", video.sub_count_text
+
+ json.field "lengthSeconds", video.length_seconds
+ json.field "allowRatings", video.allow_ratings
+ json.field "rating", 0_i64
+ json.field "isListed", video.is_listed
+ json.field "liveNow", video.live_now
+ json.field "isPostLiveDvr", video.post_live_dvr
+ json.field "isUpcoming", video.upcoming?
+
+ if video.premiere_timestamp
+ json.field "premiereTimestamp", video.premiere_timestamp.try &.to_unix
+ end
+
+ if hlsvp = video.hls_manifest_url
+ hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", HOST_URL)
+ json.field "hlsUrl", hlsvp
+ end
+
+ json.field "dashUrl", "#{HOST_URL}/api/manifest/dash/id/#{video.id}"
+
+ json.field "adaptiveFormats" do
+ json.array do
+ video.adaptive_fmts.each do |fmt|
+ json.object do
+ # Only available on regular videos, not livestreams/OTF streams
+ if init_range = fmt["initRange"]?
+ json.field "init", "#{init_range["start"]}-#{init_range["end"]}"
+ end
+ if index_range = fmt["indexRange"]?
+ json.field "index", "#{index_range["start"]}-#{index_range["end"]}"
+ end
+
+ # Not available on MPEG-4 Timed Text (`text/mp4`) streams (livestreams only)
+ json.field "bitrate", fmt["bitrate"].as_i.to_s if fmt["bitrate"]?
+
+ if proxy
+ json.field "url", Invidious::HttpServer::Utils.proxy_video_url(
+ fmt["url"].to_s, absolute: true
+ )
+ else
+ json.field "url", fmt["url"]
+ end
+
+ json.field "itag", fmt["itag"].as_i.to_s
+ json.field "type", fmt["mimeType"]
+ json.field "clen", fmt["contentLength"]? || "-1"
+
+ # Last modified is a unix timestamp with µS, with the dot omitted.
+ # E.g: 1638056732(.)141582
+ #
+ # On livestreams, it's not present, so always fall back to the
+ # current unix timestamp (up to mS precision) for compatibility.
+ last_modified = fmt["lastModified"]?
+ last_modified ||= "#{Time.utc.to_unix_ms}000"
+ json.field "lmt", last_modified
+
+ json.field "projectionType", fmt["projectionType"]
+
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
+ json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
+ json.field "container", fmt_info["ext"]
+ json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
+ end
+
+ # Livestream chunk infos
+ json.field "targetDurationSec", fmt["targetDurationSec"].as_i if fmt.has_key?("targetDurationSec")
+ json.field "maxDvrDurationSec", fmt["maxDvrDurationSec"].as_i if fmt.has_key?("maxDvrDurationSec")
+
+ # Audio-related data
+ json.field "audioQuality", fmt["audioQuality"] if fmt.has_key?("audioQuality")
+ json.field "audioSampleRate", fmt["audioSampleRate"].as_s.to_i if fmt.has_key?("audioSampleRate")
+ json.field "audioChannels", fmt["audioChannels"] if fmt.has_key?("audioChannels")
+
+ # Extra misc stuff
+ json.field "colorInfo", fmt["colorInfo"] if fmt.has_key?("colorInfo")
+ json.field "captionTrack", fmt["captionTrack"] if fmt.has_key?("captionTrack")
+ end
+ end
+ end
+ end
+
+ json.field "formatStreams" do
+ json.array do
+ video.fmt_stream.each do |fmt|
+ json.object do
+ if proxy
+ json.field "url", Invidious::HttpServer::Utils.proxy_video_url(
+ fmt["url"].to_s, absolute: true
+ )
+ else
+ json.field "url", fmt["url"]
+ end
+ json.field "itag", fmt["itag"].as_i.to_s
+ json.field "type", fmt["mimeType"]
+ json.field "quality", fmt["quality"]
+
+ json.field "bitrate", fmt["bitrate"].as_i.to_s if fmt["bitrate"]?
+
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
+ json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
+ json.field "container", fmt_info["ext"]
+ json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
+ end
+ end
+ end
+ end
+ end
+
+ json.field "captions" do
+ json.array do
+ video.captions.each do |caption|
+ json.object do
+ json.field "label", caption.name
+ json.field "language_code", caption.language_code
+ json.field "url", "/api/v1/captions/#{video.id}?label=#{URI.encode_www_form(caption.name)}"
+ end
+ end
+ end
+ end
+
+ if !video.music.empty?
+ json.field "musicTracks" do
+ json.array do
+ video.music.each do |music|
+ json.object do
+ json.field "song", music.song
+ json.field "artist", music.artist
+ json.field "album", music.album
+ json.field "license", music.license
+ end
+ end
+ end
+ end
+ end
+
+ json.field "recommendedVideos" do
+ json.array do
+ video.related_videos.each do |rv|
+ if rv["id"]?
+ json.object do
+ json.field "videoId", rv["id"]
+ json.field "title", rv["title"]
+ json.field "videoThumbnails" do
+ self.thumbnails(json, rv["id"])
+ end
+
+ json.field "author", rv["author"]
+ json.field "authorUrl", "/channel/#{rv["ucid"]?}"
+ json.field "authorId", rv["ucid"]?
+ json.field "authorVerified", rv["author_verified"] == "true"
+ if rv["author_thumbnail"]?
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", rv["author_thumbnail"].gsub(/s\d+-/, "s#{quality}-")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+ end
+
+ json.field "lengthSeconds", rv["length_seconds"]?.try &.to_i
+ json.field "viewCountText", rv["short_view_count"]?
+ json.field "viewCount", rv["view_count"]?.try &.empty? ? nil : rv["view_count"].to_i64
+ json.field "published", rv["published"]?
+ if rv["published"]?.try &.presence
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(Time.parse_rfc3339(rv["published"].to_s), locale))
+ else
+ json.field "publishedText", ""
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def storyboards(json, id, storyboards)
+ json.array do
+ storyboards.each do |sb|
+ json.object do
+ json.field "url", "/api/v1/storyboards/#{id}?width=#{sb.width}&height=#{sb.height}"
+ json.field "templateUrl", sb.url.to_s
+ json.field "width", sb.width
+ json.field "height", sb.height
+ json.field "count", sb.count
+ json.field "interval", sb.interval
+ json.field "storyboardWidth", sb.columns
+ json.field "storyboardHeight", sb.rows
+ json.field "storyboardCount", sb.images_count
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index 3f342b92..28ff0ff6 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -81,7 +81,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
})
end
-def template_mix(mix)
+def template_mix(mix, listen)
html = <<-END_HTML