diff --git a/.ameba.yml b/.ameba.yml
index 96cbc8f0..36d7c48f 100644
--- a/.ameba.yml
+++ b/.ameba.yml
@@ -20,6 +20,13 @@ Lint/ShadowingOuterLocalVar:
Excluded:
- src/invidious/helpers/tokens.cr
+Lint/NotNil:
+ Enabled: false
+
+Lint/SpecFilename:
+ Excluded:
+ - spec/parsers_helper.cr
+
#
# Style
@@ -31,6 +38,29 @@ Style/RedundantBegin:
Style/RedundantReturn:
Enabled: false
+Style/RedundantNext:
+ Enabled: false
+
+Style/ParenthesesAroundCondition:
+ Enabled: false
+
+# This requires a rewrite of most data structs (and their usage) in Invidious.
+Naming/QueryBoolMethods:
+ Enabled: false
+
+Naming/AccessorMethodName:
+ Enabled: false
+
+Naming/BlockParameterName:
+ Enabled: false
+
+# Hides TODO comment warnings.
+#
+# Call `bin/ameba --only Documentation/DocumentationAdmonition` to
+# list them.
+Documentation/DocumentationAdmonition:
+ Enabled: false
+
#
# Metrics
@@ -39,50 +69,4 @@ Style/RedundantReturn:
# Ignore function complexity (number of if/else & case/when branches)
# For some functions that can hardly be simplified for now
Metrics/CyclomaticComplexity:
- Excluded:
- # get_about_info(ucid, locale) => [17/10]
- - src/invidious/channels/about.cr
-
- # fetch_channel_community(ucid, continuation, ...) => [34/10]
- - src/invidious/channels/community.cr
-
- # create_notification_stream(env, topics, connection_channel) => [14/10]
- - src/invidious/helpers/helpers.cr:84:5
-
- # get_index(plural_form, count) => [25/10]
- - src/invidious/helpers/i18next.cr
-
- # call(context) => [18/10]
- - src/invidious/helpers/static_file_handler.cr
-
- # show(env) => [38/10]
- - src/invidious/routes/embed.cr
-
- # get_video_playback(env) => [45/10]
- - src/invidious/routes/video_playback.cr
-
- # handle(env) => [40/10]
- - src/invidious/routes/watch.cr
-
- # playlist_ajax(env) => [24/10]
- - src/invidious/routes/playlists.cr
-
- # fetch_youtube_comments(id, cursor, ....) => [40/10]
- # template_youtube_comments(comments, locale, ...) => [16/10]
- # content_to_comment_html(content) => [14/10]
- - src/invidious/comments.cr
-
- # to_json(locale, json) => [21/10]
- # extract_video_info(video_id, ...) => [44/10]
- # process_video_params(query, preferences) => [20/10]
- - src/invidious/videos.cr
-
-
-
-#src/invidious/playlists.cr:327:5
-#[C] Metrics/CyclomaticComplexity: Cyclomatic complexity too high [19/10]
-# fetch_playlist(plid : String)
-
-#src/invidious/playlists.cr:436:5
-#[C] Metrics/CyclomaticComplexity: Cyclomatic complexity too high [11/10]
-# extract_playlist_videos(initial_data : Hash(String, JSON::Any))
+ Enabled: false
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 7a2c3760..9f17bb40 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,12 +1,9 @@
-# Default and lowest precedence. If none of the below matches, @iv-org/developers would be requested for review.
-* @iv-org/developers
-
docker-compose.yml @unixfox
docker/ @unixfox
kubernetes/ @unixfox
README.md @thefrenchghosty
-config/config.example.yml @thefrenchghosty @SamantazFox @unixfox
+config/config.example.yml @SamantazFox @unixfox
scripts/ @syeopite
shards.lock @syeopite
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 4c1a6330..02bc3795 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -10,8 +10,10 @@ assignees: ''
-
#{issue_template}
+ #{issue_template}
END_HTML
@@ -128,7 +139,7 @@ def error_json_helper(
env : HTTP::Server::Context,
status_code : Int32,
exception : Exception,
- additional_fields : Hash(String, Object) | Nil = nil
+ additional_fields : Hash(String, Object) | Nil = nil,
)
if exception.is_a?(InfoException)
return error_json_helper(env, status_code, exception.message || "", additional_fields)
@@ -150,7 +161,7 @@ def error_json_helper(
env : HTTP::Server::Context,
status_code : Int32,
message : String,
- additional_fields : Hash(String, Object) | Nil = nil
+ additional_fields : Hash(String, Object) | Nil = nil,
)
env.response.content_type = "application/json"
env.response.status_code = status_code
@@ -190,7 +201,7 @@ def error_redirect_helper(env : HTTP::Server::Context)
#{switch_instance}
- #{go_to_youtube}
+ #{go_to_youtube}
END_HTML
diff --git a/src/invidious/helpers/handlers.cr b/src/invidious/helpers/handlers.cr
index 174f620d..13ea9fe9 100644
--- a/src/invidious/helpers/handlers.cr
+++ b/src/invidious/helpers/handlers.cr
@@ -27,6 +27,7 @@ class Kemal::RouteHandler
# Processes the route if it's a match. Otherwise renders 404.
private def process_request(context)
raise Kemal::Exceptions::RouteNotFound.new(context) unless context.route_found?
+ return if context.response.closed?
content = context.route.handler.call(context)
if !Kemal.config.error_handlers.empty? && Kemal.config.error_handlers.has_key?(context.response.status_code) && exclude_match?(context)
@@ -97,7 +98,7 @@ class AuthHandler < Kemal::Handler
if token = env.request.headers["Authorization"]?
token = JSON.parse(URI.decode_www_form(token.lchop("Bearer ")))
session = URI.decode_www_form(token["session"].as_s)
- scopes, expire, signature = validate_request(token, session, env.request, HMAC_KEY, nil)
+ scopes, _, _ = validate_request(token, session, env.request, HMAC_KEY, nil)
if email = Invidious::Database::SessionIDs.select_email(session)
user = Invidious::Database::Users.select!(email: email)
diff --git a/src/invidious/helpers/i18n.cr b/src/invidious/helpers/i18n.cr
index 23a1aafc..bca2edda 100644
--- a/src/invidious/helpers/i18n.cr
+++ b/src/invidious/helpers/i18n.cr
@@ -1,8 +1,22 @@
+# Languages requiring a better level of translation (at least 20%)
+# before they can be added to the list below:
+#
+# "af" => "", # Afrikaans
+# "az" => "", # Azerbaijani
+# "be" => "", # Belarusian
+# "bn_BD" => "", # Bengali (Bangladesh)
+# "ia" => "", # Interlingua
+# "or" => "", # Odia
+# "tk" => "", # Turkmen
+# "tok => "", # Toki Pona
+#
LOCALES_LIST = {
"ar" => "العربية", # Arabic
+ "bg" => "български", # Bulgarian
"bn" => "বাংলা", # Bengali
"ca" => "Català", # Catalan
"cs" => "Čeština", # Czech
+ "cy" => "Cymraeg", # Welsh
"da" => "Dansk", # Danish
"de" => "Deutsch", # German
"el" => "Ελληνικά", # Greek
@@ -23,6 +37,7 @@ LOCALES_LIST = {
"it" => "Italiano", # Italian
"ja" => "日本語", # Japanese
"ko" => "한국어", # Korean
+ "lmo" => "Lombard", # Lombard
"lt" => "Lietuvių", # Lithuanian
"nb-NO" => "Norsk bokmål", # Norwegian Bokmål
"nl" => "Nederlands", # Dutch
@@ -39,6 +54,7 @@ LOCALES_LIST = {
"sr" => "Srpski (latinica)", # Serbian (Latin)
"sr_Cyrl" => "Српски (ћирилица)", # Serbian (Cyrillic)
"sv-SE" => "Svenska", # Swedish
+ "ta" => "தமிழ்", # Tamil
"tr" => "Türkçe", # Turkish
"uk" => "Українська", # Ukrainian
"vi" => "Tiếng Việt", # Vietnamese
diff --git a/src/invidious/helpers/i18next.cr b/src/invidious/helpers/i18next.cr
index 9f4077e1..684e6d14 100644
--- a/src/invidious/helpers/i18next.cr
+++ b/src/invidious/helpers/i18next.cr
@@ -95,7 +95,6 @@ module I18next::Plurals
"hr" => PluralForms::Special_Hungarian_Serbian,
"it" => PluralForms::Special_Spanish_Italian,
"pt" => PluralForms::Special_French_Portuguese,
- "pt" => PluralForms::Special_French_Portuguese,
"sr" => PluralForms::Special_Hungarian_Serbian,
}
@@ -189,7 +188,7 @@ module I18next::Plurals
# Emulate the `rule.numbers.size == 2 && rule.numbers[0] == 1` check
# from original i18next code
- private def is_simple_plural(form : PluralForms) : Bool
+ private def simple_plural?(form : PluralForms) : Bool
case form
when .single_gt_one? then return true
when .single_not_one? then return true
@@ -211,7 +210,7 @@ module I18next::Plurals
idx = SuffixIndex.get_index(plural_form, count)
# Simple plurals are handled differently in all versions (but v4)
- if @simplify_plural_suffix && is_simple_plural(plural_form)
+ if @simplify_plural_suffix && simple_plural?(plural_form)
return (idx == 1) ? "_plural" : ""
end
@@ -262,9 +261,9 @@ module I18next::Plurals
when .special_hebrew? then return special_hebrew(count)
when .special_odia? then return special_odia(count)
# Mixed v3/v4 forms
- when .special_spanish_italian? then return special_cldr_Spanish_Italian(count)
- when .special_french_portuguese? then return special_cldr_French_Portuguese(count)
- when .special_hungarian_serbian? then return special_cldr_Hungarian_Serbian(count)
+ when .special_spanish_italian? then return special_cldr_spanish_italian(count)
+ when .special_french_portuguese? then return special_cldr_french_portuguese(count)
+ when .special_hungarian_serbian? then return special_cldr_hungarian_serbian(count)
else
# default, if nothing matched above
return 0_u8
@@ -535,7 +534,7 @@ module I18next::Plurals
#
# This rule is mostly compliant to CLDR v42
#
- def self.special_cldr_Spanish_Italian(count : Int) : UInt8
+ def self.special_cldr_spanish_italian(count : Int) : UInt8
return 0_u8 if (count == 1) # one
return 1_u8 if (count != 0 && count % 1_000_000 == 0) # many
return 2_u8 # other
@@ -545,7 +544,7 @@ module I18next::Plurals
#
# This rule is mostly compliant to CLDR v42
#
- def self.special_cldr_French_Portuguese(count : Int) : UInt8
+ def self.special_cldr_french_portuguese(count : Int) : UInt8
return 0_u8 if (count == 0 || count == 1) # one
return 1_u8 if (count % 1_000_000 == 0) # many
return 2_u8 # other
@@ -555,7 +554,7 @@ module I18next::Plurals
#
# This rule is mostly compliant to CLDR v42
#
- def self.special_cldr_Hungarian_Serbian(count : Int) : UInt8
+ def self.special_cldr_hungarian_serbian(count : Int) : UInt8
n_mod_10 = count % 10
n_mod_100 = count % 100
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index e2e50905..03349595 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,3 +1,5 @@
+require "colorize"
+
enum LogLevel
All = 0
Trace = 1
@@ -10,7 +12,9 @@ enum LogLevel
end
class Invidious::LogHandler < Kemal::BaseLogHandler
- def initialize(@io : IO = STDOUT, @level = LogLevel::Debug)
+ def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
+ Colorize.enabled = use_color
+ Colorize.on_tty_only!
end
def call(context : HTTP::Server::Context)
@@ -34,28 +38,27 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
context
end
- def puts(message : String)
- @io << message << '\n'
- @io.flush
- end
-
def write(message : String)
@io << message
@io.flush
end
- def set_log_level(level : String)
- @level = LogLevel.parse(level)
- end
-
- def set_log_level(level : LogLevel)
- @level = level
+ def color(level)
+ case level
+ when LogLevel::Trace then :cyan
+ when LogLevel::Debug then :green
+ when LogLevel::Info then :white
+ when LogLevel::Warn then :yellow
+ when LogLevel::Error then :red
+ when LogLevel::Fatal then :magenta
+ else :default
+ end
end
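+
+  # The macro below generates one logging method per level (`trace`, `debug`,
+  # `info`, `warn`, `error`, `fatal`); e.g. `LOGGER.info("msg")` only writes
+  # when the configured @level is Info or lower.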
{% for level in %w(trace debug info warn error fatal) %}
def {{level.id}}(message : String)
if LogLevel::{{level.id.capitalize}} >= @level
- puts("#{Time.utc} [{{level.id}}] #{message}")
+ puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
end
end
{% end %}
diff --git a/src/invidious/helpers/macros.cr b/src/invidious/helpers/macros.cr
index 43e7171b..84847321 100644
--- a/src/invidious/helpers/macros.cr
+++ b/src/invidious/helpers/macros.cr
@@ -55,12 +55,11 @@ macro templated(_filename, template = "template", navbar_search = true)
{{ layout = "src/invidious/views/" + template + ".ecr" }}
__content_filename__ = {{filename}}
- content = Kilt.render({{filename}})
- Kilt.render({{layout}})
+ render {{filename}}, {{layout}}
end
macro rendered(filename)
- Kilt.render("src/invidious/views/#{{{filename}}}.ecr")
+ render("src/invidious/views/#{{{filename}}}.ecr")
end
# Similar to Kemals halt method but works in a
diff --git a/src/invidious/helpers/serialized_yt_data.cr b/src/invidious/helpers/serialized_yt_data.cr
index 31a3cf44..2796a8dc 100644
--- a/src/invidious/helpers/serialized_yt_data.cr
+++ b/src/invidious/helpers/serialized_yt_data.cr
@@ -1,3 +1,16 @@
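+# Badges are stored as a bit flag set: `@[Flags]` gives every member its own
+# bit, so a video can carry any combination (for example
+# `VideoBadges::LiveNow | VideoBadges::FourK`, an illustrative value rather
+# than one taken from real YouTube data), and Crystal generates the
+# `live_now?` / `four_k?` / ... predicates used when serializing to JSON below.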
+@[Flags]
+enum VideoBadges
+ LiveNow
+ Premium
+ ThreeD
+ FourK
+ New
+ EightK
+ VR180
+ VR360
+ ClosedCaptions
+end
+
struct SearchVideo
include DB::Serializable
@@ -9,10 +22,10 @@ struct SearchVideo
property views : Int64
property description_html : String
property length_seconds : Int32
- property live_now : Bool
- property premium : Bool
property premiere_timestamp : Time?
property author_verified : Bool
+ property author_thumbnail : String?
+ property badges : VideoBadges
def to_xml(auto_generated, query_params, xml : XML::Builder)
query_params["v"] = self.id
@@ -76,6 +89,24 @@ struct SearchVideo
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "authorVerified", self.author_verified
+ author_thumbnail = self.author_thumbnail
+
+ if author_thumbnail
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+ end
+
json.field "videoThumbnails" do
Invidious::JSONify::APIv1.thumbnails(json, self.id)
end
@@ -88,13 +119,20 @@ struct SearchVideo
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "lengthSeconds", self.length_seconds
- json.field "liveNow", self.live_now
- json.field "premium", self.premium
- json.field "isUpcoming", self.is_upcoming
+ json.field "liveNow", self.badges.live_now?
+ json.field "premium", self.badges.premium?
+ json.field "isUpcoming", self.upcoming?
if self.premiere_timestamp
json.field "premiereTimestamp", self.premiere_timestamp.try &.to_unix
end
+ json.field "isNew", self.badges.new?
+ json.field "is4k", self.badges.four_k?
+ json.field "is8k", self.badges.eight_k?
+ json.field "isVr180", self.badges.vr180?
+ json.field "isVr360", self.badges.vr360?
+ json.field "is3d", self.badges.three_d?
+ json.field "hasCaptions", self.badges.closed_captions?
end
end
@@ -109,7 +147,7 @@ struct SearchVideo
to_json(nil, json)
end
- def is_upcoming
+ def upcoming?
premiere_timestamp ? true : false
end
end
@@ -204,7 +242,7 @@ struct SearchChannel
qualities.each do |quality|
json.object do
- json.field "url", self.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
+ json.field "url", self.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
json.field "width", quality
json.field "height", quality
end
@@ -253,6 +291,55 @@ struct SearchHashtag
end
end
+# A `ProblematicTimelineItem` is a `SearchItem` created by Invidious that
+# represents an item that caused an exception during parsing.
+#
+# This is not a parsed object from YouTube but rather an Invidious-only type
+# created to gracefully communicate parse errors without throwing away
+# the rest of the (hopefully) successfully parsed items on the page.
+struct ProblematicTimelineItem
+ property parse_exception : Exception
+ property id : String
+
+ def initialize(@parse_exception)
+ @id = Random.new.hex(8)
+ end
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "parse-error"
+ json.field "errorMessage", @parse_exception.message
+ json.field "errorBacktrace", @parse_exception.inspect_with_backtrace
+ end
+ end
+
+ # Provides compatibility with PlaylistVideo
+ def to_json(json : JSON::Builder, *args, **kwargs)
+ return to_json("", json)
+ end
+
+ def to_xml(env, locale, xml : XML::Builder)
+ xml.element("entry") do
+ xml.element("id") { xml.text "iv-err-#{@id}" }
+ xml.element("title") { xml.text "Parse Error: This item has failed to parse" }
+ xml.element("updated") { xml.text Time.utc.to_rfc3339 }
+
+ xml.element("content", type: "xhtml") do
+ xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
+ xml.element("div") do
+ xml.element("h4") { translate(locale, "timeline_parse_error_placeholder_heading") }
+ xml.element("p") { translate(locale, "timeline_parse_error_placeholder_message") }
+ end
+
+ xml.element("pre") do
+ get_issue_template(env, @parse_exception)
+ end
+ end
+ end
+ end
+ end
+end
+
class Category
include DB::Serializable
@@ -295,4 +382,4 @@ struct Continuation
end
end
-alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist | SearchHashtag | Category
+alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist | SearchHashtag | Category | ProblematicTimelineItem
diff --git a/src/invidious/helpers/sig_helper.cr b/src/invidious/helpers/sig_helper.cr
new file mode 100644
index 00000000..6d198a42
--- /dev/null
+++ b/src/invidious/helpers/sig_helper.cr
@@ -0,0 +1,349 @@
+require "uri"
+require "socket"
+require "socket/tcp_socket"
+require "socket/unix_socket"
+
+{% if flag?(:advanced_debug) %}
+ require "io/hexdump"
+{% end %}
+
+private alias NetworkEndian = IO::ByteFormat::NetworkEndian
+
+module Invidious::SigHelper
+ enum UpdateStatus
+ Updated
+ UpdateNotRequired
+ Error
+ end
+
+ # -------------------
+ # Payload types
+ # -------------------
+
+ abstract struct Payload
+ end
+
+ struct StringPayload < Payload
+ getter string : String
+
+ def initialize(str : String)
+ raise Exception.new("SigHelper: String can't be empty") if str.empty?
+ @string = str
+ end
+
+ def self.from_bytes(slice : Bytes)
+ size = IO::ByteFormat::NetworkEndian.decode(UInt16, slice)
+ if size == 0 # Error code
+ raise Exception.new("SigHelper: Server encountered an error")
+ end
+
+ if (slice.bytesize - 2) != size
+ raise Exception.new("SigHelper: String size mismatch")
+ end
+
+ if str = String.new(slice[2..])
+ return self.new(str)
+ else
+ raise Exception.new("SigHelper: Can't read string from socket")
+ end
+ end
+
+ def to_io(io)
+ # `.to_u16` raises if there is an overflow during the conversion
+ io.write_bytes(@string.bytesize.to_u16, NetworkEndian)
+ io.write(@string.to_slice)
+ end
+ end
+
+ private enum Opcode
+ FORCE_UPDATE = 0
+ DECRYPT_N_SIGNATURE = 1
+ DECRYPT_SIGNATURE = 2
+ GET_SIGNATURE_TIMESTAMP = 3
+ GET_PLAYER_STATUS = 4
+ PLAYER_UPDATE_TIMESTAMP = 5
+ end
+
+ private record Request,
+ opcode : Opcode,
+ payload : Payload?
+
+ # ----------------------
+ # High-level functions
+ # ----------------------
+
+ class Client
+ @mux : Multiplexor
+
+ def initialize(uri_or_path)
+ @mux = Multiplexor.new(uri_or_path)
+ end
+
+ # Forces the server to re-fetch the YouTube player, and extract the necessary
+ # components from it (nsig function code, sig function code, signature timestamp).
+ def force_update : UpdateStatus
+ request = Request.new(Opcode::FORCE_UPDATE, nil)
+
+ value = send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt16, bytes)
+ end
+
+ case value
+ when 0x0000 then return UpdateStatus::Error
+ when 0xFFFF then return UpdateStatus::UpdateNotRequired
+ when 0xF44F then return UpdateStatus::Updated
+ else
+ code = value.nil? ? "nil" : value.to_s(base: 16)
+ raise Exception.new("SigHelper: Invalid status code received #{code}")
+ end
+ end
+
+ # Decrypt a provided n signature using the server's current nsig function
+ # code, and return the result (or an error).
+ def decrypt_n_param(n : String) : String?
+ request = Request.new(Opcode::DECRYPT_N_SIGNATURE, StringPayload.new(n))
+
+ n_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return n_dec
+ end
+
+ # Decrypt a provided s signature using the server's current sig function
+ # code, and return the result (or an error).
+ def decrypt_sig(sig : String) : String?
+ request = Request.new(Opcode::DECRYPT_SIGNATURE, StringPayload.new(sig))
+
+ sig_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return sig_dec
+ end
+
+ # Return the signature timestamp from the server's current player
+ def get_signature_timestamp : UInt64?
+ request = Request.new(Opcode::GET_SIGNATURE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ # Return the current player's version
+ def get_player : UInt32?
+ request = Request.new(Opcode::GET_PLAYER_STATUS, nil)
+
+ return self.send_request(request) do |bytes|
+ has_player = (bytes[0] == 0xFF)
+ player_version = IO::ByteFormat::NetworkEndian.decode(UInt32, bytes[1..4])
+ has_player ? player_version : nil
+ end
+ end
+
+ # Return when the player was last updated
+ def get_player_timestamp : UInt64?
+ request = Request.new(Opcode::PLAYER_UPDATE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ private def send_request(request : Request, &)
+ channel = @mux.send(request)
+ slice = channel.receive
+ return yield slice
+ rescue ex
+ LOGGER.debug("SigHelper: Error when sending a request")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
+ end
+
+ # ---------------------
+ # Low level functions
+ # ---------------------
+
+ class Multiplexor
+ alias TransactionID = UInt32
+ record Transaction, channel = ::Channel(Bytes).new
+
+ @prng = Random.new
+ @mutex = Mutex.new
+ @queue = {} of TransactionID => Transaction
+
+ @conn : Connection
+ @uri_or_path : String
+
+ def initialize(@uri_or_path)
+ @conn = Connection.new(uri_or_path)
+ listen
+ end
+
+ def listen : Nil
+ raise "Socket is closed" if @conn.closed?
+
+ LOGGER.debug("SigHelper: Multiplexor listening")
+
+ spawn do
+ loop do
+ begin
+ receive_data
+ rescue ex
+            LOGGER.info("SigHelper: Connection to helper died with '#{ex.message}', trying to reconnect...")
+            # We close the socket because for some reason it is not closed.
+ @conn.close
+ loop do
+ begin
+ @conn = Connection.new(@uri_or_path)
+ LOGGER.info("SigHelper: Reconnected to SigHelper!")
+ rescue ex
+ LOGGER.debug("SigHelper: Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying")
+ sleep 500.milliseconds
+ next
+ end
+ break if !@conn.closed?
+ end
+ end
+ Fiber.yield
+ end
+ end
+ end
+
+ def send(request : Request)
+ transaction = Transaction.new
+ transaction_id = @prng.rand(TransactionID)
+
+ # Add transaction to queue
+ @mutex.synchronize do
+        # On a 32-bit random integer, this should never happen. Though, just in case, ...
+ if @queue[transaction_id]?
+ raise Exception.new("SigHelper: Duplicate transaction ID! You got a shiny pokemon!")
+ end
+
+ @queue[transaction_id] = transaction
+ end
+
+ write_packet(transaction_id, request)
+
+ return transaction.channel
+ end
+
+ def receive_data
+ transaction_id, slice = read_packet
+
+ @mutex.synchronize do
+ if transaction = @queue.delete(transaction_id)
+ # Remove transaction from queue and send data to the channel
+ transaction.channel.send(slice)
+ LOGGER.trace("SigHelper: Transaction unqueued and data sent to channel")
+ else
+ raise Exception.new("SigHelper: Received transaction was not in queue")
+ end
+ end
+ end
+
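+    # Framing used by the two methods below (inferred from this file rather
+    # than from a formal protocol spec):
+    #
+    #   request:  opcode (u8) | transaction id (u32, network order) | payload
+    #   response: transaction id (u32, network order) | length (u32) | payload
+    #
+    # String payloads are additionally length-prefixed with a u16 by
+    # `StringPayload#to_io` and parsed back by `StringPayload.from_bytes`.
+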
+ # Read a single packet from the socket
+ private def read_packet : {TransactionID, Bytes}
+ # Header
+ transaction_id = @conn.read_bytes(UInt32, NetworkEndian)
+ length = @conn.read_bytes(UInt32, NetworkEndian)
+
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}")
+
+ if length > 67_000
+ raise Exception.new("SigHelper: Packet longer than expected (#{length})")
+ end
+
+ # Payload
+ slice = Bytes.new(length)
+ @conn.read(slice) if length > 0
+
+ LOGGER.trace("SigHelper: payload = #{slice}")
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+
+ return transaction_id, slice
+ end
+
+ # Write a single packet to the socket
+ private def write_packet(transaction_id : TransactionID, request : Request)
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}")
+
+ io = IO::Memory.new(1024)
+ io.write_bytes(request.opcode.to_u8, NetworkEndian)
+ io.write_bytes(transaction_id, NetworkEndian)
+
+ if payload = request.payload
+ payload.to_io(io)
+ end
+
+ @conn.send(io)
+ @conn.flush
+
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+ end
+ end
+
+ class Connection
+ @socket : UNIXSocket | TCPSocket
+
+ {% if flag?(:advanced_debug) %}
+ @io : IO::Hexdump
+ {% end %}
+
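+    # `host_or_path` accepts a UNIX socket path (anything starting with "/",
+    # e.g. "/tmp/inv_sig_helper.sock", an illustrative path), a full
+    # "tcp://host:port" URI, or a bare "host:port" pair.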
+ def initialize(host_or_path : String)
+ case host_or_path
+ when .starts_with?('/')
+ # Make sure that the file exists
+ if File.exists?(host_or_path)
+ @socket = UNIXSocket.new(host_or_path)
+ else
+ raise Exception.new("SigHelper: '#{host_or_path}' no such file")
+ end
+ when .starts_with?("tcp://")
+ uri = URI.parse(host_or_path)
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ else
+ uri = URI.parse("tcp://#{host_or_path}")
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ end
+ LOGGER.info("SigHelper: Using helper at '#{host_or_path}'")
+
+ {% if flag?(:advanced_debug) %}
+ @io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true)
+ {% end %}
+
+ @socket.sync = false
+ @socket.blocking = false
+ end
+
+ def closed? : Bool
+ return @socket.closed?
+ end
+
+ def close : Nil
+ @socket.close if !@socket.closed?
+ end
+
+ def flush(*args, **options)
+ @socket.flush(*args, **options)
+ end
+
+ def send(*args, **options)
+ @socket.send(*args, **options)
+ end
+
+ # Wrap IO functions, with added debug tooling if needed
+ {% for function in %w(read read_bytes write write_bytes) %}
+ def {{function.id}}(*args, **options)
+ {% if flag?(:advanced_debug) %}
+ @io.{{function.id}}(*args, **options)
+ {% else %}
+ @socket.{{function.id}}(*args, **options)
+ {% end %}
+ end
+ {% end %}
+ end
+end
diff --git a/src/invidious/helpers/signatures.cr b/src/invidious/helpers/signatures.cr
index ee09415b..82a28fc0 100644
--- a/src/invidious/helpers/signatures.cr
+++ b/src/invidious/helpers/signatures.cr
@@ -1,73 +1,53 @@
-alias SigProc = Proc(Array(String), Int32, Array(String))
+require "http/params"
+require "./sig_helper"
-struct DecryptFunction
- @decrypt_function = [] of {SigProc, Int32}
- @decrypt_time = Time.monotonic
+class Invidious::DecryptFunction
+ @last_update : Time = Time.utc - 42.days
- def initialize(@use_polling = true)
+ def initialize(uri_or_path)
+ @client = SigHelper::Client.new(uri_or_path)
+ self.check_update
end
- def update_decrypt_function
- @decrypt_function = fetch_decrypt_function
+ def check_update
+ # If we have updated in the last 5 minutes, do nothing
+ return if (Time.utc - @last_update) < 5.minutes
+
+    # Get the amount of time elapsed since the player was last updated, in the
+    # event that multiple Invidious processes are run in parallel.
+ update_time_elapsed = (@client.get_player_timestamp || 301).seconds
+
+ if update_time_elapsed > 5.minutes
+ LOGGER.debug("Signature: Player might be outdated, updating")
+ @client.force_update
+ @last_update = Time.utc
+ end
end
- private def fetch_decrypt_function(id = "CvFH_6DNRCY")
- document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
-    url = document.match(/src="(?<url>\/s\/player\/[^\/]+\/player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
- player = YT_POOL.client &.get(url).body
-
-    function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]
-    function_body = player.match(/^#{Regex.escape(function_name)}=function\(\w\){(?<body>[^}]+)}/m).not_nil!["body"]
- function_body = function_body.split(";")[1..-2]
-
- var_name = function_body[0][0, 2]
-    var_body = player.delete("\n").match(/var #{Regex.escape(var_name)}={(?<body>(.*?))};/).not_nil!["body"]
-
- operations = {} of String => SigProc
- var_body.split("},").each do |operation|
- op_name = operation.match(/^[^:]+/).not_nil![0]
- op_body = operation.match(/\{[^}]+/).not_nil![0]
-
- case op_body
- when "{a.reverse()"
- operations[op_name] = ->(a : Array(String), _b : Int32) { a.reverse }
- when "{a.splice(0,b)"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
- else
- operations[op_name] = ->(a : Array(String), b : Int32) { c = a[0]; a[0] = a[b % a.size]; a[b % a.size] = c; a }
- end
- end
-
- decrypt_function = [] of {SigProc, Int32}
- function_body.each do |function|
- function = function.lchop(var_name).delete("[].")
-
- op_name = function.match(/[^\(]+/).not_nil![0]
-      value = function.match(/\(\w,(?<value>[\d]+)\)/).not_nil!["value"].to_i
-
- decrypt_function << {operations[op_name], value}
- end
-
- return decrypt_function
+ def decrypt_nsig(n : String) : String?
+ self.check_update
+ return @client.decrypt_n_param(n)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
end
- def decrypt_signature(fmt : Hash(String, JSON::Any))
- return "" if !fmt["s"]? || !fmt["sp"]?
+ def decrypt_signature(str : String) : String?
+ self.check_update
+ return @client.decrypt_sig(str)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
- sp = fmt["sp"].as_s
- sig = fmt["s"].as_s.split("")
- if !@use_polling
- now = Time.monotonic
- if now - @decrypt_time > 60.seconds || @decrypt_function.size == 0
- @decrypt_function = fetch_decrypt_function
- @decrypt_time = Time.monotonic
- end
- end
-
- @decrypt_function.each do |proc, value|
- sig = proc.call(sig, value)
- end
-
-    return "#{sp}=#{sig.join("")}"
+ def get_sts : UInt64?
+ self.check_update
+ return @client.get_signature_timestamp
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
end
end
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index e438e3b9..5637e533 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -52,9 +52,9 @@ def recode_length_seconds(time)
end
def decode_interval(string : String) : Time::Span
- rawMinutes = string.try &.to_i32?
+ raw_minutes = string.try &.to_i32?
- if !rawMinutes
+ if !raw_minutes
    hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_i32
hours ||= 0
@@ -63,7 +63,7 @@ def decode_interval(string : String) : Time::Span
time = Time::Span.new(hours: hours, minutes: minutes)
else
- time = Time::Span.new(minutes: rawMinutes)
+ time = Time::Span.new(minutes: raw_minutes)
end
return time
@@ -262,7 +262,7 @@ def get_referer(env, fallback = "/", unroll = true)
end
referer = referer.request_target
- referer = "/" + referer.gsub(/[^\/?@&%=\-_.:,*0-9a-zA-Z]/, "").lstrip("/\\")
+ referer = "/" + referer.gsub(/[^\/?@&%=\-_.:,*0-9a-zA-Z+]/, "").lstrip("/\\")
if referer == env.request.path
referer = fallback
@@ -323,68 +323,6 @@ def parse_range(range)
return 0_i64, nil
end
-def fetch_random_instance
- begin
- instance_api_client = make_client(URI.parse("https://api.invidious.io"))
-
- # Timeouts
- instance_api_client.connect_timeout = 10.seconds
- instance_api_client.dns_timeout = 10.seconds
-
- instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
- instance_api_client.close
- rescue Socket::ConnectError | IO::TimeoutError | JSON::ParseException
- instance_list = [] of JSON::Any
- end
-
- filtered_instance_list = [] of String
-
- instance_list.each do |data|
- # TODO Check if current URL is onion instance and use .onion types if so.
- if data[1]["type"] == "https"
- # Instances can have statistics disabled, which is an requirement of version validation.
- # as_nil? doesn't exist. Thus we'll have to handle the error raised if as_nil fails.
- begin
- data[1]["stats"].as_nil
- next
- rescue TypeCastError
- end
-
- # stats endpoint could also lack the software dict.
- next if data[1]["stats"]["software"]?.nil?
-
- # Makes sure the instance isn't too outdated.
- if remote_version = data[1]["stats"]?.try &.["software"]?.try &.["version"]
- remote_commit_date = remote_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
- next if !remote_commit_date
-
- remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
- local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)
-
- next if (remote_commit_date - local_commit_date).abs.days > 30
-
- begin
- data[1]["monitor"].as_nil
- health = data[1]["monitor"].as_h["dailyRatios"][0].as_h["ratio"]
- filtered_instance_list << data[0].as_s if health.to_s.to_f > 90
- rescue TypeCastError
- # We can't check the health if the monitoring is broken. Thus we'll just add it to the list
- # and move on. Ideally we'll ignore any instance that has broken health monitoring but due to the fact that
- # it's an error that often occurs with all the instances at the same time, we have to just skip the check.
- filtered_instance_list << data[0].as_s
- end
- end
- end
- end
-
- # If for some reason no instances managed to get fetched successfully then we'll just redirect to redirect.invidious.io
- if filtered_instance_list.size == 0
- return "redirect.invidious.io"
- end
-
- return filtered_instance_list.sample(1)[0]
-end
-
def reduce_uri(uri : URI | String, max_length : Int32 = 50, suffix : String = "…") : String
str = uri.to_s.sub(/^https?:\/\//, "")
if str.size > max_length
@@ -445,3 +383,22 @@ def parse_link_endpoint(endpoint : JSON::Any, text : String, video_id : String)
end
return text
end
+
+def encrypt_ecb_without_salt(data, key)
+ cipher = OpenSSL::Cipher.new("aes-128-ecb")
+ cipher.encrypt
+ cipher.key = key
+
+ io = IO::Memory.new
+ io.write(cipher.update(data))
+ io.write(cipher.final)
+ io.rewind
+
+ return io
+end
+
+def invidious_companion_encrypt(data)
+ timestamp = Time.utc.to_unix
+ encrypted_data = encrypt_ecb_without_salt("#{timestamp}|#{data}", CONFIG.invidious_companion_key)
+ return Base64.urlsafe_encode(encrypted_data)
+end
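+
+# Rough usage sketch (assuming `CONFIG.invidious_companion_key` holds a
+# 16-byte AES-128 key; the video id below is only an example):
+#
+#   token = invidious_companion_encrypt("dQw4w9WgXcQ")
+#   # => URL-safe Base64 of AES-128-ECB("<unix timestamp>|dQw4w9WgXcQ")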
diff --git a/src/invidious/http_server/utils.cr b/src/invidious/http_server/utils.cr
index 222dfc4a..623a9177 100644
--- a/src/invidious/http_server/utils.cr
+++ b/src/invidious/http_server/utils.cr
@@ -11,11 +11,12 @@ module Invidious::HttpServer
params = url.query_params
params["host"] = url.host.not_nil! # Should never be nil, in theory
params["region"] = region if !region.nil?
+ url.query_params = params
if absolute
- return "#{HOST_URL}#{url.request_target}?#{params}"
+ return "#{HOST_URL}#{url.request_target}"
else
- return "#{url.request_target}?#{params}"
+ return url.request_target
end
end
diff --git a/src/invidious/jobs/instance_refresh_job.cr b/src/invidious/jobs/instance_refresh_job.cr
new file mode 100644
index 00000000..cb4280b9
--- /dev/null
+++ b/src/invidious/jobs/instance_refresh_job.cr
@@ -0,0 +1,97 @@
+class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
+  # We update the internals of a constant so that it can be accessed from
+  # anywhere within the codebase
+ #
+ # "INSTANCES" => Array(Tuple(String, String)) # region, instance
+
+ INSTANCES = {"INSTANCES" => [] of Tuple(String, String)}
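+  # After a successful refresh this might contain, for example (made-up values):
+  #   INSTANCES["INSTANCES"] # => [{"DE", "invidious.example.org"}, {"US", "iv.example.com"}]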
+
+ def initialize
+ end
+
+ def begin
+ loop do
+ refresh_instances
+ LOGGER.info("InstanceListRefreshJob: Done, sleeping for 30 minutes")
+      sleep 30.minutes
+ Fiber.yield
+ end
+ end
+
+ # Refreshes the list of instances used for redirects.
+ #
+ # Does the following three checks for each instance
+ # - Is it a clear-net instance?
+ # - Is it an instance with a good uptime?
+ # - Is it an updated instance?
+ private def refresh_instances
+ raw_instance_list = self.fetch_instances
+ filtered_instance_list = [] of Tuple(String, String)
+
+ raw_instance_list.each do |instance_data|
+ # TODO allow Tor hidden service instances when the current instance
+ # is also a hidden service. Same for i2p and any other non-clearnet instances.
+ begin
+ domain = instance_data[0]
+ info = instance_data[1]
+ stats = info["stats"]
+
+ next unless info["type"] == "https"
+ next if bad_uptime?(info["monitor"])
+ next if outdated?(stats["software"]["version"])
+
+ filtered_instance_list << {info["region"].as_s, domain.as_s}
+ rescue ex
+ if domain
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ else
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ end
+ end
+ end
+
+ if !filtered_instance_list.empty?
+ INSTANCES["INSTANCES"] = filtered_instance_list
+ end
+ end
+
+ # Fetches information regarding instances from api.invidious.io or an otherwise configured URL
+ private def fetch_instances : Array(JSON::Any)
+ begin
+ # We directly call the stdlib HTTP::Client here as it allows us to negate the effects
+ # of the force_resolve config option. This is needed as api.invidious.io does not support ipv6
+ # and as such the following request raises if we were to use force_resolve with the ipv6 value.
+ instance_api_client = HTTP::Client.new(URI.parse("https://api.invidious.io"))
+
+ # Timeouts
+ instance_api_client.connect_timeout = 10.seconds
+ instance_api_client.dns_timeout = 10.seconds
+
+ raw_instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
+ instance_api_client.close
+ rescue ex : Socket::ConnectError | IO::TimeoutError | JSON::ParseException
+ raw_instance_list = [] of JSON::Any
+ end
+
+ return raw_instance_list
+ end
+
+ # Checks if the given target instance is outdated
+ private def outdated?(target_instance_version) : Bool
+ remote_commit_date = target_instance_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
+ return false if !remote_commit_date
+
+ remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
+ local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)
+
+ return (remote_commit_date - local_commit_date).abs.days > 30
+ end
+
+  # Checks whether the target instance is down or has had an uptime below 90% over a 30 day period
+ private def bad_uptime?(target_instance_health_monitor) : Bool
+    return true if target_instance_health_monitor["down"].as_bool
+ return true if target_instance_health_monitor["uptime"].as_f < 90
+
+ return false
+ end
+end
diff --git a/src/invidious/jobs/notification_job.cr b/src/invidious/jobs/notification_job.cr
index b445107b..968ee47f 100644
--- a/src/invidious/jobs/notification_job.cr
+++ b/src/invidious/jobs/notification_job.cr
@@ -1,8 +1,32 @@
+struct VideoNotification
+ getter video_id : String
+ getter channel_id : String
+ getter published : Time
+
+ def_hash @channel_id, @video_id
+
+ def ==(other)
+ video_id == other.video_id
+ end
+
+ def self.from_video(video : ChannelVideo) : self
+ VideoNotification.new(video.id, video.ucid, video.published)
+ end
+
+ def initialize(@video_id, @channel_id, @published)
+ end
+
+ def clone : VideoNotification
+ VideoNotification.new(video_id.clone, channel_id.clone, published.clone)
+ end
+end
+
class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
+ private getter notification_channel : ::Channel(VideoNotification)
private getter connection_channel : ::Channel({Bool, ::Channel(PQ::Notification)})
private getter pg_url : URI
- def initialize(@connection_channel, @pg_url)
+ def initialize(@notification_channel, @connection_channel, @pg_url)
end
def begin
@@ -10,6 +34,70 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
PG.connect_listen(pg_url, "notifications") { |event| connections.each(&.send(event)) }
+ # hash of channels to their videos (id+published) that need notifying
+ to_notify = Hash(String, Set(VideoNotification)).new(
+ ->(hash : Hash(String, Set(VideoNotification)), key : String) {
+ hash[key] = Set(VideoNotification).new
+ }
+ )
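+    # The block above lazily creates an empty Set the first time an unknown
+    # channel id is looked up, so `to_notify[notification.channel_id] << notification`
+    # below never raises KeyError.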
+ notify_mutex = Mutex.new
+
+ # fiber to locally cache all incoming notifications (from pubsub webhooks and refresh channels job)
+ spawn do
+ begin
+ loop do
+ notification = notification_channel.receive
+ notify_mutex.synchronize do
+ to_notify[notification.channel_id] << notification
+ end
+ end
+ end
+ end
+ # fiber to regularly persist all cached notifications
+ spawn do
+ loop do
+ begin
+ LOGGER.debug("NotificationJob: waking up")
+ cloned = {} of String => Set(VideoNotification)
+ notify_mutex.synchronize do
+ cloned = to_notify.clone
+ to_notify.clear
+ end
+
+ cloned.each do |channel_id, notifications|
+ if notifications.empty?
+ next
+ end
+
+ LOGGER.info("NotificationJob: updating channel #{channel_id} with #{notifications.size} notifications")
+ if CONFIG.enable_user_notifications
+ video_ids = notifications.map(&.video_id)
+ Invidious::Database::Users.add_multiple_notifications(channel_id, video_ids)
+ PG_DB.using_connection do |conn|
+ notifications.each do |n|
+ # Deliver notifications to `/api/v1/auth/notifications`
+ payload = {
+ "topic" => n.channel_id,
+ "videoId" => n.video_id,
+ "published" => n.published.to_unix,
+ }.to_json
+ conn.exec("NOTIFY notifications, E'#{payload}'")
+ end
+ end
+ else
+ Invidious::Database::Users.feed_needs_update(channel_id)
+ end
+ end
+
+ LOGGER.trace("NotificationJob: Done, sleeping")
+ rescue ex
+ LOGGER.error("NotificationJob: #{ex.message}")
+ end
+ sleep 1.minute
+ Fiber.yield
+ end
+ end
+
loop do
action, connection = connection_channel.receive
diff --git a/src/invidious/jobs/update_decrypt_function_job.cr b/src/invidious/jobs/update_decrypt_function_job.cr
deleted file mode 100644
index 6fa0ae1b..00000000
--- a/src/invidious/jobs/update_decrypt_function_job.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
- def begin
- loop do
- begin
- DECRYPT_FUNCTION.update_decrypt_function
- rescue ex
- LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
- ensure
- sleep 1.minute
- Fiber.yield
- end
- end
- end
-end
diff --git a/src/invidious/jsonify/api_v1/video_json.cr b/src/invidious/jsonify/api_v1/video_json.cr
index 0dced80b..58805af2 100644
--- a/src/invidious/jsonify/api_v1/video_json.cr
+++ b/src/invidious/jsonify/api_v1/video_json.cr
@@ -63,7 +63,7 @@ module Invidious::JSONify::APIv1
json.field "isListed", video.is_listed
json.field "liveNow", video.live_now
json.field "isPostLiveDvr", video.post_live_dvr
- json.field "isUpcoming", video.is_upcoming
+ json.field "isUpcoming", video.upcoming?
if video.premiere_timestamp
json.field "premiereTimestamp", video.premiere_timestamp.try &.to_unix
@@ -109,30 +109,36 @@ module Invidious::JSONify::APIv1
# On livestreams, it's not present, so always fall back to the
# current unix timestamp (up to mS precision) for compatibility.
last_modified = fmt["lastModified"]?
- last_modified ||= "#{Time.utc.to_unix_ms.to_s}000"
+ last_modified ||= "#{Time.utc.to_unix_ms}000"
json.field "lmt", last_modified
json.field "projectionType", fmt["projectionType"]
- if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
- fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.as_i || 30
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
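+          # e.g. a 1920x1080 stream at 60 fps is labelled "1080p60"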
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
-
- if fmt_info["height"]?
- json.field "resolution", "#{fmt_info["height"]}p"
-
- quality_label = "#{fmt_info["height"]}p"
- if fps > 30
- quality_label += "60"
- end
- json.field "qualityLabel", quality_label
-
- if fmt_info["width"]?
- json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
- end
- end
end
# Livestream chunk infos
@@ -156,33 +162,44 @@ module Invidious::JSONify::APIv1
json.array do
video.fmt_stream.each do |fmt|
json.object do
- json.field "url", fmt["url"]
+ if proxy
+ json.field "url", Invidious::HttpServer::Utils.proxy_video_url(
+ fmt["url"].to_s, absolute: true
+ )
+ else
+ json.field "url", fmt["url"]
+ end
json.field "itag", fmt["itag"].as_i.to_s
json.field "type", fmt["mimeType"]
json.field "quality", fmt["quality"]
json.field "bitrate", fmt["bitrate"].as_i.to_s if fmt["bitrate"]?
- fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
- if fmt_info
- fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.as_i || 30
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
json.field "container", fmt_info["ext"]
json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
-
- if fmt_info["height"]?
- json.field "resolution", "#{fmt_info["height"]}p"
-
- quality_label = "#{fmt_info["height"]}p"
- if fps > 30
- quality_label += "60"
- end
- json.field "qualityLabel", quality_label
-
- if fmt_info["width"]?
- json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
- end
- end
end
end
end
@@ -250,6 +267,12 @@ module Invidious::JSONify::APIv1
json.field "lengthSeconds", rv["length_seconds"]?.try &.to_i
json.field "viewCountText", rv["short_view_count"]?
json.field "viewCount", rv["view_count"]?.try &.empty? ? nil : rv["view_count"].to_i64
+ json.field "published", rv["published"]?
+ if rv["published"]?.try &.presence
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(Time.parse_rfc3339(rv["published"].to_s), locale))
+ else
+ json.field "publishedText", ""
+ end
end
end
end
@@ -260,17 +283,17 @@ module Invidious::JSONify::APIv1
def storyboards(json, id, storyboards)
json.array do
- storyboards.each do |storyboard|
+ storyboards.each do |sb|
json.object do
- json.field "url", "/api/v1/storyboards/#{id}?width=#{storyboard[:width]}&height=#{storyboard[:height]}"
- json.field "templateUrl", storyboard[:url]
- json.field "width", storyboard[:width]
- json.field "height", storyboard[:height]
- json.field "count", storyboard[:count]
- json.field "interval", storyboard[:interval]
- json.field "storyboardWidth", storyboard[:storyboard_width]
- json.field "storyboardHeight", storyboard[:storyboard_height]
- json.field "storyboardCount", storyboard[:storyboard_count]
+ json.field "url", "/api/v1/storyboards/#{id}?width=#{sb.width}&height=#{sb.height}"
+ json.field "templateUrl", sb.url.to_s
+ json.field "width", sb.width
+ json.field "height", sb.height
+ json.field "count", sb.count
+ json.field "interval", sb.interval
+ json.field "storyboardWidth", sb.columns
+ json.field "storyboardHeight", sb.rows
+ json.field "storyboardCount", sb.images_count
end
end
end
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index 823ca85b..28ff0ff6 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -81,7 +81,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
})
end
-def template_mix(mix)
+def template_mix(mix, listen)
html = <<-END_HTML