invidious/src/invidious.cr

# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2019 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
require "digest/md5"
require "file_utils"
require "kemal"
require "markdown"
require "openssl/hmac"
require "option_parser"
require "pg"
require "sqlite3"
require "xml"
require "yaml"
require "zip"
require "./invidious/helpers/*"
require "./invidious/*"
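# Site configuration is loaded once at startup. HMAC_KEY is used to sign server-generated
# tokens (CSRF tokens, captcha answers, PubSubHubbub callbacks); if no key is configured,
# a random one is generated for this run.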
CONFIG = Config.from_yaml(File.read("config/config.yml"))
HMAC_KEY = CONFIG.hmac_key || Random::Secure.hex(32)
PG_URL = URI.new(
scheme: "postgres",
user: CONFIG.db.user,
password: CONFIG.db.password,
host: CONFIG.db.host,
port: CONFIG.db.port,
path: CONFIG.db.dbname,
)
PG_DB = DB.open PG_URL
ARCHIVE_URL = URI.parse("https://archive.org")
LOGIN_URL = URI.parse("https://accounts.google.com")
PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
REDDIT_URL = URI.parse("https://www.reddit.com")
TEXTCAPTCHA_URL = URI.parse("http://textcaptcha.com")
YT_URL = URI.parse("https://www.youtube.com")
CHARS_SAFE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
MAX_ITEMS_PER_PAGE = 1500
REQUEST_HEADERS_WHITELIST = {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "Content-Length", "If-None-Match", "Range"}
RESPONSE_HEADERS_BLACKLIST = {"Access-Control-Allow-Origin", "Alt-Svc", "Server"}
HTTP_CHUNK_SIZE = 10485760 # ~10MB
CURRENT_BRANCH = {{ "#{`git branch | sed -n '/\* /s///p'`.strip}" }}
CURRENT_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
CURRENT_VERSION = {{ "#{`git describe --tags --abbrev=0`.strip}" }}
# This is used to determine the `?v=` on the end of file URLs (for cache busting). We
# only need to expire modified assets, so we can use this to find the last commit that
# changed any assets.
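# (For example, an asset link rendered as "/css/default.css?v=#{ASSET_COMMIT}" is only
# re-fetched by clients when the file itself has changed; the path here is illustrative.)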
ASSET_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit -- assets`.strip}" }}
SOFTWARE = {
"name" => "invidious",
"version" => "#{CURRENT_VERSION}-#{CURRENT_COMMIT}",
"branch" => "#{CURRENT_BRANCH}",
}
LOCALES = {
"ar" => load_locale("ar"),
"de" => load_locale("de"),
"el" => load_locale("el"),
"en-US" => load_locale("en-US"),
"eo" => load_locale("eo"),
"es" => load_locale("es"),
"eu" => load_locale("eu"),
"fr" => load_locale("fr"),
"it" => load_locale("it"),
"nb_NO" => load_locale("nb_NO"),
"nl" => load_locale("nl"),
"pl" => load_locale("pl"),
"ru" => load_locale("ru"),
"uk" => load_locale("uk"),
"zh-CN" => load_locale("zh-CN"),
}
config = CONFIG
logger = Invidious::LogHandler.new
Kemal.config.extra_options do |parser|
parser.banner = "Usage: invidious [arguments]"
parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
begin
config.channel_threads = number.to_i
rescue ex
puts "THREADS must be integer"
exit
end
end
parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
begin
config.feed_threads = number.to_i
rescue ex
puts "THREADS must be integer"
exit
end
end
parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output|
FileUtils.mkdir_p(File.dirname(output))
logger = Invidious::LogHandler.new(File.open(output, mode: "a"))
end
parser.on("-v", "--version", "Print version") do |output|
puts SOFTWARE.to_pretty_json
exit
end
end
Kemal::CLI.new ARGV
# Check table integrity
if CONFIG.check_tables
analyze_table(PG_DB, logger, "channels", InvidiousChannel)
analyze_table(PG_DB, logger, "channel_videos", ChannelVideo)
analyze_table(PG_DB, logger, "nonces", Nonce)
analyze_table(PG_DB, logger, "session_ids", SessionId)
analyze_table(PG_DB, logger, "users", User)
analyze_table(PG_DB, logger, "videos", Video)
if CONFIG.cache_annotations
analyze_table(PG_DB, logger, "annotations", Annotation)
end
end
# Start jobs
refresh_channels(PG_DB, logger, config)
refresh_feeds(PG_DB, logger, config)
subscribe_to_feeds(PG_DB, logger, HMAC_KEY, config)
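# When statistics are enabled, a background fiber refreshes them once a minute and exposes
# them in a NodeInfo-style structure: user counts plus metadata about the last channel refresh.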
statistics = {
"error" => "Statistics are not availabile.",
}
if config.statistics_enabled
spawn do
loop do
statistics = {
"version" => "2.0",
"software" => SOFTWARE,
"openRegistrations" => config.registration_enabled,
"usage" => {
"users" => {
"total" => PG_DB.query_one("SELECT count(*) FROM users", as: Int64),
"activeHalfyear" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64),
"activeMonth" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64),
},
},
"metadata" => {
"updatedAt" => Time.utc.to_unix,
"lastChannelRefreshedAt" => PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0,
},
}
sleep 1.minute
Fiber.yield
end
end
end
top_videos = [] of Video
if config.top_enabled
spawn do
pull_top_videos(config, PG_DB) do |videos|
top_videos = videos
end
end
end
popular_videos = [] of ChannelVideo
spawn do
pull_popular_videos(PG_DB) do |videos|
popular_videos = videos
end
end
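# The stream URL decryption function is refreshed in the background by
# update_decrypt_function and passed to Video#fmt_stream / Video#adaptive_fmts below to
# produce playable stream URLs.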
decrypt_function = [] of {name: String, value: Int32}
spawn do
update_decrypt_function do |function|
decrypt_function = function
end
end
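# A single PostgreSQL LISTEN connection on the "notifications" channel fans incoming
# notifications out to every subscribed client channel; channels are registered (true)
# and unregistered (false) through connection_channel.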
connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
spawn do
connections = [] of Channel(PQ::Notification)
PG.connect_listen(PG_URL, "notifications") { |event| connections.each { |connection| connection.send(event) } }
loop do
action, connection = connection_channel.receive
case action
when true
connections << connection
when false
connections.delete(connection)
end
end
end
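# Runs before every route: sets security headers (CSP, HSTS where applicable), parses the
# PREFS cookie, resolves the SID cookie to a user and CSRF token, and records the current
# page for login redirects.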
before_all do |env|
host_url = make_host_url(config, Kemal.config)
env.response.headers["X-XSS-Protection"] = "1; mode=block"
env.response.headers["X-Content-Type-Options"] = "nosniff"
env.response.headers["Content-Security-Policy"] = "default-src blob: data: 'self' #{host_url} 'unsafe-inline' 'unsafe-eval'; media-src blob: 'self' #{host_url} https://*.googlevideo.com:443"
env.response.headers["Referrer-Policy"] = "same-origin"
if (Kemal.config.ssl || config.https_only) && config.hsts
env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
end
begin
preferences = Preferences.from_json(env.request.cookies["PREFS"]?.try &.value || "{}")
rescue
preferences = Preferences.from_json("{}")
end
if env.request.cookies.has_key? "SID"
sid = env.request.cookies["SID"].value
if sid.starts_with? "v1:"
raise "Cannot use token as SID"
end
# Invidious users only have SID
if !env.request.cookies.has_key? "SSID"
if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
csrf_token = generate_response(sid, {":signout", ":watch_ajax", ":subscription_ajax", ":token_ajax", ":authorize_token"}, HMAC_KEY, PG_DB, 1.week)
preferences = user.preferences
env.set "sid", sid
env.set "csrf_token", csrf_token
env.set "user", user
end
else
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
begin
user, sid = get_user(sid, headers, PG_DB, false)
csrf_token = generate_response(sid, {":signout", ":watch_ajax", ":subscription_ajax", ":token_ajax", ":authorize_token"}, HMAC_KEY, PG_DB, 1.week)
preferences = user.preferences
env.set "sid", sid
env.set "csrf_token", csrf_token
env.set "user", user
rescue ex
end
end
end
dark_mode = env.params.query["dark_mode"]? || preferences.dark_mode.to_s
dark_mode = dark_mode == "true"
thin_mode = env.params.query["thin_mode"]? || preferences.thin_mode.to_s
thin_mode = thin_mode == "true"
locale = env.params.query["hl"]? || preferences.locale
preferences.dark_mode = dark_mode
preferences.thin_mode = thin_mode
preferences.locale = locale
env.set "preferences", preferences
current_page = env.request.path
if env.request.query
query = HTTP::Params.parse(env.request.query.not_nil!)
if query["referer"]?
query["referer"] = get_referer(env, "/")
end
current_page += "?#{query}"
end
env.set "current_page", URI.escape(current_page)
end
get "/" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
if user.preferences.redirect_feed
next env.redirect "/feed/subscriptions"
end
end
case config.default_home
when "Popular"
templated "popular"
when "Top"
templated "top"
when "Trending"
env.redirect "/feed/trending"
when "Subscriptions"
if user
env.redirect "/feed/subscriptions"
else
templated "popular"
end
end
end
get "/privacy" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
templated "privacy"
end
get "/licenses" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
rendered "licenses"
end
# Videos
get "/watch" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
if env.params.query.to_s.includes?("%20") || env.params.query.to_s.includes?("+")
url = "/watch?" + env.params.query.to_s.gsub("%20", "").delete("+")
next env.redirect url
end
if env.params.query["v"]?
id = env.params.query["v"]
if env.params.query["v"].empty?
error_message = "Invalid parameters."
env.response.status_code = 400
next templated "error"
end
if id.size > 11
url = "/watch?v=#{id[0, 11]}"
env.params.query.delete_all("v")
if env.params.query.size > 0
url += "&#{env.params.query}"
end
next env.redirect url
end
else
next env.redirect "/"
end
plid = env.params.query["list"]?
nojs = env.params.query["nojs"]?
nojs ||= "0"
nojs = nojs == "1"
preferences = env.get("preferences").as(Preferences)
user = env.get?("user").try &.as(User)
if user
subscriptions = user.subscriptions
watched = user.watched
notifications = user.notifications
end
subscriptions ||= [] of String
params = process_video_params(env.params.query, preferences)
env.params.query.delete_all("listen")
begin
video = get_video(id, PG_DB, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/watch?v=#{ex.message}"
rescue ex
error_message = ex.message
env.response.status_code = 500
logger.puts("#{id} : #{ex.message}")
next templated "error"
end
if preferences.annotations_subscribed &&
subscriptions.includes?(video.ucid) &&
(env.params.query["iv_load_policy"]? || "1") == "1"
params.annotations = true
end
env.params.query.delete_all("iv_load_policy")
if watched && !watched.includes? id
PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.as(User).email)
end
if notifications && notifications.includes? id
PG_DB.exec("UPDATE users SET notifications = array_remove(notifications, $1) WHERE email = $2", id, user.as(User).email)
env.get("user").as(User).notifications.delete(id)
notifications.delete(id)
end
if nojs
if preferences
source = preferences.comments[0]
if source.empty?
source = preferences.comments[1]
end
if source == "youtube"
begin
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
rescue ex
if preferences.comments[1] == "reddit"
comments, reddit_thread = fetch_reddit_comments(id)
comment_html = template_reddit_comments(comments, locale)
comment_html = fill_links(comment_html, "https", "www.reddit.com")
comment_html = replace_links(comment_html)
end
end
elsif source == "reddit"
begin
comments, reddit_thread = fetch_reddit_comments(id)
comment_html = template_reddit_comments(comments, locale)
comment_html = fill_links(comment_html, "https", "www.reddit.com")
comment_html = replace_links(comment_html)
rescue ex
if preferences.comments[1] == "youtube"
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
end
end
else
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
comment_html ||= ""
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
# Older videos may not have audio sources available.
# We redirect here so they're not unplayable.
if params.listen && audio_streams.empty?
next env.redirect "/watch?#{env.params.query}&listen=0"
end
captions = video.captions
preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name.simpleText) ||
params.preferred_captions.includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name.simpleText) ||
params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = "16:9"
video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
if params.listen
url = audio_streams[0]["url"]
audio_streams.each do |fmt|
if fmt["bitrate"] == params.quality.rchop("k")
url = fmt["url"]
end
end
else
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
end
end
end
next env.redirect url
end
rvs = [] of Hash(String, String)
video.info["rvs"]?.try &.split(",").each do |rv|
rvs << HTTP::Params.parse(rv).to_h
end
rating = video.info["avg_rating"].to_f64
engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
playability_status = video.player_response["playabilityStatus"]?
if playability_status && playability_status["status"] == "LIVE_STREAM_OFFLINE" && !video.premiere_timestamp
reason = playability_status["reason"]?.try &.as_s
end
reason ||= ""
templated "watch"
end
get "/embed/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
id = env.params.url["id"]
plid = env.params.query["list"]?
if md = env.params.query["playlist"]?
.try &.match(/[a-zA-Z0-9_-]{11}(,[a-zA-Z0-9_-]{11})*/)
video_series = md[0].split(",")
env.params.query.delete("playlist")
end
preferences = env.get("preferences").as(Preferences)
if id.includes?("%20") || id.includes?("+") || env.params.query.to_s.includes?("%20") || env.params.query.to_s.includes?("+")
id = env.params.url["id"].gsub("%20", "").delete("+")
url = "/embed/#{id}"
if env.params.query.size > 0
url += "?#{env.params.query.to_s.gsub("%20", "").delete("+")}"
end
next env.redirect url
end
# YouTube embed supports `videoseries` with either `list=PLID`
# or `playlist=VIDEO_ID,VIDEO_ID`
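# (e.g. /embed/videoseries?list=PL... or /embed/videoseries?playlist=BaW_jenozKc,a9LDPn-MO4I)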
if id == "videoseries"
url = ""
if plid
begin
videos = fetch_playlist_videos(plid, 1, 1, locale: locale)
rescue ex
error_message = ex.message
2019-06-17 21:06:02 +02:00
env.response.status_code = 500
next templated "error"
end
url = "/embed/#{videos[0].id}"
elsif video_series
url = "/embed/#{video_series.shift}"
env.params.query["playlist"] = video_series.join(",")
else
next env.redirect "/"
end
if env.params.query.size > 0
url += "?#{env.params.query}"
end
next env.redirect url
elsif id.size > 11
url = "/embed/#{id[0, 11]}"
if env.params.query.size > 0
url += "?#{env.params.query}"
end
next env.redirect url
end
params = process_video_params(env.params.query, preferences)
user = env.get?("user").try &.as(User)
if user
subscriptions = user.subscriptions
watched = user.watched
notifications = user.notifications
end
subscriptions ||= [] of String
begin
video = get_video(id, PG_DB, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/embed/#{ex.message}"
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
if preferences.annotations_subscribed &&
subscriptions.includes?(video.ucid) &&
(env.params.query["iv_load_policy"]? || "1") == "1"
params.annotations = true
end
# if watched && !watched.includes? id
# PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.as(User).email)
# end
if notifications && notifications.includes? id
PG_DB.exec("UPDATE users SET notifications = array_remove(notifications, $1) WHERE email = $2", id, user.as(User).email)
env.get("user").as(User).notifications.delete(id)
notifications.delete(id)
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
captions = video.captions
preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name.simpleText) ||
params.preferred_captions.includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name.simpleText) ||
params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = nil
video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
end
end
next env.redirect url
end
rendered "embed"
end
# Playlists
get "/playlist" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
plid = env.params.query["list"]?
if !plid
next env.redirect "/"
end
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if plid.starts_with? "RD"
next env.redirect "/mix?list=#{plid}"
end
begin
playlist = fetch_playlist(plid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count, locale: locale)
rescue ex
videos = [] of PlaylistVideo
end
templated "playlist"
end
get "/mix" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
rdid = env.params.query["list"]?
if !rdid
next env.redirect "/"
end
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")
begin
mix = fetch_mix(rdid, continuation, locale: locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
templated "mix"
end
# Search
get "/opensearch.xml" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/opensearchdescription+xml"
host = make_host_url(config, Kemal.config)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("OpenSearchDescription", xmlns: "http://a9.com/-/spec/opensearch/1.1/") do
xml.element("ShortName") { xml.text "Invidious" }
xml.element("LongName") { xml.text "Invidious Search" }
xml.element("Description") { xml.text "Search for videos, channels, and playlists on Invidious" }
xml.element("InputEncoding") { xml.text "UTF-8" }
xml.element("Image", width: 48, height: 48, type: "image/x-icon") { xml.text "#{host}/favicon.ico" }
xml.element("Url", type: "text/html", method: "get", template: "#{host}/search?q={searchTerms}")
end
end
end
get "/results" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
query = env.params.query["search_query"]?
query ||= env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if query
env.redirect "/search?q=#{URI.escape(query)}&page=#{page}"
else
env.redirect "/"
end
end
get "/search" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
query = env.params.query["search_query"]?
query ||= env.params.query["q"]?
query ||= ""
if query.empty?
next env.redirect "/"
end
page = env.params.query["page"]?.try &.to_i?
page ||= 1
user = env.get? "user"
if user
user = user.as(User)
view_name = "subscriptions_#{sha256(user.email)}"
end
channel = nil
content_type = "all"
date = ""
duration = ""
features = [] of String
sort = "relevance"
subscriptions = nil
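# Search operators (key:value pairs such as channel:, type:, date:, duration:, feature:,
# sort: and subscriptions:true) are stripped from the query below and mapped to upstream
# search parameters; subscriptions:true searches the user's own subscription feed instead.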
operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
operators.each do |operator|
key, value = operator.downcase.split(":")
case key
when "channel", "user"
channel = operator.split(":")[-1]
when "content_type", "type"
content_type = value
when "date"
date = value
when "duration"
duration = value
when "feature", "features"
features = value.split(",")
when "sort"
sort = value
when "subscriptions"
subscriptions = value == "true"
else
operators.delete(operator)
end
end
search_query = (query.split(" ") - operators).join(" ")
if channel
count, videos = channel_search(search_query, page, channel)
elsif subscriptions
if view_name
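# Subscription search runs a plain full-text query over the user's per-user materialized
# view of channel_videos (created at registration as subscriptions_<sha256(email)>).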
videos = PG_DB.query_all("SELECT id,title,published,updated,ucid,author,length_seconds FROM (
SELECT *,
to_tsvector(#{view_name}.title) ||
to_tsvector(#{view_name}.author)
as document
FROM #{view_name}
) v_search WHERE v_search.document @@ plainto_tsquery($1) LIMIT 20 OFFSET $2;", search_query, (page - 1) * 20, as: ChannelVideo)
count = videos.size
else
videos = [] of ChannelVideo
count = 0
end
else
begin
search_params = produce_search_params(sort: sort, date: date, content_type: content_type,
duration: duration, features: features)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
count, videos = search(search_query, page, search_params, region).as(Tuple)
end
templated "search"
end
# Users
get "/login" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
next env.redirect "/feed/subscriptions"
end
if !config.login_enabled
error_message = "Login has been disabled by administrator."
env.response.status_code = 400
next templated "error"
end
referer = get_referer(env, "/feed/subscriptions")
email = nil
password = nil
captcha = nil
account_type = env.params.query["type"]?
account_type ||= "invidious"
captcha_type = env.params.query["captcha"]?
captcha_type ||= "image"
tfa = env.params.query["tfa"]?
tfa ||= false
templated "login"
end
post "/login" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env, "/feed/subscriptions")
if !config.login_enabled
error_message = "Login has been disabled by administrator."
env.response.status_code = 403
next templated "error"
end
# https://stackoverflow.com/a/574698
email = env.params.body["email"]?.try &.downcase.byte_slice(0, 254)
password = env.params.body["password"]?
account_type = env.params.query["type"]?
account_type ||= "invidious"
case account_type
when "google"
tfa_code = env.params.body["tfa"]?.try &.lchop("G-")
traceback = IO::Memory.new
# See https://github.com/ytdl-org/youtube-dl/blob/2019.04.07/youtube_dl/extractor/youtube.py#L82
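# The flow below mirrors Google's web sign-in: the email is posted to /_/signin/sl/lookup,
# the password to /_/signin/sl/challenge, an optional 2FA challenge is selected and
# answered, and redirects are then followed until /ManageAccount to collect session cookies.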
begin
client = make_client(LOGIN_URL)
headers = HTTP::Headers.new
headers["Content-Type"] = "application/x-www-form-urlencoded;charset=utf-8"
headers["Google-Accounts-XSRF"] = "1"
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.80 Safari/537.36"
headers["X-Same-Domain"] = "1"
login_page = client.get("/ServiceLogin?flowName=GlifWebSignIn&flowEntry=ServiceLogin&cid=1&navigationDirection=forward")
headers = login_page.cookies.add_request_headers(headers)
lookup_req = {
email, nil, [] of String, nil, "US", nil, nil, 2, false, true,
{nil, nil,
{2, 1, nil, 1, "https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn", nil, [] of String, 4, [] of String, "GlifWebSignIn"},
1,
{nil, nil, [] of String, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, [] of String, nil, nil, nil, [] of String, [] of String},
nil, nil, nil, true,
},
email,
}.to_json
traceback << "Getting lookup..."
response = client.post("/_/signin/sl/lookup", headers, login_req(lookup_req))
headers = response.cookies.add_request_headers(headers)
lookup_results = JSON.parse(response.body[5..-1])
traceback << "done, returned #{response.status_code}.<br/>"
user_hash = lookup_results[0][2]
challenge_req = {
user_hash, nil, 1, nil,
{1, nil, nil, nil,
{password, nil, true},
},
{nil, nil,
{2, 1, nil, 1, "https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn", nil, [] of String, 4, [] of String, "GlifWebSignIn"},
1,
{nil, nil, [] of String, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, [] of String, nil, nil, nil, [] of String, [] of String},
nil, nil, nil, true,
},
}.to_json
traceback << "Getting challenge..."
response = client.post("/_/signin/sl/challenge", headers, login_req(challenge_req))
headers = response.cookies.add_request_headers(headers)
challenge_results = JSON.parse(response.body[5..-1])
traceback << "done, returned #{response.status_code}.<br/>"
headers["Cookie"] = URI.unescape(headers["Cookie"])
if challenge_results[0][3]?.try &.== 7
error_message = translate(locale, "Account has temporarily been disabled")
env.response.status_code = 423
next templated "error"
end
if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
error_message = translate(locale, "Incorrect password")
env.response.status_code = 401
next templated "error"
end
if challenge_results[0][-1]?.try &.[0]?.try &.as_a?
traceback << "User has 2FA.<br/>"
# Prefer Authenticator app and SMS over unsupported protocols
if challenge_results[0][-1][0][0][8] != 6 && challenge_results[0][-1][0][0][8] != 9
tfa = challenge_results[0][-1][0].as_a.select { |auth_type| auth_type[8] == 6 || auth_type[8] == 9 }[0]
traceback << "Selecting challenge #{tfa[8]}..."
select_challenge = {2, nil, nil, nil, {tfa[8]}}.to_json
tl = challenge_results[1][2]
tfa = client.post("/_/signin/selectchallenge?TL=#{tl}", headers, login_req(select_challenge)).body
tfa = tfa[5..-1]
tfa = JSON.parse(tfa)[0][-1]
traceback << "done.<br/>"
else
traceback << "Using challenge #{challenge_results[0][-1][0][0][8]}.<br/>"
tfa = challenge_results[0][-1][0][0]
end
if tfa[2] == "TWO_STEP_VERIFICATION"
if tfa[5] == "QUOTA_EXCEEDED"
error_message = translate(locale, "Quota exceeded, try again in a few hours")
env.response.status_code = 423
next templated "error"
end
if !tfa_code
account_type = "google"
captcha_type = "image"
tfa = true
captcha = nil
next templated "login"
end
tl = challenge_results[1][2]
request_type = tfa[8]
case request_type
when 6
# Authenticator app
tfa_req = {
user_hash, nil, 2, nil,
{6, nil, nil, nil, nil,
{tfa_code, false},
},
}.to_json
when 9
# Voice or text message
tfa_req = {
user_hash, nil, 2, nil,
{9, nil, nil, nil, nil, nil, nil, nil,
{nil, tfa_code, false, 2},
},
}.to_json
else
error_message = translate(locale, "Unable to log in, make sure two-factor authentication (Authenticator or SMS) is turned on.")
env.response.status_code = 500
next templated "error"
end
traceback << "Submitting challenge..."
response = client.post("/_/signin/challenge?hl=en&TL=#{tl}", headers, login_req(tfa_req))
headers = response.cookies.add_request_headers(headers)
challenge_results = JSON.parse(response.body[5..-1])
if (challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED") ||
(challenge_results[0][-1]?.try &.[5] == "INVALID_INPUT")
error_message = translate(locale, "Invalid TFA code")
env.response.status_code = 401
next templated "error"
end
traceback << "done.<br/>"
end
end
traceback << "Logging in..."
location = challenge_results[0][-1][2].to_s
cookies = HTTP::Cookies.from_headers(headers)
loop do
if !location || location.includes? "/ManageAccount"
break
end
# TODO: Occasionally there will be a second page after login confirming
# the user's phone number, which we will likely choke on.
# if location.includes? "SmsAuthInterstitial"
login = client.get(location, headers)
headers = login.cookies.add_request_headers(headers)
cookies = HTTP::Cookies.from_headers(headers)
location = login.headers["Location"]?
end
sid = cookies["SID"]?.try &.value
if !sid
raise "Couldn't get SID."
end
user, sid = get_user(sid, headers, PG_DB)
# We are now logged in
traceback << "done.<br/>"
host = URI.parse(env.request.headers["Host"]).host
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
cookies.each do |cookie|
cookie.secure = secure
if cookie.extension
cookie.extension = cookie.extension.not_nil!.gsub(".youtube.com", host)
cookie.extension = cookie.extension.not_nil!.gsub("Secure; ", "")
end
env.response.cookies << cookie
end
if env.request.cookies["PREFS"]?
preferences = env.get("preferences").as(Preferences)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.utc(1990, 1, 1)
env.response.cookies << cookie
end
env.redirect referer
rescue ex
traceback.rewind
# error_message = translate(locale, "Login failed. This may be because two-factor authentication is not turned on for your account.")
error_message = %(#{ex.message}<br/>Traceback:<br/><div style="padding-left:2em" id="traceback">#{traceback.gets_to_end}</div>)
env.response.status_code = 500
next templated "error"
end
when "invidious"
if !email
error_message = translate(locale, "User ID is a required field")
env.response.status_code = 401
next templated "error"
end
if !password
error_message = translate(locale, "Password is a required field")
env.response.status_code = 401
next templated "error"
end
user = PG_DB.query_one?("SELECT * FROM users WHERE email = $1", email, as: User)
if user
if !user.password
error_message = translate(locale, "Please sign in using 'Log in with Google'")
env.response.status_code = 400
next templated "error"
end
if Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
else
error_message = translate(locale, "Wrong username or password")
env.response.status_code = 401
next templated "error"
end
# Since this user has already registered, we don't want to overwrite their preferences
if env.request.cookies["PREFS"]?
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.utc(1990, 1, 1)
env.response.cookies << cookie
end
else
if !config.registration_enabled
error_message = "Registration has been disabled by administrator."
env.response.status_code = 400
next templated "error"
end
if password.empty?
error_message = translate(locale, "Password cannot be empty")
env.response.status_code = 401
next templated "error"
end
# See https://security.stackexchange.com/a/39851
if password.bytesize > 55
error_message = translate(locale, "Password should not be longer than 55 characters")
env.response.status_code = 400
next templated "error"
end
password = password.byte_slice(0, 55)
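# Registration captcha checks: image captchas are verified by HMAC-signing the normalized
# answer and validating it against the token issued with the captcha; text captchas compare
# an MD5 digest of the answer against each issued token.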
if config.captcha_enabled
captcha_type = env.params.body["captcha_type"]?
answer = env.params.body["answer"]?
change_type = env.params.body["change_type"]?
if !captcha_type || change_type
if change_type
captcha_type = change_type
end
captcha_type ||= "image"
account_type = "invidious"
tfa = false
if captcha_type == "image"
captcha = generate_captcha(HMAC_KEY, PG_DB)
else
captcha = generate_text_captcha(HMAC_KEY, PG_DB)
end
next templated "login"
end
tokens = env.params.body.select { |k, v| k.match(/^token\[\d+\]$/) }.map { |k, v| v }
answer ||= ""
captcha_type ||= "image"
case captcha_type
when "image"
answer = answer.lstrip('0')
answer = OpenSSL::HMAC.hexdigest(:sha256, HMAC_KEY, answer)
begin
validate_request(tokens[0], answer, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
when "text"
answer = Digest::MD5.hexdigest(answer.downcase.strip)
found_valid_captcha = false
error_message = translate(locale, "Erroneous CAPTCHA")
tokens.each_with_index do |token, i|
begin
validate_request(token, answer, env.request, HMAC_KEY, PG_DB, locale)
found_valid_captcha = true
rescue ex
error_message = ex.message
end
end
if !found_valid_captcha
env.response.status_code = 500
next templated "error"
end
end
end
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
user, sid = create_user(sid, email, password)
user_array = user.to_a
user_array[4] = user_array[4].to_json
args = arg_array(user_array)
PG_DB.exec("INSERT INTO users VALUES (#{args})", user_array)
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE
ucid IN (SELECT unnest(subscriptions) FROM users WHERE email = E'#{user.email.gsub("'", "\\'")}')
ORDER BY published DESC")
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
if env.request.cookies["PREFS"]?
preferences = env.get("preferences").as(Preferences)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.utc(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
else
env.redirect referer
end
end
post "/signout" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", sid)
env.request.cookies.each do |cookie|
cookie.expires = Time.utc(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
end
get "/preferences" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env)
preferences = env.get("preferences").as(Preferences)
templated "preferences"
end
post "/preferences" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env)
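# Checkbox fields are submitted as "on" only when ticked, so every toggle below falls back
# to "off" before being converted to a Bool; numeric fields fall back to the instance defaults.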
video_loop = env.params.body["video_loop"]?.try &.as(String)
video_loop ||= "off"
video_loop = video_loop == "on"
annotations = env.params.body["annotations"]?.try &.as(String)
annotations ||= "off"
annotations = annotations == "on"
annotations_subscribed = env.params.body["annotations_subscribed"]?.try &.as(String)
annotations_subscribed ||= "off"
annotations_subscribed = annotations_subscribed == "on"
autoplay = env.params.body["autoplay"]?.try &.as(String)
autoplay ||= "off"
autoplay = autoplay == "on"
continue = env.params.body["continue"]?.try &.as(String)
continue ||= "off"
continue = continue == "on"
continue_autoplay = env.params.body["continue_autoplay"]?.try &.as(String)
continue_autoplay ||= "off"
continue_autoplay = continue_autoplay == "on"
listen = env.params.body["listen"]?.try &.as(String)
listen ||= "off"
listen = listen == "on"
local = env.params.body["local"]?.try &.as(String)
local ||= "off"
local = local == "on"
speed = env.params.body["speed"]?.try &.as(String).to_f32?
speed ||= CONFIG.default_user_preferences.speed
quality = env.params.body["quality"]?.try &.as(String)
quality ||= CONFIG.default_user_preferences.quality
volume = env.params.body["volume"]?.try &.as(String).to_i?
volume ||= CONFIG.default_user_preferences.volume
comments = [] of String
2.times do |i|
comments << (env.params.body["comments[#{i}]"]?.try &.as(String) || CONFIG.default_user_preferences.comments[i])
end
captions = [] of String
3.times do |i|
captions << (env.params.body["captions[#{i}]"]?.try &.as(String) || CONFIG.default_user_preferences.captions[i])
end
related_videos = env.params.body["related_videos"]?.try &.as(String)
related_videos ||= "off"
related_videos = related_videos == "on"
redirect_feed = env.params.body["redirect_feed"]?.try &.as(String)
redirect_feed ||= "off"
redirect_feed = redirect_feed == "on"
locale = env.params.body["locale"]?.try &.as(String)
locale ||= CONFIG.default_user_preferences.locale
dark_mode = env.params.body["dark_mode"]?.try &.as(String)
dark_mode ||= "off"
dark_mode = dark_mode == "on"
thin_mode = env.params.body["thin_mode"]?.try &.as(String)
thin_mode ||= "off"
thin_mode = thin_mode == "on"
max_results = env.params.body["max_results"]?.try &.as(String).to_i?
max_results ||= CONFIG.default_user_preferences.max_results
sort = env.params.body["sort"]?.try &.as(String)
sort ||= CONFIG.default_user_preferences.sort
latest_only = env.params.body["latest_only"]?.try &.as(String)
latest_only ||= "off"
latest_only = latest_only == "on"
unseen_only = env.params.body["unseen_only"]?.try &.as(String)
unseen_only ||= "off"
unseen_only = unseen_only == "on"
notifications_only = env.params.body["notifications_only"]?.try &.as(String)
notifications_only ||= "off"
notifications_only = notifications_only == "on"
preferences = Preferences.from_json({
annotations: annotations,
annotations_subscribed: annotations_subscribed,
autoplay: autoplay,
captions: captions,
comments: comments,
continue: continue,
continue_autoplay: continue_autoplay,
dark_mode: dark_mode,
latest_only: latest_only,
listen: listen,
local: local,
locale: locale,
max_results: max_results,
notifications_only: notifications_only,
quality: quality,
redirect_feed: redirect_feed,
related_videos: related_videos,
sort: sort,
speed: speed,
thin_mode: thin_mode,
unseen_only: unseen_only,
video_loop: video_loop,
volume: volume,
}.to_json).to_json
if user = env.get? "user"
user = user.as(User)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences, user.email)
if config.admins.includes? user.email
config.default_home = env.params.body["default_home"]?.try &.as(String) || config.default_home
feed_menu = [] of String
4.times do |index|
option = env.params.body["feed_menu[#{index}]"]?.try &.as(String) || ""
if !option.empty?
feed_menu << option
end
end
config.feed_menu = feed_menu
top_enabled = env.params.body["top_enabled"]?.try &.as(String)
top_enabled ||= "off"
config.top_enabled = top_enabled == "on"
captcha_enabled = env.params.body["captcha_enabled"]?.try &.as(String)
captcha_enabled ||= "off"
config.captcha_enabled = captcha_enabled == "on"
login_enabled = env.params.body["login_enabled"]?.try &.as(String)
login_enabled ||= "off"
config.login_enabled = login_enabled == "on"
registration_enabled = env.params.body["registration_enabled"]?.try &.as(String)
registration_enabled ||= "off"
config.registration_enabled = registration_enabled == "on"
statistics_enabled = env.params.body["statistics_enabled"]?.try &.as(String)
statistics_enabled ||= "off"
config.statistics_enabled = statistics_enabled == "on"
File.write("config/config.yml", config.to_yaml)
end
else
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
end
env.redirect referer
end
get "/toggle_theme" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env, unroll: false)
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if user = env.get? "user"
user = user.as(User)
preferences = user.preferences
preferences.dark_mode = !preferences.dark_mode
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
else
preferences = env.get("preferences").as(Preferences)
preferences.dark_mode = !preferences.dark_mode
preferences = preferences.to_json
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
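# Marks a video as watched or unwatched in the signed-in user's history,
# e.g. POST /watch_ajax?action_mark_watched&id=<video id>&redirect=false
# with the csrf_token in the form body.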
post "/watch_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/feed/subscriptions")
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
id = env.params.query["id"]?
if !id
env.response.status_code = 400
next
end
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
env.response.status_code = 400
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_mark_watched"]?
action = "action_mark_watched"
elsif env.params.query["action_mark_unwatched"]?
action = "action_mark_unwatched"
else
next env.redirect referer
end
case action
when "action_mark_watched"
if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.email)
end
when "action_mark_unwatched"
PG_DB.exec("UPDATE users SET watched = array_remove(watched, $1) WHERE email = $2", id, user.email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
# /modify_notifications
# will "ding" all subscriptions.
# /modify_notifications?receive_all_updates=false&receive_no_updates=false
# will "unding" all subscriptions.
get "/modify_notifications" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/")
redirect = env.params.query["redirect"]?
redirect ||= "false"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
if !user.password
channel_req = {} of String => String
channel_req["receive_all_updates"] = env.params.query["receive_all_updates"]? || "true"
channel_req["receive_no_updates"] = env.params.query["receive_no_updates"]? || ""
channel_req["receive_post_updates"] = env.params.query["receive_post_updates"]? || "true"
channel_req.reject! { |k, v| v != "true" && v != "false" }
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
client = make_client(YT_URL)
html = client.get("/subscription_manager?disable_polymer=1", headers)
cookies = HTTP::Cookies.from_headers(headers)
html.cookies.each do |cookie|
if {"VISITOR_INFO1_LIVE", "YSC", "SIDCC"}.includes? cookie.name
if cookies[cookie.name]?
cookies[cookie.name] = cookie
else
cookies << cookie
end
end
end
headers = cookies.add_request_headers(headers)
match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
if match
session_token = match["session_token"]
else
next env.redirect referer
end
headers["content-type"] = "application/x-www-form-urlencoded"
channel_req["session_token"] = session_token
subs = XML.parse_html(html.body)
subs.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
channel_id = channel.content.lstrip("/channel/").not_nil!
channel_req["channel_id"] = channel_id
client.post("/subscription_ajax?action_update_subscription_preferences=1", headers, form: channel_req)
end
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
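# Subscribes to or unsubscribes from a channel, e.g.
# POST /subscription_ajax?action_create_subscription_to_channel=1&c=<channel id>.
# For imported Google accounts (no local password) the change is also mirrored to YouTube.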
post "/subscription_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/")
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
env.response.status_code = 400
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_create_subscription_to_channel"]?.try &.to_i?.try &.== 1
action = "action_create_subscription_to_channel"
elsif env.params.query["action_remove_subscriptions"]?.try &.to_i?.try &.== 1
action = "action_remove_subscriptions"
else
next env.redirect referer
end
channel_id = env.params.query["c"]?
channel_id ||= ""
if !user.password
# Sync subscriptions with YouTube
subscribe_ajax(channel_id, action, env.request.headers)
end
email = user.email
case action
when "action_create_subscription_to_channel"
if !user.subscriptions.includes? channel_id
get_channel(channel_id, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
end
when "action_remove_subscriptions"
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", channel_id, email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
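# Lists the user's subscriptions and, with ?action_takeout=1, exports them as JSON
# (subscriptions, watch history and preferences) or as OPML; ?format=newpipe points
# the OPML entries at YouTube's own feeds instead of this instance.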
get "/subscription_manager" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if !user
next env.redirect referer
end
user = user.as(User)
if !user.password
# Refresh account
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
user, sid = get_user(sid, headers, PG_DB)
end
action_takeout = env.params.query["action_takeout"]?.try &.to_i?
action_takeout ||= 0
action_takeout = action_takeout == 1
format = env.params.query["format"]?
format ||= "rss"
if user.subscriptions.empty?
values = "'{}'"
else
values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
end
subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
subscriptions.sort_by! { |channel| channel.author.downcase }
if action_takeout
host_url = make_host_url(config, Kemal.config)
if format == "json"
env.response.content_type = "application/json"
env.response.headers["content-disposition"] = "attachment"
next {
"subscriptions" => user.subscriptions,
"watch_history" => user.watched,
"preferences" => user.preferences,
}.to_json
else
env.response.content_type = "application/xml"
env.response.headers["content-disposition"] = "attachment"
export = XML.build do |xml|
xml.element("opml", version: "1.1") do
xml.element("body") do
if format == "newpipe"
title = "YouTube Subscriptions"
else
title = "Invidious Subscriptions"
end
xml.element("outline", text: title, title: title) do
subscriptions.each do |channel|
if format == "newpipe"
xmlUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
else
xmlUrl = "#{host_url}/feed/channel/#{channel.id}"
end
xml.element("outline", text: channel.author, title: channel.author,
"type": "rss", xmlUrl: xmlUrl)
end
end
end
end
end
next export.gsub(%(<?xml version="1.0"?>\n), "")
end
end
templated "subscription_manager"
end
get "/data_control" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
if user
user = user.as(User)
templated "data_control"
else
env.redirect referer
end
end
post "/data_control" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
if user
user = user.as(User)
spawn do
# Since an import can take a while, if we're not done after 20 seconds
# we push out content to prevent a timeout.
# Note that Chrome will start rendering before the content has finished loading,
# which is why we include a loading icon. Firefox and its derivatives will not see this page,
# and instead redirect immediately once the connection has closed.
# https://stackoverflow.com/q/2091239 is helpful but not directly applicable here.
sleep 20.seconds
env.response.puts %(<meta http-equiv="refresh" content="0; url=#{referer}">)
env.response.puts %(<link rel="stylesheet" href="/css/ionicons.min.css?v=#{ASSET_COMMIT}">)
env.response.puts %(<link rel="stylesheet" href="/css/default.css?v=#{ASSET_COMMIT}">)
if env.get("preferences").as(Preferences).dark_mode
env.response.puts %(<link rel="stylesheet" href="/css/darktheme.css?v=#{ASSET_COMMIT}">)
else
env.response.puts %(<link rel="stylesheet" href="/css/lighttheme.css?v=#{ASSET_COMMIT}">)
end
env.response.puts %(<h3><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>)
env.response.flush
loop do
env.response.puts %(<!-- keepalive #{Time.utc.to_unix} -->)
env.response.flush
sleep (20 + rand(11)).seconds
end
end
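# Each uploaded part is merged into the current account; supported part names are
# import_invidious, import_youtube (OPML), import_freetube, import_newpipe_subscriptions
# and import_newpipe (a zip containing newpipe.db).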
HTTP::FormData.parse(env.request) do |part|
body = part.body.gets_to_end
if body.empty?
next
end
case part.name
when "import_invidious"
body = JSON.parse(body)
if body["subscriptions"]?
user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
end
if body["watch_history"]?
user.watched += body["watch_history"].as_a.map { |a| a.as_s }
user.watched.uniq!
PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
end
if body["preferences"]?
user.preferences = Preferences.from_json(body["preferences"].to_json, user.preferences)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", user.preferences.to_json, user.email)
end
when "import_youtube"
subscriptions = XML.parse(body)
user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_freetube"
user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
md["channel_id"]
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_newpipe_subscriptions"
body = JSON.parse(body)
user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
next match["channel"]
elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
client = make_client(YT_URL)
response = client.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
if canonical
ucid = canonical["href"].split("/")[-1]
next ucid
end
end
nil
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_newpipe"
Zip::Reader.open(IO::Memory.new(body)) do |file|
file.each_entry do |entry|
if entry.filename == "newpipe.db"
tempfile = File.tempfile(".db")
File.write(tempfile.path, entry.io.gets_to_end)
db = DB.open("sqlite3://" + tempfile.path)
user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
user.watched.uniq!
PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
db.close
tempfile.delete
end
end
end
end
end
end
env.redirect referer
end
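# Password changes are only possible for local accounts: the current password is
# checked against its bcrypt hash, the new password must be confirmed, and it may
# be at most 55 bytes long (the limit enforced below).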
get "/change_password" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY, PG_DB)
templated "change_password"
else
env.redirect referer
end
end
post "/change_password" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
# We don't store passwords for Google accounts
if !user.password
error_message = "Cannot change password for Google accounts"
env.response.status_code = 400
next templated "error"
end
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
password = env.params.body["password"]?
if !password
error_message = translate(locale, "Password is a required field")
env.response.status_code = 401
next templated "error"
end
new_passwords = env.params.body.select { |k, v| k.match(/^new_password\[\d+\]$/) }.map { |k, v| v }
if new_passwords.size <= 1 || new_passwords.uniq.size != 1
error_message = translate(locale, "New passwords must match")
env.response.status_code = 400
next templated "error"
end
new_password = new_passwords.uniq[0]
if new_password.empty?
error_message = translate(locale, "Password cannot be empty")
env.response.status_code = 401
next templated "error"
end
if new_password.bytesize > 55
error_message = translate(locale, "Password should not be longer than 55 characters")
env.response.status_code = 400
next templated "error"
end
if !Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
error_message = translate(locale, "Incorrect password")
env.response.status_code = 401
next templated "error"
end
new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
PG_DB.exec("UPDATE users SET password = $1 WHERE email = $2", new_password.to_s, user.email)
end
env.redirect referer
end
get "/delete_account" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY, PG_DB)
templated "delete_account"
else
env.redirect referer
end
end
post "/delete_account" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("DELETE FROM users * WHERE email = $1", user.email)
PG_DB.exec("DELETE FROM session_ids * WHERE email = $1", user.email)
PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")
env.request.cookies.each do |cookie|
cookie.expires = Time.utc(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
end
get "/clear_watch_history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY, PG_DB)
templated "clear_watch_history"
else
env.redirect referer
end
end
post "/clear_watch_history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)
end
env.redirect referer
end
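# Token authorization flow: the user grants an API token with the requested
# comma-separated ?scopes, and optionally ?expire and ?callback_url; on POST the
# token is generated and either shown on the page or appended to the callback URL
# as ?token=.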
get "/authorize_token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB)
scopes = env.params.query["scopes"]?.try &.split(",")
scopes ||= [] of String
callback_url = env.params.query["callback_url"]?
if callback_url
callback_url = URI.parse(callback_url)
end
expire = env.params.query["expire"]?.try &.to_i?
templated "authorize_token"
else
env.redirect referer
end
end
post "/authorize_token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = env.get("user").as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
callback_url = env.params.body["callbackUrl"]?
expire = env.params.body["expire"]?.try &.to_i?
access_token = generate_token(user.email, scopes, expire, HMAC_KEY, PG_DB)
if callback_url
access_token = URI.escape(access_token)
url = URI.parse(callback_url)
if url.query
query = HTTP::Params.parse(url.query.not_nil!)
else
query = HTTP::Params.new
end
query["token"] = access_token
url.query = query.to_s
env.redirect url.to_s
else
csrf_token = ""
env.set "access_token", access_token
templated "authorize_token"
end
end
end
get "/token_manager" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/subscription_manager")
if !user
next env.redirect referer
end
user = user.as(User)
tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1 ORDER BY issued DESC", user.email, as: {session: String, issued: Time})
templated "token_manager"
end
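# Revokes a single session or API token for the signed-in user,
# e.g. POST /token_ajax?action_revoke_token&session=<id>&redirect=false.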
post "/token_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
env.response.status_code = 400
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_revoke_token"]?
action = "action_revoke_token"
else
next env.redirect referer
end
session = env.params.query["session"]?
session ||= ""
case action
when .starts_with? "action_revoke_token"
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1 AND email = $2", session, user.email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
# Feeds
get "/feed/top" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
if config.top_enabled
templated "top"
else
env.redirect "/"
end
end
get "/feed/popular" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
templated "popular"
end
get "/feed/trending" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
trending_type = env.params.query["type"]?
trending_type ||= "Default"
region = env.params.query["region"]?
region ||= "US"
begin
trending, plid = fetch_trending(trending_type, region, locale)
rescue ex
error_message = "#{ex.message}"
env.response.status_code = 500
next templated "error"
end
templated "trending"
end
get "/feed/subscriptions" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if !user
next env.redirect referer
end
user = user.as(User)
sid = sid.as(String)
token = user.token
if user.preferences.unseen_only
env.set "show_watched", true
end
# Refresh account
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
if !user.password
user, sid = get_user(sid, headers, PG_DB)
end
max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
max_results ||= user.preferences.max_results
max_results ||= CONFIG.default_user_preferences.max_results
page = env.params.query["page"]?.try &.to_i?
page ||= 1
videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
# "updated" here is used for delivering new notifications, so if
# we know a user has looked at their feed e.g. in the past 10 minutes,
# they've already seen a video posted 20 minutes ago, and don't need
# to be notified.
PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.utc,
user.email)
user.notifications = [] of String
env.set "user", user
templated "subscriptions"
end
get "/feed/history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if !user
next env.redirect referer
end
user = user.as(User)
max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
max_results ||= user.preferences.max_results
max_results ||= CONFIG.default_user_preferences.max_results
if user.watched[(page - 1) * max_results]?
watched = user.watched.reverse[(page - 1) * max_results, max_results]
end
watched ||= [] of String
templated "history"
end
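# Channel Atom feed: built from YouTube's /feeds/videos.xml for the channel,
# with every entry rewritten so links point at this instance.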
get "/feed/channel/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
ucid = env.params.url["ucid"]
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next error_message
end
client = make_client(YT_URL)
rss = client.get("/feeds/videos.xml?channel_id=#{channel.ucid}").body
rss = XML.parse_html(rss)
videos = [] of SearchVideo
rss.xpath_nodes("//feed/entry").each do |entry|
video_id = entry.xpath_node("videoid").not_nil!.content
title = entry.xpath_node("title").not_nil!.content
published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
author = entry.xpath_node("author/name").not_nil!.content
ucid = entry.xpath_node("channelid").not_nil!.content
description_html = entry.xpath_node("group/description").not_nil!.to_s
views = entry.xpath_node("group/community/statistics").not_nil!.["views"].to_i64
videos << SearchVideo.new(
title: title,
id: video_id,
author: author,
ucid: ucid,
published: published,
views: views,
description_html: description_html,
length_seconds: 0,
live_now: false,
paid: false,
premium: false,
premiere_timestamp: nil
)
end
host_url = make_host_url(config, Kemal.config)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
"xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
"xml:lang": "en-US") do
xml.element("link", rel: "self", href: "#{host_url}#{env.request.resource}")
xml.element("id") { xml.text "yt:channel:#{channel.ucid}" }
xml.element("yt:channelId") { xml.text channel.ucid }
xml.element("title") { xml.text channel.author }
xml.element("link", rel: "alternate", href: "#{host_url}/channel/#{channel.ucid}")
xml.element("author") do
xml.element("name") { xml.text channel.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{channel.ucid}" }
end
videos.each do |video|
video.to_xml(host_url, channel.auto_generated, xml)
end
end
end
end
get "/feed/private" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
token = env.params.query["token"]?
if !token
env.response.status_code = 403
next
end
user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token.strip, as: User)
if !user
env.response.status_code = 403
next
end
max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
max_results ||= user.preferences.max_results
max_results ||= CONFIG.default_user_preferences.max_results
page = env.params.query["page"]?.try &.to_i?
page ||= 1
videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
host_url = make_host_url(config, Kemal.config)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
"xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
"xml:lang": "en-US") do
xml.element("link", "type": "text/html", rel: "alternate", href: "#{host_url}/feed/subscriptions")
xml.element("link", "type": "application/atom+xml", rel: "self",
href: "#{host_url}#{env.request.resource}")
xml.element("title") { xml.text translate(locale, "Invidious Private Feed for `x`", user.email) }
(notifications + videos).each do |video|
video.to_xml(locale, host_url, xml)
end
end
end
end
get "/feed/playlist/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
plid = env.params.url["plid"]
host_url = make_host_url(config, Kemal.config)
path = env.request.path
client = make_client(YT_URL)
response = client.get("/feeds/videos.xml?playlist_id=#{plid}")
document = XML.parse(response.body)
document.xpath_nodes(%q(//*[@href]|//*[@url])).each do |node|
node.attributes.each do |attribute|
case attribute.name
when "url"
node["url"] = "#{host_url}#{URI.parse(node["url"]).full_path}"
when "href"
node["href"] = "#{host_url}#{URI.parse(node["href"]).full_path}"
end
end
end
document = document.to_xml(options: XML::SaveOptions::NO_DECL)
document.scan(/<uri>(?<url>[^<]+)<\/uri>/).each do |match|
content = "#{host_url}#{URI.parse(match["url"]).full_path}"
document = document.gsub(match[0], "<uri>#{content}</uri>")
end
document
end
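# Compatibility shim for YouTube-style feed URLs, redirecting them to the
# corresponding /feed/* routes above.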
get "/feeds/videos.xml" do |env|
if ucid = env.params.query["channel_id"]?
env.redirect "/feed/channel/#{ucid}"
elsif user = env.params.query["user"]?
env.redirect "/feed/channel/#{user}"
elsif plid = env.params.query["playlist_id"]?
env.redirect "/feed/playlist/#{plid}"
end
end
# Support push notifications via PubSubHubbub
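# Subscription verification: the :token is the verify token we generated when
# subscribing. It embeds a timestamp and an HMAC-SHA1 signature (v1 tokens also
# carry a nonce); requests older than 432000 seconds (5 days) are rejected.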
get "/feed/webhook/:token" do |env|
verify_token = env.params.url["token"]
mode = env.params.query["hub.mode"]
topic = env.params.query["hub.topic"]
challenge = env.params.query["hub.challenge"]
case verify_token
when .starts_with? "v1"
_, time, nonce, signature = verify_token.split(":")
data = "#{time}:#{nonce}"
when .starts_with? "v2"
time, signature = verify_token.split(":")
data = "#{time}"
else
env.response.status_code = 400
next
end
# The hub will sometimes check if we're still subscribed after delivery errors,
# so we reply with a 200 as long as the request hasn't expired
if Time.utc.to_unix - time.to_i > 432000
env.response.status_code = 400
next
end
if OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, data) != signature
env.response.status_code = 400
next
end
if ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]?
PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.utc, ucid)
elsif plid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["playlist_id"]?
PG_DB.exec("UPDATE playlists SET subscribed = $1 WHERE id = $2", Time.utc, ucid)
else
env.response.status_code = 400
next
end
env.response.status_code = 200
challenge
end
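# Payload delivery: the hub POSTs an Atom entry signed with X-Hub-Signature. After
# verifying the HMAC we refresh the video, upsert it into channel_videos, notify
# listeners on the "notifications" channel and flag affected users' feeds for update.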
post "/feed/webhook/:token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
token = env.params.url["token"]
body = env.request.body.not_nil!.gets_to_end
signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
logger.puts("#{token} : Invalid signature")
env.response.status_code = 200
next
end
spawn do
rss = XML.parse_html(body)
rss.xpath_nodes("//feed/entry").each do |entry|
id = entry.xpath_node("videoid").not_nil!.content
author = entry.xpath_node("author/name").not_nil!.content
published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
video = get_video(id, PG_DB, force_refresh: true)
# Deliver notifications to `/api/v1/auth/notifications`
payload = {
"topic" => video.ucid,
"videoId" => video.id,
"published" => published.to_unix,
}.to_json
PG_DB.exec("NOTIFY notifications, E'#{payload}'")
video = ChannelVideo.new(
id: id,
title: video.title,
published: published,
updated: updated,
ucid: video.ucid,
author: author,
length_seconds: video.length_seconds,
live_now: video.live_now,
premiere_timestamp: video.premiere_timestamp,
views: video.views,
)
emails = PG_DB.query_all("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications) RETURNING email",
video.id, video.published, video.ucid, as: String)
video_array = video.to_a
args = arg_array(video_array)
PG_DB.exec("INSERT INTO channel_videos VALUES (#{args}) \
ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
updated = $4, ucid = $5, author = $6, length_seconds = $7, \
live_now = $8, premiere_timestamp = $9, views = $10", video_array)
# Update all users affected by insert
if emails.empty?
values = "'{}'"
else
values = "VALUES #{emails.map { |id| %(('#{id}')) }.join(",")}"
end
PG_DB.exec("UPDATE users SET feed_needs_update = true WHERE email = ANY(#{values})")
end
end
env.response.status_code = 200
next
end
# Channels
{"/channel/:ucid/live", "/user/:user/live", "/c/:user/live"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
client = make_client(YT_URL)
# There appears to be a bug in routing: having several routes configured
# as `/a/:a`, `/b/:a`, `/c/:a` results in a 404
value = env.request.resource.split("/")[2]
body = ""
{"channel", "user", "c"}.each do |type|
response = client.get("/#{type}/#{value}/live?disable_polymer=1")
if response.status_code == 200
body = response.body
end
end
video_id = body.match(/'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})"/).try &.["id"]?
if video_id
params = [] of String
env.params.query.each do |k, v|
params << "#{k}=#{v}"
end
params = params.join("&")
url = "/watch?v=#{video_id}"
if !params.empty?
url += "&#{params}"
end
env.redirect url
else
env.redirect "/channel/#{value}"
end
end
end
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
get "/c/:user" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
client = make_client(YT_URL)
user = env.params.url["user"]
response = client.get("/c/#{user}")
document = XML.parse_html(response.body)
anchor = document.xpath_node(%q(//a[contains(@class,"branded-page-header-title-link")]))
if !anchor
next env.redirect "/"
end
env.redirect anchor["href"]
end
# Legacy endpoint for /user/:username
get "/profile" do |env|
user = env.params.query["user"]?
if !user
env.redirect "/"
else
env.redirect "/user/#{user}"
end
end
get "/attribution_link" do |env|
if query = env.params.query["u"]?
url = URI.parse(query).full_path
else
url = "/"
end
env.redirect url
end
# Page used by YouTube to provide its captioning widget; since we
# don't support it, we redirect to '/'
get "/timedtext_video" do |env|
env.redirect "/"
end
get "/user/:user" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}"
end
get "/user/:user/videos" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}/videos"
end
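# Channel page: auto-generated channels list their playlists, other channels list
# their uploads (60 per page) sorted by ?sort_by.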
get "/channel/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
subscriptions = user.subscriptions
end
subscriptions ||= [] of String
ucid = env.params.url["ucid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
if channel.auto_generated
sort_options = {"last", "oldest", "newest"}
sort_by ||= "last"
items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
items.uniq! do |item|
if item.responds_to?(:title)
item.title
elsif item.responds_to?(:author)
item.author
end
end
items.select! { |item| item.responds_to?(:thumbnail_id) && item.thumbnail_id }
items = items.map { |item| item.as(SearchPlaylist) }
items.each { |item| item.author = "" }
else
sort_options = {"newest", "oldest", "popular"}
sort_by ||= "newest"
items, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
items.select! { |item| !item.paid }
env.set "search", "channel:#{channel.ucid} "
end
templated "channel"
end
get "/channel/:ucid/videos" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
ucid = env.params.url["ucid"]
params = env.request.query
if !params || params.empty?
params = ""
else
params = "?#{params}"
end
env.redirect "/channel/#{ucid}#{params}"
end
get "/channel/:ucid/playlists" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
subscriptions = user.subscriptions
end
subscriptions ||= [] of String
ucid = env.params.url["ucid"]
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "last"
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
if channel.auto_generated
next env.redirect "/channel/#{channel.ucid}"
end
items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
items.select! { |item| item.is_a?(SearchPlaylist) && !item.videos.empty? }
items = items.map { |item| item.as(SearchPlaylist) }
items.each { |item| item.author = "" }
templated "playlists"
end
# API Endpoints
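# /api/v1/stats returns the instance statistics assembled elsewhere in `statistics`
# (only when statistics_enabled is set in the config).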
get "/api/v1/stats" do |env|
env.response.content_type = "application/json"
if !config.statistics_enabled
error_message = {"error" => "Statistics are not enabled."}.to_json
env.response.status_code = 400
next error_message
end
if statistics["error"]?
env.response.status_code = 500
next statistics.to_json
end
statistics.to_json
end
# YouTube provides "storyboards", which are sprites containing x * y
# preview thumbnails for individual scenes in a video.
# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
get "/api/v1/storyboards/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/storyboards/#{ex.message}"
rescue ex
env.response.status_code = 500
next
end
storyboards = video.storyboards
width = env.params.query["width"]?
height = env.params.query["height"]?
if !width && !height
response = JSON.build do |json|
json.object do
json.field "storyboards" do
generate_storyboards(json, id, storyboards, config, Kemal.config)
end
end
end
next response
end
env.response.content_type = "text/vtt"
storyboard = storyboards.select { |storyboard| width == "#{storyboard[:width]}" || height == "#{storyboard[:height]}" }
if storyboard.empty?
env.response.status_code = 404
next
else
storyboard = storyboard[0]
end
webvtt = <<-END_VTT
WEBVTT
END_VTT
start_time = 0.milliseconds
end_time = storyboard[:interval].milliseconds
storyboard[:storyboard_count].times do |i|
host_url = make_host_url(config, Kemal.config)
url = storyboard[:url].gsub("$M", i).gsub("https://i9.ytimg.com", host_url)
storyboard[:storyboard_height].times do |j|
storyboard[:storyboard_width].times do |k|
webvtt += <<-END_CUE
#{start_time}.000 --> #{end_time}.000
#{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width]},#{storyboard[:height]}
END_CUE
start_time += storyboard[:interval].milliseconds
end_time += storyboard[:interval].milliseconds
end
end
end
webvtt
end
get "/api/v1/captions/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
# See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
# It is possible to use `/api/timedtext?type=list&v=#{id}` and
# `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
# but this does not provide links for auto-generated captions.
#
# In future this should be investigated as an alternative, since it does not require
# getting video info.
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/captions/#{ex.message}"
rescue ex
env.response.status_code = 500
next
end
captions = video.captions
label = env.params.query["label"]?
lang = env.params.query["lang"]?
tlang = env.params.query["tlang"]?
if !label && !lang
response = JSON.build do |json|
json.object do
json.field "captions" do
json.array do
captions.each do |caption|
json.object do
json.field "label", caption.name.simpleText
json.field "languageCode", caption.languageCode
json.field "url", "/api/v1/captions/#{id}?label=#{URI.escape(caption.name.simpleText)}"
end
end
end
end
end
end
next response
end
env.response.content_type = "text/vtt; charset=UTF-8"
caption = captions.select { |caption| caption.name.simpleText == label }
if lang
caption = captions.select { |caption| caption.languageCode == lang }
end
if caption.empty?
env.response.status_code = 404
next
else
caption = caption[0]
end
url = caption.baseUrl + "&tlang=#{tlang}"
# Auto-generated captions often have cues that aren't aligned properly with the video,
# as well as some other markup that makes them cumbersome to use, so we try to fix that here
if caption.name.simpleText.includes? "auto-generated"
caption_xml = client.get(url).body
caption_xml = XML.parse(caption_xml)
webvtt = <<-END_VTT
WEBVTT
Kind: captions
Language: #{tlang || caption.languageCode}
END_VTT
caption_nodes = caption_xml.xpath_nodes("//transcript/text")
caption_nodes.each_with_index do |node, i|
start_time = node["start"].to_f.seconds
duration = node["dur"]?.try &.to_f.seconds
duration ||= start_time
if caption_nodes.size > i + 1
end_time = caption_nodes[i + 1]["start"].to_f.seconds
else
end_time = start_time + duration
end
start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"
text = HTML.unescape(node.content)
text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
text = text.gsub(/<\/font>/, "")
if md = text.match(/(?<name>.*) : (?<text>.*)/)
text = "<v #{md["name"]}>#{md["text"]}</v>"
end
webvtt += <<-END_CUE
#{start_time} --> #{end_time}
#{text}
END_CUE
end
else
url += "&format=vtt"
webvtt = client.get(url).body
end
if title = env.params.query["title"]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}"
end
webvtt
end
get "/api/v1/comments/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
env.response.content_type = "application/json"
id = env.params.url["id"]
source = env.params.query["source"]?
source ||= "youtube"
thin_mode = env.params.query["thin_mode"]?
thin_mode = thin_mode == "true"
format = env.params.query["format"]?
format ||= "json"
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
if source == "youtube"
sort_by ||= "top"
begin
comments = fetch_youtube_comments(id, PG_DB, continuation, format, locale, thin_mode, region, sort_by: sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
next comments
elsif source == "reddit"
sort_by ||= "confidence"
begin
comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
content_html = template_reddit_comments(comments, locale)
content_html = fill_links(content_html, "https", "www.reddit.com")
content_html = replace_links(content_html)
rescue ex
comments = nil
reddit_thread = nil
content_html = ""
end
if !reddit_thread || !comments
env.response.status_code = 404
next
end
if format == "json"
reddit_thread = JSON.parse(reddit_thread.to_json).as_h
reddit_thread["comments"] = JSON.parse(comments.to_json)
next reddit_thread.to_json
else
response = {
"title" => reddit_thread.title,
"permalink" => reddit_thread.permalink,
"contentHtml" => content_html,
}
next response.to_json
end
end
end
get "/api/v1/insights/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
id = env.params.url["id"]
env.response.content_type = "application/json"
error_message = {"error" => "YouTube has removed publicly-available analytics."}.to_json
env.response.status_code = 410
next error_message
client = make_client(YT_URL)
headers = HTTP::Headers.new
response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1")
headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
session_token = response.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
post_req = {
session_token: session_token,
}
response = client.post("/insight_ajax?action_get_statistics_and_data=1&v=#{id}", headers, form: post_req).body
response = XML.parse(response)
html_content = XML.parse_html(response.xpath_node(%q(//html_content)).not_nil!.content)
graph_data = response.xpath_node(%q(//graph_data))
if !graph_data
error = html_content.xpath_node(%q(//p)).not_nil!.content
next {"error" => error}.to_json
end
graph_data = JSON.parse(graph_data.content)
view_count = 0_i64
time_watched = 0_i64
subscriptions_driven = 0
shares = 0
stats_nodes = html_content.xpath_nodes(%q(//table/tr/td))
stats_nodes.each do |node|
key = node.xpath_node(%q(.//span))
value = node.xpath_node(%q(.//div))
if !key || !value
next
end
key = key.content
value = value.content
case key
when "Views"
view_count = value.delete(", ").to_i64
when "Time watched"
time_watched = value
when "Subscriptions driven"
subscriptions_driven = value.delete(", ").to_i
when "Shares"
shares = value.delete(", ").to_i
end
end
avg_view_duration_seconds = html_content.xpath_node(%q(//div[@id="stats-chart-tab-watch-time"]/span/span[2])).not_nil!.content
avg_view_duration_seconds = decode_length_seconds(avg_view_duration_seconds)
{
"viewCount" => view_count,
"timeWatchedText" => time_watched,
"subscriptionsDriven" => subscriptions_driven,
"shares" => shares,
"avgViewDurationSeconds" => avg_view_duration_seconds,
"graphData" => graph_data,
}.to_json
end
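# Returns a video's annotations as XML, served from the archive.org mirror or fetched from YouTube directly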
get "/api/v1/annotations/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "text/xml"
id = env.params.url["id"]
source = env.params.query["source"]?
source ||= "archive"
if !id.match(/[a-zA-Z0-9_-]{11}/)
env.response.status_code = 400
next
end
annotations = ""
case source
when "archive"
if CONFIG.cache_annotations && (cached_annotation = PG_DB.query_one?("SELECT * FROM annotations WHERE id = $1", id, as: Annotation))
annotations = cached_annotation.annotations
else
index = CHARS_SAFE.index(id[0]).not_nil!.to_s.rjust(2, '0')
# The Internet Archive doesn't handle leading hyphens, so IDs starting with '-'
# are looked up under https://archive.org/details/youtubeannotations_64 instead
if index == "62"
index = "64"
id = id.sub(/^-/, 'A')
end
file = URI.escape("#{id[0, 3]}/#{id}.xml")
client = make_client(ARCHIVE_URL)
location = client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
if !location.headers["Location"]?
env.response.status_code = location.status_code
end
response = make_client(URI.parse(location.headers["Location"])).get(location.headers["Location"])
if response.body.empty?
env.response.status_code = 404
next
end
if response.status_code != 200
env.response.status_code = response.status_code
next
end
annotations = response.body
cache_annotation(PG_DB, id, annotations)
end
when "youtube"
client = make_client(YT_URL)
response = client.get("/annotations_invideo?video_id=#{id}")
if response.status_code != 200
env.response.status_code = response.status_code
next
end
annotations = response.body
end
annotations
end
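# Returns full metadata for a single video as JSON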
get "/api/v1/videos/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
begin
video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/videos/#{ex.message}"
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
video.to_json(locale, config, Kemal.config, decrypt_function)
end
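# Returns YouTube's trending page as JSON, optionally filtered by type and region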
get "/api/v1/trending" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
region = env.params.query["region"]?
trending_type = env.params.query["type"]?
begin
trending, plid = fetch_trending(trending_type, region, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
videos = JSON.build do |json|
json.array do
trending.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
videos
end
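# Returns the instance's popular videos feed as JSON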
get "/api/v1/popular" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
JSON.build do |json|
json.array do
popular_videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
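# Returns YouTube's top videos as JSON; disabled when `top_enabled` is false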
get "/api/v1/top" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
if !config.top_enabled
error_message = {"error" => "Administrator has disabled this endpoint."}.to_json
env.response.status_code = 400
next error_message
end
JSON.build do |json|
json.array do
top_videos.each do |video|
# Top videos carry far more information than is provided below (adaptiveFormats, etc.),
# but it can be very out of date, so we only provide a subset here
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "lengthSeconds", video.info["length_seconds"].to_i
json.field "viewCount", video.views
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
json.field "description", html_to_content(video.description_html)
json.field "descriptionHtml", video.description_html
end
end
end
end
end
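# Returns channel metadata (banners, thumbnails, stats), latest videos and related channels as JSON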
get "/api/v1/channels/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "newest"
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
page = 1
if channel.auto_generated
videos = [] of SearchVideo
count = 0
else
begin
videos, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
end
JSON.build do |json|
# TODO: Refactor into `to_json` for InvidiousChannel
json.object do
json.field "author", channel.author
json.field "authorId", channel.ucid
json.field "authorUrl", channel.author_url
json.field "authorBanners" do
json.array do
if channel.banner
qualities = {
{width: 2560, height: 424},
{width: 2120, height: 351},
{width: 1060, height: 175},
}
qualities.each do |quality|
json.object do
json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
json.field "width", quality[:width]
json.field "height", quality[:height]
end
end
json.object do
json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
json.field "width", 512
json.field "height", 288
end
end
end
end
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", channel.author_thumbnail.gsub("=s100-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCount", channel.sub_count
json.field "totalViews", channel.total_views
json.field "joined", channel.joined.to_unix
json.field "paid", channel.paid
json.field "autoGenerated", channel.auto_generated
json.field "isFamilyFriendly", channel.is_family_friendly
json.field "description", html_to_content(channel.description_html)
json.field "descriptionHtml", channel.description_html
json.field "allowedRegions", channel.allowed_regions
json.field "latestVideos" do
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
json.field "relatedChannels" do
json.array do
channel.related_channels.each do |related_channel|
json.object do
json.field "author", related_channel.author
json.field "authorId", related_channel.ucid
json.field "authorUrl", related_channel.author_url
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", related_channel.author_thumbnail.gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
end
end
end
end
end
end
end
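# Returns a page of a channel's videos as JSON (both path orders are accepted)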
{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
sort_by = env.params.query["sort"]?.try &.downcase
sort_by ||= env.params.query["sort_by"]?.try &.downcase
sort_by ||= "newest"
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
begin
videos, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
JSON.build do |json|
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
end
{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
begin
videos = get_latest_videos(ucid)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
JSON.build do |json|
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
end
{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort"]?.try &.downcase
sort_by ||= env.params.query["sort_by"]?.try &.downcase
sort_by ||= "last"
begin
channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
JSON.build do |json|
json.object do
json.field "playlists" do
json.array do
items.each do |item|
if item.is_a?(SearchPlaylist)
item.to_json(locale, config, Kemal.config, json)
end
end
end
end
json.field "continuation", continuation
end
end
end
end
{"/api/v1/channels/:ucid/comments", "/api/v1/channels/comments/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
continuation = env.params.query["continuation"]?
# sort_by = env.params.query["sort_by"]?.try &.downcase
begin
fetch_channel_community(ucid, continuation, locale, config, Kemal.config)
rescue ex
env.response.status_code = 400
error_message = {"error" => ex.message}.to_json
next error_message
end
end
end
get "/api/v1/channels/search/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
query = env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
count, search_results = channel_search(query, page, ucid)
JSON.build do |json|
json.array do
search_results.each do |item|
item.to_json(locale, config, Kemal.config, json)
end
end
end
end
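# Searches YouTube; sort, date, duration, feature and type filters are encoded into YouTube's search parameters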
get "/api/v1/search" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
env.response.content_type = "application/json"
query = env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "relevance"
date = env.params.query["date"]?.try &.downcase
date ||= ""
duration = env.params.query["duration"]?.try &.downcase
duration ||= ""
features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
features ||= [] of String
content_type = env.params.query["type"]?.try &.downcase
content_type ||= "video"
begin
search_params = produce_search_params(sort_by, date, content_type, duration, features)
rescue ex
env.response.status_code = 400
error_message = {"error" => ex.message}.to_json
next error_message
end
count, search_results = search(query, page, search_params, region).as(Tuple)
JSON.build do |json|
json.array do
search_results.each do |item|
item.to_json(locale, config, Kemal.config, json)
end
end
end
end
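# Returns search autocompletions, proxied from suggestqueries.google.com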
get "/api/v1/search/suggestions" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
env.response.content_type = "application/json"
query = env.params.query["q"]?
query ||= ""
begin
client = make_client(URI.parse("https://suggestqueries.google.com"))
response = client.get("/complete/search?hl=en&gl=#{region}&client=youtube&ds=yt&q=#{URI.escape(query)}&callback=suggestCallback").body
body = response[35..-2]
body = JSON.parse(body).as_a
suggestions = body[1].as_a[0..-2]
JSON.build do |json|
json.object do
json.field "query", body[0].as_s
json.field "suggestions" do
json.array do
suggestions.each do |suggestion|
json.string suggestion[0].as_s
end
end
end
end
end
rescue ex
env.response.status_code = 500
error_message = {"error" => ex.message}.to_json
next error_message
end
end
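# Returns playlist metadata and a page of its videos as JSON, or as pre-rendered HTML when `format=html`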
get "/api/v1/playlists/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
plid = env.params.url["plid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
format = env.params.query["format"]?
format ||= "json"
continuation = env.params.query["continuation"]?
if plid.starts_with? "RD"
next env.redirect "/api/v1/mixes/#{plid}"
end
begin
playlist = fetch_playlist(plid, locale)
rescue ex
error_message = {"error" => "Playlist is empty"}.to_json
env.response.status_code = 410
next error_message
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count, continuation, locale)
rescue ex
videos = [] of PlaylistVideo
end
response = JSON.build do |json|
json.object do
json.field "title", playlist.title
json.field "playlistId", playlist.id
json.field "author", playlist.author
json.field "authorId", playlist.ucid
json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", playlist.author_thumbnail.gsub("=s100-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "description", html_to_content(playlist.description_html)
json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
json.field "updated", playlist.updated.to_unix
json.field "videos" do
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_playlist(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response
end
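# Returns a YouTube mix (auto-generated 'RD' playlist) as JSON, starting from the given continuation video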
get "/api/v1/mixes/:rdid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
rdid = env.params.url["rdid"]
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")[0, 11]
format = env.params.query["format"]?
format ||= "json"
begin
mix = fetch_mix(rdid, continuation, locale: locale)
if !rdid.ends_with? continuation
mix = fetch_mix(rdid, mix.videos[1].id)
index = mix.videos.index(mix.videos.select { |video| video.id == continuation }[0]?)
end
index ||= 0
mix.videos = mix.videos[index..-1]
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
response = JSON.build do |json|
json.object do
json.field "title", mix.title
json.field "mixId", mix.id
json.field "videos" do
json.array do
mix.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
json.array do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
json.field "index", video.index
json.field "lengthSeconds", video.length_seconds
end
end
end
end
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_mix(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
next_video ||= ""
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response
end
# Authenticated endpoints
get "/api/v1/auth/notifications" do |env|
env.response.content_type = "text/event-stream"
topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel)
end
post "/api/v1/auth/notifications" do |env|
env.response.content_type = "text/event-stream"
topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel)
end
get "/api/v1/auth/preferences" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
user.preferences.to_json
end
post "/api/v1/auth/preferences" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
begin
preferences = Preferences.from_json(env.request.body || "{}", user.preferences)
rescue
preferences = user.preferences
end
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
env.response.status_code = 204
end
get "/api/v1/auth/feed" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
max_results = env.params.query["max_results"]?.try &.to_i?
max_results ||= user.preferences.max_results
max_results ||= CONFIG.default_user_preferences.max_results
page = env.params.query["page"]?.try &.to_i?
page ||= 1
videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
JSON.build do |json|
json.object do
json.field "notifications" do
json.array do
notifications.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
json.field "videos" do
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
end
end
get "/api/v1/auth/subscriptions" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
if user.subscriptions.empty?
values = "'{}'"
else
values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
end
subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
JSON.build do |json|
json.array do
subscriptions.each do |subscription|
json.object do
json.field "author", subscription.author
json.field "authorId", subscription.id
end
end
end
end
end
post "/api/v1/auth/subscriptions/:ucid" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
ucid = env.params.url["ucid"]
if !user.subscriptions.includes? ucid
get_channel(ucid, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
end
# For Google accounts, access tokens don't have enough information to
# make a request on the user's behalf, which is why we don't sync with
# YouTube.
env.response.status_code = 204
end
delete "/api/v1/auth/subscriptions/:ucid" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
ucid = env.params.url["ucid"]
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", ucid, user.email)
env.response.status_code = 204
end
get "/api/v1/auth/tokens" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
scopes = env.get("scopes").as(Array(String))
tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1", user.email, as: {session: String, issued: Time})
JSON.build do |json|
json.array do
tokens.each do |token|
json.object do
json.field "session", token[:session]
json.field "issued", token[:issued].to_unix
end
end
end
end
end
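# Issues a new access token. Scopes, an optional callback URL and an expiry can be passed as form data or JSON;
# requests coming from a logged-in browser session are shown an authorization page first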
post "/api/v1/auth/tokens/register" do |env|
user = env.get("user").as(User)
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
case env.request.headers["Content-Type"]?
when "application/x-www-form-urlencoded"
scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
callback_url = env.params.body["callbackUrl"]?
expire = env.params.body["expire"]?.try &.to_i?
when "application/json"
scopes = env.params.json["scopes"].as(Array).map { |v| v.as_s }
callback_url = env.params.json["callbackUrl"]?.try &.as(String)
expire = env.params.json["expire"]?.try &.as(Int64)
else
error_message = {"error" => "Invalid or missing header 'Content-Type'"}.to_json
env.response.status_code = 400
next error_message
end
if callback_url && callback_url.empty?
callback_url = nil
end
if callback_url
callback_url = URI.parse(callback_url)
end
if sid = env.get?("sid").try &.as(String)
env.response.content_type = "text/html"
csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB, use_nonce: true)
next templated "authorize_token"
else
env.response.content_type = "application/json"
superset_scopes = env.get("scopes").as(Array(String))
authorized_scopes = [] of String
scopes.each do |scope|
if scopes_include_scope(superset_scopes, scope)
authorized_scopes << scope
end
end
access_token = generate_token(user.email, authorized_scopes, expire, HMAC_KEY, PG_DB)
if callback_url
access_token = URI.escape(access_token)
if query = callback_url.query
query = HTTP::Params.parse(query.not_nil!)
else
query = HTTP::Params.new
end
query["token"] = access_token
callback_url.query = query.to_s
env.redirect callback_url.to_s
else
access_token
end
end
end
post "/api/v1/auth/tokens/unregister" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
scopes = env.get("scopes").as(Array(String))
session = env.params.json["session"]?.try &.as(String)
session ||= env.get("session").as(String)
# Allow tokens to revoke other tokens with correct scope
if session == env.get("session").as(String)
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
elsif scopes_include_scope(scopes, "GET:tokens")
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
else
error_message = {"error" => "Cannot revoke session #{session}"}.to_json
env.response.status_code = 400
next error_message
end
env.response.status_code = 204
end
get "/api/manifest/dash/id/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect "/videoplayback?#{env.params.query}"
end
get "/api/manifest/dash/id/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect env.request.path.lchop("/api/manifest/dash/id")
end
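# Builds a DASH MPD manifest for a video, grouping audio and video streams into AdaptationSets by MIME type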
get "/api/manifest/dash/id/:id" do |env|
env.response.headers.add("Access-Control-Allow-Origin", "*")
env.response.content_type = "application/dash+xml"
local = env.params.query["local"]?.try &.== "true"
id = env.params.url["id"]
region = env.params.query["region"]?
# Since some players build their quality menus from resolution alone, regardless of codec,
# `unique_res` lets us emit only one Representation per unique height within each AdaptationSet
unique_res = env.params.query["unique_res"]? && (env.params.query["unique_res"] == "true" || env.params.query["unique_res"] == "1")
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
url = "/api/manifest/dash/id/#{ex.message}"
if env.params.query
url += "?#{env.params.query}"
end
next env.redirect url
rescue ex
env.response.status_code = 403
next
end
if dashmpd = video.player_response["streamingData"]?.try &.["dashManifestUrl"]?.try &.as_s
manifest = client.get(dashmpd).body
manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
url = baseurl.lchop("<BaseURL>")
url = url.rchop("</BaseURL>")
if local
url = URI.parse(url).full_path
end
"<BaseURL>#{url}</BaseURL>"
end
next manifest
end
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if local
adaptive_fmts.each do |fmt|
fmt["url"] = URI.parse(fmt["url"]).full_path
2018-07-16 15:18:59 +02:00
end
end
audio_streams = video.audio_streams(adaptive_fmts)
video_streams = video.video_streams(adaptive_fmts)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",
"profiles": "urn:mpeg:dash:profile:full:2011", minBufferTime: "PT1.5S", type: "static",
mediaPresentationDuration: "PT#{video.info["length_seconds"]}S") do
xml.element("Period") do
i = 0
{"audio/mp4", "audio/webm"}.each do |mime_type|
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true) do
audio_streams.select { |stream| stream["type"].starts_with? mime_type }.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"].to_i * 1000
itag = fmt["itag"]
url = fmt["url"]
xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
value: "2")
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
end
end
end
end
i += 1
end
{"video/mp4", "video/webm"}.each do |mime_type|
heights = [] of Int32
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
video_streams.select { |stream| stream["type"].starts_with? mime_type }.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"]
itag = fmt["itag"]
url = fmt["url"]
width, height = fmt["size"].split("x").map { |i| i.to_i }
# Resolutions reported by YouTube player (may not accurately reflect source)
height = [4320, 2160, 1440, 1080, 720, 480, 360, 240, 144].sort_by { |i| (height - i).abs }[0]
next if unique_res && heights.includes? height
heights << height
xml.element("Representation", id: itag, codecs: codecs, width: width, height: height,
startWithSAP: "1", maxPlayoutRate: "1",
bandwidth: bandwidth, frameRate: fmt["fps"]) do
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
end
end
end
end
i += 1
end
end
end
end
end
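# Proxies HLS variant playlists from YouTube, rewriting URLs to this instance when `local=true`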
get "/api/manifest/hls_variant/*" do |env|
client = make_client(YT_URL)
manifest = client.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
next
end
local = env.params.query["local"]?.try &.== "true"
env.response.content_type = "application/x-mpegURL"
env.response.headers.add("Access-Control-Allow-Origin", "*")
host_url = make_host_url(config, Kemal.config)
manifest = manifest.body
if local
manifest = manifest.gsub("https://www.youtube.com", host_url)
manifest = manifest.gsub("index.m3u8", "index.m3u8?local=true")
end
manifest
end
get "/api/manifest/hls_playlist/*" do |env|
client = make_client(YT_URL)
manifest = client.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
next
end
local = env.params.query["local"]?.try &.== "true"
env.response.content_type = "application/x-mpegURL"
env.response.headers.add("Access-Control-Allow-Origin", "*")
host_url = make_host_url(config, Kemal.config)
manifest = manifest.body
if local
manifest = manifest.gsub(/^https:\/\/r\d---.{11}\.c\.youtube\.com[^\n]*/m) do |match|
path = URI.parse(match).path
path = path.lchop("/videoplayback/")
path = path.rchop("/")
path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
mimetype = mimetype.split("/")
mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
end
path = path.split("/")
raw_params = {} of String => Array(String)
path.each_slice(2) do |pair|
key, value = pair
value = URI.unescape(value)
if raw_params[key]?
raw_params[key] << value
else
raw_params[key] = [value]
end
end
raw_params = HTTP::Params.new(raw_params)
if fvip = raw_params["hls_chunk_host"].match(/r(?<fvip>\d+)---/)
raw_params["fvip"] = fvip["fvip"]
end
raw_params["local"] = "true"
"#{host_url}/videoplayback?#{raw_params}"
end
end
manifest
end
# YouTube /videoplayback links expire after 6 hours,
# so we have a mechanism here to redirect to the latest version
get "/latest_version" do |env|
if env.params.query["download_widget"]?
download_widget = JSON.parse(env.params.query["download_widget"])
id = download_widget["id"].as_s
title = download_widget["title"].as_s
if label = download_widget["label"]?
env.redirect "/api/v1/captions/#{id}?label=#{label}&title=#{title}"
next
else
itag = download_widget["itag"].as_s
local = "true"
end
end
id ||= env.params.query["id"]?
itag ||= env.params.query["itag"]?
region = env.params.query["region"]?
local ||= env.params.query["local"]?
local ||= "false"
local = local == "true"
if !id || !itag
env.response.status_code = 400
next
end
video = get_video(id, PG_DB, region: region)
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
urls = (fmt_stream + adaptive_fmts).select { |fmt| fmt["itag"] == itag }
if urls.empty?
env.response.status_code = 404
next
elsif urls.size > 1
env.response.status_code = 409
next
end
url = urls[0]["url"]
if local
url = URI.parse(url).full_path.not_nil!
end
if title
url += "&title=#{title}"
end
env.redirect url
end
options "/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/api/manifest/dash/id/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/api/manifest/dash/id/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
get "/videoplayback/*" do |env|
path = env.request.path
path = path.lchop("/videoplayback/")
path = path.rchop("/")
path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
mimetype = mimetype.split("/")
mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
end
path = path.split("/")
raw_params = {} of String => Array(String)
path.each_slice(2) do |pair|
key, value = pair
value = URI.unescape(value)
if raw_params[key]?
raw_params[key] << value
else
raw_params[key] = [value]
end
end
query_params = HTTP::Params.new(raw_params)
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect "/videoplayback?#{query_params}"
end
get "/videoplayback" do |env|
query_params = env.params.query
fvip = query_params["fvip"]? || "3"
mns = query_params["mn"]?.try &.split(",")
mns ||= [] of String
if query_params["region"]?
region = query_params["region"]
query_params.delete("region")
end
if query_params["host"]? && !query_params["host"].empty?
host = "https://#{query_params["host"]}"
query_params.delete("host")
else
host = "https://r#{fvip}---#{mns.pop}.googlevideo.com"
end
url = "/videoplayback?#{query_params.to_s}"
headers = HTTP::Headers.new
REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
response = HTTP::Client::Response.new(403)
5.times do
begin
client = make_client(URI.parse(host), region)
response = client.head(url, headers)
break
rescue Socket::Addrinfo::Error
if !mns.empty?
mn = mns.pop
end
fvip = "3"
host = "https://r#{fvip}---#{mn}.googlevideo.com"
rescue ex
end
end
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
host = url.host
env.response.headers["Access-Control-Allow-Origin"] = "*"
url = url.full_path
url += "&host=#{host}"
if region
url += "&region=#{region}"
end
next env.redirect url
end
if response.status_code >= 400
env.response.status_code = response.status_code
next
end
if url.includes? "&file=seg.ts"
begin
client = make_client(URI.parse(host), region)
client.get(url, headers) do |response|
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes?(key)
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if location = response.headers["Location"]?
location = URI.parse(location)
location = "#{location.full_path}&host=#{location.host}"
if region
location += "&region=#{region}"
end
next env.redirect location
end
IO.copy(response.body_io, env.response)
end
rescue ex
end
else
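# Not an HLS segment: proxy the file in HTTP_CHUNK_SIZE ranges and stitch the chunks into a single response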
content_length = nil
first_chunk = true
range_start, range_end = parse_range(env.request.headers["Range"]?)
chunk_start = range_start
chunk_end = range_end
if !chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE
chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
end
# TODO: Record bytes written so we can restart after a chunk fails
while true
if !range_end && content_length
range_end = content_length
end
if range_end && chunk_start > range_end
break
end
if range_end && chunk_end > range_end
chunk_end = range_end
end
headers["Range"] = "bytes=#{chunk_start}-#{chunk_end}"
begin
client = make_client(URI.parse(host), region)
client.get(url, headers) do |response|
if first_chunk
if !env.request.headers["Range"]? && response.status_code == 206
env.response.status_code = 200
else
env.response.status_code = response.status_code
end
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes?(key) && key != "Content-Range"
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if location = response.headers["Location"]?
location = URI.parse(location)
location = "#{location.full_path}&host=#{location.host}"
if region
location += "&region=#{region}"
end
env.redirect location
break
end
if title = query_params["title"]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}"
end
if !response.headers.includes_word?("Transfer-Encoding", "chunked")
content_length = response.headers["Content-Range"].split("/")[-1].to_i64
if env.request.headers["Range"]?
env.response.headers["Content-Range"] = "bytes #{range_start}-#{range_end || (content_length - 1)}/#{content_length}"
env.response.content_length = ((range_end.try &.+ 1) || content_length) - range_start
else
env.response.content_length = content_length
end
end
end
proxy_file(response, env)
end
rescue ex
if ex.message != "Error reading socket: Connection reset by peer"
break
end
end
chunk_start = chunk_end + 1
chunk_end += HTTP_CHUNK_SIZE
first_chunk = false
end
end
end
# This empty route is needed so that the /ggpht/* route below works as expected
get "/ggpht*" do |env|
end
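# Proxies channel images (avatars and banners) from yt3.ggpht.com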
get "/ggpht/*" do |env|
host = "https://yt3.ggpht.com"
client = make_client(URI.parse(host))
url = env.request.path.lchop("/ggpht")
headers = HTTP::Headers.new
REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
begin
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if response.status_code >= 300
env.response.headers.delete("Transfer-Encoding")
break
end
proxy_file(response, env)
end
rescue ex
end
end
options "/sb/:id/:storyboard/:index" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
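# Proxies video storyboard images from i.ytimg.com / i9.ytimg.com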
get "/sb/:id/:storyboard/:index" do |env|
id = env.params.url["id"]
storyboard = env.params.url["storyboard"]
index = env.params.url["index"]
if storyboard.starts_with? "storyboard_live"
host = "https://i.ytimg.com"
else
host = "https://i9.ytimg.com"
end
client = make_client(URI.parse(host))
url = "/sb/#{id}/#{storyboard}/#{index}?#{env.params.query}"
headers = HTTP::Headers.new
REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
begin
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if response.status_code >= 300
env.response.headers.delete("Transfer-Encoding")
break
end
proxy_file(response, env)
end
rescue ex
end
end
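# Proxies video thumbnails from i.ytimg.com; 'maxres.jpg' falls back to the largest thumbnail that actually exists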
get "/vi/:id/:name" do |env|
id = env.params.url["id"]
name = env.params.url["name"]
host = "https://i.ytimg.com"
client = make_client(URI.parse(host))
if name == "maxres.jpg"
build_thumbnails(id, config, Kemal.config).each do |thumb|
if client.head("/vi/#{id}/#{thumb[:url]}.jpg").status_code == 200
name = thumb[:url] + ".jpg"
break
end
end
end
url = "/vi/#{id}/#{name}"
headers = HTTP::Headers.new
REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
begin
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if response.status_code >= 300 && response.status_code != 404
env.response.headers.delete("Transfer-Encoding")
break
end
proxy_file(response, env)
end
rescue ex
end
end
# Undocumented YouTube endpoint that creates an anonymous playlist from the specified 'video_ids'
get "/watch_videos" do |env|
client = make_client(YT_URL)
response = client.get("#{env.request.path}?#{env.request.query}")
if url = response.headers["Location"]?
url = URI.parse(url).full_path
next env.redirect url
end
env.response.status_code = response.status_code
end
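# 404 handler: bare paths are resolved as channel branding URLs or video IDs before falling back to the home page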
error 404 do |env|
if md = env.request.path.match(/^\/(?<id>([a-zA-Z0-9_-]{11})|(\w+))$/)
item = md["id"]
# Check whether item is a branding URL, e.g. https://youtube.com/gaming
client = make_client(YT_URL)
response = client.get("/#{item}")
if response.status_code == 301
response = client.get(response.headers["Location"])
end
if response.body.empty?
env.response.headers["Location"] = "/"
halt env, status_code: 302
end
html = XML.parse_html(response.body)
ucid = html.xpath_node(%q(//meta[@itemprop="channelId"]))
if ucid
env.response.headers["Location"] = "/channel/#{ucid["content"]}"
halt env, status_code: 302
end
params = [] of String
env.params.query.each do |k, v|
params << "#{k}=#{v}"
end
params = params.join("&")
url = "/watch?v=#{item}"
if !params.empty?
url += "&#{params}"
end
# Check whether item is a video ID
client = make_client(YT_URL)
if item.match(/^[a-zA-Z0-9_-]{11}$/) && client.head("/watch?v=#{item}").status_code != 404
env.response.headers["Location"] = url
halt env, status_code: 302
end
end
env.response.headers["Location"] = "/"
halt env, status_code: 302
end
error 500 do |env|
error_message = <<-END_HTML
Looks like you've found a bug in Invidious. Feel free to open a new issue
<a href="https://github.com/omarroth/invidious/issues">
here
</a>
or send an email to
<a href="mailto:omarroth@protonmail.com">
omarroth@protonmail.com</a>.
END_HTML
templated "error"
end
# When SSL is enabled, spawn a plain-HTTP server on port 80 that issues 301 redirects to HTTPS
if Kemal.config.ssl
spawn do
server = HTTP::Server.new do |env|
redirect_url = "https://#{env.request.host}#{env.request.path}"
if env.request.query
redirect_url += "?#{env.request.query}"
end
if config.hsts
env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
end
env.response.headers["Location"] = redirect_url
env.response.status_code = 301
end
server.bind_tcp "0.0.0.0", 80
server.listen
end
end
static_headers do |response, filepath, filestat|
response.headers.add("Cache-Control", "max-age=2629800")
end
public_folder "assets"
Kemal.config.powered_by_header = false
add_handler FilteredCompressHandler.new
add_handler APIHandler.new
add_handler AuthHandler.new
add_handler DenyFrame.new
add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
add_context_storage_type(User)
Kemal.config.logger = logger
Kemal.run