local utils = require 'mp.utils'
local msg = require 'mp.msg'
local options = require 'mp.options'

local o = {
    exclude = "",
    try_ytdl_first = false,
    use_manifests = false,
    all_formats = false,
    force_all_formats = true,
    thumbnails = "none",
    ytdl_path = "",
}

local ytdl = {
    path = "",
    paths_to_search = {"yt-dlp", "yt-dlp_x86", "youtube-dl"},
    searched = false,
    blacklisted = {}
}
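-- Illustrative note (added comment, not part of the original script): these
-- options are normally adjusted via mpv's script-opts mechanism, e.g. in
-- mpv.conf:
--   script-opts-append=ytdl_hook-ytdl_path=yt-dlp
--   script-opts-append=ytdl_hook-all_formats=yes
-- The option names follow the keys of the `o` table above.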
options.read_options(o, nil, function()
    ytdl.blacklisted = {} -- reparse o.exclude next time
    ytdl.searched = false
end)

local chapter_list = {}
local playlist_cookies = {}

function Set (t)
    local set = {}
    for _, v in pairs(t) do set[v] = true end
    return set
end
-- ?: surrogate (keep in mind that there is no lazy evaluation)
function iif(cond, if_true, if_false)
    if cond then
        return if_true
    end
    return if_false
end

-- youtube-dl JSON name to mpv tag name
local tag_list = {
    ["uploader"] = "uploader",
    ["channel_url"] = "channel_url",
    -- these titles tend to be a bit too long, so hide them on the terminal
    -- (default --display-tags does not include this name)
    ["description"] = "ytdl_description",
    -- "title" is handled by force-media-title
    -- tags don't work with all_formats=yes
}

local safe_protos = Set {
    "http", "https", "ftp", "ftps",
    "rtmp", "rtmps", "rtmpe", "rtmpt", "rtmpts", "rtmpte",
    "data"
}

-- For some sites, youtube-dl returns the audio codec (?) only in the "ext" field.
local ext_map = {
    ["mp3"] = "mp3",
    ["opus"] = "opus",
}

local codec_map = {
    -- src pattern = mpv codec
    ["vtt"] = "webvtt",
    ["opus"] = "opus",
    ["vp9"] = "vp9",
    ["avc1%..*"] = "h264",
    ["av01%..*"] = "av1",
    ["mp4a%..*"] = "aac",
}
-- Codec name as reported by youtube-dl mapped to mpv internal codec names.
-- Fun fact: mpv will not really use the codec, but will still try to initialize
-- the codec on track selection (just to scrap it), meaning it's only a hint,
-- but one that may make initialization fail. On the other hand, if the codec
-- is valid but completely different from the actual media, nothing bad happens.
local function map_codec_to_mpv(codec)
    if codec == nil then
        return nil
    end
    for k, v in pairs(codec_map) do
        local s, e = codec:find(k)
        if s == 1 and e == #codec then
            return v
        end
    end
    return nil
end
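-- Illustrative examples (added comment, not in the original script):
--   map_codec_to_mpv("avc1.64001F") --> "h264"  (pattern "avc1%..*")
--   map_codec_to_mpv("opus")        --> "opus"  (exact match)
--   map_codec_to_mpv("unknown123")  --> nil     (no pattern covers the whole name)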
local function platform_is_windows()
    return mp.get_property_native("platform") == "windows"
end

local function exec(args)
    msg.debug("Running: " .. table.concat(args, " "))

    return mp.command_native({
        name = "subprocess",
        args = args,
        capture_stdout = true,
        capture_stderr = true,
    })
end

-- return true if it was explicitly set on the command line
local function option_was_set(name)
    return mp.get_property_bool("option-info/" ..name.. "/set-from-commandline",
                                false)
end

-- return true if the option was set locally
local function option_was_set_locally(name)
    return mp.get_property_bool("option-info/" ..name.. "/set-locally", false)
end
-- youtube-dl may set special http headers for some sites (user-agent, cookies)
local function set_http_headers(http_headers)
    if not http_headers then
        return
    end
    local headers = {}
    local useragent = http_headers["User-Agent"]
    if useragent and not option_was_set("user-agent") then
        mp.set_property("file-local-options/user-agent", useragent)
    end
    local additional_fields = {"Cookie", "Referer", "X-Forwarded-For"}
    for _, item in pairs(additional_fields) do
        local field_value = http_headers[item]
        if field_value then
            headers[#headers + 1] = item .. ": " .. field_value
        end
    end
    if #headers > 0 and not option_was_set("http-header-fields") then
        mp.set_property_native("file-local-options/http-header-fields", headers)
    end
end
local special_cookie_field_names = Set {
    "expires", "max-age", "domain", "path"
}

-- parse single-line Set-Cookie syntax
local function parse_cookies(cookies_line)
    if not cookies_line then
        return {}
    end

    local cookies = {}
    local cookie = {}

    for stem in cookies_line:gmatch('[^;]+') do
        stem = stem:gsub("^%s*(.-)%s*$", "%1")
        local name, value = stem:match('^(.-)=(.+)$')
        if name and name ~= "" and value then
            local cmp_name = name:lower()
            if special_cookie_field_names[cmp_name] then
                cookie[cmp_name] = value
            else
                if cookie.name and cookie.value then
                    local cookie_key = cookie.domain .. ":" .. cookie.name
                    cookies[cookie_key] = cookie
                end
                cookie = {
                    name = name,
                    value = value,
                }
            end
        end
    end

    if cookie.name and cookie.value then
        local cookie_key = cookie.domain .. ":" .. cookie.name
        cookies[cookie_key] = cookie
    end

    return cookies
end
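-- Illustrative example (added comment): for an input line such as
--   "sid=abc123; Domain=example.com; Path=/"
-- parse_cookies() yields an entry keyed by "<domain>:<name>", i.e.
--   cookies["example.com:sid"] =
--       { name = "sid", value = "abc123", domain = "example.com", path = "/" }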
-- serialize cookies for avformat
local function serialize_cookies_for_avformat(cookies)
    local result = ''
    for _, cookie in pairs(cookies) do
        local cookie_str = ('%s=%s; '):format(cookie.name, cookie.value)
        for k, v in pairs(cookie) do
            if k ~= "name" and k ~= "value" then
                cookie_str = cookie_str .. ('%s=%s; '):format(k, v)
            end
        end
        result = result .. cookie_str .. '\r\n'
    end
    return result
end
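-- Illustrative example (added comment): the cookie from the example above
-- would serialize to roughly
--   "sid=abc123; domain=example.com; path=/; \r\n"
-- which is the one-cookie-per-line form accepted by libavformat's "cookies"
-- option (the attribute order depends on pairs() iteration).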
-- set file-local cookies, preserving existing ones
local function set_cookies(cookies)
    if not cookies or cookies == "" then
        return
    end

    local option_key = "file-local-options/stream-lavf-o"
    local stream_opts = mp.get_property_native(option_key, {})
    local existing_cookies = parse_cookies(stream_opts["cookies"])

    local new_cookies = parse_cookies(cookies)
    for cookie_key, cookie in pairs(new_cookies) do
        if not existing_cookies[cookie_key] then
            existing_cookies[cookie_key] = cookie
        end
    end

    stream_opts["cookies"] = serialize_cookies_for_avformat(existing_cookies)
    mp.set_property_native(option_key, stream_opts)
end

local function append_libav_opt(props, name, value)
    if not props then
        props = {}
    end

    if name and value and not props[name] then
        props[name] = value
    end

    return props
end
local function edl_escape(url)
    return "%" .. string.len(url) .. "%" .. url
end
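-- Illustrative example (added comment): EDL escaping is length-prefixed, so
--   edl_escape("https://example.com/a.m4s") --> "%25%https://example.com/a.m4s"
-- i.e. "%<byte length>%<url>", which keeps ";" and "," inside URLs from being
-- misread as EDL separators.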
local function url_is_safe(url)
    local proto = type(url) == "string" and url:match("^(%a[%w+.-]*):") or nil
    local safe = proto and safe_protos[proto]
    if not safe then
        msg.error(("Ignoring potentially unsafe url: '%s'"):format(url))
    end
    return safe
end
local function time_to_secs(time_string)
    local ret

    local a, b, c = time_string:match("(%d+):(%d%d?):(%d%d)")
    if a ~= nil then
        ret = (a*3600 + b*60 + c)
    else
        a, b = time_string:match("(%d%d?):(%d%d)")
        if a ~= nil then
            ret = (a*60 + b)
        end
    end

    return ret
end

local function extract_chapters(data, video_length)
    local ret = {}

    for line in data:gmatch("[^\r\n]+") do
        local time = time_to_secs(line)
        if time and (time < video_length) then
            table.insert(ret, {time = time, title = line})
        end
    end
    table.sort(ret, function(a, b) return a.time < b.time end)
    return ret
end
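-- Illustrative example (added comment): given a video description containing
-- the lines "00:00 Intro" and "1:02:30 Outro", extract_chapters() produces
-- chapter entries at 0 and 3750 seconds; lines without a recognizable
-- timestamp, or past the video length, are skipped.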
local function is_blacklisted(url)
    if o.exclude == "" then return false end
    if #ytdl.blacklisted == 0 then
        for match in o.exclude:gmatch('%|?([^|]+)') do
            ytdl.blacklisted[#ytdl.blacklisted + 1] = match
        end
    end
    if #ytdl.blacklisted > 0 then
        url = url:match('https?://(.+)')
        for _, exclude in ipairs(ytdl.blacklisted) do
            if url:match(exclude) then
                msg.verbose('URL matches excluded substring. Skipping.')
                return true
            end
        end
    end
    return false
end
local function parse_yt_playlist(url, json)
    -- return 0-based index to use with --playlist-start

    if not json.extractor or
        (json.extractor ~= "youtube:tab" and
        json.extractor ~= "youtube:playlist") then
        return nil
    end

    local query = url:match("%?.+")
    if not query then return nil end

    local args = {}
    for arg, param in query:gmatch("(%a+)=([^&?]+)") do
        if arg and param then
            args[arg] = param
        end
    end

    local maybe_idx = tonumber(args["index"])

    -- if index matches v param it's probably the requested item
    if maybe_idx and #json.entries >= maybe_idx and
        json.entries[maybe_idx].id == args["v"] then
        msg.debug("index matches requested video")
        return maybe_idx - 1
    end

    -- if there's no index or it doesn't match, look for video
    for i = 1, #json.entries do
        if json.entries[i].id == args["v"] then
            msg.debug("found requested video in index " .. (i - 1))
            return i - 1
        end
    end

    msg.debug("requested video not found in playlist")
    -- if item isn't on the playlist, give up
    return nil
end
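-- Illustrative example (added comment): for a URL like
--   https://www.youtube.com/watch?v=abc123&list=PLxyz&index=3
-- the function returns 2 (0-based) if entry 3 of the playlist JSON really has
-- the id "abc123"; otherwise it scans all entries for that id.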
local function make_absolute_url(base_url, url)
    if url:find("https?://") == 1 then return url end

    local proto, domain, rest =
        base_url:match("(https?://)([^/]+/)(.*)/?")
    local segs = {}
    rest:gsub("([^/]+)", function(c) table.insert(segs, c) end)
    url:gsub("([^/]+)", function(c) table.insert(segs, c) end)
    local resolved_url = {}
    for i, v in ipairs(segs) do
        if v == ".." then
            table.remove(resolved_url)
        elseif v ~= "." then
            table.insert(resolved_url, v)
        end
    end
    return proto .. domain ..
        table.concat(resolved_url, "/")
end
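-- Illustrative example (added comment):
--   make_absolute_url("https://cdn.example.com/v/seg/", "../init.mp4")
--     --> "https://cdn.example.com/v/init.mp4"
-- Absolute "https?://" inputs are returned unchanged.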
local function join_url(base_url, fragment)
    local res = ""
    if base_url and fragment.path then
        res = make_absolute_url(base_url, fragment.path)
    elseif fragment.url then
        res = fragment.url
    end
    return res
end
local function edl_track_joined(fragments, protocol, is_live, base)
    if type(fragments) ~= "table" or not fragments[1] then
        msg.debug("No fragments to join into EDL")
        return nil
    end

    local edl = "edl://"
    local offset = 1
    local parts = {}

    if protocol == "http_dash_segments" and not is_live then
        msg.debug("Using dash")
        local args = ""

        -- assume MP4 DASH initialization segment
        if not fragments[1].duration and #fragments > 1 then
            msg.debug("Using init segment")
            args = args .. ",init=" .. edl_escape(join_url(base, fragments[1]))
            offset = 2
        end

        table.insert(parts, "!mp4_dash" .. args)

        -- Check remaining fragments for duration;
        -- if not available in all, give up.
        for i = offset, #fragments do
            if not fragments[i].duration then
                msg.verbose("EDL doesn't support fragments " ..
                            "without duration with MP4 DASH")
                return nil
            end
        end
    end

    for i = offset, #fragments do
        local fragment = fragments[i]
        if not url_is_safe(join_url(base, fragment)) then
            return nil
        end
        table.insert(parts, edl_escape(join_url(base, fragment)))
        if fragment.duration then
            parts[#parts] =
                parts[#parts] .. ",length="..fragment.duration
        end
    end
    return edl .. table.concat(parts, ";") .. ";"
end
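-- Illustrative example (added comment): two 4-second DASH fragments plus an
-- init segment would join into an EDL URL roughly of the form
--   edl://!mp4_dash,init=%N%<init-url>;%N%<frag1-url>,length=4;%N%<frag2-url>,length=4;
-- where each %N% prefix is the length-escaping from edl_escape().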
local function has_native_dash_demuxer()
    local demuxers = mp.get_property_native("demuxer-lavf-list", {})
    for _, v in ipairs(demuxers) do
        if v == "dash" then
            return true
        end
    end
    return false
end

local function valid_manifest(json)
    local reqfmt = json["requested_formats"] and json["requested_formats"][1] or {}
    if not reqfmt["manifest_url"] and not json["manifest_url"] then
        return false
    end
    local proto = reqfmt["protocol"] or json["protocol"] or ""
    return (proto == "http_dash_segments" and has_native_dash_demuxer()) or
        proto:find("^m3u8")
end

local function as_integer(v, def)
    def = def or 0
    local num = math.floor(tonumber(v) or def)
    if num > -math.huge and num < math.huge then
        return num
    end
    return def
end

local function tags_to_edl(json)
    local tags = {}
    for json_name, mp_name in pairs(tag_list) do
        local v = json[json_name]
        if v then
            tags[#tags + 1] = mp_name .. "=" .. edl_escape(tostring(v))
        end
    end
    if #tags == 0 then
        return nil
    end
    return "!global_tags," .. table.concat(tags, ",")
end
-- Convert a format list from youtube-dl to an EDL URL, or plain URL.
--  json: full json blob by youtube-dl
--  formats: format list by youtube-dl
--  use_all_formats: if=true, then formats is the full format list, and the
--                   function will attempt to return them as delay-loaded tracks
-- See res table initialization in the function for result type.
local function formats_to_edl(json, formats, use_all_formats)
    local res = {
        -- the media URL, which may be EDL
        url = nil,
        -- for use_all_formats=true: whether any muxed formats are present, and
        -- at the same time the separate EDL parts don't have both audio/video
        muxed_needed = false,
    }

    local default_formats = {}
    local requested_formats = json["requested_formats"] or json["requested_downloads"]
    if use_all_formats and requested_formats then
        for _, track in ipairs(requested_formats) do
            local id = track["format_id"]
            if id then
                default_formats[id] = true
            end
        end
    end

    local duration = as_integer(json["duration"])
    local single_url = nil
    local streams = {}

    local tbr_only = true
    for index, track in ipairs(formats) do
        tbr_only = tbr_only and track["tbr"] and
                   (not track["abr"]) and (not track["vbr"])
    end

    local has_requested_video = false
    local has_requested_audio = false
    -- Web players with quality selection always show the highest quality
    -- option at the top. Since tracks are usually listed with the first
    -- track at the top, that should also be the highest quality track.
    -- yt-dlp/youtube-dl sorts its formats from worst to best.
    -- Iterate in reverse to get the best track first.
    for index = #formats, 1, -1 do
        local track = formats[index]
        local edl_track = nil
        edl_track = edl_track_joined(track.fragments,
            track.protocol, json.is_live,
            track.fragment_base_url)
        if not edl_track and not url_is_safe(track.url) then
            msg.error("No safe URL or supported fragmented stream available")
            return nil
        end

        local is_default = default_formats[track["format_id"]]
        local tracks = {}
        -- "none" means it is not a video
        -- nil means it is unknown
        if (o.force_all_formats or track.vcodec) and track.vcodec ~= "none" then
            tracks[#tracks + 1] = {
                media_type = "video",
                codec = map_codec_to_mpv(track.vcodec),
            }
            if is_default then
                has_requested_video = true
            end
        end
        if (o.force_all_formats or track.acodec) and track.acodec ~= "none" then
            tracks[#tracks + 1] = {
                media_type = "audio",
                codec = map_codec_to_mpv(track.acodec) or
                        ext_map[track.ext],
            }
            if is_default then
                has_requested_audio = true
            end
        end

        local url = edl_track or track.url
        local hdr = {"!new_stream", "!no_clip", "!no_chapters"}
        local skip = #tracks == 0
        local params = ""

        if use_all_formats then
            for _, sub in ipairs(tracks) do
                -- A single track that is either audio or video. Delay load it.
                local props = ""
                if sub.media_type == "video" then
                    props = props .. ",w=" .. as_integer(track.width)
                            .. ",h=" .. as_integer(track.height)
                            .. ",fps=" .. as_integer(track.fps)
                elseif sub.media_type == "audio" then
                    props = props .. ",samplerate=" .. as_integer(track.asr)
                end
                hdr[#hdr + 1] = "!delay_open,media_type=" .. sub.media_type ..
                                ",codec=" .. (sub.codec or "null") .. props

                -- Add bitrate information etc. for better user selection.
                local byterate = 0
                local rates = {"tbr", "vbr", "abr"}
                if #tracks > 1 then
                    rates = {({video = "vbr", audio = "abr"})[sub.media_type]}
                end
                if tbr_only then
                    rates = {"tbr"}
                end
                for _, f in ipairs(rates) do
                    local br = as_integer(track[f])
                    if br > 0 then
                        byterate = math.floor(br * 1000 / 8)
                        break
                    end
                end
                local title = track.format or track.format_note or ""
                if #tracks > 1 then
                    if #title > 0 then
                        title = title .. " "
                    end
                    title = title .. "muxed-" .. index
                end
                local flags = {}
                if is_default then
                    flags[#flags + 1] = "default"
                end
                hdr[#hdr + 1] = "!track_meta,title=" ..
                                edl_escape(title) .. ",byterate=" .. byterate ..
                                iif(#flags > 0, ",flags=" .. table.concat(flags, "+"), "")
            end

            if duration > 0 then
                params = params .. ",length=" .. duration
            end
        end

        if not skip then
            hdr[#hdr + 1] = edl_escape(url) .. params

            streams[#streams + 1] = table.concat(hdr, ";")
            -- In case there is only 1 of these streams.
            -- Note: assumes it has no important EDL headers
            single_url = url
        end
    end

    local tags = tags_to_edl(json)

    -- Merge all tracks into a single virtual file, but avoid EDL if it's
    -- only a single track without metadata (i.e. redundant).
    if #streams == 1 and single_url and not tags then
        res.url = single_url
    elseif #streams > 0 then
        if tags then
            -- not a stream; just for the sake of concatenating the EDL string
            streams[#streams + 1] = tags
        end
        res.url = "edl://" .. table.concat(streams, ";")
    else
        return nil
    end

    if has_requested_audio ~= has_requested_video then
        local not_req_prop = has_requested_video and "aid" or "vid"
        if mp.get_property(not_req_prop) == "auto" then
            mp.set_property("file-local-options/" .. not_req_prop, "no")
        end
    end

    return res
end
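-- Illustrative example (added comment): with all_formats enabled, the result
-- URL strings several delay-loaded streams together, roughly
--   edl://!new_stream;!no_clip;!no_chapters;!delay_open,media_type=video,codec=h264,w=1920,h=1080,fps=30;!track_meta,title=...,byterate=...;%N%<url>;!new_stream;...
-- so mpv can expose every format as a selectable track without opening it.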
local function add_single_video(json)
    local streamurl = ""
    local format_info = ""
    local max_bitrate = 0
    local requested_formats = json["requested_formats"] or json["requested_downloads"]
    local all_formats = json["formats"]
    local has_requested_formats = requested_formats and #requested_formats > 0
    local http_headers = has_requested_formats
                         and requested_formats[1].http_headers
                         or json.http_headers
    local cookies = has_requested_formats
                    and requested_formats[1].cookies
                    or json.cookies

    if o.use_manifests and valid_manifest(json) then
        -- prefer manifest_url if present
        format_info = "manifest"

        local mpd_url = requested_formats and
            requested_formats[1]["manifest_url"] or json["manifest_url"]
        if not mpd_url then
            msg.error("No manifest URL found in JSON data.")
            return
        elseif not url_is_safe(mpd_url) then
            return
        end

        streamurl = mpd_url

        if requested_formats then
            for _, track in pairs(requested_formats) do
                max_bitrate = (track.tbr and track.tbr > max_bitrate) and
                              track.tbr or max_bitrate
            end
        elseif json.tbr then
            max_bitrate = json.tbr > max_bitrate and json.tbr or max_bitrate
        end
    end

    if streamurl == "" then
        -- possibly DASH/split tracks
        local res = nil

        -- Not having requested_formats usually hints to HLS master playlist
        -- usage, which we don't want to split off, at least not yet.
        if (all_formats and o.all_formats) and
           (has_requested_formats or o.force_all_formats)
        then
            format_info = "all_formats (separate)"
            res = formats_to_edl(json, all_formats, true)
            -- Note: since we don't delay-load muxed streams, use normal stream
            -- selection if we have to use muxed streams.
            if res and res.muxed_needed then
                res = nil
            end
        end

        if not res and has_requested_formats then
            format_info = "youtube-dl (separate)"
            res = formats_to_edl(json, requested_formats, false)
        end

        if res then
            streamurl = res.url
        end
    end

    if streamurl == "" and json.url then
        format_info = "youtube-dl (single)"
        local edl_track = nil
        edl_track = edl_track_joined(json.fragments, json.protocol,
            json.is_live, json.fragment_base_url)

        if not edl_track and not url_is_safe(json.url) then
            return
        end
        -- normal video or single track
        streamurl = edl_track or json.url
    end

    if streamurl == "" then
        msg.error("No URL found in JSON data.")
        return
    end
    set_http_headers(http_headers)

    msg.verbose("format selection: " .. format_info)
    msg.debug("streamurl: " .. streamurl)

    mp.set_property("stream-open-filename", streamurl:gsub("^data:", "data://", 1))

    if mp.get_property("force-media-title", "") == "" then
        mp.set_property("file-local-options/force-media-title", json.title)
    end

    -- set hls-bitrate for dash track selection
    if max_bitrate > 0 and
        not option_was_set("hls-bitrate") and
        not option_was_set_locally("hls-bitrate") then
        mp.set_property_native('file-local-options/hls-bitrate', max_bitrate*1000)
    end

    -- add subtitles
    if json.requested_subtitles ~= nil then
        local subs = {}
        for lang, info in pairs(json.requested_subtitles) do
            subs[#subs + 1] = {lang = lang or "-", info = info}
        end
        table.sort(subs, function(a, b) return a.lang < b.lang end)
        for _, e in ipairs(subs) do
            local lang, sub_info = e.lang, e.info
            msg.verbose("adding subtitle ["..lang.."]")

            local sub = nil

            if sub_info.data ~= nil then
                sub = "memory://"..sub_info.data
            elseif sub_info.url ~= nil and
                url_is_safe(sub_info.url) then
                sub = sub_info.url
            end

            if sub ~= nil then
                local edl = "edl://!no_clip;!delay_open,media_type=sub"
                local codec = map_codec_to_mpv(sub_info.ext)
                if codec then
                    edl = edl .. ",codec=" .. codec
                end
                edl = edl .. ";" .. edl_escape(sub)
                local title = sub_info.name or sub_info.ext
                mp.commandv("sub-add", edl, "auto", title, lang)
            else
                msg.verbose("No subtitle data/url for ["..lang.."]")
            end
        end
    end
    -- add thumbnails
    if (o.thumbnails == 'all' or o.thumbnails == 'best') and json.thumbnails ~= nil then
        local thumb = nil
        local thumb_height = -1
        local thumb_preference = nil

        for i = #json.thumbnails, 1, -1 do
            local thumb_info = json.thumbnails[i]
            if thumb_info.url ~= nil then
                if o.thumbnails == 'all' then
                    msg.verbose("adding thumbnail")
                    mp.commandv("video-add", thumb_info.url, "auto")
                    thumb_height = 0
                elseif (thumb_preference ~= nil and (thumb_info.preference or -math.huge) > thumb_preference) or
                       (thumb_preference == nil and ((thumb_info.height or 0) > thumb_height)) then
                    thumb = thumb_info.url
                    thumb_height = thumb_info.height or 0
                    thumb_preference = thumb_info.preference
                end
            end
        end

        if thumb ~= nil then
            msg.verbose("adding thumbnail")
            mp.commandv("video-add", thumb, "auto")
        elseif thumb_height == -1 then
            msg.verbose("No thumbnail url")
        end
    end
    -- add chapters
    if json.chapters then
        msg.debug("Adding pre-parsed chapters")
        for i = 1, #json.chapters do
            local chapter = json.chapters[i]
            local title = chapter.title or ""
            if title == "" then
                title = string.format('Chapter %02d', i)
            end
            table.insert(chapter_list, {time=chapter.start_time, title=title})
        end
    elseif json.description ~= nil and json.duration ~= nil then
        chapter_list = extract_chapters(json.description, json.duration)
    end
    -- set start time
    if (json.start_time or json.section_start) and
        not option_was_set("start") and
        not option_was_set_locally("start") then
        local start_time = json.start_time or json.section_start
        msg.debug("Setting start to: " .. start_time .. " secs")
        mp.set_property("file-local-options/start", start_time)
    end
    -- set end time
    if (json.end_time or json.section_end) and
        not option_was_set("end") and
        not option_was_set_locally("end") then
        local end_time = json.end_time or json.section_end
        msg.debug("Setting end to: " .. end_time .. " secs")
        mp.set_property("file-local-options/end", end_time)
    end
    -- set aspect ratio for anamorphic video
    if json.stretched_ratio ~= nil and
        not option_was_set("video-aspect-override") then
        mp.set_property('file-local-options/video-aspect-override', json.stretched_ratio)
    end

    local stream_opts = mp.get_property_native("file-local-options/stream-lavf-o", {})

    -- for rtmp
    if json.protocol == "rtmp" then
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_tcurl", streamurl)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_pageurl", json.page_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_playpath", json.play_path)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_swfverify", json.player_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_swfurl", json.player_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_app", json.app)
    end

    if json.proxy and json.proxy ~= "" then
        stream_opts = append_libav_opt(stream_opts,
            "http_proxy", json.proxy)
    end

    if cookies and cookies ~= "" then
        local existing_cookies = parse_cookies(stream_opts["cookies"])
        local new_cookies = parse_cookies(cookies)
        for cookie_key, cookie in pairs(new_cookies) do
            existing_cookies[cookie_key] = cookie
        end
        stream_opts["cookies"] = serialize_cookies_for_avformat(existing_cookies)
    end

    mp.set_property_native("file-local-options/stream-lavf-o", stream_opts)
end
local function check_version(ytdl_path)
    local command = {
        name = "subprocess",
        capture_stdout = true,
        args = {ytdl_path, "--version"}
    }
    local version_string = mp.command_native(command).stdout
    local year, month, day = string.match(version_string, "(%d+).(%d+).(%d+)")

    -- sanity check
    if tonumber(year) < 2000 or tonumber(month) > 12 or
        tonumber(day) > 31 then
        return
    end
    local version_ts = os.time{year=year, month=month, day=day}
    if os.difftime(os.time(), version_ts) > 60*60*24*90 then
        msg.warn("It appears that your youtube-dl version is severely out of date.")
    end
end
function run_ytdl_hook(url)
    local start_time = os.clock()

    -- strip ytdl://
    if url:find("ytdl://") == 1 then
        url = url:sub(8)
    end

    local format = mp.get_property("options/ytdl-format")
    local raw_options = mp.get_property_native("options/ytdl-raw-options")
    local allsubs = true
    local proxy = nil
    local use_playlist = false

    local command = {
        ytdl.path, "--no-warnings", "-J", "--flat-playlist",
        "--sub-format", "ass/srt/best"
    }

    -- Checks if video option is "no", change format accordingly,
    -- but only if user didn't explicitly set one
    if mp.get_property("options/vid") == "no" and #format == 0 then
        format = "bestaudio/best"
        msg.verbose("Video disabled. Only using audio")
    end

    if format == "" then
        format = "bestvideo+bestaudio/best"
    end

    if format ~= "ytdl" then
        table.insert(command, "--format")
        table.insert(command, format)
    end

    for param, arg in pairs(raw_options) do
        table.insert(command, "--" .. param)
        if arg ~= "" then
            table.insert(command, arg)
        end
        if (param == "sub-lang" or param == "sub-langs" or param == "srt-lang") and (arg ~= "") then
            allsubs = false
        elseif param == "proxy" and arg ~= "" then
            proxy = arg
        elseif param == "yes-playlist" then
            use_playlist = true
        end
    end

    if allsubs == true then
        table.insert(command, "--all-subs")
    end
    if not use_playlist then
        table.insert(command, "--no-playlist")
    end
    table.insert(command, "--")
    table.insert(command, url)
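    -- Illustrative example (added comment): with a default configuration this
    -- builds a command roughly like
    --   yt-dlp --no-warnings -J --flat-playlist --sub-format ass/srt/best
    --     --format bestvideo+bestaudio/best --all-subs --no-playlist -- <url>
    -- with any extra arguments appended from --ytdl-raw-options.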
    local result
    if ytdl.searched then
        result = exec(command)
    else
        local separator = platform_is_windows() and ";" or ":"
        if o.ytdl_path:match("[^" .. separator .. "]") then
            ytdl.paths_to_search = {}
            for path in o.ytdl_path:gmatch("[^" .. separator .. "]+") do
                table.insert(ytdl.paths_to_search, path)
            end
        end

        for _, path in pairs(ytdl.paths_to_search) do
            -- search for youtube-dl in mpv's config dir
            local exesuf = platform_is_windows() and not path:lower():match("%.exe$") and ".exe" or ""
            local ytdl_cmd = mp.find_config_file(path .. exesuf)
            if ytdl_cmd then
                msg.verbose("Found youtube-dl at: " .. ytdl_cmd)
                ytdl.path = ytdl_cmd
                command[1] = ytdl.path
                result = exec(command)
                break
            else
                msg.verbose("No youtube-dl found with path " .. path .. exesuf .. " in config directories")
                command[1] = path
                result = exec(command)
                if result.error_string == "init" then
                    msg.verbose("youtube-dl with path " .. path .. " not found in PATH or not enough permissions")
                else
                    msg.verbose("Found youtube-dl with path " .. path .. " in PATH")
                    ytdl.path = path
                    break
                end
            end
        end

        ytdl.searched = true
    end
    if result.killed_by_us then
        return
    end

    local json = result.stdout
    local parse_err = nil

    if result.status ~= 0 or json == "" then
        json = nil
    elseif json then
        json, parse_err = utils.parse_json(json)
    end

    if json == nil then
        msg.verbose("status:", result.status)
        msg.verbose("reason:", result.error_string)
        msg.verbose("stdout:", result.stdout)
        msg.verbose("stderr:", result.stderr)

        -- trim our stderr to avoid spurious newlines
        ytdl_err = result.stderr:gsub("^%s*(.-)%s*$", "%1")
        msg.error(ytdl_err)
        local err = "youtube-dl failed: "
        if result.error_string and result.error_string == "init" then
            err = err .. "not found or not enough permissions"
        elseif parse_err then
            err = err .. "failed to parse JSON data: " .. parse_err
        else
            err = err .. "unexpected error occurred"
        end
        msg.error(err)
        if parse_err or string.find(ytdl_err, "yt%-dl%.org/bug") then
            check_version(ytdl.path)
        end
        return
    end

    msg.verbose("youtube-dl succeeded!")
    msg.debug('ytdl parsing took '..os.clock()-start_time..' seconds')
    json["proxy"] = json["proxy"] or proxy

    -- what did we get?
    if json["direct"] then
        -- direct URL, nothing to do
        msg.verbose("Got direct URL")
        return
    elseif json["_type"] == "playlist" or
        json["_type"] == "multi_video" then
        -- a playlist

        if #json.entries == 0 then
            msg.warn("Got empty playlist, nothing to play.")
            return
        end

        local self_redirecting_url =
            json.entries[1]["_type"] ~= "url_transparent" and
            json.entries[1]["webpage_url"] and
            json.entries[1]["webpage_url"] == json["webpage_url"]

        -- some funky guessing to detect multi-arc videos
        if self_redirecting_url and #json.entries > 1
            and json.entries[1].protocol == "m3u8_native"
            and json.entries[1].url then
            msg.verbose("multi-arc video detected, building EDL")

            local playlist = edl_track_joined(json.entries)

            msg.debug("EDL: " .. playlist)

            if not playlist then
                return
            end

            -- can't change the http headers for each entry, so use the 1st
            set_http_headers(json.entries[1].http_headers)
            set_cookies(json.entries[1].cookies or json.cookies)

            mp.set_property("stream-open-filename", playlist)
            if json.title and mp.get_property("force-media-title", "") == "" then
                mp.set_property("file-local-options/force-media-title",
                    json.title)
            end
            -- there might not be subs for the first segment
            local entry_wsubs = nil
            for i, entry in pairs(json.entries) do
                if entry.requested_subtitles ~= nil then
                    entry_wsubs = i
                    break
                end
            end

            if entry_wsubs ~= nil and
                json.entries[entry_wsubs].duration ~= nil then
                for j, req in pairs(json.entries[entry_wsubs].requested_subtitles) do
                    local subfile = "edl://"
                    for i, entry in pairs(json.entries) do
                        if entry.requested_subtitles ~= nil and
                            entry.requested_subtitles[j] ~= nil and
                            url_is_safe(entry.requested_subtitles[j].url) then
                            subfile = subfile..edl_escape(entry.requested_subtitles[j].url)
                        else
                            subfile = subfile..edl_escape("memory://WEBVTT")
                        end
                        subfile = subfile..",length="..entry.duration..";"
                    end
                    msg.debug(j.." sub EDL: "..subfile)
                    mp.commandv("sub-add", subfile, "auto", req.ext, j)
                end
            end
        elseif self_redirecting_url and #json.entries == 1 then
            msg.verbose("Playlist with single entry detected.")
            add_single_video(json.entries[1])
        else
            local playlist_index = parse_yt_playlist(url, json)
            local playlist = {"#EXTM3U"}
            for i, entry in pairs(json.entries) do
                local site = entry.url
                local title = entry.title

                if title ~= nil then
                    title = string.gsub(title, '%s+', ' ')
                    table.insert(playlist, "#EXTINF:0," .. title)
                end

                --[[ some extractors will still return the full info for
                     all clips in the playlist and the URL will point
                     directly to the file in that case, which we don't
                     want so get the webpage URL instead, which is what
                     we want, but only if we aren't going to trigger an
                     infinite loop
                --]]
                if entry["webpage_url"] and not self_redirecting_url then
                    site = entry["webpage_url"]
                end

                local playlist_url = nil

                -- links without protocol as returned by --flat-playlist
                if not site:find("://") then
                    -- youtube extractor provides only IDs,
                    -- others come prefixed with the extractor name and ":"
                    local prefix = site:find(":") and "ytdl://" or
                        "https://youtu.be/"
                    playlist_url = prefix .. site
                elseif url_is_safe(site) then
                    playlist_url = site
                end

                if playlist_url then
                    table.insert(playlist, playlist_url)
                    -- save the cookies in a table for the playlist hook
                    playlist_cookies[playlist_url] = entry.cookies or json.cookies
                end
            end

            if use_playlist and
                not option_was_set("playlist-start") and playlist_index then
                mp.set_property_number("playlist-start", playlist_index)
            end

            mp.set_property("stream-open-filename", "memory://" .. table.concat(playlist, "\n"))
        end
    else -- probably a video
        add_single_video(json)
    end
    msg.debug('script running time: '..os.clock()-start_time..' seconds')
end

if not o.try_ytdl_first then
    mp.add_hook("on_load", 10, function ()
        msg.verbose('ytdl:// hook')
        local url = mp.get_property("stream-open-filename", "")
        if url:find("ytdl://") ~= 1 then
            msg.verbose('not a ytdl:// url')
            return
        end
        run_ytdl_hook(url)
    end)
end
mp.add_hook("on_load", 20, function ()
    msg.verbose('playlist hook')
    local url = mp.get_property("stream-open-filename", "")
    if playlist_cookies[url] then
        set_cookies(playlist_cookies[url])
    end
end)

mp.add_hook(o.try_ytdl_first and "on_load" or "on_load_fail", 10, function()
    msg.verbose('full hook')
    local url = mp.get_property("stream-open-filename", "")
    if url:find("ytdl://") ~= 1 and
        not ((url:find("https?://") == 1) and not is_blacklisted(url)) then
        return
    end
    run_ytdl_hook(url)
end)

mp.add_hook("on_preloaded", 10, function ()
    if next(chapter_list) ~= nil then
        msg.verbose("Setting chapters")

        mp.set_property_native("chapter-list", chapter_list)
        chapter_list = {}
    end
end)