2014-11-19 17:37:23 +00:00
|
|
|
local utils = require 'mp.utils'
|
|
|
|
local msg = require 'mp.msg'
|
2017-07-08 13:43:37 +00:00
|
|
|
local options = require 'mp.options'
|
|
|
|
|
|
|
|
-- User-configurable script options (overridable via script-opts, e.g.
-- ytdl_hook.conf or --script-opts=ytdl_hook-...).
local o = {
    -- "|"-separated list of URL substrings/Lua patterns for which youtube-dl
    -- should not be invoked (see is_blacklisted below).
    exclude = "",
    -- presumably makes the hook run before mpv's own openers — the reading
    -- code is outside this chunk; TODO confirm
    try_ytdl_first = false,
    -- prefer DASH/HLS manifest URLs over direct stream URLs when available
    use_manifests = false
}
options.read_options(o)
|
2014-11-19 17:37:23 +00:00
|
|
|
|
|
|
|
-- State for locating and running the youtube-dl executable.
local ytdl = {
    -- executable to run; may be replaced with a copy found in mpv's
    -- config dir (see run_ytdl_hook)
    path = "youtube-dl",
    -- whether the config-dir lookup has already been performed
    searched = false,
    -- cached list parsed from o.exclude (filled lazily by is_blacklisted)
    blacklisted = {}
}

-- Chapters collected while processing the JSON result; consumed elsewhere
-- in this file.
local chapter_list = {}
|
|
|
|
|
2018-01-26 01:19:04 +00:00
|
|
|
--- Build a set (value -> true lookup table) from an array of values.
function Set (t)
    local result = {}
    for _, value in pairs(t) do
        result[value] = true
    end
    return result
end
|
|
|
|
|
|
|
|
-- Whitelist of URL protocols considered safe to hand to the player
-- (checked by url_is_safe).
local safe_protos = Set {
    "http", "https", "ftp", "ftps",
    "rtmp", "rtmps", "rtmpe", "rtmpt", "rtmpts", "rtmpte",
    "data"
}
|
|
|
|
|
2015-01-24 19:54:35 +00:00
|
|
|
-- Run an external command synchronously via mp.utils.
-- Returns: exit status, captured stdout, the full result table, and
-- whether mpv itself killed the subprocess (used to detect user aborts).
local function exec(args)
    local ret = utils.subprocess({args = args})
    return ret.status, ret.stdout, ret, ret.killed_by_us
end
|
|
|
|
|
2015-09-30 20:28:01 +00:00
|
|
|
-- return true if it was explicitly set on the command line
local function option_was_set(name)
    local prop = "option-info/" .. name .. "/set-from-commandline"
    return mp.get_property_bool(prop, false)
end
|
|
|
|
|
2017-06-09 07:08:54 +00:00
|
|
|
-- return true if the option was set locally (per-file)
local function option_was_set_locally(name)
    local prop = "option-info/" .. name .. "/set-locally"
    return mp.get_property_bool(prop, false)
end
|
|
|
|
|
2015-01-24 19:54:35 +00:00
|
|
|
-- youtube-dl may set special http headers for some sites (user-agent, cookies).
-- Applies them as file-local mpv options, but never overrides values the user
-- explicitly set on the command line.
local function set_http_headers(http_headers)
    if not http_headers then
        return
    end
    local headers = {}
    local useragent = http_headers["User-Agent"]
    if useragent and not option_was_set("user-agent") then
        mp.set_property("file-local-options/user-agent", useragent)
    end
    local additional_fields = {"Cookie", "Referer", "X-Forwarded-For"}
    -- ipairs (not pairs): additional_fields is a plain array and the
    -- resulting header order should be deterministic
    for _, item in ipairs(additional_fields) do
        local field_value = http_headers[item]
        if field_value then
            headers[#headers + 1] = item .. ": " .. field_value
        end
    end
    if #headers > 0 and not option_was_set("http-header-fields") then
        mp.set_property_native("file-local-options/http-header-fields", headers)
    end
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-01-15 17:56:44 +00:00
|
|
|
-- Add name=value to the libav options table, creating the table when given
-- nil and never overwriting an existing (truthy) entry. Returns the table.
local function append_libav_opt(props, name, value)
    props = props or {}
    if name and value and not props[name] then
        props[name] = value
    end
    return props
end
|
|
|
|
|
2015-09-30 20:28:01 +00:00
|
|
|
-- Quote a URL for use inside an EDL: "%<byte-length>%<url>".
local function edl_escape(url)
    return string.format("%%%d%%%s", #url, url)
end
|
|
|
|
|
2018-01-26 11:26:27 +00:00
|
|
|
-- Check the URL's protocol against the safe_protos whitelist.
-- Logs an error and returns a falsy value for anything unsafe or malformed.
local function url_is_safe(url)
    local proto = nil
    if type(url) == "string" then
        proto = url:match("^(.+)://")
    end
    local safe = proto and safe_protos[proto]
    if not safe then
        msg.error(("Ignoring potentially unsafe url: '%s'"):format(url))
    end
    return safe
end
|
|
|
|
|
2016-09-27 21:23:10 +00:00
|
|
|
-- Parse an "H:MM:SS" or "M:SS" timestamp (matched anywhere inside the
-- string) into seconds; returns nil when no timestamp is found.
local function time_to_secs(time_string)
    local hours, mins, secs = time_string:match("(%d+):(%d%d?):(%d%d)")
    if hours ~= nil then
        return hours * 3600 + mins * 60 + secs
    end

    mins, secs = time_string:match("(%d%d?):(%d%d)")
    if mins ~= nil then
        return mins * 60 + secs
    end

    return nil
end
|
|
|
|
|
|
|
|
-- Scan free-form text (e.g. a video description) line by line for
-- timestamps and build a chapter list sorted by time; lines whose
-- timestamp is at or past video_length are ignored.
local function extract_chapters(data, video_length)
    local chapters = {}

    for line in data:gmatch("[^\r\n]+") do
        local timestamp = time_to_secs(line)
        if timestamp and timestamp < video_length then
            chapters[#chapters + 1] = {time = timestamp, title = line}
        end
    end

    table.sort(chapters, function(x, y) return x.time < y.time end)
    return chapters
end
|
|
|
|
|
2017-07-08 13:43:37 +00:00
|
|
|
-- Decide whether youtube-dl should be skipped for this URL, based on the
-- user's o.exclude option: a "|"-separated list of substrings/patterns
-- matched against the URL with its http(s):// prefix removed. The parsed
-- list is cached in ytdl.blacklisted on first use.
local function is_blacklisted(url)
    if o.exclude == "" then return false end
    if #ytdl.blacklisted == 0 then
        -- lazily split the "|"-separated exclude option into patterns
        for pattern in o.exclude:gmatch('[^|]+') do
            table.insert(ytdl.blacklisted, pattern)
        end
    end
    if #ytdl.blacklisted > 0 then
        url = url:match('https?://(.+)')
        for _, exclude in ipairs(ytdl.blacklisted) do
            if url:match(exclude) then
                msg.verbose('URL matches excluded substring. Skipping.')
                return true
            end
        end
    end
    return false
end
|
|
|
|
|
2018-01-15 21:16:36 +00:00
|
|
|
-- Find the 0-based playlist position (suitable for --playlist-start) of the
-- video requested in a YouTube playlist URL, or nil if it can't be
-- determined. `json` is the parsed youtube-dl output for the playlist.
local function parse_yt_playlist(url, json)
    -- return 0-based index to use with --playlist-start
    if not json.extractor or json.extractor ~= "youtube:playlist" then
        return nil
    end

    local query = url:match("%?.+")
    if not query then return nil end

    -- parse the query string into key -> value pairs
    local args = {}
    for arg, param in query:gmatch("(%a+)=([^&?]+)") do
        if arg and param then
            args[arg] = param
        end
    end

    local maybe_idx = tonumber(args["index"])

    -- if index matches v param it's probably the requested item
    if maybe_idx and #json.entries >= maybe_idx and
        json.entries[maybe_idx].id == args["v"] then
        msg.debug("index matches requested video")
        return maybe_idx - 1
    end

    -- if there's no index or it doesn't match, look for video
    for i = 1, #json.entries do
        -- BUGFIX: compare the entry's id field, not the entry table itself
        -- (a table can never equal the "v" string, so the search always
        -- failed before)
        if json.entries[i].id == args["v"] then
            msg.debug("found requested video in index " .. (i - 1))
            return i - 1
        end
    end

    msg.debug("requested video not found in playlist")
    -- if item isn't on the playlist, give up
    return nil
end
|
|
|
|
|
2017-09-03 11:11:49 +00:00
|
|
|
-- Resolve a possibly-relative URL against base_url, collapsing "." and
-- ".." path segments. Absolute http(s) URLs are returned unchanged.
local function make_absolute_url(base_url, url)
    if url:find("https?://") == 1 then
        return url
    end

    local proto, domain, rest =
        base_url:match("(https?://)([^/]+/)(.*)/?")

    -- collect path segments: base path first, then the relative path
    local segs = {}
    local function push(seg) table.insert(segs, seg) end
    rest:gsub("([^/]+)", push)
    url:gsub("([^/]+)", push)

    -- resolve "." (skip) and ".." (pop previous segment)
    local resolved = {}
    for _, seg in ipairs(segs) do
        if seg == ".." then
            table.remove(resolved)
        elseif seg ~= "." then
            resolved[#resolved + 1] = seg
        end
    end

    return proto .. domain .. table.concat(resolved, "/")
end
|
|
|
|
|
2017-08-05 03:26:28 +00:00
|
|
|
-- Build a full URL for a fragment entry: resolve fragment.path against
-- base_url when both are present, otherwise fall back to fragment.url,
-- otherwise "".
local function join_url(base_url, fragment)
    if base_url and fragment.path then
        return make_absolute_url(base_url, fragment.path)
    end
    if fragment.url then
        return fragment.url
    end
    return ""
end
|
|
|
|
|
|
|
|
-- Join a list of youtube-dl "fragments" into a single edl:// URL.
-- protocol/is_live decide whether the first fragment is treated as an MP4
-- DASH initialization segment; base is the fragment_base_url used to
-- resolve relative fragment paths. Returns the EDL string, or nil when the
-- fragments can't be represented (no fragments, missing durations in DASH
-- mode, or an unsafe URL) so the caller can fall back to another source.
local function edl_track_joined(fragments, protocol, is_live, base)
    if not (type(fragments) == "table") or not fragments[1] then
        msg.debug("No fragments to join into EDL")
        return nil
    end

    local edl = "edl://"
    -- index of the first media fragment (2 when fragment[1] is an init segment)
    local offset = 1
    local parts = {}

    -- a DASH segment list without durations and not live: assume the first
    -- entry is an initialization segment rather than media
    if (protocol == "http_dash_segments") and
        not fragments[1].duration and not is_live then
        -- assume MP4 DASH initialization segment
        table.insert(parts,
            "!mp4_dash,init=" .. edl_escape(join_url(base, fragments[1])))
        offset = 2

        -- Check remaining fragments for duration;
        -- if not available in all, give up.
        for i = offset, #fragments do
            if not fragments[i].duration then
                msg.error("EDL doesn't support fragments" ..
                         "without duration with MP4 DASH")
                return nil
            end
        end
    end

    for i = offset, #fragments do
        local fragment = fragments[i]
        -- refuse the whole track if any fragment URL is unsafe
        if not url_is_safe(join_url(base, fragment)) then
            return nil
        end
        table.insert(parts, edl_escape(join_url(base, fragment)))
        if fragment.duration then
            parts[#parts] =
                parts[#parts] .. ",length="..fragment.duration
        end
    end
    return edl .. table.concat(parts, ";") .. ";"
end
|
2015-09-30 20:28:01 +00:00
|
|
|
|
2018-01-05 01:37:49 +00:00
|
|
|
-- Report whether mpv's libavformat build includes the native "dash" demuxer.
local function has_native_dash_demuxer()
    local demuxers = mp.get_property_native("demuxer-lavf-list", {})
    for _, name in ipairs(demuxers) do
        if name == "dash" then
            return true
        end
    end
    return false
end
|
|
|
|
|
2018-01-27 12:23:03 +00:00
|
|
|
-- A manifest is usable when the JSON (or its first requested format)
-- carries a manifest_url, and the protocol is either DASH (with native
-- demuxer support) or some m3u8 variant.
local function valid_manifest(json)
    local reqfmt = {}
    if json["requested_formats"] then
        reqfmt = json["requested_formats"][1] or {}
    end

    if not (reqfmt["manifest_url"] or json["manifest_url"]) then
        return false
    end

    local proto = reqfmt["protocol"] or json["protocol"] or ""
    return (proto == "http_dash_segments" and has_native_dash_demuxer()) or
        proto:find("^m3u8")
end
|
|
|
|
|
2017-04-22 20:47:34 +00:00
|
|
|
-- Turn youtube-dl JSON for a single video into an mpv playback target:
-- picks a stream URL (manifest, merged EDL of split tracks, or direct URL),
-- then applies headers, title, subtitles, chapters, start time, aspect
-- ratio, and libav stream options as file-local settings.
local function add_single_video(json)
    local streamurl = ""
    local max_bitrate = 0
    local reqfmts = json["requested_formats"]

    -- prefer manifest_url if present
    if o.use_manifests and valid_manifest(json) then
        local mpd_url = reqfmts and reqfmts[1]["manifest_url"] or
            json["manifest_url"]
        if not mpd_url then
            msg.error("No manifest URL found in JSON data.")
            return
        elseif not url_is_safe(mpd_url) then
            return
        end

        streamurl = mpd_url

        -- remember the highest total bitrate so the demuxer can pick the
        -- matching manifest variant (see hls-bitrate below)
        -- NOTE(review): track.tbr is assumed non-nil here; a format without
        -- tbr would error on the comparison — confirm against ytdl output
        if reqfmts then
            for _, track in pairs(reqfmts) do
                max_bitrate = track.tbr > max_bitrate and
                    track.tbr or max_bitrate
            end
        elseif json.tbr then
            max_bitrate = json.tbr > max_bitrate and json.tbr or max_bitrate
        end

    -- DASH/split tracks
    elseif reqfmts then
        -- collect one URL (or joined-fragment EDL) per track, then merge
        -- them into a single virtual stream via EDL
        local streams = {}

        for _, track in pairs(reqfmts) do
            local edl_track = nil
            edl_track = edl_track_joined(track.fragments,
                track.protocol, json.is_live,
                track.fragment_base_url)
            if not edl_track and not url_is_safe(track.url) then
                return
            end
            if track.vcodec and track.vcodec ~= "none" then
                -- video track
                streams[#streams + 1] = edl_track or track.url
            elseif track.vcodec == "none" then
                -- audio track
                streams[#streams + 1] = track.url
            end
            -- tracks with vcodec == nil fall through both branches and are
            -- skipped entirely
        end

        if #streams > 1 then
            -- merge them via EDL
            for i = 1, #streams do
                streams[i] = edl_escape(streams[i])
            end
            streamurl = "edl://!no_chapters;" ..
                table.concat(streams, ";!new_stream;") .. ";"
        else
            streamurl = streams[1]
        end

    elseif not (json.url == nil) then
        local edl_track = nil
        edl_track = edl_track_joined(json.fragments, json.protocol,
            json.is_live, json.fragment_base_url)

        if not edl_track and not url_is_safe(json.url) then
            return
        end
        -- normal video or single track
        streamurl = edl_track or json.url
        set_http_headers(json.http_headers)
    else
        msg.error("No URL found in JSON data.")
        return
    end

    msg.debug("streamurl: " .. streamurl)

    -- "data:" URLs must be spelled "data://" for mpv
    mp.set_property("stream-open-filename", streamurl:gsub("^data:", "data://", 1))

    mp.set_property("file-local-options/force-media-title", json.title)

    -- set hls-bitrate for dash track selection
    if max_bitrate > 0 and
        not option_was_set("hls-bitrate") and
        not option_was_set_locally("hls-bitrate") then
        mp.set_property_native('file-local-options/hls-bitrate', max_bitrate*1000)
    end

    -- add subtitles
    if not (json.requested_subtitles == nil) then
        for lang, sub_info in pairs(json.requested_subtitles) do
            msg.verbose("adding subtitle ["..lang.."]")

            local sub = nil

            -- inline data takes precedence over a (safe) URL
            if not (sub_info.data == nil) then
                sub = "memory://"..sub_info.data
            elseif not (sub_info.url == nil) and
                url_is_safe(sub_info.url) then
                sub = sub_info.url
            end

            if not (sub == nil) then
                mp.commandv("sub-add", sub,
                    "auto", sub_info.ext, lang)
            else
                msg.verbose("No subtitle data/url for ["..lang.."]")
            end
        end
    end

    -- add chapters
    if json.chapters then
        msg.debug("Adding pre-parsed chapters")
        for i = 1, #json.chapters do
            local chapter = json.chapters[i]
            local title = chapter.title or ""
            if title == "" then
                title = string.format('Chapter %02d', i)
            end
            table.insert(chapter_list, {time=chapter.start_time, title=title})
        end
    elseif not (json.description == nil) and not (json.duration == nil) then
        -- fall back to scraping timestamps out of the description text
        chapter_list = extract_chapters(json.description, json.duration)
    end

    -- set start time
    if not (json.start_time == nil) and
        not option_was_set("start") and
        not option_was_set_locally("start") then
        msg.debug("Setting start to: " .. json.start_time .. " secs")
        mp.set_property("file-local-options/start", json.start_time)
    end

    -- set aspect ratio for anamorphic video
    if not (json.stretched_ratio == nil) and
        not option_was_set("video-aspect") then
        mp.set_property('file-local-options/video-aspect', json.stretched_ratio)
    end

    local stream_opts = mp.get_property_native("file-local-options/stream-lavf-o", {})

    -- for rtmp
    if (json.protocol == "rtmp") then
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_tcurl", streamurl)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_pageurl", json.page_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_playpath", json.play_path)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_swfverify", json.player_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_swfurl", json.player_url)
        stream_opts = append_libav_opt(stream_opts,
            "rtmp_app", json.app)
    end

    if json.proxy and json.proxy ~= "" then
        stream_opts = append_libav_opt(stream_opts,
            "http_proxy", json.proxy)
    end

    mp.set_property_native("file-local-options/stream-lavf-o", stream_opts)
end
|
|
|
|
|
2018-08-17 18:18:13 +00:00
|
|
|
function run_ytdl_hook(url)
|
2017-07-08 13:42:04 +00:00
|
|
|
local start_time = os.clock()
|
2015-10-10 22:35:35 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- check for youtube-dl in mpv's config dir
|
|
|
|
if not (ytdl.searched) then
|
|
|
|
local exesuf = (package.config:sub(1,1) == '\\') and '.exe' or ''
|
|
|
|
local ytdl_mcd = mp.find_config_file("youtube-dl" .. exesuf)
|
|
|
|
if not (ytdl_mcd == nil) then
|
|
|
|
msg.verbose("found youtube-dl at: " .. ytdl_mcd)
|
|
|
|
ytdl.path = ytdl_mcd
|
2014-11-19 17:37:23 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
ytdl.searched = true
|
|
|
|
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- strip ytdl://
|
|
|
|
if (url:find("ytdl://") == 1) then
|
|
|
|
url = url:sub(8)
|
|
|
|
end
|
2014-11-19 22:33:28 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
local format = mp.get_property("options/ytdl-format")
|
|
|
|
local raw_options = mp.get_property_native("options/ytdl-raw-options")
|
|
|
|
local allsubs = true
|
|
|
|
local proxy = nil
|
2018-01-15 21:16:36 +00:00
|
|
|
local use_playlist = false
|
2015-04-27 20:30:10 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
local command = {
|
|
|
|
ytdl.path, "--no-warnings", "-J", "--flat-playlist",
|
2018-01-15 21:16:36 +00:00
|
|
|
"--sub-format", "ass/srt/best"
|
2018-02-09 23:44:58 +00:00
|
|
|
}
|
2015-09-30 20:28:01 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- Checks if video option is "no", change format accordingly,
|
|
|
|
-- but only if user didn't explicitly set one
|
|
|
|
if (mp.get_property("options/vid") == "no")
|
|
|
|
and not option_was_set("ytdl-format") then
|
2015-04-27 20:30:10 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
format = "bestaudio/best"
|
|
|
|
msg.verbose("Video disabled. Only using audio")
|
|
|
|
end
|
2015-04-09 13:26:12 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
if (format == "") then
|
|
|
|
format = "bestvideo+bestaudio/best"
|
|
|
|
end
|
|
|
|
table.insert(command, "--format")
|
|
|
|
table.insert(command, format)
|
2015-10-23 16:22:54 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
for param, arg in pairs(raw_options) do
|
|
|
|
table.insert(command, "--" .. param)
|
|
|
|
if (arg ~= "") then
|
|
|
|
table.insert(command, arg)
|
2015-02-22 20:32:42 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
if (param == "sub-lang") and (arg ~= "") then
|
|
|
|
allsubs = false
|
2018-01-15 21:16:36 +00:00
|
|
|
elseif (param == "proxy") and (arg ~= "") then
|
2018-02-09 23:44:58 +00:00
|
|
|
proxy = arg
|
2018-01-15 21:16:36 +00:00
|
|
|
elseif (param == "yes-playlist") then
|
|
|
|
use_playlist = true
|
2018-02-09 23:44:58 +00:00
|
|
|
end
|
|
|
|
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
if (allsubs == true) then
|
|
|
|
table.insert(command, "--all-subs")
|
|
|
|
end
|
2018-01-15 21:16:36 +00:00
|
|
|
if not use_playlist then
|
|
|
|
table.insert(command, "--no-playlist")
|
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
table.insert(command, "--")
|
|
|
|
table.insert(command, url)
|
|
|
|
msg.debug("Running: " .. table.concat(command,' '))
|
2018-04-15 07:00:18 +00:00
|
|
|
local es, json, result, aborted = exec(command)
|
|
|
|
|
|
|
|
if aborted then
|
|
|
|
return
|
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
|
|
|
|
if (es < 0) or (json == nil) or (json == "") then
|
|
|
|
local err = "youtube-dl failed: "
|
|
|
|
if result.error and result.error == "init" then
|
|
|
|
err = err .. "not found or not enough permissions"
|
|
|
|
elseif not result.killed_by_us then
|
|
|
|
err = err .. "unexpected error ocurred"
|
|
|
|
else
|
|
|
|
err = string.format("%s returned '%d'", err, es)
|
2014-11-19 17:37:23 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
msg.error(err)
|
|
|
|
return
|
|
|
|
end
|
|
|
|
|
|
|
|
local json, err = utils.parse_json(json)
|
|
|
|
|
|
|
|
if (json == nil) then
|
|
|
|
msg.error("failed to parse JSON data: " .. err)
|
|
|
|
return
|
|
|
|
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
msg.verbose("youtube-dl succeeded!")
|
|
|
|
msg.debug('ytdl parsing took '..os.clock()-start_time..' seconds')
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
json["proxy"] = json["proxy"] or proxy
|
2018-01-15 17:56:44 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- what did we get?
|
|
|
|
if json["direct"] then
|
|
|
|
-- direct URL, nothing to do
|
|
|
|
msg.verbose("Got direct URL")
|
|
|
|
return
|
|
|
|
elseif (json["_type"] == "playlist")
|
|
|
|
or (json["_type"] == "multi_video") then
|
|
|
|
-- a playlist
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
if (#json.entries == 0) then
|
|
|
|
msg.warn("Got empty playlist, nothing to play.")
|
|
|
|
return
|
|
|
|
end
|
2015-01-16 05:59:16 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
local self_redirecting_url =
|
|
|
|
json.entries[1]["_type"] ~= "url_transparent" and
|
|
|
|
json.entries[1]["webpage_url"] and
|
|
|
|
json.entries[1]["webpage_url"] == json["webpage_url"]
|
2017-12-29 17:14:36 +00:00
|
|
|
|
2015-01-16 05:59:16 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- some funky guessing to detect multi-arc videos
|
|
|
|
if self_redirecting_url and #json.entries > 1
|
|
|
|
and json.entries[1].protocol == "m3u8_native"
|
|
|
|
and json.entries[1].url then
|
|
|
|
msg.verbose("multi-arc video detected, building EDL")
|
2014-11-26 16:36:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
local playlist = edl_track_joined(json.entries)
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
msg.debug("EDL: " .. playlist)
|
2014-11-26 16:36:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
if not playlist then
|
|
|
|
return
|
|
|
|
end
|
2018-01-26 18:54:17 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- can't change the http headers for each entry, so use the 1st
|
|
|
|
set_http_headers(json.entries[1].http_headers)
|
2014-11-26 16:36:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
mp.set_property("stream-open-filename", playlist)
|
|
|
|
if not (json.title == nil) then
|
|
|
|
mp.set_property("file-local-options/force-media-title",
|
|
|
|
json.title)
|
|
|
|
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
-- there might not be subs for the first segment
|
|
|
|
local entry_wsubs = nil
|
|
|
|
for i, entry in pairs(json.entries) do
|
|
|
|
if not (entry.requested_subtitles == nil) then
|
|
|
|
entry_wsubs = i
|
|
|
|
break
|
2016-09-11 14:58:05 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
end
|
2016-09-11 14:58:05 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
if not (entry_wsubs == nil) and
|
|
|
|
not (json.entries[entry_wsubs].duration == nil) then
|
|
|
|
for j, req in pairs(json.entries[entry_wsubs].requested_subtitles) do
|
|
|
|
local subfile = "edl://"
|
|
|
|
for i, entry in pairs(json.entries) do
|
|
|
|
if not (entry.requested_subtitles == nil) and
|
|
|
|
not (entry.requested_subtitles[j] == nil) and
|
|
|
|
url_is_safe(entry.requested_subtitles[j].url) then
|
|
|
|
subfile = subfile..edl_escape(entry.requested_subtitles[j].url)
|
|
|
|
else
|
|
|
|
subfile = subfile..edl_escape("memory://WEBVTT")
|
2016-05-22 21:18:27 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
subfile = subfile..",length="..entry.duration..";"
|
2016-05-22 21:18:27 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
msg.debug(j.." sub EDL: "..subfile)
|
|
|
|
mp.commandv("sub-add", subfile, "auto", req.ext, j)
|
2016-05-22 21:18:27 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
end
|
2016-05-22 21:18:27 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
elseif self_redirecting_url and #json.entries == 1 then
|
|
|
|
msg.verbose("Playlist with single entry detected.")
|
|
|
|
add_single_video(json.entries[1])
|
|
|
|
else
|
2018-01-15 21:16:36 +00:00
|
|
|
local playlist_index = parse_yt_playlist(url, json)
|
2018-02-09 23:44:58 +00:00
|
|
|
local playlist = {"#EXTM3U"}
|
|
|
|
for i, entry in pairs(json.entries) do
|
|
|
|
local site = entry.url
|
|
|
|
local title = entry.title
|
|
|
|
|
|
|
|
if not (title == nil) then
|
|
|
|
title = string.gsub(title, '%s+', ' ')
|
|
|
|
table.insert(playlist, "#EXTINF:0," .. title)
|
|
|
|
end
|
2017-12-22 22:12:18 +00:00
|
|
|
|
2018-02-09 23:44:58 +00:00
|
|
|
--[[ some extractors will still return the full info for
|
|
|
|
all clips in the playlist and the URL will point
|
|
|
|
directly to the file in that case, which we don't
|
|
|
|
want so get the webpage URL instead, which is what
|
|
|
|
we want, but only if we aren't going to trigger an
|
|
|
|
infinite loop
|
|
|
|
--]]
|
|
|
|
if entry["webpage_url"] and not self_redirecting_url then
|
|
|
|
site = entry["webpage_url"]
|
2014-11-26 16:36:23 +00:00
|
|
|
end
|
|
|
|
|
2018-02-10 13:12:47 +00:00
|
|
|
-- links without protocol as returned by --flat-playlist
|
2018-02-09 23:44:58 +00:00
|
|
|
if not site:find("://") then
|
2018-02-10 13:12:47 +00:00
|
|
|
-- youtube extractor provides only IDs,
|
|
|
|
-- others come prefixed with the extractor name and ":"
|
|
|
|
local prefix = site:find(":") and "ytdl://" or
|
|
|
|
"https://youtu.be/"
|
|
|
|
table.insert(playlist, prefix .. site)
|
2018-02-09 23:44:58 +00:00
|
|
|
elseif url_is_safe(site) then
|
|
|
|
table.insert(playlist, site)
|
2018-01-26 01:19:04 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
|
2014-11-26 16:36:23 +00:00
|
|
|
end
|
2014-11-19 17:37:23 +00:00
|
|
|
|
2018-01-15 21:16:36 +00:00
|
|
|
if use_playlist and
|
|
|
|
not option_was_set("playlist-start") and playlist_index then
|
|
|
|
mp.set_property_number("playlist-start", playlist_index)
|
2018-02-09 23:44:58 +00:00
|
|
|
end
|
2018-01-15 21:16:36 +00:00
|
|
|
|
|
|
|
mp.set_property("stream-open-filename", "memory://" .. table.concat(playlist, "\n"))
|
loadfile, ytdl_hook: don't reject EDL-resolved URLs through playlist
The ytdl wrapper can resolve web links to playlists. This playlist is
passed as big memory:// blob, and will contain further quite normal web
links. When playback of one of these playlist entries starts, ytdl is
called again and will resolve the web link to a media URL again.
This didn't work if playlist entries resolved to EDL URLs. Playback was
rejected with a "potentially unsafe URL from playlist" error. This was
completely weird and unexpected: using the playlist entry directly on
the command line worked fine, and there isn't a reason why it should be
different for a playlist entry (both are resolved by the ytdl wrapper
anyway). Also, if the only EDL URL was added via audio-add or sub-add,
the URL was accessed successfully.
The reason this happened is because the playlist entries were marked as
STREAM_SAFE_ONLY, and edl:// is not marked as "safe". Playlist entries
passed via command line directly are not marked, so resolving them to
EDL worked.
Fix this by making the ytdl hook set load-unsafe-playlists while the
playlist is parsed. (After the playlist is parsed, and before the first
playlist entry is played, file-local options are reset again.) Further,
extend the load-unsafe-playlists option so that the playlist entries are
not marked while the playlist is loaded.
Since playlist entries are already verified, this should change nothing
about the actual security situation.
There are now 2 locations which check load_unsafe_playlists. The old one
is a bit redundant now. In theory, the playlist loading code might not
be the only code which sets these flags, so keeping the old code is
somewhat justified (and in any case it doesn't hurt to keep it).
In general, the security concept sucks (and always did). For example, I
cannot answer the question of whether you can "break" this mechanism
with various combinations of archives, EDL files, playlist files,
compromised sites, and so on. You probably can, and I'm fully aware
that it's probably possible, so don't blame me.
2019-01-04 12:48:27 +00:00
|
|
|
|
|
|
|
-- This disables mpv's mushy playlist security code, which will
|
|
|
|
-- break links that will be resolved to EDL later (because EDL is
|
|
|
|
-- not considered "safe", and the playlist entries got tagged as
|
|
|
|
-- network originating due to the playlist redirection).
|
|
|
|
mp.set_property_native("file-local-options/load-unsafe-playlists", true)
|
2014-11-19 17:37:23 +00:00
|
|
|
end
|
2018-02-09 23:44:58 +00:00
|
|
|
|
|
|
|
else -- probably a video
|
|
|
|
add_single_video(json)
|
2014-11-19 17:37:23 +00:00
|
|
|
end
|
2017-07-08 13:42:04 +00:00
|
|
|
msg.debug('script running time: '..os.clock()-start_time..' seconds')
|
2018-08-17 18:18:13 +00:00
|
|
|
end
|
2016-09-27 21:23:10 +00:00
|
|
|
|
2018-08-17 18:18:13 +00:00
|
|
|
-- When ytdl is not tried first, register a normal-priority on_load hook
-- that only intercepts URLs carrying the explicit ytdl:// prefix.
if not o.try_ytdl_first then
    mp.add_hook("on_load", 10, function ()
        msg.verbose('ytdl:// hook')
        local path = mp.get_property("stream-open-filename", "")
        -- Hand the stream to youtube-dl only for ytdl:// pseudo-URLs;
        -- anything else falls through to mpv's regular loaders.
        if path:find("ytdl://") == 1 then
            run_ytdl_hook(path)
        else
            msg.verbose('not a ytdl:// url')
        end
    end)
end
|
|
|
|
|
|
|
|
-- The "full" hook: runs as the primary loader (on_load) when
-- try_ytdl_first is set, otherwise only as a fallback after mpv's own
-- loaders have failed (on_load_fail).
mp.add_hook(o.try_ytdl_first and "on_load" or "on_load_fail", 10, function()
    msg.verbose('full hook')
    local path = mp.get_property("stream-open-filename", "")
    -- Accept explicit ytdl:// URLs, plus plain http(s) URLs whose site
    -- has not been excluded via the script's blacklist option.
    -- (is_blacklisted is only consulted for http(s) links, matching the
    -- original short-circuit behavior.)
    if path:find("ytdl://") == 1 or
       ((path:find("https?://") == 1) and not is_blacklisted(path)) then
        run_ytdl_hook(path)
    end
end)
|
2016-09-27 21:23:10 +00:00
|
|
|
|
|
|
|
-- Once the file is preloaded, apply any chapters that were collected
-- while resolving the URL (chapter_list is filled elsewhere in this
-- script).
mp.add_hook("on_preloaded", 10, function ()
    -- next() == nil is the cheap "table is empty" test that works for
    -- both the array and hash parts of a table.
    if next(chapter_list) == nil then
        return
    end
    msg.verbose("Setting chapters")
    mp.set_property_native("chapter-list", chapter_list)
    -- Clear the buffer so stale chapters cannot leak into the next file.
    chapter_list = {}
end)
|