Add group call settings box.

This commit is contained in:
John Preston 2020-11-28 19:18:51 +03:00
parent abb81c764e
commit 9e5006dd67
11 changed files with 369 additions and 219 deletions

View File

@ -38,7 +38,13 @@ GroupCall::GroupCall(
: _delegate(delegate)
, _channel(channel)
, _api(&_channel->session().mtp()) {
if (inputCall.c_inputGroupCall().vid().v) {
const auto id = inputCall.c_inputGroupCall().vid().v;
if (id) {
if (const auto call = _channel->call(); call && call->id() == id) {
if (!_channel->canManageCall() && call->joinMuted()) {
_muted = MuteState::ForceMuted;
}
}
_state = State::Joining;
join(inputCall);
} else {

View File

@ -191,10 +191,9 @@ void GroupPanel::initControls() {
_hangup->setClickedCallback([=] { hangup(false); });
_settings->setClickedCallback([=] {
_layerBg->showBox(Box(
GroupCallSettingsBox,
[=] { copyShareLink(); },
[=] { hangup(true); }));
if (_call) {
_layerBg->showBox(Box(GroupCallSettingsBox, _call));
}
});
_settings->setText(tr::lng_menu_settings());

View File

@ -7,26 +7,217 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_group_settings.h"
#include "calls/calls_group_call.h"
#include "calls/calls_group_panel.h" // LeaveGroupCallBox.
#include "calls/calls_instance.h"
#include "ui/widgets/checkbox.h"
#include "ui/widgets/level_meter.h"
#include "ui/toast/toast.h"
#include "lang/lang_keys.h"
#include "data/data_channel.h"
#include "data/data_group_call.h"
#include "core/application.h"
#include "boxes/single_choice_box.h"
#include "webrtc/webrtc_audio_input_tester.h"
#include "webrtc/webrtc_media_devices.h"
#include "settings/settings_common.h"
#include "settings/settings_calls.h"
#include "main/main_session.h"
#include "apiwrap.h"
#include "styles/style_layers.h"
#include "styles/style_calls.h"
#include "styles/style_settings.h"
#include <QtGui/QGuiApplication>
namespace Calls {
namespace {
// Persists the "join muted" group call setting locally and on the server.
// Silently returns unless the channel still hosts the same call, the user
// is allowed to manage it, the setting is changeable, and the value differs.
void SaveCallJoinMuted(
		not_null<ChannelData*> channel,
		uint64 callId,
		bool joinMuted) {
	const auto call = channel->call();
	if (!call || call->id() != callId) {
		return;
	}
	const auto allowed = channel->canManageCall()
		&& call->canChangeJoinMuted();
	if (!allowed || call->joinMuted() == joinMuted) {
		return;
	}
	// Apply optimistically, then push the change to the server.
	call->setJoinMutedLocally(joinMuted);
	using Flag = MTPphone_ToggleGroupCallSettings::Flag;
	channel->session().api().request(MTPphone_ToggleGroupCallSettings(
		MTP_flags(Flag::f_join_muted),
		call->input(),
		MTP_bool(joinMuted)
	)).send();
}
} // namespace
void GroupCallSettingsBox(
not_null<Ui::GenericBox*> box,
Fn<void()> copyShareLink,
Fn<void()> discard) {
box->setTitle(tr::lng_group_call_settings_title());
//box->addRow(object_ptr<Ui::Checkbox>(
// box.get(),
// tr::lng_group_call_new_muted(),
// newMuted
// ))
box->addButton(tr::lng_settings_save(), [=] {
box->closeBox();
not_null<GroupCall*> call) {
using namespace Settings;
const auto weakCall = base::make_weak(call.get());
const auto weakBox = Ui::MakeWeak(box);
struct State {
rpl::event_stream<QString> outputNameStream;
rpl::event_stream<QString> inputNameStream;
std::unique_ptr<Webrtc::AudioInputTester> micTester;
Ui::LevelMeter *micTestLevel = nullptr;
float micLevel = 0.;
Ui::Animations::Simple micLevelAnimation;
base::Timer levelUpdateTimer;
bool generatingLink = false;
};
const auto state = box->lifetime().make_state<State>();
const auto channel = call->channel();
const auto real = channel->call();
const auto id = call->id();
const auto goodReal = (real && real->id() == id);
const auto layout = box->verticalLayout();
const auto &settings = Core::App().settings();
const auto joinMuted = goodReal ? real->joinMuted() : false;
const auto canChangeJoinMuted = (goodReal && real->canChangeJoinMuted());
const auto muteJoined = (channel->canManageCall() && canChangeJoinMuted)
? box->addRow(object_ptr<Ui::Checkbox>(
box.get(),
tr::lng_group_call_new_muted(),
joinMuted))
: nullptr;
if (muteJoined) {
AddSkip(layout);
}
state->levelUpdateTimer.callEach(kMicTestUpdateInterval);
state->micTester = std::make_unique<Webrtc::AudioInputTester>(
settings.callInputDeviceId());
AddButtonWithLabel(
layout,
tr::lng_group_call_speakers(),
rpl::single(
CurrentAudioOutputName()
) | rpl::then(
state->outputNameStream.events()
),
st::settingsButton
)->addClickHandler([=] {
box->getDelegate()->show(ChooseAudioInputBox(crl::guard(box, [=](
const QString &id,
const QString &name) {
state->outputNameStream.fire_copy(name);
})));
});
box->addButton(tr::lng_cancel(), [=] {
AddButtonWithLabel(
layout,
tr::lng_group_call_microphone(),
rpl::single(
CurrentAudioInputName()
) | rpl::then(
state->inputNameStream.events()
),
st::settingsButton
)->addClickHandler([=] {
box->getDelegate()->show(ChooseAudioInputBox(crl::guard(box, [=](
const QString &id,
const QString &name) {
state->inputNameStream.fire_copy(name);
state->micTester->setDeviceId(id);
})));
});
state->micTestLevel = box->addRow(
object_ptr<Ui::LevelMeter>(
box.get(),
st::defaultLevelMeter),
st::settingsLevelMeterPadding);
state->micTestLevel->resize(QSize(0, st::defaultLevelMeter.height));
state->levelUpdateTimer.setCallback([=] {
const auto was = state->micLevel;
state->micLevel = state->micTester->getAndResetLevel();
state->micLevelAnimation.start([=] {
state->micTestLevel->setValue(
state->micLevelAnimation.value(state->micLevel));
}, was, state->micLevel, kMicTestAnimationDuration);
});
AddSkip(layout);
const auto lookupLink = [=] {
return channel->hasUsername()
? channel->session().createInternalLinkFull(channel->username)
: channel->inviteLink();
};
if (!lookupLink().isEmpty() || channel->canHaveInviteLink()) {
const auto copyLink = [=] {
const auto link = lookupLink();
if (link.isEmpty()) {
return false;
}
QGuiApplication::clipboard()->setText(link);
if (weakBox) {
Ui::Toast::Show(
box->getDelegate()->outerContainer(),
tr::lng_create_channel_link_copied(tr::now));
}
return true;
};
AddButton(
layout,
tr::lng_group_call_share(),
st::settingsButton
)->addClickHandler([=] {
if (!copyLink() && !state->generatingLink) {
state->generatingLink = true;
channel->session().api().request(MTPmessages_ExportChatInvite(
channel->input
)).done([=](const MTPExportedChatInvite &result) {
if (result.type() == mtpc_chatInviteExported) {
channel->setInviteLink(
qs(result.c_chatInviteExported().vlink()));
copyLink();
}
}).send();
}
});
}
if (channel->canManageCall()) {
AddButton(
layout,
tr::lng_group_call_end(),
st::settingsAttentionButton
)->addClickHandler([=] {
if (const auto call = weakCall.get()) {
box->getDelegate()->show(Box(
LeaveGroupCallBox,
call,
true,
BoxContext::GroupCallPanel));
box->closeBox();
}
});
}
box->setTitle(tr::lng_group_call_settings_title());
box->boxClosing(
) | rpl::start_with_next([=] {
if (canChangeJoinMuted
&& muteJoined
&& muteJoined->checked() != joinMuted) {
SaveCallJoinMuted(channel, id, muteJoined->checked());
}
}, box->lifetime());
box->addButton(tr::lng_box_done(), [=] {
box->closeBox();
});
}

View File

@ -11,9 +11,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace Calls {
class GroupCall;
void GroupCallSettingsBox(
not_null<Ui::GenericBox*> box,
Fn<void()> copyShareLink,
Fn<void()> discard);
not_null<GroupCall*> call);
} // namespace Calls

View File

@ -323,6 +323,20 @@ void Instance::showInfoPanel(not_null<GroupCall*> call) {
}
}
// Stores the chosen audio device id in the application settings and forwards
// the change to whichever call (one-on-one or group) is currently active.
void Instance::setCurrentAudioDevice(bool input, const QString &deviceId) {
	auto &settings = Core::App().settings();
	if (input) {
		settings.setCallInputDeviceId(deviceId);
	} else {
		settings.setCallOutputDeviceId(deviceId);
	}
	Core::App().saveSettingsDelayed();
	// At most one of the two call kinds is active at a time.
	if (const auto call = currentCall()) {
		call->setCurrentAudioDevice(input, deviceId);
	} else if (const auto group = currentGroupCall()) {
		group->setCurrentAudioDevice(input, deviceId);
	}
}
bool Instance::isQuitPrevent() {
if (!_currentCall || _currentCall->isIncomingWaiting()) {
return false;

View File

@ -55,6 +55,8 @@ public:
[[nodiscard]] rpl::producer<GroupCall*> currentGroupCallValue() const;
std::shared_ptr<tgcalls::VideoCaptureInterface> getVideoCapture() override;
void setCurrentAudioDevice(bool input, const QString &deviceId);
[[nodiscard]] bool isQuitPrevent();
private:

View File

@ -123,13 +123,17 @@ void GroupCall::applyUpdate(const MTPGroupCall &update) {
void GroupCall::applyCall(const MTPGroupCall &call, bool force) {
call.match([&](const MTPDgroupCall &data) {
const auto changed = (_version != data.vversion().v)
|| (_fullCount.current() != data.vparticipants_count().v);
|| (_fullCount.current() != data.vparticipants_count().v)
|| (_joinMuted != data.is_join_muted())
|| (_canChangeJoinMuted != data.is_can_change_join_muted());
if (!force && !changed) {
return;
} else if (!force && _version > data.vversion().v) {
reload();
return;
}
_joinMuted = data.is_join_muted();
_canChangeJoinMuted = data.is_can_change_join_muted();
_version = data.vversion().v;
_fullCount = data.vparticipants_count().v;
}, [&](const MTPDgroupCallDiscarded &data) {
@ -275,4 +279,16 @@ void GroupCall::applyUpdateChecked(
applyParticipantsSlice(update.vparticipants().v, true);
}
// Updates the cached join-muted flag only; no server request is sent here
// (the caller is expected to perform the MTP request itself).
void GroupCall::setJoinMutedLocally(bool muted) {
_joinMuted = muted;
}
// Whether newly joined participants start muted, as last seen from the server
// (or as set locally via setJoinMutedLocally).
bool GroupCall::joinMuted() const {
return _joinMuted;
}
// Whether the join-muted setting may be toggled for this call
// (server-provided flag, defaults to true until the first update).
bool GroupCall::canChangeJoinMuted() const {
return _canChangeJoinMuted;
}
} // namespace Data

View File

@ -54,6 +54,10 @@ public:
void reload();
void setJoinMutedLocally(bool muted);
[[nodiscard]] bool joinMuted() const;
[[nodiscard]] bool canChangeJoinMuted() const;
private:
void applyCall(const MTPGroupCall &call, bool force);
void applyParticipantsSlice(
@ -78,6 +82,8 @@ private:
rpl::event_stream<ParticipantUpdate> _participantUpdates;
rpl::event_stream<> _participantsSliceAdded;
bool _joinMuted = false;
bool _canChangeJoinMuted = true;
bool _allReceived = false;
};

View File

@ -952,13 +952,14 @@ void MainWidget::setCurrentCall(Calls::Call *call) {
_currentCallLifetime.destroy();
_currentCall = call;
if (_currentCall) {
_callTopBar.destroy();
_currentCall->stateValue(
) | rpl::start_with_next([=](Calls::Call::State state) {
using State = Calls::Call::State;
if (state == State::Established) {
createCallTopBar();
} else {
if (state != State::Established) {
destroyCallTopBar();
} else if (!_callTopBar) {
createCallTopBar();
}
}, _currentCallLifetime);
} else {
@ -973,13 +974,14 @@ void MainWidget::setCurrentGroupCall(Calls::GroupCall *call) {
_currentCallLifetime.destroy();
_currentGroupCall = call;
if (_currentGroupCall) {
_callTopBar.destroy();
_currentGroupCall->stateValue(
) | rpl::start_with_next([=](Calls::GroupCall::State state) {
using State = Calls::GroupCall::State;
if (state == State::Joined || state == State::Connecting) {
createCallTopBar();
} else {
if (state != State::Joined && state != State::Connecting) {
destroyCallTopBar();
} else if (!_callTopBar) {
createCallTopBar();
}
}, _currentCallLifetime);
} else {

View File

@ -35,12 +35,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "styles/style_layers.h"
namespace Settings {
namespace {
constexpr auto kMicTestUpdateInterval = crl::time(100);
constexpr auto kMicTestAnimationDuration = crl::time(200);
} // namespace
Calls::Calls(
QWidget *parent,
@ -51,11 +45,7 @@ Calls::Calls(
requestPermissionAndStartTestingMicrophone();
}
Calls::~Calls() {
if (_needWriteSettings) {
Core::App().saveSettingsDelayed();
}
}
Calls::~Calls() = default;
void Calls::sectionSaveChanges(FnMut<void()> done) {
if (_micTester) {
@ -66,37 +56,8 @@ void Calls::sectionSaveChanges(FnMut<void()> done) {
void Calls::setupContent() {
const auto content = Ui::CreateChild<Ui::VerticalLayout>(this);
const auto getId = [](const auto &device) {
return device.id;
};
const auto getName = [](const auto &device) {
return device.name;
};
const auto &settings = Core::App().settings();
const auto currentOutputName = [&] {
const auto list = Webrtc::GetAudioOutputList();
const auto i = ranges::find(
list,
settings.callOutputDeviceId(),
getId);
return (i != end(list))
? getName(*i)
: tr::lng_settings_call_device_default(tr::now);
}();
const auto currentInputName = [&] {
const auto list = Webrtc::GetAudioInputList();
const auto i = ranges::find(
list,
settings.callInputDeviceId(),
getId);
return (i != end(list))
? getName(*i)
: tr::lng_settings_call_device_default(tr::now);
}();
const auto cameras = Webrtc::GetVideoInputList();
if (!cameras.empty()) {
const auto hasCall = (Core::App().calls().currentCall() != nullptr);
@ -114,9 +75,9 @@ void Calls::setupContent() {
const auto i = ranges::find(
cameras,
settings.callVideoInputDeviceId(),
getId);
&Webrtc::VideoInput::id);
return (i != end(cameras))
? getName(*i)
? i->name
: tr::lng_settings_call_device_default(tr::now);
}();
@ -135,12 +96,12 @@ void Calls::setupContent() {
const auto &devices = Webrtc::GetVideoInputList();
const auto options = ranges::view::concat(
ranges::view::single(tr::lng_settings_call_device_default(tr::now)),
devices | ranges::view::transform(getName)
devices | ranges::view::transform(&Webrtc::VideoInput::name)
) | ranges::to_vector;
const auto i = ranges::find(
devices,
Core::App().settings().callVideoInputDeviceId(),
getId);
&Webrtc::VideoInput::id);
const auto currentOption = (i != end(devices))
? int(i - begin(devices) + 1)
: 0;
@ -216,73 +177,19 @@ void Calls::setupContent() {
content,
tr::lng_settings_call_output_device(),
rpl::single(
currentOutputName
CurrentAudioOutputName()
) | rpl::then(
_outputNameStream.events()
),
st::settingsButton
)->addClickHandler([=] {
const auto &devices = Webrtc::GetAudioOutputList();
const auto options = ranges::view::concat(
ranges::view::single(tr::lng_settings_call_device_default(tr::now)),
devices | ranges::view::transform(getName)
) | ranges::to_vector;
const auto i = ranges::find(
devices,
Core::App().settings().callOutputDeviceId(),
getId);
const auto currentOption = (i != end(devices))
? int(i - begin(devices) + 1)
: 0;
const auto save = crl::guard(this, [=](int option) {
_outputNameStream.fire_copy(options[option]);
const auto deviceId = option
? devices[option - 1].id
: "default";
Core::App().settings().setCallOutputDeviceId(deviceId);
Core::App().saveSettingsDelayed();
if (const auto call = Core::App().calls().currentCall()) {
call->setCurrentAudioDevice(false, deviceId);
}
});
Ui::show(Box<SingleChoiceBox>(
tr::lng_settings_call_output_device(),
options,
currentOption,
save));
Ui::show(ChooseAudioOutputBox(crl::guard(this, [=](
const QString &id,
const QString &name) {
_outputNameStream.fire_copy(name);
})));
});
//const auto outputLabel = content->add(
// object_ptr<Ui::LabelSimple>(
// content,
// st::settingsAudioVolumeLabel),
// st::settingsAudioVolumeLabelPadding);
//const auto outputSlider = content->add(
// object_ptr<Ui::MediaSlider>(
// content,
// st::settingsAudioVolumeSlider),
// st::settingsAudioVolumeSliderPadding);
//const auto updateOutputLabel = [=](int value) {
// const auto percent = QString::number(value);
// outputLabel->setText(
// tr::lng_settings_call_output_volume(tr::now, lt_percent, percent));
//};
//const auto updateOutputVolume = [=](int value) {
// _needWriteSettings = true;
// updateOutputLabel(value);
// Core::App().settings().setCallOutputVolume(value);
// if (const auto call = Core::App().calls().currentCall()) {
// call->setAudioVolume(false, value / 100.0f);
// }
//};
//outputSlider->resize(st::settingsAudioVolumeSlider.seekSize);
//outputSlider->setPseudoDiscrete(
// 101,
// [](int val) { return val; },
// settings.callOutputVolume(),
// updateOutputVolume);
//updateOutputLabel(Core::App().settings().callOutputVolume());
AddSkip(content);
AddDivider(content);
AddSkip(content);
@ -291,91 +198,22 @@ void Calls::setupContent() {
content,
tr::lng_settings_call_input_device(),
rpl::single(
currentInputName
CurrentAudioInputName()
) | rpl::then(
_inputNameStream.events()
),
st::settingsButton
)->addClickHandler([=] {
const auto devices = Webrtc::GetAudioInputList();
const auto options = ranges::view::concat(
ranges::view::single(tr::lng_settings_call_device_default(tr::now)),
devices | ranges::view::transform(getName)
) | ranges::to_vector;
const auto i = ranges::find(
devices,
Core::App().settings().callInputDeviceId(),
getId);
const auto currentOption = (i != end(devices))
? int(i - begin(devices) + 1)
: 0;
const auto save = crl::guard(this, [=](int option) {
_inputNameStream.fire_copy(options[option]);
const auto deviceId = option
? devices[option - 1].id
: "default";
Core::App().settings().setCallInputDeviceId(deviceId);
Core::App().saveSettingsDelayed();
Ui::show(ChooseAudioOutputBox(crl::guard(this, [=](
const QString &id,
const QString &name) {
_inputNameStream.fire_copy(name);
if (_micTester) {
_micTester->setDeviceId(deviceId);
_micTester->setDeviceId(id);
}
if (const auto call = Core::App().calls().currentCall()) {
call->setCurrentAudioDevice(true, deviceId);
}
});
Ui::show(Box<SingleChoiceBox>(
tr::lng_settings_call_input_device(),
options,
currentOption,
save));
})));
});
//const auto inputLabel = content->add(
// object_ptr<Ui::LabelSimple>(
// content,
// st::settingsAudioVolumeLabel),
// st::settingsAudioVolumeLabelPadding);
//const auto inputSlider = content->add(
// object_ptr<Ui::MediaSlider>(
// content,
// st::settingsAudioVolumeSlider),
// st::settingsAudioVolumeSliderPadding);
//const auto updateInputLabel = [=](int value) {
// const auto percent = QString::number(value);
// inputLabel->setText(
// tr::lng_settings_call_input_volume(tr::now, lt_percent, percent));
//};
//const auto updateInputVolume = [=](int value) {
// _needWriteSettings = true;
// updateInputLabel(value);
// Core::App().settings().setCallInputVolume(value);
// if (const auto call = Core::App().calls().currentCall()) {
// call->setAudioVolume(true, value / 100.0f);
// }
//};
//inputSlider->resize(st::settingsAudioVolumeSlider.seekSize);
//inputSlider->setPseudoDiscrete(101,
// [](int val) { return val; },
// settings.callInputVolume(),
// updateInputVolume);
//updateInputLabel(settings.callInputVolume());
//AddButton(
// content,
// rpl::single(
// tr::lng_settings_call_test_mic(tr::now)
// ) | rpl::then(
// _micTestTextStream.events()
// ),
// st::settingsButton
//)->addClickHandler([=] {
// if (!_micTester) {
// requestPermissionAndStartTestingMicrophone();
// } else {
// stopTestingMicrophone();
// }
//});
_micTestLevel = content->add(
object_ptr<Ui::LevelMeter>(
content,
@ -461,22 +299,88 @@ void Calls::requestPermissionAndStartTestingMicrophone() {
}
void Calls::startTestingMicrophone() {
//_micTestTextStream.fire(tr::lng_settings_call_stop_mic_test(tr::now));
_levelUpdateTimer.callEach(kMicTestUpdateInterval);
_micTester = std::make_unique<Webrtc::AudioInputTester>(
Core::App().settings().callInputDeviceId());
//if (_micTester->Failed()) {
// stopTestingMicrophone();
// Ui::show(Box<InformBox>(tr::lng_call_error_audio_io(tr::now)));
//}
}
//void Calls::stopTestingMicrophone() {
// _micTestTextStream.fire(tr::lng_settings_call_test_mic(tr::now));
// _levelUpdateTimer.cancel();
// _micTester.reset();
// _micTestLevel->setValue(0.0f);
//}
// Returns the display name of the configured audio output device, or the
// localized "Default" label when the saved id is not in the current list.
QString CurrentAudioOutputName() {
	const auto devices = Webrtc::GetAudioOutputList();
	const auto found = ranges::find(
		devices,
		Core::App().settings().callOutputDeviceId(),
		&Webrtc::AudioOutput::id);
	if (found != end(devices)) {
		return found->name;
	}
	return tr::lng_settings_call_device_default(tr::now);
}
// Returns the display name of the configured audio input device, or the
// localized "Default" label when the saved id is not in the current list.
QString CurrentAudioInputName() {
	const auto devices = Webrtc::GetAudioInputList();
	const auto found = ranges::find(
		devices,
		Core::App().settings().callInputDeviceId(),
		&Webrtc::AudioInput::id);
	if (found != end(devices)) {
		return found->name;
	}
	return tr::lng_settings_call_device_default(tr::now);
}
// Builds a single-choice box listing all audio output devices, with the
// localized "Default" option first. On selection the device is saved through
// Core::App().calls() and |chosen| is notified with the id and display name.
object_ptr<SingleChoiceBox> ChooseAudioOutputBox(
		Fn<void(QString id, QString name)> chosen) {
	const auto &devices = Webrtc::GetAudioOutputList();
	auto options = std::vector<QString>{
		tr::lng_settings_call_device_default(tr::now),
	};
	for (const auto &device : devices) {
		options.push_back(device.name);
	}
	const auto current = ranges::find(
		devices,
		Core::App().settings().callOutputDeviceId(),
		&Webrtc::AudioOutput::id);
	// Option 0 is "Default"; device k maps to option k + 1.
	const auto currentOption = (current == end(devices))
		? 0
		: int(current - begin(devices) + 1);
	const auto save = [=](int option) {
		const auto deviceId = option
			? devices[option - 1].id
			: "default";
		Core::App().calls().setCurrentAudioDevice(false, deviceId);
		chosen(deviceId, options[option]);
	};
	return Box<SingleChoiceBox>(
		tr::lng_settings_call_output_device(),
		options,
		currentOption,
		save);
}
// Builds a single-choice box listing all audio input devices, with the
// localized "Default" option first. On selection the device is saved through
// Core::App().calls() and |chosen| is notified with the id and display name.
object_ptr<SingleChoiceBox> ChooseAudioInputBox(
		Fn<void(QString id, QString name)> chosen) {
	const auto devices = Webrtc::GetAudioInputList();
	auto options = std::vector<QString>{
		tr::lng_settings_call_device_default(tr::now),
	};
	for (const auto &device : devices) {
		options.push_back(device.name);
	}
	const auto current = ranges::find(
		devices,
		Core::App().settings().callInputDeviceId(),
		&Webrtc::AudioInput::id);
	// Option 0 is "Default"; device k maps to option k + 1.
	const auto currentOption = (current == end(devices))
		? 0
		: int(current - begin(devices) + 1);
	const auto save = [=](int option) {
		const auto deviceId = option
			? devices[option - 1].id
			: "default";
		Core::App().calls().setCurrentAudioDevice(true, deviceId);
		chosen(deviceId, options[option]);
	};
	return Box<SingleChoiceBox>(
		tr::lng_settings_call_input_device(),
		options,
		currentOption,
		save);
}
} // namespace Settings

View File

@ -23,6 +23,8 @@ namespace Webrtc {
class AudioInputTester;
} // namespace Webrtc
class SingleChoiceBox;
namespace Settings {
class Calls : public Section {
@ -36,14 +38,11 @@ private:
void setupContent();
void requestPermissionAndStartTestingMicrophone();
void startTestingMicrophone();
//void stopTestingMicrophone();
const not_null<Window::SessionController*> _controller;
rpl::event_stream<QString> _cameraNameStream;
rpl::event_stream<QString> _outputNameStream;
rpl::event_stream<QString> _inputNameStream;
//rpl::event_stream<QString> _micTestTextStream;
bool _needWriteSettings = false;
std::unique_ptr<Webrtc::AudioInputTester> _micTester;
Ui::LevelMeter *_micTestLevel = nullptr;
float _micLevel = 0.;
@ -52,5 +51,15 @@ private:
};
inline constexpr auto kMicTestUpdateInterval = crl::time(100);
inline constexpr auto kMicTestAnimationDuration = crl::time(200);
[[nodiscard]] QString CurrentAudioOutputName();
[[nodiscard]] QString CurrentAudioInputName();
[[nodiscard]] object_ptr<SingleChoiceBox> ChooseAudioOutputBox(
Fn<void(QString id, QString name)> chosen);
[[nodiscard]] object_ptr<SingleChoiceBox> ChooseAudioInputBox(
Fn<void(QString id, QString name)> chosen);
} // namespace Settings