Compare commits

...

5 Commits

Author SHA1 Message Date
Tobias Fella
c65183f93a More work 2023-06-14 20:16:05 +02:00
Tobias Fella
39ee17cfa1 ? 2023-06-13 23:32:57 +02:00
Tobias Fella
12bb75e5b0 Fix various problems 2023-06-13 16:28:19 +02:00
Tobias Fella
481f12337a Always start calls with camera disabled
Simplifies the code quite a bit
WIP: Renegotiate to enable camera
2023-06-13 15:45:30 +02:00
Tobias Fella
5e533b8e03 Implement voice & video calls 2023-06-13 15:45:30 +02:00
41 changed files with 3462 additions and 13 deletions

View File

@@ -145,6 +145,14 @@ if(ANDROID)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/android/version.gradle.in ${CMAKE_BINARY_DIR}/version.gradle)
endif()
include(FindPkgConfig)
# Both GStreamer modules must require the same minimum version; the sdp module
# previously used ">" (strictly greater) instead of ">=1.18".
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18)
if (TARGET PkgConfig::GSTREAMER)
add_feature_info(voip ON "GStreamer found. Call support is enabled.")
else()
add_feature_info(voip OFF "GStreamer not found. Call support is disabled.")
endif()
ki18n_install(po)
install(FILES org.kde.neochat.desktop DESTINATION ${KDE_INSTALL_APPDIR})

View File

@@ -65,6 +65,12 @@ ecm_qt_declare_logging_category(neochat
DEFAULT_SEVERITY Info
)
# Dedicated logging category for the call/VoIP code: qCDebug(voip) etc.,
# surfaced at runtime as "org.kde.neochat.voip".
ecm_qt_declare_logging_category(neochat
HEADER "voip_logging.h"
IDENTIFIER "voip"
CATEGORY_NAME "org.kde.neochat.voip"
)
add_executable(neochat-app
main.cpp
res.qrc
@@ -111,6 +117,20 @@ endif()
target_include_directories(neochat PRIVATE ${CMAKE_BINARY_DIR})
target_link_libraries(neochat PUBLIC Qt::Core Qt::Quick Qt::Qml Qt::Gui Qt::Multimedia Qt::Network Qt::QuickControls2 KF${QT_MAJOR_VERSION}::I18n KF${QT_MAJOR_VERSION}::Kirigami2 KF${QT_MAJOR_VERSION}::Notifications KF${QT_MAJOR_VERSION}::ConfigCore KF${QT_MAJOR_VERSION}::ConfigGui KF${QT_MAJOR_VERSION}::CoreAddons KF${QT_MAJOR_VERSION}::SonnetCore KF${QT_MAJOR_VERSION}::ItemModels Quotient${QUOTIENT_SUFFIX} cmark::cmark ${QTKEYCHAIN_LIBRARIES} QCoro::Core)
# Call support is optional: only compile and link the GStreamer-based call
# sources when the pkg-config target was found earlier.
if (TARGET PkgConfig::GSTREAMER)
target_link_libraries(neochat PUBLIC PkgConfig::GSTREAMER)
target_sources(neochat PRIVATE
call/callmanager.cpp
call/callsession.cpp
call/audiosources.cpp
call/videosources.cpp
call/devicemonitor.cpp
models/callparticipantsmodel.cpp
call/callparticipant.cpp
)
# Lets C++ code guard call paths with #ifdef GSTREAMER_AVAILABLE.
target_compile_definitions(neochat PUBLIC GSTREAMER_AVAILABLE)
endif()
kconfig_add_kcfg_files(neochat GENERATE_MOC neochatconfig.kcfgc)
if(NEOCHAT_FLATPAK)

99
src/call/audiosources.cpp Normal file
View File

@@ -0,0 +1,99 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "audiosources.h"
#include <gst/gst.h>
#include <QDebug>
#include <QString>
#include "devicemonitor.h"
#include "neochatconfig.h"
// Flat list model: one row per audio source currently known to the monitor.
int AudioSources::rowCount(const QModelIndex &parent) const
{
Q_UNUSED(parent);
return DeviceMonitor::instance().audioSources().size();
}
/// Returns the display data for a device row.
/// Fixed: invalid or out-of-range indices previously produced the literal
/// string "DEADBEEF", which could leak into the UI; also reject negative
/// rows and invalid indices explicitly.
QVariant AudioSources::data(const QModelIndex &index, int role) const
{
    if (!index.isValid() || index.row() < 0 || index.row() >= DeviceMonitor::instance().audioSources().size()) {
        return {};
    }
    if (role == TitleRole) {
        return DeviceMonitor::instance().audioSources()[index.row()]->title;
    }
    return {};
}
// Role mapping for QML: only the device title is exposed.
QHash<int, QByteArray> AudioSources::roleNames() const
{
    QHash<int, QByteArray> roles;
    roles.insert(TitleRole, QByteArrayLiteral("title"));
    return roles;
}
// Rebuild the model whenever the monitor's device list changes; both the
// added and the removed case are handled identically, so share one handler.
AudioSources::AudioSources()
    : QAbstractListModel()
{
    const auto refresh = [this]() {
        beginResetModel();
        endResetModel();
        // The persisted selection may map to a different row (or none) now.
        Q_EMIT currentIndexChanged();
    };
    connect(&DeviceMonitor::instance(), &DeviceMonitor::audioSourceAdded, this, refresh);
    connect(&DeviceMonitor::instance(), &DeviceMonitor::audioSourceRemoved, this, refresh);
}
/// Returns the GstDevice matching the configured microphone name, falling
/// back to the first available source.
/// Fixed: previously dereferenced audioSources()[0] unconditionally, which
/// crashes when no audio source is present; now returns nullptr instead.
GstDevice *AudioSources::currentDevice() const
{
    const auto &sources = DeviceMonitor::instance().audioSources();
    if (sources.isEmpty()) {
        return nullptr;
    }
    const QString name = NeoChatConfig::self()->microphone();
    for (const auto &audioSource : sources) {
        if (audioSource->title == name) {
            qDebug() << "WebRTC: microphone:" << name;
            return audioSource->device;
        }
    }
    // Configured microphone not found; use the first available one.
    return sources[0]->device;
}
/// Persists the device at @p index as the configured microphone.
/// Fixed: only an empty list was rejected before; an out-of-range or
/// negative index (QML briefly hands out -1 during model resets) would
/// index past the end of the list.
void AudioSources::setCurrentIndex(int index) const
{
    const auto &sources = DeviceMonitor::instance().audioSources();
    if (index < 0 || index >= sources.size()) {
        return;
    }
    NeoChatConfig::setMicrophone(sources[index]->title);
    NeoChatConfig::self()->save();
}
int AudioSources::currentIndex() const
{
const auto config = NeoChatConfig::self();
const QString name = config->microphone();
if (name.isEmpty()) {
return getDefaultDeviceIndex();
}
for (auto i = 0; i < DeviceMonitor::instance().audioSources().size(); i++) {
if (DeviceMonitor::instance().audioSources()[i]->title == name) {
return i;
}
}
return 0;
}
// Row of the source flagged as system default, or 0 when none is flagged.
int AudioSources::getDefaultDeviceIndex() const
{
    int row = 0;
    for (const auto &source : DeviceMonitor::instance().audioSources()) {
        if (source->isDefault) {
            return row;
        }
        ++row;
    }
    return 0;
}

41
src/call/audiosources.h Normal file
View File

@@ -0,0 +1,41 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QtCore/QAbstractListModel>
#include <gst/gst.h>
// List model of the audio capture devices reported by DeviceMonitor.
// Exposed as a process-wide singleton; the selected microphone is persisted
// through NeoChatConfig and surfaced to QML via the currentIndex property.
class AudioSources : public QAbstractListModel
{
Q_OBJECT
Q_PROPERTY(int currentIndex READ currentIndex WRITE setCurrentIndex NOTIFY currentIndexChanged)
public:
enum Roles {
TitleRole = Qt::UserRole + 1, // human-readable device title
};
// Meyers singleton: constructed on first use, never destroyed explicitly.
static AudioSources &instance()
{
static AudioSources _instance;
return _instance;
}
int rowCount(const QModelIndex &parent = QModelIndex()) const override;
QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
QHash<int, QByteArray> roleNames() const override;
// The GstDevice for the configured microphone (used when building pipelines).
GstDevice *currentDevice() const;
void setCurrentIndex(int index) const;
int currentIndex() const;
Q_SIGNALS:
void currentIndexChanged();
private:
AudioSources();
// Row of the device flagged as system default; used when nothing is configured.
int getDefaultDeviceIndex() const;
};

199
src/call/calldevices.cpp Normal file
View File

@@ -0,0 +1,199 @@
// SPDX-FileCopyrightText: 2021 Nheko Contributors
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-FileCopyrightText: 2021 Carl Schwan <carl@carlschwan.eu>
//
// SPDX-License-Identifier: GPL-3.0-or-later
#include "calldevices.h"
#include "audiodevicesmodel.h"
#include "neochatconfig.h"
#include "videodevicesmodel.h"
#include <QStringView>
#include <cstring>
#include <optional>
#include "voiplogging.h"
#ifdef GSTREAMER_AVAILABLE
extern "C" {
#include "gst/gst.h"
}
#endif
#ifdef GSTREAMER_AVAILABLE
// Owns the two per-kind device models and starts the GStreamer device
// monitor (via init()) so hot-plug events are delivered from construction on.
CallDevices::CallDevices()
: QObject()
, m_audioDevicesModel(new AudioDevicesModel(this))
, m_videoDevicesModel(new VideoDevicesModel(this))
{
init();
}
// Accessor for the audio device model (ownership stays with CallDevices).
AudioDevicesModel *CallDevices::audioDevicesModel() const
{
return m_audioDevicesModel;
}
// Accessor for the video device model (ownership stays with CallDevices).
VideoDevicesModel *CallDevices::videoDevicesModel() const
{
return m_videoDevicesModel;
}
// Route a newly discovered GStreamer device to the matching model, keyed on
// the device class string ("Video/..." vs anything else = audio).
void CallDevices::addDevice(GstDevice *device)
{
    if (!device) {
        return;
    }
    gchar *deviceClass = gst_device_get_device_class(device);
    const bool isVideo = std::strncmp(deviceClass, "Video", 5) == 0;
    g_free(deviceClass);
    if (isVideo) {
        m_videoDevicesModel->addDevice(device);
        m_videoDevicesModel->setDefaultDevice();
        return;
    }
    m_audioDevicesModel->addDevice(device);
    m_audioDevicesModel->setDefaultDevice();
}
// Remove a device from whichever model holds it. The audio model is asked
// first; the video model is only consulted when the audio model did not own
// the device (preserves the original short-circuit order).
void CallDevices::removeDevice(GstDevice *device, bool changed)
{
    if (!device) {
        return;
    }
    if (m_audioDevicesModel->removeDevice(device, changed)) {
        return;
    }
    m_videoDevicesModel->removeDevice(device, changed);
}
namespace
{
// GStreamer bus callback for the device monitor: forwards device hot-plug
// messages to the CallDevices singleton. Returning true keeps the bus watch
// installed.
gboolean newBusMessage(GstBus *bus, GstMessage *msg, gpointer user_data)
{
Q_UNUSED(bus)
Q_UNUSED(user_data)
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_DEVICE_ADDED: {
GstDevice *device;
gst_message_parse_device_added(msg, &device);
CallDevices::instance().addDevice(device);
Q_EMIT CallDevices::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_REMOVED: {
GstDevice *device;
gst_message_parse_device_removed(msg, &device);
CallDevices::instance().removeDevice(device, false);
Q_EMIT CallDevices::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_CHANGED: {
GstDevice *device;
GstDevice *oldDevice;
gst_message_parse_device_changed(msg, &device, &oldDevice);
// A change is modelled as remove-old (changed=true) + add-new.
CallDevices::instance().removeDevice(oldDevice, true);
CallDevices::instance().addDevice(device);
Q_EMIT CallDevices::instance().devicesChanged();
break;
}
default:
break;
}
return true;
}
}
// Starts a process-wide GstDeviceMonitor filtered to raw audio/video
// sources (and duplex devices) and hooks its bus up to newBusMessage().
// The function-local static guards against double initialisation; the
// monitor intentionally lives for the whole process.
void CallDevices::init()
{
static GstDeviceMonitor *monitor = nullptr;
if (!monitor) {
monitor = gst_device_monitor_new();
Q_ASSERT(monitor);
GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
gst_device_monitor_add_filter(monitor, "Audio/Duplex", caps);
gst_caps_unref(caps);
caps = gst_caps_new_empty_simple("video/x-raw");
gst_device_monitor_add_filter(monitor, "Video/Source", caps);
gst_device_monitor_add_filter(monitor, "Video/Duplex", caps);
gst_caps_unref(caps);
GstBus *bus = gst_device_monitor_get_bus(monitor);
gst_bus_add_watch(bus, newBusMessage, nullptr);
// The watch holds its own reference; drop ours.
gst_object_unref(bus);
if (!gst_device_monitor_start(monitor)) {
qCCritical(voip) << "Failed to start device monitor";
return;
} else {
qCDebug(voip) << "Device monitor started";
}
}
}
// True when at least one audio capture device is known.
bool CallDevices::hasMicrophone() const
{
return m_audioDevicesModel->hasMicrophone();
}
// True when at least one video capture device is known.
bool CallDevices::hasCamera() const
{
return m_videoDevicesModel->hasCamera();
}
// Resolutions supported by the named camera, delegated to the video model.
QStringList CallDevices::resolutions(const QString &cameraName) const
{
return m_videoDevicesModel->resolutions(cameraName);
}
// Frame rates the named camera supports at the given resolution; empty when
// the camera is unknown or the resolution isn't offered.
QStringList CallDevices::frameRates(const QString &cameraName, const QString &resolution) const
{
    const auto source = m_videoDevicesModel->getVideoSource(cameraName);
    if (!source) {
        return {};
    }
    for (const auto &cap : source->caps) {
        if (cap.resolution == resolution) {
            return cap.frameRates;
        }
    }
    return {};
}
// Currently selected audio capture device (may be used to build pipelines).
GstDevice *CallDevices::audioDevice() const
{
return m_audioDevicesModel->currentDevice();
}
// Currently selected camera; fills in the chosen resolution and frame rate.
GstDevice *CallDevices::videoDevice(QPair<int, int> &resolution, QPair<int, int> &frameRate) const
{
return m_videoDevicesModel->currentDevice(resolution, frameRate);
}
#else
// GStreamer-less build: no devices are ever available.
bool CallDevices::hasMicrophone() const
{
return false;
}
// GStreamer-less build: no devices are ever available.
bool CallDevices::hasCamera() const
{
return false;
}
// GStreamer-less build: no device names to report.
QStringList CallDevices::names(bool, const QString &) const
{
return {};
}
// GStreamer-less build: no resolutions to report.
QStringList CallDevices::resolutions(const QString &) const
{
return {};
}
// GStreamer-less build: no frame rates to report.
QStringList CallDevices::frameRates(const QString &, const QString &) const
{
return {};
}
#endif

64
src/call/calldevices.h Normal file
View File

@@ -0,0 +1,64 @@
// SPDX-FileCopyrightText: 2021 Contributors
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-FileCopyrightText: 2021 Carl Schwan <carl@carlschwan.eu>
//
// SPDX-License-Identifier: GPL-3.0-or-later
#pragma once
#include <string>
#include <utility>
#include <vector>
#include <QObject>
typedef struct _GstDevice GstDevice;
class CallDevices;
class AudioDevicesModel;
class VideoDevicesModel;
// Singleton hub for call-capable capture devices. Wraps a GstDeviceMonitor
// (when GStreamer is available) and fans device hot-plug events out to an
// audio and a video model; CallSession pulls the selected GstDevice handles
// through the private audioDevice()/videoDevice() accessors.
class CallDevices : public QObject
{
Q_OBJECT
Q_PROPERTY(AudioDevicesModel *audioDevices READ audioDevicesModel CONSTANT);
Q_PROPERTY(VideoDevicesModel *videoDevices READ videoDevicesModel CONSTANT);
public:
// Meyers singleton; copy and assignment are deleted below.
static CallDevices &instance()
{
static CallDevices instance;
return instance;
}
CallDevices(CallDevices const &) = delete;
void operator=(CallDevices const &) = delete;
bool hasMicrophone() const;
bool hasCamera() const;
QStringList names(bool isVideo, const QString &defaultDevice) const;
QStringList resolutions(const QString &cameraName) const;
// Frame rates offered by the camera at a given resolution string.
QStringList frameRates(const QString &cameraName, const QString &resolution) const;
AudioDevicesModel *audioDevicesModel() const;
VideoDevicesModel *videoDevicesModel() const;
// Called from the GStreamer bus watch on hot-plug events.
void addDevice(GstDevice *device);
void removeDevice(GstDevice *device, bool changed);
Q_SIGNALS:
// Emitted whenever any device is added, removed, or changed.
void devicesChanged();
private:
CallDevices();
void init();
// Raw device handles for the current selection; only the call machinery
// (friends below) may access them.
GstDevice *audioDevice() const;
GstDevice *videoDevice(QPair<int, int> &resolution, QPair<int, int> &frameRate) const;
AudioDevicesModel *m_audioDevicesModel;
VideoDevicesModel *m_videoDevicesModel;
friend class CallSession;
friend class Audio;
};

617
src/call/callmanager.cpp Normal file
View File

@@ -0,0 +1,617 @@
// SPDX-FileCopyrightText: 2020-2021 Nheko Authors
// SPDX-FileCopyrightText: 2021-2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-3.0-or-later
#include "callmanager.h"
#include "controller.h"
#include <gst/gst.h>
#include "voiplogging.h"
#include <KLocalizedString>
#include <QDateTime>
#include <QMediaPlaylist>
#include <QMimeDatabase>
#include <qcoro/qcorosignal.h>
#include <qt_connection_util.h>
#include "neochatconfig.h"
#define CALL_VERSION "1"
// Initialises GStreamer (init()) and refreshes the cached TURN servers
// whenever the active Matrix connection changes.
CallManager::CallManager()
{
init();
connect(&Controller::instance(), &Controller::activeConnectionChanged, this, [this] {
updateTurnServers();
});
}
// Coroutine: fetches TURN servers from the homeserver and caches them as
// "scheme://user:pass@host" URIs until the server-provided TTL expires.
QCoro::Task<void> CallManager::updateTurnServers()
{
// Cache still fresh; nothing to do.
if (m_cachedTurnUrisValidUntil > QDateTime::currentDateTime()) {
co_return;
}
Controller::instance().activeConnection()->getTurnServers();
// Suspend until the connection reports the server response.
auto servers = co_await qCoro(Controller::instance().activeConnection(), &Connection::turnServersChanged);
m_cachedTurnUrisValidUntil = QDateTime::currentDateTime().addSecs(servers["ttl"].toInt());
const auto password = servers["password"].toString();
const auto username = servers["username"].toString();
const auto uris = servers["uris"].toArray();
m_cachedTurnUris.clear();
for (const auto &u : uris) {
QString uri = u.toString();
auto c = uri.indexOf(':');
if (c == -1) {
qCWarning(voip) << "Invalid TURN URI:" << uri;
continue;
}
QString scheme = uri.left(c);
if (scheme != "turn" && scheme != "turns") {
qCWarning(voip) << "Invalid TURN scheme:" << scheme;
continue;
}
// Credentials must be percent-encoded before being embedded in the URI.
m_cachedTurnUris += QStringLiteral("%1://%2:%3@%4").arg(scheme, QUrl::toPercentEncoding(username), QUrl::toPercentEncoding(password), uri.mid(c + 1));
}
}
// Matrix call id of the current call; empty when no call id was set yet.
QString CallManager::callId() const
{
return m_callId;
}
// Dispatches an incoming m.call.* room event to the matching handler.
void CallManager::handleCallEvent(NeoChatRoom *room, const Quotient::RoomEvent *event)
{
if (const auto &inviteEvent = eventCast<const CallInviteEvent>(event)) {
handleInvite(room, inviteEvent);
} else if (const auto &hangupEvent = eventCast<const CallHangupEvent>(event)) {
handleHangup(room, hangupEvent);
} else if (const auto &candidatesEvent = eventCast<const CallCandidatesEvent>(event)) {
handleCandidates(room, candidatesEvent);
} else if (const auto &answerEvent = eventCast<const CallAnswerEvent>(event)) {
handleAnswer(room, answerEvent);
} else if (const auto &negotiateEvent = eventCast<const CallNegotiateEvent>(event)) {
handleNegotiate(room, negotiateEvent);
}
}
// Starts the media session once BOTH the remote SDP answer and candidates
// have arrived (candidates may alternatively be embedded in the SDP itself).
void CallManager::checkStartCall()
{
if ((m_incomingCandidates.isEmpty() && !m_incomingSdp.contains("candidates"_ls)) || m_incomingSdp.isEmpty()) {
qCDebug(voip) << "Not ready to start this call yet";
return;
}
m_session->acceptAnswer(m_incomingSdp, m_incomingCandidates, m_remoteUser->id());
// Hand-off complete; the buffered negotiation data is no longer needed.
m_incomingCandidates.clear();
m_incomingSdp.clear();
setGlobalState(ACTIVE);
}
// Processes an m.call.answer for our outgoing call: filters echoes and
// answers accepted on other devices, then stores the remote SDP and tries
// to start the session.
void CallManager::handleAnswer(NeoChatRoom *room, const Quotient::CallAnswerEvent *event)
{
if (globalState() != OUTGOING) {
qCDebug(voip) << "Ignoring answer while in state" << globalState();
return;
}
if (event->callId() != m_callId) {
qCDebug(voip) << "Ignoring answer for unknown call id" << event->callId() << ". Our call id is" << m_callId;
return;
}
// Our own answer echoed back by the server (same sender AND same party id).
if (event->senderId() == room->localUser()->id() && partyId() == event->contentJson()["party_id"].toString()) {
qCDebug(voip) << "Ignoring echo for answer";
return;
}
if (event->senderId() == room->localUser()->id()) {
qCDebug(voip) << "Call was accepted on a different device";
// Show the user that call was accepted on a different device
// Stop ringing
return;
}
// TODO handle that MSC wrt to accepting on other devices
m_session->setMetadata(event->contentJson()["org.matrix.msc3077.sdp_stream_metadata"].toObject());
m_remotePartyId = event->contentJson()["party_id"].toString();
m_incomingSdp = event->sdp();
checkStartCall();
}
/// Processes an m.call.candidates event. While ACTIVE the candidates are fed
/// straight into the session; before that they are buffered until the call
/// starts. Fixed: the JSON-to-Candidate conversion was duplicated in both
/// branches — it is now shared via a local helper.
void CallManager::handleCandidates(NeoChatRoom *room, const Quotient::CallCandidatesEvent *event)
{
    // TODO what if candidates come before invite? this looks wrong
    if (globalState() == IDLE) {
        qCDebug(voip) << "Ignoring candidates in state" << globalState();
        return;
    }
    if (event->senderId() == room->localUser()->id()) {
        qCDebug(voip) << "Ignoring candidates sent by ourself";
        return;
    }
    // Shared conversion from the event's JSON payload to a Candidate value.
    const auto toCandidate = [](const QJsonValue &value) {
        const auto json = value.toObject();
        return Candidate{json["candidate"].toString(), json["sdpMLineIndex"].toInt(), json["sdpMid"].toString()};
    };
    if (globalState() == ACTIVE) {
        // Session already running: hand the candidates over immediately.
        QVector<Candidate> candidates;
        for (const auto &candidate : event->candidates()) {
            candidates += toCandidate(candidate);
        }
        m_session->acceptCandidates(candidates);
        return;
    }
    qCDebug(voip) << "Storing" << event->candidates().size() << "incoming candidates";
    for (const auto &candidate : event->candidates()) {
        m_incomingCandidates += toCandidate(candidate);
    }
    if (globalState() == OUTGOING) {
        // The answer may already be waiting for these candidates.
        checkStartCall();
    }
}
/// Processes an m.call.invite: records the offer and call identifiers, moves
/// to INCOMING, notifies the UI, and starts ringing for the invite lifetime.
/// Fixed: typo "Igoring" in the self-invite log message.
void CallManager::handleInvite(NeoChatRoom *room, const Quotient::CallInviteEvent *event)
{
    if (event->senderId() == room->localUser()->id()) {
        qCDebug(voip) << "Ignoring invite sent by ourself";
        return;
    }
    if (globalState() != IDLE) {
        // TODO handle glare
        qCDebug(voip) << "Ignoring invite while already in a call";
        return;
    }
    // Discard invites older than a minute. NOTE(review): assumes sender and
    // local clocks are roughly in sync — confirm acceptable skew.
    if (event->originTimestamp() < QDateTime::currentDateTime().addSecs(-60)) {
        qCDebug(voip) << "Ignoring outdated invite; sent at:" << event->originTimestamp() << "current:" << QDateTime::currentDateTime();
        return;
    }
    setGlobalState(INCOMING);
    m_incomingSdp = event->sdp();
    setRemoteUser(dynamic_cast<NeoChatUser *>(room->user(event->senderId())));
    setRoom(room);
    setCallId(event->callId());
    setPartyId(generatePartyId());
    m_remotePartyId = event->contentJson()["party_id"].toString();
    setLifetime(event->lifetime());
    Q_EMIT incomingCall(remoteUser(), room, event->lifetime(), callId());
    ring(event->lifetime());
}
// Processes an m.call.negotiate (mid-call renegotiation, e.g. enabling the
// camera). Rejects events that don't match the current call, party, or peer.
void CallManager::handleNegotiate(NeoChatRoom *room, const Quotient::CallNegotiateEvent *event)
{
Q_UNUSED(room);
if (event->callId() != m_callId) {
qCDebug(voip) << "Ignoring negotiate for unknown call id" << event->callId() << ". Our call id is" << m_callId;
return;
}
if (event->partyId() != m_remotePartyId) {
qCDebug(voip) << "Ignoring negotiate for unknown party id" << event->partyId() << ". Remote party id is" << m_remotePartyId;
return;
}
if (event->senderId() != m_remoteUser->id()) {
qCDebug(voip) << "Ignoring negotiate for unknown user id" << event->senderId() << ". Remote user id is" << m_remoteUser->id();
return;
}
// TODO DUPLICATES FFS
m_session->setMetadata(event->contentJson()["org.matrix.msc3077.sdp_stream_metadata"].toObject());
// The description type tells the session whether this is a fresh offer or
// the answer to an offer we sent.
m_session->renegotiateOffer(event->sdp(), m_remoteUser->id(), event->contentJson()["description"]["type"] == QStringLiteral("answer"));
}
// Loops the configured ringtone and schedules the ring timeout.
// NOTE(review): the singleShot below fires after `lifetime` ms regardless of
// whether the call was answered in the meantime, emitting callEnded() — and
// when the ringtone file is missing we return early, so NO timeout is
// scheduled at all. Confirm both behaviours are intended.
void CallManager::ring(int lifetime)
{
// TODO put a better default ringtone in the kcfg
// TODO which one? ship one? plasma-mobile-sounds?
if (!QFileInfo::exists(NeoChatConfig::ringtone())) {
qCWarning(voip) << "Ringtone file doesn't exist. Not audibly ringing";
return;
}
auto ringtone = QUrl::fromLocalFile(NeoChatConfig::ringtone());
m_playlist.setPlaybackMode(QMediaPlaylist::Loop);
m_playlist.clear();
m_ringPlayer.setPlaylist(&m_playlist);
m_playlist.addMedia(ringtone);
m_ringPlayer.play();
QTimer::singleShot(lifetime, this, [this]() {
stopRinging();
Q_EMIT callEnded();
});
}
// Stops ringtone playback; safe to call when nothing is playing.
void CallManager::stopRinging()
{
m_ringPlayer.stop();
}
// Processes an m.call.hangup from the peer: tears down the session and
// returns to IDLE. Our own hangups and unrelated call ids are ignored.
void CallManager::handleHangup(NeoChatRoom *room, const Quotient::CallHangupEvent *event)
{
if (globalState() == IDLE) {
qCDebug(voip) << "Ignoring hangup since we're not in a call";
return;
}
if (event->senderId() == room->localUser()->id()) {
qCDebug(voip) << "Ignoring hangup we sent ourselves";
// TODO hangup-to-decline by different device?
return;
}
if (event->callId() != m_callId) {
qCDebug(voip) << "Hangup not for this call. Event's call id:" << event->callId() << ". Our call id" << m_callId;
return;
}
stopRinging();
if (m_session) {
m_session->end();
// m_session is a QPointer, so it resets to null on delete — presumably
// why no explicit null assignment follows here (hangupCall() does one).
delete m_session;
}
setGlobalState(IDLE);
Q_EMIT callEnded();
}
/// Accepts the pending incoming call: creates a session from the buffered
/// offer/candidates and, once the session produced an answer, posts
/// m.call.answer and m.call.candidates to the room.
/// Fixed: the candidates payload was built twice and the first copy was
/// never used (dead `auto c = createCandidates(...)`).
void CallManager::acceptCall()
{
    // TODO metadata for this case
    if (globalState() != INCOMING) {
        qCWarning(voip) << "Not accepting call while state is" << globalState();
        return;
    }
    stopRinging();
    if (!checkPlugins()) {
        // NOTE(review): we only log here and still proceed with the accept;
        // consider aborting when required plugins are unavailable.
        qCCritical(voip) << "Missing plugins; can't accept call";
    }
    updateTurnServers();
    // TODO wait until candidates are here
    m_session = CallSession::acceptCall(m_incomingSdp, m_incomingCandidates, m_cachedTurnUris, m_remoteUser->id(), this);
    m_participants->clear();
    connect(m_session.data(), &CallSession::stateChanged, this, [this] {
        Q_EMIT stateChanged();
        if (state() == CallSession::ICEFAILED) {
            Q_EMIT callEnded();
        }
    }); // TODO refactor away?
    m_incomingCandidates.clear();
    connectSingleShot(m_session.data(), &CallSession::answerCreated, this, [this](const QString &_sdp, const QVector<Candidate> &candidates) {
        const auto &[uuids, sdp] = mangleSdp(_sdp);
        QVector<std::pair<QString, QString>> msidToPurpose;
        for (const auto &uuid : uuids) {
            msidToPurpose += {uuid, "m.usermedia"}; // TODO
        }
        auto answer = createAnswer(m_callId, sdp, msidToPurpose);
        m_room->postJson("m.call.answer", answer);
        qCWarning(voip) << "Sending Answer";
        auto cand = createCandidates(m_callId, candidates);
        m_room->postJson("m.call.candidates", cand);
        qCWarning(voip) << "Sending Candidates";
        setGlobalState(ACTIVE);
    });
}
// Ends the current call locally and notifies the room with m.call.hangup.
void CallManager::hangupCall()
{
qCDebug(voip) << "Ending call";
if (m_session) {
m_session->end();
delete m_session;
m_session = nullptr;
}
stopRinging();
m_room->postJson("m.call.hangup", createHangup(m_callId));
setGlobalState(IDLE);
Q_EMIT callEnded();
}
// The peer of the current call; nullptr when no call is set up.
NeoChatUser *CallManager::remoteUser() const
{
return m_remoteUser;
}
// The room the current call takes place in; nullptr when no call is set up.
NeoChatRoom *CallManager::room() const
{
return m_room;
}
// Media-session state; DISCONNECTED while no session object exists.
CallSession::State CallManager::state() const
{
if (!m_session) {
return CallSession::DISCONNECTED;
}
return m_session->state();
}
// Lifetime (ms) of the current invite; 0 when none.
int CallManager::lifetime() const
{
return m_lifetime;
}
// Silently drops the pending incoming call's bookkeeping (no hangup event
// is sent to the room).
void CallManager::ignoreCall()
{
setLifetime(0);
setCallId({});
setRoom(nullptr);
setRemoteUser(nullptr);
}
// Starts an outgoing 1:1 call in `room`: validates preconditions, creates a
// session, and wires up the offer / candidates / renegotiation plumbing.
void CallManager::startCall(NeoChatRoom *room)
{
if (m_session) {
// Don't start calls if there already is one
Q_EMIT Controller::instance().errorOccured(i18n("A call is already started"));
return;
}
if (room->users().size() != 2) {
// Don't start calls if the room doesn't have exactly two members
Q_EMIT Controller::instance().errorOccured(i18n("Calls are limited to 1:1 rooms"));
return;
}
auto missingPlugins = CallSession::missingPlugins();
if (!missingPlugins.isEmpty()) {
qCCritical(voip) << "Missing GStreamer plugins:" << missingPlugins;
Q_EMIT Controller::instance().errorOccured("Missing GStreamer plugins.");
return;
}
setLifetime(60000);
setRoom(room);
setRemoteUser(otherUser(room));
// NOTE(review): updateTurnServers() is a coroutine started without being
// awaited — the invite below may be sent before TURN URIs are cached.
updateTurnServers();
setCallId(generateCallId());
setPartyId(generatePartyId());
m_participants->clear();
for (const auto &user : m_room->users()) {
// NOTE(review): m_session is still null here — it is only assigned a few
// lines below — so participants are constructed with a null session.
// Confirm whether they should be created after CallSession::startCall().
auto participant = new CallParticipant(m_session);
participant->m_user = dynamic_cast<NeoChatUser *>(user);
m_participants->addParticipant(participant);
}
m_session = CallSession::startCall(m_cachedTurnUris, this);
setGlobalState(OUTGOING);
connect(m_session, &CallSession::stateChanged, this, [this] {
Q_EMIT stateChanged();
if (state() == CallSession::ICEFAILED) {
Q_EMIT callEnded();
}
});
// Once the session produced its local offer, post invite + candidates.
connectSingleShot(m_session.data(), &CallSession::offerCreated, this, [this](const QString &_sdp, const QVector<Candidate> &candidates) {
const auto &[uuids, sdp] = mangleSdp(_sdp);
QVector<std::pair<QString, QString>> msidToPurpose;
for (const auto &uuid : uuids) {
msidToPurpose += {uuid, "m.usermedia"}; // TODO
}
qCWarning(voip) << "Sending Invite";
qCWarning(voip) << "Sending Candidates";
auto invite = createInvite(m_callId, sdp, msidToPurpose);
auto c = createCandidates(m_callId, candidates);
m_room->postJson("m.call.invite", invite);
m_room->postJson("m.call.candidates", c);
});
// Mid-call renegotiation (e.g. enabling the camera) sends m.call.negotiate.
connect(m_session, &CallSession::renegotiate, this, [this](const QString &sdp, const QString &type) {
QVector<std::pair<QString, QString>> msidToPurpose;
const auto &[uuids, _sdp] = mangleSdp(sdp);
for (const auto &uuid : uuids) {
msidToPurpose += {uuid, "m.usermedia"}; // TODO
}
QJsonObject json{
{QStringLiteral("lifetime"), 60000},
{QStringLiteral("version"), 1},
{QStringLiteral("description"), QJsonObject{{QStringLiteral("type"), type}, {QStringLiteral("sdp"), _sdp}}}, // AAAAA
{QStringLiteral("party_id"), m_partyId},
{QStringLiteral("call_id"), m_callId},
};
QJsonObject metadata;
for (const auto &[stream, purpose] : msidToPurpose) {
QJsonObject data = {{"purpose", purpose}};
metadata[stream] = data;
}
json["org.matrix.msc3077.sdp_stream_metadata"] = metadata;
m_room->postJson("m.call.negotiate", json);
});
}
// A millisecond-resolution timestamp doubles as a unique-enough call id.
QString CallManager::generateCallId() const
{
    const auto now = QDateTime::currentDateTime();
    return now.toString(QStringLiteral("yyyyMMddhhmmsszzz"));
}
// Fresh UUID used as this device's party id for echo suppression.
QString CallManager::generatePartyId() const
{
return QUuid::createUuid().toString();
}
// Updates the current call id and notifies QML bindings.
void CallManager::setCallId(const QString &callId)
{
m_callId = callId;
Q_EMIT callIdChanged();
}
// Updates the local party id (no change signal is declared for it).
void CallManager::setPartyId(const QString &partyId)
{
m_partyId = partyId;
}
// Forwards the mute toggle to the session; no-op without an active session.
void CallManager::setMuted(bool muted)
{
if (!m_session) {
return;
}
m_session->setMuted(muted);
Q_EMIT mutedChanged();
}
// Whether the microphone is muted; false when no session exists.
bool CallManager::muted() const
{
if (!m_session) {
return false;
}
return m_session->muted();
}
// One-time process setup: registers the Candidate metatypes for queued
// signal connections and initialises GStreamer. Returns false when
// GStreamer could not be initialised.
bool CallManager::init()
{
qRegisterMetaType<Candidate>();
qRegisterMetaType<QVector<Candidate>>();
GError *error = nullptr;
if (!gst_init_check(nullptr, nullptr, &error)) {
QString strError;
if (error) {
strError += error->message;
g_error_free(error);
}
qCCritical(voip) << "Failed to initialize GStreamer:" << strError;
return false;
}
gchar *version = gst_version_string();
qCDebug(voip) << "GStreamer version" << version;
g_free(version);
// Required to register the qml types
auto _sink = gst_element_factory_make("qmlglsink", nullptr);
Q_ASSERT(_sink);
gst_object_unref(_sink);
return true;
}
// Updates the invite lifetime (ms) and notifies QML bindings.
void CallManager::setLifetime(int lifetime)
{
m_lifetime = lifetime;
Q_EMIT lifetimeChanged();
}
// Updates the call's room and notifies QML bindings.
void CallManager::setRoom(NeoChatRoom *room)
{
m_room = room;
Q_EMIT roomChanged();
}
/// Updates the call's remote user.
/// Fixed: this previously emitted roomChanged() instead of
/// remoteUserChanged(), so QML bindings on the remoteUser property never
/// refreshed.
void CallManager::setRemoteUser(NeoChatUser *user)
{
    m_remoteUser = user;
    Q_EMIT remoteUserChanged();
}
// In a 1:1 room exactly one member is not the local user; return that one.
NeoChatUser *CallManager::otherUser(NeoChatRoom *room)
{
    const auto members = room->users();
    auto *peer = members[0];
    if (peer->id() == room->localUser()->id()) {
        peer = members[1];
    }
    return dynamic_cast<NeoChatUser *>(peer);
}
/// Builds the m.call.candidates event content.
/// Fixed: "party_id" was the left-over placeholder "todopartyid" even though
/// a real party id is generated and stored in m_partyId; the placeholder
/// broke the peer's party-id matching.
QJsonObject CallManager::createCandidates(const QString &callId, const QVector<Candidate> &candidates) const
{
    QJsonArray candidatesJson;
    for (const auto &candidate : candidates) {
        candidatesJson += QJsonObject{{"candidate", candidate.candidate}, {"sdpMid", candidate.sdpMid}, {"sdpMLineIndex", candidate.sdpMLineIndex}};
    }
    return QJsonObject{{"call_id", callId}, {"candidates", candidatesJson}, {"version", CALL_VERSION}, {"party_id", m_partyId}};
}
// Transitions the call state machine, emitting only on actual changes.
void CallManager::setGlobalState(GlobalState globalState)
{
if (m_globalState == globalState) {
return;
}
m_globalState = globalState;
Q_EMIT globalStateChanged();
}
// Current position in the IDLE/INCOMING/OUTGOING/ACTIVE state machine.
CallManager::GlobalState CallManager::globalState() const
{
return m_globalState;
}
// Model of the current call's participants (ownership stays here).
CallParticipantsModel *CallManager::callParticipants() const
{
return m_participants;
}
// Rewrites GStreamer's machine-generated msid values
// ("user...@host-... webrtctransceiverN") into fresh UUIDs so they can be
// referenced from the MSC3077 sdp_stream_metadata map. Returns the list of
// generated UUIDs plus the rewritten SDP.
std::pair<QStringList, QString> CallManager::mangleSdp(const QString &_sdp)
{
QString sdp = _sdp;
QRegularExpression regex("msid:user[0-9]+@host-[0-9a-f]+ webrtctransceiver([0-9])");
auto iter = regex.globalMatch(sdp);
QStringList uuids;
while (iter.hasNext()) {
auto uuid = QUuid::createUuid();
auto match = iter.next();
uuids += uuid.toString();
sdp.replace(match.captured(), QStringLiteral("msid:") + uuid.toString() + QStringLiteral(" foo"));
}
return {uuids, sdp};
}
/// Builds the m.call.invite event content with the offer SDP and MSC3077
/// per-stream metadata.
/// Fixed: "party_id" was the left-over placeholder "todopartyid" instead of
/// the generated m_partyId.
QJsonObject CallManager::createInvite(const QString &callId, const QString &sdp, const QVector<std::pair<QString, QString>> &msidToPurpose) const
{
    QJsonObject metadata;
    for (const auto &[msid, purpose] : msidToPurpose) {
        metadata[msid] = QJsonObject{{"purpose", purpose}};
    }
    return {{"call_id", callId},
            {"party_id", m_partyId},
            {"lifetime", 60000},
            {"capabilities", QJsonObject{{"m.call.transferee", false}}},
            {"offer", QJsonObject{{"sdp", sdp}, {"type", "offer"}}},
            {"org.matrix.msc3077.sdp_stream_metadata", metadata},
            {"version", CALL_VERSION}};
}
/// Builds the m.call.hangup event content.
/// Fixed: "party_id" was the left-over placeholder "todopartyid" instead of
/// the generated m_partyId.
QJsonObject CallManager::createHangup(const QString &callId) const
{
    return {{"call_id", callId}, {"party_id", m_partyId}, {"version", CALL_VERSION}};
}
/// Builds the m.call.answer event content.
/// Fixed three defects: the description was sent under the "offer" key with
/// type "offer" (an answer must use the "answer" key with type "answer" per
/// the Matrix VoIP spec); "lifetime" carried the literal string "lifetime"
/// instead of a number; and "party_id" was the "todopartyid" placeholder.
QJsonObject CallManager::createAnswer(const QString &callId, const QString &sdp, const QVector<std::pair<QString, QString>> &msidToPurpose) const
{
    Q_ASSERT(!callId.isEmpty());
    QJsonObject metadata;
    for (const auto &[msid, purpose] : msidToPurpose) {
        metadata[msid] = QJsonObject{{"purpose", purpose}};
    }
    return {{"call_id", callId},
            {"party_id", m_partyId},
            {"lifetime", m_lifetime},
            {"capabilities", QJsonObject{{"m.call.transferee", false}}},
            {"answer", QJsonObject{{"sdp", sdp}, {"type", "answer"}}},
            {"org.matrix.msc3077.sdp_stream_metadata", metadata},
            {"version", CALL_VERSION}};
}
/// Toggles the camera on the active session.
/// Fixed: unlike the sibling session-forwarding methods (setMuted, muted,
/// state) this dereferenced m_session unconditionally and crashed when
/// invoked from QML while no call was running.
void CallManager::toggleCamera()
{
    if (!m_session) {
        return;
    }
    m_session->toggleCamera();
}
// This device's party id for the current call (used for echo suppression).
QString CallManager::partyId() const
{
return m_partyId;
}
/// Logs and reports missing GStreamer plugins.
/// Returns true when all required plugins are available.
/// Fixed: the function returned !missingPlugins.isEmpty() — i.e. true when
/// plugins were MISSING — while the call site in acceptCall() treats a false
/// return as the failure case (`if (!checkPlugins()) ...critical...`). The
/// return value is now true on success, matching the call site.
bool CallManager::checkPlugins() const
{
    const auto missingPlugins = m_session->missingPlugins();
    if (!missingPlugins.isEmpty()) {
        qCCritical(voip) << "Missing GStreamer plugins:" << missingPlugins;
        Q_EMIT Controller::instance().errorOccured("Missing GStreamer plugins.");
    }
    return missingPlugins.isEmpty();
}

159
src/call/callmanager.h Normal file
View File

@@ -0,0 +1,159 @@
// SPDX-FileCopyrightText: 2020-2021 Nheko Authors
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-3.0-or-later
#pragma once
#include "neochatroom.h"
#include "neochatuser.h"
#include <QAbstractListModel>
#include <QObject>
#include <QString>
#include <events/roomevent.h>
#include "callsession.h"
#include "models/callparticipantsmodel.h"
#include <events/callevents.h>
#include <QMediaPlayer>
#include <QMediaPlaylist>
#include <QTimer>
#include <qcoro/task.h>
#include <qobjectdefs.h>
class CallSession;
class QQuickItem;
using namespace Quotient;
// Singleton orchestrating 1:1 Matrix VoIP calls: owns the signaling state
// machine (IDLE/INCOMING/OUTGOING/ACTIVE), translates m.call.* room events
// into CallSession operations, and exposes call state to QML.
class CallManager : public QObject
{
Q_OBJECT
public:
// Coarse signaling state, independent of the media-session state below.
enum GlobalState {
IDLE,
INCOMING,
OUTGOING,
ACTIVE,
};
Q_ENUM(GlobalState);
Q_PROPERTY(GlobalState globalState READ globalState NOTIFY globalStateChanged)
Q_PROPERTY(NeoChatUser *remoteUser READ remoteUser NOTIFY remoteUserChanged)
Q_PROPERTY(QString callId READ callId NOTIFY callIdChanged)
Q_PROPERTY(NeoChatRoom *room READ room NOTIFY roomChanged)
Q_PROPERTY(int lifetime READ lifetime NOTIFY lifetimeChanged)
Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged)
Q_PROPERTY(QQuickItem *item MEMBER m_item) // TODO allow for different devices for each session
Q_PROPERTY(CallSession::State state READ state NOTIFY stateChanged)
Q_PROPERTY(CallParticipantsModel *callParticipants READ callParticipants CONSTANT)
// Meyers singleton: one call manager per process.
static CallManager &instance()
{
static CallManager _instance;
return _instance;
}
[[nodiscard]] QString callId() const;
[[nodiscard]] QString partyId() const;
// Media-session state; DISCONNECTED when no session exists.
CallSession::State state() const;
NeoChatUser *remoteUser() const;
NeoChatRoom *room() const;
int lifetime() const;
bool muted() const;
void setMuted(bool muted);
CallManager::GlobalState globalState() const;
// Entry point for incoming m.call.* room events (dispatches to handlers).
void handleCallEvent(NeoChatRoom *room, const RoomEvent *event);
Q_INVOKABLE void startCall(NeoChatRoom *room);
Q_INVOKABLE void acceptCall();
Q_INVOKABLE void hangupCall();
Q_INVOKABLE void ignoreCall();
Q_INVOKABLE void toggleCamera();
// Refreshes the TURN URI cache from the homeserver (coroutine).
QCoro::Task<void> updateTurnServers();
[[nodiscard]] CallParticipantsModel *callParticipants() const;
// Video output surface set from QML.
QQuickItem *m_item = nullptr;
Q_SIGNALS:
void currentCallIdChanged();
// Emitted on an accepted m.call.invite so the UI can show the call dialog.
void incomingCall(NeoChatUser *user, NeoChatRoom *room, int timeout, const QString &callId);
void callEnded();
void remoteUserChanged();
void callIdChanged();
void roomChanged();
void stateChanged();
void lifetimeChanged();
void mutedChanged();
void globalStateChanged();
private:
CallManager();
QString m_callId;
// Remote candidates/SDP buffered until the session can consume them.
QVector<Candidate> m_incomingCandidates;
QString m_incomingSdp;
[[nodiscard]] bool checkPlugins() const;
// TURN URIs cached until the server-provided TTL expires.
QStringList m_cachedTurnUris;
QDateTime m_cachedTurnUrisValidUntil = QDateTime::fromSecsSinceEpoch(0);
NeoChatUser *m_remoteUser = nullptr;
NeoChatRoom *m_room = nullptr;
QString m_remotePartyId;
QString m_partyId;
int m_lifetime = 0;
GlobalState m_globalState = IDLE;
void handleInvite(NeoChatRoom *room, const CallInviteEvent *event);
void handleHangup(NeoChatRoom *room, const CallHangupEvent *event);
void handleCandidates(NeoChatRoom *room, const CallCandidatesEvent *event);
void handleAnswer(NeoChatRoom *room, const CallAnswerEvent *event);
void handleNegotiate(NeoChatRoom *room, const CallNegotiateEvent *event);
// Starts the session once both remote SDP and candidates are buffered.
void checkStartCall();
void ring(int lifetime);
void stopRinging();
[[nodiscard]] QString generateCallId() const;
[[nodiscard]] QString generatePartyId() const;
bool init();
bool m_initialised = false;
// QPointer: auto-resets to null when the session object is deleted.
QPointer<CallSession> m_session = nullptr;
void setLifetime(int lifetime);
void setRoom(NeoChatRoom *room);
void setRemoteUser(NeoChatUser *user);
void setCallId(const QString &callId);
void setPartyId(const QString &partyId);
void setGlobalState(GlobalState state);
// Rewrites GStreamer msids to UUIDs for MSC3077 stream metadata.
std::pair<QStringList, QString> mangleSdp(const QString &sdp);
CallParticipantsModel *m_participants = new CallParticipantsModel();
NeoChatUser *otherUser(NeoChatRoom *room);
// Builders for the outgoing m.call.* event payloads.
[[nodiscard]] QJsonObject createCandidates(const QString &callId, const QVector<Candidate> &candidates) const;
[[nodiscard]] QJsonObject createInvite(const QString &callId, const QString &sdp, const QVector<std::pair<QString, QString>> &msidToPurpose) const;
[[nodiscard]] QJsonObject createHangup(const QString &callId) const;
[[nodiscard]] QJsonObject createAnswer(const QString &callId, const QString &sdp, const QVector<std::pair<QString, QString>> &msidToPurpose) const;
QMediaPlayer m_ringPlayer;
QMediaPlaylist m_playlist;
};

View File

@@ -0,0 +1,51 @@
// SPDX-FileCopyrightText: 2022 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "callnegotiateevent.h"
using namespace Quotient;
// Builds an outgoing m.call.negotiate event carrying a new SDP offer/answer
// plus MSC3077 per-stream metadata (stream msid -> {"purpose": ...}).
CallNegotiateEvent::CallNegotiateEvent(const QString &callId,
                                       const QString &partyId,
                                       int lifetime,
                                       const QString &sdp,
                                       bool answer,
                                       QVector<std::pair<QString, QString>> msidToPurpose)
    : EventTemplate(callId,
                    {
                        {QStringLiteral("lifetime"), lifetime},
                        {QStringLiteral("version"), 1},
                        {QStringLiteral("description"),
                         QJsonObject{{QStringLiteral("type"), answer ? QStringLiteral("answer") : QStringLiteral("offer")}, {QStringLiteral("sdp"), sdp}}},
                        {QStringLiteral("party_id"), partyId},
                    })
{
    QJsonObject metadata;
    for (const auto &[stream, purpose] : msidToPurpose) {
        // MSC3077 maps each stream id to an object describing it, not to the
        // bare purpose string (the previous revision dropped the object).
        metadata[stream] = QJsonObject{{QStringLiteral("purpose"), purpose}};
    }
    // Patch the metadata into the existing *content* object. Copying
    // editJson() wholesale (as before) grabbed the entire event json and then
    // nested it inside its own "content" key.
    auto content = contentJson();
    content[QStringLiteral("org.matrix.msc3077.sdp_stream_metadata")] = metadata;
    editJson()[QStringLiteral("content")] = content;
}
// Deserializing constructor: wraps an m.call.negotiate event received from
// the server; all fields stay in the underlying JSON.
CallNegotiateEvent::CallNegotiateEvent(const QJsonObject &json)
    : EventTemplate(json)
{
}
// Returns the sender's opaque party id, which distinguishes multiple devices
// of the same user taking part in one call.
QString CallNegotiateEvent::partyId() const
{
    const auto content = contentJson();
    return content[QStringLiteral("party_id")].toString();
}
// Returns the renegotiated session description carried by this event.
QString CallNegotiateEvent::sdp() const
{
    const auto description = contentJson()[QStringLiteral("description")].toObject();
    return description[QStringLiteral("sdp")].toString();
}
// Returns the raw MSC3077 stream-metadata object (stream msid -> description).
QJsonObject CallNegotiateEvent::sdpStreamMetadata() const
{
    return contentJson().value(QStringLiteral("org.matrix.msc3077.sdp_stream_metadata")).toObject();
}

View File

@@ -0,0 +1,30 @@
// SPDX-FileCopyrightText: 2022 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <events/callevents.h>
namespace Quotient
{
// m.call.negotiate event: carries a new SDP offer/answer for renegotiating an
// established call, e.g. when a camera track is added mid-call.
class CallNegotiateEvent : public EventTemplate<CallNegotiateEvent, CallEvent>
{
public:
    QUO_EVENT(CallNegotiateEvent, "m.call.negotiate")
    // Deserializing constructor for received events.
    explicit CallNegotiateEvent(const QJsonObject &obj);
    // Builds an outgoing negotiate event; msidToPurpose provides the MSC3077
    // stream metadata (stream msid -> purpose string).
    explicit CallNegotiateEvent(const QString &callId,
                                const QString &partyId,
                                int lifetime,
                                const QString &sdp,
                                bool answer,
                                QVector<std::pair<QString, QString>> msidToPurpose);
    QString partyId() const; // sender's party id
    QString sdp() const; // the new session description
    // TODO make this a struct instead
    QJsonObject sdpStreamMetadata() const; // raw MSC3077 metadata object
};
}

View File

@@ -0,0 +1,26 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-2.0-or-later
#include "callparticipant.h"
// Returns the room member this participant represents (nullptr until set by
// the owning model).
NeoChatUser *CallParticipant::user() const
{
    return m_user;
}
// Whether this participant currently contributes a video stream.
bool CallParticipant::hasCamera() const
{
    return m_hasCamera;
}
// Plain QObject construction; all state is filled in afterwards by the
// participants model.
CallParticipant::CallParticipant(QObject *parent)
    : QObject(parent)
{
}
// Announces the QML video surface for this participant. The notification is
// deferred so the item finishes instantiation before the GStreamer sink is
// wired to it.
// NOTE(review): the fixed delay is a heuristic — confirm whether a readiness
// signal from the item could replace it.
void CallParticipant::initCamera(QQuickItem *item)
{
    constexpr int initDelayMs = 500;
    QTimer::singleShot(initDelayMs, this, [this, item] {
        Q_EMIT initialized(item);
    });
}

View File

@@ -0,0 +1,36 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-2.0-or-later
#pragma once
#include <QObject>
#include <QTimer>
#include "neochatuser.h"
class QQuickItem;
// One participant of the active call, exposed to QML for rendering the
// participant grid and hosting the participant's video surface.
class CallParticipant : public QObject
{
    Q_OBJECT
    Q_PROPERTY(NeoChatUser *user READ user CONSTANT)
    Q_PROPERTY(bool hasCamera READ hasCamera NOTIFY hasCameraChanged)
public:
    // NOTE(review): public data members, written directly by the owning
    // participants model — consider proper setters that emit the signals.
    NeoChatUser *m_user = nullptr;
    bool m_hasCamera = false;
    // Called from QML with the video surface; emits initialized() (delayed)
    // once the item can be attached to the GStreamer sink.
    Q_INVOKABLE void initCamera(QQuickItem *item);
    [[nodiscard]] NeoChatUser *user() const;
    [[nodiscard]] bool hasCamera() const;
    explicit CallParticipant(QObject *parent = nullptr);
Q_SIGNALS:
    void initialized(QQuickItem *item);
    // NOTE(review): no property in this class uses these two notifications.
    void heightChanged();
    void widthChanged();
    void hasCameraChanged();
};

916
src/call/callsession.cpp Normal file
View File

@@ -0,0 +1,916 @@
// SPDX-FileCopyrightText: 2021 Nheko Contributors
// SPDX-FileCopyrightText: 2021 Carl Schwan <carl@carlschwan.eu>
// SPDX-FileCopyrightText: 2021-2022 Tobias Fella <fella@posteo.de>
//
// SPDX-License-Identifier: GPL-3.0-or-later
#include "calldevices.h"
#include <QDebug>
#include <QThread>
#include <gst/gst.h>
#define GST_USE_UNSTABLE_API
#include <gst/webrtc/webrtc.h>
#undef GST_USE_UNSTABLE_API
#include "voiplogging.h"
#include "audiosources.h"
#include "videosources.h"
#include <qcoro/qcorosignal.h>
#define private public
#include "callsession.h"
#undef private
#include "callmanager.h"
#include <qt_connection_util.h>
#define STUN_SERVER "stun://turn.matrix.org:3478" // TODO make STUN server configurable
#define INSTANCE \
Q_ASSERT(user_data); \
auto instance = static_cast<CallSession *>(user_data);
// Creates a GStreamer element of the given factory type (asserting on
// failure) and, when a pipeline bin is given, adds it there immediately.
GstElement *createElement(const char *type, GstElement *pipe, const char *name = nullptr)
{
    GstElement *created = gst_element_factory_make(type, name);
    Q_ASSERT_X(created, __FUNCTION__, QStringLiteral("Failed to create element %1 %2").arg(type, name).toLatin1());
    if (pipe != nullptr) {
        gst_bin_add_many(GST_BIN(pipe), created, nullptr);
    }
    return created;
}
// Looks up a named element inside a bin, asserting that it exists.
// The caller owns the returned reference and must gst_object_unref() it.
GstElement *binGetByName(GstElement *bin, const char *name)
{
    GstElement *found = gst_bin_get_by_name(GST_BIN(bin), name);
    Q_ASSERT_X(found, __FUNCTION__, QStringLiteral("Failed to get element by name: %1").arg(name).toLatin1());
    return found;
}
// Bookkeeping for the keyframe-request workaround: a periodic GLib timer
// polls the inbound RTP stats and forces a keyframe when packet loss grows.
// NOTE(review): single file-global instance — only one inbound video stream
// can be tracked at a time.
struct KeyFrameRequestData {
    GstElement *pipe = nullptr; // pipeline owning the polled webrtcbin
    GstElement *decodeBin = nullptr; // decodebin that receives the force-keyunit event
    gint packetsLost = 0; // packet-loss count seen at the last poll
    guint timerId = 0; // GLib timeout source id of the poll timer
    QString statsField; // stats field name for the tracked ssrc
} keyFrameRequestData;
// Reads the negotiated {width, height} from the pad's current caps.
// Returns {0, 0} when the pad has no caps yet or the caps carry no size
// (previously both the null caps and the missing fields left the result
// uninitialized / crashed).
std::pair<int, int> getResolution(GstPad *pad)
{
    std::pair<int, int> ret{0, 0};
    auto caps = gst_pad_get_current_caps(pad);
    if (!caps) {
        return ret;
    }
    auto structure = gst_caps_get_structure(caps, 0);
    gst_structure_get_int(structure, "width", &ret.first);
    gst_structure_get_int(structure, "height", &ret.second);
    gst_caps_unref(caps);
    return ret;
}
// Convenience overload: resolves the element and static pad by name and
// reads the pad's resolution, releasing both references afterwards.
std::pair<int, int> getResolution(GstElement *pipe, const gchar *elementName, const gchar *padName)
{
    auto *element = binGetByName(pipe, elementName);
    auto *pad = gst_element_get_static_pad(element, padName);
    const auto resolution = getResolution(pad);
    gst_object_unref(pad);
    gst_object_unref(element);
    return resolution;
}
// Promise callback from webrtcbin's create-offer/create-answer: applies the
// generated description locally, stores its SDP text and — if a local SDP
// already existed — treats this as a renegotiation and announces the new SDP.
void setLocalDescription(GstPromise *promise, gpointer user_data)
{
    INSTANCE
    qCDebug(voip) << "Setting local description";
    const GstStructure *reply = gst_promise_get_reply(promise);
    // The reply carries either an "answer" or an "offer" field, depending on
    // which signal created the promise.
    gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
    GstWebRTCSessionDescription *gstsdp = nullptr;
    gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr);
    gst_promise_unref(promise);
    // NOTE(review): this webrtcbin reference is never unreffed — possible leak.
    auto webrtcbin = binGetByName(instance->m_pipe, "webrtcbin");
    Q_ASSERT(gstsdp);
    g_signal_emit_by_name(webrtcbin, "set-local-description", gstsdp, nullptr);
    gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
    if (!instance->m_localSdp.isEmpty()) {
        // This is a renegotiation
        qWarning() << "emitting renegotiate";
        Q_EMIT instance->renegotiate(QString(sdp), isAnswer ? QStringLiteral("answer") : QStringLiteral("offer"));
    }
    instance->m_localSdp = QString(sdp);
    g_free(sdp);
    gst_webrtc_session_description_free(gstsdp);
    qCDebug(voip) << "Local description set:" << isAnswer;
}
// Case-insensitive substring test: true when str2 occurs within str1.
bool contains(std::string_view str1, std::string_view str2)
{
    const auto equalIgnoringCase = [](unsigned char lhs, unsigned char rhs) {
        return std::tolower(lhs) == std::tolower(rhs);
    };
    const auto hit = std::search(str1.cbegin(), str1.cend(), str2.cbegin(), str2.cend(), equalIgnoringCase);
    return hit != str1.cend();
}
// webrtcbin "on-negotiation-needed" handler for the offering side: asks the
// bin to create the initial offer; the result arrives in setLocalDescription.
void createOffer(GstElement *webrtc, CallSession *session)
{
    // TODO ?!?
    // Guard: once a local SDP exists, renegotiation is driven elsewhere, so
    // don't create a second initial offer.
    if (!session->m_localSdp.isEmpty()) {
        return;
    }
    qCWarning(voip) << "Creating Offer";
    auto promise = gst_promise_new_with_change_func(setLocalDescription, session, nullptr);
    g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}
// Promise callback fired once the remote offer has been applied: kicks off
// answer creation on the webrtcbin; the answer lands in setLocalDescription.
void createAnswer(GstPromise *promise, gpointer user_data)
{
    INSTANCE
    qCDebug(voip) << "Creating Answer";
    gst_promise_unref(promise);
    auto *webrtcbin = binGetByName(instance->m_pipe, "webrtcbin");
    auto *answerPromise = gst_promise_new_with_change_func(setLocalDescription, instance, nullptr);
    g_signal_emit_by_name(webrtcbin, "create-answer", nullptr, answerPromise);
}
bool getMediaAttributes(const GstSDPMessage *sdp, const char *mediaType, const char *encoding, int &payloadType, bool &receiveOnly, bool &sendOnly)
{
payloadType = -1;
receiveOnly = false;
sendOnly = false;
for (guint mlineIndex = 0; mlineIndex < gst_sdp_message_medias_len(sdp); mlineIndex++) {
const GstSDPMedia *media = gst_sdp_message_get_media(sdp, mlineIndex);
if (!strcmp(gst_sdp_media_get_media(media), mediaType)) {
receiveOnly = gst_sdp_media_get_attribute_val(media, "recvonly") != nullptr;
sendOnly = gst_sdp_media_get_attribute_val(media, "sendonly") != nullptr;
const gchar *rtpval = nullptr;
for (guint n = 0; n == 0 || rtpval; n++) {
rtpval = gst_sdp_media_get_attribute_val_n(media, "rtpmap", n);
if (rtpval && contains(rtpval, encoding)) {
payloadType = QString::fromLatin1(rtpval).toInt();
break;
}
}
return true;
}
}
return false;
}
// Parses a textual SDP blob into a GstWebRTCSessionDescription of the given
// type; returns nullptr (and logs) on malformed input.
GstWebRTCSessionDescription *parseSDP(const QString &sdp, GstWebRTCSDPType type)
{
    GstSDPMessage *message;
    gst_sdp_message_new(&message);
    const auto result = gst_sdp_message_parse_buffer((guint8 *)sdp.toLatin1().data(), sdp.size(), message);
    if (result != GST_SDP_OK) {
        qCCritical(voip) << "Failed to parse remote SDP";
        gst_sdp_message_free(message);
        return nullptr;
    }
    // The returned description takes ownership of `message`.
    return gst_webrtc_session_description_new(type, message);
}
// webrtcbin "on-ice-candidate" handler: collects locally gathered ICE
// candidates; they are sent out in one batch once gathering completes.
void addLocalICECandidate(GstElement *webrtc, guint mlineIndex, const gchar *candidate, gpointer user_data)
{
    Q_UNUSED(webrtc);
    INSTANCE
    instance->m_localCandidates.append(Candidate{candidate, static_cast<int>(mlineIndex), QString()});
}
// webrtcbin "notify::ice-connection-state" handler: maps the ICE connection
// state onto the session's own state machine.
void iceConnectionStateChanged(GstElement *webrtc, GParamSpec *pspec, gpointer user_data)
{
    Q_UNUSED(pspec);
    INSTANCE
    GstWebRTCICEConnectionState newState;
    g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
    switch (newState) {
    case GST_WEBRTC_ICE_CONNECTION_STATE_NEW:
    case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
        instance->setState(CallSession::CONNECTING);
        break;
    case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
        instance->setState(CallSession::ICEFAILED);
        break;
    case GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED:
        instance->setState(CallSession::CONNECTED);
        // Previously fell through; the following cases are no-ops so the
        // behavior was the same, but the implicit fallthrough was bug-prone.
        break;
    case GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED:
    case GST_WEBRTC_ICE_CONNECTION_STATE_DISCONNECTED:
    case GST_WEBRTC_ICE_CONNECTION_STATE_CLOSED:
    default:
        break;
    }
}
// Builds queue -> audioconvert -> audioresample -> autoaudiosink for an
// incoming audio stream and returns the queue head for linking.
GstElement *newAudioSinkChain(GstElement *pipe)
{
    qCWarning(voip) << "New Audio Sink Chain";
    GstElement *chain[] = {
        createElement("queue", pipe),
        createElement("audioconvert", pipe),
        createElement("audioresample", pipe),
        createElement("autoaudiosink", pipe),
    };
    gst_element_link_many(chain[0], chain[1], chain[2], chain[3], nullptr);
    for (auto *element : chain) {
        gst_element_sync_state_with_parent(element);
    }
    return chain[0];
}
// Pushes a custom GstForceKeyUnit event upstream so the sender produces a new
// keyframe (used to recover the picture after packet loss).
void sendKeyFrameRequest()
{
    auto *sinkpad = gst_element_get_static_pad(keyFrameRequestData.decodeBin, "sink");
    auto *forceKeyUnit = gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, gst_structure_new_empty("GstForceKeyUnit"));
    if (!gst_pad_push_event(sinkpad, forceKeyUnit)) {
        qCWarning(voip) << "Keyframe request failed";
    }
    gst_object_unref(sinkpad);
}
// Promise callback for webrtcbin "get-stats": compares the inbound video
// stream's packets-lost counter against the previous poll and requests a new
// keyframe when additional loss occurred.
void onGetStats(GstPromise *promise, gpointer)
{
    auto reply = gst_promise_get_reply(promise);
    GstStructure *rtpStats;
    if (!gst_structure_get(reply, keyFrameRequestData.statsField.toLatin1().data(), GST_TYPE_STRUCTURE, &rtpStats, nullptr)) {
        // Stats for the tracked ssrc are not (yet) available.
        gst_promise_unref(promise);
        return;
    }
    auto packetsLost = 0;
    gst_structure_get_int(rtpStats, "packets-lost", &packetsLost);
    gst_structure_free(rtpStats);
    gst_promise_unref(promise);
    if (packetsLost > keyFrameRequestData.packetsLost) {
        qCWarning(voip) << "inbound video lost packet count:" << packetsLost;
        keyFrameRequestData.packetsLost = packetsLost;
        sendKeyFrameRequest();
    }
}
// TODO port to QTimer?
// Periodic GLib timeout: polls webrtcbin stats while a call is running.
// Returning false removes the timeout source once the pipeline is gone.
gboolean testPacketLoss(gpointer)
{
    if (!keyFrameRequestData.pipe) {
        return false;
    }
    auto webrtc = binGetByName(keyFrameRequestData.pipe, "webrtcbin");
    auto promise = gst_promise_new_with_change_func(onGetStats, nullptr, nullptr);
    g_signal_emit_by_name(webrtc, "get-stats", nullptr, promise);
    gst_object_unref(webrtc);
    return true;
}
// Builds queue -> compositor -> glupload -> glcolorconvert -> glsinkbin with
// a qmlglsink rendering into the given QML item; returns the queue head.
GstElement *newVideoSinkChain(GstElement *pipe, QQuickItem *quickItem)
{
    Q_ASSERT(pipe);
    Q_ASSERT(quickItem);
    qCWarning(voip) << "Creating Video Sink Chain";
    auto *queue = createElement("queue", pipe);
    auto *compositor = createElement("compositor", pipe);
    auto *glupload = createElement("glupload", pipe);
    auto *glcolorconvert = createElement("glcolorconvert", pipe);
    // qmlglsink is not added to the pipe directly — it lives inside glsinkbin.
    auto *qmlglsink = createElement("qmlglsink", nullptr);
    auto *glsinkbin = createElement("glsinkbin", pipe);
    g_object_set(qmlglsink, "widget", quickItem, nullptr);
    g_object_set(glsinkbin, "sink", qmlglsink, nullptr);
    gst_element_link_many(queue, compositor, glupload, glcolorconvert, glsinkbin, nullptr);
    for (auto *element : {queue, compositor, glupload, glcolorconvert, glsinkbin}) {
        gst_element_sync_state_with_parent(element);
    }
    return queue;
}
// Pad probe watching for CAPS events on an incoming video pad; intended to
// propagate the negotiated resolution to the stream object.
// NOTE(review): the consumers are commented out, so this probe currently has
// no observable effect.
static GstPadProbeReturn pad_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    Q_UNUSED(pad);
    Q_UNUSED(user_data);
    // auto stream = static_cast<VideoStream *>(user_data);
    auto event = GST_PAD_PROBE_INFO_EVENT(info);
    if (GST_EVENT_CAPS == GST_EVENT_TYPE(event)) {
        // gst_event_parse_caps() hands back a borrowed pointer; allocating
        // caps with gst_caps_new_any() up front (as before) leaked them.
        GstCaps *caps = nullptr;
        gst_event_parse_caps(event, &caps);
        int width = 0;
        int height = 0;
        auto structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width", &width);
        gst_structure_get_int(structure, "height", &height);
        // stream->setWidth(width);
        // stream->setHeight(height);
        // TODO needed?
    }
    return GST_PAD_PROBE_OK;
}
// decodebin "pad-added" handler: wires a newly decoded remote stream into an
// audio sink chain, or — once the participant's QML video surface exists —
// into a video sink chain. Until the surface is ready, video is parked on a
// fakesink behind an output-selector so the pipeline keeps flowing.
void linkNewPad(GstElement *decodeBin, GstPad *newpad, gpointer user_data)
{
    INSTANCE
    qCWarning(voip) << "Linking New Pad";
    // The decodebin sink caps tell us the media type and the RTP ssrc, which
    // identifies the stream (and thus the sending user).
    auto sinkpad = gst_element_get_static_pad(decodeBin, "sink");
    auto sinkcaps = gst_pad_get_current_caps(sinkpad);
    auto structure = gst_caps_get_structure(sinkcaps, 0);
    gchar *mediaType = nullptr;
    guint ssrc = 0;
    gst_structure_get(structure, "media", G_TYPE_STRING, &mediaType, "ssrc", G_TYPE_UINT, &ssrc, nullptr);
    gst_caps_unref(sinkcaps);
    gst_object_unref(sinkpad);
    GstElement *queue = nullptr;
    if (!strcmp(mediaType, "audio")) {
        qCWarning(voip) << "Receiving audio stream";
        queue = newAudioSinkChain(instance->m_pipe);
    } else if (!strcmp(mediaType, "video")) {
        qCWarning(voip) << "Receiving video stream";
        auto fake = createElement("fakesink", instance->m_pipe);
        auto selector = createElement("output-selector", instance->m_pipe);
        auto selectorSink = gst_element_get_static_pad(selector, "sink");
        auto selectorSrc1 = gst_element_request_pad_simple(selector, "src_%u");
        gst_pad_link(newpad, selectorSink);
        auto fakepad = gst_element_get_static_pad(fake, "sink");
        gst_pad_link(selectorSrc1, fakepad);
        g_object_set(selector, "active-pad", selectorSrc1, nullptr);
        // Resolve ssrc -> stream msid -> sending user, recorded when the
        // remote description was applied.
        auto msid = instance->ssrcToMsid[ssrc];
        // gst_pad_add_probe(newpad, GST_PAD_PROBE_TYPE_EVENT_BOTH, pad_cb, stream, nullptr);
        auto manager = dynamic_cast<CallManager *>(instance->parent());
        auto participants = manager->callParticipants();
        auto user = dynamic_cast<NeoChatUser *>(manager->room()->user(instance->msidToUserId[msid]));
        participants->setHasCamera(user, true);
        auto participant = participants->callParticipantForUser(user);
        // gst_pad_add_probe(newpad, GST_PAD_PROBE_TYPE_EVENT_BOTH, pad_cb, nullptr, nullptr);
        connectSingleShot(participant, &CallParticipant::initialized, instance, [=](QQuickItem *item) {
            // The surface exists now: build the real video sink chain and
            // flip the selector over to it.
            gst_pad_unlink(newpad, fakepad);
            auto queue = newVideoSinkChain(instance->m_pipe, item);
            auto queuepad = gst_element_get_static_pad(queue, "sink");
            Q_ASSERT(queuepad);
            auto selectorSrc = gst_element_request_pad_simple(selector, "src_%u");
            auto ok = GST_PAD_LINK_SUCCESSFUL(gst_pad_link(selectorSrc, queuepad));
            Q_ASSERT(ok);
            g_object_set(selector, "active-pad", selectorSrc, nullptr);
            instance->setState(CallSession::CONNECTED);
            // Start polling for packet loss so keyframes can be requested.
            keyFrameRequestData.pipe = instance->m_pipe;
            keyFrameRequestData.decodeBin = decodeBin;
            keyFrameRequestData.timerId = g_timeout_add_seconds(3, testPacketLoss, nullptr);
            keyFrameRequestData.statsField = QStringLiteral("rtp-inbound-stream-stats_") + QString::number(ssrc);
            gst_object_unref(queuepad);
        });
        // Previously mediaType was freed inside the deferred lambda above,
        // leaking it whenever the participant never initialized; it is not
        // used past this point, so free it here instead.
        g_free(mediaType);
        return;
    } else {
        g_free(mediaType);
        qCWarning(voip) << "Unknown pad type:" << GST_PAD_NAME(newpad);
        return;
    }
    auto queuepad = gst_element_get_static_pad(queue, "sink");
    Q_ASSERT(queuepad);
    auto ok = GST_PAD_LINK_SUCCESSFUL(gst_pad_link(newpad, queuepad));
    Q_ASSERT(ok);
    gst_object_unref(queuepad);
    g_free(mediaType);
}
// decodebin "element-added" handler: tells the VP8 depayloader to hold back
// buffers until the first keyframe, avoiding initial garbage frames.
void setWaitForKeyFrame(GstBin *decodeBin, GstElement *element, gpointer)
{
    Q_UNUSED(decodeBin);
    const auto *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(element)));
    if (strcmp(factoryName, "rtpvp8depay") == 0) {
        g_object_set(element, "wait-for-keyframe", TRUE, nullptr);
    }
}
// webrtcbin "pad-added" handler: attaches a decodebin to every new source pad
// so incoming RTP streams get decoded; the decoded pads are then wired up by
// linkNewPad().
void addDecodeBin(GstElement *webrtc, GstPad *newpad, gpointer user_data)
{
    Q_UNUSED(webrtc);
    if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC) {
        return;
    }
    INSTANCE
    auto decodeBin = createElement("decodebin", instance->m_pipe);
    // Investigate hardware, see nheko source
    g_object_set(decodeBin, "force-sw-decoders", TRUE, nullptr);
    g_signal_connect(decodeBin, "pad-added", G_CALLBACK(linkNewPad), instance);
    g_signal_connect(decodeBin, "element-added", G_CALLBACK(setWaitForKeyFrame), nullptr);
    gst_element_sync_state_with_parent(decodeBin);
    auto sinkpad = gst_element_get_static_pad(decodeBin, "sink");
    if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad))) {
        // TODO: Error handling
        qCWarning(voip) << "Unable to link decodebin";
    }
    gst_object_unref(sinkpad);
}
// webrtcbin "notify::ice-gathering-state" handler: once all local candidates
// have been gathered, publish the stored local SDP plus candidates as either
// an offer or an answer, depending on which side of the call we are.
void iceGatheringStateChanged(GstElement *webrtc, GParamSpec *pspec, gpointer user_data)
{
    Q_UNUSED(pspec);
    INSTANCE
    GstWebRTCICEGatheringState gatheringState;
    g_object_get(webrtc, "ice-gathering-state", &gatheringState, nullptr);
    if (gatheringState != GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
        return;
    }
    qCWarning(voip) << "GstWebRTCICEGatheringState -> Complete";
    if (instance->m_isOffering) {
        Q_EMIT instance->offerCreated(instance->m_localSdp, instance->m_localCandidates);
        instance->setState(CallSession::OFFERSENT);
    } else {
        Q_EMIT instance->answerCreated(instance->m_localSdp, instance->m_localCandidates);
        instance->setState(CallSession::ANSWERSENT);
    }
}
// GStreamer bus watch: tears down the call on end-of-stream or on pipeline
// errors. Returning TRUE keeps the watch installed.
gboolean newBusMessage(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    Q_UNUSED(bus);
    INSTANCE
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        qCWarning(voip) << "End of stream";
        // TODO: Error handling
        instance->end();
        break;
    case GST_MESSAGE_ERROR: {
        GError *error = nullptr;
        gchar *debug = nullptr;
        gst_message_parse_error(msg, &error, &debug);
        qCWarning(voip) << "Error from element:" << GST_OBJECT_NAME(msg->src) << error->message;
        // TODO: Error handling
        g_clear_error(&error);
        g_free(debug);
        instance->end();
        break;
    }
    default:
        break;
    }
    return TRUE;
}
// Sessions are created through the startCall()/acceptCall() factories; the
// constructor itself sets nothing up.
CallSession::CallSession(QObject *parent)
    : QObject(parent)
{
}
// Applies the remote answer to our earlier offer. Ignored unless we are
// actually waiting for an answer (OFFERSENT); a malformed SDP ends the call.
void CallSession::acceptAnswer(const QString &sdp, const QVector<Candidate> &candidates, const QString &userId)
{
    qCDebug(voip) << "Accepting Answer";
    if (m_state != CallSession::OFFERSENT) {
        return;
    }
    GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
    if (!answer) {
        end();
        return;
    }
    acceptCandidates(candidates);
    setRemoteDescription(answer, userId);
    // NOTE(review): `answer` is not freed here, while acceptOffer() frees its
    // description after use — confirm ownership and whether this leaks.
}
// Applies a remote session description to the webrtcbin. Beforehand, walks
// the SDP media sections to record which RTP ssrc belongs to which stream
// (msid) and which user that stream comes from, so that incoming decoded pads
// can later be matched to call participants.
void CallSession::setRemoteDescription(GstWebRTCSessionDescription *remote, const QString &userId, GstPromise *promise)
{
    // NOTE(review): this webrtcbin reference is never unreffed — possible leak.
    GstElement *webrtcbin = binGetByName(m_pipe, "webrtcbin");
    auto sdp = remote->sdp;
    for (guint i = 0; i < gst_sdp_message_medias_len(sdp); i++) {
        auto media = gst_sdp_message_get_media(sdp, i);
        QList<uint32_t> ssrcs;
        QString msid;
        for (guint j = 0; j < gst_sdp_media_attributes_len(media); j++) {
            auto attribute = gst_sdp_media_get_attribute(media, j);
            // Attribute values are space-separated; the first token is the
            // ssrc number resp. the msid.
            if (!strcmp(attribute->key, "ssrc")) {
                ssrcs += QString(attribute->value).split(" ")[0].toUInt();
            }
            if (!strcmp(attribute->key, "msid")) {
                msid = QString(attribute->value).split(" ")[0];
            }
        }
        for (const auto &ssrc : ssrcs) {
            ssrcToMsid[ssrc] = msid;
        }
        msidToUserId[msid] = userId;
    }
    g_signal_emit_by_name(webrtcbin, "set-remote-description", remote, promise);
}
void CallSession::renegotiateOffer(const QString &_offer, const QString &userId, bool answer)
{
GstWebRTCSessionDescription *sdp = parseSDP(_offer, answer ? GST_WEBRTC_SDP_TYPE_ANSWER : GST_WEBRTC_SDP_TYPE_OFFER);
if (!sdp) {
Q_ASSERT(false);
}
GstElement *webrtcbin = binGetByName(m_pipe, "webrtcbin");
setRemoteDescription(sdp, userId);
qWarning() << "answer:" << answer;
if (!answer) {
GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, this, nullptr);
g_signal_emit_by_name(webrtcbin, "create-answer", nullptr, promise);
}
}
// Handles the offer of an incoming call: validates that it contains OPUS
// audio, brings up the pipeline, feeds in the remote candidates, then asks
// webrtcbin to create an answer.
void CallSession::acceptOffer(const QString &sdp, const QVector<Candidate> remoteCandidates, const QString &userId)
{
    Q_ASSERT(!sdp.isEmpty());
    Q_ASSERT(!remoteCandidates.isEmpty());
    qCDebug(voip) << "Accepting offer";
    if (m_state != CallSession::DISCONNECTED) {
        return;
    }
    m_isOffering = false;
    GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
    if (!offer) {
        qCCritical(voip) << "Not an offer";
        return;
    }
    int opusPayloadType;
    bool receiveOnly;
    bool sendOnly;
    if (getMediaAttributes(offer->sdp, "audio", "opus", opusPayloadType, receiveOnly, sendOnly)) {
        if (opusPayloadType == -1) {
            qCCritical(voip) << "No OPUS in offer";
            gst_webrtc_session_description_free(offer);
            return;
        }
    } else {
        qCCritical(voip) << "No audio in offer";
        gst_webrtc_session_description_free(offer);
        return;
    }
    startPipeline();
    // NOTE(review): fixed sleep, apparently to let the pipeline settle —
    // confirm and replace with a state-change callback if possible.
    QThread::msleep(1000); // ?
    acceptCandidates(remoteCandidates);
    auto promise = gst_promise_new_with_change_func(createAnswer, this, nullptr);
    setRemoteDescription(offer, userId, promise);
    gst_webrtc_session_description_free(offer);
}
// Starts an outgoing call: marks this session as the offering side and brings
// up the pipeline; the offer itself is created from webrtcbin's
// on-negotiation-needed signal (see startPipeline()).
void CallSession::createCall()
{
    qCDebug(voip) << "Creating call";
    m_isOffering = true;
    startPipeline();
}
// Builds the base (audio) pipeline, configures STUN/TURN on the webrtcbin,
// hooks up all webrtcbin signal handlers and sets the pipeline to PLAYING.
void CallSession::startPipeline()
{
    qCDebug(voip) << "Starting Pipeline";
    if (m_state != CallSession::DISCONNECTED) {
        return;
    }
    m_state = CallSession::INITIATING;
    Q_EMIT stateChanged();
    createPipeline();
    auto webrtcbin = binGetByName(m_pipe, "webrtcbin");
    Q_ASSERT(webrtcbin);
    // NOTE(review): dead branch — STUN configuration is disabled until a
    // setting for it exists.
    if (false /*TODO: CHECK USE STUN*/) {
        qCDebug(voip) << "Setting STUN server:" << STUN_SERVER;
        g_object_set(webrtcbin, "stun-server", STUN_SERVER, nullptr);
    }
    for (const auto &uri : m_turnServers) {
        qCDebug(voip) << "Setting turn server:" << uri;
        gboolean udata;
        g_signal_emit_by_name(webrtcbin, "add-turn-server", uri.toLatin1().data(), (gpointer)(&udata));
    }
    if (m_turnServers.empty()) {
        qCWarning(voip) << "No TURN servers provided";
    }
    // Only the caller creates the initial offer; the callee answers from
    // acceptOffer() instead.
    if (m_isOffering) {
        g_signal_connect(webrtcbin, "on-negotiation-needed", G_CALLBACK(::createOffer), this);
    }
    g_signal_connect(webrtcbin, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), this);
    g_signal_connect(webrtcbin, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), this);
    gst_element_set_state(m_pipe, GST_STATE_READY);
    g_signal_connect(webrtcbin, "pad-added", G_CALLBACK(addDecodeBin), this);
    g_signal_connect(webrtcbin, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), this);
    gst_object_unref(webrtcbin);
    GstStateChangeReturn ret = gst_element_set_state(m_pipe, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        // TODO: Error handling
        qCCritical(voip) << "Unable to start pipeline";
        end();
        return;
    }
    // Watch the bus so EOS/errors tear the call down.
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipe));
    m_busWatchId = gst_bus_add_watch(bus, newBusMessage, this);
    gst_object_unref(bus);
    m_state = CallSession::INITIATED;
    Q_EMIT stateChanged();
}
// Tears down the pipeline and marks the session disconnected. Safe to call
// repeatedly; subsequent calls are no-ops.
void CallSession::end()
{
    qCDebug(voip) << "Ending Call";
    if (m_pipe) {
        gst_element_set_state(m_pipe, GST_STATE_NULL);
        gst_object_unref(m_pipe);
        m_pipe = nullptr;
        // Stop the keyframe poll loop from touching the dead pipeline.
        keyFrameRequestData.pipe = nullptr;
        if (m_busWatchId) {
            g_source_remove(m_busWatchId);
            m_busWatchId = 0;
        }
    }
    if (m_state != CallSession::DISCONNECTED) {
        m_state = CallSession::DISCONNECTED;
        Q_EMIT stateChanged();
    }
}
// Builds the audio send path: microphone source -> volume ("srclevel", used
// for muting) -> OPUS encode -> RTP payload -> webrtcbin.
void CallSession::createPipeline()
{
    qCWarning(voip) << "Creating Pipeline";
    auto device = AudioSources::instance().currentDevice();
    if (!device) {
        // No microphone configured/available; m_pipe stays null.
        return;
    }
    m_pipe = gst_pipeline_new(nullptr);
    auto source = gst_device_create_element(device, nullptr);
    auto volume = createElement("volume", m_pipe, "srclevel");
    auto convert = createElement("audioconvert", m_pipe);
    auto resample = createElement("audioresample", m_pipe);
    auto queue1 = createElement("queue", m_pipe);
    auto opusenc = createElement("opusenc", m_pipe);
    auto rtp = createElement("rtpopuspay", m_pipe);
    auto queue2 = createElement("queue", m_pipe);
    auto capsfilter = createElement("capsfilter", m_pipe);
    // Restrict the webrtcbin input to OPUS RTP with our fixed payload type.
    auto rtpcaps = gst_caps_new_simple("application/x-rtp",
                                       "media",
                                       G_TYPE_STRING,
                                       "audio",
                                       "encoding-name",
                                       G_TYPE_STRING,
                                       "OPUS",
                                       "payload",
                                       G_TYPE_INT,
                                       OPUS_PAYLOAD_TYPE,
                                       nullptr);
    Q_ASSERT(rtpcaps);
    g_object_set(capsfilter, "caps", rtpcaps, nullptr);
    gst_caps_unref(rtpcaps);
    auto webrtcbin = createElement("webrtcbin", m_pipe, "webrtcbin");
    g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);
    // The device-created source element was not added via createElement().
    gst_bin_add_many(GST_BIN(m_pipe), source, nullptr);
    if (!gst_element_link_many(source, volume, convert, resample, queue1, opusenc, rtp, queue2, capsfilter, webrtcbin, nullptr)) {
        qCCritical(voip) << "Failed to link pipeline";
        // TODO propagate errors up and end call
        return;
    }
}
// Enables the camera by adding the video branch to the pipeline.
// NOTE(review): despite the name this cannot toggle the camera off, and the
// function-local static persists across CallSession instances — a second call
// in the same process would silently skip addVideoPipeline(). Consider
// per-session state instead.
void CallSession::toggleCamera()
{
    // TODO do this only once
    static bool inited = false;
    if (!inited) {
        addVideoPipeline();
        inited = true;
    }
}
// Adds the camera send path (camera -> VP8 encode -> RTP -> webrtcbin) to a
// running pipeline and triggers a renegotiation offer for the new stream. The
// local self-view is parked on a fakesink behind an output-selector until the
// participant's QML item is ready. Returns false when no camera is available
// or an element fails to link.
bool CallSession::addVideoPipeline()
{
    qCDebug(voip) << "Adding Video Pipeline";
    auto device = VideoSources::instance().currentDevice();
    if (!device) {
        // Check before use: the previous revision read device->caps ahead of
        // this null check and crashed when no camera was present.
        return false;
    }
    auto videoconvert = createElement("videoconvertscale", m_pipe);
    auto tee = createElement("tee", m_pipe);
    auto deviceCaps = device->caps[VideoSources::instance().capsIndex()];
    int width = deviceCaps.width;
    int height = deviceCaps.height;
    int framerate = deviceCaps.framerates.back();
    auto camera = gst_device_create_element(device->device, nullptr);
    gst_bin_add_many(GST_BIN(m_pipe), camera, nullptr);
    // Pin the camera to the selected resolution/framerate.
    auto caps =
        gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, framerate, 1, nullptr);
    auto camerafilter = createElement("capsfilter", m_pipe);
    g_object_set(camerafilter, "caps", caps, nullptr);
    gst_caps_unref(caps);
    gst_element_link(camera, videoconvert);
    if (!gst_element_link_many(videoconvert, camerafilter, nullptr)) {
        qCWarning(voip) << "Failed to link camera elements";
        // TODO: Error handling
        return false;
    }
    if (!gst_element_link(camerafilter, tee)) {
        qCWarning(voip) << "Failed to link camerafilter -> tee";
        // TODO: Error handling
        return false;
    }
    // Encoding branch: tee -> queue -> vp8enc -> rtpvp8pay -> webrtcbin.
    auto queue = createElement("queue", m_pipe);
    g_object_set(queue, "leaky", true, nullptr);
    auto vp8enc = createElement("vp8enc", m_pipe);
    g_object_set(vp8enc, "deadline", 1, nullptr);
    g_object_set(vp8enc, "error-resilient", 1, nullptr);
    auto rtpvp8pay = createElement("rtpvp8pay", m_pipe);
    auto rtpqueue = createElement("queue", m_pipe);
    auto rtpcapsfilter = createElement("capsfilter", m_pipe);
    auto rtpcaps = gst_caps_new_simple("application/x-rtp",
                                       "media",
                                       G_TYPE_STRING,
                                       "video",
                                       "encoding-name",
                                       G_TYPE_STRING,
                                       "VP8",
                                       "payload",
                                       G_TYPE_INT,
                                       VP8_PAYLOAD_TYPE,
                                       nullptr);
    g_object_set(rtpcapsfilter, "caps", rtpcaps, nullptr);
    gst_caps_unref(rtpcaps);
    auto webrtcbin = binGetByName(m_pipe, "webrtcbin");
    if (!gst_element_link_many(tee, queue, vp8enc, rtpvp8pay, rtpqueue, rtpcapsfilter, webrtcbin, nullptr)) {
        qCCritical(voip) << "Failed to link rtp video elements";
        gst_object_unref(webrtcbin);
        return false;
    }
    // The new m-line requires renegotiation: create a fresh offer.
    auto promise = gst_promise_new_with_change_func(setLocalDescription, this, nullptr);
    g_signal_emit_by_name(webrtcbin, "create-offer", nullptr, promise);
    gst_object_unref(webrtcbin);
    // Self-view branch: a second tee pad through an output-selector, parked
    // on a fakesink until the QML item is available.
    auto newpad = gst_element_request_pad_simple(tee, "src_%u");
    Q_ASSERT(newpad);
    auto fake = createElement("fakesink", m_pipe);
    auto selector = createElement("output-selector", m_pipe);
    auto selectorSink = gst_element_get_static_pad(selector, "sink");
    auto selectorSrc1 = gst_element_request_pad_simple(selector, "src_%u");
    gst_pad_link(newpad, selectorSink);
    auto fakepad = gst_element_get_static_pad(fake, "sink");
    gst_pad_link(selectorSrc1, fakepad);
    g_object_set(selector, "active-pad", selectorSrc1, nullptr);
    // gst_pad_add_probe(newpad, GST_PAD_PROBE_TYPE_EVENT_BOTH, pad_cb, stream, nullptr);
    auto manager = dynamic_cast<CallManager *>(parent());
    auto participants = manager->callParticipants();
    auto user = dynamic_cast<NeoChatUser *>(manager->room()->localUser());
    participants->setHasCamera(user, true);
    connectSingleShot(participants->callParticipantForUser(user), &CallParticipant::initialized, this, [=](QQuickItem *item) {
        // The surface exists: build the real sink chain and switch over.
        gst_pad_unlink(newpad, fakepad);
        Q_ASSERT(item);
        auto queue = newVideoSinkChain(m_pipe, item);
        Q_ASSERT(queue);
        auto queuepad = gst_element_get_static_pad(queue, "sink");
        Q_ASSERT(queuepad);
        auto selectorSrc = gst_element_request_pad_simple(selector, "src_%u");
        Q_ASSERT(selectorSrc);
        auto ok = GST_PAD_LINK_SUCCESSFUL(gst_pad_link(selectorSrc, queuepad));
        Q_ASSERT(ok);
        g_object_set(selector, "active-pad", selectorSrc, nullptr);
        gst_object_unref(queuepad);
        gst_element_set_state(m_pipe, GST_STATE_READY); // TODO experimental
        gst_element_set_state(m_pipe, GST_STATE_PLAYING); // TODO experimental
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(m_pipe), GST_DEBUG_GRAPH_SHOW_ALL, "foo");
    });
    return true;
}
// Stores the TURN server URIs; they are applied to the webrtcbin when the
// pipeline starts.
void CallSession::setTurnServers(QStringList servers)
{
    // Fold the URI list into the category log line (the previous stray
    // qWarning() dump was leftover debugging) and move the sink parameter.
    qCDebug(voip) << "Setting Turn Servers:" << servers;
    m_turnServers = std::move(servers);
}
void CallSession::acceptCandidates(const QVector<Candidate> &candidates)
{
qCDebug(voip) << "Accepting ICE Candidates";
auto webrtcbin = binGetByName(m_pipe, "webrtcbin");
for (const auto &c : candidates) {
qCDebug(voip) << "Remote candidate:" << c.candidate << c.sdpMLineIndex;
g_signal_emit_by_name(webrtcbin, "add-ice-candidate", c.sdpMLineIndex, c.candidate.toLatin1().data());
}
}
// Returns the names of all GStreamer plugins required for calls that are not
// present in the registry; an empty list means call support can work.
QStringList CallSession::missingPlugins()
{
    GstRegistry *registry = gst_registry_get();
    static const QVector<QString> videoPlugins = {
        QLatin1String("compositor"),
        QLatin1String("opengl"),
        QLatin1String("qmlgl"),
        QLatin1String("rtp"),
        QLatin1String("videoconvertscale"),
        QLatin1String("vpx"),
    };
    static const QVector<QString> audioPlugins = {
        QStringLiteral("audioconvert"),
        QStringLiteral("audioresample"),
        QStringLiteral("autodetect"),
        QStringLiteral("dtls"),
        QStringLiteral("nice"),
        QStringLiteral("opus"),
        QStringLiteral("playback"),
        QStringLiteral("rtpmanager"),
        QStringLiteral("srtp"),
        QStringLiteral("volume"),
        QStringLiteral("webrtc"),
    };
    QStringList missingPlugins;
    for (const auto &pluginName : videoPlugins + audioPlugins) {
        auto plugin = gst_registry_find_plugin(registry, pluginName.toLatin1().data());
        if (!plugin) {
            missingPlugins << pluginName;
        } else {
            // Only unref actual hits: gst_object_unref(nullptr) — as the
            // previous revision did for every miss — triggers a GLib critical.
            gst_object_unref(plugin);
        }
    }
    return missingPlugins;
}
// Mutes/unmutes the microphone by toggling the "mute" property on the
// "srclevel" volume element.
void CallSession::setMuted(bool muted)
{
    // Guard against calls before the pipeline exists (muted() has the same
    // guard); binGetByName() would assert/crash on a null pipe.
    if (!m_pipe) {
        return;
    }
    const auto srclevel = binGetByName(m_pipe, "srclevel");
    g_object_set(srclevel, "mute", muted, nullptr);
    gst_object_unref(srclevel);
    Q_EMIT mutedChanged();
}
bool CallSession::muted() const
{
if (m_state < CallSession::CONNECTING) {
return false;
}
if (!m_pipe) {
return false;
}
const auto srclevel = binGetByName(m_pipe, "srclevel");
bool muted;
if (!srclevel) {
return false;
}
g_object_get(srclevel, "mute", &muted, nullptr);
// gst_object_unref(srclevel); //TODO why does this crash?
return muted;
}
/**
 * Factory for the callee side of a call: creates a session, applies the
 * TURN configuration, then processes the remote offer and candidates.
 */
CallSession *
CallSession::acceptCall(const QString &sdp, const QVector<Candidate> &candidates, const QStringList &turnUris, const QString &userId, QObject *parent)
{
    auto session = new CallSession(parent);
    // TURN must be configured before the offer is handled so that ICE
    // gathering for the answer can use the relays.
    session->setTurnServers(turnUris);
    session->acceptOffer(sdp, candidates, userId);
    return session;
}
/**
 * Factory for the caller side of a call: creates a session, applies the
 * TURN configuration, then builds the pipeline and creates the offer.
 */
CallSession *CallSession::startCall(const QStringList &turnUris, QObject *parent)
{
    auto session = new CallSession(parent);
    session->setTurnServers(turnUris);
    session->createCall();
    return session;
}
// Returns the current lifecycle state of this call session.
CallSession::State CallSession::state() const
{
    return m_state;
}
/**
 * Update the session state and notify observers.
 *
 * Always emits stateChanged(), even if the value is unchanged; callers may
 * rely on the signal for re-entry into a state.
 */
void CallSession::setState(CallSession::State state)
{
    // A routine state transition is debug information, not a warning.
    qCDebug(voip) << "Setting state" << state;
    m_state = state;
    Q_EMIT stateChanged();
}
// Stores the SDP stream metadata (e.g. from the org.matrix.msc3077
// sdp_stream_metadata field) for later lookup; no signal is emitted.
void CallSession::setMetadata(QJsonObject metadata)
{
    m_metadata = metadata;
}

113
src/call/callsession.h Normal file
View File

@@ -0,0 +1,113 @@
// SPDX-FileCopyrightText: 2021 Nheko Contributors
// SPDX-FileCopyrightText: 2021 Carl Schwan <carl@carlschwan.eu>
// SPDX-FileCopyrightText: 2021-2022 Tobias Fella <fella@posteo.de>
//
// SPDX-License-Identifier: GPL-3.0-or-later
#pragma once
#include <QJsonObject>
#include <QMetaType>
#include <QObject>
#include <QQuickItem>
#include <QString>
#include <variant>
#define GST_USE_UNSTABLE_API
#include <gst/webrtc/webrtc.h>
#include <gst/gst.h>
#define OPUS_PAYLOAD_TYPE 111
#define VP8_PAYLOAD_TYPE 96
class CallDevices;
class VideoStream;
// One ICE candidate exchanged with the remote peer, mirroring the fields
// carried in Matrix m.call.candidates events.
struct Candidate {
    QString candidate; // the raw SDP candidate line
    int sdpMLineIndex; // index of the m= line this candidate belongs to
    QString sdpMid; // media stream identification tag
};
Q_DECLARE_METATYPE(Candidate)
Q_DECLARE_METATYPE(QVector<Candidate>)
/**
 * @brief Wraps a GStreamer webrtcbin pipeline for a single Matrix call.
 *
 * Instances are created through the static factories startCall() (caller
 * side) and acceptCall() (callee side); the constructor is private.
 */
class CallSession : public QObject
{
    Q_OBJECT

public:
    // Lifecycle states, ordered roughly chronologically so relational
    // comparisons (e.g. m_state < CONNECTING) are meaningful.
    enum State {
        DISCONNECTED,
        ICEFAILED,
        INITIATING,
        INITIATED,
        OFFERSENT,
        ANSWERSENT,
        CONNECTING,
        CONNECTED,
    };
    Q_ENUM(State);

    Q_PROPERTY(CallSession::State state READ state NOTIFY stateChanged)
    Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged)

    // For outgoing calls
    static CallSession *startCall(const QStringList &turnUris, QObject *parent = nullptr);
    // Handle the remote answer to an offer we sent.
    void acceptAnswer(const QString &sdp, const QVector<Candidate> &candidates, const QString &parent);
    // For incoming calls
    static CallSession *
    acceptCall(const QString &sdp, const QVector<Candidate> &candidates, const QStringList &turnUris, const QString &userId, QObject *parent = nullptr);
    // Tear down the pipeline and end the call.
    void end();
    void renegotiateOffer(const QString &offer, const QString &userId, bool answer);
    void setTurnServers(QStringList servers);
    // Names of required-but-missing GStreamer plugins (empty = all present).
    static QStringList missingPlugins();
    CallSession::State state() const;
    void toggleCamera();
    bool muted() const;
    void setMuted(bool muted);
    void setMetadata(QJsonObject metadata);
    void acceptCandidates(const QVector<Candidate> &candidates);

    // Maps SDP msid values to Matrix user ids for stream attribution.
    QMap<QString, QString> msidToUserId;

Q_SIGNALS:
    void stateChanged();
    void offerCreated(const QString &sdp, const QVector<Candidate> &candidates);
    void answerCreated(const QString &sdp, const QVector<Candidate> &candidates);
    void mutedChanged();
    void newVideoStream(VideoStream *stream);
    void renegotiate(QString sdp, const QString &type);

private:
    CallSession(QObject *parent = nullptr);
    void acceptOffer(const QString &sdp, const QVector<Candidate> remoteCandidates, const QString &userId);
    void createCall();
    void setRemoteDescription(GstWebRTCSessionDescription *remote, const QString &userId, GstPromise *promise = nullptr);
    void startPipeline();
    void createPipeline();
    bool addVideoPipeline();
    void setState(CallSession::State state);

    // input-selector pads used to switch the video source on/off.
    GstPad *m_activePad;
    GstElement *m_inputSelector;
    CallSession::State m_state = CallSession::DISCONNECTED;
    unsigned int m_busWatchId = 0;
    QStringList m_turnServers;
    QVector<Candidate> m_localCandidates;
    QString m_localSdp;
    GstElement *m_pipe = nullptr;
    // True when this side created the offer (caller role).
    bool m_isOffering = false;
    QMap<int, QString> ssrcToMsid;
    QJsonObject m_metadata;
    GstPad *m_inactivePad;
};

165
src/call/devicemonitor.cpp Normal file
View File

@@ -0,0 +1,165 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "devicemonitor.h"
#include "voiplogging.h"
#include <QTimer>
// Debug-stream helper: serializes a GstStructure so it can be printed with
// qCDebug and friends.
QDebug operator<<(QDebug dbg, const GstStructure *props)
{
    QDebugStateSaver saver(dbg);
    gchar *serialized = gst_structure_to_string(props);
    dbg << serialized;
    g_free(serialized);
    return dbg;
}
// Trampoline: forwards GStreamer bus messages to the DeviceMonitor
// instance passed as user data when the watch was installed.
static gboolean deviceCallback(GstBus *bus, GstMessage *message, gpointer user_data)
{
    Q_UNUSED(bus);
    return static_cast<DeviceMonitor *>(user_data)->callback(message);
}
DeviceMonitor::DeviceMonitor()
    : QObject()
{
    // Defer the GStreamer setup until the event loop runs, so constructing
    // the singleton early does not start the device monitor prematurely.
    QTimer::singleShot(0, this, &DeviceMonitor::init);
}
void DeviceMonitor::init()
{
if (m_monitor) {
return;
}
m_monitor = gst_device_monitor_new();
GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
gst_device_monitor_add_filter(m_monitor, "Audio/Source", caps);
gst_caps_unref(caps);
caps = gst_caps_new_empty_simple("video/x-raw");
gst_device_monitor_add_filter(m_monitor, "Video/Source", caps);
gst_caps_unref(caps);
GstBus *bus = gst_device_monitor_get_bus(m_monitor);
gst_bus_add_watch(bus, deviceCallback, this);
gst_object_unref(bus);
if (!gst_device_monitor_start(m_monitor)) {
qWarning() << "Failed to start device monitor";
}
}
// Returns the currently known audio capture devices (pointers owned by the
// monitor).
QVector<AudioSource *> DeviceMonitor::audioSources() const
{
    return m_audioSources;
}
// Returns the currently known video capture devices (pointers owned by the
// monitor).
QVector<VideoSource *> DeviceMonitor::videoSources() const
{
    return m_videoSources;
}
/**
 * Record a newly announced video capture device and the raw video modes
 * (resolution + frame rates) it supports, then notify listeners.
 */
void DeviceMonitor::handleVideoSource(GstDevice *device)
{
    auto source = new VideoSource();
    auto title = gst_device_get_display_name(device);
    source->title = QString(title);
    g_free(title);
    // NOTE(review): the GstDevice pointer is stored without taking a ref,
    // while callback() unrefs the device after this returns — presumably
    // the monitor keeps it alive; confirm, or gst_object_ref() it here.
    source->device = device;
    auto caps = gst_device_get_caps(device);
    if (caps) {
        auto size = gst_caps_get_size(caps);
        for (size_t i = 0; i < size; i++) {
            VideoCap videoCap;
            GstStructure *cap = gst_caps_get_structure(caps, i);
            // gst_structure_get_name() returns a string owned by the
            // structure; it must not be freed.
            const gchar *name = gst_structure_get_name(cap);
            if (strcmp(name, "video/x-raw")) {
                continue; // only raw video modes are interesting here
            }
            gst_structure_get(cap, "width", G_TYPE_INT, &videoCap.width, "height", G_TYPE_INT, &videoCap.height, nullptr);
            const auto framerate = gst_structure_get_value(cap, "framerate");
            if (GST_VALUE_HOLDS_FRACTION(framerate)) {
                auto numerator = gst_value_get_fraction_numerator(framerate);
                auto denominator = gst_value_get_fraction_denominator(framerate);
                videoCap.framerates += (float)numerator / denominator;
            }
            // Structures are owned by the caps; no per-structure unref.
            source->caps += videoCap;
        }
        // gst_device_get_caps() is transfer-full; previously leaked.
        gst_caps_unref(caps);
    }
    m_videoSources += source;
    Q_EMIT videoSourceAdded();
}
/**
 * Record a newly announced audio capture device, noting whether the
 * backend marks it as the system default, then notify listeners.
 */
void DeviceMonitor::handleAudioSource(GstDevice *device)
{
    auto source = new AudioSource();
    auto title = gst_device_get_display_name(device);
    source->title = QString(title);
    g_free(title);
    gboolean isDefault = false;
    // gst_device_get_properties() may return nullptr for some backends.
    GstStructure *props = gst_device_get_properties(device);
    if (props) {
        if (gst_structure_has_field(props, "is-default")) {
            gst_structure_get_boolean(props, "is-default", &isDefault);
        }
        gst_structure_free(props);
    }
    source->isDefault = isDefault;
    // NOTE(review): stored without taking a ref — see handleVideoSource().
    source->device = device;
    m_audioSources += source;
    Q_EMIT audioSourceAdded();
}
/**
 * Bus message handler for the device monitor: dispatches device
 * added/removed messages to the matching source lists.
 *
 * @return G_SOURCE_CONTINUE so the watch stays installed.
 */
bool DeviceMonitor::callback(GstMessage *message)
{
    GstDevice *device;
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_DEVICE_ADDED: {
        gst_message_parse_device_added(message, &device);
        auto name = gst_device_get_display_name(device);
        auto props = gst_device_get_properties(device);
        qCDebug(voip) << name << props;
        if (props) {
            gst_structure_free(props);
        }
        if (gst_device_has_classes(device, "Video/Source")) {
            handleVideoSource(device);
        } else if (gst_device_has_classes(device, "Audio/Source")) {
            handleAudioSource(device);
        }
        g_free(name);
        gst_object_unref(device);
        break;
    }
    case GST_MESSAGE_DEVICE_REMOVED: {
        gst_message_parse_device_removed(message, &device);
        auto name = gst_device_get_display_name(device);
        auto props = gst_device_get_properties(device);
        qCDebug(voip) << name << props;
        if (props) {
            // Previously leaked: the ADDED branch freed props, this one
            // did not.
            gst_structure_free(props);
        }
        // NOTE(review): the matched VideoSource/AudioSource objects are
        // removed from the list but never deleted; they may still be
        // referenced by the source models, so deleting here could dangle —
        // needs an ownership decision.
        if (gst_device_has_classes(device, "Video/Source")) {
            m_videoSources.erase(std::remove_if(m_videoSources.begin(),
                                                m_videoSources.end(),
                                                [name](auto d) {
                                                    return d->title == QString(name);
                                                }),
                                 m_videoSources.end());
            Q_EMIT videoSourceRemoved();
        } else if (gst_device_has_classes(device, "Audio/Source")) {
            m_audioSources.erase(std::remove_if(m_audioSources.begin(),
                                                m_audioSources.end(),
                                                [name](auto d) {
                                                    return d->title == QString(name);
                                                }),
                                 m_audioSources.end());
            Q_EMIT audioSourceRemoved();
        }
        g_free(name);
        gst_object_unref(device);
        break;
    }
    default:
        break;
    }
    return G_SOURCE_CONTINUE;
}

59
src/call/devicemonitor.h Normal file
View File

@@ -0,0 +1,59 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QtCore/QDebug>
#include <QtCore/QObject>
#include <QtCore/QVector>
#include <gst/gst.h>
// An audio capture device announced by the GstDeviceMonitor.
struct AudioSource {
    QString title; // human-readable device name
    GstDevice *device; // owned by GStreamer; see DeviceMonitor
    bool isDefault; // whether the backend reports this as the default input
};
// One raw-video mode (resolution and the frame rates available for it).
struct VideoCap {
    int width;
    int height;
    QVector<float> framerates;
};
// A video capture device announced by the GstDeviceMonitor.
struct VideoSource {
    QString title; // human-readable device name
    GstDevice *device; // owned by GStreamer; see DeviceMonitor
    QVector<VideoCap> caps; // supported raw-video modes
};
/**
 * @brief Singleton that tracks audio and video capture devices via a
 * GstDeviceMonitor and exposes them as simple lists.
 */
class DeviceMonitor : public QObject
{
    Q_OBJECT

public:
    static DeviceMonitor &instance()
    {
        static DeviceMonitor _instance;
        return _instance;
    }

    QVector<AudioSource *> audioSources() const;
    QVector<VideoSource *> videoSources() const;

    // GStreamer bus message handler; public so the C trampoline can call it.
    bool callback(GstMessage *message);
    // Deferred setup; safe to call more than once.
    void init();

Q_SIGNALS:
    void videoSourceAdded();
    void audioSourceAdded();
    void videoSourceRemoved();
    void audioSourceRemoved();

private:
    DeviceMonitor();
    GstDeviceMonitor *m_monitor = nullptr;
    QVector<AudioSource *> m_audioSources;
    QVector<VideoSource *> m_videoSources;
    void handleVideoSource(GstDevice *device);
    void handleAudioSource(GstDevice *device);
};

142
src/call/videosources.cpp Normal file
View File

@@ -0,0 +1,142 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "videosources.h"
#include <gst/gst.h>
// #include "pipelinemanager.h"
#include <QDebug>
#include <QString>
#include "devicemonitor.h"
#include "neochatconfig.h"
// One row per video capture device currently known to the DeviceMonitor.
int VideoSources::rowCount(const QModelIndex &parent) const
{
    Q_UNUSED(parent);
    return DeviceMonitor::instance().videoSources().size();
}
/**
 * Model data: only TitleRole (the device display name) is provided.
 */
QVariant VideoSources::data(const QModelIndex &index, int role) const
{
    // Invalid or out-of-range indices yield an empty value; the previous
    // "DEADBEEF" string was a debug placeholder.
    if (!index.isValid() || index.row() >= DeviceMonitor::instance().videoSources().size()) {
        return {};
    }
    if (role == TitleRole) {
        return DeviceMonitor::instance().videoSources()[index.row()]->title;
    }
    return {};
}
// QML role mapping; only the device title is exposed.
QHash<int, QByteArray> VideoSources::roleNames() const
{
    return {
        {TitleRole, "title"},
    };
}
VideoSources::VideoSources()
    : QAbstractListModel()
{
    // Device additions and removals both invalidate the whole model and
    // may shift the configured device's index; share one handler.
    auto refreshModel = [this]() {
        beginResetModel();
        endResetModel();
        Q_EMIT currentIndexChanged();
    };
    connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceAdded, this, refreshModel);
    connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceRemoved, this, refreshModel);
}
// NOTE(review): experimental/leftover helper — builds a capture bin
// (device source -> videoconvert -> 1920x1080@5fps capsfilter) with a
// ghost "src" pad, but the bin is never used or freed since the
// PipelineManager call is commented out; presumably dead code slated for
// the screen-share/renegotiation work. No bounds check on index.
void VideoSources::foo(int index)
{
    auto device = DeviceMonitor::instance().videoSources()[index]->device;
    auto bin = gst_bin_new(nullptr);
    GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
    GstElement *filter = gst_element_factory_make("capsfilter", nullptr);
    GstCaps *caps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, 1920, "height", G_TYPE_INT, 1080, "framerate", GST_TYPE_FRACTION, 5, 1, nullptr);
    g_object_set(filter, "caps", caps, nullptr);
    gst_caps_unref(caps);
    GstElement *deviceElement = gst_device_create_element(device, nullptr);
    gst_bin_add_many(GST_BIN(bin), deviceElement, videoconvert, filter, nullptr);
    gst_element_link_many(deviceElement, videoconvert, filter, nullptr);
    // Expose the filter's src pad as the bin's own "src" pad.
    GstPad *pad = gst_element_get_static_pad(filter, "src");
    auto ghostpad = gst_ghost_pad_new("src", pad);
    gst_element_add_pad(bin, ghostpad);
    gst_object_unref(pad);
    // PipelineManager::instance().add(bin);
}
/**
 * The camera selected in the configuration, falling back to the first
 * available device; nullptr when no camera exists.
 */
const VideoSource *VideoSources::currentDevice() const
{
    const auto &sources = DeviceMonitor::instance().videoSources();
    const QString configured = NeoChatConfig::self()->camera();
    for (const auto &source : sources) {
        if (source->title == configured) {
            qDebug() << "WebRTC: camera:" << configured;
            return source;
        }
    }
    return sources.isEmpty() ? nullptr : sources[0];
}
/**
 * Persist the camera at @p index as the configured device and reset the
 * caps selection for it.
 */
void VideoSources::setCurrentIndex(int index)
{
    const auto &sources = DeviceMonitor::instance().videoSources();
    // Reject out-of-range indices (previously only the empty list was
    // guarded, so a stale index from QML could read past the end).
    if (index < 0 || index >= sources.size()) {
        return;
    }
    NeoChatConfig::setCamera(sources[index]->title);
    NeoChatConfig::self()->save();
    setCapsIndex(0);
}
int VideoSources::currentIndex() const
{
const auto config = NeoChatConfig::self();
const QString name = config->camera();
for (auto i = 0; i < DeviceMonitor::instance().videoSources().size(); i++) {
if (DeviceMonitor::instance().videoSources()[i]->title == name) {
return i;
}
}
return 0;
}
/**
 * Human-readable list of the video modes of the device at @p index, e.g.
 * "1920x1080, 30 FPS", for the settings UI.
 */
QStringList VideoSources::caps(int index) const
{
    if (index < 0 || index >= DeviceMonitor::instance().videoSources().size()) {
        return QStringList();
    }
    const auto &caps = DeviceMonitor::instance().videoSources()[index]->caps;
    QStringList strings;
    for (const auto &cap : caps) {
        if (cap.framerates.isEmpty()) {
            // A mode can be listed without a fraction-typed framerate
            // (see handleVideoSource()); back() on an empty vector is UB.
            strings += QStringLiteral("%1x%2").arg(cap.width).arg(cap.height);
        } else {
            strings += QStringLiteral("%1x%2, %3 FPS").arg(cap.width).arg(cap.height).arg(cap.framerates.back());
        }
    }
    return strings;
}
// Persist which of the current camera's modes (see caps()) is selected.
void VideoSources::setCapsIndex(int index)
{
    NeoChatConfig::self()->setCameraCaps(index);
    NeoChatConfig::self()->save();
    Q_EMIT capsIndexChanged();
}
// The persisted mode index for the configured camera.
int VideoSources::capsIndex() const
{
    return NeoChatConfig::self()->cameraCaps();
}

51
src/call/videosources.h Normal file
View File

@@ -0,0 +1,51 @@
// SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QtCore/QAbstractListModel>
#include <gst/gst.h>
#include "devicemonitor.h"
/**
 * @brief List model over the video capture devices known to the
 * DeviceMonitor, plus persistence of the selected camera and video mode.
 */
class VideoSources : public QAbstractListModel
{
    Q_OBJECT

    Q_PROPERTY(int currentIndex READ currentIndex WRITE setCurrentIndex NOTIFY currentIndexChanged)
    Q_PROPERTY(int capsIndex READ capsIndex WRITE setCapsIndex NOTIFY capsIndexChanged)

public:
    enum Roles {
        TitleRole = Qt::UserRole + 1,
        DeviceRole,
    };

    static VideoSources &instance()
    {
        static VideoSources _instance;
        return _instance;
    }

    int rowCount(const QModelIndex &parent = QModelIndex()) const override;
    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
    QHash<int, QByteArray> roleNames() const override;

    // NOTE(review): experimental leftover; builds an unused capture bin.
    Q_INVOKABLE void foo(int index);

    const VideoSource *currentDevice() const;
    void setCurrentIndex(int index);
    int currentIndex() const;
    void setCapsIndex(int index);
    int capsIndex() const;
    // Human-readable video modes of the device at index, for the UI.
    Q_INVOKABLE QStringList caps(int index) const;

Q_SIGNALS:
    void currentIndexChanged();
    void capsIndexChanged();

private:
    VideoSources();
};

View File

@@ -736,3 +736,12 @@ QVariantList Controller::getSupportedRoomVersions(Quotient::Connection *connecti
return supportedRoomVersions;
}
// Whether this build has call support, i.e. was compiled against
// GStreamer (see the GSTREAMER pkg-config check in CMake). Exposed to QML
// to gate the call UI.
bool Controller::callsSupported() const
{
#ifdef GSTREAMER_AVAILABLE
    return true;
#else
    return false;
#endif
}

View File

@@ -100,6 +100,7 @@ class Controller : public QObject
* This is the only way to gate NeoChat features in flatpaks in QML.
*/
Q_PROPERTY(bool isFlatpak READ isFlatpak CONSTANT)
Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
public:
/**
@@ -197,6 +198,7 @@ public:
int quotientMinorVersion() const;
bool isFlatpak() const;
bool callsSupported() const;
/**
* @brief Return a string for the input timestamp.

View File

@@ -86,6 +86,8 @@
#ifdef QUOTIENT_07
#include <keyverificationsession.h>
#endif
#include <room.h>
#ifdef HAVE_COLORSCHEME
#include "colorschemer.h"
#endif
@@ -93,6 +95,14 @@
#include "models/statemodel.h"
#include "neochatuser.h"
#ifdef GSTREAMER_AVAILABLE
#include "call/audiosources.h"
#include "call/callmanager.h"
#include "call/callparticipant.h"
#include "call/videosources.h"
#include "models/callparticipantsmodel.h"
#endif
#ifdef HAVE_RUNNER
#include "runner.h"
#include <QDBusConnection>
@@ -183,6 +193,22 @@ int main(int argc, char *argv[])
#endif
QStringLiteral("https://github.com/quotient-im/libquotient"),
KAboutLicense::LGPL_V2_1);
#ifdef GSTREAMER_AVAILABLE
guint major, minor, micro, nano;
gst_version(&major, &minor, &micro, &nano);
about.addComponent(QStringLiteral("GStreamer"),
i18nc("Description of GStreamer", "Open Source Multimedia Framework"),
i18nc("<version number> (built against <possibly different version number>)",
"%1.%2.%3.%4 (built against %5.%6.%7.%8)",
major,
minor,
micro,
nano,
GST_VERSION_MAJOR,
GST_VERSION_MINOR,
GST_VERSION_MICRO,
GST_VERSION_NANO));
#endif
KAboutData::setApplicationData(about);
QGuiApplication::setWindowIcon(QIcon::fromTheme(QStringLiteral("org.kde.neochat")));
@@ -263,6 +289,10 @@ int main(int argc, char *argv[])
qmlRegisterUncreatableType<NeoChatUser>("org.kde.neochat", 1, 0, "NeoChatUser", {});
qmlRegisterUncreatableType<NeoChatRoom>("org.kde.neochat", 1, 0, "NeoChatRoom", {});
#ifdef GSTREAMER_AVAILABLE
qmlRegisterUncreatableType<CallParticipantsModel>("org.kde.neochat", 1, 0, "CallParticipantsModel", "Get through CallManager");
qmlRegisterUncreatableType<CallParticipant>("org.kde.neochat", 1, 0, "CallParticipant", "Get through model");
#endif
qRegisterMetaType<User *>("User*");
qRegisterMetaType<User *>("const User*");
qRegisterMetaType<User *>("const Quotient::User*");
@@ -279,6 +309,13 @@ int main(int argc, char *argv[])
qmlRegisterUncreatableType<KeyVerificationSession>("org.kde.neochat", 1, 0, "KeyVerificationSession", {});
qRegisterMetaType<QVector<EmojiEntry>>("QVector<EmojiEntry>");
#endif
#endif
#ifdef GSTREAMER_AVAILABLE
qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "AudioSources", &AudioSources::instance());
qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "VideoSources", &VideoSources::instance());
qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "CallManager", &CallManager::instance());
qmlRegisterUncreatableType<CallSession>("org.kde.neochat", 1, 0, "CallSession", "ENUM");
#endif
qmlRegisterSingletonType("org.kde.neochat", 1, 0, "About", [](QQmlEngine *engine, QJSEngine *) -> QJSValue {
return engine->toScriptValue(KAboutData::applicationData());

View File

@@ -0,0 +1,58 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-2.0-or-later
#include "callparticipantsmodel.h"
/**
 * Model data: ObjectRole yields the CallParticipant pointer itself; the
 * other roles are read from the object in QML.
 */
QVariant CallParticipantsModel::data(const QModelIndex &index, int role) const
{
    // Guard against invalid/out-of-range indices before dereferencing.
    if (!index.isValid() || index.row() >= m_callParticipants.size()) {
        return {};
    }
    if (role == ObjectRole) {
        return QVariant::fromValue(m_callParticipants[index.row()]);
    }
    return {};
}
// One row per participant in the current call.
int CallParticipantsModel::rowCount(const QModelIndex &parent) const
{
    Q_UNUSED(parent);
    return m_callParticipants.size();
}
/**
 * Remove all participants, e.g. when a call ends.
 */
void CallParticipantsModel::clear()
{
    if (m_callParticipants.isEmpty()) {
        // beginRemoveRows(…, 0, -1) is an invalid range for an empty model.
        return;
    }
    beginRemoveRows(QModelIndex(), 0, m_callParticipants.size() - 1);
    m_callParticipants.clear();
    endRemoveRows();
}
/**
 * Find the participant entry for @p user.
 *
 * @return The matching CallParticipant, or nullptr if the user is not in
 *         the call. Linear scan; participant lists are small.
 */
CallParticipant *CallParticipantsModel::callParticipantForUser(NeoChatUser *user)
{
    for (auto *participant : m_callParticipants) {
        if (participant->m_user == user) {
            return participant;
        }
    }
    return nullptr;
}
// QML role mapping for the call grid delegates.
QHash<int, QByteArray> CallParticipantsModel::roleNames() const
{
    return {
        {WidthRole, "width"},
        {HeightRole, "height"},
        {PadRole, "pad"},
        {ObjectRole, "object"},
    };
}
// Append a participant; the model does not take ownership of the pointer.
void CallParticipantsModel::addParticipant(CallParticipant *callParticipant)
{
    beginInsertRows(QModelIndex(), m_callParticipants.size(), m_callParticipants.size());
    m_callParticipants += callParticipant;
    endInsertRows();
}
/**
 * Update whether @p user currently shares video; no-op when the user is
 * not a known participant.
 */
void CallParticipantsModel::setHasCamera(NeoChatUser *user, bool hasCamera)
{
    // Single lookup (previously two) and a null guard: the original
    // dereferenced the result unconditionally.
    auto participant = callParticipantForUser(user);
    if (!participant) {
        return;
    }
    participant->m_hasCamera = hasCamera;
    Q_EMIT participant->hasCameraChanged();
}

View File

@@ -0,0 +1,38 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-2.0-or-later
#pragma once
#include <QAbstractListModel>
#include <QVector>
#include "call/callparticipant.h"
#include "neochatuser.h"
/**
 * @brief List model of the participants in the active call, consumed by
 * the CallPage grid in QML.
 *
 * CallParticipant pointers are not owned by the model.
 */
class CallParticipantsModel : public QAbstractListModel
{
    Q_OBJECT

public:
    enum Roles {
        WidthRole,
        HeightRole,
        PadRole,
        ObjectRole, // the CallParticipant object itself
    };
    Q_ENUM(Roles);

    [[nodiscard]] QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
    [[nodiscard]] int rowCount(const QModelIndex &parent = QModelIndex()) const override;
    [[nodiscard]] QHash<int, QByteArray> roleNames() const override;
    // (A stray empty declaration ';' was removed here.)
    void addParticipant(CallParticipant *callParticipant);
    // Returns nullptr when the user is not in the call.
    CallParticipant *callParticipantForUser(NeoChatUser *user);
    void setHasCamera(NeoChatUser *user, bool hasCamera);
    void clear();

private:
    QVector<CallParticipant *> m_callParticipants;
};

View File

@@ -521,6 +521,9 @@ QVariant MessageEventModel::data(const QModelIndex &idx, int role) const
return DelegateType::Message;
}
if (evt.matrixType() == "m.call.invite") {
return DelegateType::CallInvite;
}
if (is<const StickerEvent>(evt)) {
return DelegateType::Sticker;
}

View File

@@ -53,6 +53,7 @@ public:
ReadMarker, /**< The local user read marker. */
Poll, /**< The initial event for a poll. */
Location, /**< A location event. */
CallInvite, /**< An invitation to a call. */
Other, /**< Anything that cannot be classified as another type. */
};
Q_ENUM(DelegateType);

View File

@@ -234,12 +234,23 @@ void RoomListModel::handleNotifications()
} else {
avatar_image = room->avatar(128);
}
NotificationsManager::instance().postNotification(dynamic_cast<NeoChatRoom *>(room),
sender->displayname(room),
notification["event"].toObject()["content"].toObject()["body"].toString(),
avatar_image,
notification["event"].toObject()["event_id"].toString(),
true);
if (notification["event"]["type"].toString() == QStringLiteral("m.call.invite")) {
#ifdef GSTREAMER_AVAILABLE
NotificationsManager::instance().postCallInviteNotification(
dynamic_cast<NeoChatRoom *>(room),
room->displayName(),
sender->displayname(room),
avatar_image,
notification["event"]["content"]["offer"]["sdp"].toString().contains(QStringLiteral("video")));
#endif
} else {
NotificationsManager::instance().postNotification(dynamic_cast<NeoChatRoom *>(room),
sender->displayname(room),
notification["event"].toObject()["content"].toObject()["body"].toString(),
avatar_image,
notification["event"].toObject()["event_id"].toString(),
true);
}
}
}
});

View File

@@ -54,12 +54,12 @@ QVariant AccountRegistry::data(const QModelIndex &index, int role) const
const auto account = m_accounts[index.row()];
switch (role) {
case ConnectionRole:
return QVariant::fromValue(account);
case UserIdRole:
return QVariant::fromValue(account->userId());
default:
return {};
case ConnectionRole:
return QVariant::fromValue(account);
case UserIdRole:
return QVariant::fromValue(account->userId());
default:
return {};
}
return {};

View File

@@ -147,5 +147,23 @@
<default></default>
</entry>
</group>
<group name="Voip">
<entry name="Microphone" type="string">
<label>Name of the microphone</label>
</entry>
<entry name="Camera" type="string">
<label>Name of the camera</label>
</entry>
<entry name="CameraCaps" type="int">
<label>Index of the camera caps</label>
</entry>
<entry name="ScreenShareFrameRate" type="int">
<label>Frame rate of the screenshare</label>
</entry>
<entry name="Ringtone" type="String">
<label>Ringtone</label>
<default>/usr/share/sounds/plasma-mobile/stereo/ringtones/Spatial.oga</default>
</entry>
</group>
</kcfg>

View File

@@ -42,6 +42,9 @@
#endif
#include <qt_connection_util.h>
#ifdef GSTREAMER_AVAILABLE
#include "call/callmanager.h"
#endif
#include "controller.h"
#include "events/joinrulesevent.h"
#include "neochatconfig.h"
@@ -122,6 +125,13 @@ NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinS
Q_EMIT canEncryptRoomChanged();
});
connect(connection, &Connection::capabilitiesLoaded, this, &NeoChatRoom::maxRoomVersionChanged);
#ifdef GSTREAMER_AVAILABLE
connect(this, &Room::callEvent, this, [=](Room *room, const RoomEvent *event) {
CallManager::instance().handleCallEvent(static_cast<NeoChatRoom *>(room), event);
});
#endif
connect(this, &Room::changed, this, [this]() {
Q_EMIT defaultUrlPreviewStateChanged();
});
@@ -1677,7 +1687,6 @@ void NeoChatRoom::setPushNotificationState(PushNotificationState::State state)
m_currentPushNotificationState = state;
Q_EMIT pushNotificationStateChanged(m_currentPushNotificationState);
}
void NeoChatRoom::updatePushNotificationState(QString type)

View File

@@ -22,6 +22,9 @@
#include <csapi/pushrules.h>
#include <jobs/basejob.h>
#include <user.h>
#ifdef GSTREAMER_AVAILABLE
#include "call/callmanager.h"
#endif
#include "controller.h"
#include "neochatconfig.h"
@@ -606,3 +609,35 @@ QVector<QVariant> NotificationsManager::toActions(PushNotificationAction::Action
return actions;
}
#ifdef GSTREAMER_AVAILABLE
/**
 * Show a native notification for an incoming call invite with
 * Accept/Decline actions wired to the CallManager.
 *
 * @param room     The room the invite arrived in.
 * @param roomName Display name of the room.
 * @param sender   Display name of the caller.
 * @param icon     Avatar to show in the notification.
 * @param video    Whether the offer SDP indicated a video call.
 */
void NotificationsManager::postCallInviteNotification(NeoChatRoom *room, const QString &roomName, const QString &sender, const QImage &icon, bool video)
{
    QPixmap img;
    img.convertFromImage(icon);
    // NOTE(review): reuses the generic "message" notifyrc event id rather
    // than a dedicated call event — confirm a ringing/looping sound is
    // wanted here.
    KNotification *notification = new KNotification("message");
    // In direct chats the room name is the sender; avoid "Alice (Alice)".
    if (sender == roomName) {
        notification->setTitle(sender);
    } else {
        notification->setTitle(i18n("%1 (%2)", sender, roomName));
    }
    notification->setText(video ? i18n("%1 is inviting you to a video call", sender) : i18n("%1 is inviting you to a voice call", sender));
    notification->setPixmap(img);
    notification->setDefaultAction(i18n("Open NeoChat in this room"));
    connect(notification, &KNotification::defaultActivated, this, [=]() {
        RoomManager::instance().enterRoom(room);
        WindowController::instance().showAndRaiseWindow(notification->xdgActivationToken());
    });
    // Action 1 = Accept, action 2 = Decline (order defines the mapping).
    notification->setActions({i18n("Accept"), i18n("Decline")});
    connect(notification, &KNotification::action1Activated, this, [=]() {
        CallManager::instance().acceptCall();
    });
    connect(notification, &KNotification::action2Activated, this, [=]() {
        CallManager::instance().hangupCall();
    });
    notification->sendEvent();
    // Track per room so it can be closed when the call is handled elsewhere.
    m_notifications.insert(room->id(), notification);
}
#endif

View File

@@ -156,6 +156,7 @@ public:
* @brief Display a native notification for an invite.
*/
void postInviteNotification(NeoChatRoom *room, const QString &title, const QString &sender, const QImage &icon);
void postCallInviteNotification(NeoChatRoom *room, const QString &roomName, const QString &sender, const QImage &icon, bool video);
/**
* @brief Clear an existing invite notification for the given room.

View File

@@ -0,0 +1,86 @@
// SPDX-FileCopyrightText: 2022 Carson Black <uhhadd@gmail.com>
// SPDX-License-Identifier: LGPL-2.0-or-later
import QtQuick 2.0
import QtQuick.Controls 2.7 as QQC2
import QtQuick.Layouts 1.1
import org.kde.kirigami 2.13 as Kirigami
// A round icon button for the in-call toolbar (accept, hang up, mute, …)
// with an optional animated "shimmer" highlight for ringing states.
QQC2.AbstractButton {
    id: control

    // NOTE(review): "temprament" is a typo of "temperament", but it is
    // this component's public API (used by CallPage.qml) — renaming needs
    // a coordinated change at all call sites.
    property int temprament: CallPageButton.Neutral
    // Whether to overlay the animated gradient (e.g. while ringing).
    property bool shimmering: false

    // Tints the button neutrally, green-ish (accept) or red-ish (reject).
    enum Temprament {
        Neutral,
        Constructive,
        Destructive
    }

    padding: Kirigami.Units.largeSpacing

    contentItem: ColumnLayout {
        QQC2.Control {
            padding: Kirigami.Units.gridUnit
            Kirigami.Theme.colorSet: Kirigami.Theme.Button
            Layout.alignment: Qt.AlignHCenter
            contentItem: Kirigami.Icon {
                implicitHeight: Kirigami.Units.iconSizes.medium
                implicitWidth: Kirigami.Units.iconSizes.medium
                source: control.icon.name
            }
            background: Rectangle {
                Kirigami.Theme.colorSet: Kirigami.Theme.Button
                ShimmerGradient {
                    id: shimmerGradient
                    color: {
                        switch (control.temprament) {
                        case CallPageButton.Neutral:
                            return Kirigami.Theme.textColor
                        case CallPageButton.Constructive:
                            return Kirigami.Theme.positiveTextColor
                        case CallPageButton.Destructive:
                            return Kirigami.Theme.negativeTextColor
                        }
                    }
                }
                color: {
                    if (control.checked) {
                        return Kirigami.Theme.focusColor
                    }
                    switch (control.temprament) {
                    case CallPageButton.Neutral:
                        return Kirigami.Theme.backgroundColor
                    case CallPageButton.Constructive:
                        return Kirigami.Theme.positiveBackgroundColor
                    case CallPageButton.Destructive:
                        return Kirigami.Theme.negativeBackgroundColor
                    }
                }
                border.color: Kirigami.Theme.focusColor
                border.width: control.visualFocus ? 2 : 0
                radius: height/2
                // Overlay carrying the shimmer gradient; kept separate so the
                // base color rectangle stays opaque underneath.
                Rectangle {
                    visible: control.shimmering
                    anchors.fill: parent
                    radius: height/2
                    gradient: control.shimmering ? shimmerGradient : null
                }
            }
        }
        QQC2.Label {
            text: control.text
            font: Kirigami.Theme.smallFont
            horizontalAlignment: Qt.AlignHCenter
            Layout.fillWidth: true
        }
    }
}

View File

@@ -0,0 +1,67 @@
// SPDX-FileCopyrightText: 2022 Tobias Fella <fella@posteo.de>
// SPDX-License-Identifier: GPL-2.0-or-later
import QtQuick 2.15
import QtQuick.Controls 2.15 as QQC2
import QtQuick.Layouts 1.15
import org.kde.kirigami 2.15 as Kirigami
import org.freedesktop.gstreamer.GLVideoItem 1.0
import org.kde.neochat 1.0
// One tile in the call grid: shows a participant's camera stream (when
// active) on a black 16:9 background, with their user id overlaid on top.
Rectangle {
    id: videoStreamDelegate

    implicitWidth: height / 9 * 16
    implicitHeight: 300
    color: "black"
    radius: 10

    QQC2.Label {
        anchors.top: parent.top
        anchors.horizontalCenter: parent.horizontalCenter
        color: "white"
        text: model.object.user.id
    }
    RowLayout {
        anchors.fill: parent
        // Camera stream: the GStreamer sink is attached to the video item
        // whenever the participant enables their camera.
        Loader {
            active: model.object.hasCamera
            Layout.maximumWidth: parent.width
            Layout.maximumHeight: parent.height
            Layout.preferredHeight: parent.height
            Layout.preferredWidth: parent.width
            Layout.alignment: Qt.AlignHCenter | Qt.AlignVCenter
            onActiveChanged: {
                if (active) {
                    model.object.initCamera(camera)
                }
            }
            Component.onCompleted: if (active) model.object.initCamera(camera)
            GstGLVideoItem {
                id: camera
            width: parent.width
                height: parent.height
            }
        }
        // Screen-share stream: currently disabled (active: false).
        // NOTE(review): when enabled this also calls initCamera() rather
        // than a screen-share-specific init — presumably a placeholder;
        // confirm before activating.
        Loader {
            active: false
            Layout.maximumWidth: parent.width
            Layout.maximumHeight: parent.height
            Layout.preferredHeight: parent.height
            Layout.preferredWidth: parent.width
            Layout.alignment: Qt.AlignHCenter | Qt.AlignVCenter
            GstGLVideoItem {
                id: screenCast
                width: parent.width
                height: parent.height
                Component.onCompleted: {
                    model.object.initCamera(this)
                }
            }
        }
    }
}

View File

@@ -0,0 +1,24 @@
// SPDX-FileCopyrightText: 2022 Carson Black <uhhadd@gmail.com>
// SPDX-License-Identifier: GPL-2.0-or-later
import QtQuick 2.15
import QtQuick.Controls 2.15 as QQC2
import QtQuick.Layouts 1.15
import org.kde.kirigami 2.15 as Kirigami
import org.kde.neochat 1.0
// Timeline delegate for m.call.invite events: a simple bubble labelling
// the event as an incoming or outgoing call.
TimelineContainer {
    id: root

    width: ListView.view.width

    innerObject: QQC2.Control {
        Layout.leftMargin: Config.showAvatarInTimeline ? Kirigami.Units.largeSpacing : 0
        padding: Kirigami.Units.gridUnit*2
        contentItem: QQC2.Label {
            // Direction is determined by whether the local user sent it.
            text: root.author.isLocalUser ? i18n("Outgoing Call") : i18n("Incoming Call")
        }
    }
}

View File

@@ -18,6 +18,11 @@ DelegateChooser {
delegate: StateDelegate {}
}
DelegateChoice {
roleValue: MessageEventModel.CallInvite
delegate: CallInviteDelegate {}
}
DelegateChoice {
roleValue: MessageEventModel.Emote
delegate: MessageDelegate {}

138
src/qml/Page/CallPage.qml Normal file
View File

@@ -0,0 +1,138 @@
/* SPDX-FileCopyrightText: 2021 Tobias Fella <fella@posteo.de>
*
* SPDX-License-Identifier: GPL-2.0-or-later
*/
import QtQuick 2.12
import QtQuick.Controls 2.12 as QQC2
import QtQuick.Layouts 1.12
import org.kde.kirigami 2.14 as Kirigami
import QtGraphicalEffects 1.15
import org.kde.neochat 1.0
// The in-call page: participant video grid (or room avatar while nothing
// streams) plus the accept/mute/camera/hang-up button row.
Kirigami.Page {
    id: page

    // The C++ CallSession::State enum is registered with its C++ constant
    // names; "CallSession.Initiating" was undefined in QML and the branch
    // could never match — use the actual enum constant INITIATING.
    title: CallManager.hasInvite ? i18n("Incoming Call")
        : CallManager.isInviting ? i18n("Calling")
        : CallManager.state == CallSession.INITIATING ? i18n("Configuring Call")
        : i18n("Call")

    ColumnLayout {
        id: column
        anchors.fill: parent
        RowLayout {
            id: streams
            Layout.fillWidth: true
            Layout.fillHeight: true
            Repeater {
                id: videos
                model: CallManager.callParticipants
                delegate: VideoStreamDelegate {
                    Layout.fillHeight: true
                    Layout.fillWidth: true
                    Layout.alignment: Qt.AlignHCenter
                }
            }
        }
        // Fallback while no participant tile is shown.
        Kirigami.Avatar {
            visible: videos.count === 0
            Layout.preferredWidth: Kirigami.Units.iconSizes.huge
            Layout.preferredHeight: Kirigami.Units.iconSizes.huge
            Layout.alignment: Qt.AlignHCenter
            name: CallManager.room.displayName
            source: "image://mxc/" + CallManager.room.avatarMediaId
        }
        //QQC2.Label {
        //text: CallManager.remoteUser.displayName
        //horizontalAlignment: Text.AlignHCenter
        //Layout.fillWidth: true
        //}
        //QQC2.Label {
        //text: CallManager.room.displayName
        //horizontalAlignment: Text.AlignHCenter
        //Layout.fillWidth: true
        //}
        RowLayout {
            Layout.alignment: Qt.AlignHCenter
            id: buttonRow
            spacing: Kirigami.Units.gridUnit
            CallPageButton {
                text: i18n("Accept")
                icon.name: "call-start"
                shimmering: true
                temprament: CallPageButton.Constructive
                visible: CallManager.globalState === CallManager.INCOMING
                onClicked: {
                    visible = false; //TODO declarify
                    CallManager.acceptCall()
                }
            }
            CallPageButton {
                text: checked ? i18n("Disable Camera") : i18n("Enable Camera")
                icon.name: checked ? "camera-on" : "camera-off"
                checkable: true
                onToggled: CallManager.toggleCamera()
            }
            // NOTE(review): speaker mute is not wired to any backend yet.
            CallPageButton {
                text: checked ? i18n("Unmute Speaker") : i18n("Mute Speaker")
                icon.name: checked ? "audio-volume-muted" : "audio-speakers-symbolic"
                checkable: true
            }
            CallPageButton {
                text: checked ? i18n("Unmute Microphone") : i18n("Mute Microphone")
                icon.name: checked ? "microphone-sensitivity-muted" : "microphone-sensitivity-high"
                checkable: true
                checked: CallManager.muted
                onToggled: CallManager.muted = !CallManager.muted
            }
            CallPageButton {
                text: i18n("Configure Devices")
                icon.name: "settings-configure"
                onClicked: callConfigurationSheet.open()
            }
            CallPageButton {
                id: denyButton
                visible: CallManager.globalState === CallManager.INCOMING
                text: i18n("Deny")
                icon.name: "call-stop"
                shimmering: true
                temprament: CallPageButton.Destructive
                onClicked: CallManager.hangupCall()
            }
            CallPageButton {
                visible: !denyButton.visible
                text: CallManager.isInviting ? i18n("Cancel") : i18n("Hang Up")
                icon.name: "call-stop"
                shimmering: CallManager.isInviting
                temprament: CallPageButton.Destructive
                onClicked: CallManager.hangupCall()
            }
        }
    }
    // Close this page automatically once the call is over.
    Connections {
        target: CallManager
        function onCallEnded() {
            page.closeDialog()
        }
    }
}

View File

@@ -52,6 +52,16 @@ Kirigami.Page {
}
}
actions.main: Kirigami.Action {
    // Offer a call only when call support is available and the room is 1:1.
    visible: Controller.callsSupported && root.currentRoom.joinedCount === 2
    icon.name: "call-start"
    text: i18n("Call")
    onTriggered: CallManager.startCall(root.currentRoom, true)
}
Loader {
id: timelineViewLoader
anchors.fill: parent

View File

@@ -0,0 +1,78 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: GPL-2.0-or-later
import QtQuick 2.15
import QtQuick.Controls 2.15 as QQC2
import QtQuick.Layouts 1.15
import org.kde.kirigami 2.15 as Kirigami
import org.kde.kirigamiaddons.labs.mobileform 0.1 as MobileForm
import org.kde.neochat 1.0
Kirigami.ScrollablePage {
    title: i18nc("@title:window", "Calls")

    // Let the form cards manage their own horizontal margins.
    leftPadding: 0
    rightPadding: 0

    ColumnLayout {
        // Card: behaviour for incoming call notifications.
        MobileForm.FormCard {
            Layout.topMargin: Kirigami.Units.largeSpacing
            Layout.fillWidth: true
            contentItem: ColumnLayout {
                spacing: 0
                MobileForm.FormCardHeader {
                    title: i18n("Incoming Calls")
                }
                MobileForm.FormCheckDelegate {
                    text: i18n("Ring")
                    checked: Config.ring // TODO
                    enabled: !Config.isRingImmutable //TODO
                    onToggled: {
                        Config.ring = checked
                        Config.sync()
                    }
                }
                // NOTE(review): the ringtone field is display-only for now —
                // editing is not persisted until the TODO below is resolved.
                MobileForm.FormTextFieldDelegate {
                    label: i18n("Ringtone")
                    text: Config.ringtone
                    enabled: true //TODO
                    onEditingFinished: {
                        // TODO
                    }
                }
                //TODO file chooser
            }
        }

        // Card: device selection defaults. Selections are not yet persisted
        // (see the TODO handlers below).
        MobileForm.FormCard {
            Layout.topMargin: Kirigami.Units.largeSpacing
            Layout.fillWidth: true
            contentItem: ColumnLayout {
                spacing: 0
                MobileForm.FormCardHeader {
                    title: i18n("Default Devices")
                }
                MobileForm.FormComboBoxDelegate {
                    text: i18n("Microphone")
                    description: i18n("This microphone will be used by default during calls. You can also switch the microphone during calls.")
                    model: AudioSources
                    enabled: true //TODO
                    onCurrentIndexChanged: {
                        // TODO
                    }
                }
                MobileForm.FormComboBoxDelegate {
                    text: i18n("Camera")
                    description: i18n("This camera will be used by default during calls. You can also switch the camera during calls.")
                    model: VideoSources
                    enabled: true // TODO
                    onCurrentIndexChanged: {
                        // TODO
                    }
                }
            }
        }
    }
}

View File

@@ -58,6 +58,12 @@ Kirigami.CategorizedSettings {
icon.name: "computer"
page: Qt.resolvedUrl("DevicesPage.qml")
},
Kirigami.SettingAction {
    actionName: "calls"
    text: i18n("Calls")
    // Use the grouped icon.name property instead of the deprecated flat
    // iconName, matching the sibling SettingAction entries in this list.
    icon.name: "call-start"
    page: Qt.resolvedUrl("CallConfigurationPage.qml")
},
Kirigami.SettingAction {
actionName: "aboutNeochat"
text: i18n("About NeoChat")

View File

@@ -43,6 +43,18 @@ Kirigami.ApplicationWindow {
source: Qt.resolvedUrl("qrc:/GlobalMenu.qml")
}
Connections {
    // Only connect when call support is compiled in; with an undefined
    // target the handler below is never invoked.
    target: Controller.callsSupported ? CallManager : undefined
    function onGlobalStateChanged() {
        // Open the call dialog as soon as a call starts ringing in either
        // direction. NOTE(review): assumes globalState does not bounce
        // between OUTGOING and INCOMING, which would push a second dialog —
        // confirm against CallManager's state machine.
        if (CallManager.globalState === CallManager.OUTGOING || CallManager.globalState === CallManager.INCOMING) {
            pageStack.pushDialogLayer("qrc:/CallPage.qml", {}, {
                title: i18n("Call")
            })
        }
    }
}
// This timer allows to batch update the window size change to reduce
// the io load and also work around the fact that x/y/width/height are
// changed when loading the page and overwrite the saved geometry from

View File

@@ -105,6 +105,7 @@
<file alias="GlobalNotificationsPage.qml">qml/Settings/GlobalNotificationsPage.qml</file>
<file alias="NotificationRuleItem.qml">qml/Settings/NotificationRuleItem.qml</file>
<file alias="AppearanceSettingsPage.qml">qml/Settings/AppearanceSettingsPage.qml</file>
<file alias="CallConfigurationPage.qml">qml/Settings/CallConfigurationPage.qml</file>
<file alias="AccountsPage.qml">qml/Settings/AccountsPage.qml</file>
<file alias="AccountEditorPage.qml">qml/Settings/AccountEditorPage.qml</file>
<file alias="DevicesPage.qml">qml/Settings/DevicesPage.qml</file>
@@ -127,5 +128,9 @@
<file alias="AvatarTabButton.qml">qml/Component/AvatarTabButton.qml</file>
<file alias="SpaceDrawer.qml">qml/Page/RoomList/SpaceDrawer.qml</file>
<file alias="OsmLocationPlugin.qml">qml/Component/Timeline/OsmLocationPlugin.qml</file>
<file alias="CallPageButton.qml">qml/Component/Call/CallPageButton.qml</file>
<file alias="VideoStreamDelegate.qml">qml/Component/Call/VideoStreamDelegate.qml</file>
<file alias="CallPage.qml">qml/Page/CallPage.qml</file>
<file alias="CallInviteDelegate.qml">qml/Component/Timeline/CallInviteDelegate.qml</file>
</qresource>
</RCC>