This commit is contained in:
Tobias Fella
2024-04-23 18:02:20 +02:00
committed by Tobias Fella
parent f153e57fdb
commit ddc16a17d2
38 changed files with 2973 additions and 6 deletions

View File

@@ -56,7 +56,7 @@ ecm_setup_version(${PROJECT_VERSION}
VERSION_HEADER ${CMAKE_CURRENT_BINARY_DIR}/neochat-version.h
)
find_package(Qt6 ${QT_MIN_VERSION} NO_MODULE COMPONENTS Core Quick Gui QuickControls2 Multimedia Svg WebView)
find_package(Qt6 ${QT_MIN_VERSION} NO_MODULE COMPONENTS Core Quick Gui QuickControls2 Multimedia Svg Protobuf WebView)
set_package_properties(Qt6 PROPERTIES
TYPE REQUIRED
PURPOSE "Basic application components"
@@ -115,6 +115,8 @@ set_package_properties(QuotientQt6 PROPERTIES
PURPOSE "Talk with matrix server"
)
find_package(LiveKit REQUIRED)
find_package(cmark)
set_package_properties(cmark PROPERTIES
TYPE REQUIRED

View File

@@ -51,6 +51,10 @@ is primarily aimed at Linux development.
For Windows and Android [Craft](https://invent.kde.org/packaging/craft) is the primary choice. There are guides for setting up
development environments for [Windows](https://community.kde.org/Get_Involved/development/Windows) and [Android](https://develop.kde.org/docs/packaging/android/building_applications/).
### Building with support for voice / video calls
[LiveKit](https://livekit.io) is needed for call support. Build the [Rust SDK](https://github.com/livekit/rust-sdks) and copy `liblivekit_ffi.so` to your usual library folder. Copy `livekit_ffi.h` to somewhere under your usual include folder. NeoChat should then automatically pick it up.
## Running
Just start the executable in your preferred way - either from the build directory or from the installed location.

10
cmake/FindLiveKit.cmake Normal file
View File

@@ -0,0 +1,10 @@
# SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
# SPDX-License-Identifier: BSD-2-Clause
# Locate the LiveKit FFI library (liblivekit_ffi) and its public header.
find_library(LIVEKIT_LIB NAMES livekit_ffi)
find_path(LIVEKIT_INCLUDE_DIR NAMES livekit_ffi.h)

# Standard handling: sets LiveKit_FOUND only when both pieces were actually
# located, and produces a proper diagnostic for find_package(LiveKit REQUIRED).
# The original set LiveKit_FOUND unconditionally, so a missing library was
# only discovered at link time.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LiveKit
    REQUIRED_VARS LIVEKIT_LIB LIVEKIT_INCLUDE_DIR
)

if(LiveKit_FOUND AND NOT TARGET LiveKit)
    add_library(LiveKit UNKNOWN IMPORTED)
    set_target_properties(LiveKit PROPERTIES
        IMPORTED_LOCATION "${LIVEKIT_LIB}"
        INTERFACE_INCLUDE_DIRECTORIES "${LIVEKIT_INCLUDE_DIR}"
    )
endif()

mark_as_advanced(LIVEKIT_LIB LIVEKIT_INCLUDE_DIR)

View File

@@ -196,6 +196,15 @@ add_library(neochat STATIC
models/pinnedmessagemodel.h
models/commonroomsmodel.cpp
models/commonroomsmodel.h
events/callencryptionkeysevent.h
events/callmemberevent.h
events/callnotifyevent.h
calls/callcontroller.cpp
calls/callcontroller.h
livekitlogmodel.cpp
livekitlogmodel.h
events/callmemberevent.cpp
events/callmemberevent.h
)
set_source_files_properties(qml/OsmLocationPlugin.qml PROPERTIES
@@ -296,6 +305,9 @@ ecm_add_qml_module(neochat URI org.kde.neochat GENERATE_PLUGIN_SOURCE
qml/HoverLinkIndicator.qml
qml/AvatarNotification.qml
qml/ReasonDialog.qml
qml/LivekitLogViewer.qml
qml/CallPage.qml
qml/IncomingCallDialog.qml
SOURCES
messageattached.cpp
messageattached.h
@@ -399,6 +411,10 @@ if (NOT ANDROID AND NOT WIN32 AND NOT APPLE AND NOT HAIKU)
target_compile_definitions(neochat PUBLIC -DHAVE_RUNNER)
target_compile_definitions(neochat PUBLIC -DHAVE_X11=1)
target_sources(neochat PRIVATE runner.cpp)
if (NOT ANDROID AND NOT WIN32 AND NOT APPLE)
#target_compile_definitions(neochat PUBLIC -DHAVE_RUNNER)
target_compile_definitions(neochat PUBLIC -DHAVE_X11)
#target_sources(neochat PRIVATE runner.cpp)
if (TARGET KUnifiedPush)
target_sources(neochat PRIVATE fakerunner.cpp)
@@ -407,6 +423,20 @@ else()
target_compile_definitions(neochat PUBLIC -DHAVE_X11=0)
endif()
qt_add_protobuf(neochat
GENERATE_PACKAGE_SUBFOLDERS
PROTO_FILES
protocols/ffi.proto
protocols/room.proto
protocols/e2ee.proto
protocols/audio_frame.proto
protocols/video_frame.proto
protocols/handle.proto
protocols/participant.proto
protocols/stats.proto
protocols/track.proto
)
target_include_directories(neochat PRIVATE ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/models ${CMAKE_CURRENT_SOURCE_DIR}/enums)
target_link_libraries(neochat PRIVATE settingsplugin timelineplugin devtoolsplugin loginplugin chatbarplugin)
target_link_libraries(neochat PUBLIC
@@ -417,6 +447,7 @@ target_link_libraries(neochat PUBLIC
Qt::Multimedia
Qt::Network
Qt::QuickControls2
Qt::Protobuf
KF6::I18n
KF6::Kirigami
KF6::Notifications
@@ -431,6 +462,7 @@ target_link_libraries(neochat PUBLIC
cmark::cmark
QCoro::Core
QCoro::Network
LiveKit
)
if (TARGET KF6::Crash)

0
src/calls/call.cpp Normal file
View File

74
src/calls/call.h Normal file
View File

@@ -0,0 +1,74 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include <QAbstractListModel>
#include <QObject>
#include <QQmlEngine>
#include "neochatroom.h"
// Represents one participant of a LiveKit call.
// NOTE(review): all member functions are unimplemented declarations in this
// commit, and sit in the default-private section - skeleton API only.
class Participant : public QObject
{
    Q_OBJECT

    // Local playback volume for this participant only.
    void setVolume(float volume);
    // Mute/unmute this participant's audio for the local user only.
    void muteLocally();
    void unmuteLocally();
    void ring(); // See MSC4075

    // TODO: if these are possible; check livekit api
    void muteGlobally();
    void forceDisableCamera();
    void forceDisableScreenShare();
    void setPermissions();
    void kick();
    void ban();

Q_SIGNALS:
    void muted();
    void unmuted();
    void cameraEnabled();
    void cameraDisabled();
    void screenShareEnabled();
    void screenShareDisabled();
};
// Represents an ongoing call in a single Matrix room.
class Call : public QObject
{
    Q_OBJECT
    QML_ELEMENT
    QML_UNCREATABLE("")

    // Whether the local camera is enabled.
    Q_PROPERTY(bool cameraEnabled READ cameraEnabled WRITE setCameraEnabled NOTIFY cameraEnabledChanged)
    // Whether the local microphone is muted.
    Q_PROPERTY(bool microphoneMuted READ microphoneMuted WRITE setMicrophoneMuted NOTIFY microphoneMutedChanged)
    // Whether a screen share is active (read-only).
    Q_PROPERTY(bool screenshareEnabled READ screenshareEnabled NOTIFY screenshareEnabledChanged)
    // The room this call belongs to; fixed for the lifetime of the call.
    Q_PROPERTY(NeoChatRoom *room READ room CONSTANT)

public:
    explicit Call(NeoChatRoom *room, QObject *parent = nullptr);

Q_SIGNALS:
    // NOTE(review): Participant is a QObject and thus non-copyable; passing
    // it by const reference in signals is unusual - the conventional Qt
    // signature would be a pointer. Confirm before implementing.
    void participantJoined(const Participant &participant);
    void participantLeft(const Participant &participant);

private:
    QList<Participant *> m_participants;
};
// List model exposing the participants of a call to QML.
// NOTE(review): declaration only - the required data()/rowCount() overrides
// are not declared yet.
class CallParticipantsModel : public QAbstractListModel
{
    Q_OBJECT

public:
    // Model roles.
    enum Roles {
        NameRoleRole, // display name (NOTE(review): doubled "Role" in the name looks like a typo)
        HasCameraRole, // participant has an active camera track
        HasScreenShareRole, // participant is screen-sharing
        IsMutedRole, // participant's audio is muted
    };
    Q_ENUM(Roles)
};

View File

@@ -0,0 +1,449 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "callcontroller.h"
#include <QAudioSink>
#include <QMediaDevices>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QProtobufSerializer>
#include <QVideoFrame>
#include <QVideoFrameFormat>
#include <QVideoSink>
#include <qprotobufregistration.h>
#include <livekit_ffi.h>
#include <Quotient/csapi/openid.h>
#include "audio_frame.qpb.h"
#include "ffi.qpb.h"
#include "livekitlogmodel.h"
#include "neochatroom.h"
#include "track.qpb.h"
#include "video_frame.qpb.h"
using namespace livekit::proto;
using namespace Quotient;
extern "C" {
void livekit_ffi_initialize(void(ffiCallbackFn(const uint8_t *, size_t)), bool capture_logs);
}
// C callback handed to livekit_ffi_initialize(): receives a serialized
// protobuf FfiEvent from the LiveKit FFI layer, decodes it and forwards it
// to the CallController singleton.
void callback(const uint8_t *data, size_t length)
{
    // Wraps the FFI buffer without copying; valid only for the duration of
    // this call, which suffices since deserialize() consumes it immediately.
    auto byteArrayData = QByteArray::fromRawData((const char *)data, length);
    QProtobufSerializer serializer;
    FfiEvent event;
    event.deserialize(&serializer, byteArrayData);
    CallController::instance().handleEvent(std::move(event));
}
// Private constructor (singleton, see instance()); sets up the protobuf/FFI
// machinery immediately.
CallController::CallController()
    : QObject()
{
    init();
}
// Registers the generated protobuf types with Qt and initializes the LiveKit
// FFI layer, installing our event callback. The second argument enables
// capture of LiveKit's internal logs (delivered back as Log events).
void CallController::init()
{
    qRegisterProtobufTypes();
    livekit_ffi_initialize(callback, true);
}
// Handles a batch of LiveKit log records: prints higher-severity entries
// (level < 3) to the warning log and forwards the whole batch to the
// LivekitLogModel, which applies its own level filter.
static void handleLog(LogRecordRepeated &&logs)
{
    for (const auto &log : logs) {
        if (log.level() < 3) {
            qWarning() << log.message();
        }
    }
    LivekitLogModel::instance().addMessages(logs);
}
// Completion handler for an async room-connect request. Matches the callback
// to the pending NeoChatRoom recorded in m_connectingRooms (keyed by async
// id) and, on success, records the connected LiveKit room and our local
// participant handle.
void CallController::handleConnect(ConnectCallback &&callback)
{
    qWarning() << "Connecting to" << callback.room().info().name() << "with id" << callback.asyncId();
    // Reject callbacks we did not initiate, whose room has since been
    // destroyed, or whose LiveKit room name does not match the expected
    // Matrix room id.
    if (!m_connectingRooms.contains(callback.asyncId()) || !m_connectingRooms[callback.asyncId()]
        || m_connectingRooms[callback.asyncId()]->id() != callback.room().info().name()) {
        qWarning() << "Connecting to unexpected room";
        return;
    }
    m_connectingRooms.remove(callback.asyncId());
    m_rooms[callback.asyncId()] = callback.room();
    localParticipant = callback.localParticipant().handle().id_proto();
}
// Completion handler for an async dispose request: drops our record of the
// LiveKit room associated with the given async id, if we have one.
void CallController::handleDispose(DisposeCallback &&callback)
{
    qWarning() << "Disposing" << callback.asyncId();
    if (m_rooms.contains(callback.asyncId())) {
        qWarning() << " room" << m_rooms[callback.asyncId()].info().name();
        m_rooms.erase(callback.asyncId());
    } else {
        qWarning() << " unknown object";
    }
}
// Dispatches a room-level event from the LiveKit FFI layer. Most branches
// only log; the interesting cases are trackSubscribed (where we ask the FFI
// layer to open an audio or video stream so frames start flowing into
// handleEvent()) and localTrackPublished (where we remember the track sid
// for toggleCamera()).
void CallController::handleRoomEvent(livekit::proto::RoomEvent &&event)
{
    // Serialize a request, perform the synchronous FFI round trip and return
    // the deserialized response. Factors out the audio/video subscription
    // plumbing that was previously duplicated verbatim.
    auto sendRequest = [](const FfiRequest &ffiRequest) {
        QProtobufSerializer serializer;
        auto data = ffiRequest.serialize(&serializer);
        const uint8_t *ret_data;
        size_t size;
        livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
        FfiResponse response;
        response.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
        return response;
    };

    if (event.hasParticipantConnected()) {
        qWarning() << "Participant connected" << event.participantConnected().info().info().identity();
    } else if (event.hasParticipantDisconnected()) {
        // Bug fix: this branch previously logged "Participant connected"
        // (copy-paste from the branch above).
        qWarning() << "Participant disconnected" << event.participantDisconnected().participantSid();
    } else if (event.hasLocalTrackPublished()) {
        qWarning() << "Local track published";
        // Remember the sid so toggleCamera() can unpublish this track later.
        m_localVideoTrackSid = event.localTrackPublished().trackSid();
    } else if (event.hasLocalTrackUnpublished()) {
        qWarning() << "Local track unpublished";
    } else if (event.hasTrackPublished()) {
        qWarning() << "Track published";
    } else if (event.hasTrackUnpublished()) {
        qWarning() << "Track unpublished";
    } else if (event.hasTrackSubscribed()) {
        qWarning() << "Track subscribed";
        // A remote track became available: open a matching stream so we
        // start receiving its frames.
        auto track = event.trackSubscribed().track();
        if (track.info().kind() == TrackKindGadget::KIND_AUDIO) {
            NewAudioStreamRequest audioStreamRequest;
            audioStreamRequest.setTrackHandle(track.handle().id_proto());
            FfiRequest request;
            request.setNewAudioStream(audioStreamRequest);
            sendRequest(request);
        } else if (track.info().kind() == TrackKindGadget::KIND_VIDEO) {
            NewVideoStreamRequest videoStreamRequest;
            videoStreamRequest.setTrackHandle(track.handle().id_proto());
            FfiRequest request;
            request.setNewVideoStream(videoStreamRequest);
            sendRequest(request);
        }
    } else if (event.hasTrackUnsubscribed()) {
        qWarning() << "Track unsubscribed";
    } else if (event.hasTrackSubscriptionFailed()) {
        qWarning() << "Track subscription failed";
    } else if (event.hasTrackMuted()) {
        qWarning() << "Track muted";
    } else if (event.hasTrackUnmuted()) {
        qWarning() << "Track unmuted";
    } else if (event.hasActiveSpeakersChanged()) {
        // Deliberately not logged - fires far too often.
    } else if (event.hasRoomMetadataChanged()) {
        qWarning() << "room metadata changed";
    } else if (event.hasParticipantMetadataChanged()) {
        qWarning() << "participant metadata changed";
    } else if (event.hasParticipantNameChanged()) {
        qWarning() << "participant name changed";
    } else if (event.hasConnectionQualityChanged()) {
        qWarning() << "connection quality changed to" << event.connectionQualityChanged().quality();
    } else if (event.hasDataPacketReceived()) {
        qWarning() << "data received";
    } else if (event.hasConnectionStateChanged()) {
        qWarning() << "connection state changed";
    } else if (event.hasDisconnected()) {
        qWarning() << "disconnected";
    } else if (event.hasReconnecting()) {
        qWarning() << "reconnecting";
    } else if (event.hasReconnected()) {
        qWarning() << "Reconnected";
    } else if (event.hasE2eeStateChanged()) {
        qWarning() << "e2eeStateChanged";
    } else if (event.hasEos()) {
        qWarning() << "eos";
    } else {
        qWarning() << "Unknown room event";
    }
}
// Debug helper: dumps a byte buffer to a file.
// NOTE(review): the destination is a hardcoded developer home directory -
// debugging leftover that should not ship.
void saveByteArray(const QByteArray &data, const QString &name)
{
    QFile file("/home/tobias/"_ls + name);
    file.open(QFile::WriteOnly);
    file.write(data);
    file.close();
}
// Central dispatcher for all events arriving from the LiveKit FFI layer via
// the registered callback. Handles log batches, room events and incoming
// video/audio frames; every other event kind is only logged.
void CallController::handleEvent(FfiEvent &&event)
{
    if (event.hasLogs()) {
        handleLog(std::move(event.logs().records()));
    } else if (event.hasRoomEvent()) {
        handleRoomEvent(std::move(event.roomEvent()));
    } else if (event.hasTrackEvent()) {
        qWarning() << "track event";
    } else if (event.hasVideoStreamEvent()) {
        qWarning() << "video stream event";
        // A remote video frame arrived as a raw buffer; copy the planes into
        // a QVideoFrame and hand it to the QML sink.
        auto video = event.videoStreamEvent();
        auto info = video.frameReceived().buffer().info();
        // YUV420P: full-resolution Y plane plus quarter-resolution U and V
        // planes => width * height * 1.5 bytes total.
        QByteArray data((const char *)info.dataPtr(), info.width() * info.height() * 1.5);
        auto frame = QVideoFrame(QVideoFrameFormat(QSize(info.width(), info.height()), QVideoFrameFormat::Format_YUV420P));
        frame.map(QVideoFrame::WriteOnly);
        // Y plane is 2/3 of the buffer, U and V are 1/6 each.
        memcpy(frame.bits(0), data.constData(), data.size() / 3 * 2);
        memcpy(frame.bits(1), data.constData() + data.size() / 3 * 2, data.size() / 6);
        memcpy(frame.bits(2), data.constData() + data.size() / 3 * 2 + data.size() / 6, data.size() / 6);
        // NOTE(review): base64-encoding and logging every frame is very
        // expensive - debug leftover.
        qWarning() << frame.size() << data.toBase64();
        frame.unmap();
        m_sink->setVideoFrame(frame);
        // NOTE(review): freeing a buffer allocated on the far side of the
        // FFI boundary with `delete` looks suspect - confirm the FFI
        // contract for releasing frame buffers.
        delete (char *)info.dataPtr();
    } else if (event.hasAudioStreamEvent()) {
        return; // TODO remove - the audio playback path below is disabled
        // One-time setup: 48 kHz stereo Int16 sink plus an FFI resampler
        // handle stored in `resampler`.
        static bool initialized = false;
        if (!initialized) {
            initialized = true;
            QAudioFormat format;
            format.setSampleRate(48000);
            format.setChannelCount(2);
            format.setSampleFormat(QAudioFormat::Int16);
            QAudioDevice info(QMediaDevices::defaultAudioOutput());
            if (!info.isFormatSupported(format)) {
                qWarning() << "Audio format not supported";
                Q_ASSERT(false);
                return;
            }
            sink = new QAudioSink(format);
            audioData = sink->start();
            QProtobufSerializer serializer;
            NewAudioResamplerRequest narr;
            FfiRequest request;
            request.setNewAudioResampler(narr);
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse newResponse;
            newResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            resampler = newResponse.newAudioResampler().resampler().handle().id_proto();
        }
        if (event.audioStreamEvent().hasFrameReceived()) {
            // Remix/resample the incoming frame to 48 kHz stereo via the FFI
            // resampler, then push the PCM data into the audio sink.
            FfiRequest request;
            RemixAndResampleRequest rarr;
            rarr.setBuffer(event.audioStreamEvent().frameReceived().frame().info());
            rarr.setNumChannels(2);
            rarr.setSampleRate(48000);
            rarr.setResamplerHandle(resampler);
            request = FfiRequest();
            request.setRemixAndResample(rarr);
            static QProtobufSerializer serializer;
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse response;
            response.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            Q_ASSERT(response.hasRemixAndResample());
            auto info = response.remixAndResample().buffer().info();
            // Int16 samples => 2 bytes per sample per channel.
            auto bytes = info.numChannels() * info.samplesPerChannel() * 2;
            data = QByteArray::fromRawData((const char *)info.dataPtr(), bytes);
            audioData->write(data);
        }
    } else if (event.hasConnect()) {
        handleConnect(std::move(event.connect()));
    } else if (event.hasDisconnect()) {
        qWarning() << "disconnect";
    } else if (event.hasDispose()) {
        handleDispose(std::move(event.dispose()));
    } else if (event.hasPublishTrack()) {
        qWarning() << "publish track";
    } else if (event.hasUnpublishTrack()) {
        qWarning() << "unpublish track";
    } else if (event.hasPublishData()) {
        qWarning() << "publish data";
    } else if (event.hasCaptureAudioFrame()) {
        qWarning() << "audio frame";
    } else if (event.hasUpdateLocalMetadata()) {
        qWarning() << "update local metadata";
    } else if (event.hasUpdateLocalName()) {
        qWarning() << "update local name";
    } else if (event.hasGetStats()) {
        qWarning() << "get stats";
    } else if (event.hasGetSessionStats()) {
        qWarning() << "get session stats";
    } else if (event.hasPanic()) {
        qWarning() << "panic";
    } else {
        qWarning() << event.messageField();
    }
}
// Reacts to an incoming call member state event: announces the call,
// requests an OpenID token from the homeserver, exchanges it at the
// membership's LiveKit service URL ("/sfu/get") for a room url + JWT, and
// finally asks the FFI layer to connect to that LiveKit room. The async
// connect is resolved later in handleConnect().
void CallController::handleCallMemberEvent(const Quotient::CallMemberEvent *event, NeoChatRoom *room)
{
    qWarning() << event->fullJson();
    Q_EMIT callStarted();
    const auto connection = room->connection();
    // Step 1: OpenID token proving our Matrix identity to the SFU service.
    auto job = connection->callApi<RequestOpenIdTokenJob>(connection->userId());
    connect(job, &BaseJob::finished, this, [this, room, job, connection, event]() {
        // NOTE(review): allocated per handled event and never deleted -
        // consider parenting it or calling deleteLater.
        auto nam = new QNetworkAccessManager;
        // Step 2: build the token-exchange request body.
        auto json = QJsonDocument(QJsonObject{
            {"room"_ls, room->id()},
            {"openid_token"_ls,
             QJsonObject{{"access_token"_ls, job->tokenData().accessToken},
                         {"token_type"_ls, job->tokenData().tokenType},
                         {"matrix_server_name"_ls, job->tokenData().matrixServerName}}},
            {"device_id"_ls, connection->deviceId()},
        })
            .toJson();
        if (event->memberships().isEmpty()) {
            return;
        }
        // Uses the first membership's first active focus as the service.
        auto membership = event->memberships()[0].toObject();
        QNetworkRequest request(QUrl((membership["foci_active"_ls].toArray()[0]["livekit_service_url"_ls].toString() + "/sfu/get"_ls)));
        request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json"_ls);
        auto reply = nam->post(request, json);
        connect(reply, &QNetworkReply::finished, this, [reply, this, room]() {
            // Step 3: connect to the LiveKit room with the returned url/jwt.
            auto json = QJsonDocument::fromJson(reply->readAll()).object();
            FfiRequest message;
            ConnectRequest connectRequest;
            connectRequest.setUrl(json["url"_ls].toString());
            connectRequest.setToken(json["jwt"_ls].toString());
            message.setConnect(connectRequest);
            QProtobufSerializer serializer;
            auto data = message.serialize(&serializer);
            size_t size;
            const uint8_t *ret_data;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse connectResponse;
            connectResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            if (!connectResponse.hasConnect()) {
                qWarning() << "connectResponse has unexpected content" << connectResponse.messageField();
                return;
            }
            // Remember which room this async connect belongs to; matched up
            // in handleConnect().
            m_connectingRooms[connectResponse.connect().asyncId()] = room;
        });
    });
}
// Convenience wrapper for the LiveKit FFI request/response round trip:
// serializes the request, performs the synchronous FFI call and returns the
// deserialized response.
FfiResponse request(FfiRequest &&request)
{
    static QProtobufSerializer serializer;
    auto data = request.serialize(&serializer);
    size_t responseLength;
    const char *responseData;
    livekit_ffi_request((const uint8_t *)data.constData(), data.size(), (const uint8_t **)&responseData, &responseLength);
    auto response = QByteArray::fromRawData(responseData, responseLength);
    FfiResponse ffiResponse;
    ffiResponse.deserialize(&serializer, response);
    return ffiResponse;
}
// Attaches a camera QVideoSink as the local video source: on the first frame
// (once connected) a LiveKit video source and track are created and
// published; every subsequent frame is converted to packed RGB24 and pushed
// across the FFI boundary.
void CallController::setCameraVideoSink(QVideoSink *videoSink)
{
    m_cameraVideoSink = videoSink;
    connect(videoSink, &QVideoSink::videoFrameChanged, this, [videoSink, this]() {
        // One-time setup state, shared by every connected sink.
        static bool initialized = false;
        // 100000 is the "not connected yet" sentinel for localParticipant.
        if (localParticipant == 100000) {
            return; // TODO make less shitty
        }
        static QtProtobuf::uint64 handle;
        if (!initialized) {
            initialized = true;
            // Create a native video source sized to the sink's current
            // frame size...
            NewVideoSourceRequest newVideoSourceRequest;
            VideoSourceResolution resolution;
            resolution.setHeight(videoSink->videoSize().height());
            resolution.setWidth(videoSink->videoSize().width());
            newVideoSourceRequest.setResolution(resolution);
            newVideoSourceRequest.setType(VideoSourceTypeGadget::VIDEO_SOURCE_NATIVE);
            FfiRequest ffiRequest;
            ffiRequest.setNewVideoSource(newVideoSourceRequest);
            auto response = request(std::move(ffiRequest));
            handle = response.newVideoSource().source().handle().id_proto();
            // ...then create a "Camera" track backed by it and publish it.
            CreateVideoTrackRequest createVideoTrackRequest;
            createVideoTrackRequest.setName("Camera"_ls);
            createVideoTrackRequest.setSourceHandle(handle);
            FfiRequest request;
            request.setCreateVideoTrack(createVideoTrackRequest);
            auto createResponse = ::request(std::move(request));
            m_localVideoTrackId = createResponse.createVideoTrack().track().handle().id_proto();
            publishTrack(m_localVideoTrackId);
        }
        // Per-frame path: convert to RGB24 and capture it into the source.
        auto image = videoSink->videoFrame().toImage();
        image.convertTo(QImage::Format_RGB888);
        CaptureVideoFrameRequest request;
        VideoBufferInfo buffer;
        buffer.setType(VideoBufferTypeGadget::RGB24);
        buffer.setWidth(image.width());
        buffer.setHeight(image.height());
        // Pointers are handed to the FFI layer as integer handles; `image`
        // stays alive until the request below returns.
        buffer.setDataPtr((QtProtobuf::uint64)image.bits());
        buffer.setStride(image.bytesPerLine());
        VideoBufferInfo_QtProtobufNested::ComponentInfoRepeated components;
        VideoBufferInfo_QtProtobufNested::ComponentInfo componentInfo;
        componentInfo.setStride(image.bytesPerLine());
        componentInfo.setDataPtr((QtProtobuf::uint64)image.bits());
        componentInfo.setSize(image.sizeInBytes());
        components += componentInfo;
        buffer.setComponents(components);
        request.setBuffer(buffer);
        request.setSourceHandle(handle);
        request.setTimestampUs(QDateTime::currentMSecsSinceEpoch() * 1000);
        request.setRotation(VideoRotationGadget::VIDEO_ROTATION_0);
        FfiRequest ffiRequest;
        ffiRequest.setCaptureVideoFrame(request);
        auto response = ::request(std::move(ffiRequest));
    });
}
// Sets the sink that receives decoded remote video frames. Takes a QObject
// because it is invoked from QML; the argument must actually be a
// QVideoSink.
void CallController::setVideoSink(QObject *sink)
{
    // qobject_cast is the idiomatic cast for QObject hierarchies (works via
    // the meta-object system, no RTTI needed); yields nullptr when the
    // object is not a QVideoSink, same as the previous dynamic_cast.
    m_sink = qobject_cast<QVideoSink *>(sink);
}
// Stores the QML-assigned sink and registers it with the CallController as
// the local camera source.
void LivekitVideoSink::setVideoSink(QVideoSink *videoSink)
{
    m_videoSink = videoSink;
    CallController::instance().setCameraVideoSink(videoSink);
    Q_EMIT videoSinkChanged();
}
// Returns the currently assigned video sink (may be null before assignment).
QVideoSink *LivekitVideoSink::videoSink() const
{
    return m_videoSink;
}
// Toggles publication of the local camera track: publishes the existing
// track when none is currently live, otherwise unpublishes the live one.
void CallController::toggleCamera()
{
    if (m_localVideoTrackSid.isEmpty()) {
        publishTrack(m_localVideoTrackId);
    } else {
        FfiRequest request;
        UnpublishTrackRequest unpublishRequest;
        unpublishRequest.setLocalParticipantHandle(localParticipant);
        unpublishRequest.setTrackSid(m_localVideoTrackSid);
        request.setUnpublishTrack(unpublishRequest);
        auto response = ::request(std::move(request));
        // Cleared immediately; reassigned when the FFI layer reports the
        // next LocalTrackPublished event (see handleRoomEvent()).
        m_localVideoTrackSid = QString();
    }
}
// Publishes the local track with the given FFI handle as our camera source
// in the connected room.
void CallController::publishTrack(uint64_t id)
{
    PublishTrackRequest publishTrackRequest;
    publishTrackRequest.setTrackHandle(id);
    publishTrackRequest.setLocalParticipantHandle(localParticipant);
    TrackPublishOptions options;
    options.setSource(TrackSourceGadget::SOURCE_CAMERA);
    publishTrackRequest.setOptions(options);
    auto request = FfiRequest();
    request.setPublishTrack(publishTrackRequest);
    auto publishResponse = ::request(std::move(request));
}

View File

@@ -0,0 +1,95 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QObject>
#include <QQmlEngine>
#include <QVideoSink>
#include "events/callmemberevent.h"
#include "room.qpb.h"
namespace livekit::proto
{
class FfiEvent;
class ConnectCallback;
class DisposeCallback;
class RoomEvent;
}
class LivekitMediaPlayer;
class NeoChatRoom;
class QAudioSink;
// Singleton coordinating LiveKit-based voice/video calls: owns the FFI
// connection state, dispatches FFI events, and bridges camera input and
// remote video to QML video sinks.
class CallController : public QObject
{
    Q_OBJECT
    QML_ELEMENT
    QML_SINGLETON

public:
    static CallController &instance()
    {
        static CallController _instance;
        return _instance;
    }
    // QML singleton factory; ownership stays on the C++ side so the engine
    // never deletes the singleton.
    static CallController *create(QQmlEngine *, QJSEngine *)
    {
        QQmlEngine::setObjectOwnership(&instance(), QQmlEngine::CppOwnership);
        return &instance();
    }

    // Starts the join flow for a call announced by a call member event.
    void handleCallMemberEvent(const Quotient::CallMemberEvent *event, NeoChatRoom *room);

    // Internal. Do not use.
    void handleEvent(livekit::proto::FfiEvent &&event);

    // Sink for decoded remote video frames; must actually be a QVideoSink.
    Q_INVOKABLE void setVideoSink(QObject *sink);
    // Sink whose frames are captured and published as the local camera track.
    void setCameraVideoSink(QVideoSink *videoSink);
    // Publishes or unpublishes the local camera track.
    Q_INVOKABLE void toggleCamera();

Q_SIGNALS:
    void callStarted();

private:
    CallController();
    void init();

    // Pending async connects: FFI async id -> room being joined.
    QMap<uint64_t, QPointer<NeoChatRoom>> m_connectingRooms;
    // Connected LiveKit rooms, keyed by the async id of their connect call.
    std::map<uint64_t, livekit::proto::OwnedRoom> m_rooms;

    void handleConnect(livekit::proto::ConnectCallback &&callback);
    void handleDispose(livekit::proto::DisposeCallback &&callback);
    void handleRoomEvent(livekit::proto::RoomEvent &&event);
    void publishTrack(uint64_t id);

    QIODevice *audioData = nullptr;
    // NOTE(review): `sink`, `m_sink` and `resampler` have no initializers,
    // unlike the surrounding members - consider defaulting them.
    QAudioSink *sink;
    QVideoSink *m_sink;
    uint64_t resampler;
    QVideoSink *m_cameraVideoSink = nullptr;
    // FFI handle of the local participant; 100000 is the "not connected"
    // sentinel checked in setCameraVideoSink().
    uint64_t localParticipant = 100000;
    QString m_localVideoTrackSid;
    uint64_t m_localVideoTrackId;
};
// QML helper exposing a writable videoSink property; assigning a sink
// registers it with the CallController as the local camera source.
class LivekitVideoSink : public QObject
{
    Q_OBJECT
    QML_ELEMENT

public:
    Q_PROPERTY(QVideoSink *videoSink READ videoSink WRITE setVideoSink NOTIFY videoSinkChanged REQUIRED)

    using QObject::QObject;

    void setVideoSink(QVideoSink *videoSink);
    QVideoSink *videoSink() const;

Q_SIGNALS:
    void videoSinkChanged();

private:
    QVideoSink *m_videoSink = nullptr;
};

View File

View File

View File

0
src/calls/participant.h Normal file
View File

View File

@@ -45,6 +45,8 @@ bool testMode = false;
using namespace Quotient;
Controller::Controller(QObject *parent)
: QObject(parent)
{
@@ -132,6 +134,7 @@ Controller::Controller(QObject *parent)
m_endpoint = connector->endpoint();
#endif
}
Controller &Controller::instance()

View File

@@ -7,5 +7,5 @@
using namespace Qt::StringLiterals;
QMultiHash<QString, QVariant> EmojiTones::_tones = {
#include "emojitones_data.h"
//#include "emojitones_data.h"
};

View File

@@ -0,0 +1,20 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later
#pragma once
#include <Quotient/events/roomevent.h>
namespace Quotient
{
// Raw wrapper for the io.element.call.encryption_keys room event; no typed
// accessors yet, consumers read the JSON content directly.
class CallEncryptionKeysEvent : public RoomEvent
{
public:
    QUO_EVENT(CallEncryptionKeysEvent, "io.element.call.encryption_keys");

    explicit CallEncryptionKeysEvent(const QJsonObject &obj)
        : RoomEvent(obj)
    {
    }
};
}

View File

@@ -0,0 +1,38 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "callmemberevent.h"
#include <QString>
using namespace Quotient;
using namespace Qt::Literals::StringLiterals;
// Parses the content of a call member state event into typed structures.
// Expects json["memberships"] to be an array of membership objects, each
// with an optional "foci_active" array of focus descriptions.
CallMemberEventContent::CallMemberEventContent(const QJsonObject &json)
{
    for (const auto &membership : json["memberships"_L1].toArray()) {
        QList<Focus> foci;
        for (const auto &focus : membership["foci_active"_L1].toArray()) {
            foci.append(Focus{
                .livekitAlias = focus["livekit_alias"_L1].toString(),
                .livekitServiceUrl = focus["livekit_service_url"_L1].toString(),
                // Bug fix: the focus type lives under the "type" key (with
                // the value "livekit"); it was previously read from a
                // non-existent "livekit" key and always came back empty.
                .type = focus["type"_L1].toString(),
            });
        }
        memberships.append(CallMembership{
            .application = membership["application"_L1].toString(),
            .callId = membership["call_id"_L1].toString(),
            .deviceId = membership["device_id"_L1].toString(),
            .expires = membership["expires"_L1].toInt(),
            // NOTE(review): this reads the relative "expires" value, not an
            // absolute "expires_ts" timestamp - confirm which key the events
            // actually carry before relying on expiresTs.
            .expiresTs = membership["expires"_L1].toVariant().value<uint64_t>(),
            .fociActive = foci,
            .membershipId = membership["membershipID"_L1].toString(),
            .scope = membership["scope"_L1].toString(),
        });
    }
}
// Serialization back to JSON is not implemented yet; returns an empty
// object. NOTE(review): sending an event built from this content would
// therefore produce empty state - implement before use.
QJsonObject CallMemberEventContent::toJson() const
{
    return {};
}

View File

@@ -0,0 +1,59 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later
#pragma once
#include <Quotient/events/stateevent.h>
namespace Quotient
{
// One entry of a membership's "foci_active" list: the SFU ("focus") the
// member uses for media.
struct Focus {
    QString livekitAlias;
    QString livekitServiceUrl;
    QString type; // focus type, e.g. "livekit"
};

// One element of a call member event's "memberships" array.
struct CallMembership {
    QString application; // e.g. "m.call"
    QString callId;
    QString deviceId;
    int expires; // relative lifetime as parsed from "expires"
    uint64_t expiresTs; // NOTE(review): currently also parsed from "expires" - verify against "expires_ts"
    QList<Focus> fociActive;
    QString membershipId;
    QString scope;
};

// Typed view of the content of a call member state event.
class CallMemberEventContent
{
public:
    explicit CallMemberEventContent(const QJsonObject &json);
    // NOTE(review): currently returns an empty object (stub).
    QJsonObject toJson() const;

    QList<CallMembership> memberships;
};
/**
* @class CallMemberEvent
*
* Class to define a call member event.
*
* @sa Quotient::StateEvent
*/
class CallMemberEvent : public KeyedStateEventBase<CallMemberEvent, CallMemberEventContent>
{
public:
    QUO_EVENT(CallMemberEvent, "org.matrix.msc3401.call.member")

    explicit CallMemberEvent(const QJsonObject &obj)
        : KeyedStateEventBase(obj)
    {
    }

    // Raw "memberships" array from the event content; callers parse the
    // individual membership objects themselves (see CallMemberEventContent).
    QJsonArray memberships() const
    {
        return contentJson()["memberships"_ls].toArray();
    }
};
}

View File

@@ -0,0 +1,16 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later
#pragma once
#include <Quotient/events/roomevent.h>
namespace Quotient
{
// Raw wrapper for the org.matrix.msc4075.call.notify room event, which is
// what MediaManager::ring() inspects to decide whether to ring.
class CallNotifyEvent : public RoomEvent
{
public:
    QUO_EVENT(CallNotifyEvent, "org.matrix.msc4075.call.notify");

    explicit CallNotifyEvent(const QJsonObject &obj);
};

39
src/livekitlogmodel.cpp Normal file
View File

@@ -0,0 +1,39 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "livekitlogmodel.h"
using namespace livekit::proto;
// Returns the log message text for MessageRole; all other roles are unset.
QVariant LivekitLogModel::data(const QModelIndex &index, int role) const
{
    const auto &message = m_messages[index.row()];
    if (role == MessageRole) {
        return message.message();
    }
    return {};
}
// Number of stored log records; flat list, so the parent index is ignored.
int LivekitLogModel::rowCount(const QModelIndex &parent) const
{
    Q_UNUSED(parent);
    return m_messages.size();
}
// Maps MessageRole to the "message" property name used from QML delegates.
QHash<int, QByteArray> LivekitLogModel::roleNames() const
{
    return {
        {MessageRole, "message"},
    };
}
// Appends the warning-and-above entries (level < 3) of a LiveKit log batch
// to the model, one row per record.
void LivekitLogModel::addMessages(livekit::proto::LogRecordRepeated messages)
{
    for (const auto &message : messages) {
        if (message.level() < 3) {
            // Bug fix: inserting a single row means first == last == the
            // current row count; the old code passed size() + 1 as `last`,
            // announcing two new rows to attached views for every record
            // actually appended.
            const int row = m_messages.size();
            beginInsertRows({}, row, row);
            m_messages += message;
            endInsertRows();
        }
    }
}

61
src/livekitlogmodel.h Normal file
View File

@@ -0,0 +1,61 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QAbstractListModel>
#include <QQmlEngine>
#include <QList>
#include "ffi.qpb.h"
// Singleton list model collecting warning-level (and more severe) log
// records emitted by the LiveKit FFI layer, for display in the QML log
// viewer.
class LivekitLogModel : public QAbstractListModel
{
    Q_OBJECT
    QML_ELEMENT
    QML_SINGLETON

public:
    static LivekitLogModel &instance() {
        static LivekitLogModel _instance;
        return _instance;
    }
    // QML singleton factory; ownership stays on the C++ side.
    static LivekitLogModel *create(QQmlEngine *, QJSEngine *) {
        QQmlEngine::setObjectOwnership(&instance(), QQmlEngine::CppOwnership);
        return &instance();
    }

    /**
     * @brief Defines the model roles.
     */
    enum Roles {
        MessageRole = Qt::DisplayRole,
    };

    /**
     * @brief Get the given role value at the given index.
     *
     * @sa QAbstractItemModel::data
     */
    [[nodiscard]] QVariant data(const QModelIndex &index, int role) const override;

    /**
     * @brief Number of rows in the model.
     *
     * @sa QAbstractItemModel::rowCount
     */
    [[nodiscard]] int rowCount(const QModelIndex &parent = QModelIndex()) const override;

    /**
     * @brief Returns a mapping from Role enum values to role names.
     *
     * @sa Roles, QAbstractItemModel::roleNames()
     */
    [[nodiscard]] QHash<int, QByteArray> roleNames() const override;

    // Appends entries with level < 3 (warning or more severe) to the model.
    void addMessages(livekit::proto::LogRecordRepeated messages);

private:
    livekit::proto::LogRecordRepeated m_messages;

    LivekitLogModel() = default;
};

View File

@@ -1,11 +1,141 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "mediamanager.h"
#include <QDirIterator>
#include <QMimeDatabase>
#include <Quotient/qt_connection_util.h>
#include "events/callmemberevent.h"
#include "neochatroom.h"
using namespace Qt::Literals::StringLiterals;
using namespace Quotient;
// Announces that some media player started playing; other players listen
// for playbackStarted() and are expected to stop or pause themselves.
void MediaManager::startPlayback()
{
    Q_EMIT playbackStarted();
}
#include "moc_mediamanager.cpp"
// Decides whether an incoming MSC4075 call-notify event should make this
// client ring, applying the checks (application type, mentions, notify type,
// push-rule state, existing call membership, event age) before delegating to
// ringUnchecked().
void MediaManager::ring(const QJsonObject &json, NeoChatRoom *room)
{
    qWarning() << "start check ring";
    // todo: check sender != us
    // Only ring for actual calls.
    if (json["content"_L1]["application"_L1].toString() != "m.call"_L1) {
        qWarning() << "not m.call";
        return;
    }
    qWarning() << json;
    // Ring only if the whole room is mentioned or we are explicitly listed.
    // NOTE(review): the `|| sender == us` part routes our own events into
    // the explicit-mention check rather than ignoring them - confirm that
    // matches the TODO above.
    if (!json["content"_L1]["m.mentions"_L1]["room"_L1].toBool() || json["sender"_ls].toString() == room->connection()->userId()) {
        bool mentioned = false;
        for (const auto &user : json["content"_L1]["m.mentions"_L1]["user_ids"_L1].toArray()) {
            if (user.toString() == room->connection()->userId()) {
                mentioned = true;
                break;
            }
        }
        if (!mentioned) {
            qWarning() << "not mentioned";
            return;
        }
    }
    if (json["content"_L1]["notify_type"_L1].toString() != "ring"_L1) {
        qWarning() << "not ring";
        return;
    }
    // Respect the room's notification settings.
    if (room->pushNotificationState() == PushNotificationState::Mute) {
        qWarning() << "mute";
        return;
    }
    if (isRinging()) {
        qWarning() << "already ringing";
        return;
    }
    // Don't ring if our own call membership already exists in this room.
    if (const auto &event = room->currentState().get<CallMemberEvent>(room->connection()->userId())) {
        if (event) {
            auto memberships = event->contentJson()["memberships"_L1].toArray();
            for (const auto &m : memberships) {
                const auto &membership = m.toObject();
                if (membership["application"_L1] == "m.call"_L1 && membership["call_id"_L1].toString().isEmpty()) {
                    qWarning() << "already in a call";
                    return;
                }
            }
        }
    }
    // Stop ringing automatically once our own call membership appears, i.e.
    // once we have joined the call (possibly from another client).
    connectUntil(room, &NeoChatRoom::changed, this, [this, room]() {
        if (const auto &event = room->currentState().get<CallMemberEvent>(room->connection()->userId())) {
            if (event) {
                auto memberships = event->contentJson()["memberships"_L1].toArray();
                for (const auto &m : memberships) {
                    const auto &membership = m.toObject();
                    if (membership["application"_L1] == "m.call"_L1 && membership["call_id"_L1].toString().isEmpty()) {
                        qWarning() << "stopping";
                        stopRinging();
                        return true;
                    }
                }
            }
        }
        return false;
    });
    // Stale events (e.g. replayed on initial sync) should not ring.
    if (json["unsigned"_L1]["age"_L1].toInt() > 10000) {
        qWarning() << "too old";
        return;
    }
    ringUnchecked();
}
void MediaManager::ringUnchecked()
{
qWarning() << "ring";
static QString path;
if (path.isEmpty()) {
for (const auto &dir : QString::fromUtf8(qgetenv("XDG_DATA_DIRS")).split(u':')) {
if (QFileInfo(dir + QStringLiteral("/sounds/freedesktop/stereo/phone-incoming-call.oga")).exists()) {
path = dir + QStringLiteral("/sounds/freedesktop/stereo/phone-incoming-call.oga");
break;
}
}
}
if (path.isEmpty()) {
return;
}
m_player->setSource(QUrl::fromLocalFile(path));
m_player->play();
Q_EMIT showIncomingCallDialog();
}
/**
 * Sets up the ringtone player and the loop timer.
 *
 * The child QObjects are parented to this manager so they are destroyed
 * with it (they previously leaked: no parent, never deleted).
 */
MediaManager::MediaManager(QObject *parent)
    : QObject(parent)
    , m_player(new QMediaPlayer(this))
    , m_output(new QAudioOutput(this))
    , m_timer(new QTimer(this))
{
    m_player->setAudioOutput(m_output);
    // Loop the ringtone: restart playback one second after it finishes.
    // stopRinging() uses pause(), which does not enter StoppedState, so the
    // loop does not restart after an explicit stop.
    m_timer->setInterval(1000);
    m_timer->setSingleShot(true);
    connect(m_timer, &QTimer::timeout, this, [this]() {
        m_player->play();
    });
    connect(m_player, &QMediaPlayer::playbackStateChanged, this, [this]() {
        if (m_player->playbackState() == QMediaPlayer::StoppedState) {
            m_timer->start();
        }
    });
}
/**
 * Whether the incoming-call ring loop is currently active.
 *
 * NOTE(review): in the visible code m_ringing is only ever cleared (in
 * stopRinging()) and never set to true — confirm it is set when ringing
 * starts, otherwise this always returns false.
 */
bool MediaManager::isRinging() const
{
    return m_ringing;
}
void MediaManager::stopRinging()
{
m_ringing = false;
m_player->pause();
m_timer->stop();
//Q_EMIT stopRinging();
}

View File

@@ -3,8 +3,13 @@
#pragma once
#include <QAudioOutput>
#include <QMediaPlayer>
#include <QObject>
#include <QQmlEngine>
#include <QTimer>
class NeoChatRoom;
/**
* @class MediaManager
@@ -34,9 +39,29 @@ public:
*/
Q_INVOKABLE void startPlayback();
/**
* Starts ringing if the criteria (see MSC / spec) are met.
*/
void ring(const QJsonObject &json, NeoChatRoom *room);
bool isRinging() const;
Q_SIGNALS:
/**
* @brief Emitted when any media player starts playing. Other objects should stop / pause playback.
*/
void playbackStarted();
void showIncomingCallDialog();
void closeIncomingCallDialog();
private:
void ringUnchecked();
void stopRinging();
QMediaPlayer *m_player;
QAudioOutput *m_output;
QTimer *m_timer;
bool m_ringing = false;
explicit MediaManager(QObject *parent = nullptr);
};

View File

@@ -20,7 +20,7 @@ EmojiModel::EmojiModel(QObject *parent)
, m_configGroup(KConfigGroup(m_config, u"Editor"_s))
{
if (_emojis.isEmpty()) {
#include "emojis.h"
//#include "emojis.h"
}
}

View File

@@ -8,6 +8,7 @@
#include <QMediaPlayer>
#include <QMimeDatabase>
#include <QTemporaryFile>
#include <QProtobufSerializer>
#include <Quotient/events/eventcontent.h>
#include <Quotient/events/eventrelation.h>
@@ -42,6 +43,7 @@
#include "eventhandler.h"
#include "events/pollevent.h"
#include "filetransferpseudojob.h"
#include "mediamanager.h"
#include "neochatconfig.h"
#include "neochatconnection.h"
#include "neochatroommember.h"
@@ -57,6 +59,10 @@
#include <KJobTrackerInterface>
#include <KLocalizedString>
#include "calls/callcontroller.h"
#include "events/callencryptionkeysevent.h"
#include "events/callmemberevent.h"
using namespace Quotient;
NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinState)
@@ -160,6 +166,26 @@ NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinS
const auto neochatconnection = static_cast<NeoChatConnection *>(connection);
Q_ASSERT(neochatconnection);
connect(neochatconnection, &NeoChatConnection::globalUrlPreviewEnabledChanged, this, &NeoChatRoom::urlPreviewEnabledChanged);
connect(this, &Room::aboutToAddNewMessages, this, [this](const auto &messages) {
for (const auto &message : messages) {
if (const auto &memberEvent = eventCast<const CallMemberEvent>(message.get())) {
CallController::instance().handleCallMemberEvent(memberEvent, this);
}
if (const auto &encryptionEvent = eventCast<const CallEncryptionKeysEvent>(message.get())) {
qWarning() << encryptionEvent->fullJson();
Q_ASSERT(false);
}
}
});
// connect(this, &NeoChatRoom::aboutToAddNewMessages, this, [this](const auto &events) {
// for (const auto &event : events) {
// qWarning() << event->fullJson();
// if (event->matrixType() == "org.matrix.msc4075.call.notify"_ls) {
// MediaManager::instance().ring(event->fullJson(), this);
// }
// }
// });
}
bool NeoChatRoom::visible() const

View File

@@ -0,0 +1,151 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "handle.proto";
// Create a new AudioStream
// AudioStream is used to receive audio frames from a track
message NewAudioStreamRequest {
  uint64 track_handle = 1;
  AudioStreamType type = 2;
}
message NewAudioStreamResponse { OwnedAudioStream stream = 1; }
// Create a new AudioSource
message NewAudioSourceRequest {
  AudioSourceType type = 1;
  optional AudioSourceOptions options = 2;
  uint32 sample_rate = 3;
  uint32 num_channels = 4;
}
message NewAudioSourceResponse { OwnedAudioSource source = 1; }
// Push a frame to an AudioSource
// The data provided must be available as long as the client receives the callback.
message CaptureAudioFrameRequest {
  uint64 source_handle = 1;
  AudioFrameBufferInfo buffer = 2;
}
message CaptureAudioFrameResponse {
  uint64 async_id = 1;
}
message CaptureAudioFrameCallback {
  uint64 async_id = 1;
  optional string error = 2;
}
// Create a new AudioResampler
message NewAudioResamplerRequest {}
message NewAudioResamplerResponse {
  OwnedAudioResampler resampler = 1;
}
// Remix and resample an audio frame
message RemixAndResampleRequest {
  uint64 resampler_handle = 1;
  AudioFrameBufferInfo buffer = 2;
  uint32 num_channels = 3;
  uint32 sample_rate = 4;
}
message RemixAndResampleResponse {
  OwnedAudioFrameBuffer buffer = 1;
}
//
// AudioFrame buffer
//
message AudioFrameBufferInfo {
  uint64 data_ptr = 1; // *const i16
  uint32 num_channels = 2;
  uint32 sample_rate = 3;
  uint32 samples_per_channel = 4;
}
message OwnedAudioFrameBuffer {
  FfiOwnedHandle handle = 1;
  AudioFrameBufferInfo info = 2;
}
//
// AudioStream
//
enum AudioStreamType {
  AUDIO_STREAM_NATIVE = 0;
  AUDIO_STREAM_HTML = 1;
}
message AudioStreamInfo {
  AudioStreamType type = 1;
}
message OwnedAudioStream {
  FfiOwnedHandle handle = 1;
  AudioStreamInfo info = 2;
}
message AudioStreamEvent {
  uint64 stream_handle = 1;
  oneof message {
    AudioFrameReceived frame_received = 2;
    AudioStreamEOS eos = 3;
  }
}
message AudioFrameReceived {
  OwnedAudioFrameBuffer frame = 1;
}
message AudioStreamEOS {}
//
// AudioSource
//
message AudioSourceOptions {
  bool echo_cancellation = 1;
  bool noise_suppression = 2;
  bool auto_gain_control = 3;
}
enum AudioSourceType {
  AUDIO_SOURCE_NATIVE = 0;
}
message AudioSourceInfo {
  AudioSourceType type = 2;
}
message OwnedAudioSource {
  FfiOwnedHandle handle = 1;
  AudioSourceInfo info = 2;
}
//
// AudioResampler
//
message AudioResamplerInfo { }
message OwnedAudioResampler {
  FfiOwnedHandle handle = 1;
  AudioResamplerInfo info = 2;
}

154
src/protocols/e2ee.proto Normal file
View File

@@ -0,0 +1,154 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// TODO(theomonnom): Should FrameCryptor be stateful on the client side and have their own handle?
enum EncryptionType {
  NONE = 0;
  GCM = 1;
  CUSTOM = 2;
}
message FrameCryptor {
  string participant_identity = 1;
  string track_sid = 2;
  int32 key_index = 3;
  bool enabled = 4;
}
message KeyProviderOptions {
  // Only specify if you want to use a shared_key
  optional bytes shared_key = 1;
  int32 ratchet_window_size = 2;
  bytes ratchet_salt = 3;
  int32 failure_tolerance = 4; // -1 = no tolerance
}
message E2eeOptions {
  EncryptionType encryption_type = 1;
  KeyProviderOptions key_provider_options = 2;
}
enum EncryptionState {
  NEW = 0;
  OK = 1;
  ENCRYPTION_FAILED = 2;
  DECRYPTION_FAILED = 3;
  MISSING_KEY = 4;
  KEY_RATCHETED = 5;
  INTERNAL_ERROR = 6;
}
message E2eeManagerSetEnabledRequest {
  bool enabled = 1;
}
message E2eeManagerSetEnabledResponse {}
message E2eeManagerGetFrameCryptorsRequest {}
message E2eeManagerGetFrameCryptorsResponse {
  repeated FrameCryptor frame_cryptors = 1;
}
message FrameCryptorSetEnabledRequest {
  string participant_identity = 1;
  string track_sid = 2;
  bool enabled = 3;
}
message FrameCryptorSetEnabledResponse { }
message FrameCryptorSetKeyIndexRequest {
  string participant_identity = 1;
  string track_sid = 2;
  int32 key_index = 3;
}
message FrameCryptorSetKeyIndexResponse { }
message SetSharedKeyRequest {
  bytes shared_key = 1;
  int32 key_index = 2;
}
message SetSharedKeyResponse { }
message RatchetSharedKeyRequest {
  int32 key_index = 1;
}
message RatchetSharedKeyResponse {
  optional bytes new_key = 1;
}
message GetSharedKeyRequest {
  int32 key_index = 1;
}
message GetSharedKeyResponse {
  optional bytes key = 1;
}
message SetKeyRequest {
  string participant_identity = 1;
  bytes key = 2;
  int32 key_index = 3;
}
message SetKeyResponse {}
message RatchetKeyRequest {
  string participant_identity = 1;
  int32 key_index = 2;
}
message RatchetKeyResponse {
  optional bytes new_key = 1;
}
message GetKeyRequest {
  string participant_identity = 1;
  int32 key_index = 2;
}
message GetKeyResponse {
  optional bytes key = 1;
}
message E2eeRequest {
  uint64 room_handle = 1;
  oneof message {
    E2eeManagerSetEnabledRequest manager_set_enabled = 2;
    E2eeManagerGetFrameCryptorsRequest manager_get_frame_cryptors = 3;
    FrameCryptorSetEnabledRequest cryptor_set_enabled = 4;
    FrameCryptorSetKeyIndexRequest cryptor_set_key_index = 5;
    SetSharedKeyRequest set_shared_key = 6;
    RatchetSharedKeyRequest ratchet_shared_key = 7;
    GetSharedKeyRequest get_shared_key = 8;
    SetKeyRequest set_key = 9;
    RatchetKeyRequest ratchet_key = 10;
    GetKeyRequest get_key = 11;
  }
}
message E2eeResponse {
  oneof message {
    E2eeManagerSetEnabledResponse manager_set_enabled = 1;
    E2eeManagerGetFrameCryptorsResponse manager_get_frame_cryptors = 2;
    FrameCryptorSetEnabledResponse cryptor_set_enabled = 3;
    FrameCryptorSetKeyIndexResponse cryptor_set_key_index = 4;
    SetSharedKeyResponse set_shared_key = 5;
    RatchetSharedKeyResponse ratchet_shared_key = 6;
    GetSharedKeyResponse get_shared_key = 7;
    SetKeyResponse set_key = 8;
    RatchetKeyResponse ratchet_key = 9;
    GetKeyResponse get_key = 10;
  }
}

195
src/protocols/ffi.proto Normal file
View File

@@ -0,0 +1,195 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// import "handle.proto";
import "e2ee.proto";
import "track.proto";
import "room.proto";
import "video_frame.proto";
import "audio_frame.proto";
// **How is the livekit-ffi working:
// We refer as the ffi server the Rust server that is running the LiveKit client implementation, and we
// refer as the ffi client the foreign language that communicates with the ffi server. (e.g Python SDK, Unity SDK, etc...)
//
// We expose the Rust client implementation of livekit using the protocol defined here.
// Everything starts with a FfiRequest, which is a oneof message that contains all the possible
// requests that can be made to the ffi server.
// The server will then respond with a FfiResponse, which is also a oneof message that contains
// all the possible responses.
// The first request sent to the server must be an InitializeRequest, which contains a pointer
// to the callback function that will be used to send events and async responses to the ffi client.
// (e.g participant joined, track published, etc...)
//
// **Useful things to know when collaborating on the protocol:**
// Everything is subject to discussion and change :-)
//
// - The ffi client implementation must never forget to correctly dispose all the owned handles
//   that it receives from the server.
//
//   Therefore, the ffi client is easier to implement if there is less handles to manage.
//
// - We are mainly using FfiHandle on info messages (e.g: RoomInfo, TrackInfo, etc...)
//   For this reason, info are only sent once, at creation (We're not using them for updates, we can infer them from
//   events on the client implementation).
//   e.g: set speaking to true when we receive a ActiveSpeakerChanged event.
// This is the input of livekit_ffi_request function
// We always expect a response (FFIResponse, even if it's empty)
message FfiRequest {
  oneof message {
    DisposeRequest dispose = 2;
    // Room
    ConnectRequest connect = 3;
    DisconnectRequest disconnect = 4;
    PublishTrackRequest publish_track = 5;
    UnpublishTrackRequest unpublish_track = 6;
    PublishDataRequest publish_data = 7;
    SetSubscribedRequest set_subscribed = 8;
    UpdateLocalMetadataRequest update_local_metadata = 9;
    UpdateLocalNameRequest update_local_name = 10;
    GetSessionStatsRequest get_session_stats = 11;
    // Track
    CreateVideoTrackRequest create_video_track = 12;
    CreateAudioTrackRequest create_audio_track = 13;
    GetStatsRequest get_stats = 14;
    // Video
    NewVideoStreamRequest new_video_stream = 16;
    NewVideoSourceRequest new_video_source = 17;
    CaptureVideoFrameRequest capture_video_frame = 18;
    VideoConvertRequest video_convert = 19;
    // Audio
    NewAudioStreamRequest new_audio_stream = 22;
    NewAudioSourceRequest new_audio_source = 23;
    CaptureAudioFrameRequest capture_audio_frame = 24;
    NewAudioResamplerRequest new_audio_resampler = 25;
    RemixAndResampleRequest remix_and_resample = 26;
    E2eeRequest e2ee = 27;
  }
}
// This is the output of livekit_ffi_request function.
message FfiResponse {
  oneof message {
    DisposeResponse dispose = 2;
    // Room
    ConnectResponse connect = 3;
    DisconnectResponse disconnect = 4;
    PublishTrackResponse publish_track = 5;
    UnpublishTrackResponse unpublish_track = 6;
    PublishDataResponse publish_data = 7;
    SetSubscribedResponse set_subscribed = 8;
    UpdateLocalMetadataResponse update_local_metadata = 9;
    UpdateLocalNameResponse update_local_name = 10;
    GetSessionStatsResponse get_session_stats = 11;
    // Track
    CreateVideoTrackResponse create_video_track = 12;
    CreateAudioTrackResponse create_audio_track = 13;
    GetStatsResponse get_stats = 14;
    // Video
    NewVideoStreamResponse new_video_stream = 16;
    NewVideoSourceResponse new_video_source = 17;
    CaptureVideoFrameResponse capture_video_frame = 18;
    VideoConvertResponse video_convert = 19;
    // Audio
    NewAudioStreamResponse new_audio_stream = 22;
    NewAudioSourceResponse new_audio_source = 23;
    CaptureAudioFrameResponse capture_audio_frame = 24;
    NewAudioResamplerResponse new_audio_resampler = 25;
    RemixAndResampleResponse remix_and_resample = 26;
    E2eeResponse e2ee = 27;
  }
}
// To minimize complexity, participant events are not included in the protocol.
// It is easily deducible from the room events and it turned out that it is easier to implement
// on the ffi client side.
message FfiEvent {
  oneof message {
    RoomEvent room_event = 1;
    TrackEvent track_event = 2;
    VideoStreamEvent video_stream_event = 3;
    AudioStreamEvent audio_stream_event = 4;
    ConnectCallback connect = 5;
    DisconnectCallback disconnect = 6;
    DisposeCallback dispose = 7;
    PublishTrackCallback publish_track = 8;
    UnpublishTrackCallback unpublish_track = 9;
    PublishDataCallback publish_data = 10;
    CaptureAudioFrameCallback capture_audio_frame = 11;
    UpdateLocalMetadataCallback update_local_metadata = 12;
    UpdateLocalNameCallback update_local_name = 13;
    GetStatsCallback get_stats = 14;
    LogBatch logs = 15;
    GetSessionStatsCallback get_session_stats = 16;
    Panic panic = 17;
  }
}
// Stop all rooms synchronously (Do we need async here?).
// e.g: This is used for the Unity Editor after each assemblies reload.
// TODO(theomonnom): Implement a debug mode where we can find all leaked handles?
message DisposeRequest {
  bool async = 1;
}
message DisposeResponse {
  optional uint64 async_id = 1; // None if sync
}
message DisposeCallback {
  uint64 async_id = 1;
}
enum LogLevel {
  LOG_ERROR = 0;
  LOG_WARN = 1;
  LOG_INFO = 2;
  LOG_DEBUG = 3;
  LOG_TRACE = 4;
}
message LogRecord {
  LogLevel level = 1;
  string target = 2; // e.g "livekit", "libwebrtc", "tokio-tungstenite", etc...
  optional string module_path = 3;
  optional string file = 4;
  optional uint32 line = 5;
  string message = 6;
}
message LogBatch {
  repeated LogRecord records = 1;
}
message Panic {
  string message = 1;
}
// TODO(theomonnom): Debug messages (Print handles).

View File

@@ -0,0 +1,31 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// # Safety
// The foreign language is responsible for disposing handles
// Forgetting to dispose the handle may lead to memory leaks
//
// Dropping a handle doesn't necessarily mean that the object is destroyed if it is still used
// on the FfiServer (Atomic reference counting)
//
// When referring to a handle without owning it, we just use a uint32 without this message.
// (the variable name is suffixed with "_handle")
message FfiOwnedHandle {
  uint64 id = 1;
}

View File

@@ -0,0 +1,32 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "handle.proto";
message ParticipantInfo {
  string sid = 1;
  string name = 2;
  string identity = 3;
  string metadata = 4;
}
message OwnedParticipant {
  FfiOwnedHandle handle = 1;
  ParticipantInfo info = 2;
}

385
src/protocols/room.proto Normal file
View File

@@ -0,0 +1,385 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the livekit/rust-sdks FFI protocol; keep in
// sync with upstream and never change field numbers by hand.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "e2ee.proto";
import "handle.proto";
import "participant.proto";
import "track.proto";
import "video_frame.proto";
import "stats.proto";
// Connect to a new LiveKit room
message ConnectRequest {
  string url = 1;
  string token = 2;
  RoomOptions options = 3;
}
message ConnectResponse {
  uint64 async_id = 1;
}
message ConnectCallback {
  message ParticipantWithTracks {
    OwnedParticipant participant = 1;
    // TrackInfo are not needed here, if we're subscribed to a track, the FfiServer will send
    // a TrackSubscribed event
    repeated OwnedTrackPublication publications = 2;
  }
  uint64 async_id = 1;
  optional string error = 2;
  OwnedRoom room = 3;
  OwnedParticipant local_participant = 4;
  repeated ParticipantWithTracks participants = 5;
}
// Disconnect from a room
message DisconnectRequest { uint64 room_handle = 1; }
message DisconnectResponse { uint64 async_id = 1; }
message DisconnectCallback { uint64 async_id = 1; }
// Publish a track to the room
message PublishTrackRequest {
  uint64 local_participant_handle = 1;
  uint64 track_handle = 2;
  TrackPublishOptions options = 3;
}
message PublishTrackResponse {
  uint64 async_id = 1;
}
message PublishTrackCallback {
  uint64 async_id = 1;
  optional string error = 2;
  OwnedTrackPublication publication = 3;
}
// Unpublish a track from the room
message UnpublishTrackRequest {
  uint64 local_participant_handle = 1;
  string track_sid = 2;
  bool stop_on_unpublish = 3;
}
message UnpublishTrackResponse {
  uint64 async_id = 1;
}
message UnpublishTrackCallback {
  uint64 async_id = 1;
  optional string error = 2;
}
// Publish data to other participants
message PublishDataRequest {
  uint64 local_participant_handle = 1;
  uint64 data_ptr = 2;
  uint64 data_len = 3;
  DataPacketKind kind = 4;
  repeated string destination_sids = 5; // destination
  optional string topic = 6;
}
message PublishDataResponse {
  uint64 async_id = 1;
}
message PublishDataCallback {
  uint64 async_id = 1;
  optional string error = 2;
}
// Change the local participant's metadata
message UpdateLocalMetadataRequest {
  uint64 local_participant_handle = 1;
  string metadata = 2;
}
message UpdateLocalMetadataResponse {
  uint64 async_id = 1;
}
message UpdateLocalMetadataCallback {
  uint64 async_id = 1;
}
// Change the local participant's name
message UpdateLocalNameRequest {
  uint64 local_participant_handle = 1;
  string name = 2;
}
message UpdateLocalNameResponse {
  uint64 async_id = 1;
}
message UpdateLocalNameCallback {
  uint64 async_id = 1;
}
// Change the "desire" to subscribe to a track
message SetSubscribedRequest {
  bool subscribe = 1;
  uint64 publication_handle = 2;
}
message SetSubscribedResponse {}
message GetSessionStatsRequest {
  uint64 room_handle = 1;
}
message GetSessionStatsResponse {
  uint64 async_id = 1;
}
message GetSessionStatsCallback {
  uint64 async_id = 1;
  optional string error = 2;
  repeated RtcStats publisher_stats = 3;
  repeated RtcStats subscriber_stats = 4;
}
//
// Options
//
message VideoEncoding {
  uint64 max_bitrate = 1;
  double max_framerate = 2;
}
message AudioEncoding {
  uint64 max_bitrate = 1;
}
message TrackPublishOptions {
  // encodings are optional
  VideoEncoding video_encoding = 1;
  AudioEncoding audio_encoding = 2;
  VideoCodec video_codec = 3;
  bool dtx = 4;
  bool red = 5;
  bool simulcast = 6;
  TrackSource source = 7;
}
enum IceTransportType {
  TRANSPORT_RELAY = 0;
  TRANSPORT_NOHOST = 1;
  TRANSPORT_ALL = 2;
}
enum ContinualGatheringPolicy {
  GATHER_ONCE = 0;
  GATHER_CONTINUALLY = 1;
}
message IceServer {
  repeated string urls = 1;
  string username = 2;
  string password = 3;
}
message RtcConfig {
  optional IceTransportType ice_transport_type = 1;
  optional ContinualGatheringPolicy continual_gathering_policy = 2;
  repeated IceServer ice_servers = 3; // empty fallback to default
}
message RoomOptions {
  bool auto_subscribe = 1;
  bool adaptive_stream = 2;
  bool dynacast = 3;
  optional E2eeOptions e2ee = 4;
  optional RtcConfig rtc_config = 5; // allow to setup a custom RtcConfiguration
  uint32 join_retries = 6;
}
//
// Room
//
enum ConnectionQuality {
  QUALITY_POOR = 0;
  QUALITY_GOOD = 1;
  QUALITY_EXCELLENT = 2;
  QUALITY_LOST = 3;
}
enum ConnectionState {
  CONN_DISCONNECTED = 0;
  CONN_CONNECTED = 1;
  CONN_RECONNECTING = 2;
}
enum DataPacketKind {
  KIND_LOSSY = 0;
  KIND_RELIABLE = 1;
}
message BufferInfo {
  uint64 data_ptr = 1;
  uint64 data_len = 2;
}
message OwnedBuffer {
  FfiOwnedHandle handle = 1;
  BufferInfo data = 2;
}
message RoomEvent {
  uint64 room_handle = 1;
  oneof message {
    ParticipantConnected participant_connected = 2;
    ParticipantDisconnected participant_disconnected = 3;
    LocalTrackPublished local_track_published = 4;
    LocalTrackUnpublished local_track_unpublished = 5;
    TrackPublished track_published = 6;
    TrackUnpublished track_unpublished = 7;
    TrackSubscribed track_subscribed = 8;
    TrackUnsubscribed track_unsubscribed = 9;
    TrackSubscriptionFailed track_subscription_failed = 10;
    TrackMuted track_muted = 11;
    TrackUnmuted track_unmuted = 12;
    ActiveSpeakersChanged active_speakers_changed = 13;
    RoomMetadataChanged room_metadata_changed = 14;
    ParticipantMetadataChanged participant_metadata_changed = 15;
    ParticipantNameChanged participant_name_changed = 16;
    ConnectionQualityChanged connection_quality_changed = 17;
    ConnectionStateChanged connection_state_changed = 19;
    // Connected connected = 20;
    Disconnected disconnected = 21;
    Reconnecting reconnecting = 22;
    Reconnected reconnected = 23;
    E2eeStateChanged e2ee_state_changed = 24;
    RoomEOS eos = 25; // The stream of room events has ended
    DataPacketReceived data_packet_received = 26;
  }
}
message RoomInfo {
  string sid = 1;
  string name = 2;
  string metadata = 3;
}
message OwnedRoom {
  FfiOwnedHandle handle = 1;
  RoomInfo info = 2;
}
message ParticipantConnected { OwnedParticipant info = 1; }
message ParticipantDisconnected {
  string participant_sid = 1;
}
message LocalTrackPublished {
  // The TrackPublicationInfo comes from the PublishTrack response
  // and the FfiClient must wait for it before firing this event
  string track_sid = 1;
}
message LocalTrackUnpublished {
  string publication_sid = 1;
}
message TrackPublished {
  string participant_sid = 1;
  OwnedTrackPublication publication = 2;
}
message TrackUnpublished {
  string participant_sid = 1;
  string publication_sid = 2;
}
// Publication isn't needed for subscription events on the FFI
// The FFI will retrieve the publication using the Track sid
message TrackSubscribed {
  string participant_sid = 1;
  OwnedTrack track = 2;
}
message TrackUnsubscribed {
  // The FFI language can dispose/remove the VideoSink here
  string participant_sid = 1;
  string track_sid = 2;
}
message TrackSubscriptionFailed {
  string participant_sid = 1;
  string track_sid = 2;
  string error = 3;
}
message TrackMuted {
  string participant_sid = 1;
  string track_sid = 2;
}
message TrackUnmuted {
  string participant_sid = 1;
  string track_sid = 2;
}
message E2eeStateChanged {
  string participant_sid = 1; // Using sid instead of identity for ffi communication
  EncryptionState state = 2;
}
message ActiveSpeakersChanged { repeated string participant_sids = 1; }
message RoomMetadataChanged {
  string metadata = 1;
}
message ParticipantMetadataChanged {
  string participant_sid = 1;
  string metadata = 2;
}
message ParticipantNameChanged {
  string participant_sid = 1;
  string name = 2;
}
message ConnectionQualityChanged {
  string participant_sid = 1;
  ConnectionQuality quality = 2;
}
message UserPacket {
  OwnedBuffer data = 1;
  optional string topic = 2;
}
message SipDTMF {
  uint32 code = 1;
  optional string digit = 2;
}
message DataPacketReceived {
  DataPacketKind kind = 1;
  string participant_identity = 2; // Can be empty if the data is sent by a server SDK
  optional string participant_sid = 3 [deprecated=true]; // Can be empty if the data is sent by a server SDK
  oneof value {
    UserPacket user = 4;
    SipDTMF sip_dtmf = 5;
  }
}
message ConnectionStateChanged { ConnectionState state = 1; }
message Connected {}
message Disconnected {}
message Reconnecting {}
message Reconnected {}
message RoomEOS {}

449
src/protocols/stats.proto Normal file
View File

@@ -0,0 +1,449 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
enum DataChannelState {
DC_CONNECTING = 0;
DC_OPEN = 1;
DC_CLOSING = 2;
DC_CLOSED = 3;
}
enum QualityLimitationReason {
LIMITATION_NONE = 0;
LIMITATION_CPU = 1;
LIMITATION_BANDWIDTH = 2;
LIMITATION_OTHER = 3;
}
enum IceRole {
ICE_UNKNOWN = 0;
ICE_CONTROLLING = 1;
ICE_CONTROLLED = 2;
}
enum DtlsTransportState {
DTLS_TRANSPORT_NEW = 0;
DTLS_TRANSPORT_CONNECTING = 1;
DTLS_TRANSPORT_CONNECTED = 2;
DTLS_TRANSPORT_CLOSED = 3;
DTLS_TRANSPORT_FAILED = 4;
}
enum IceTransportState {
ICE_TRANSPORT_NEW = 0;
ICE_TRANSPORT_CHECKING = 1;
ICE_TRANSPORT_CONNECTED = 2;
ICE_TRANSPORT_COMPLETED = 3;
ICE_TRANSPORT_DISCONNECTED = 4;
ICE_TRANSPORT_FAILED = 5;
ICE_TRANSPORT_CLOSED = 6;
}
enum DtlsRole {
DTLS_CLIENT = 0;
DTLS_SERVER = 1;
DTLS_UNKNOWN = 2;
}
enum IceCandidatePairState {
PAIR_FROZEN = 0;
PAIR_WAITING = 1;
PAIR_IN_PROGRESS = 2;
PAIR_FAILED = 3;
PAIR_SUCCEEDED = 4;
}
enum IceCandidateType {
HOST = 0;
SRFLX = 1;
PRFLX = 2;
RELAY = 3;
}
enum IceServerTransportProtocol {
TRANSPORT_UDP = 0;
TRANSPORT_TCP = 1;
TRANSPORT_TLS = 2;
}
enum IceTcpCandidateType {
CANDIDATE_ACTIVE = 0;
CANDIDATE_PASSIVE = 1;
CANDIDATE_SO = 2;
}
message RtcStats {
message Codec {
RtcStatsData rtc = 1;
CodecStats codec = 2;
}
message InboundRtp {
RtcStatsData rtc = 1;
RtpStreamStats stream = 2;
ReceivedRtpStreamStats received = 3;
InboundRtpStreamStats inbound = 4;
}
message OutboundRtp {
RtcStatsData rtc = 1;
RtpStreamStats stream = 2;
SentRtpStreamStats sent = 3;
OutboundRtpStreamStats outbound = 4;
}
message RemoteInboundRtp {
RtcStatsData rtc = 1;
RtpStreamStats stream = 2;
ReceivedRtpStreamStats received = 3;
RemoteInboundRtpStreamStats remote_inbound = 4;
}
message RemoteOutboundRtp {
RtcStatsData rtc = 1;
RtpStreamStats stream = 2;
SentRtpStreamStats sent = 3;
RemoteOutboundRtpStreamStats remote_outbound = 4;
}
message MediaSource {
RtcStatsData rtc = 1;
MediaSourceStats source = 2;
AudioSourceStats audio = 3;
VideoSourceStats video = 4;
}
message MediaPlayout {
RtcStatsData rtc = 1;
AudioPlayoutStats audio_playout = 2;
}
message PeerConnection {
RtcStatsData rtc = 1;
PeerConnectionStats pc = 2;
}
message DataChannel {
RtcStatsData rtc = 1;
DataChannelStats dc = 2;
}
message Transport {
RtcStatsData rtc = 1;
TransportStats transport = 2;
}
message CandidatePair {
RtcStatsData rtc = 1;
CandidatePairStats candidate_pair = 2;
}
message LocalCandidate {
RtcStatsData rtc = 1;
IceCandidateStats candidate = 2;
}
message RemoteCandidate {
RtcStatsData rtc = 1;
IceCandidateStats candidate = 2;
}
message Certificate {
RtcStatsData rtc = 1;
CertificateStats certificate = 2;
}
message Track {
// Deprecated
}
oneof stats {
Codec codec = 3;
InboundRtp inbound_rtp = 4;
OutboundRtp outbound_rtp = 5;
RemoteInboundRtp remote_inbound_rtp = 6;
RemoteOutboundRtp remote_outbound_rtp = 7;
MediaSource media_source = 8;
MediaPlayout media_playout = 9;
PeerConnection peer_connection = 10;
DataChannel data_channel = 11;
Transport transport = 12;
CandidatePair candidate_pair = 13;
LocalCandidate local_candidate = 14;
RemoteCandidate remote_candidate = 15;
Certificate certificate = 16;
Track track = 17;
}
}
message RtcStatsData {
string id = 1;
int64 timestamp = 2;
}
message CodecStats {
uint32 payload_type = 1;
string transport_id = 2;
string mime_type = 3;
uint32 clock_rate = 4;
uint32 channels = 5;
string sdp_fmtp_line = 6;
}
message RtpStreamStats {
uint32 ssrc = 1;
string kind = 2;
string transport_id = 3;
string codec_id = 4;
}
message ReceivedRtpStreamStats {
uint64 packets_received = 1;
int64 packets_lost = 2;
double jitter = 3;
}
message InboundRtpStreamStats {
string track_identifier = 1;
string mid = 2;
string remote_id = 3;
uint32 frames_decoded = 4;
uint32 key_frames_decoded = 5;
uint32 frames_rendered = 6;
uint32 frames_dropped = 7;
uint32 frame_width = 8;
uint32 frame_height = 9;
double frames_per_second = 10;
uint64 qp_sum = 11;
double total_decode_time = 12;
double total_inter_frame_delay = 13;
double total_squared_inter_frame_delay = 14;
uint32 pause_count = 15;
double total_pause_duration = 16;
uint32 freeze_count = 17;
double total_freeze_duration = 18;
double last_packet_received_timestamp = 19;
uint64 header_bytes_received = 20;
uint64 packets_discarded = 21;
uint64 fec_bytes_received = 22;
uint64 fec_packets_received = 23;
uint64 fec_packets_discarded = 24;
uint64 bytes_received = 25;
uint32 nack_count = 26;
uint32 fir_count = 27;
uint32 pli_count = 28;
double total_processing_delay = 29;
double estimated_playout_timestamp = 30;
double jitter_buffer_delay = 31;
double jitter_buffer_target_delay = 32;
uint64 jitter_buffer_emitted_count = 33;
double jitter_buffer_minimum_delay = 34;
uint64 total_samples_received = 35;
uint64 concealed_samples = 36;
uint64 silent_concealed_samples = 37;
uint64 concealment_events = 38;
uint64 inserted_samples_for_deceleration = 39;
uint64 removed_samples_for_acceleration = 40;
double audio_level = 41;
double total_audio_energy = 42;
double total_samples_duration = 43;
uint64 frames_received = 44;
string decoder_implementation = 45;
string playout_id = 46;
bool power_efficient_decoder = 47;
uint64 frames_assembled_from_multiple_packets = 48;
double total_assembly_time = 49;
uint64 retransmitted_packets_received = 50;
uint64 retransmitted_bytes_received = 51;
uint32 rtx_ssrc = 52;
uint32 fec_ssrc = 53;
}
message SentRtpStreamStats {
uint64 packets_sent = 1;
uint64 bytes_sent = 2;
}
message OutboundRtpStreamStats {
string mid = 1;
string media_source_id = 2;
string remote_id = 3;
string rid = 4;
uint64 header_bytes_sent = 5;
uint64 retransmitted_packets_sent = 6;
uint64 retransmitted_bytes_sent = 7;
uint32 rtx_ssrc = 8;
double target_bitrate = 9;
uint64 total_encoded_bytes_target = 10;
uint32 frame_width = 11;
uint32 frame_height = 12;
double frames_per_second = 13;
uint32 frames_sent = 14;
uint32 huge_frames_sent = 15;
uint32 frames_encoded = 16;
uint32 key_frames_encoded = 17;
uint64 qp_sum = 18;
double total_encode_time = 19;
double total_packet_send_delay = 20;
QualityLimitationReason quality_limitation_reason = 21;
map<string, double> quality_limitation_durations = 22;
uint32 quality_limitation_resolution_changes = 23;
uint32 nack_count = 24;
uint32 fir_count = 25;
uint32 pli_count = 26;
string encoder_implementation = 27;
bool power_efficient_encoder = 28;
bool active = 29;
string scalibility_mode = 30;
}
message RemoteInboundRtpStreamStats {
string local_id = 1;
double round_trip_time = 2;
double total_round_trip_time = 3;
double fraction_lost = 4;
uint64 round_trip_time_measurements = 5;
}
message RemoteOutboundRtpStreamStats {
string local_id = 1;
double remote_timestamp = 2;
uint64 reports_sent = 3;
double round_trip_time = 4;
double total_round_trip_time = 5;
uint64 round_trip_time_measurements = 6;
}
message MediaSourceStats {
string track_identifier = 1;
string kind = 2;
}
message AudioSourceStats {
double audio_level = 1;
double total_audio_energy = 2;
double total_samples_duration = 3;
double echo_return_loss = 4;
double echo_return_loss_enhancement = 5;
double dropped_samples_duration = 6;
uint32 dropped_samples_events = 7;
double total_capture_delay = 8;
uint64 total_samples_captured = 9;
}
message VideoSourceStats {
uint32 width = 1;
uint32 height = 2;
uint32 frames = 3;
double frames_per_second = 4;
}
message AudioPlayoutStats {
string kind = 1;
double synthesized_samples_duration = 2;
uint32 synthesized_samples_events = 3;
double total_samples_duration = 4;
double total_playout_delay = 5;
uint64 total_samples_count = 6;
}
message PeerConnectionStats {
uint32 data_channels_opened = 1;
uint32 data_channels_closed = 2;
}
message DataChannelStats {
string label = 1;
string protocol = 2;
int32 data_channel_identifier = 3;
optional DataChannelState state = 4;
uint32 messages_sent = 5;
uint64 bytes_sent = 6;
uint32 messages_received = 7;
uint64 bytes_received = 8;
}
message TransportStats {
uint64 packets_sent = 1;
uint64 packets_received = 2;
uint64 bytes_sent = 3;
uint64 bytes_received = 4;
IceRole ice_role = 5;
string ice_local_username_fragment = 6;
optional DtlsTransportState dtls_state = 7;
optional IceTransportState ice_state = 8;
string selected_candidate_pair_id = 9;
string local_certificate_id = 10;
string remote_certificate_id = 11;
string tls_version = 12;
string dtls_cipher = 13;
DtlsRole dtls_role = 14;
string srtp_cipher = 15;
uint32 selected_candidate_pair_changes = 16;
}
message CandidatePairStats {
string transport_id = 1;
string local_candidate_id = 2;
string remote_candidate_id = 3;
optional IceCandidatePairState state = 4;
bool nominated = 5;
uint64 packets_sent = 6;
uint64 packets_received = 7;
uint64 bytes_sent = 8;
uint64 bytes_received = 9;
double last_packet_sent_timestamp = 10;
double last_packet_received_timestamp = 11;
double total_round_trip_time = 12;
double current_round_trip_time = 13;
double available_outgoing_bitrate = 14;
double available_incoming_bitrate = 15;
uint64 requests_received = 16;
uint64 requests_sent = 17;
uint64 responses_received = 18;
uint64 responses_sent = 19;
uint64 consent_requests_sent = 20;
uint32 packets_discarded_on_send = 21;
uint64 bytes_discarded_on_send = 22;
}
message IceCandidateStats {
string transport_id = 1;
string address = 2;
int32 port = 3;
string protocol = 4;
optional IceCandidateType candidate_type = 5;
int32 priority = 6;
string url = 7;
optional IceServerTransportProtocol relay_protocol = 8;
string foundation = 9;
string related_address = 10;
int32 related_port = 11;
string username_fragment = 12;
optional IceTcpCandidateType tcp_type = 13;
}
message CertificateStats {
string fingerprint = 1;
string fingerprint_algorithm = 2;
string base64_certificate = 3;
string issuer_certificate_id = 4;
}

111
src/protocols/track.proto Normal file
View File

@@ -0,0 +1,111 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "e2ee.proto";
import "handle.proto";
import "stats.proto";
// Create a new VideoTrack from a VideoSource
message CreateVideoTrackRequest {
string name = 1;
uint64 source_handle = 2;
}
message CreateVideoTrackResponse {
OwnedTrack track = 1;
}
// Create a new AudioTrack from a AudioSource
message CreateAudioTrackRequest {
string name = 1;
uint64 source_handle = 2;
}
message CreateAudioTrackResponse {
OwnedTrack track = 1;
}
message GetStatsRequest {
uint64 track_handle = 1;
}
message GetStatsResponse {
uint64 async_id = 1;
}
message GetStatsCallback {
uint64 async_id = 1;
optional string error = 2;
repeated RtcStats stats = 3;
}
//
// Track
//
message TrackEvent {}
enum TrackKind {
KIND_UNKNOWN = 0;
KIND_AUDIO = 1;
KIND_VIDEO = 2;
}
enum TrackSource {
SOURCE_UNKNOWN = 0;
SOURCE_CAMERA = 1;
SOURCE_MICROPHONE = 2;
SOURCE_SCREENSHARE = 3;
SOURCE_SCREENSHARE_AUDIO = 4;
}
enum StreamState {
STATE_UNKNOWN = 0;
STATE_ACTIVE = 1;
STATE_PAUSED = 2;
}
message TrackPublicationInfo {
string sid = 1;
string name = 2;
TrackKind kind = 3;
TrackSource source = 4;
bool simulcasted = 5;
uint32 width = 6;
uint32 height = 7;
string mime_type = 8;
bool muted = 9;
bool remote = 10;
EncryptionType encryption_type = 11;
}
message OwnedTrackPublication {
FfiOwnedHandle handle = 1;
TrackPublicationInfo info = 2;
}
message TrackInfo {
string sid = 1;
string name = 2;
TrackKind kind = 3;
StreamState stream_state = 4;
bool muted = 5;
bool remote = 6;
}
message OwnedTrack {
FfiOwnedHandle handle = 1;
TrackInfo info = 2;
}

View File

@@ -0,0 +1,175 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "handle.proto";
// Create a new VideoStream
// VideoStream is used to receive video frames from a track
message NewVideoStreamRequest {
uint64 track_handle = 1;
VideoStreamType type = 2;
// Get the frame on a specific format
optional VideoBufferType format = 3;
bool normalize_stride = 4; // if true, stride will be set to width/chroma_width
}
message NewVideoStreamResponse { OwnedVideoStream stream = 1; }
// Create a new VideoSource
// VideoSource is used to send video frame to a track
message NewVideoSourceRequest {
VideoSourceType type = 1;
// Used to determine which encodings to use + simulcast layers
// Most of the time it corresponds to the source resolution
VideoSourceResolution resolution = 2;
}
message NewVideoSourceResponse { OwnedVideoSource source = 1; }
// Push a frame to a VideoSource
message CaptureVideoFrameRequest {
uint64 source_handle = 1;
VideoBufferInfo buffer = 2;
int64 timestamp_us = 3; // In microseconds
VideoRotation rotation = 4;
}
message CaptureVideoFrameResponse {}
message VideoConvertRequest {
bool flip_y = 1;
VideoBufferInfo buffer = 2;
VideoBufferType dst_type = 3;
}
message VideoConvertResponse {
optional string error = 1;
OwnedVideoBuffer buffer = 2;
}
//
// VideoFrame buffers
//
message VideoResolution {
uint32 width = 1;
uint32 height = 2;
double frame_rate = 3;
}
enum VideoCodec {
VP8 = 0;
H264 = 1;
AV1 = 2;
VP9 = 3;
}
enum VideoRotation {
VIDEO_ROTATION_0 = 0;
VIDEO_ROTATION_90 = 1;
VIDEO_ROTATION_180 = 2;
VIDEO_ROTATION_270 = 3;
}
enum VideoBufferType {
RGBA = 0;
ABGR = 1;
ARGB = 2;
BGRA = 3;
RGB24 = 4;
I420 = 5;
I420A = 6;
I422 = 7;
I444 = 8;
I010 = 9;
NV12 = 10;
}
message VideoBufferInfo {
message ComponentInfo {
uint64 data_ptr = 1;
uint32 stride = 2;
uint32 size = 3;
}
VideoBufferType type = 1;
uint32 width = 2;
uint32 height = 3;
uint64 data_ptr = 4;
uint32 stride = 6; // only for packed formats
repeated ComponentInfo components = 7;
}
message OwnedVideoBuffer {
FfiOwnedHandle handle = 1;
VideoBufferInfo info = 2;
}
//
// VideoStream
//
enum VideoStreamType {
VIDEO_STREAM_NATIVE = 0;
VIDEO_STREAM_WEBGL = 1;
VIDEO_STREAM_HTML = 2;
}
message VideoStreamInfo {
VideoStreamType type = 1;
}
message OwnedVideoStream {
FfiOwnedHandle handle = 1;
VideoStreamInfo info = 2;
}
message VideoStreamEvent {
uint64 stream_handle = 1;
oneof message {
VideoFrameReceived frame_received = 2;
VideoStreamEOS eos = 3;
}
}
message VideoFrameReceived {
OwnedVideoBuffer buffer = 1;
int64 timestamp_us = 2; // In microseconds
VideoRotation rotation = 3;
}
message VideoStreamEOS {}
//
// VideoSource
//
message VideoSourceResolution {
uint32 width = 1;
uint32 height = 2;
}
enum VideoSourceType {
VIDEO_SOURCE_NATIVE = 0;
}
message VideoSourceInfo {
VideoSourceType type = 1;
}
message OwnedVideoSource {
FfiOwnedHandle handle = 1;
VideoSourceInfo info = 2;
}

View File

@@ -101,7 +101,11 @@ KirigamiComponents.ConvergentContextMenu {
}
enabled: Controller.csSupported
}
// QQC2.MenuItem {
// text: i18n("Show livekit logs")
// icon.name: "dialog-xml-editor"
// onTriggered: livekitLogViewerComponent.createObject(applicationWindow().overlay)
// }
QQC2.Action {
text: i18n("Logout")
icon.name: "im-kick-user"
@@ -111,4 +115,9 @@ KirigamiComponents.ConvergentContextMenu {
readonly property Component confirmLogoutDialogComponent: ConfirmLogoutDialog {
connection: root.connection
}
Component {
id: livekitLogViewerComponent
LivekitLogViewer {}
}
}

86
src/qml/CallPage.qml Normal file
View File

@@ -0,0 +1,86 @@
import QtQuick
import QtQuick.Controls
import QtQuick.Layouts
import QtMultimedia
import org.kde.kirigami as Kirigami
import org.kde.neochat
Kirigami.Page {
id: callPage
title: i18nc("@title", "Call")
VideoOutput {
id: video
anchors.fill: parent
visible: false
Component.onCompleted: CallController.setVideoSink(video.videoSink)
}
VideoOutput {
id: viewFinder
anchors.centerIn: parent
ToolBar {
id: toolbar
anchors.horizontalCenter: parent.horizontalCenter
anchors.bottom: parent.bottom
anchors.bottomMargin: Kirigami.Units.gridUnit * 8
z: 1000
background: Kirigami.ShadowedRectangle {
color: Kirigami.Theme.backgroundColor
radius: 5
shadow {
size: 15
yOffset: 3
color: Qt.rgba(0, 0, 0, 0.2)
}
border {
color: Kirigami.ColorUtils.tintWithAlpha(Kirigami.Theme.backgroundColor, Kirigami.Theme.textColor, 0.2)
width: 1
}
Kirigami.Theme.inherit: false
Kirigami.Theme.colorSet: Kirigami.Theme.Window
}
RowLayout {
ToolButton {
id: cameraButton
icon.name: "camera-on-symbolic"
text: i18nc("@action:button", "Enable Camera")
display: AbstractButton.IconOnly
checkable: true
onClicked: CallController.toggleCamera()
ToolTip.text: text
ToolTip.visible: hovered
ToolTip.delay: Kirigami.Units.toolTipDelay
}
}
}
}
LivekitVideoSink {
videoSink: viewFinder.videoSink
}
//Component.onCompleted: camera.start()
CaptureSession {
camera: Camera {
id: camera
}
imageCapture: ImageCapture {
id: imageCapture
}
videoOutput: viewFinder
}
}

View File

@@ -0,0 +1,45 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
import QtQuick
import QtQuick.Controls as QQC2
import QtQuick.Layouts
import org.kde.kirigami as Kirigami
import org.kde.kirigamiaddons.components as Components
import org.kde.neochat
Kirigami.Dialog {
id: root
title: i18nc("@title", "Incoming call")
width: Kirigami.Units.gridUnit * 16
height: Kirigami.Units.gridUnit * 8
standardButtons: QQC2.Dialog.NoButton
Connections {
target: MediaManager
function onCloseIncomingCallDialog() {
root.close()
}
}
contentItem: ColumnLayout {
Components.DoubleFloatingButton {
anchors.centerIn: parent
leadingAction: Kirigami.Action {
icon.name: "call-start"
text: i18nc("@action:button", "Accept Call")
tooltip: ""//text
}
trailingAction: Kirigami.Action {
icon.name: "call-stop"
text: i18nc("@action:button", "Decline Call")
tooltip: ""//text
}
}
}
}

View File

@@ -0,0 +1,36 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
import QtQuick
import QtQuick.Controls as QQC2
import org.kde.kirigami as Kirigami
import org.kde.neochat
Kirigami.ApplicationWindow {
id: root
title: i18nc("@title", "Livekit logs")
pageStack.initialPage: Kirigami.ScrollablePage {
title: i18nc("@title", "Livekit logs")
TableView {
id: messageList
width: root.width
model: LivekitLogModel
alternatingRows: true
delegate: QQC2.ItemDelegate {
id: messageDelegate
required property string message
width: parent.width
contentItem: QQC2.Label {
text: messageDelegate.message
wrapMode: QQC2.Label.Wrap
}
}
}
}
}

View File

@@ -4,6 +4,7 @@
import QtQuick
import QtQuick.Controls as QQC2
import QtMultimedia
import org.kde.kirigami as Kirigami
import org.kde.config as KConfig
@@ -30,6 +31,18 @@ Kirigami.ApplicationWindow {
}
}
Connections {
target: CallController
function onCallStarted() {
root.pageStack.pushDialogLayer(callPageComponent)
}
}
Component {
id: callPageComponent
CallPage {}
}
minimumWidth: Kirigami.Units.gridUnit * 20
minimumHeight: Kirigami.Units.gridUnit * 15
@@ -197,6 +210,7 @@ Kirigami.ApplicationWindow {
visible = true;
}
}
Connections {
target: NeoChatConfig
function onBlurChanged() {
@@ -347,4 +361,15 @@ Kirigami.ApplicationWindow {
initialized = true;
}
Connections {
target: MediaManager
function onShowIncomingCallDialog(): void {
incomingCallDialog.createObject(applicationWindow().overlay).open();
}
}
Component {
id: incomingCallDialog
IncomingCallDialog {}
}
}