Compare commits

...

5 Commits

Author SHA1 Message Date
Tobias Fella
88e0c38e5f Fixes 2025-03-13 21:58:07 +01:00
Joshua Goins
20a6b90904 Add command to forcibly end a call in the room 2025-03-13 21:32:35 +01:00
Joshua Goins
fcd7a320e7 Fix publishing video track 2025-03-13 21:32:35 +01:00
Joshua Goins
2f39e70b67 Update to latest LiveKit, fix up QML 2025-03-13 21:32:35 +01:00
Tobias Fella
ddc16a17d2 Calls! 2025-03-13 21:32:34 +01:00
40 changed files with 3399 additions and 7 deletions

View File

@@ -56,7 +56,7 @@ ecm_setup_version(${PROJECT_VERSION}
VERSION_HEADER ${CMAKE_CURRENT_BINARY_DIR}/neochat-version.h
)
find_package(Qt6 ${QT_MIN_VERSION} NO_MODULE COMPONENTS Core Quick Gui QuickControls2 Multimedia Svg WebView)
find_package(Qt6 ${QT_MIN_VERSION} NO_MODULE COMPONENTS Core Quick Gui QuickControls2 Multimedia Svg Protobuf WebView)
set_package_properties(Qt6 PROPERTIES
TYPE REQUIRED
PURPOSE "Basic application components"
@@ -115,6 +115,8 @@ set_package_properties(QuotientQt6 PROPERTIES
PURPOSE "Talk with matrix server"
)
find_package(LiveKit REQUIRED)
find_package(cmark)
set_package_properties(cmark PROPERTIES
TYPE REQUIRED

View File

@@ -51,6 +51,10 @@ is primarily aimed at Linux development.
For Windows and Android [Craft](https://invent.kde.org/packaging/craft) is the primary choice. There are guides for setting up
development environments for [Windows](https://community.kde.org/Get_Involved/development/Windows) and [Android](https://develop.kde.org/docs/packaging/android/building_applications/).
### Building with support for voice / video calls
[LiveKit](https://livekit.io) is needed for call support. Build the [Rust SDK](https://github.com/livekit/rust-sdks) and copy `liblivekit_ffi.so` to your usual library folder. Copy `livekit_ffi.h` to somewhere under your usual include folder. NeoChat should then automatically pick it up.
## Running
Just start the executable in your preferred way - either from the build directory or from the installed location.

10
cmake/FindLiveKit.cmake Normal file
View File

@@ -0,0 +1,10 @@
# SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
# SPDX-License-Identifier: BSD-2-Clause

# FindLiveKit
# -----------
# Locates the LiveKit Rust SDK FFI library (liblivekit_ffi) and its header
# (livekit_ffi.h), and exposes them as the imported target `LiveKit`.
#
# Fixed: the module previously set LiveKit_FOUND to True unconditionally, so
# find_package(LiveKit REQUIRED) would "succeed" even when the library was
# absent and the build would only fail later at link time. It also re-created
# the imported target on repeated inclusion and left expansions unquoted.

find_library(LIVEKIT_LIB NAMES livekit_ffi)
find_path(LIVEKIT_INCLUDE_DIR NAMES livekit_ffi.h)

include(FindPackageHandleStandardArgs)
# Sets LiveKit_FOUND correctly and produces a standard error message for
# REQUIRED when either variable is missing.
find_package_handle_standard_args(LiveKit
    REQUIRED_VARS LIVEKIT_LIB LIVEKIT_INCLUDE_DIR
)

if(LiveKit_FOUND AND NOT TARGET LiveKit)
    add_library(LiveKit UNKNOWN IMPORTED)
    set_target_properties(LiveKit PROPERTIES
        IMPORTED_LOCATION "${LIVEKIT_LIB}"
        INTERFACE_INCLUDE_DIRECTORIES "${LIVEKIT_INCLUDE_DIR}"
    )
endif()

mark_as_advanced(LIVEKIT_LIB LIVEKIT_INCLUDE_DIR)

View File

@@ -196,6 +196,15 @@ add_library(neochat STATIC
models/pinnedmessagemodel.h
models/commonroomsmodel.cpp
models/commonroomsmodel.h
events/callencryptionkeysevent.h
events/callmemberevent.h
events/callnotifyevent.h
calls/callcontroller.cpp
calls/callcontroller.h
livekitlogmodel.cpp
livekitlogmodel.h
events/callmemberevent.cpp
events/callmemberevent.h
)
set_source_files_properties(qml/OsmLocationPlugin.qml PROPERTIES
@@ -296,6 +305,9 @@ ecm_add_qml_module(neochat URI org.kde.neochat GENERATE_PLUGIN_SOURCE
qml/HoverLinkIndicator.qml
qml/AvatarNotification.qml
qml/ReasonDialog.qml
qml/LivekitLogViewer.qml
qml/CallPage.qml
qml/IncomingCallDialog.qml
SOURCES
messageattached.cpp
messageattached.h
@@ -407,7 +419,22 @@ else()
target_compile_definitions(neochat PUBLIC -DHAVE_X11=0)
endif()
target_include_directories(neochat PRIVATE ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/models ${CMAKE_CURRENT_SOURCE_DIR}/enums)
qt_add_protobuf(neochat
GENERATE_PACKAGE_SUBFOLDERS
PROTO_FILES
protocols/ffi.proto
protocols/room.proto
protocols/e2ee.proto
protocols/audio_frame.proto
protocols/video_frame.proto
protocols/handle.proto
protocols/participant.proto
protocols/stats.proto
protocols/track.proto
protocols/rpc.proto
)
target_include_directories(neochat PRIVATE ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/models ${CMAKE_CURRENT_SOURCE_DIR}/enums ${CMAKE_CURRENT_SOURCE_DIR}/calls)
target_link_libraries(neochat PRIVATE settingsplugin timelineplugin devtoolsplugin loginplugin chatbarplugin)
target_link_libraries(neochat PUBLIC
Qt::Core
@@ -417,6 +444,7 @@ target_link_libraries(neochat PUBLIC
Qt::Multimedia
Qt::Network
Qt::QuickControls2
Qt::Protobuf
KF6::I18n
KF6::Kirigami
KF6::Notifications
@@ -431,6 +459,7 @@ target_link_libraries(neochat PUBLIC
cmark::cmark
QCoro::Core
QCoro::Network
LiveKit
)
if (TARGET KF6::Crash)

0
src/calls/call.cpp Normal file
View File

74
src/calls/call.h Normal file
View File

@@ -0,0 +1,74 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later

// Fixed: this header had no include guard; double inclusion would redefine
// the classes.
#pragma once

#include <QAbstractListModel>
#include <QObject>
#include <QQmlEngine>

#include "neochatroom.h"

/**
 * A single participant in a call.
 *
 * NOTE(review): all member functions below sit in the class's implicit
 * private section and no implementations are visible in this change —
 * presumably a work-in-progress skeleton; confirm before use.
 */
class Participant : public QObject
{
    Q_OBJECT

    void setVolume(float volume);
    void muteLocally();
    void unmuteLocally();
    void ring(); // See MSC4075

    // TODO: if these are possible; check livekit api
    void muteGlobally();
    void forceDisableCamera();
    void forceDisableScreenShare();
    void setPermissions();
    void kick();
    void ban();

Q_SIGNALS:
    void muted();
    void unmuted();
    void cameraEnabled();
    void cameraDisabled();
    void screenShareEnabled();
    void screenShareDisabled();
};

/**
 * One active call in a room, exposed to QML (uncreatable from QML).
 */
class Call : public QObject
{
    Q_OBJECT
    QML_ELEMENT
    QML_UNCREATABLE("")

    Q_PROPERTY(bool cameraEnabled READ cameraEnabled WRITE setCameraEnabled NOTIFY cameraEnabledChanged)
    Q_PROPERTY(bool microphoneMuted READ microphoneMuted WRITE setMicrophoneMuted NOTIFY microphoneMutedChanged)
    Q_PROPERTY(bool screenshareEnabled READ screenshareEnabled NOTIFY screenshareEnabledChanged)
    Q_PROPERTY(NeoChatRoom *room READ room CONSTANT)

public:
    explicit Call(NeoChatRoom *room, QObject *parent = nullptr);

Q_SIGNALS:
    // NOTE(review): Participant derives from QObject and is non-copyable;
    // Qt convention for such signal parameters is a pointer. Passing by
    // const reference works for direct connections only — confirm intent
    // before these are connected across threads.
    void participantJoined(const Participant &participant);
    void participantLeft(const Participant &participant);

private:
    QList<Participant *> m_participants;
};

/**
 * List model exposing a call's participants (name, camera/screen-share
 * availability, mute state) to QML.
 */
class CallParticipantsModel : public QAbstractListModel
{
    Q_OBJECT

public:
    enum Roles {
        NameRoleRole,
        HasCameraRole,
        HasScreenShareRole,
        IsMutedRole,
    };
    Q_ENUM(Roles)
};

View File

@@ -0,0 +1,447 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "callcontroller.h"
#include <QAudioSink>
#include <QMediaDevices>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QProtobufSerializer>
#include <QVideoFrame>
#include <QVideoFrameFormat>
#include <QVideoSink>
#include <livekit_ffi.h>
#include <Quotient/csapi/openid.h>
#include "audio_frame.qpb.h"
#include "ffi.qpb.h"
#include "livekitlogmodel.h"
#include "neochatroom.h"
#include "track.qpb.h"
#include "video_frame.qpb.h"
using namespace livekit::proto;
using namespace Quotient;
extern "C" {
void livekit_ffi_initialize(void(ffiCallbackFn(const uint8_t *, size_t)), bool capture_logs, const char *, const char *);
}
// Entry point handed to the LiveKit FFI layer: receives a serialized FfiEvent
// protobuf and forwards it to the controller singleton.
// NOTE(review): the thread this is invoked on is not visible here — if the
// Rust SDK calls it off the GUI thread, handleEvent() touches Qt objects
// (video sinks, models) unsafely. Confirm against the LiveKit FFI contract.
void callback(const uint8_t *data, size_t length)
{
    // fromRawData avoids a copy; the buffer is only valid for this call, but
    // deserialize() consumes it immediately.
    auto byteArrayData = QByteArray::fromRawData((const char *)data, length);
    QProtobufSerializer serializer;
    FfiEvent event;
    event.deserialize(&serializer, byteArrayData);
    CallController::instance().handleEvent(std::move(event));
}

// Private constructor (Meyers singleton); performs one-time FFI setup.
CallController::CallController()
    : QObject()
{
    init();
}

// Registers our event callback with the LiveKit FFI layer.
// The `true` enables log capture; "test"/"1.0" are the SDK name/version
// reported to LiveKit.
void CallController::init()
{
    // qRegisterProtobufTypes();
    livekit_ffi_initialize(callback, true, "test", "1.0");
}
// Forwards captured LiveKit SDK log records: warnings and errors go to the
// Qt log, everything is appended to the in-app LivekitLogModel.
// NOTE(review): `const LogRecordRepeated &&` binds an rvalue but const-ness
// prevents moving from it — probably meant `const &` or non-const `&&`.
static void handleLog(const LogRecordRepeated &&logs)
{
    for (const auto &log : logs) {
        // LOG_ERROR < LOG_WARN in the proto enum, so <= catches both.
        if (log.level() <= LogLevelGadget::LogLevel::LOG_WARN) {
            qWarning() << log.message();
        }
    }
    LivekitLogModel::instance().addMessages(logs);
}
// Completion handler for an async ConnectRequest: validates that the async id
// still maps to the room we tried to join, then records the connected room
// and our local participant handle.
void CallController::handleConnect(ConnectCallback &&callback)
{
    qWarning() << "Connecting to" << callback.result().room().info().name() << "with id" << callback.asyncId();
    // Guard against stale/unknown callbacks: the room may have been destroyed
    // (QPointer null) or the callback may refer to a different room id.
    if (!m_connectingRooms.contains(callback.asyncId()) || !m_connectingRooms[callback.asyncId()]
        || m_connectingRooms[callback.asyncId()]->id() != callback.result().room().info().name()) {
        qWarning() << "Connecting to unexpected room";
        return;
    }
    m_connectingRooms.remove(callback.asyncId());
    // NOTE(review): m_rooms is keyed by the *connect* async id here, while
    // handleDispose() looks rooms up by the dispose callback's async id —
    // verify these ids are actually the same namespace.
    m_rooms[callback.asyncId()] = callback.result().room();
    localParticipant = callback.result().localParticipant().handle().id_proto();
    Q_EMIT connected();
}
// Completion handler for an async dispose: drops the corresponding room entry
// if we were tracking one under this async id, otherwise just logs.
void CallController::handleDispose(DisposeCallback &&callback)
{
    qWarning() << "Disposing" << callback.asyncId();
    if (m_rooms.contains(callback.asyncId())) {
        qWarning() << " room" << m_rooms[callback.asyncId()].info().name();
        m_rooms.erase(callback.asyncId());
    } else {
        qWarning() << " unknown object";
    }
}
// Dispatches a single LiveKit room event. Most branches only log for now;
// newly subscribed audio/video tracks additionally get an FFI stream opened
// so their frames start arriving via handleEvent().
void CallController::handleRoomEvent(livekit::proto::RoomEvent &&event)
{
    if (event.hasParticipantConnected()) {
        qWarning() << "Participant connected" << event.participantConnected().info().info().identity();
    } else if (event.hasParticipantDisconnected()) {
        // Fixed: this branch previously logged "Participant connected".
        qWarning() << "Participant disconnected" << event.participantDisconnected().participantIdentity();
    } else if (event.hasLocalTrackPublished()) {
        qWarning() << "Local track published";
        // Remember the SID so toggleCamera() can unpublish later.
        m_localVideoTrackSid = event.localTrackPublished().trackSid();
    } else if (event.hasLocalTrackUnpublished()) {
        qWarning() << "Local track unpublished";
    } else if (event.hasTrackPublished()) {
        qWarning() << "Track published";
    } else if (event.hasTrackUnpublished()) {
        qWarning() << "Track unpublished";
    } else if (event.hasTrackSubscribed()) {
        qWarning() << "Track subscribed";
        auto track = event.trackSubscribed().track();
        if (track.info().kind() == TrackKindGadget::TrackKind::KIND_AUDIO) {
            // Open an audio stream for the subscribed track via a synchronous
            // FFI round-trip.
            NewAudioStreamRequest audioStreamRequest;
            audioStreamRequest.setTrackHandle(track.handle().id_proto());
            FfiRequest request;
            request.setNewAudioStream(audioStreamRequest);
            QProtobufSerializer serializer;
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse newResponse;
            newResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
        } else if (track.info().kind() == TrackKindGadget::TrackKind::KIND_VIDEO) {
            // Same round-trip for video tracks.
            NewVideoStreamRequest videoStreamRequest;
            videoStreamRequest.setTrackHandle((track.handle().id_proto()));
            FfiRequest request;
            request.setNewVideoStream(videoStreamRequest);
            QProtobufSerializer serializer;
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse newResponse;
            newResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
        }
    } else if (event.hasTrackUnsubscribed()) {
        qWarning() << "Track unsubscribed";
    } else if (event.hasTrackSubscriptionFailed()) {
        qWarning() << "Track subscription failed";
    } else if (event.hasTrackMuted()) {
        qWarning() << "Track muted";
    } else if (event.hasTrackUnmuted()) {
        qWarning() << "Track unmuted";
    } else if (event.hasActiveSpeakersChanged()) {
        qWarning() << "Active speakers changed";
    } else if (event.hasRoomMetadataChanged()) {
        qWarning() << "room metadata changed";
    } else if (event.hasParticipantMetadataChanged()) {
        qWarning() << "participant metadata changed";
    } else if (event.hasParticipantNameChanged()) {
        qWarning() << "participant name changed";
    } else if (event.hasConnectionQualityChanged()) {
        qWarning() << "connection quality changed to" << event.connectionQualityChanged().quality();
    } else if (event.hasDataPacketReceived()) {
        qWarning() << "data received";
    } else if (event.hasConnectionStateChanged()) {
        qWarning() << "connection state changed";
    } else if (event.hasDisconnected()) {
        qWarning() << "disconnected";
    } else if (event.hasReconnecting()) {
        qWarning() << "reconnecting";
    } else if (event.hasReconnected()) {
        qWarning() << "Reconnected";
    } else if (event.hasE2eeStateChanged()) {
        qWarning() << "e2eeStateChanged";
    } else if (event.hasEos()) {
        qWarning() << "eos";
    } else {
        qWarning() << "Unknown room event";
    }
}
// Debug-only helper: dumps a byte buffer to disk for inspection.
// NOTE(review): hardcoded personal home directory — this must not ship;
// remove or route through QStandardPaths before merging.
void saveByteArray(const QByteArray &data, const QString &name)
{
    QFile file(u"/home/tobias/"_s + name);
    file.open(QFile::WriteOnly);
    file.write(data);
    file.close();
}
// Central FFI event dispatcher: routes logs, room events and media stream
// events coming back from the LiveKit Rust SDK. Called (indirectly) from the
// FFI callback registered in init().
void CallController::handleEvent(FfiEvent &&event)
{
    if (event.hasLogs()) {
        handleLog(std::move(event.logs().records()));
    } else if (event.hasRoomEvent()) {
        handleRoomEvent(std::move(event.roomEvent()));
    } else if (event.hasTrackEvent()) {
        qWarning() << "track event";
    } else if (event.hasVideoStreamEvent()) {
        qWarning() << "video stream event";
        auto video = event.videoStreamEvent();
        auto info = video.frameReceived().buffer().info();
        // Incoming frame is assumed I420/YUV420P: width*height luma plane plus
        // two quarter-size chroma planes => 1.5 bytes per pixel total.
        // NOTE(review): no stride handling — assumes tightly packed planes.
        QByteArray data((const char *)info.dataPtr(), info.width() * info.height() * 1.5);
        auto frame = QVideoFrame(QVideoFrameFormat(QSize(info.width(), info.height()), QVideoFrameFormat::Format_YUV420P));
        frame.map(QVideoFrame::WriteOnly);
        // Y plane = 2/3 of the buffer; U and V are 1/6 each.
        memcpy(frame.bits(0), data.constData(), data.size() / 3 * 2);
        memcpy(frame.bits(1), data.constData() + data.size() / 3 * 2, data.size() / 6);
        memcpy(frame.bits(2), data.constData() + data.size() / 3 * 2 + data.size() / 6, data.size() / 6);
        // NOTE(review): logs the full frame as base64 on every frame — debug
        // leftover, very expensive; remove before merging.
        qWarning() << frame.size() << data.toBase64();
        frame.unmap();
        // NOTE(review): m_sink is dereferenced without a null check — a frame
        // arriving before setVideoSink() is undefined behavior.
        m_sink->setVideoFrame(frame);
        // NOTE(review): freeing a buffer handed over by the Rust FFI with C++
        // `delete` mixes allocators — confirm the SDK's ownership contract;
        // there is likely a dedicated dispose call instead.
        delete (char *)info.dataPtr();
    } else if (event.hasAudioStreamEvent()) {
        return; // TODO remove — audio playback path below is disabled for now.
        // One-time lazy setup of the output sink and an FFI resampler.
        static bool initialized = false;
        if (!initialized) {
            initialized = true;
            QAudioFormat format;
            format.setSampleRate(48000);
            format.setChannelCount(2);
            format.setSampleFormat(QAudioFormat::Int16);
            QAudioDevice info(QMediaDevices::defaultAudioOutput());
            if (!info.isFormatSupported(format)) {
                qWarning() << "Audio format not supported";
                Q_ASSERT(false);
                return;
            }
            sink = new QAudioSink(format);
            audioData = sink->start();
            // Create a resampler on the FFI side for remixing to 48kHz stereo.
            QProtobufSerializer serializer;
            NewAudioResamplerRequest narr;
            FfiRequest request;
            request.setNewAudioResampler(narr);
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse newResponse;
            newResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            resampler = newResponse.newAudioResampler().resampler().handle().id_proto();
        }
        if (event.audioStreamEvent().hasFrameReceived()) {
            // Remix/resample the incoming frame to our output format, then
            // write the PCM data to the audio sink.
            FfiRequest request;
            RemixAndResampleRequest rarr;
            rarr.setBuffer(event.audioStreamEvent().frameReceived().frame().info());
            rarr.setNumChannels(2);
            rarr.setSampleRate(48000);
            rarr.setResamplerHandle(resampler);
            request = FfiRequest();
            request.setRemixAndResample(rarr);
            static QProtobufSerializer serializer;
            auto data = request.serialize(&serializer);
            const uint8_t *ret_data;
            size_t size;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse response;
            response.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            Q_ASSERT(response.hasRemixAndResample());
            auto info = response.remixAndResample().buffer().info();
            // 2 bytes per Int16 sample.
            auto bytes = info.numChannels() * info.samplesPerChannel() * 2;
            data = QByteArray::fromRawData((const char *)info.dataPtr(), bytes);
            audioData->write(data);
        }
    } else if (event.hasConnect()) {
        handleConnect(std::move(event.connect()));
    } else if (event.hasDisconnect()) {
        qWarning() << "disconnect";
    } else if (event.hasDispose()) {
        handleDispose(std::move(event.dispose()));
    } else if (event.hasPublishTrack()) {
        qWarning() << "publish track";
    } else if (event.hasUnpublishTrack()) {
        qWarning() << "unpublish track";
    } else if (event.hasPublishData()) {
        qWarning() << "publish data";
    } else if (event.hasCaptureAudioFrame()) {
        qWarning() << "audio frame";
    } else if (event.hasGetStats()) {
        qWarning() << "get stats";
    } else if (event.hasGetSessionStats()) {
        qWarning() << "get session stats";
    } else if (event.hasPanic()) {
        qWarning() << "panic";
    } else {
        qWarning() << event.messageField();
    }
}
// Reacts to an m.call.member state event: exchanges our Matrix identity for
// an OpenID token, asks the LiveKit SFU service advertised in the event for a
// room-scoped JWT, and kicks off the async FFI connect.
void CallController::handleCallMemberEvent(const Quotient::CallMemberEvent *event, NeoChatRoom *room)
{
    qWarning() << event->fullJson();
    Q_EMIT callStarted();
    const auto connection = room->connection();
    auto job = connection->callApi<RequestOpenIdTokenJob>(connection->userId());
    connect(job, &BaseJob::finished, this, [this, room, job, connection, event]() {
        // Legacy events carry no foci_preferred — nothing to connect to.
        // (Hoisted before the JSON build; previously checked afterwards.)
        if (!event->contentJson().contains("foci_preferred"_L1)) {
            return;
        }
        auto nam = new QNetworkAccessManager;
        auto json = QJsonDocument(QJsonObject{
                        {"room"_L1, room->id()},
                        {"openid_token"_L1,
                         QJsonObject{{"access_token"_L1, job->tokenData().accessToken},
                                     {"token_type"_L1, job->tokenData().tokenType},
                                     {"matrix_server_name"_L1, job->tokenData().matrixServerName}}},
                        {"device_id"_L1, connection->deviceId()},
                    })
                        .toJson();
        QNetworkRequest request(QUrl((event->contentJson()["foci_preferred"_L1].toArray()[0]["livekit_service_url"_L1].toString() + "/sfu/get"_L1)));
        request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json"_L1);
        auto reply = nam->post(request, json);
        connect(reply, &QNetworkReply::finished, this, [reply, nam, this, room]() {
            // Fixed: release the reply and its access manager; previously both
            // leaked on every call attempt.
            reply->deleteLater();
            nam->deleteLater();
            auto json = QJsonDocument::fromJson(reply->readAll()).object();
            // Build the FFI ConnectRequest from the JWT the service returned.
            FfiRequest message;
            ConnectRequest connectRequest;
            connectRequest.setUrl(json["url"_L1].toString());
            connectRequest.setToken(json["jwt"_L1].toString());
            message.setConnect(connectRequest);
            QProtobufSerializer serializer;
            auto data = message.serialize(&serializer);
            size_t size;
            const uint8_t *ret_data;
            livekit_ffi_request((const uint8_t *)data.data(), data.length(), &ret_data, &size);
            FfiResponse connectResponse;
            connectResponse.deserialize(&serializer, QByteArray::fromRawData((const char *)ret_data, size));
            if (!connectResponse.hasConnect()) {
                qWarning() << "connectResponse has unexpected content" << connectResponse.messageField();
                return;
            }
            // Remember which room this async connect belongs to; resolved in
            // handleConnect().
            m_connectingRooms[connectResponse.connect().asyncId()] = room;
        });
    });
}
// Synchronous FFI round-trip helper: serialize an FfiRequest, hand it to the
// LiveKit FFI layer, and deserialize the FfiResponse it hands back.
FfiResponse request(FfiRequest &&request)
{
    static QProtobufSerializer serializer;

    const QByteArray payload = request.serialize(&serializer);

    const uint8_t *replyPtr = nullptr;
    size_t replyLen = 0;
    livekit_ffi_request((const uint8_t *)payload.constData(), payload.size(), &replyPtr, &replyLen);

    FfiResponse reply;
    reply.deserialize(&serializer, QByteArray::fromRawData((const char *)replyPtr, replyLen));
    return reply;
}
// Hooks a camera QVideoSink up to LiveKit: on the first frame it lazily
// creates an FFI video source + local track and publishes it; every frame is
// then converted to RGB24 and pushed to the FFI capture API.
void CallController::setCameraVideoSink(QVideoSink *videoSink)
{
    m_cameraVideoSink = videoSink;
    connect(videoSink, &QVideoSink::videoFrameChanged, this, [videoSink, this]() {
        static bool initialized = false;
        // 100000 is the "not connected yet" sentinel for localParticipant.
        if (localParticipant == 100000) {
            return; // TODO make less shitty
        }
        // NOTE(review): function-static state means only ONE camera sink can
        // ever be set up per process; a second call reuses the old handle.
        static QtProtobuf::uint64 handle;
        if (!initialized) {
            initialized = true;
            // Create a native video source sized to the sink's current frame.
            NewVideoSourceRequest newVideoSourceRequest;
            VideoSourceResolution resolution;
            resolution.setHeight(videoSink->videoSize().height());
            resolution.setWidth(videoSink->videoSize().width());
            newVideoSourceRequest.setResolution(resolution);
            newVideoSourceRequest.setType(VideoSourceTypeGadget::VideoSourceType::VIDEO_SOURCE_NATIVE);
            FfiRequest ffiRequest;
            ffiRequest.setNewVideoSource(newVideoSourceRequest);
            auto response = request(std::move(ffiRequest));
            handle = response.newVideoSource().source().handle().id_proto();
            m_localVideoTrackHandle = handle;
            // Create a local track backed by that source and publish it.
            CreateVideoTrackRequest createVideoTrackRequest;
            createVideoTrackRequest.setName("Camera"_L1);
            createVideoTrackRequest.setSourceHandle(handle);
            FfiRequest request;
            request.setCreateVideoTrack(createVideoTrackRequest);
            auto createResponse = ::request(std::move(request));
            m_localVideoTrackId = createResponse.createVideoTrack().track().handle().id_proto();
            publishTrack(m_localVideoTrackId);
        }
        // Per-frame path: convert to packed RGB24 and hand the raw pointer to
        // the FFI layer.
        // NOTE(review): dataPtr is the address of a stack-local QImage's bits;
        // this is only safe if the FFI call copies the data synchronously —
        // confirm against the LiveKit capture contract.
        auto image = videoSink->videoFrame().toImage();
        image.convertTo(QImage::Format_RGB888);
        CaptureVideoFrameRequest request;
        VideoBufferInfo buffer;
        buffer.setType(VideoBufferTypeGadget::VideoBufferType::RGB24);
        buffer.setWidth(image.width());
        buffer.setHeight(image.height());
        buffer.setDataPtr((QtProtobuf::uint64)image.bits());
        buffer.setStride(image.bytesPerLine());
        QList<VideoBufferInfo_QtProtobufNested::ComponentInfo> components;
        VideoBufferInfo_QtProtobufNested::ComponentInfo componentInfo;
        componentInfo.setStride(image.bytesPerLine());
        componentInfo.setDataPtr((QtProtobuf::uint64)image.bits());
        componentInfo.setSize(image.sizeInBytes());
        components += componentInfo;
        buffer.setComponents(components);
        request.setBuffer(buffer);
        request.setSourceHandle(handle);
        // LiveKit expects microseconds.
        request.setTimestampUs(QDateTime::currentMSecsSinceEpoch() * 1000);
        request.setRotation(VideoRotationGadget::VideoRotation::VIDEO_ROTATION_0);
        FfiRequest ffiRequest;
        ffiRequest.setCaptureVideoFrame(request);
        auto response = ::request(std::move(ffiRequest));
    });
}
// Stores the sink that remote video frames are rendered into.
// NOTE(review): on a non-QVideoSink argument, dynamic_cast yields nullptr and
// handleEvent() will later dereference it — consider rejecting loudly here.
void CallController::setVideoSink(QObject *sink)
{
    m_sink = dynamic_cast<QVideoSink *>(sink);
}

// QML property setter: remembers the sink and registers it with the
// controller as the remote-video output.
void LivekitVideoSink::setVideoSink(QVideoSink *videoSink)
{
    m_videoSink = videoSink;
    CallController::instance().setVideoSink(videoSink);
    Q_EMIT videoSinkChanged();
}

// QML property getter.
QVideoSink *LivekitVideoSink::videoSink() const
{
    return m_videoSink;
}
// Toggles the local camera track: publishes the previously created track when
// no SID is known, otherwise unpublishes the currently published one.
// m_localVideoTrackSid is set asynchronously by handleRoomEvent() on
// LocalTrackPublished and cleared here on unpublish.
void CallController::toggleCamera()
{
    if (m_localVideoTrackSid.isEmpty()) {
        publishTrack(m_localVideoTrackId);
    } else {
        FfiRequest request;
        UnpublishTrackRequest unpublishRequest;
        unpublishRequest.setLocalParticipantHandle(localParticipant);
        unpublishRequest.setTrackSid(m_localVideoTrackSid);
        request.setUnpublishTrack(unpublishRequest);
        auto response = ::request(std::move(request));
        m_localVideoTrackSid = QString();
    }
}
// Publishes a previously created local track (by FFI handle id) as a VP8
// camera source on behalf of the local participant.
void CallController::publishTrack(uint64_t id)
{
    PublishTrackRequest publishTrackRequest;
    publishTrackRequest.setTrackHandle(id);
    publishTrackRequest.setLocalParticipantHandle(localParticipant);
    TrackPublishOptions options;
    options.setSource(TrackSourceGadget::TrackSource::SOURCE_CAMERA);
    options.setVideoCodec(VideoCodecGadget::VideoCodec::VP8);
    publishTrackRequest.setOptions(options);
    auto request = FfiRequest();
    request.setPublishTrack(publishTrackRequest);
    // Result is currently ignored; publication success is observed via the
    // LocalTrackPublished room event instead.
    auto publishResponse = ::request(std::move(request));
}

View File

@@ -0,0 +1,97 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#pragma once
#include <QObject>
#include <QQmlEngine>
#include <QVideoSink>
#include "events/callmemberevent.h"
#include "room.qpb.h"
namespace livekit::proto
{
class FfiEvent;
class ConnectCallback;
class DisposeCallback;
class RoomEvent;
}
class LivekitMediaPlayer;
class NeoChatRoom;
class QAudioSink;
class CallController : public QObject
{
Q_OBJECT
QML_ELEMENT
QML_SINGLETON
public:
static CallController &instance()
{
static CallController _instance;
return _instance;
}
static CallController *create(QQmlEngine *, QJSEngine *)
{
QQmlEngine::setObjectOwnership(&instance(), QQmlEngine::CppOwnership);
return &instance();
}
void handleCallMemberEvent(const Quotient::CallMemberEvent *event, NeoChatRoom *room);
// Internal. Do not use.
void handleEvent(livekit::proto::FfiEvent &&event);
Q_INVOKABLE void setVideoSink(QObject *sink);
Q_INVOKABLE void setCameraVideoSink(QVideoSink *videoSink);
Q_INVOKABLE void toggleCamera();
Q_SIGNALS:
void callStarted();
void connected();
private:
CallController();
void init();
QMap<uint64_t, QPointer<NeoChatRoom>> m_connectingRooms;
std::map<uint64_t, livekit::proto::OwnedRoom> m_rooms;
void handleConnect(livekit::proto::ConnectCallback &&callback);
void handleDispose(livekit::proto::DisposeCallback &&callback);
void handleRoomEvent(livekit::proto::RoomEvent &&event);
void publishTrack(uint64_t id);
QIODevice *audioData = nullptr;
QAudioSink *sink;
QVideoSink *m_sink;
uint64_t resampler;
QVideoSink *m_cameraVideoSink = nullptr;
uint64_t localParticipant = 100000;
QString m_localVideoTrackSid;
uint64_t m_localVideoTrackId;
uint64_t m_localVideoTrackHandle;
};
class LivekitVideoSink : public QObject
{
Q_OBJECT
QML_ELEMENT
public:
Q_PROPERTY(QVideoSink *videoSink READ videoSink WRITE setVideoSink NOTIFY videoSinkChanged REQUIRED)
using QObject::QObject;
void setVideoSink(QVideoSink *videoSink);
QVideoSink *videoSink() const;
Q_SIGNALS:
void videoSinkChanged();
private:
QVideoSink *m_videoSink = nullptr;
};

View File

View File

View File

0
src/calls/participant.h Normal file
View File

View File

@@ -45,6 +45,8 @@ bool testMode = false;
using namespace Quotient;
Controller::Controller(QObject *parent)
: QObject(parent)
{
@@ -132,6 +134,7 @@ Controller::Controller(QObject *parent)
m_endpoint = connector->endpoint();
#endif
}
Controller &Controller::instance()

View File

@@ -7,5 +7,5 @@
using namespace Qt::StringLiterals;
QMultiHash<QString, QVariant> EmojiTones::_tones = {
#include "emojitones_data.h"
//#include "emojitones_data.h"
};

View File

@@ -0,0 +1,20 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later

#pragma once

#include <Quotient/events/roomevent.h>

namespace Quotient
{
/**
 * Room event carrying per-sender media encryption keys for an encrypted
 * Element Call session (event type "io.element.call.encryption_keys").
 * Currently only registered for deserialization; content is accessed via the
 * raw JSON.
 */
class CallEncryptionKeysEvent : public RoomEvent
{
public:
    QUO_EVENT(CallEncryptionKeysEvent, "io.element.call.encryption_keys");
    explicit CallEncryptionKeysEvent(const QJsonObject &obj)
        : RoomEvent(obj)
    {
    }
};
}

View File

@@ -0,0 +1,38 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "callmemberevent.h"
#include <QString>
using namespace Quotient;
using namespace Qt::Literals::StringLiterals;
// Parses the "memberships" array of an m.call.member state event into typed
// CallMembership structs, including each membership's active foci.
CallMemberEventContent::CallMemberEventContent(const QJsonObject &json)
{
    for (const auto &membership : json["memberships"_L1].toArray()) {
        QList<Focus> foci;
        for (const auto &focus : membership["foci_active"_L1].toArray()) {
            foci.append(Focus{
                .livekitAlias = focus["livekit_alias"_L1].toString(),
                .livekitServiceUrl = focus["livekit_service_url"_L1].toString(),
                // Fixed: the focus kind lives under the "type" key (value
                // "livekit"); the previous code read a non-existent "livekit"
                // key and always produced an empty string.
                .type = focus["type"_L1].toString(),
            });
        }
        memberships.append(CallMembership{
            .application = membership["application"_L1].toString(),
            .callId = membership["call_id"_L1].toString(),
            .deviceId = membership["device_id"_L1].toString(),
            .expires = membership["expires"_L1].toInt(),
            // Fixed: the absolute expiry timestamp is "expires_ts"; the
            // previous code re-read the relative "expires" duration into it.
            .expiresTs = membership["expires_ts"_L1].toVariant().value<uint64_t>(),
            .fociActive = foci,
            .membershipId = membership["membershipID"_L1].toString(),
            .scope = membership["scope"_L1].toString(),
        });
    }
}
// Serialization back to event JSON is not implemented yet — NeoChat currently
// only consumes these events. TODO(review): implement before sending our own
// m.call.member state, otherwise published state would be empty.
QJsonObject CallMemberEventContent::toJson() const
{
    return {};
}

View File

@@ -0,0 +1,59 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later
#pragma once
#include <Quotient/events/stateevent.h>
namespace Quotient
{
struct Focus {
QString livekitAlias;
QString livekitServiceUrl;
QString type;
};
struct CallMembership {
QString application;
QString callId;
QString deviceId;
int expires;
uint64_t expiresTs;
QList<Focus> fociActive;
QString membershipId;
QString scope;
};
class CallMemberEventContent
{
public:
explicit CallMemberEventContent(const QJsonObject &json);
QJsonObject toJson() const;
QList<CallMembership> memberships;
};
/**
* @class CallMemberEvent
*
* Class to define a call member event.
*
* @sa Quotient::StateEvent
*/
class CallMemberEvent : public KeyedStateEventBase<CallMemberEvent, CallMemberEventContent>
{
public:
QUO_EVENT(CallMemberEvent, "org.matrix.msc3401.call.member")
explicit CallMemberEvent(const QJsonObject &obj)
: KeyedStateEventBase(obj)
{
}
QJsonArray memberships() const
{
return contentJson()[u"memberships"_s].toArray();
}
};
}

View File

@@ -0,0 +1,16 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.1-or-later
#pragma once
#include <Quotient/events/roomevent.h>
namespace Quotient
{
class CallNotifyEvent : public RoomEvent
{
public:
QUO_EVENT(CallNotifyEvent, "org.matrix.msc4075.call.notify");
explicit CallNotifyEvent(const QJsonObject &obj);
};

39
src/livekitlogmodel.cpp Normal file
View File

@@ -0,0 +1,39 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "livekitlogmodel.h"
using namespace livekit::proto;
// Returns the log message text for MessageRole; all other roles are empty.
QVariant LivekitLogModel::data(const QModelIndex &index, int role) const
{
    const auto &message = m_messages[index.row()];
    if (role == MessageRole) {
        return message.message();
    }
    return {};
}

// Number of stored log records; this model is flat, so the parent is unused.
int LivekitLogModel::rowCount(const QModelIndex &parent) const
{
    Q_UNUSED(parent);
    return m_messages.size();
}

// Role-name mapping for QML delegates.
QHash<int, QByteArray> LivekitLogModel::roleNames() const
{
    return {
        {MessageRole, "message"},
    };
}
// Appends a batch of LiveKit log records to the model, one row at a time.
void LivekitLogModel::addMessages(livekit::proto::LogRecordRepeated messages)
{
    for (const auto &message : messages) {
        // if (message.level() < 3) {
        const int row = m_messages.size();
        // Fixed off-by-one: inserting a single row means first == last == row.
        // The previous call passed (row, row + 1), announcing two new rows and
        // corrupting attached views' row accounting.
        beginInsertRows({}, row, row);
        m_messages += message;
        endInsertRows();
        // }
    }
}

61
src/livekitlogmodel.h Normal file
View File

@@ -0,0 +1,61 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later

#pragma once

#include <QAbstractListModel>
#include <QQmlEngine>
#include <QList>

#include "ffi.qpb.h"

/**
 * Singleton list model collecting log records captured from the LiveKit SDK,
 * for display in the in-app log viewer (LivekitLogViewer.qml).
 */
class LivekitLogModel : public QAbstractListModel
{
    Q_OBJECT
    QML_ELEMENT
    QML_SINGLETON

public:
    static LivekitLogModel &instance() {
        static LivekitLogModel _instance;
        return _instance;
    }
    static LivekitLogModel *create(QQmlEngine *, QJSEngine *) {
        QQmlEngine::setObjectOwnership(&instance(), QQmlEngine::CppOwnership);
        return &instance();
    }

    /**
     * @brief Defines the model roles.
     */
    enum Roles {
        MessageRole = Qt::DisplayRole,
    };

    /**
     * @brief Get the given role value at the given index.
     *
     * @sa QAbstractItemModel::data
     */
    [[nodiscard]] QVariant data(const QModelIndex &index, int role) const override;

    /**
     * @brief Number of rows in the model.
     *
     * @sa QAbstractItemModel::rowCount
     */
    [[nodiscard]] int rowCount(const QModelIndex &parent = QModelIndex()) const override;

    /**
     * @brief Returns a mapping from Role enum values to role names.
     *
     * @sa Roles, QAbstractItemModel::roleNames()
     */
    [[nodiscard]] QHash<int, QByteArray> roleNames() const override;

    // Appends a batch of captured SDK log records.
    void addMessages(livekit::proto::LogRecordRepeated messages);

private:
    livekit::proto::LogRecordRepeated m_messages;
    LivekitLogModel() = default;
};

View File

@@ -1,11 +1,141 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
#include "mediamanager.h"
#include <QDirIterator>
#include <QMimeDatabase>
#include <Quotient/qt_connection_util.h>
#include "events/callmemberevent.h"
#include "neochatroom.h"
using namespace Qt::Literals::StringLiterals;
using namespace Quotient;
// Broadcasts that some media player started playing; other players listen to
// this signal and pause themselves so only one plays at a time.
void MediaManager::startPlayback()
{
    Q_EMIT playbackStarted();
}
#include "moc_mediamanager.cpp"
// Decides whether an incoming call notify event should make the client ring
// (MSC4075 semantics): checks application type, mentions, notify_type, room
// mute state, whether we're already ringing or already in the call, and the
// event's age; only then starts the actual ring.
void MediaManager::ring(const QJsonObject &json, NeoChatRoom *room)
{
    qWarning() << "start check ring";
    // todo: check sender != us
    if (json["content"_L1]["application"_L1].toString() != "m.call"_L1) {
        qWarning() << "not m.call";
        return;
    }
    qWarning() << json;
    // Ring only if we're targeted: either the whole room is mentioned (and
    // the sender isn't us), or our user id is in the explicit mention list.
    // NOTE(review): the `|| sender == us` arm forces the per-user check for
    // our own events, which then fails unless we mention ourselves — verify
    // this is the intended way to suppress self-ring.
    if (!json["content"_L1]["m.mentions"_L1]["room"_L1].toBool() || json[u"sender"_s].toString() == room->connection()->userId()) {
        bool mentioned = false;
        for (const auto &user : json["content"_L1]["m.mentions"_L1]["user_ids"_L1].toArray()) {
            if (user.toString() == room->connection()->userId()) {
                mentioned = true;
                break;
            }
        }
        if (!mentioned) {
            qWarning() << "not mentioned";
            return;
        }
    }
    if (json["content"_L1]["notify_type"_L1].toString() != "ring"_L1) {
        qWarning() << "not ring";
        return;
    }
    if (room->pushNotificationState() == PushNotificationState::Mute) {
        qWarning() << "mute";
        return;
    }
    if (isRinging()) {
        qWarning() << "already ringing";
        return;
    }
    // Don't ring if our own m.call.member state says we already have an
    // active m.call membership in this room.
    // NOTE(review): `call_id().isEmpty()` matches the MSC3401 "room call"
    // convention (empty call_id) — confirm this is the intended check.
    if (const auto &event = room->currentState().get<CallMemberEvent>(room->connection()->userId())) {
        if (event) {
            auto memberships = event->contentJson()["memberships"_L1].toArray();
            for (const auto &m : memberships) {
                const auto &membership = m.toObject();
                if (membership["application"_L1] == "m.call"_L1 && membership["call_id"_L1].toString().isEmpty()) {
                    qWarning() << "already in a call";
                    return;
                }
            }
        }
    }
    // Stop ringing automatically once our own membership appears (we joined
    // the call from another flow). connectUntil disconnects when the functor
    // returns true.
    connectUntil(room, &NeoChatRoom::changed, this, [this, room]() {
        if (const auto &event = room->currentState().get<CallMemberEvent>(room->connection()->userId())) {
            if (event) {
                auto memberships = event->contentJson()["memberships"_L1].toArray();
                for (const auto &m : memberships) {
                    const auto &membership = m.toObject();
                    if (membership["application"_L1] == "m.call"_L1 && membership["call_id"_L1].toString().isEmpty()) {
                        qWarning() << "stopping";
                        stopRinging();
                        return true;
                    }
                }
            }
        }
        return false;
    });
    // Don't ring for stale events (e.g. received on initial sync).
    if (json["unsigned"_L1]["age"_L1].toInt() > 10000) {
        qWarning() << "too old";
        return;
    }
    ringUnchecked();
}
void MediaManager::ringUnchecked()
{
qWarning() << "ring";
static QString path;
if (path.isEmpty()) {
for (const auto &dir : QString::fromUtf8(qgetenv("XDG_DATA_DIRS")).split(u':')) {
if (QFileInfo(dir + QStringLiteral("/sounds/freedesktop/stereo/phone-incoming-call.oga")).exists()) {
path = dir + QStringLiteral("/sounds/freedesktop/stereo/phone-incoming-call.oga");
break;
}
}
}
if (path.isEmpty()) {
return;
}
m_player->setSource(QUrl::fromLocalFile(path));
m_player->play();
Q_EMIT showIncomingCallDialog();
}
/**
 * Sets up the looping ring player: when playback reaches the end of the
 * sound (StoppedState), a one-second single-shot timer restarts it.
 */
MediaManager::MediaManager(QObject *parent)
    : QObject(parent)
    // Parent the helpers to this object so they are destroyed with it;
    // they were previously allocated without a parent and leaked.
    , m_player(new QMediaPlayer(this))
    , m_output(new QAudioOutput(this))
    , m_timer(new QTimer(this))
{
    m_player->setAudioOutput(m_output);
    m_timer->setInterval(1000);
    m_timer->setSingleShot(true);
    connect(m_timer, &QTimer::timeout, this, [this]() {
        m_player->play();
    });
    connect(m_player, &QMediaPlayer::playbackStateChanged, this, [this]() {
        // The sound finishing naturally yields StoppedState; schedule a
        // replay. stopRinging() pauses instead, which does not re-arm this.
        if (m_player->playbackState() == QMediaPlayer::StoppedState) {
            m_timer->start();
        }
    });
}
/**
 * Whether an incoming-call ring is currently active.
 * NOTE(review): m_ringing is cleared in stopRinging() but nothing in this
 * file sets it to true — confirm ringing start actually updates it,
 * otherwise this always returns false.
 */
bool MediaManager::isRinging() const
{
    return m_ringing;
}
void MediaManager::stopRinging()
{
m_ringing = false;
m_player->pause();
m_timer->stop();
//Q_EMIT stopRinging();
}

View File

@@ -3,8 +3,13 @@
#pragma once
#include <QAudioOutput>
#include <QMediaPlayer>
#include <QObject>
#include <QQmlEngine>
#include <QTimer>
class NeoChatRoom;
/**
* @class MediaManager
@@ -34,9 +39,29 @@ public:
*/
Q_INVOKABLE void startPlayback();
/**
* Starts ringing if the criteria (see MSC / spec) are met.
*/
void ring(const QJsonObject &json, NeoChatRoom *room);
bool isRinging() const;
Q_SIGNALS:
/**
* @brief Emitted when any media player starts playing. Other objects should stop / pause playback.
*/
void playbackStarted();
void showIncomingCallDialog();
void closeIncomingCallDialog();
private:
void ringUnchecked();
void stopRinging();
QMediaPlayer *m_player;
QAudioOutput *m_output;
QTimer *m_timer;
bool m_ringing = false;
explicit MediaManager(QObject *parent = nullptr);
};

View File

@@ -505,6 +505,19 @@ QList<ActionsModel::Action> actions{
kli18n("<user id> [<reason>]"),
kli18n("Removes the user from the room"),
},
// Forcibly ends a MatrixRTC call by clearing every
// org.matrix.msc3401.call.member state event in the room (setting each
// event's content to {} removes that membership).
Action{
    QStringLiteral("endcall"),
    [](const QString &text, NeoChatRoom *room, ChatBarCache *) {
        auto events = room->currentState().eventsOfType(QStringLiteral("org.matrix.msc3401.call.member"));
        for (auto event : events) {
            room->setRoomState(QStringLiteral("org.matrix.msc3401.call.member"), event->stateKey(), {});
        }
        return QString();
    },
    std::nullopt,
    kli18n(""), // NOTE(review): kli18n("") extracts an empty msgid and triggers gettext warnings — check how other argument-less actions spell "no parameters"
    kli18n("Forcibly end the call in this room"),
},
};
int ActionsModel::rowCount(const QModelIndex &parent) const

View File

@@ -20,7 +20,7 @@ EmojiModel::EmojiModel(QObject *parent)
, m_configGroup(KConfigGroup(m_config, u"Editor"_s))
{
if (_emojis.isEmpty()) {
#include "emojis.h"
//#include "emojis.h"
}
}

View File

@@ -8,6 +8,7 @@
#include <QMediaPlayer>
#include <QMimeDatabase>
#include <QTemporaryFile>
#include <QProtobufSerializer>
#include <Quotient/events/eventcontent.h>
#include <Quotient/events/eventrelation.h>
@@ -42,6 +43,7 @@
#include "eventhandler.h"
#include "events/pollevent.h"
#include "filetransferpseudojob.h"
#include "mediamanager.h"
#include "neochatconfig.h"
#include "neochatconnection.h"
#include "neochatroommember.h"
@@ -57,6 +59,10 @@
#include <KJobTrackerInterface>
#include <KLocalizedString>
#include "calls/callcontroller.h"
#include "events/callencryptionkeysevent.h"
#include "events/callmemberevent.h"
using namespace Quotient;
NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinState)
@@ -160,6 +166,26 @@ NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinS
const auto neochatconnection = static_cast<NeoChatConnection *>(connection);
Q_ASSERT(neochatconnection);
connect(neochatconnection, &NeoChatConnection::globalUrlPreviewEnabledChanged, this, &NeoChatRoom::urlPreviewEnabledChanged);
connect(this, &Room::aboutToAddNewMessages, this, [this](const auto &messages) {
for (const auto &message : messages) {
if (const auto &memberEvent = eventCast<const CallMemberEvent>(message.get())) {
CallController::instance().handleCallMemberEvent(memberEvent, this);
}
if (const auto &encryptionEvent = eventCast<const CallEncryptionKeysEvent>(message.get())) {
qWarning() << encryptionEvent->fullJson();
Q_ASSERT(false);
}
}
});
// connect(this, &NeoChatRoom::aboutToAddNewMessages, this, [this](const auto &events) {
// for (const auto &event : events) {
// qWarning() << event->fullJson();
// if (event->matrixType() == "org.matrix.msc4075.call.notify"_ls) {
// MediaManager::instance().ring(event->fullJson(), this);
// }
// }
// });
}
bool NeoChatRoom::visible() const

View File

@@ -0,0 +1,251 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields,
// as that changes the generated code and breaks upstream diffing.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "handle.proto";
import "track.proto";
// Create a new AudioStream
// AudioStream is used to receive audio frames from a track
message NewAudioStreamRequest {
required uint64 track_handle = 1;
required AudioStreamType type = 2;
optional uint32 sample_rate = 3;
optional uint32 num_channels = 4;
}
message NewAudioStreamResponse { required OwnedAudioStream stream = 1; }
message AudioStreamFromParticipantRequest {
required uint64 participant_handle = 1;
required AudioStreamType type = 2;
optional TrackSource track_source = 3;
optional uint32 sample_rate = 5;
optional uint32 num_channels = 6;
}
message AudioStreamFromParticipantResponse { required OwnedAudioStream stream = 1; }
// Create a new AudioSource
message NewAudioSourceRequest {
required AudioSourceType type = 1;
optional AudioSourceOptions options = 2;
required uint32 sample_rate = 3;
required uint32 num_channels = 4;
optional uint32 queue_size_ms = 5;
}
message NewAudioSourceResponse { required OwnedAudioSource source = 1; }
// Push a frame to an AudioSource
// The data provided must be available as long as the client receive the callback.
message CaptureAudioFrameRequest {
required uint64 source_handle = 1;
required AudioFrameBufferInfo buffer = 2;
}
message CaptureAudioFrameResponse {
required uint64 async_id = 1;
}
message CaptureAudioFrameCallback {
required uint64 async_id = 1;
optional string error = 2;
}
message ClearAudioBufferRequest {
required uint64 source_handle = 1;
}
message ClearAudioBufferResponse {}
// Create a new AudioResampler
message NewAudioResamplerRequest {}
message NewAudioResamplerResponse {
required OwnedAudioResampler resampler = 1;
}
// Remix and resample an audio frame
message RemixAndResampleRequest {
required uint64 resampler_handle = 1;
required AudioFrameBufferInfo buffer = 2;
required uint32 num_channels = 3;
required uint32 sample_rate = 4;
}
message RemixAndResampleResponse {
required OwnedAudioFrameBuffer buffer = 1;
}
// New resampler using SoX (much better quality)
message NewSoxResamplerRequest {
required double input_rate = 1;
required double output_rate = 2;
required uint32 num_channels = 3;
required SoxResamplerDataType input_data_type = 4;
required SoxResamplerDataType output_data_type = 5;
required SoxQualityRecipe quality_recipe = 6;
optional uint32 flags = 7;
}
message NewSoxResamplerResponse {
oneof message {
OwnedSoxResampler resampler = 1;
string error = 2;
}
}
message PushSoxResamplerRequest {
required uint64 resampler_handle = 1;
required uint64 data_ptr = 2; // *const i16
required uint32 size = 3; // in bytes
}
message PushSoxResamplerResponse {
required uint64 output_ptr = 1; // *const i16 (could be null)
required uint32 size = 2; // in bytes
optional string error = 3;
}
message FlushSoxResamplerRequest {
required uint64 resampler_handle = 1;
}
message FlushSoxResamplerResponse {
required uint64 output_ptr = 1; // *const i16 (could be null)
required uint32 size = 2; // in bytes
optional string error = 3;
}
enum SoxResamplerDataType {
// TODO(theomonnom): support other datatypes (shouldn't really be needed)
SOXR_DATATYPE_INT16I = 0;
SOXR_DATATYPE_INT16S = 1;
}
enum SoxQualityRecipe {
SOXR_QUALITY_QUICK = 0;
SOXR_QUALITY_LOW = 1;
SOXR_QUALITY_MEDIUM = 2;
SOXR_QUALITY_HIGH = 3;
SOXR_QUALITY_VERYHIGH = 4;
}
enum SoxFlagBits {
SOXR_ROLLOFF_SMALL = 0; // 1 << 0
SOXR_ROLLOFF_MEDIUM = 1; // 1 << 1
SOXR_ROLLOFF_NONE = 2; // 1 << 2
SOXR_HIGH_PREC_CLOCK = 3; // 1 << 3
SOXR_DOUBLE_PRECISION = 4; // 1 << 4
SOXR_VR = 5; // 1 << 5
}
//
// AudioFrame buffer
//
message AudioFrameBufferInfo {
required uint64 data_ptr = 1; // *const i16
required uint32 num_channels = 2;
required uint32 sample_rate = 3;
required uint32 samples_per_channel = 4;
}
message OwnedAudioFrameBuffer {
required FfiOwnedHandle handle = 1;
required AudioFrameBufferInfo info = 2;
}
//
// AudioStream
//
enum AudioStreamType {
AUDIO_STREAM_NATIVE = 0;
AUDIO_STREAM_HTML = 1;
}
message AudioStreamInfo {
required AudioStreamType type = 1;
}
message OwnedAudioStream {
required FfiOwnedHandle handle = 1;
required AudioStreamInfo info = 2;
}
message AudioStreamEvent {
required uint64 stream_handle = 1;
oneof message {
AudioFrameReceived frame_received = 2;
AudioStreamEOS eos = 3;
}
}
message AudioFrameReceived {
required OwnedAudioFrameBuffer frame = 1;
}
message AudioStreamEOS {}
//
// AudioSource
//
message AudioSourceOptions {
required bool echo_cancellation = 1;
required bool noise_suppression = 2;
required bool auto_gain_control = 3;
}
enum AudioSourceType {
AUDIO_SOURCE_NATIVE = 0;
}
message AudioSourceInfo {
required AudioSourceType type = 2;
}
message OwnedAudioSource {
required FfiOwnedHandle handle = 1;
required AudioSourceInfo info = 2;
}
//
// AudioResampler
//
message AudioResamplerInfo { }
message OwnedAudioResampler {
required FfiOwnedHandle handle = 1;
required AudioResamplerInfo info = 2;
}
//
// Sox AudioResampler
//
message SoxResamplerInfo {}
message OwnedSoxResampler {
required FfiOwnedHandle handle = 1;
required SoxResamplerInfo info = 2;
}

154
src/protocols/e2ee.proto Normal file
View File

@@ -0,0 +1,154 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// TODO(theomonnom): Should FrameCryptor be stateful on the client side and have their own handle?
enum EncryptionType {
NONE = 0;
GCM = 1;
CUSTOM = 2;
}
message FrameCryptor {
required string participant_identity = 1;
required string track_sid = 2;
required int32 key_index = 3;
required bool enabled = 4;
}
message KeyProviderOptions {
// Only specify if you want to use a shared_key
optional bytes shared_key = 1;
required int32 ratchet_window_size = 2;
required bytes ratchet_salt = 3;
required int32 failure_tolerance = 4; // -1 = no tolerance
}
message E2eeOptions {
required EncryptionType encryption_type = 1;
required KeyProviderOptions key_provider_options = 2;
}
enum EncryptionState {
NEW = 0;
OK = 1;
ENCRYPTION_FAILED = 2;
DECRYPTION_FAILED = 3;
MISSING_KEY = 4;
KEY_RATCHETED = 5;
INTERNAL_ERROR = 6;
}
message E2eeManagerSetEnabledRequest {
required bool enabled = 1;
}
message E2eeManagerSetEnabledResponse {}
message E2eeManagerGetFrameCryptorsRequest {}
message E2eeManagerGetFrameCryptorsResponse {
repeated FrameCryptor frame_cryptors = 1;
}
message FrameCryptorSetEnabledRequest {
required string participant_identity = 1;
required string track_sid = 2;
required bool enabled = 3;
}
message FrameCryptorSetEnabledResponse {}
message FrameCryptorSetKeyIndexRequest {
required string participant_identity = 1;
required string track_sid = 2;
required int32 key_index = 3;
}
message FrameCryptorSetKeyIndexResponse {}
message SetSharedKeyRequest {
required bytes shared_key = 1;
required int32 key_index = 2;
}
message SetSharedKeyResponse {}
message RatchetSharedKeyRequest {
required int32 key_index = 1;
}
message RatchetSharedKeyResponse {
optional bytes new_key = 1;
}
message GetSharedKeyRequest {
required int32 key_index = 1;
}
message GetSharedKeyResponse {
optional bytes key = 1;
}
message SetKeyRequest {
required string participant_identity = 1;
required bytes key = 2;
required int32 key_index = 3;
}
message SetKeyResponse {}
message RatchetKeyRequest {
required string participant_identity = 1;
required int32 key_index = 2;
}
message RatchetKeyResponse {
optional bytes new_key = 1;
}
message GetKeyRequest {
required string participant_identity = 1;
required int32 key_index = 2;
}
message GetKeyResponse {
optional bytes key = 1;
}
message E2eeRequest {
required uint64 room_handle = 1;
oneof message {
E2eeManagerSetEnabledRequest manager_set_enabled = 2;
E2eeManagerGetFrameCryptorsRequest manager_get_frame_cryptors = 3;
FrameCryptorSetEnabledRequest cryptor_set_enabled = 4;
FrameCryptorSetKeyIndexRequest cryptor_set_key_index = 5;
SetSharedKeyRequest set_shared_key = 6;
RatchetSharedKeyRequest ratchet_shared_key = 7;
GetSharedKeyRequest get_shared_key = 8;
SetKeyRequest set_key = 9;
RatchetKeyRequest ratchet_key = 10;
GetKeyRequest get_key = 11;
}
}
message E2eeResponse {
oneof message {
E2eeManagerSetEnabledResponse manager_set_enabled = 1;
E2eeManagerGetFrameCryptorsResponse manager_get_frame_cryptors = 2;
FrameCryptorSetEnabledResponse cryptor_set_enabled = 3;
FrameCryptorSetKeyIndexResponse cryptor_set_key_index = 4;
SetSharedKeyResponse set_shared_key = 5;
RatchetSharedKeyResponse ratchet_shared_key = 6;
GetSharedKeyResponse get_shared_key = 7;
SetKeyResponse set_key = 8;
RatchetKeyResponse ratchet_key = 9;
GetKeyResponse get_key = 10;
}
}

239
src/protocols/ffi.proto Normal file
View File

@@ -0,0 +1,239 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// import "handle.proto";
import "e2ee.proto";
import "track.proto";
import "room.proto";
import "video_frame.proto";
import "audio_frame.proto";
import "rpc.proto";
// **How is the livekit-ffi working:
// We refer as the ffi server the Rust server that is running the LiveKit client implementation, and we
// refer as the ffi client the foreign language that commumicates with the ffi server. (e.g Python SDK, Unity SDK, etc...)
//
// We expose the Rust client implementation of livekit using the protocol defined here.
// Everything starts with a FfiRequest, which is a oneof message that contains all the possible
// requests that can be made to the ffi server.
// The server will then respond with a FfiResponse, which is also a oneof message that contains
// all the possible responses.
// The first request sent to the server must be an InitializeRequest, which contains the a pointer
// to the callback function that will be used to send events and async responses to the ffi client.
// (e.g participant joined, track published, etc...)
//
// **Useful things know when collaborating on the protocol:**
// Everything is subject to discussion and change :-)
//
// - The ffi client implementation must never forget to correctly dispose all the owned handles
// that it receives from the server.
//
// Therefore, the ffi client is easier to implement if there is less handles to manage.
//
// - We are mainly using FfiHandle on info messages (e.g: RoomInfo, TrackInfo, etc...)
// For this reason, info are only sent once, at creation (We're not using them for updates, we can infer them from
// events on the client implementation).
// e.g: set speaking to true when we receive a ActiveSpeakerChanged event.
// This is the input of livekit_ffi_request function
// We always expect a response (FFIResponse, even if it's empty)
message FfiRequest {
oneof message {
DisposeRequest dispose = 2;
// Room
ConnectRequest connect = 3;
DisconnectRequest disconnect = 4;
PublishTrackRequest publish_track = 5;
UnpublishTrackRequest unpublish_track = 6;
PublishDataRequest publish_data = 7;
SetSubscribedRequest set_subscribed = 8;
SetLocalMetadataRequest set_local_metadata = 9;
SetLocalNameRequest set_local_name = 10;
SetLocalAttributesRequest set_local_attributes = 11;
GetSessionStatsRequest get_session_stats = 12;
PublishTranscriptionRequest publish_transcription = 13;
PublishSipDtmfRequest publish_sip_dtmf = 14;
// Track
CreateVideoTrackRequest create_video_track = 15;
CreateAudioTrackRequest create_audio_track = 16;
LocalTrackMuteRequest local_track_mute = 17;
EnableRemoteTrackRequest enable_remote_track = 18;
GetStatsRequest get_stats = 19;
// Video
NewVideoStreamRequest new_video_stream = 20;
NewVideoSourceRequest new_video_source = 21;
CaptureVideoFrameRequest capture_video_frame = 22;
VideoConvertRequest video_convert = 23;
VideoStreamFromParticipantRequest video_stream_from_participant = 24;
// Audio
NewAudioStreamRequest new_audio_stream = 25;
NewAudioSourceRequest new_audio_source = 26;
CaptureAudioFrameRequest capture_audio_frame = 27;
ClearAudioBufferRequest clear_audio_buffer = 28;
NewAudioResamplerRequest new_audio_resampler = 29;
RemixAndResampleRequest remix_and_resample = 30;
E2eeRequest e2ee = 31;
AudioStreamFromParticipantRequest audio_stream_from_participant = 32;
NewSoxResamplerRequest new_sox_resampler = 33;
PushSoxResamplerRequest push_sox_resampler = 34;
FlushSoxResamplerRequest flush_sox_resampler = 35;
SendChatMessageRequest send_chat_message = 36;
EditChatMessageRequest edit_chat_message = 37;
// RPC
PerformRpcRequest perform_rpc = 38;
RegisterRpcMethodRequest register_rpc_method = 39;
UnregisterRpcMethodRequest unregister_rpc_method = 40;
RpcMethodInvocationResponseRequest rpc_method_invocation_response = 41;
}
}
// This is the output of livekit_ffi_request function.
message FfiResponse {
oneof message {
DisposeResponse dispose = 2;
// Room
ConnectResponse connect = 3;
DisconnectResponse disconnect = 4;
PublishTrackResponse publish_track = 5;
UnpublishTrackResponse unpublish_track = 6;
PublishDataResponse publish_data = 7;
SetSubscribedResponse set_subscribed = 8;
SetLocalMetadataResponse set_local_metadata = 9;
SetLocalNameResponse set_local_name = 10;
SetLocalAttributesResponse set_local_attributes = 11;
GetSessionStatsResponse get_session_stats = 12;
PublishTranscriptionResponse publish_transcription = 13;
PublishSipDtmfResponse publish_sip_dtmf = 14;
// Track
CreateVideoTrackResponse create_video_track = 15;
CreateAudioTrackResponse create_audio_track = 16;
LocalTrackMuteResponse local_track_mute = 17;
EnableRemoteTrackResponse enable_remote_track = 18;
GetStatsResponse get_stats = 19;
// Video
NewVideoStreamResponse new_video_stream = 20;
NewVideoSourceResponse new_video_source = 21;
CaptureVideoFrameResponse capture_video_frame = 22;
VideoConvertResponse video_convert = 23;
VideoStreamFromParticipantResponse video_stream_from_participant = 24;
// Audio
NewAudioStreamResponse new_audio_stream = 25;
NewAudioSourceResponse new_audio_source = 26;
CaptureAudioFrameResponse capture_audio_frame = 27;
ClearAudioBufferResponse clear_audio_buffer = 28;
NewAudioResamplerResponse new_audio_resampler = 29;
RemixAndResampleResponse remix_and_resample = 30;
AudioStreamFromParticipantResponse audio_stream_from_participant = 31;
E2eeResponse e2ee = 32;
NewSoxResamplerResponse new_sox_resampler = 33;
PushSoxResamplerResponse push_sox_resampler = 34;
FlushSoxResamplerResponse flush_sox_resampler = 35;
SendChatMessageResponse send_chat_message = 36;
// RPC
PerformRpcResponse perform_rpc = 37;
RegisterRpcMethodResponse register_rpc_method = 38;
UnregisterRpcMethodResponse unregister_rpc_method = 39;
RpcMethodInvocationResponseResponse rpc_method_invocation_response = 40;
}
}
// To minimize complexity, participant events are not included in the protocol.
// It is easily deducible from the room events and it turned out that is is easier to implement
// on the ffi client side.
message FfiEvent {
oneof message {
RoomEvent room_event = 1;
TrackEvent track_event = 2;
VideoStreamEvent video_stream_event = 3;
AudioStreamEvent audio_stream_event = 4;
ConnectCallback connect = 5;
DisconnectCallback disconnect = 7;
DisposeCallback dispose = 8;
PublishTrackCallback publish_track = 9;
UnpublishTrackCallback unpublish_track = 10;
PublishDataCallback publish_data = 11;
PublishTranscriptionCallback publish_transcription = 12;
CaptureAudioFrameCallback capture_audio_frame = 13;
SetLocalMetadataCallback set_local_metadata = 14;
SetLocalNameCallback set_local_name = 15;
SetLocalAttributesCallback set_local_attributes = 16;
GetStatsCallback get_stats = 17;
LogBatch logs = 18;
GetSessionStatsCallback get_session_stats = 19;
Panic panic = 20;
PublishSipDtmfCallback publish_sip_dtmf = 21;
SendChatMessageCallback chat_message = 22;
PerformRpcCallback perform_rpc = 23;
RpcMethodInvocationEvent rpc_method_invocation = 24;
}
}
// Stop all rooms synchronously (Do we need async here?).
// e.g: This is used for the Unity Editor after each assemblies reload.
// TODO(theomonnom): Implement a debug mode where we can find all leaked handles?
message DisposeRequest {
required bool async = 1;
}
message DisposeResponse {
optional uint64 async_id = 1; // None if sync
}
message DisposeCallback {
required uint64 async_id = 1;
}
enum LogLevel {
LOG_ERROR = 0;
LOG_WARN = 1;
LOG_INFO = 2;
LOG_DEBUG = 3;
LOG_TRACE = 4;
}
message LogRecord {
required LogLevel level = 1;
required string target = 2; // e.g "livekit", "libwebrtc", "tokio-tungstenite", etc...
optional string module_path = 3;
optional string file = 4;
optional uint32 line = 5;
required string message = 6;
}
message LogBatch {
repeated LogRecord records = 1;
}
message Panic {
required string message = 1;
}
// TODO(theomonnom): Debug messages (Print handles).

View File

@@ -0,0 +1,31 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
// # Safety
// The foreign language is responsable for disposing handles
// Forgetting to dispose the handle may lead to memory leaks
//
// Dropping a handle doesn't necessarily mean that the object is destroyed if it is still used
// on the FfiServer (Atomic reference counting)
//
// When refering to a handle without owning it, we just use a uint32 without this message.
// (the variable name is suffixed with "_handle")
message FfiOwnedHandle {
required uint64 id = 1;
}

View File

@@ -0,0 +1,42 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "handle.proto";
// Static description of a room participant.
message ParticipantInfo {
required string sid = 1;
required string name = 2;
required string identity = 3;
required string metadata = 4;
map<string, string> attributes = 5;
required ParticipantKind kind = 6;
}
// A ParticipantInfo together with the FFI handle that owns it.
message OwnedParticipant {
required FfiOwnedHandle handle = 1;
required ParticipantInfo info = 2;
}
enum ParticipantKind {
PARTICIPANT_KIND_STANDARD = 0;
PARTICIPANT_KIND_INGRESS = 1;
PARTICIPANT_KIND_EGRESS = 2;
PARTICIPANT_KIND_SIP = 3;
PARTICIPANT_KIND_AGENT = 4;
}

552
src/protocols/room.proto Normal file
View File

@@ -0,0 +1,552 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE(review): vendored from the LiveKit FFI protocol (rust-sdks).
// Keep byte-identical to upstream — do not rename or renumber fields.
syntax = "proto2";
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";
import "e2ee.proto";
import "handle.proto";
import "participant.proto";
import "track.proto";
import "video_frame.proto";
import "stats.proto";
// Connect to a new LiveKit room
message ConnectRequest {
required string url = 1;
required string token = 2;
required RoomOptions options = 3;
}
message ConnectResponse {
required uint64 async_id = 1;
}
message ConnectCallback {
message ParticipantWithTracks {
required OwnedParticipant participant = 1;
// TrackInfo are not needed here, if we're subscribed to a track, the FfiServer will send
// a TrackSubscribed event
repeated OwnedTrackPublication publications = 2;
}
message Result {
required OwnedRoom room = 1;
required OwnedParticipant local_participant = 2;
repeated ParticipantWithTracks participants = 3;
}
required uint64 async_id = 1;
oneof message {
string error = 2;
Result result = 3;
}
}
// Disconnect from the a room
message DisconnectRequest { required uint64 room_handle = 1; }
message DisconnectResponse { required uint64 async_id = 1; }
message DisconnectCallback { required uint64 async_id = 1; }
// Publish a track to the room
message PublishTrackRequest {
required uint64 local_participant_handle = 1;
required uint64 track_handle = 2;
required TrackPublishOptions options = 3;
}
message PublishTrackResponse {
required uint64 async_id = 1;
}
message PublishTrackCallback {
required uint64 async_id = 1;
oneof message {
string error = 2;
OwnedTrackPublication publication = 3;
}
}
// Unpublish a track from the room
message UnpublishTrackRequest {
required uint64 local_participant_handle = 1;
required string track_sid = 2;
required bool stop_on_unpublish = 3;
}
message UnpublishTrackResponse {
required uint64 async_id = 1;
}
message UnpublishTrackCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Publish data to other participants
message PublishDataRequest {
required uint64 local_participant_handle = 1;
required uint64 data_ptr = 2;
required uint64 data_len = 3;
required bool reliable = 4;
repeated string destination_sids = 5 [deprecated=true];
optional string topic = 6;
repeated string destination_identities = 7;
}
message PublishDataResponse {
required uint64 async_id = 1;
}
message PublishDataCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Publish transcription messages to room
message PublishTranscriptionRequest {
required uint64 local_participant_handle = 1;
required string participant_identity = 2;
required string track_id = 3;
repeated TranscriptionSegment segments = 4;
}
message PublishTranscriptionResponse {
required uint64 async_id = 1;
}
message PublishTranscriptionCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Publish Sip DTMF messages to other participants
message PublishSipDtmfRequest {
required uint64 local_participant_handle = 1;
required uint32 code = 2;
required string digit = 3;
repeated string destination_identities = 4;
}
message PublishSipDtmfResponse {
required uint64 async_id = 1;
}
message PublishSipDtmfCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Change the local participant's metadata
message SetLocalMetadataRequest {
required uint64 local_participant_handle = 1;
required string metadata = 2;
}
message SetLocalMetadataResponse {
required uint64 async_id = 1;
}
message SetLocalMetadataCallback {
required uint64 async_id = 1;
optional string error = 2;
}
message SendChatMessageRequest {
required uint64 local_participant_handle = 1;
required string message = 2;
repeated string destination_identities = 3;
optional string sender_identity = 4;
}
message EditChatMessageRequest {
required uint64 local_participant_handle = 1;
required string edit_text = 2;
required ChatMessage original_message = 3;
repeated string destination_identities = 4;
optional string sender_identity = 5;
}
message SendChatMessageResponse {
required uint64 async_id = 1;
}
message SendChatMessageCallback {
required uint64 async_id = 1;
oneof message {
string error = 2;
ChatMessage chat_message = 3;
}
}
// Change the local participant's attributes
message SetLocalAttributesRequest {
required uint64 local_participant_handle = 1;
repeated AttributesEntry attributes = 2;
}
message AttributesEntry {
required string key = 1;
required string value = 2;
}
message SetLocalAttributesResponse {
required uint64 async_id = 1;
}
message SetLocalAttributesCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Change the local participant's name
message SetLocalNameRequest {
required uint64 local_participant_handle = 1;
required string name = 2;
}
message SetLocalNameResponse {
required uint64 async_id = 1;
}
message SetLocalNameCallback {
required uint64 async_id = 1;
optional string error = 2;
}
// Change the "desire" to subs2ribe to a track
message SetSubscribedRequest {
required bool subscribe = 1;
required uint64 publication_handle = 2;
}
message SetSubscribedResponse {}
message GetSessionStatsRequest {
required uint64 room_handle = 1;
}
message GetSessionStatsResponse {
required uint64 async_id = 1;
}
message GetSessionStatsCallback {
message Result {
repeated RtcStats publisher_stats = 1;
repeated RtcStats subscriber_stats = 2;
}
required uint64 async_id = 1;
oneof message {
string error = 2;
Result result = 3;
}
}
//
// Options
//

// Target encoding parameters for a published video track.
message VideoEncoding {
  required uint64 max_bitrate = 1;
  required double max_framerate = 2;
}

// Target encoding parameters for a published audio track.
message AudioEncoding {
  required uint64 max_bitrate = 1;
}

// Per-publication options; unset fields fall back to SDK defaults.
message TrackPublishOptions {
  // encodings are optional
  optional VideoEncoding video_encoding = 1;
  optional AudioEncoding audio_encoding = 2;
  optional VideoCodec video_codec = 3;
  optional bool dtx = 4;
  optional bool red = 5;
  optional bool simulcast = 6;
  optional TrackSource source = 7;
  optional string stream = 8;
}

// Which classes of ICE candidates may be used for the connection.
enum IceTransportType {
  TRANSPORT_RELAY = 0;
  TRANSPORT_NOHOST = 1;
  TRANSPORT_ALL = 2;
}

enum ContinualGatheringPolicy {
  GATHER_ONCE = 0;
  GATHER_CONTINUALLY = 1;
}

// One STUN/TURN server entry used by RtcConfig.
message IceServer {
  repeated string urls = 1;
  optional string username = 2;
  optional string password = 3;
}

// Custom WebRTC configuration; unset fields keep the SDK defaults.
message RtcConfig {
  optional IceTransportType ice_transport_type = 1;
  optional ContinualGatheringPolicy continual_gathering_policy = 2;
  repeated IceServer ice_servers = 3; // empty fallback to default
}

// Options applied when connecting to a room.
message RoomOptions {
  optional bool auto_subscribe = 1;
  optional bool adaptive_stream = 2;
  optional bool dynacast = 3;
  optional E2eeOptions e2ee = 4;
  optional RtcConfig rtc_config = 5; // allow to setup a custom RtcConfiguration
  optional uint32 join_retries = 6;
}
//
// Room
//

enum ConnectionQuality {
  QUALITY_POOR = 0;
  QUALITY_GOOD = 1;
  QUALITY_EXCELLENT = 2;
  QUALITY_LOST = 3;
}

enum ConnectionState {
  CONN_DISCONNECTED = 0;
  CONN_CONNECTED = 1;
  CONN_RECONNECTING = 2;
}

// Delivery guarantee for data packets.
enum DataPacketKind {
  KIND_LOSSY = 0;
  KIND_RELIABLE = 1;
}

// Why the room connection ended.
enum DisconnectReason {
  UNKNOWN_REASON = 0;
  // the client initiated the disconnect
  CLIENT_INITIATED = 1;
  // another participant with the same identity has joined the room
  DUPLICATE_IDENTITY = 2;
  // the server instance is shutting down
  SERVER_SHUTDOWN = 3;
  // RoomService.RemoveParticipant was called
  PARTICIPANT_REMOVED = 4;
  // RoomService.DeleteRoom was called
  ROOM_DELETED = 5;
  // the client is attempting to resume a session, but server is not aware of it
  STATE_MISMATCH = 6;
  // client was unable to connect fully
  JOIN_FAILURE = 7;
  // Cloud-only, the server requested Participant to migrate the connection elsewhere
  MIGRATION = 8;
  // the signal websocket was closed unexpectedly
  SIGNAL_CLOSE = 9;
  // the room was closed, due to all Standard and Ingress participants having left
  ROOM_CLOSED = 10;
}

// One segment of a live transcription; `final` marks segments that will not
// change anymore.
message TranscriptionSegment {
  required string id = 1;
  required string text = 2;
  required uint64 start_time = 3;
  required uint64 end_time = 4;
  required bool final = 5;
  required string language = 6;
}

// Raw byte buffer shared across the FFI boundary as pointer + length.
message BufferInfo {
  required uint64 data_ptr = 1;
  required uint64 data_len = 2;
}

// BufferInfo together with the FFI handle that owns the memory.
message OwnedBuffer {
  required FfiOwnedHandle handle = 1;
  required BufferInfo data = 2;
}
// Event stream for a room; exactly one payload of the oneof is set per event.
message RoomEvent {
  required uint64 room_handle = 1;
  oneof message {
    ParticipantConnected participant_connected = 2;
    ParticipantDisconnected participant_disconnected = 3;
    LocalTrackPublished local_track_published = 4;
    LocalTrackUnpublished local_track_unpublished = 5;
    LocalTrackSubscribed local_track_subscribed = 6;
    TrackPublished track_published = 7;
    TrackUnpublished track_unpublished = 8;
    TrackSubscribed track_subscribed = 9;
    TrackUnsubscribed track_unsubscribed = 10;
    TrackSubscriptionFailed track_subscription_failed = 11;
    TrackMuted track_muted = 12;
    TrackUnmuted track_unmuted = 13;
    ActiveSpeakersChanged active_speakers_changed = 14;
    RoomMetadataChanged room_metadata_changed = 15;
    RoomSidChanged room_sid_changed = 16;
    ParticipantMetadataChanged participant_metadata_changed = 17;
    ParticipantNameChanged participant_name_changed = 18;
    ParticipantAttributesChanged participant_attributes_changed = 19;
    ConnectionQualityChanged connection_quality_changed = 20;
    ConnectionStateChanged connection_state_changed = 21;
    // Connected connected = 21;
    Disconnected disconnected = 22;
    Reconnecting reconnecting = 23;
    Reconnected reconnected = 24;
    E2eeStateChanged e2ee_state_changed = 25;
    RoomEOS eos = 26; // The stream of room events has ended
    DataPacketReceived data_packet_received = 27;
    TranscriptionReceived transcription_received = 28;
    ChatMessageReceived chat_message = 29;
  }
}

// Static information about a room; sid may be unset until known.
message RoomInfo {
  optional string sid = 1;
  required string name = 2;
  required string metadata = 3;
}

// RoomInfo together with the FFI handle that owns the room object.
message OwnedRoom {
  required FfiOwnedHandle handle = 1;
  required RoomInfo info = 2;
}
// Payload messages for RoomEvent. Participants are referenced by their
// string identity throughout.

message ParticipantConnected { required OwnedParticipant info = 1; }

message ParticipantDisconnected {
  required string participant_identity = 1;
}

message LocalTrackPublished {
  // The TrackPublicationInfo comes from the PublishTrack response
  // and the FfiClient must wait for it before firing this event
  required string track_sid = 1;
}

message LocalTrackUnpublished {
  required string publication_sid = 1;
}

message LocalTrackSubscribed {
  // NOTE(review): field number is 2, not 1 — presumably 1 was removed
  // upstream; do not renumber, it would break wire compatibility.
  required string track_sid = 2;
}

message TrackPublished {
  required string participant_identity = 1;
  required OwnedTrackPublication publication = 2;
}

message TrackUnpublished {
  required string participant_identity = 1;
  required string publication_sid = 2;
}

// Publication isn't needed for subscription events on the FFI
// The FFI will retrieve the publication using the Track sid
message TrackSubscribed {
  required string participant_identity = 1;
  required OwnedTrack track = 2;
}

message TrackUnsubscribed {
  // The FFI language can dispose/remove the VideoSink here
  required string participant_identity = 1;
  required string track_sid = 2;
}

message TrackSubscriptionFailed {
  required string participant_identity = 1;
  required string track_sid = 2;
  required string error = 3;
}

message TrackMuted {
  required string participant_identity = 1;
  required string track_sid = 2;
}

message TrackUnmuted {
  required string participant_identity = 1;
  required string track_sid = 2;
}

message E2eeStateChanged {
  required string participant_identity = 1; // identity of the participant whose E2EE state changed
  required EncryptionState state = 2;
}

message ActiveSpeakersChanged { repeated string participant_identities = 1; }

message RoomMetadataChanged {
  required string metadata = 1;
}

message RoomSidChanged {
  required string sid = 1;
}

message ParticipantMetadataChanged {
  required string participant_identity = 1;
  required string metadata = 2;
}

message ParticipantAttributesChanged {
  required string participant_identity = 1;
  // full attribute set after the change
  repeated AttributesEntry attributes = 2;
  // only the entries that changed
  repeated AttributesEntry changed_attributes = 3;
}

message ParticipantNameChanged {
  required string participant_identity = 1;
  required string name = 2;
}

message ConnectionQualityChanged {
  required string participant_identity = 1;
  required ConnectionQuality quality = 2;
}

// Application-defined data received on the data channel.
message UserPacket {
  required OwnedBuffer data = 1;
  optional string topic = 2;
}

message ChatMessage {
  required string id = 1;
  required int64 timestamp = 2;
  required string message = 3;
  optional int64 edit_timestamp = 4;
  optional bool deleted = 5;
  optional bool generated = 6;
}

message ChatMessageReceived {
  required ChatMessage message = 1;
  required string participant_identity = 2;
}

// DTMF digit received over SIP.
message SipDTMF {
  required uint32 code = 1;
  optional string digit = 2;
}

message DataPacketReceived {
  required DataPacketKind kind = 1;
  required string participant_identity = 2; // Can be empty if the data is sent by a server SDK
  // NOTE(review): oneof starts at 4; field 3 appears reserved upstream.
  oneof value {
    UserPacket user = 4;
    SipDTMF sip_dtmf = 5;
  }
}

message TranscriptionReceived {
  optional string participant_identity = 1;
  optional string track_sid = 2;
  repeated TranscriptionSegment segments = 3;
}

message ConnectionStateChanged { required ConnectionState state = 1; }

message Connected {}

message Disconnected {
  required DisconnectReason reason = 1;
}

message Reconnecting {}
message Reconnected {}

// Terminates the room event stream.
message RoomEOS {}

81
src/protocols/rpc.proto Normal file
View File

@@ -0,0 +1,81 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";

// RPC support for the LiveKit FFI: perform RPCs on remote participants and
// register/serve RPC methods on the local participant.
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";

// Error returned by a remote RPC handler.
message RpcError {
  required uint32 code = 1;
  required string message = 2;
  optional string data = 3;
}

// FFI Requests

// Invoke a method on a remote participant (asynchronous).
message PerformRpcRequest {
  required uint64 local_participant_handle = 1;
  required string destination_identity = 2;
  required string method = 3;
  required string payload = 4;
  optional uint32 response_timeout_ms = 5;
}

// Register a method name that the local participant will serve.
message RegisterRpcMethodRequest {
  required uint64 local_participant_handle = 1;
  required string method = 2;
}

message UnregisterRpcMethodRequest {
  required uint64 local_participant_handle = 1;
  required string method = 2;
}

// Reply to an RpcMethodInvocationEvent: exactly one of payload/error should
// be set by the handler.
message RpcMethodInvocationResponseRequest {
  required uint64 local_participant_handle = 1;
  required uint64 invocation_id = 2;
  optional string payload = 3;
  optional RpcError error = 4;
}

// FFI Responses

message PerformRpcResponse {
  required uint64 async_id = 1;
}

message RegisterRpcMethodResponse {}
message UnregisterRpcMethodResponse {}

message RpcMethodInvocationResponseResponse {
  optional string error = 1;
}

// FFI Callbacks

// Completion of PerformRpcRequest: payload on success, error on failure.
message PerformRpcCallback {
  required uint64 async_id = 1;
  optional string payload = 2;
  optional RpcError error = 3;
}

// FFI Events

// A remote caller invoked a method registered on the local participant;
// answer with RpcMethodInvocationResponseRequest using invocation_id.
message RpcMethodInvocationEvent {
  required uint64 local_participant_handle = 1;
  required uint64 invocation_id = 2;
  required string method = 3;
  required string request_id = 4;
  required string caller_identity = 5;
  required string payload = 6;
  required uint32 response_timeout_ms = 7;
}

449
src/protocols/stats.proto Normal file
View File

@@ -0,0 +1,449 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";

// WebRTC statistics types. Names mirror the identifiers of the W3C
// webrtc-stats specification.
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";

enum DataChannelState {
  DC_CONNECTING = 0;
  DC_OPEN = 1;
  DC_CLOSING = 2;
  DC_CLOSED = 3;
}

// Why an outbound video encoder is limited in quality.
enum QualityLimitationReason {
  LIMITATION_NONE = 0;
  LIMITATION_CPU = 1;
  LIMITATION_BANDWIDTH = 2;
  LIMITATION_OTHER = 3;
}

enum IceRole {
  ICE_UNKNOWN = 0;
  ICE_CONTROLLING = 1;
  ICE_CONTROLLED = 2;
}

enum DtlsTransportState {
  DTLS_TRANSPORT_NEW = 0;
  DTLS_TRANSPORT_CONNECTING = 1;
  DTLS_TRANSPORT_CONNECTED = 2;
  DTLS_TRANSPORT_CLOSED = 3;
  DTLS_TRANSPORT_FAILED = 4;
}

enum IceTransportState {
  ICE_TRANSPORT_NEW = 0;
  ICE_TRANSPORT_CHECKING = 1;
  ICE_TRANSPORT_CONNECTED = 2;
  ICE_TRANSPORT_COMPLETED = 3;
  ICE_TRANSPORT_DISCONNECTED = 4;
  ICE_TRANSPORT_FAILED = 5;
  ICE_TRANSPORT_CLOSED = 6;
}

enum DtlsRole {
  DTLS_CLIENT = 0;
  DTLS_SERVER = 1;
  DTLS_UNKNOWN = 2;
}

enum IceCandidatePairState {
  PAIR_FROZEN = 0;
  PAIR_WAITING = 1;
  PAIR_IN_PROGRESS = 2;
  PAIR_FAILED = 3;
  PAIR_SUCCEEDED = 4;
}

enum IceCandidateType {
  HOST = 0;
  SRFLX = 1;
  PRFLX = 2;
  RELAY = 3;
}

enum IceServerTransportProtocol {
  TRANSPORT_UDP = 0;
  TRANSPORT_TCP = 1;
  TRANSPORT_TLS = 2;
}

enum IceTcpCandidateType {
  CANDIDATE_ACTIVE = 0;
  CANDIDATE_PASSIVE = 1;
  CANDIDATE_SO = 2;
}
// One entry of a WebRTC stats report. Each variant bundles the common
// RtcStatsData (id + timestamp) with the stats specific to that entry type.
message RtcStats {
  message Codec {
    required RtcStatsData rtc = 1;
    required CodecStats codec = 2;
  }
  message InboundRtp {
    required RtcStatsData rtc = 1;
    required RtpStreamStats stream = 2;
    required ReceivedRtpStreamStats received = 3;
    required InboundRtpStreamStats inbound = 4;
  }
  message OutboundRtp {
    required RtcStatsData rtc = 1;
    required RtpStreamStats stream = 2;
    required SentRtpStreamStats sent = 3;
    required OutboundRtpStreamStats outbound = 4;
  }
  message RemoteInboundRtp {
    required RtcStatsData rtc = 1;
    required RtpStreamStats stream = 2;
    required ReceivedRtpStreamStats received = 3;
    required RemoteInboundRtpStreamStats remote_inbound = 4;
  }
  message RemoteOutboundRtp {
    required RtcStatsData rtc = 1;
    required RtpStreamStats stream = 2;
    required SentRtpStreamStats sent = 3;
    required RemoteOutboundRtpStreamStats remote_outbound = 4;
  }
  message MediaSource {
    required RtcStatsData rtc = 1;
    required MediaSourceStats source = 2;
    required AudioSourceStats audio = 3;
    required VideoSourceStats video = 4;
  }
  message MediaPlayout {
    required RtcStatsData rtc = 1;
    required AudioPlayoutStats audio_playout = 2;
  }
  message PeerConnection {
    required RtcStatsData rtc = 1;
    required PeerConnectionStats pc = 2;
  }
  message DataChannel {
    required RtcStatsData rtc = 1;
    required DataChannelStats dc = 2;
  }
  message Transport {
    required RtcStatsData rtc = 1;
    required TransportStats transport = 2;
  }
  message CandidatePair {
    required RtcStatsData rtc = 1;
    required CandidatePairStats candidate_pair = 2;
  }
  message LocalCandidate {
    required RtcStatsData rtc = 1;
    required IceCandidateStats candidate = 2;
  }
  message RemoteCandidate {
    required RtcStatsData rtc = 1;
    required IceCandidateStats candidate = 2;
  }
  message Certificate {
    required RtcStatsData rtc = 1;
    required CertificateStats certificate = 2;
  }
  message Track {
    // Deprecated
  }

  oneof stats {
    Codec codec = 3;
    InboundRtp inbound_rtp = 4;
    OutboundRtp outbound_rtp = 5;
    RemoteInboundRtp remote_inbound_rtp = 6;
    RemoteOutboundRtp remote_outbound_rtp = 7;
    MediaSource media_source = 8;
    MediaPlayout media_playout = 9;
    PeerConnection peer_connection = 10;
    DataChannel data_channel = 11;
    Transport transport = 12;
    CandidatePair candidate_pair = 13;
    LocalCandidate local_candidate = 14;
    RemoteCandidate remote_candidate = 15;
    Certificate certificate = 16;
    Track track = 17;
  }
}
// Fields common to every stats entry: a unique id and a capture timestamp.
message RtcStatsData {
  required string id = 1;
  required int64 timestamp = 2;
}

// Negotiated codec parameters (RTCCodecStats).
message CodecStats {
  required uint32 payload_type = 1;
  required string transport_id = 2;
  required string mime_type = 3;
  required uint32 clock_rate = 4;
  required uint32 channels = 5;
  required string sdp_fmtp_line = 6;
}

// Fields shared by all RTP stream stats (RTCRtpStreamStats).
message RtpStreamStats {
  required uint32 ssrc = 1;
  required string kind = 2;
  required string transport_id = 3;
  required string codec_id = 4;
}

message ReceivedRtpStreamStats {
  required uint64 packets_received = 1;
  required int64 packets_lost = 2;
  required double jitter = 3;
}

// RTCInboundRtpStreamStats; fields follow the W3C webrtc-stats dictionary.
message InboundRtpStreamStats {
  required string track_identifier = 1;
  required string mid = 2;
  required string remote_id = 3;
  required uint32 frames_decoded = 4;
  required uint32 key_frames_decoded = 5;
  required uint32 frames_rendered = 6;
  required uint32 frames_dropped = 7;
  required uint32 frame_width = 8;
  required uint32 frame_height = 9;
  required double frames_per_second = 10;
  required uint64 qp_sum = 11;
  required double total_decode_time = 12;
  required double total_inter_frame_delay = 13;
  required double total_squared_inter_frame_delay = 14;
  required uint32 pause_count = 15;
  required double total_pause_duration = 16;
  required uint32 freeze_count = 17;
  required double total_freeze_duration = 18;
  required double last_packet_received_timestamp = 19;
  required uint64 header_bytes_received = 20;
  required uint64 packets_discarded = 21;
  required uint64 fec_bytes_received = 22;
  required uint64 fec_packets_received = 23;
  required uint64 fec_packets_discarded = 24;
  required uint64 bytes_received = 25;
  required uint32 nack_count = 26;
  required uint32 fir_count = 27;
  required uint32 pli_count = 28;
  required double total_processing_delay = 29;
  required double estimated_playout_timestamp = 30;
  required double jitter_buffer_delay = 31;
  required double jitter_buffer_target_delay = 32;
  required uint64 jitter_buffer_emitted_count = 33;
  required double jitter_buffer_minimum_delay = 34;
  required uint64 total_samples_received = 35;
  required uint64 concealed_samples = 36;
  required uint64 silent_concealed_samples = 37;
  required uint64 concealment_events = 38;
  required uint64 inserted_samples_for_deceleration = 39;
  required uint64 removed_samples_for_acceleration = 40;
  required double audio_level = 41;
  required double total_audio_energy = 42;
  required double total_samples_duration = 43;
  required uint64 frames_received = 44;
  required string decoder_implementation = 45;
  required string playout_id = 46;
  required bool power_efficient_decoder = 47;
  required uint64 frames_assembled_from_multiple_packets = 48;
  required double total_assembly_time = 49;
  required uint64 retransmitted_packets_received = 50;
  required uint64 retransmitted_bytes_received = 51;
  required uint32 rtx_ssrc = 52;
  required uint32 fec_ssrc = 53;
}
message SentRtpStreamStats {
  required uint64 packets_sent = 1;
  required uint64 bytes_sent = 2;
}

// RTCOutboundRtpStreamStats; fields follow the W3C webrtc-stats dictionary.
message OutboundRtpStreamStats {
  required string mid = 1;
  required string media_source_id = 2;
  required string remote_id = 3;
  required string rid = 4;
  required uint64 header_bytes_sent = 5;
  required uint64 retransmitted_packets_sent = 6;
  required uint64 retransmitted_bytes_sent = 7;
  required uint32 rtx_ssrc = 8;
  required double target_bitrate = 9;
  required uint64 total_encoded_bytes_target = 10;
  required uint32 frame_width = 11;
  required uint32 frame_height = 12;
  required double frames_per_second = 13;
  required uint32 frames_sent = 14;
  required uint32 huge_frames_sent = 15;
  required uint32 frames_encoded = 16;
  required uint32 key_frames_encoded = 17;
  required uint64 qp_sum = 18;
  required double total_encode_time = 19;
  required double total_packet_send_delay = 20;
  required QualityLimitationReason quality_limitation_reason = 21;
  // seconds spent in each limitation state, keyed by reason name
  map<string, double> quality_limitation_durations = 22;
  required uint32 quality_limitation_resolution_changes = 23;
  required uint32 nack_count = 24;
  required uint32 fir_count = 25;
  required uint32 pli_count = 26;
  required string encoder_implementation = 27;
  required bool power_efficient_encoder = 28;
  required bool active = 29;
  required string scalability_mode = 30;
}

// Stats reported by the remote endpoint about our outbound stream
// (RTCRemoteInboundRtpStreamStats).
message RemoteInboundRtpStreamStats {
  required string local_id = 1;
  required double round_trip_time = 2;
  required double total_round_trip_time = 3;
  required double fraction_lost = 4;
  required uint64 round_trip_time_measurements = 5;
}

// Stats reported by the remote endpoint about its outbound stream
// (RTCRemoteOutboundRtpStreamStats).
message RemoteOutboundRtpStreamStats {
  required string local_id = 1;
  required double remote_timestamp = 2;
  required uint64 reports_sent = 3;
  required double round_trip_time = 4;
  required double total_round_trip_time = 5;
  required uint64 round_trip_time_measurements = 6;
}
message MediaSourceStats {
  required string track_identifier = 1;
  required string kind = 2;
}

// Audio capture statistics (RTCAudioSourceStats).
message AudioSourceStats {
  required double audio_level = 1;
  required double total_audio_energy = 2;
  required double total_samples_duration = 3;
  required double echo_return_loss = 4;
  required double echo_return_loss_enhancement = 5;
  required double dropped_samples_duration = 6;
  required uint32 dropped_samples_events = 7;
  required double total_capture_delay = 8;
  required uint64 total_samples_captured = 9;
}

// Video capture statistics (RTCVideoSourceStats).
message VideoSourceStats {
  required uint32 width = 1;
  required uint32 height = 2;
  required uint32 frames = 3;
  required double frames_per_second = 4;
}

// Audio playout statistics (RTCAudioPlayoutStats).
message AudioPlayoutStats {
  required string kind = 1;
  required double synthesized_samples_duration = 2;
  required uint32 synthesized_samples_events = 3;
  required double total_samples_duration = 4;
  required double total_playout_delay = 5;
  required uint64 total_samples_count = 6;
}

message PeerConnectionStats {
  required uint32 data_channels_opened = 1;
  required uint32 data_channels_closed = 2;
}

message DataChannelStats {
  required string label = 1;
  required string protocol = 2;
  required int32 data_channel_identifier = 3;
  optional DataChannelState state = 4;
  required uint32 messages_sent = 5;
  required uint64 bytes_sent = 6;
  required uint32 messages_received = 7;
  required uint64 bytes_received = 8;
}

// ICE/DTLS transport statistics (RTCTransportStats).
message TransportStats {
  required uint64 packets_sent = 1;
  required uint64 packets_received = 2;
  required uint64 bytes_sent = 3;
  required uint64 bytes_received = 4;
  required IceRole ice_role = 5;
  required string ice_local_username_fragment = 6;
  optional DtlsTransportState dtls_state = 7;
  optional IceTransportState ice_state = 8;
  required string selected_candidate_pair_id = 9;
  required string local_certificate_id = 10;
  required string remote_certificate_id = 11;
  required string tls_version = 12;
  required string dtls_cipher = 13;
  required DtlsRole dtls_role = 14;
  required string srtp_cipher = 15;
  required uint32 selected_candidate_pair_changes = 16;
}
// ICE candidate-pair statistics (RTCIceCandidatePairStats).
message CandidatePairStats {
  required string transport_id = 1;
  required string local_candidate_id = 2;
  required string remote_candidate_id = 3;
  optional IceCandidatePairState state = 4;
  required bool nominated = 5;
  required uint64 packets_sent = 6;
  required uint64 packets_received = 7;
  required uint64 bytes_sent = 8;
  required uint64 bytes_received = 9;
  required double last_packet_sent_timestamp = 10;
  required double last_packet_received_timestamp = 11;
  required double total_round_trip_time = 12;
  required double current_round_trip_time = 13;
  required double available_outgoing_bitrate = 14;
  required double available_incoming_bitrate = 15;
  required uint64 requests_received = 16;
  required uint64 requests_sent = 17;
  required uint64 responses_received = 18;
  required uint64 responses_sent = 19;
  required uint64 consent_requests_sent = 20;
  required uint32 packets_discarded_on_send = 21;
  required uint64 bytes_discarded_on_send = 22;
}

// One ICE candidate, local or remote (RTCIceCandidateStats).
message IceCandidateStats {
  required string transport_id = 1;
  required string address = 2;
  required int32 port = 3;
  required string protocol = 4;
  optional IceCandidateType candidate_type = 5;
  required int32 priority = 6;
  required string url = 7;
  optional IceServerTransportProtocol relay_protocol = 8;
  required string foundation = 9;
  required string related_address = 10;
  required int32 related_port = 11;
  required string username_fragment = 12;
  optional IceTcpCandidateType tcp_type = 13;
}

// DTLS certificate in use (RTCCertificateStats).
message CertificateStats {
  required string fingerprint = 1;
  required string fingerprint_algorithm = 2;
  required string base64_certificate = 3;
  required string issuer_certificate_id = 4;
}

131
src/protocols/track.proto Normal file
View File

@@ -0,0 +1,131 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";

// Track creation and control messages for the LiveKit FFI.
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";

import "e2ee.proto";
import "handle.proto";
import "stats.proto";

// Create a new VideoTrack from a VideoSource
message CreateVideoTrackRequest {
  required string name = 1;
  required uint64 source_handle = 2;
}

message CreateVideoTrackResponse {
  required OwnedTrack track = 1;
}

// Create a new AudioTrack from a AudioSource
message CreateAudioTrackRequest {
  required string name = 1;
  required uint64 source_handle = 2;
}

message CreateAudioTrackResponse {
  required OwnedTrack track = 1;
}

// Request the RTC statistics of a single track (asynchronous).
message GetStatsRequest {
  required uint64 track_handle = 1;
}

message GetStatsResponse {
  required uint64 async_id = 1;
}

// Completion of GetStatsRequest; error is unset on success.
message GetStatsCallback {
  required uint64 async_id = 1;
  optional string error = 2;
  repeated RtcStats stats = 3;
}

//
// Track
//

message TrackEvent {}

enum TrackKind {
  KIND_UNKNOWN = 0;
  KIND_AUDIO = 1;
  KIND_VIDEO = 2;
}

// What the track captures (camera, microphone, screen share, ...).
enum TrackSource {
  SOURCE_UNKNOWN = 0;
  SOURCE_CAMERA = 1;
  SOURCE_MICROPHONE = 2;
  SOURCE_SCREENSHARE = 3;
  SOURCE_SCREENSHARE_AUDIO = 4;
}

enum StreamState {
  STATE_UNKNOWN = 0;
  STATE_ACTIVE = 1;
  STATE_PAUSED = 2;
}

// Information about a track publication as seen from a room.
message TrackPublicationInfo {
  required string sid = 1;
  required string name = 2;
  required TrackKind kind = 3;
  required TrackSource source = 4;
  required bool simulcasted = 5;
  required uint32 width = 6;
  required uint32 height = 7;
  required string mime_type = 8;
  required bool muted = 9;
  required bool remote = 10;
  required EncryptionType encryption_type = 11;
}

message OwnedTrackPublication {
  required FfiOwnedHandle handle = 1;
  required TrackPublicationInfo info = 2;
}

// Information about a track object itself.
message TrackInfo {
  required string sid = 1;
  required string name = 2;
  required TrackKind kind = 3;
  required StreamState stream_state = 4;
  required bool muted = 5;
  required bool remote = 6;
}

message OwnedTrack {
  required FfiOwnedHandle handle = 1;
  required TrackInfo info = 2;
}

// Mute/UnMute a track
message LocalTrackMuteRequest {
  required uint64 track_handle = 1;
  required bool mute = 2;
}

message LocalTrackMuteResponse {
  required bool muted = 1;
}

// Enable/Disable a remote track
message EnableRemoteTrackRequest {
  required uint64 track_handle = 1;
  required bool enabled = 2;
}

message EnableRemoteTrackResponse {
  required bool enabled = 1;
}

View File

@@ -0,0 +1,189 @@
// Copyright 2023 LiveKit, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";

// Video frame exchange for the LiveKit FFI: streams (receiving frames from a
// track) and sources (pushing frames into a track).
package livekit.proto;
option csharp_namespace = "LiveKit.Proto";

import "handle.proto";
import "track.proto";

// Create a new VideoStream
// VideoStream is used to receive video frames from a track
message NewVideoStreamRequest {
  required uint64 track_handle = 1;
  required VideoStreamType type = 2;
  // Get the frame on a specific format
  optional VideoBufferType format = 3;
  optional bool normalize_stride = 4; // if true, stride will be set to width/chroma_width
}

message NewVideoStreamResponse { required OwnedVideoStream stream = 1; }

// Request a video stream from a participant
message VideoStreamFromParticipantRequest {
  required uint64 participant_handle = 1;
  required VideoStreamType type = 2;
  required TrackSource track_source = 3;
  optional VideoBufferType format = 4;
  optional bool normalize_stride = 5;
}

message VideoStreamFromParticipantResponse { required OwnedVideoStream stream = 1;}

// Create a new VideoSource
// VideoSource is used to send video frame to a track
message NewVideoSourceRequest {
  required VideoSourceType type = 1;
  // Used to determine which encodings to use + simulcast layers
  // Most of the time it corresponds to the source resolution
  required VideoSourceResolution resolution = 2;
}

message NewVideoSourceResponse { required OwnedVideoSource source = 1; }

// Push a frame to a VideoSource
message CaptureVideoFrameRequest {
  required uint64 source_handle = 1;
  required VideoBufferInfo buffer = 2;
  required int64 timestamp_us = 3; // In microseconds
  required VideoRotation rotation = 4;
}

message CaptureVideoFrameResponse {}

// Convert a frame buffer to another pixel format (synchronous).
message VideoConvertRequest {
  optional bool flip_y = 1;
  required VideoBufferInfo buffer = 2;
  required VideoBufferType dst_type = 3;
}

message VideoConvertResponse {
  oneof message {
    string error = 1;
    OwnedVideoBuffer buffer = 2;
  }
}

//
// VideoFrame buffers
//

message VideoResolution {
  required uint32 width = 1;
  required uint32 height = 2;
  required double frame_rate = 3;
}

enum VideoCodec {
  VP8 = 0;
  H264 = 1;
  AV1 = 2;
  VP9 = 3;
}

enum VideoRotation {
  VIDEO_ROTATION_0 = 0;
  VIDEO_ROTATION_90 = 1;
  VIDEO_ROTATION_180 = 2;
  VIDEO_ROTATION_270 = 3;
}

// Pixel layout of a frame buffer.
enum VideoBufferType {
  RGBA = 0;
  ABGR = 1;
  ARGB = 2;
  BGRA = 3;
  RGB24 = 4;
  I420 = 5;
  I420A = 6;
  I422 = 7;
  I444 = 8;
  I010 = 9;
  NV12 = 10;
}

// Describes a frame buffer shared across the FFI boundary by raw pointer.
message VideoBufferInfo {
  message ComponentInfo {
    required uint64 data_ptr = 1;
    required uint32 stride = 2;
    required uint32 size = 3;
  }
  required VideoBufferType type = 1;
  required uint32 width = 2;
  required uint32 height = 3;
  required uint64 data_ptr = 4;
  // NOTE(review): field 5 is skipped — presumably reserved upstream.
  required uint32 stride = 6; // only for packed formats
  repeated ComponentInfo components = 7;
}

message OwnedVideoBuffer {
  required FfiOwnedHandle handle = 1;
  required VideoBufferInfo info = 2;
}

//
// VideoStream
//

enum VideoStreamType {
  VIDEO_STREAM_NATIVE = 0;
  VIDEO_STREAM_WEBGL = 1;
  VIDEO_STREAM_HTML = 2;
}

message VideoStreamInfo {
  required VideoStreamType type = 1;
}

message OwnedVideoStream {
  required FfiOwnedHandle handle = 1;
  required VideoStreamInfo info = 2;
}

// Event stream of a VideoStream: frames, then an EOS marker.
message VideoStreamEvent {
  required uint64 stream_handle = 1;
  oneof message {
    VideoFrameReceived frame_received = 2;
    VideoStreamEOS eos = 3;
  }
}

message VideoFrameReceived {
  required OwnedVideoBuffer buffer = 1;
  required int64 timestamp_us = 2; // In microseconds
  required VideoRotation rotation = 3;
}

message VideoStreamEOS {}

//
// VideoSource
//

message VideoSourceResolution {
  required uint32 width = 1;
  required uint32 height = 2;
}

enum VideoSourceType {
  VIDEO_SOURCE_NATIVE = 0;
}

message VideoSourceInfo {
  required VideoSourceType type = 1;
}

message OwnedVideoSource {
  required FfiOwnedHandle handle = 1;
  required VideoSourceInfo info = 2;
}

View File

@@ -101,7 +101,11 @@ KirigamiComponents.ConvergentContextMenu {
}
enabled: Controller.csSupported
}
// QQC2.MenuItem {
// text: i18n("Show livekit logs")
// icon.name: "dialog-xml-editor"
// onTriggered: livekitLogViewerComponent.createObject(applicationWindow().overlay)
// }
QQC2.Action {
text: i18n("Logout")
icon.name: "im-kick-user"
@@ -111,4 +115,9 @@ KirigamiComponents.ConvergentContextMenu {
readonly property Component confirmLogoutDialogComponent: ConfirmLogoutDialog {
connection: root.connection
}
// Component {
// id: livekitLogViewerComponent
// LivekitLogViewer {}
// }
}

65
src/qml/CallPage.qml Normal file
View File

@@ -0,0 +1,65 @@
import QtQuick
import QtQuick.Controls
import QtQuick.Layouts
import QtMultimedia
import org.kde.kirigami as Kirigami
import org.kde.neochat
// Page shown during an active call: local camera preview ("You") next to the
// remote participant's video ("Them").
Kirigami.Page {
    id: callPage

    title: i18nc("@title", "Call")

    RowLayout {
        anchors.fill: parent

        VideoOutput {
            id: viewFinder
            // Layout.fillWidth/fillHeight are booleans; the original assigned
            // `parent.width / 2` to fillWidth, which only coerced to true.
            // Two fillWidth items split the row's width equally.
            Layout.fillWidth: true
            Layout.fillHeight: true

            Label {
                // NOTE(review): not wrapped in i18n — confirm whether intended
                text: "You"
            }
        }

        VideoOutput {
            id: otherViewFinder
            Layout.fillWidth: true
            Layout.fillHeight: true

            Label {
                text: "Them"
            }
        }
    }

    // Feeds the remote participant's LiveKit frames into the right-hand view.
    LivekitVideoSink {
        videoSink: otherViewFinder.videoSink
    }

    // Start capturing as soon as the page exists.
    Component.onCompleted: camera.start()

    // Once the call is connected, hand our camera frames to the controller
    // and enable the camera track.
    Connections {
        target: CallController
        function onConnected(): void {
            CallController.setCameraVideoSink(viewFinder.videoSink)
            CallController.toggleCamera()
        }
    }

    // Local capture pipeline: camera -> viewFinder preview.
    CaptureSession {
        camera: Camera {
            id: camera
        }
        imageCapture: ImageCapture {
            id: imageCapture
        }
        videoOutput: viewFinder
    }
}

View File

@@ -0,0 +1,45 @@
// SPDX-FileCopyrightText: 2024 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
import QtQuick
import QtQuick.Controls as QQC2
import QtQuick.Layouts
import org.kde.kirigami as Kirigami
import org.kde.kirigamiaddons.components as Components
import org.kde.neochat
// Dialog shown when a call starts in the room. MediaManager can close it
// remotely (e.g. when the call was handled elsewhere) via
// closeIncomingCallDialog().
Kirigami.Dialog {
    id: root

    title: i18nc("@title", "Incoming call")

    width: Kirigami.Units.gridUnit * 16
    height: Kirigami.Units.gridUnit * 8

    // No default dialog buttons; the accept/decline pair below is the only UI.
    standardButtons: QQC2.Dialog.NoButton

    Connections {
        target: MediaManager
        function onCloseIncomingCallDialog() {
            root.close()
        }
    }

    contentItem: ColumnLayout {
        Components.DoubleFloatingButton {
            anchors.centerIn: parent

            // NOTE(review): neither action has an onTriggered handler yet, so
            // accepting/declining currently does nothing from this dialog.
            leadingAction: Kirigami.Action {
                icon.name: "call-start"
                text: i18nc("@action:button", "Accept Call")
                tooltip: "" // intentionally empty (was: text)
            }
            trailingAction: Kirigami.Action {
                icon.name: "call-stop"
                text: i18nc("@action:button", "Decline Call")
                tooltip: "" // intentionally empty (was: text)
            }
        }
    }
}

View File

@@ -0,0 +1,36 @@
// SPDX-FileCopyrightText: 2023 Tobias Fella <tobias.fella@kde.org>
// SPDX-License-Identifier: LGPL-2.0-or-later
import QtQuick
import QtQuick.Controls as QQC2
import org.kde.kirigami as Kirigami
import org.kde.neochat
// Stand-alone window showing LiveKit's log output as a scrollable table,
// one row per log message from LivekitLogModel.
Kirigami.ApplicationWindow {
    id: root

    title: i18nc("@title", "Livekit logs")

    pageStack.initialPage: Kirigami.ScrollablePage {
        title: i18nc("@title", "Livekit logs")

        TableView {
            id: messageList
            width: root.width
            // C++ model exposing the "message" role used by the delegate.
            model: LivekitLogModel
            alternatingRows: true

            delegate: QQC2.ItemDelegate {
                id: messageDelegate

                // Role provided by LivekitLogModel for each row.
                required property string message

                width: parent.width

                contentItem: QQC2.Label {
                    text: messageDelegate.message
                    wrapMode: QQC2.Label.Wrap
                }
            }
        }
    }
}

View File

@@ -4,6 +4,7 @@
import QtQuick
import QtQuick.Controls as QQC2
import QtMultimedia
import org.kde.kirigami as Kirigami
import org.kde.config as KConfig
@@ -30,6 +31,18 @@ Kirigami.ApplicationWindow {
}
}
Connections {
target: CallController
function onCallStarted() {
root.pageStack.pushDialogLayer(callPageComponent)
}
}
Component {
id: callPageComponent
CallPage {}
}
minimumWidth: Kirigami.Units.gridUnit * 20
minimumHeight: Kirigami.Units.gridUnit * 15
@@ -197,6 +210,7 @@ Kirigami.ApplicationWindow {
visible = true;
}
}
Connections {
target: NeoChatConfig
function onBlurChanged() {
@@ -347,4 +361,15 @@ Kirigami.ApplicationWindow {
initialized = true;
}
Connections {
target: MediaManager
function onShowIncomingCallDialog(): void {
incomingCallDialog.createObject(applicationWindow().overlay).open();
}
}
Component {
id: incomingCallDialog
IncomingCallDialog {}
}
}