Ring Daemon
Loading...
Searching...
No Matches
conference.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2004-2026 Savoir-faire Linux Inc.
3 *
4 * This program is free software: you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License as published by
6 * the Free Software Foundation, either version 3 of the License, or
7 * (at your option) any later version.
8 *
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
13 *
14 * You should have received a copy of the GNU General Public License
15 * along with this program. If not, see <https://www.gnu.org/licenses/>.
16 */
17
18#include "conference.h"
19#include "manager.h"
20#include "jamidht/jamiaccount.h"
22#include "string_utils.h"
23#include "sip/siptransport.h"
24
25#include "client/videomanager.h"
26#include "tracepoint.h"
27#ifdef ENABLE_VIDEO
28#include "call.h"
29#include "video/video_mixer.h"
30#endif
31
32#ifdef ENABLE_PLUGIN
34#endif
35
36#include "call_factory.h"
37
38#include "logger.h"
39#include "jami/media_const.h"
41#include "sip/sipcall.h"
42#include "json_utils.h"
43
44#include <opendht/thread_pool.h>
45
46using namespace std::literals;
47
48namespace jami {
49
// Construct a conference bound to `account`. A fresh id is generated via the
// call factory when `confId` is empty; otherwise the supplied id is reused.
// NOTE(review): this extraction is missing a few original lines — e.g. the
// `#ifdef ENABLE_VIDEO` that matches the `#endif` below — consult the full file.
Conference::Conference(const std::shared_ptr<Account>& account, const std::string& confId)
    : id_(confId.empty() ? Manager::instance().callFactory.getNewCallID() : confId)
    , account_(account)
    , videoEnabled_(account->isVideoEnabled())
#endif
{
    JAMI_LOG("[conf:{}] Creating conference", id_);
    // Record creation time; the shutdown path reports getDuration() from it.
    duration_start_ = clock::now();

#ifdef ENABLE_VIDEO
    // NOTE(review): video setup line(s) elided in this extraction.
#endif
    registerProtocolHandlers();

}
67
68#ifdef ENABLE_VIDEO
69void
70Conference::setupVideoMixer()
71{
72 videoMixer_ = std::make_shared<video::VideoMixer>(id_);
73 videoMixer_->setOnSourcesUpdated([this](std::vector<video::SourceInfo>&& infos) {
74 runOnMainThread([w = weak(), infos = std::move(infos)]() mutable {
75 if (auto shared = w.lock())
76 shared->onVideoSourcesUpdated(std::move(infos));
77 });
78 });
79
80 auto conf_res = split_string_to_unsigned(jami::Manager::instance().videoPreferences.getConferenceResolution(), 'x');
81 if (conf_res.size() == 2u) {
82#if defined(__APPLE__) && TARGET_OS_MAC
83 videoMixer_->setParameters(static_cast<int>(conf_res[0]), static_cast<int>(conf_res[1]), AV_PIX_FMT_NV12);
84#else
85 videoMixer_->setParameters(static_cast<int>(conf_res[0]), static_cast<int>(conf_res[1]));
86#endif
87 } else {
88 JAMI_ERROR("[conf:{}] Conference resolution is invalid", id_);
89 }
90}
91
void
Conference::onVideoSourcesUpdated(const std::vector<video::SourceInfo>& infos)
{
    // Rebuild the layout info published to clients whenever the mixer's
    // source list changes. Requires a live JamiAccount.
    auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock());
    if (!acc)
        return;

    // Start from the current dimensions/layout so they are preserved while
    // the participant list is rebuilt.
    ConfInfo newInfo;
    {
        std::lock_guard lock(confInfoMutex_);
        newInfo.w = confInfo_.w;
        newInfo.h = confInfo_.h;
        newInfo.layout = confInfo_.layout;
    }

    bool hostAdded = false;
    for (const auto& info : infos) {
        if (!info.callId.empty()) {
            // NOTE(review): remote-source handling elided in this extraction
            // (presumably createParticipantInfoFromRemoteSource — confirm).
        } else {
            // NOTE(review): local-source handling elided in this extraction
            // (presumably createParticipantInfoFromLocalSource, which can set
            // `hostAdded` — confirm against the full file).
        }
    }

    // The mixer reports the actual mixed-frame dimensions.
    if (auto videoMixer = videoMixer_) {
        newInfo.h = videoMixer->getHeight();
        newInfo.w = videoMixer->getWidth();
    }

    // Ensure the local host always appears in the layout even without a
    // video source of its own.
    if (!hostAdded) {
        ParticipantInfo pi;
        pi.videoMuted = true;
        pi.audioLocalMuted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
        pi.isModerator = true;
        newInfo.emplace_back(pi);
    }

    updateConferenceInfo(std::move(newInfo));
}
131
132ParticipantInfo
133Conference::createParticipantInfoFromRemoteSource(const video::SourceInfo& info)
134{
135 ParticipantInfo participant;
136 participant.x = info.x;
137 participant.y = info.y;
138 participant.w = info.w;
139 participant.h = info.h;
140 participant.videoMuted = !info.hasVideo;
141
142 std::string callId = info.callId;
143 if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
144 participant.uri = call->getPeerNumber();
145 participant.audioLocalMuted = call->isPeerMuted();
146 participant.recording = call->isPeerRecording();
147 if (auto* transport = call->getTransport())
148 participant.device = transport->deviceId();
149 }
150
151 std::string_view peerId = string_remove_suffix(participant.uri, '@');
152 participant.isModerator = isModerator(peerId);
153 participant.handRaised = isHandRaised(participant.device);
154 participant.audioModeratorMuted = isMuted(callId);
155 participant.voiceActivity = isVoiceActive(info.streamId);
156 participant.sinkId = info.streamId;
157
158 if (auto videoMixer = videoMixer_)
159 participant.active = videoMixer->verifyActive(info.streamId);
160
161 return participant;
162}
163
// Build the layout entry for a locally mixed source (host devices and
// locally-attached streams). May mark the host as present via `hostAdded`.
ParticipantInfo
Conference::createParticipantInfoFromLocalSource(const video::SourceInfo& info,
                                                 const std::shared_ptr<JamiAccount>& acc,
                                                 bool& hostAdded)
{
    ParticipantInfo participant;
    participant.x = info.x;
    participant.y = info.y;
    participant.w = info.w;
    participant.h = info.h;
    participant.videoMuted = !info.hasVideo;

    auto streamInfo = videoMixer_->streamInfo(info.source);
    std::string streamId = streamInfo.streamId;

    if (!streamId.empty()) {
        // Retrieve calls participants
        // TODO: this is a first version, we assume that the peer is not
        // a master of a conference and there is only one remote
        // In the future, we should retrieve confInfo from the call
        // To merge layout information
        participant.audioModeratorMuted = isMuted(streamId);
        if (auto videoMixer = videoMixer_)
            participant.active = videoMixer->verifyActive(streamId);
        if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(streamInfo.callId))) {
            participant.uri = call->getPeerNumber();
            participant.audioLocalMuted = call->isPeerMuted();
            participant.recording = call->isPeerRecording();
            if (auto* transport = call->getTransport())
                participant.device = transport->deviceId();
        }
    } else {
        // NOTE(review): one line is elided here in this extraction (internal 196).
        if (auto videoMixer = videoMixer_)
            participant.active = videoMixer->verifyActive(streamId);
    }

    std::string_view peerId = string_remove_suffix(participant.uri, '@');
    participant.isModerator = isModerator(peerId);

    // Check if this is the local host
    if (participant.uri.empty() && !hostAdded) {
        hostAdded = true;
        participant.device = acc->currentDeviceId();
        // NOTE(review): one host-field assignment is elided here (internal 208).
        participant.recording = isRecording();
    }

    participant.handRaised = isHandRaised(participant.device);
    participant.voiceActivity = isVoiceActive(streamId);
    participant.sinkId = std::move(streamId);

    return participant;
}
218#endif
219
// Wire the conference-protocol parser callbacks to the corresponding
// moderation/layout actions on this conference.
void
Conference::registerProtocolHandlers()
{
    parser_.onVersion([&](uint32_t) {}); // TODO
    // Only moderators may perform privileged actions.
    parser_.onCheckAuthorization([&](std::string_view peerId) { return isModerator(peerId); });
    parser_.onHangupParticipant(
        [&](const auto& accountUri, const auto& deviceId) { hangupParticipant(accountUri, deviceId); });
    parser_.onRaiseHand([&](const auto& deviceId, bool state) { setHandRaised(deviceId, state); });
    parser_.onSetActiveStream([&](const auto& streamId, bool state) { setActiveStream(streamId, state); });
    parser_.onMuteStreamAudio([&](const auto& accountUri, const auto& deviceId, const auto& streamId, bool state) {
        muteStream(accountUri, deviceId, streamId, state);
    });
    parser_.onSetLayout([&](int layout) { setLayout(layout); });

    // Version 0, deprecated
    parser_.onKickParticipant([&](const auto& participantId) { hangupParticipant(participantId); });
    // NOTE(review): one handler registration is elided in this extraction (internal 236).
    parser_.onMuteParticipant([&](const auto& participantId, bool state) { muteParticipant(participantId, state); });
    parser_.onRaiseHandUri([&](const auto& uri, bool state) {
        if (auto call = std::dynamic_pointer_cast<SIPCall>(getCallFromPeerID(uri)))
            if (auto* transport = call->getTransport())
                setHandRaised(std::string(transport->deviceId()), state);
    });

    parser_.onVoiceActivity([&](const auto& streamId, bool state) { setVoiceActivity(streamId, state); });
}
246
// NOTE(review): the destructor signature (Conference::~Conference()) is elided
// in this extraction; the body below tears the conference down: resets each
// remaining call, drains sinks, clears plugin AV subjects and fires shutdownCb_.
{
    JAMI_LOG("[conf:{}] Destroying conference", id_);

#ifdef ENABLE_VIDEO
    // NOTE(review): the line obtaining `videoManager` is elided here (internal 252).
    auto defaultDevice = videoManager ? videoManager->videoDeviceMonitor.getMRLForDefaultDevice() : std::string {};
    foreachCall([&](const auto& call) {
        call->exitConference();
        // Reset distant callInfo
        call->resetConfInfo();
        // Trigger the SIP negotiation to update the resolution for the remaining call
        // ideally this sould be done without renegotiation
        call->switchInput(defaultDevice);

        // Continue the recording for the call if the conference was recorded
        if (isRecording()) {
            JAMI_DEBUG("[conf:{}] Stopping recording", getConfId());
            // NOTE(review): one line is elided here (internal 265), presumably
            // the call stopping the conference-level recording — confirm.
            if (not call->isRecording()) {
                JAMI_DEBUG("[call:{}] Starting recording (conference was recorded)", call->getCallId());
                call->toggleRecording();
            }
        }
        // Notify that the remaining peer is still recording after conference
        if (call->isPeerRecording())
            call->peerRecording(true);
    });
    // Detach and stop every conference sink before the mixer goes away.
    if (videoMixer_) {
        auto& sink = videoMixer_->getSink();
        for (auto it = confSinksMap_.begin(); it != confSinksMap_.end();) {
            sink->detach(it->second.get());
            it->second->stop();
            it = confSinksMap_.erase(it);
        }
    }
#endif // ENABLE_VIDEO
#ifdef ENABLE_PLUGIN
    {
        std::lock_guard lk(avStreamsMtx_);
        jami::Manager::instance().getJamiPluginManager().getCallServicesManager().clearCallHandlerMaps(getConfId());
        Manager::instance().getJamiPluginManager().getCallServicesManager().clearAVSubject(getConfId());
        confAVStreams.clear();
    }
#endif // ENABLE_PLUGIN
    // Report the total conference duration (seconds as int) to the owner.
    if (shutdownCb_)
        shutdownCb_(static_cast<int>(getDuration().count()));
    // do not propagate sharing from conf host to calls
    closeMediaPlayer(mediaPlayerId_);
    jami_tracepoint(conference_end, id_.c_str());
}
298
// NOTE(review): the signature lines are elided in this extraction — this is
// the body of the state accessor returning `confState_`; confirm.
{
    return confState_;
}
304
void
// NOTE(review): the signature line is elided in this extraction — per the body
// this takes a `state` parameter (Conference::setState(State state)); confirm.
{
    // Log the transition before mutating so both old and new state appear.
    JAMI_DEBUG("[conf:{}] State change: {} -> {}", id_, getStateStr(), getStateStr(state));

    confState_ = state;
}
312
// Rebuild `hostSources_` with the host's default audio (and, when enabled,
// video) media attributes.
void
Conference::initSourcesForHost()
{
    hostSources_.clear();
    // Setup local audio source
    // NOTE(review): the declaration of `audioAttr` (used below) is elided (internal 318).
    if (confState_ == State::ACTIVE_ATTACHED) {
        // NOTE(review): the audio-attribute initialization is elided (internal 320).
    }

    JAMI_DEBUG("[conf:{}] Setting local host audio source: {}", id_, audioAttr.toString());
    hostSources_.emplace_back(audioAttr);

#ifdef ENABLE_VIDEO
    if (isVideoEnabled()) {
        MediaAttribute videoAttr;
        // Setup local video source
        if (confState_ == State::ACTIVE_ATTACHED) {
            // NOTE(review): the opening of the MediaAttribute construction is elided (internal 331).
                                      false,
                                      false,
                                      true,
                                      Manager::instance().getVideoManager()->videoDeviceMonitor.getMRLForDefaultDevice(),
            // NOTE(review): the trailing constructor argument is elided (internal 336).
        }
        JAMI_DEBUG("[conf:{}] Setting local host video source: {}", id_, videoAttr.toString());
        hostSources_.emplace_back(videoAttr);
    }
#endif

    // NOTE(review): a final statement is elided here (internal 343).
}
345
346void
352
353std::vector<std::map<std::string, std::string>>
358
359#ifdef ENABLE_PLUGIN
// Create the plugin AV-stream subjects for the conference's mixed audio
// (preview/received) and, when available, the mixed video and its preview.
void
Conference::createConfAVStreams()
{
    std::string accountId = getAccountId();

    // Extract the raw AVFrame from an audio MediaFrame for plugin consumption.
    auto audioMap = [](const std::shared_ptr<jami::MediaFrame>& m) -> AVFrame* {
        return std::static_pointer_cast<AudioFrame>(m)->pointer();
    };

    // Preview and Received
    if ((audioMixer_ = jami::getAudioInput(getConfId()))) {
        auto audioSubject = std::make_shared<MediaStreamSubject>(audioMap);
        // NOTE(review): the StreamData setup and createConfAVStream call(s) are
        // elided in this extraction (internal 372-375).
    }

#ifdef ENABLE_VIDEO

    if (videoMixer_) {
        // Review
        auto receiveSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
        // NOTE(review): the received-video StreamData/createConfAVStream lines
        // are elided (internal 383-384).

        // Preview
        if (auto videoPreview = videoMixer_->getVideoLocal()) {
            auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
            // NOTE(review): the preview StreamData/createConfAVStream lines are
            // elided (internal 389-390).
        }
    }
#endif // ENABLE_VIDEO
}
395
// Register one AV subject with the plugin manager, keyed by stream id, media
// type and direction. Existing subjects are kept unless `force` is set.
void
Conference::createConfAVStream(const StreamData& StreamData,
                               // NOTE(review): one parameter line is elided here (internal 398);
                               // a stream-source parameter used below — confirm.
                               const std::shared_ptr<MediaStreamSubject>& mediaStreamSubject,
                               bool force)
{
    std::lock_guard lk(avStreamsMtx_);
    // Composite key: id + media type + direction.
    const std::string AVStreamId = StreamData.id + std::to_string(static_cast<int>(StreamData.type))
                                   + std::to_string(StreamData.direction);
    auto it = confAVStreams.find(AVStreamId);
    if (!force && it != confAVStreams.end())
        return;

    // NOTE(review): lines elided here (internal 409-410), presumably replacing
    // any stale entry in `confAVStreams` — confirm.
    streamSource.attachPriorityObserver(mediaStreamSubject);
    jami::Manager::instance().getJamiPluginManager().getCallServicesManager().createAVSubject(StreamData,
    // NOTE(review): the call's remaining argument(s)/closing are elided (internal 413).
}
415#endif // ENABLE_PLUGIN
416
void
// NOTE(review): the signature line is elided in this extraction — per the body
// this takes (type, muted), i.e. setLocalHostMuteState(MediaType, bool); confirm.
{
    // Apply the mute flag to every host source of the given media type.
    for (auto& source : hostSources_)
        if (source.type_ == type) {
            source.muted_ = muted;
        }
}
425
// Report the mute state of the host's primary source of a given media type.
// Defaults to "muted" (true) in every ambiguous situation.
bool
// NOTE(review): the signature line is elided in this extraction (internal 427).
{
    // NOTE(review): the opening guard `if (...)` is elided here (internal 429).
        // Assume muted if not attached.
        return true;
    }

    // NOTE(review): an `if (...)` rejecting unsupported media types is elided
    // here (internal 434).
        JAMI_ERROR("Unsupported media type");
        return true;
    }

    // Check only the primary (first) source of the given type.
    // Secondary sources (e.g. additional audio streams) being muted
    // should not affect the overall mute state of the host.
    for (const auto& source : hostSources_) {
        if (source.type_ == type) {
            if (source.type_ == MediaType::MEDIA_NONE) {
                JAMI_WARNING("The host source for {} is not set. The mute state is meaningless",
                             source.mediaTypeToString(source.type_));
                return true;
            }
            return source.muted_;
        }
    }
    // No source of this type found so assume muted.
    return true;
}
455
// When a call joins the conference, fold its per-media mute states into the
// host's, then point the call's media at the conference mixer.
void
Conference::takeOverMediaSourceControl(const std::string& callId)
{
    auto call = getCall(callId);
    if (not call) {
        JAMI_ERROR("[conf:{}] No call matches participant {}", id_, callId);
        return;
    }

    auto account = call->getAccount().lock();
    if (not account) {
        JAMI_ERROR("[conf:{}] No account detected for call {}", id_, callId);
        return;
    }

    auto mediaList = call->getMediaAttributeList();

    // NOTE(review): the declaration of `mediaTypeList` is elided (internal 473).

    for (auto mediaType : mediaTypeList) {
        // Try to find a media with a valid source type
        auto check = [mediaType](auto const& mediaAttr) {
            return (mediaAttr.type_ == mediaType);
        };

        auto iter = std::find_if(mediaList.begin(), mediaList.end(), check);

        if (iter == mediaList.end()) {
            // Nothing to do if the call does not have a stream with
            // the requested media.
            JAMI_DEBUG("[conf:{}] Call {} does not have an active {} media source",
                       id_,
                       callId,
            // NOTE(review): the final log argument is elided (internal 489).
            continue;
        }

        // NOTE(review): an opening `if (...)` is elided here (internal 493);
        // the extra closing brace below matches it.
        // To mute the local source, all the sources of the participating
        // calls must be muted. If it's the first participant, just use
        // its mute state.
        if (subCalls_.size() == 1) {
            setLocalHostMuteState(iter->type_, iter->muted_);
        } else {
            setLocalHostMuteState(iter->type_, iter->muted_ or isMediaSourceMuted(iter->type_));
        }
    }
    }

    // Update the media states in the newly added call.
    call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));

    // Notify the client
    for (auto mediaType : mediaTypeList) {
        if (mediaType == MediaType::MEDIA_AUDIO) {
            // NOTE(review): the computation of `muted` is elided (internal 511).
            JAMI_DEBUG("[conf:{}] Taking over audio control from call {} - current state: {}",
                       id_,
                       callId,
                       muted ? "muted" : "unmuted");
            // NOTE(review): a client-notification line is elided (internal 516).
        } else {
            // NOTE(review): the computation of `muted` is elided (internal 518).
            JAMI_DEBUG("[conf:{}] Taking over video control from call {} - current state: {}",
                       id_,
                       callId,
                       muted ? "muted" : "unmuted");
            // NOTE(review): a client-notification line is elided (internal 523).
        }
    }
}
527
// Apply a host media-change request: handle file-sharing MRLs, update host
// audio/video sources and the mixer inputs, and rebind host audio.
// Returns false when the conference is not in attached mode.
bool
Conference::requestMediaChange(const std::vector<libjami::MediaMap>& mediaList)
{
    // NOTE(review): the attached-state guard `if (...)` is elided (internal 531).
        JAMI_ERROR("[conf {}] Request media change can be performed only in attached mode", getConfId());
        return false;
    }

    JAMI_DEBUG("[conf:{}] Processing media change request", getConfId());

    // NOTE(review): the construction of `mediaAttrList` from `mediaList` is
    // elided (internal 539).

#ifdef ENABLE_VIDEO
    // Check if the host previously had video
    // NOTE(review): the previous-video computation is elided (internal 542-543).
    // Check if the host will have video after this change
    bool hostWillHaveVideo = false;
    for (const auto& media : mediaAttrList) {
        if (media.type_ == MediaType::MEDIA_VIDEO && media.enabled_ && !media.muted_) {
            hostWillHaveVideo = true;
            break;
        }
    }
#endif

    // Detect a shared-file source (scheme-prefixed MRL) and open its player.
    bool hasFileSharing {false};
    for (const auto& media : mediaAttrList) {
        if (!media.enabled_ || media.sourceUri_.empty())
            continue;

        // Supported MRL schemes
        static const std::string sep = libjami::Media::VideoProtocolPrefix::SEPARATOR;

        const auto pos = media.sourceUri_.find(sep);
        if (pos == std::string::npos)
            continue;

        const auto prefix = media.sourceUri_.substr(0, pos);
        if ((pos + sep.size()) >= media.sourceUri_.size())
            continue;

        // NOTE(review): the scheme-matching `if (...)` is elided (internal 570).
            hasFileSharing = true;
            mediaPlayerId_ = media.sourceUri_;
            createMediaPlayer(mediaPlayerId_);
        }
    }

    if (!hasFileSharing) {
        closeMediaPlayer(mediaPlayerId_);
        mediaPlayerId_ = "";
    }

    for (auto const& mediaAttr : mediaAttrList) {
        JAMI_DEBUG("[conf:{}] Requested media: {}", getConfId(), mediaAttr.toString(true));
    }

    std::vector<std::string> newVideoInputs;
    for (auto const& mediaAttr : mediaAttrList) {
        // Find media
        auto oldIdx = std::find_if(hostSources_.begin(), hostSources_.end(), [&](const auto& oldAttr) {
            return oldAttr.label_ == mediaAttr.label_;
        });
        // If video, add to newVideoInputs
#ifdef ENABLE_VIDEO
        if (mediaAttr.type_ == MediaType::MEDIA_VIDEO) {
            auto srcUri = mediaAttr.sourceUri_;
            // If no sourceUri, use the default video device
            if (srcUri.empty()) {
                if (auto* vm = Manager::instance().getVideoManager())
                    srcUri = vm->videoDeviceMonitor.getMRLForDefaultDevice();
                else
                    continue;
            }
            if (!mediaAttr.muted_)
                newVideoInputs.emplace_back(std::move(srcUri));
        } else {
#endif
            hostAudioInputs_[mediaAttr.label_] = jami::getAudioInput(mediaAttr.label_);
#ifdef ENABLE_VIDEO
        }
#endif
        if (oldIdx != hostSources_.end()) {
            // Check if muted status changes
            if (mediaAttr.muted_ != oldIdx->muted_) {
                // Secondary audio sources (e.g. screenshare audio) must be
                // handled per-stream. The global muteLocalHost() would
                // mute/unmute ALL audio sources (including the microphone),
                // so we skip it here and let bindHostAudio() apply the
                // per-source mute state after hostSources_ is updated.
                // NOTE(review): the secondary-audio condition `if (...)` is
                // elided (internal 619).
                    JAMI_DEBUG("[conf:{}] Secondary audio mute handled per-stream", getConfId());
                } else {
                    // NOTE(review): the else-branch body (global mute path) is
                    // elided (internal 622-625).
                }
            }
        }
    }

#ifdef ENABLE_VIDEO
    // Update the mixer: no video inputs means the host is audio-only.
    if (videoMixer_) {
        if (newVideoInputs.empty()) {
            videoMixer_->addAudioOnlySource("", sip_utils::streamId("", sip_utils::DEFAULT_AUDIO_STREAMID));
        } else {
            videoMixer_->switchInputs(newVideoInputs);
        }
    }
#endif
    hostSources_ = mediaAttrList; // New medias
    if (!isMuted("host"sv) && !isMediaSourceMuted(MediaType::MEDIA_AUDIO))
        bindHostAudio();

#ifdef ENABLE_VIDEO
    // If the host is adding video (didn't have video before, has video now),
    // we need to ensure all subcalls also have video negotiated so they can
    // receive the mixed video stream.
    // NOTE(review): the condition and the subcall-negotiation call are elided
    // (internal 648 and 650).
        JAMI_DEBUG("[conf:{}] Host added video, negotiating video with all subcalls", getConfId());
    }
#endif

    // Inform the client about the media negotiation status.
    // NOTE(review): the notification line is elided (internal 655).
    return true;
}
658
// Answer a participant's media-change request: accept the new media, rebind
// the call's audio into the mixing mesh, and propagate video negotiation.
void
Conference::handleMediaChangeRequest(const std::shared_ptr<Call>& call,
                                     const std::vector<libjami::MediaMap>& remoteMediaList)
{
    JAMI_DEBUG("[conf:{}] Answering media change request from call {}", getConfId(), call->getCallId());
    auto currentMediaList = hostSources_;

#ifdef ENABLE_VIDEO
    // Check if the participant previously had video
    auto previousMediaList = call->getMediaAttributeList();
    // NOTE(review): the previous-video computation is elided (internal 669).

    // If the new media list has video, remove the participant from audioonlylist.
    // NOTE(review): internal 672-675 are elided (will-have-video computation and
    // the opening of the log statement whose arguments follow).
        "[conf:{}] [call:{}] remoteHasVideo={}, removing from audio-only sources BEFORE media negotiation completes",
        getConfId(),
        call->getCallId(),
    // NOTE(review): the final log argument/closing is elided (internal 679).
    if (videoMixer_ && participantWillHaveVideo) {
        auto callId = call->getCallId();
        // NOTE(review): the `audioStreamId` lookup is elided (internal 682).
        JAMI_WARNING("[conf:{}] [call:{}] Removing audio-only source '{}' - participant may briefly disappear from "
                     "layout until video is attached",
                     getConfId(),
                     callId,
        // NOTE(review): the final log argument is elided (internal 687).
        videoMixer_->removeAudioOnlySource(callId, audioStreamId);
    }
#endif

    // NOTE(review): the construction of the mutable `remoteList` is elided (internal 692).
    for (auto it = remoteList.begin(); it != remoteList.end();) {
        // NOTE(review): the erase condition `if (...)` is elided (internal 694-695).
            it = remoteList.erase(it);
        } else {
            ++it;
        }
    }
    // Create minimum media list (ignore muted and disabled medias)
    std::vector<libjami::MediaMap> newMediaList;
    newMediaList.reserve(remoteMediaList.size());
    for (auto const& media : currentMediaList) {
        if (media.enabled_ and not media.muted_)
            // NOTE(review): the emplace of the converted media is elided (internal 706).
    }
    for (auto idx = newMediaList.size(); idx < remoteMediaList.size(); idx++)
        newMediaList.emplace_back(remoteMediaList[idx]);

    // NOTE:
    // Since this is a conference, newly added media will be also
    // accepted.
    // This also means that if original call was an audio-only call,
    // the local camera will be enabled, unless the video is disabled
    // in the account settings.
    call->answerMediaChangeRequest(newMediaList);
    call->enterConference(shared_from_this());

    // Rebind audio after media renegotiation so that any newly added
    // audio streams are wired into the conference mixing mesh.
    unbindSubCallAudio(call->getCallId());
    bindSubCallAudio(call->getCallId());

#ifdef ENABLE_VIDEO
    // If a participant is adding video (didn't have it before, has it now),
    // we need to make sure all other subcalls also have video negotiated so they
    // can receive the mixed video stream that now includes the new participant's video
    // NOTE(review): the guarding condition `if (...)` is elided (internal 729).
        JAMI_DEBUG("[conf:{}] [call:{}] Participant added video, negotiating video with other subcalls",
                   getConfId(),
                   call->getCallId());
        negotiateVideoWithSubcalls(call->getCallId());
    }
#endif
}
737
// Add a call to the conference: register it as a subcall, inherit its mute
// state, seed moderators, handle audio-only layout, and sync recording.
void
Conference::addSubCall(const std::string& callId)
{
    JAMI_DEBUG("[conf:{}] Adding call {}", id_, callId);

    jami_tracepoint(conference_add_participant, id_.c_str(), callId.c_str());

    {
        std::lock_guard lk(subcallsMtx_);
        // Already a member: nothing more to do.
        if (!subCalls_.insert(callId).second)
            return;
    }

    if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
        // Check if participant was muted before conference
        if (call->isPeerMuted())
            participantsMuted_.emplace(call->getCallId());

        // NOTE:
        // When a call joins a conference, the media source of the call
        // will be set to the output of the conference mixer.
        takeOverMediaSourceControl(callId);
        auto w = call->getAccount();
        auto account = w.lock();
        if (account) {
            // Add defined moderators for the account link to the call
            for (const auto& mod : account->getDefaultModerators()) {
                moderators_.emplace(mod);
            }

            // Check for localModeratorsEnabled preference
            if (account->isLocalModeratorsEnabled() && not localModAdded_) {
                // NOTE(review): the line fetching the local `accounts` list is
                // elided (internal 770).
                for (const auto& account : accounts) {
                    moderators_.emplace(account->getUsername());
                }
                localModAdded_ = true;
            }

            // Check for allModeratorEnabled preference
            if (account->isAllModerators())
                moderators_.emplace(getRemoteId(call));
        }
#ifdef ENABLE_VIDEO
        // In conference, if a participant joins with an audio only
        // call, it must be listed in the audioonlylist.
        auto mediaList = call->getMediaAttributeList();
        if (call->peerUri().find("swarm:") != 0) { // We're hosting so it's already ourself.
            // NOTE(review): a nested condition is elided here (internal 786);
            // the extra closing brace below matches it.
            // Normally not called, as video stream is added for audio-only answers.
            // The audio-only source will be added in VideoRtpSession startReceiver,
            // after ICE negotiation, when peers can properly create video sinks.
            videoMixer_->addAudioOnlySource(call->getCallId(),
                                            sip_utils::streamId(call->getCallId(),
            // NOTE(review): the stream-id argument tail is elided (internal 792).
            }
        }
        call->enterConference(shared_from_this());
        // Continue the recording for the conference if one participant was recording
        if (call->isRecording()) {
            JAMI_DEBUG("[call:{}] Stopping recording", call->getCallId());
            call->toggleRecording();
            if (not this->isRecording()) {
                JAMI_DEBUG("[conf:{}] Starting recording (participant was recording)", getConfId());
                this->toggleRecording();
            }
        }
        bindSubCallAudio(callId);
#endif // ENABLE_VIDEO
    } else
        JAMI_ERROR("[conf:{}] No call associated with participant {}", id_, callId);
#ifdef ENABLE_PLUGIN
    // NOTE(review): the plugin AV-stream creation call is elided (internal 811).
#endif
}
813
814void
815Conference::removeSubCall(const std::string& callId)
816{
817 JAMI_DEBUG("[conf:{}] Removing call {}", id_, callId);
818 {
819 std::lock_guard lk(subcallsMtx_);
820 if (!subCalls_.erase(callId))
821 return;
822 }
823
824 clearParticipantData(callId);
825
826 if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
827#ifdef ENABLE_VIDEO
828 if (videoMixer_) {
829 for (auto const& rtpSession : call->getRtpSessionList()) {
830 if (rtpSession->getMediaType() == MediaType::MEDIA_AUDIO)
831 videoMixer_->removeAudioOnlySource(callId, rtpSession->streamId());
832 if (videoMixer_->verifyActive(rtpSession->streamId()))
833 videoMixer_->resetActiveStream();
834 }
835 }
836#endif // ENABLE_VIDEO
837 unbindSubCallAudio(callId);
838 call->exitConference();
839 if (call->isPeerRecording())
840 call->peerRecording(false);
841 }
842}
843
#ifdef ENABLE_VIDEO
// Make sure every subcall (except `excludeCallId`) has an enabled, unmuted
// video stream so it can receive the mixed video; triggers renegotiation
// per call when needed.
void
Conference::negotiateVideoWithSubcalls(const std::string& excludeCallId)
{
    if (!isVideoEnabled()) {
        JAMI_DEBUG("[conf:{}] Video is disabled in account, skipping subcall video negotiation", id_);
        return;
    }

    JAMI_DEBUG("[conf:{}] Negotiating video with subcalls (excluding: {})",
               id_,
               excludeCallId.empty() ? "none" : excludeCallId);

    for (const auto& callId : getSubCalls()) {
        if (callId == excludeCallId) {
            continue;
        }

        auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId));
        if (!call) {
            continue;
        }

        auto mediaList = call->getMediaAttributeList();
        auto videoIt = std::find_if(mediaList.begin(), mediaList.end(), [](const auto& media) {
            return media.type_ == MediaType::MEDIA_VIDEO;
        });

        if (videoIt == mediaList.end()) {
            // No video media at all: append one and renegotiate.
            JAMI_DEBUG("[conf:{}] [call:{}] Call does not have video, triggering renegotiation to add video",
                       id_,
                       callId);

            MediaAttribute videoAttr;
            // NOTE(review): the line setting the attribute's media type is
            // elided (internal 878).
            videoAttr.enabled_ = true;
            videoAttr.muted_ = false;
            // NOTE(review): one attribute line is elided here (internal 881).
            // Source not needed because the mixer becomes the data source for the video stream
            videoAttr.sourceUri_.clear();

            mediaList.emplace_back(videoAttr);

            call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));
            call->enterConference(shared_from_this());
        } else {
            // Video media exists: enable/unmute/clear-source only if something
            // actually changes, to avoid useless renegotiations.
            bool needsUpdate = false;
            if (!videoIt->enabled_) {
                videoIt->enabled_ = true;
                needsUpdate = true;
            }
            if (videoIt->muted_) {
                videoIt->muted_ = false;
                needsUpdate = true;
            }
            if (!videoIt->sourceUri_.empty()) {
                // Source not needed because the mixer becomes the data source for the video stream
                videoIt->sourceUri_.clear();
                needsUpdate = true;
            }

            if (needsUpdate) {
                JAMI_DEBUG("[conf:{}] [call:{}] Unmuting existing video stream for renegotiation", id_, callId);
                call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));
                call->enterConference(shared_from_this());
            }
        }
    }
}
#endif
914
void
// NOTE(review): the signature line is elided in this extraction — per the body
// this takes a `participant_id` (likely setActiveParticipant(const std::string&)); confirm.
{
#ifdef ENABLE_VIDEO
    if (!videoMixer_)
        return;
    // Host selected: activate the host's default video stream.
    if (isHost(participant_id)) {
        videoMixer_->setActiveStream(sip_utils::streamId("", sip_utils::DEFAULT_VIDEO_STREAMID));
        return;
    }
    // Direct participant: activate its default video stream.
    if (auto call = getCallFromPeerID(participant_id)) {
        videoMixer_->setActiveStream(sip_utils::streamId(call->getCallId(), sip_utils::DEFAULT_VIDEO_STREAMID));
        return;
    }

    auto remoteHost = findHostforRemoteParticipant(participant_id);
    if (not remoteHost.empty()) {
        // This logic will be handled client side
        return;
    }
    // Unset active participant by default
    videoMixer_->resetActiveStream();
#endif
}
939
940void
941Conference::setActiveStream(const std::string& streamId, bool state)
942{
943#ifdef ENABLE_VIDEO
944 if (!videoMixer_)
945 return;
946 if (state)
947 videoMixer_->setActiveStream(streamId);
948 else
949 videoMixer_->resetActiveStream();
950#endif
951}
952
void
// NOTE(review): the signature line is elided in this extraction — per the body
// this takes an int `layout` (Conference::setLayout(int)); confirm.
{
#ifdef ENABLE_VIDEO
    // NOTE(review): the layout-range validation `if (...)` is elided (internal 957).
        JAMI_ERROR("[conf:{}] Unknown layout {}", id_, layout);
        return;
    }
    if (!videoMixer_)
        return;
    // Persist the chosen layout before pushing it to the mixer.
    {
        std::lock_guard lk(confInfoMutex_);
        confInfo_.layout = layout;
    }
    videoMixer_->setVideoLayout(static_cast<video::Layout>(layout));
#endif
}
970
std::vector<std::map<std::string, std::string>>
// NOTE(review): the signature line is elided in this extraction — this
// converts each participant entry to its string-map form; confirm the name
// (likely ConfInfo::toVectorMapStringString() const).
{
    std::vector<std::map<std::string, std::string>> infos;
    infos.reserve(size());
    for (const auto& info : *this)
        infos.emplace_back(info.toMap());
    return infos;
}
980
std::string
// NOTE(review): the signature line is elided in this extraction — serializes
// this ConfInfo to JSON: participants under "p" plus w/h/v/layout; confirm.
{
    Json::Value val = {};
    for (const auto& info : *this) {
        val["p"].append(info.toJson());
    }
    val["w"] = w;
    val["h"] = h;
    val["v"] = v;
    val["layout"] = layout;
    return json::toString(val);
}
994
// Broadcast the current conference layout: per-call JSON sent asynchronously
// to each participant, plus a host-side version for the local client.
void
Conference::sendConferenceInfos()
{
    // Inform calls that the layout has changed
    foreachCall([&](const auto& call) {
        // Produce specific JSON for each participant (2 separate accounts can host ...
        // a conference on a same device, the conference is not link to one account).
        auto w = call->getAccount();
        auto account = w.lock();
        if (!account)
            return;

        // Send off the main thread; the lambda owns its copies of call/confInfo.
        dht::ThreadPool::io().run(
            [call, confInfo = getConfInfoHostUri(account->getUsername() + "@ring.dht", call->getPeerNumber())] {
                call->sendConfInfo(confInfo.toString());
            });
    });

    auto confInfo = getConfInfoHostUri("", "");
#ifdef ENABLE_VIDEO
    // NOTE(review): a line is elided here (internal 1015), presumably creating
    // the local sinks from `confInfo` — confirm.
#endif

    // Inform client that layout has changed
    // NOTE(review): the client notification line is elided (internal 1019).
}
1021
#ifdef ENABLE_VIDEO
// Create/update the client sinks that render each participant of the mixed
// frame, feeding them from the mixer's sink.
void
Conference::createSinks(const ConfInfo& infos)
{
    std::lock_guard lk(sinksMtx_);
    if (!videoMixer_)
        return;
    auto& sink = videoMixer_->getSink();
    Manager::instance().createSinkClients(getConfId(),
                                          infos,
                                          {std::static_pointer_cast<video::VideoFrameActiveWriter>(sink)},
                                          // NOTE(review): one argument line is elided here (internal 1033).
                                          getAccountId());
}
#endif
1037
// Attach the local host to the conference. With an empty media list, default
// host sources are (re)initialized; otherwise the supplied media is requested.
void
Conference::attachHost(const std::vector<libjami::MediaMap>& mediaList)
{
    JAMI_DEBUG("[conf:{}] Attaching host", id_);

    // NOTE(review): the detached-state check and state transition are elided
    // in this extraction (internal 1043-1044); the dangling `} else {` below
    // matches that elided `if`.
    if (mediaList.empty()) {
        JAMI_DEBUG("[conf:{}] Empty media list, initializing default sources", id_);
        initSourcesForHost();
        bindHostAudio();
#ifdef ENABLE_VIDEO
        if (videoMixer_) {
            std::vector<std::string> videoInputs;
            for (const auto& source : hostSources_) {
                if (source.type_ == MediaType::MEDIA_VIDEO)
                    videoInputs.emplace_back(source.sourceUri_);
            }
            // No host video input: publish the host as an audio-only source.
            if (videoInputs.empty()) {
                videoMixer_->addAudioOnlySource("", sip_utils::streamId("", sip_utils::DEFAULT_AUDIO_STREAMID));
            } else {
                videoMixer_->switchInputs(videoInputs);
            }
        }
#endif
    } else {
        // NOTE(review): the non-empty media-list handling is elided (internal
        // 1064), presumably requestMediaChange(mediaList) — confirm.
    }
    } else {
        JAMI_WARNING("[conf:{}] Invalid conference state in attach participant: current \"{}\" - expected \"{}\"",
                     id_,
                     getStateStr(),
                     "ACTIVE_DETACHED");
    }
}
1073
void
// NOTE(review): the signature line is elided in this extraction — per the log
// text this is the host-detach entry point (Conference::detachHost()); confirm.
{
    JAMI_LOG("[conf:{}] Detaching host", id_);

    // Remember the host media so a later re-attach can restore it.
    lastMediaList_ = currentMediaList();

    // NOTE(review): the attached-state check `if (...)` is elided (internal 1081);
    // the `} else {` below matches it.
        unbindHostAudio();

#ifdef ENABLE_VIDEO
        if (videoMixer_)
            videoMixer_->stopInputs();
#endif
    } else {
        JAMI_WARNING("[conf:{}] Invalid conference state in detach participant: current \"{}\" - expected \"{}\"",
                     id_,
                     getStateStr(),
                     "ACTIVE_ATTACHED");
        return;
    }

    // NOTE(review): the state transition is elided (internal 1096).
    initSourcesForHost();
}
1099
// NOTE(review): the signature lines are elided in this extraction — this body
// returns a snapshot copy of `subCalls_` taken under the subcalls lock; confirm.
{
    std::lock_guard lk(subcallsMtx_);
    return subCalls_;
}
1106
bool
// NOTE(review): the signature line is elided in this extraction — per the body
// this toggles conference recording (Conference::toggleRecording()); confirm.
{
    bool newState = not isRecording();
    // (De)initialize the recorder before flipping the state.
    if (newState)
        initRecorder(recorder_);
    else if (recorder_)
        deinitRecorder(recorder_);

    // Notify each participant
    foreachCall([&](const auto& call) { call->updateRecState(newState); });

    // NOTE(review): the lines producing `res` are elided (internal 1119-1120),
    // presumably delegating to the base-class toggleRecording — confirm.
    return res;
}
1123
std::string
// NOTE(review): the signature line is elided in this extraction — returns the
// owning account's id, or empty when the account is gone; confirm the name.
{
    if (auto account = getAccount())
        return account->getAccountID();
    return {};
}
1131
/**
 * Switch the host video input to the given URI.
 *
 * Legacy entry point (superseded by requestMediaChange): rewrites the host
 * sources so only one video input remains, then switches the mixer to it.
 * No-op when video support is compiled out or disabled on the account.
 *
 * NOTE(review): two lines of the plugin preview handling are elided in
 * this listing (presumably the observer attachment for previewSubject).
 */
void
Conference::switchInput(const std::string& input)
{
#ifdef ENABLE_VIDEO
    JAMI_DEBUG("[conf:{}] Switching video input to {}", id_, input);
    std::vector<MediaAttribute> newSources;
    auto firstVideo = true;
    // Rewrite hostSources (remove all except one video input)
    // This method is replaced by requestMediaChange
    for (auto& source : hostSources_) {
        if (source.type_ == MediaType::MEDIA_VIDEO) {
            if (firstVideo) {
                // Keep only the first video source, retargeted to `input`.
                firstVideo = false;
                source.sourceUri_ = input;
                newSources.emplace_back(source);
            }
        } else {
            newSources.emplace_back(source);
        }
    }

    // Done if the video is disabled
    if (not isVideoEnabled())
        return;

    if (auto mixer = videoMixer_) {
        mixer->switchInputs({input});
#ifdef ENABLE_PLUGIN
        // Preview
        if (auto videoPreview = mixer->getVideoLocal()) {
            auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
        }
#endif
    }
#endif
}
1170
// Whether the owning account currently has video enabled; false when the
// account has been destroyed.
// NOTE(review): the Conference::isVideoEnabled() signature line is elided
// in this listing.
bool
{
    if (auto shared = account_.lock())
        return shared->isVideoEnabled();
    return false;
}
1178
1179#ifdef ENABLE_VIDEO
1180std::shared_ptr<video::VideoMixer>
1181Conference::getVideoMixer()
1182{
1183 return videoMixer_;
1184}
1185
1186std::string
1187Conference::getVideoInput() const
1188{
1189 for (const auto& source : hostSources_) {
1190 if (source.type_ == MediaType::MEDIA_VIDEO)
1191 return source.sourceUri_;
1192 }
1193 return {};
1194}
1195#endif
1196
/**
 * Wire the conference's mixed audio (and video, when enabled) into the
 * given recorder.
 *
 * NOTE(review): the line declaring `rbPool` (presumably obtained from
 * Manager::instance().getRingBufferPool()) is elided in this listing.
 */
void
Conference::initRecorder(std::shared_ptr<MediaRecorder>& rec)
{
#ifdef ENABLE_VIDEO
    // Video
    if (videoMixer_) {
        if (auto* ob = rec->addStream(videoMixer_->getStream("v:mixer"))) {
            videoMixer_->attach(ob);
        }
    }
#endif

    // Audio
    // Create ghost participant for ringbufferpool
    ghostRingBuffer_ = rbPool.createRingBuffer(getConfId());

    // Bind it to ringbufferpool in order to get the all mixed frames
    bindSubCallAudio(getConfId());

    // Add stream to recorder
    audioMixer_ = jami::getAudioInput(getConfId());
    if (auto* ob = rec->addStream(audioMixer_->getInfo("a:mixer"))) {
        audioMixer_->attach(ob);
    }
}
1223
/**
 * Detach the conference audio/video streams from the recorder and release
 * the recording-only resources (audio mixer input, ghost ring buffer).
 *
 * NOTE(review): one line is elided in this listing just before the ghost
 * ring buffer reset (likely the unbinding of the conference ring buffer).
 */
void
Conference::deinitRecorder(std::shared_ptr<MediaRecorder>& rec)
{
#ifdef ENABLE_VIDEO
    // Video
    if (videoMixer_) {
        if (auto* ob = rec->getStream("v:mixer")) {
            videoMixer_->detach(ob);
        }
    }
#endif

    // Audio
    if (auto* ob = rec->getStream("a:mixer"))
        audioMixer_->detach(ob);
    audioMixer_.reset();
    ghostRingBuffer_.reset();
}
1243
1244void
1245Conference::onConfOrder(const std::string& callId, const std::string& confOrder)
1246{
1247 // Check if the peer is a master
1248 if (auto call = getCall(callId)) {
1249 const auto& peerId = getRemoteId(call);
1250 Json::Value root;
1251 if (!json::parse(confOrder, root)) {
1252 JAMI_WARNING("[conf:{}] Unable to parse conference order from {}", id_, peerId);
1253 return;
1254 }
1255
1256 parser_.initData(std::move(root), peerId);
1257 parser_.parse();
1258 }
1259}
1260
1261std::shared_ptr<Call>
1262Conference::getCall(const std::string& callId)
1263{
1264 return Manager::instance().callFactory.getCall(callId);
1265}
1266
1267bool
1268Conference::isModerator(std::string_view uri) const
1269{
1270 return moderators_.find(uri) != moderators_.end() or isHost(uri);
1271}
1272
1273bool
1274Conference::isHandRaised(std::string_view deviceId) const
1275{
1276 return isHostDevice(deviceId) ? handsRaised_.find("host"sv) != handsRaised_.end()
1277 : handsRaised_.find(deviceId) != handsRaised_.end();
1278}
1279
1280void
1281Conference::setHandRaised(const std::string& deviceId, const bool& state)
1282{
1283 if (isHostDevice(deviceId)) {
1284 auto isPeerRequiringAttention = isHandRaised("host"sv);
1285 if (state and not isPeerRequiringAttention) {
1286 handsRaised_.emplace("host"sv);
1287 updateHandsRaised();
1288 } else if (not state and isPeerRequiringAttention) {
1289 handsRaised_.erase("host");
1290 updateHandsRaised();
1291 }
1292 } else {
1293 for (const auto& p : getSubCalls()) {
1294 if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(p))) {
1295 auto isPeerRequiringAttention = isHandRaised(deviceId);
1296 std::string callDeviceId;
1297 if (auto* transport = call->getTransport())
1298 callDeviceId = transport->deviceId();
1299 if (deviceId == callDeviceId) {
1300 if (state and not isPeerRequiringAttention) {
1301 handsRaised_.emplace(deviceId);
1302 updateHandsRaised();
1303 } else if (not state and isPeerRequiringAttention) {
1304 handsRaised_.erase(deviceId);
1305 updateHandsRaised();
1306 }
1307 return;
1308 }
1309 }
1310 }
1311 JAMI_WARNING("[conf:{}] Failed to set hand raised for {} (participant not found)", id_, deviceId);
1312 }
1313}
1314
1315bool
1316Conference::isVoiceActive(std::string_view streamId) const
1317{
1318 return streamsVoiceActive.find(streamId) != streamsVoiceActive.end();
1319}
1320
/**
 * Record a voice-activity change for a stream and broadcast the update.
 * The stream must be present in confInfo_; changes that do not alter the
 * current state are dropped to avoid redundant updates.
 *
 * NOTE(review): the `if (newState) {` / `else` lines wrapping the two
 * branches below (and presumably the updateVoiceActivity() calls) are
 * elided in this listing.
 */
void
Conference::setVoiceActivity(const std::string& streamId, const bool& newState)
{
    // verify that streamID exists in our confInfo
    bool exists = false;
    for (auto& participant : confInfo_) {
        if (participant.sinkId == streamId) {
            exists = true;
            break;
        }
    }

    if (!exists) {
        JAMI_ERROR("[conf:{}] Participant not found with streamId: {}", id_, streamId);
        return;
    }

    auto previousState = isVoiceActive(streamId);

    if (previousState == newState) {
        // no change, do not send out updates
        return;
    }

        // voice going from inactive to active
        streamsVoiceActive.emplace(streamId);
        return;
    }

        // voice going from active to inactive
        streamsVoiceActive.erase(streamId);
        return;
    }
}
1359
1360void
1361Conference::setModerator(const std::string& participant_id, const bool& state)
1362{
1363 for (const auto& p : getSubCalls()) {
1364 if (auto call = getCall(p)) {
1365 auto isPeerModerator = isModerator(participant_id);
1366 if (participant_id == getRemoteId(call)) {
1367 if (state and not isPeerModerator) {
1368 moderators_.emplace(participant_id);
1369 updateModerators();
1370 } else if (not state and isPeerModerator) {
1371 moderators_.erase(participant_id);
1372 updateModerators();
1373 }
1374 return;
1375 }
1376 }
1377 }
1378 JAMI_WARNING("[conf:{}] Failed to set moderator {} (participant not found)", id_, participant_id);
1379}
1380
1381void
1382Conference::updateModerators()
1383{
1384 std::lock_guard lk(confInfoMutex_);
1385 for (auto& info : confInfo_) {
1386 info.isModerator = isModerator(string_remove_suffix(info.uri, '@'));
1387 }
1388 sendConferenceInfos();
1389}
1390
1391void
1392Conference::updateHandsRaised()
1393{
1394 std::lock_guard lk(confInfoMutex_);
1395 for (auto& info : confInfo_)
1396 info.handRaised = isHandRaised(info.device);
1397 sendConferenceInfos();
1398}
1399
/**
 * Refresh the voiceActivity flag of every participant in confInfo_ —
 * directly from the call for direct peers, otherwise from the locally
 * tracked stream set — then broadcast the updated conference info.
 * NOTE(review): the Conference::updateVoiceActivity() signature line is
 * elided in this listing.
 */
void

{
    std::lock_guard lk(confInfoMutex_);

    // streamId is actually sinkId
    for (ParticipantInfo& participantInfo : confInfo_) {
        bool newActivity;

        if (auto call = getCallWith(std::string(string_remove_suffix(participantInfo.uri, '@')),
                                    participantInfo.device)) {
            // if this participant is in a direct call with us
            // grab voice activity info directly from the call
            newActivity = call->hasPeerVoice();
        } else {
            // check for it
            newActivity = isVoiceActive(participantInfo.sinkId);
        }

        if (participantInfo.voiceActivity != newActivity) {
            participantInfo.voiceActivity = newActivity;
        }
    }
    sendConferenceInfos(); // also emits signal to client
}
1425
1426void
1427Conference::foreachCall(const std::function<void(const std::shared_ptr<Call>& call)>& cb)
1428{
1429 for (const auto& p : getSubCalls())
1430 if (auto call = getCall(p))
1431 cb(call);
1432}
1433
1434bool
1435Conference::isMuted(std::string_view callId) const
1436{
1437 return participantsMuted_.find(callId) != participantsMuted_.end();
1438}
1439
1440void
1441Conference::muteStream(const std::string& accountUri, const std::string& deviceId, const std::string&, const bool& state)
1442{
1443 if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock())) {
1444 if (accountUri == acc->getUsername() && deviceId == acc->currentDeviceId()) {
1445 muteHost(state);
1446 } else if (auto call = getCallWith(accountUri, deviceId)) {
1447 muteCall(call->getCallId(), state);
1448 } else {
1449 JAMI_WARNING("[conf:{}] No call with {} - {}", id_, accountUri, deviceId);
1450 }
1451 }
1452}
1453
/**
 * Mute/unmute the host in the conference mix. The reserved "host" marker
 * is kept in participantsMuted_, and the host audio is (un)bound before
 * the muted states are broadcast.
 * NOTE(review): the condition lines wrapping the (un)bind calls — likely
 * isMediaSourceMuted checks — are elided in this listing.
 */
void
Conference::muteHost(bool state)
{
    auto isHostMuted = isMuted("host"sv);
    if (state and not isHostMuted) {
        participantsMuted_.emplace("host"sv);
            unbindHostAudio();
        }
    } else if (not state and isHostMuted) {
        participantsMuted_.erase("host");
            bindHostAudio();
        }
    }
    updateMuted();
}
1471
1472void
1473Conference::muteCall(const std::string& callId, bool state)
1474{
1475 auto isPartMuted = isMuted(callId);
1476 if (state and not isPartMuted) {
1477 participantsMuted_.emplace(callId);
1478 unbindSubCallAudio(callId);
1479 updateMuted();
1480 } else if (not state and isPartMuted) {
1481 participantsMuted_.erase(callId);
1482 bindSubCallAudio(callId);
1483 updateMuted();
1484 }
1485}
1486
1487void
1488Conference::muteParticipant(const std::string& participant_id, const bool& state)
1489{
1490 // Prioritize remote mute, otherwise the mute info is lost during
1491 // the conference merge (we don't send back info to remoteHost,
1492 // cf. getConfInfoHostUri method)
1493
1494 // Transfer remote participant mute
1495 auto remoteHost = findHostforRemoteParticipant(participant_id);
1496 if (not remoteHost.empty()) {
1497 if (auto call = getCallFromPeerID(string_remove_suffix(remoteHost, '@'))) {
1498 auto w = call->getAccount();
1499 auto account = w.lock();
1500 if (!account)
1501 return;
1502 Json::Value root;
1503 root["muteParticipant"] = participant_id;
1504 root["muteState"] = state ? TRUE_STR : FALSE_STR;
1505 call->sendConfOrder(root);
1506 return;
1507 }
1508 }
1509
1510 // NOTE: For now we have no way to mute only one stream
1511 if (isHost(participant_id))
1512 muteHost(state);
1513 else if (auto call = getCallFromPeerID(participant_id))
1514 muteCall(call->getCallId(), state);
1515}
1516
/**
 * Refresh the `recording` flag of each confInfo_ entry (local recording
 * state for the host entry — empty uri — and peer recording state for the
 * other participants) and broadcast the update.
 * NOTE(review): the Conference::updateRecording() signature line is elided
 * in this listing.
 */
void

{
    std::lock_guard lk(confInfoMutex_);
    for (auto& info : confInfo_) {
        if (info.uri.empty()) {
            info.recording = isRecording();
        } else if (auto call = getCallWith(std::string(string_remove_suffix(info.uri, '@')), info.device)) {
            info.recording = call->isPeerRecording();
        }
    }
    sendConferenceInfos();
}
1530
/**
 * Refresh the moderator-muted and locally-muted flags of every confInfo_
 * entry and broadcast the update. The host entry (empty uri) reflects the
 * "host" mute marker and the local audio source state.
 * NOTE(review): the Conference::updateMuted() signature line is elided in
 * this listing.
 */
void

{
    std::lock_guard lk(confInfoMutex_);
    for (auto& info : confInfo_) {
        if (info.uri.empty()) {
            info.audioModeratorMuted = isMuted("host"sv);
            info.audioLocalMuted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
        } else if (auto call = getCallWith(std::string(string_remove_suffix(info.uri, '@')), info.device)) {
            info.audioModeratorMuted = isMuted(call->getCallId());
            info.audioLocalMuted = call->isPeerMuted();
        }
    }
    sendConferenceInfos();
}
1546
/**
 * Build the conference info to send to a given destination: the local
 * host's empty uri is rewritten to localHostURI, entries belonging to
 * remote hosts are stripped (each remote host already knows its own), and
 * the other remote hosts' participants are appended.
 *
 * NOTE(review): the return-type line (ConfInfo) and the detached-state
 * check before the first erase are elided in this listing, which is why
 * the braces below appear unbalanced.
 */
Conference::getConfInfoHostUri(std::string_view localHostURI, std::string_view destURI)
{
    ConfInfo newInfo = confInfo_;

    for (auto it = newInfo.begin(); it != newInfo.end();) {
        bool isRemoteHost = remoteHosts_.find(it->uri) != remoteHosts_.end();
        if (it->uri.empty() and not destURI.empty()) {
            // fill the empty uri with the local host URI, let void for local client
            it->uri = localHostURI;
            // If we're detached, remove the host
                it = newInfo.erase(it);
                continue;
            }
        }
        if (isRemoteHost) {
            // Don't send back the ParticipantInfo for remote Host
            // For other than remote Host, the new info is in remoteHosts_
            it = newInfo.erase(it);
        } else {
            ++it;
        }
    }
    // Add remote Host info
    for (const auto& [hostUri, confInfo] : remoteHosts_) {
        // Add remote info for remote host destination
        // Example: ConfA, ConfB & ConfC
        // ConfA send ConfA and ConfB for ConfC
        // ConfA send ConfA and ConfC for ConfB
        // ...
        if (destURI != hostUri)
            newInfo.insert(newInfo.end(), confInfo.begin(), confInfo.end());
    }
    return newInfo;
}
1583
1584bool
1585Conference::isHost(std::string_view uri) const
1586{
1587 if (uri.empty())
1588 return true;
1589
1590 // Check if the URI is a local URI (AccountID) for at least one of the subcall
1591 // (a local URI can be in the call with another device)
1592 for (const auto& p : getSubCalls()) {
1593 if (auto call = getCall(p)) {
1594 if (auto account = call->getAccount().lock()) {
1595 if (account->getUsername() == uri)
1596 return true;
1597 }
1598 }
1599 }
1600 return false;
1601}
1602
1603bool
1604Conference::isHostDevice(std::string_view deviceId) const
1605{
1606 if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock()))
1607 return deviceId == acc->currentDeviceId();
1608 return false;
1609}
1610
// Replace the conference info under lock and broadcast it to subscribers.
// NOTE(review): the signature line is elided in this listing — judging by
// the std::move below, it takes a ConfInfo by value.
void

{
    std::lock_guard lk(confInfoMutex_);
    confInfo_ = std::move(confInfo);
    sendConferenceInfos();
}
1618
/**
 * Hang up a participant identified by account URI and (optionally) device.
 * With an empty deviceId, every call from that URI is hung up; otherwise
 * the matching device call (or the host itself) is targeted, and unknown
 * participants are forwarded to the remote host mixing them, if any.
 * NOTE(review): the host-hangup action line is elided in this listing.
 */
void
Conference::hangupParticipant(const std::string& accountUri, const std::string& deviceId)
{
    if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock())) {
        if (deviceId.empty()) {
            // If deviceId is empty, hangup all calls with device
            while (auto call = getCallFromPeerID(accountUri)) {
                Manager::instance().hangupCall(acc->getAccountID(), call->getCallId());
            }
            return;
        } else {
            if (accountUri == acc->getUsername() && deviceId == acc->currentDeviceId()) {
                // (host hangup action elided in this listing)
                return;
            } else if (auto call = getCallWith(accountUri, deviceId)) {
                Manager::instance().hangupCall(acc->getAccountID(), call->getCallId());
                return;
            }
        }
        // Else, it may be a remote host
        auto remoteHost = findHostforRemoteParticipant(accountUri, deviceId);
        if (remoteHost.empty()) {
            JAMI_WARNING("[conf:{}] Unable to hangup {} (peer not found)", id_, accountUri);
            return;
        }
        if (auto call = getCallFromPeerID(string_remove_suffix(remoteHost, '@'))) {
            // Forward to the remote host.
            libjami::hangupParticipant(acc->getAccountID(), call->getCallId(), accountUri, deviceId);
        }
    }
}
1650
/**
 * Mute/unmute the local host's audio capture, or stop/start its camera.
 * Audio: (un)binds the host audio and broadcasts the new muted states.
 * Video: stops the mixer inputs or re-switches to the host video sources.
 *
 * NOTE(review): several lines are elided in this listing (the
 * already-in-state guards, the mute-state bookkeeping around the
 * (un)bind calls, and the video-state updates), which is why some
 * conditions and braces below appear incomplete.
 */
void
Conference::muteLocalHost(bool is_muted, const std::string& mediaType)
{
    if (mediaType.compare(libjami::Media::Details::MEDIA_TYPE_AUDIO) == 0) {
            JAMI_DEBUG("[conf:{}] Local audio source already {}", id_, is_muted ? "muted" : "unmuted");
            return;
        }

        auto isHostMuted = isMuted("host"sv);
            unbindHostAudio();
            bindHostAudio();
        }
        updateMuted();
        return;
    } else if (mediaType.compare(libjami::Media::Details::MEDIA_TYPE_VIDEO) == 0) {
#ifdef ENABLE_VIDEO
        if (not isVideoEnabled()) {
            JAMI_ERROR("Unable to stop camera, the camera is disabled!");
            return;
        }

            JAMI_DEBUG("[conf:{}] Local camera source already {}", id_, is_muted ? "stopped" : "started");
            return;
        }
        if (is_muted) {
            if (auto mixer = videoMixer_) {
                mixer->stopInputs();
            }
        } else {
            if (auto mixer = videoMixer_) {
                // Restart every configured host video source.
                std::vector<std::string> videoInputs;
                for (const auto& source : hostSources_) {
                    if (source.type_ == MediaType::MEDIA_VIDEO)
                        videoInputs.emplace_back(source.sourceUri_);
                }
                mixer->switchInputs(videoInputs);
            }
        }
        return;
#endif
    }
}
1701
1702#ifdef ENABLE_VIDEO
/**
 * Rescale the layout cells received from a remote mixing host into this
 * conference's coordinate space, using the remote frame size and the local
 * cell assigned to that host.
 *
 * NOTE(review): the line initializing remoteFrameHeight (presumably from
 * confInfo.h) is elided in this listing.
 * NOTE(review): localCell.w/h may be zero if peerURI is not found in
 * confInfo_, which would divide by zero below — confirm upstream
 * guarantees; `recv` is also dereferenced without a null check.
 */
void
Conference::resizeRemoteParticipants(ConfInfo& confInfo, std::string_view peerURI)
{
    int remoteFrameWidth = confInfo.w;

    if (remoteFrameHeight == 0 or remoteFrameWidth == 0) {
        // get the size of the remote frame from receiveThread
        // if the one from confInfo is empty
        if (auto call = std::dynamic_pointer_cast<SIPCall>(getCallFromPeerID(string_remove_suffix(peerURI, '@')))) {
            for (auto const& videoRtp : call->getRtpSessionList(MediaType::MEDIA_VIDEO)) {
                auto recv = std::static_pointer_cast<video::VideoRtpSession>(videoRtp)->getVideoReceive();
                remoteFrameHeight = recv->getHeight();
                remoteFrameWidth = recv->getWidth();
                // NOTE: this may be not the behavior we want, but this is only called
                // when we receive conferences information from a call, so the peer is
                // mixing the video and send only one stream, so we can break here
                break;
            }
        }
    }

    if (remoteFrameHeight == 0 or remoteFrameWidth == 0) {
        JAMI_WARNING("[conf:{}] Remote frame size not found", id_);
        return;
    }

    // get the size of the local frame
    ParticipantInfo localCell;
    for (const auto& p : confInfo_) {
        if (p.uri == peerURI) {
            localCell = p;
            break;
        }
    }

    const auto zoomX = static_cast<double>(remoteFrameWidth) / static_cast<double>(localCell.w);
    const auto zoomY = static_cast<double>(remoteFrameHeight) / static_cast<double>(localCell.h);

    // Do the resize for each remote participant
    for (auto& remoteCell : confInfo) {
        remoteCell.x = static_cast<int>(
            std::lround(static_cast<double>(remoteCell.x) / zoomX + static_cast<double>(localCell.x)));
        remoteCell.y = static_cast<int>(
            std::lround(static_cast<double>(remoteCell.y) / zoomY + static_cast<double>(localCell.y)));
        remoteCell.w = static_cast<int>(std::lround(static_cast<double>(remoteCell.w) / zoomX));
        remoteCell.h = static_cast<int>(std::lround(static_cast<double>(remoteCell.h) / zoomY));
    }
}
1752#endif
1753
/**
 * Merge conference info received from a remote host into our view.
 * An empty info removes that remote host entirely; otherwise its entry is
 * replaced only when it changed, and updates are re-broadcast (plus a
 * mixer layout refresh) only when needed, to avoid signalling loops.
 * NOTE(review): the signature line and the (video) call that resizes the
 * remote participants are elided in this listing.
 */
void

{
    JAMI_DEBUG("[conf:{}] Merging confInfo from {}", id_, peerURI);
    if (newInfo.empty()) {
        JAMI_DEBUG("[conf:{}] confInfo empty, removing remoteHost {}", id_, peerURI);
        std::lock_guard lk(confInfoMutex_);
        remoteHosts_.erase(peerURI);
        sendConferenceInfos();
        return;
    }

#ifdef ENABLE_VIDEO
#endif

    std::lock_guard lk(confInfoMutex_);
    bool updateNeeded = false;
    auto it = remoteHosts_.find(peerURI);
    if (it != remoteHosts_.end()) {
        // Compare confInfo before update
        if (it->second != newInfo) {
            it->second = newInfo;
            updateNeeded = true;
        }
    } else {
        remoteHosts_.emplace(peerURI, newInfo);
        updateNeeded = true;
    }
    // Send confInfo only if needed to avoid loops
#ifdef ENABLE_VIDEO
    if (updateNeeded and videoMixer_) {
        // Trigger the layout update in the mixer because the frame resolution may
        // change from participant to conference and cause a mismatch between
        // confInfo layout and rendering layout.
        videoMixer_->updateLayout();
    }
#endif
    if (updateNeeded)
        sendConferenceInfos();
}
1795
1796std::string_view
1797Conference::findHostforRemoteParticipant(std::string_view uri, std::string_view deviceId)
1798{
1799 for (const auto& host : remoteHosts_) {
1800 for (const auto& p : host.second) {
1801 if (uri == string_remove_suffix(p.uri, '@') && (deviceId == "" || deviceId == p.device))
1802 return host.first;
1803 }
1804 }
1805 return "";
1806}
1807
// Return the first sub-call whose remote peer id equals peerID, or null
// when no such call exists.
// NOTE(review): the Conference::getCallFromPeerID signature line is elided
// in this listing.
std::shared_ptr<Call>

{
    for (const auto& p : getSubCalls()) {
        auto call = getCall(p);
        if (call && getRemoteId(call) == peerID) {
            return call;
        }
    }
    return nullptr;
}
1819
1820std::shared_ptr<Call>
1821Conference::getCallWith(const std::string& accountUri, const std::string& deviceId)
1822{
1823 for (const auto& p : getSubCalls()) {
1824 if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(p))) {
1825 auto* transport = call->getTransport();
1826 if (accountUri == string_remove_suffix(call->getPeerNumber(), '@') && transport
1827 && deviceId == transport->deviceId()) {
1828 return call;
1829 }
1830 }
1831 }
1832 return {};
1833}
1834
1835std::string
1836Conference::getRemoteId(const std::shared_ptr<jami::Call>& call) const
1837{
1838 if (auto* transport = std::dynamic_pointer_cast<SIPCall>(call)->getTransport())
1839 if (auto cert = transport->getTlsInfos().peerCert)
1840 if (cert->issuer)
1841 return cert->issuer->getId().toString();
1842 return {};
1843}
1844
1845void
1851
/**
 * Start recording the conference mix to the given path.
 * @return the result of the underlying Recordable::startRecording call.
 * NOTE(review): one line is elided in this listing between the Recordable
 * call and the return (likely a participant/state notification).
 */
bool
Conference::startRecording(const std::string& path)
{
    auto res = Recordable::startRecording(path);
    return res;
}
1859
1861
/**
 * Bind every host audio source into the conference mix: the primary host
 * buffer is linked bidirectionally with each sub-call's primary stream,
 * and secondary buffers are added half-duplex so participants hear the
 * mix of all host streams.
 *
 * NOTE(review): several lines are elided in this listing (the ring-buffer
 * pool accessor, the primary-buffer assignment, and the mute-dependent
 * bidirectional bind); elision points are marked below.
 */
void
Conference::bindHostAudio()
{
    JAMI_DEBUG("[conf:{}] Binding host audio", id_);

    // (rbPool declaration elided in this listing)

    // Collect and start host audio sources, separating primary from secondary.
    // The primary host buffer (DEFAULT_ID) forms the bidirectional link with
    // each subcall's primary stream. Secondary host buffers are added as
    // half-duplex sources so that participants hear the mix of all host streams.
    std::string hostPrimaryBuffer;
    std::vector<std::string> hostSecondaryBuffers;

    for (const auto& source : hostSources_) {
        if (source.type_ != MediaType::MEDIA_AUDIO)
            continue;

        // Start audio input
        auto& hostAudioInput = hostAudioInputs_[source.label_];
        if (!hostAudioInput)
            hostAudioInput = std::make_shared<AudioInput>(source.label_);
        hostAudioInput->switchInput(source.sourceUri_);

        if (source.label_ == sip_utils::DEFAULT_AUDIO_STREAMID) {
            // (hostPrimaryBuffer assignment elided in this listing)
            JAMI_DEBUG("[conf:{}] Primary host buffer: {}", id_, hostPrimaryBuffer);
        } else {
            // Use the ring buffer ID that initCapture/initFile actually
            // created, not the raw sourceUri which may differ (e.g.
            // "display://:0+0,0 1920x1080" vs the normalized "desktop").
            auto bufferId = hostAudioInput->getSourceRingBufferId();
            if (!bufferId.empty()) {
                if (source.muted_) {
                    // Muted secondary source: silence the AudioInput and
                    // remove its buffer from the mix so participants no
                    // longer receive data from it.
                    JAMI_DEBUG("[conf:{}] Secondary host buffer {} is muted – unbinding", id_, bufferId);
                    hostAudioInput->setMuted(true);
                    rbPool.unBindAllHalfDuplexIn(bufferId);
                } else {
                    JAMI_DEBUG("[conf:{}] Secondary host buffer: {}", id_, bufferId);
                    hostAudioInput->setMuted(false);
                    hostSecondaryBuffers.push_back(std::move(bufferId));
                }
            } else {
                JAMI_WARNING("[conf:{}] No source ring buffer for host audio {}", id_, source.label_);
            }
        }
    }

    if (hostPrimaryBuffer.empty())
        return;

    for (const auto& item : getSubCalls()) {
        auto call = getCall(item);
        if (!call)
            continue;

        const bool participantMuted = isMuted(call->getCallId());
        const auto medias = call->getRemoteAudioStreams();

        // Identify participant's primary (first) and secondary audio streams.
        // Only the primary stream receives the conference mix (bidirectional).
        // Secondary streams are mixed in as sources for other participants.
        std::string participantPrimary;
        std::vector<std::string> participantSecondaries;
        for (const auto& [id, muted] : medias) {
            if (participantPrimary.empty())
                participantPrimary = id;
            else
                participantSecondaries.push_back(id);
        }

        if (participantPrimary.empty())
            continue;

        const bool primaryMuted = medias.at(participantPrimary);

        // Host primary <-> participant primary (bidirectional with mute logic)
        // (mute-dependent bidirectional bind elided in this listing)
        else
            rbPool.bindHalfDuplexOut(participantPrimary, hostPrimaryBuffer);

        // Host secondary sources -> participant primary
        // (participant hears all host audio streams mixed together)
        for (const auto& secBuffer : hostSecondaryBuffers)
            rbPool.bindHalfDuplexOut(participantPrimary, secBuffer);

        // Participant secondary streams -> host primary
        // (host hears all participant audio streams mixed together)
        for (const auto& secId : participantSecondaries) {
            const bool secMuted = medias.at(secId);
            if (!(participantMuted || secMuted))
                rbPool.bindHalfDuplexOut(hostPrimaryBuffer, secId);
        }

        // (primary flush elided in this listing)
        for (const auto& secId : participantSecondaries)
            rbPool.flush(secId);
    }

    // (host primary flush elided in this listing)
    for (const auto& secBuffer : hostSecondaryBuffers)
        rbPool.flush(secBuffer);
}
1970
/**
 * Stop every host audio source and remove its ring buffer from the mix.
 * NOTE(review): the rbPool declaration line is elided in this listing.
 */
void
Conference::unbindHostAudio()
{
    JAMI_DEBUG("[conf:{}] Unbinding host audio", id_);

    for (const auto& source : hostSources_) {
        if (source.type_ != MediaType::MEDIA_AUDIO)
            continue;

        // Determine the buffer ID to unbind before stopping the input,
        // since switchInput("") resets the source ring buffer ID.
        std::string bufferId;
        auto hostAudioInput = hostAudioInputs_.find(source.label_);
        if (hostAudioInput != hostAudioInputs_.end() && hostAudioInput->second) {
            if (source.label_ == sip_utils::DEFAULT_AUDIO_STREAMID)
                bufferId = std::string(RingBufferPool::DEFAULT_ID);
            else
                bufferId = hostAudioInput->second->getSourceRingBufferId();
            // Stop audio input
            hostAudioInput->second->switchInput("");
        }

        // Unbind audio: remove this buffer as a source from all readers.
        if (!bufferId.empty())
            rbPool.unBindAllHalfDuplexIn(bufferId);
    }
}
1999
/**
 * Bind a sub-call's audio into the conference mix: its primary stream is
 * linked with the other participants' primaries and the host buffer, and
 * its secondary streams are added half-duplex to the others.
 *
 * NOTE(review): many lines are elided in this listing (the ring-buffer
 * pool accessor, the primary-id assignments, the mute flags, and the
 * mute-dependent bind conditions); elision points are marked below.
 */
void
Conference::bindSubCallAudio(const std::string& callId)
{
    // (rbPool declaration elided in this listing)

    auto participantCall = getCall(callId);
    if (!participantCall)
        return;

    const bool participantMuted = isMuted(callId);
    const auto participantStreams = participantCall->getRemoteAudioStreams();
    JAMI_DEBUG("[conf:{}] Binding participant audio: {} with {} streams", id_, callId, participantStreams.size());

    // Identify participant's primary (first) and secondary audio streams.
    // The primary stream forms the bidirectional link with other participants'
    // primary streams and the host. Secondary streams are mixed in as
    // half-duplex sources so that other participants (and the host) hear the
    // combined audio from all of this participant's streams.
    std::string primaryStreamId;
    std::vector<std::string> secondaryStreamIds;
    for (const auto& [streamId, muted] : participantStreams) {
        if (primaryStreamId.empty())
            // (primary assignment elided in this listing)
        else
            secondaryStreamIds.push_back(streamId);
    }

    if (primaryStreamId.empty())
        return;

    // (primary-stream mute flags elided in this listing)

    // --- Bind with other subcalls ---
    for (const auto& otherId : getSubCalls()) {
        if (otherId == callId)
            continue;

        auto otherCall = getCall(otherId);
        if (!otherCall)
            continue;

        const bool otherMuted = isMuted(otherId);
        const auto otherStreams = otherCall->getRemoteAudioStreams();

        // Identify the other participant's primary and secondary streams
        std::string otherPrimaryId;
        std::vector<std::string> otherSecondaryIds;
        for (const auto& [streamId, muted] : otherStreams) {
            if (otherPrimaryId.empty())
                // (primary assignment elided in this listing)
            else
                otherSecondaryIds.push_back(streamId);
        }

        if (otherPrimaryId.empty())
            continue;

        // (other-stream mute flags elided in this listing)

        // Primary <-> primary (bidirectional with mute logic)
        // (mute-dependent conditions elided in this listing)
            rbPool.bindRingBuffers(primaryStreamId, otherPrimaryId);
        } else {
            rbPool.bindHalfDuplexOut(otherPrimaryId, primaryStreamId);
            rbPool.bindHalfDuplexOut(primaryStreamId, otherPrimaryId);
        }

        // Participant's secondaries -> other's primary
        // (other participant hears all of this participant's streams mixed)
        for (const auto& secId : secondaryStreamIds) {
            const bool secMuted = participantStreams.at(secId);
            if (!(participantMuted || secMuted))
                rbPool.bindHalfDuplexOut(otherPrimaryId, secId);
        }

        // Other's secondaries -> participant's primary
        // (this participant hears all of the other's streams mixed)
        for (const auto& otherSecId : otherSecondaryIds) {
            const bool otherSecMuted = otherStreams.at(otherSecId);
            if (!(otherMuted || otherSecMuted))
                rbPool.bindHalfDuplexOut(primaryStreamId, otherSecId);
        }

        rbPool.flush(primaryStreamId);
        rbPool.flush(otherPrimaryId);
    }

    // --- Bind with host (if attached) ---
    // (attached-state check elided in this listing)
        const bool hostCanSend = !(isMuted("host"sv) || isMediaSourceMuted(MediaType::MEDIA_AUDIO));

        // Primary <-> host default buffer (bidirectional with mute logic)
        // (mute-dependent conditions elided in this listing)
        } else {
            if (hostCanSend)
        }

        // Participant's secondaries -> host
        // (host hears all of this participant's streams mixed)
        for (const auto& secId : secondaryStreamIds) {
            const bool secMuted = participantStreams.at(secId);
            if (!(participantMuted || secMuted))
                rbPool.bindHalfDuplexOut(RingBufferPool::DEFAULT_ID, secId);
        }

        // Host's secondary sources -> participant primary
        // (participant hears all host audio sources mixed)
        for (const auto& source : hostSources_) {
            if (source.type_ == MediaType::MEDIA_AUDIO && source.label_ != sip_utils::DEFAULT_AUDIO_STREAMID) {
                auto it = hostAudioInputs_.find(source.label_);
                if (it != hostAudioInputs_.end() && it->second) {
                    auto buffer = it->second->getSourceRingBufferId();
                    if (!buffer.empty())
                        rbPool.bindHalfDuplexOut(primaryStreamId, buffer);
                }
            }
        }

        rbPool.flush(primaryStreamId);
    }

    // Flush secondary streams
    for (const auto& secId : secondaryStreamIds)
        rbPool.flush(secId);
}
2134
/**
 * Remove a sub-call's audio streams from the conference mix.
 * NOTE(review): the ring-buffer pool accessor and the per-stream
 * unBindAllHalfDuplexIn call are elided in this listing.
 */
void
Conference::unbindSubCallAudio(const std::string& callId)
{
    JAMI_DEBUG("[conf:{}] Unbinding participant audio: {}", id_, callId);
    if (auto call = getCall(callId)) {
        auto medias = call->getAudioStreams();

        bool isPrimary = true;
        for (const auto& [id, muted] : medias) {
            // Remove this stream as a source from all readers.
            // For the primary stream, also remove its reader bindings
            // (it was the only stream receiving the conference mix).
            if (isPrimary) {
                rbPool.unBindAllHalfDuplexOut(id);
                isPrimary = false;
            }
        }
    }
}
2156
2157void
2158Conference::clearParticipantData(const std::string& callId)
2159{
2160 JAMI_DEBUG("[conf:{}] Clearing participant data for call {}", id_, callId);
2161
2162 if (callId.empty()) {
2163 JAMI_WARNING("[conf:{}] Cannot clear participant data: empty call id", id_);
2164 return;
2165 }
2166
2167 auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId));
2168 if (!call) {
2169 JAMI_WARNING("[conf:{}] Unable to find call {} to clear participant", id_, callId);
2170 return;
2171 }
2172
2173 auto* transport = call->getTransport();
2174 if (!transport) {
2175 JAMI_WARNING("[conf:{}] Unable to find transport for call {} to clear participant", id_, callId);
2176 return;
2177 }
2178
2179 const std::string deviceId = std::string(transport->deviceId());
2180 const std::string participantId = getRemoteId(call);
2181
2182 {
2183 std::lock_guard lk(confInfoMutex_);
2184 for (auto it = confInfo_.begin(); it != confInfo_.end();) {
2185 if (it->uri == participantId) {
2186 it = confInfo_.erase(it);
2187 } else {
2188 ++it;
2189 }
2190 }
2191 auto remoteIt = remoteHosts_.find(participantId);
2192 if (remoteIt != remoteHosts_.end()) {
2193 remoteHosts_.erase(remoteIt);
2194 }
2195 handsRaised_.erase(deviceId);
2196 moderators_.erase(participantId);
2197 participantsMuted_.erase(callId);
2198 }
2199
2200 sendConferenceInfos();
2201}
2202
2203} // namespace jami
std::shared_ptr< Call > getCall(const std::string &id) const
Return call pointer associated to given ID.Type can optionally be specified.
void onHangupParticipant(std::function< void(const std::string &, const std::string &)> &&cb)
void onMuteStreamAudio(std::function< void(const std::string &, const std::string &, const std::string &, bool)> &&cb)
void onKickParticipant(std::function< void(const std::string &)> &&cb)
void onRaiseHand(std::function< void(const std::string &, bool)> &&cb)
void initData(Json::Value &&d, std::string_view peerId)
Inject the data to parse into the parser.
void onMuteParticipant(std::function< void(const std::string &, bool)> &&cb)
void onSetActiveParticipant(std::function< void(const std::string &)> &&cb)
void onSetActiveStream(std::function< void(const std::string &, bool)> &&cb)
void onCheckAuthorization(std::function< bool(std::string_view)> &&cb)
Ask the caller to check if a peer is authorized (moderator of the conference)
void onSetLayout(std::function< void(int)> &&cb)
void onVersion(std::function< void(uint32_t)> &&cb)
void parse()
Parse the data; this will call the injected methods if necessary.
void onVoiceActivity(std::function< void(const std::string &, bool)> &&cb)
void onRaiseHandUri(std::function< void(const std::string &, bool)> &&cb)
const char * getStateStr() const
Definition conference.h:239
void setVoiceActivity(const std::string &streamId, const bool &newState)
std::string getAccountId() const
bool isVideoEnabled() const
void hangupParticipant(const std::string &accountUri, const std::string &deviceId="")
void muteLocalHost(bool is_muted, const std::string &mediaType)
std::chrono::milliseconds getDuration() const
Definition conference.h:375
bool startRecording(const std::string &path) override
Start recording.
const std::string & getConfId() const
Return the conference id.
Definition conference.h:201
void removeSubCall(const std::string &callId)
Remove a subcall from the conference.
std::vector< libjami::MediaMap > currentMediaList() const
Retrieve current medias list.
void detachHost()
Detach local audio/video from the conference.
~Conference()
Destructor for this class, decrement static counter.
void mergeConfInfo(ConfInfo &newInfo, const std::string &peerURI)
void setActiveParticipant(const std::string &participant_id)
void handleMediaChangeRequest(const std::shared_ptr< Call > &call, const std::vector< libjami::MediaMap > &remoteMediaList)
Process incoming media change request.
void setLocalHostMuteState(MediaType type, bool muted)
Set the mute state of the local host.
void setState(State state)
Set conference state.
bool requestMediaChange(const std::vector< libjami::MediaMap > &mediaList)
Process a media change request.
void onConfOrder(const std::string &callId, const std::string &order)
void addSubCall(const std::string &callId)
Add a new subcall to the conference.
std::shared_ptr< Account > getAccount() const
Definition conference.h:203
Conference(const std::shared_ptr< Account > &, const std::string &confId="")
Constructor for this class, increment static counter.
std::shared_ptr< Call > getCallFromPeerID(std::string_view peerId)
void setActiveStream(const std::string &streamId, bool state)
void setModerator(const std::string &uri, const bool &state)
void stopRecording() override
Stop recording.
bool toggleRecording() override
Start/stop recording toggle.
CallIdSet getSubCalls() const
Get the participant list for this conference.
void setHandRaised(const std::string &uri, const bool &state)
void attachHost(const std::vector< libjami::MediaMap > &mediaList)
Attach host.
void muteParticipant(const std::string &uri, const bool &state)
State getState() const
Return the current conference state.
void muteStream(const std::string &accountUri, const std::string &deviceId, const std::string &streamId, const bool &state)
The client shows one tile per stream (video/audio related to a media)
void updateConferenceInfo(ConfInfo confInfo)
void switchInput(const std::string &input)
void setLayout(int layout)
void reportMediaNegotiationStatus()
Announce to the client that medias are successfully negotiated.
bool isMediaSourceMuted(MediaType type) const
Get the mute state of the local host.
Ring Account is build on top of SIPAccountBase and uses DHT to handle call connectivity.
Definition jamiaccount.h:93
Manager (controller) of daemon.
Definition manager.h:66
std::vector< std::shared_ptr< T > > getAllAccounts() const
Get a list of account pointers of type T (baseclass Account)
Definition manager.h:763
static LIBJAMI_TEST_EXPORT Manager & instance()
Definition manager.cpp:694
CallFactory callFactory
Definition manager.h:826
VideoManager * getVideoManager() const
Definition manager.cpp:3215
bool hangupCall(const std::string &accountId, const std::string &callId)
Functions triggered by a user's action. Hang up the call.
Definition manager.cpp:1158
bool detachHost(const std::shared_ptr< Conference > &conf={})
Detach the local participant from the current conference.
Definition manager.cpp:1590
RingBufferPool & getRingBufferPool()
Return a pointer to the instance of the RingBufferPool.
Definition manager.cpp:3197
static std::vector< MediaAttribute > buildMediaAttributesList(const std::vector< libjami::MediaMap > &mediaList, bool secure)
static bool hasMediaType(const std::vector< MediaAttribute > &mediaList, MediaType type)
static std::vector< libjami::MediaMap > mediaAttributesToMediaMaps(const std::vector< MediaAttribute > &mediaAttrList)
static char const * mediaTypeToString(MediaType type)
static libjami::MediaMap toMediaMap(const MediaAttribute &mediaAttr)
virtual bool startRecording(const std::string &path)
Start recording.
virtual void stopRecording()
Stop recording.
virtual bool toggleRecording()
This method must be implemented for this interface as calls and conferences have different behavior.
std::shared_ptr< MediaRecorder > recorder_
Definition recordable.h:69
bool isRecording() const
Return recording state (true/false)
Definition recordable.h:36
void unBindAllHalfDuplexIn(const std::string &sourceBufferId)
Detaches a source from all its readers.
static const char *const DEFAULT_ID
void unBindAll(const std::string &ringbufferId)
#define JAMI_ERROR(formatstr,...)
Definition logger.h:243
#define JAMI_DEBUG(formatstr,...)
Definition logger.h:238
#define JAMI_WARNING(formatstr,...)
Definition logger.h:242
#define JAMI_LOG(formatstr,...)
Definition logger.h:237
std::string toString(const Json::Value &jsonVal)
Definition json_utils.h:42
bool parse(std::string_view jsonStr, Json::Value &jsonVal)
Definition json_utils.h:30
std::string streamId(const std::string &callId, std::string_view label)
constexpr std::string_view DEFAULT_VIDEO_STREAMID
Definition sip_utils.h:144
constexpr std::string_view DEFAULT_AUDIO_STREAMID
Definition sip_utils.h:145
std::set< std::string > CallIdSet
Definition conference.h:180
static constexpr const char TRUE_STR[]
bool closeMediaPlayer(const std::string &id)
void emitSignal(Args... args)
Definition jami_signal.h:64
std::shared_ptr< AudioInput > getAudioInput(const std::string &device)
std::vector< unsigned > split_string_to_unsigned(std::string_view str, char delim)
std::string_view string_remove_suffix(std::string_view str, char separator)
static constexpr const char FALSE_STR[]
std::string createMediaPlayer(const std::string &path)
@ MEDIA_AUDIO
Definition media_codec.h:46
@ MEDIA_VIDEO
Definition media_codec.h:47
@ MEDIA_NONE
Definition media_codec.h:45
static void runOnMainThread(Callback &&cb)
Definition manager.h:930
static constexpr char MEDIA_TYPE_AUDIO[]
Definition media_const.h:37
static constexpr char MEDIA_TYPE_VIDEO[]
Definition media_const.h:38
static constexpr char ENABLED[]
Definition media_const.h:50
static constexpr char MUTED[]
Definition media_const.h:51
static constexpr const char * SEPARATOR
Definition media_const.h:32
static constexpr const char * FILE
Definition media_const.h:30
void hangupParticipant(const std::string &accountId, const std::string &confId, const std::string &accountUri, const std::string &deviceId)
SIPCall are SIP implementation of a normal Call.
Contains information about an AV subject.
Definition streamdata.h:30
const bool direction
Definition streamdata.h:52
const std::string id
Definition streamdata.h:50
const StreamType type
Definition streamdata.h:54
std::string toString() const
std::vector< std::map< std::string, std::string > > toVectorMapStringString() const
#define jami_tracepoint(...)
Definition tracepoint.h:48