Commit

Use metrics object data
disa6302 committed Dec 29, 2023
1 parent 2c184b9 commit 18146f6
Showing 3 changed files with 17 additions and 32 deletions.
3 changes: 0 additions & 3 deletions examples/app.css
@@ -6,9 +6,6 @@
width: 100%;
}

#videoPlayerFromMaster {
border: 2px solid blue;
}

#logs-header {
margin-top: 20px;
4 changes: 1 addition & 3 deletions examples/index.html
@@ -328,9 +328,7 @@ <h5>Return Channel</h5>
</div>
<div class="col">
<h5>From Master</h5>
<div class="video-container">
<video id="videoPlayerFromMaster" class="remote-view" autoplay playsinline controls></video>
</div>
<div class="video-container"><video class="remote-view" autoplay playsinline controls></video></div>
</div>
</div>
<div class="row datachannel">
42 changes: 16 additions & 26 deletions examples/viewer.js
@@ -39,7 +39,6 @@ let audioRateArray = [];
let timeArray = [];
let chartHeight = 0;

let offerSentTime = 0;
let signalingSetUpTime = 0;
let timeToSetUpViewerMedia = 0;
let timeToFirstFrameFromOffer = 0;
@@ -259,37 +258,31 @@ async function startViewer(localView, remoteView, formValues, onStatsReport, rem
setTimeout(profilingCalculations, profilingTestLength * 1000);
}

if(formValues.enableDQPmetrics) {
const videoElement = document.getElementById('videoPlayerFromMaster');
videoElement.onloadeddata = function() {
let firstFrameTime = Date.now();
timeToFirstFrameFromOffer = firstFrameTime - offerSentTime;
timeToFirstFrameFromViewerStart = firstFrameTime - viewerButtonPressed.getTime();
};
}
viewer.localView = localView;
viewer.remoteView = remoteView;

if (formValues.enableProfileTimeline) {
viewer.remoteView.addEventListener('loadeddata', () => {
metrics.viewer.ttff.endTime = Date.now();

viewer.remoteView.addEventListener('loadeddata', () => {
metrics.viewer.ttff.endTime = Date.now();
if (formValues.enableProfileTimeline) {
metrics.viewer.ttffAfterPc.endTime = metrics.viewer.ttff.endTime;
metrics.master.ttffAfterPc.endTime = metrics.viewer.ttff.endTime;


// If the ICE gathering on the master side is not complete by the time the metrics are sent, the startTime > endTime.
// In order to plot it, we can show it as an ongoing process.
if (metrics.master.iceGathering.startTime > metrics.master.iceGathering.endTime) {
metrics.master.iceGathering.endTime = metrics.viewer.ttff.endTime;
metrics.master.iceGathering.endTime = metrics.viewer.ttff.endTime;
}
});
}
}
if(formValues.enableDQPmetrics) {
timeToFirstFrameFromOffer = metrics.viewer.ttff.endTime - metrics.viewer.offAnswerTime.startTime;
timeToFirstFrameFromViewerStart = metrics.viewer.ttff.endTime - viewerButtonPressed.getTime();
}
});

if (formValues.enableProfileTimeline) {
metrics.viewer.ttff.startTime = viewerButtonPressed;
metrics.master.waitTime.endTime = viewerButtonPressed;

metrics.viewer.ttff.startTime = viewerButtonPressed.getTime();
metrics.master.waitTime.endTime = viewerButtonPressed.getTime();
}

if (formValues.enableDQPmetrics) {
@@ -648,11 +641,10 @@ async function startViewer(localView, remoteView, formValues, onStatsReport, rem
console.log('[VIEWER] Connected to signaling service');

metrics.viewer.setupMediaPlayer.startTime = Date.now();
signalingSetUpTime = Date.now() - viewerButtonPressed.getTime();
signalingSetUpTime = metrics.viewer.setupMediaPlayer.startTime - viewerButtonPressed.getTime();
// Get a stream from the webcam, add it to the peer connection, and display it in the local view.
// If no video/audio is needed, there is no need to request the sources.
// Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
let startViewerMediaSetUp = Date.now();
if (formValues.sendVideo || formValues.sendAudio) {
try {
viewer.localStream = await navigator.mediaDevices.getUserMedia(constraints);
@@ -663,12 +655,10 @@ async function startViewer(localView, remoteView, formValues, onStatsReport, rem
return;
}
}
timeToSetUpViewerMedia = Date.now() - startViewerMediaSetUp;


metrics.viewer.setupMediaPlayer.endTime = Date.now();
timeToSetUpViewerMedia = metrics.viewer.setupMediaPlayer.endTime - metrics.viewer.setupMediaPlayer.startTime;

metrics.viewer.offAnswerTime.startTime = Date.now();
// Create an SDP offer to send to the master
console.log('[VIEWER] Creating SDP offer');
await viewer.peerConnection.setLocalDescription(
@@ -682,7 +672,7 @@ async function startViewer(localView, remoteView, formValues, onStatsReport, rem
if (formValues.useTrickleICE) {
console.log('[VIEWER] Sending SDP offer');
console.debug('SDP offer:', viewer.peerConnection.localDescription);
offerSentTime = Date.now();
metrics.viewer.offAnswerTime.startTime = Date.now();
viewer.signalingClient.sendSdpOffer(viewer.peerConnection.localDescription);
}
console.log('[VIEWER] Generating ICE candidates');
@@ -917,7 +907,7 @@ function calcStats(stats, clientId) {
}
}

// Capture the IP and port of the remote candidate
// Capture the IP and port of the local candidate
if (localCandidate) {
localCandidateConnectionString = '[' + localCandidate.candidateType + '] '
if (localCandidate.address && localCandidate.port) {
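The sketch below (plain JavaScript, not part of the example code) illustrates, under stated assumptions, the pattern this commit adopts: each measured phase is kept as a startTime/endTime span inside a shared metrics object, and the DQP and profiling values are derived from those spans instead of from standalone Date.now() variables such as offerSentTime. The layout of metrics.viewer and metrics.master is inferred from the fields referenced in the diff; makeSpan and deriveViewerTimings are hypothetical helper names introduced here for illustration.

// Minimal sketch of the timing model the commit moves to. The span names and
// derived values mirror the diff above; makeSpan() and deriveViewerTimings()
// are illustrative helpers only and do not exist in the example code.

const makeSpan = () => ({ startTime: 0, endTime: 0 }); // epoch milliseconds

const metrics = {
    viewer: {
        ttff: makeSpan(),             // button press -> remote view 'loadeddata'
        offAnswerTime: makeSpan(),    // startTime is stamped when the SDP offer is sent
        setupMediaPlayer: makeSpan(), // signaling connected -> local media acquired
    },
    master: {
        waitTime: makeSpan(),
        iceGathering: makeSpan(),
    },
};

// DQP/profiling values derived from the metrics object rather than from the
// separate Date.now() captures (offerSentTime, startViewerMediaSetUp) that the commit removes.
function deriveViewerTimings(metrics, viewerButtonPressed) {
    const pressedMs = viewerButtonPressed.getTime();
    return {
        signalingSetUpTime: metrics.viewer.setupMediaPlayer.startTime - pressedMs,
        timeToSetUpViewerMedia: metrics.viewer.setupMediaPlayer.endTime - metrics.viewer.setupMediaPlayer.startTime,
        timeToFirstFrameFromOffer: metrics.viewer.ttff.endTime - metrics.viewer.offAnswerTime.startTime,
        timeToFirstFrameFromViewerStart: metrics.viewer.ttff.endTime - pressedMs,
    };
}

// Usage (values are only meaningful once the spans above have been stamped
// during a real session, e.g. after the remote view fires 'loadeddata'):
const timings = deriveViewerTimings(metrics, new Date());
console.log(timings);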