jam-cloud/web/app/assets/javascripts/session.js

2209 lines
93 KiB
JavaScript

(function(context,$) {
"use strict";
context.JK = context.JK || {};
context.JK.SessionScreen = function(app) {
var EVENTS = context.JK.EVENTS;
var MIX_MODES = context.JK.MIX_MODES;
var NAMED_MESSAGES = context.JK.NAMED_MESSAGES;
var gearUtils = context.JK.GearUtils;
var sessionUtils = context.JK.SessionUtils;
var modUtils = context.JK.ModUtils;
var logger = context.JK.logger;
var self = this;
var sessionModel = null;
var sessionId;
var tracks = {};
var myTracks = [];
var masterMixers = [];
var personalMixers = [];
var allMixers = {};
var mixersByResourceId = {};
var mixersByTrackId = {};
var configureTrackDialog;
var addNewGearDialog;
var localRecordingsDialog = null;
var recordingFinishedDialog = null;
var friendSelectorDialog = null;
var inviteMusiciansUtil = null;
var screenActive = false;
var currentMixerRangeMin = null;
var currentMixerRangeMax = null;
var lookingForMixersCount = 0;
var lookingForMixersTimer = null;
var lookingForMixers = [];
var $recordingTimer = null;
var recordingTimerInterval = null;
var startTimeDate = null;
var startingRecording = false; // double-click guard
var claimedRecording = null;
var playbackControls = null;
var promptLeave = false;
var rateSessionDialog = null;
var friendInput = null;
var sessionPageDone = null;
var $recordingManagerViewer = null;
var $screen = null;
var $mixModeDropdown = null;
var $templateMixerModeChange = null;
var $otherAudioContainer = null;
var $myTracksContainer = null;
var $liveTracksContainer = null;
var downloadJamTrack = null;
var rest = context.JK.Rest();
var RENDER_SESSION_DELAY = 750; // When I need to render a session, I have to wait a bit for the mixers to be there.
var defaultParticipant = {
tracks: [{
instrument_id: "unknown"
}],
user: {
first_name: 'Unknown',
last_name: 'User',
photo_url: null
}
};
// Be sure to copy/extend these instead of modifying in place
var trackVuOpts = {
vuType: "vertical",
lightCount: 13,
lightWidth: 3,
lightHeight: 17
};
// Must add faderId key to this
var trackFaderOpts = {
faderType: "vertical",
height: 83
};
// Recreate ChannelGroupIDs ENUM from C++
var ChannelGroupIds = {
"MasterGroup": 0,
"MonitorGroup": 1,
"AudioInputMusicGroup": 2,
"AudioInputChatGroup": 3,
"MediaTrackGroup": 4,
"StreamOutMusicGroup": 5,
"StreamOutChatGroup": 6,
"UserMusicInputGroup": 7,
"UserChatInputGroup": 8,
"PeerAudioInputMusicGroup": 9,
"PeerMediaTrackGroup": 10,
"JamTrackGroup": 11,
"MetronomeGroup": 12
};
function beforeShow(data) {
sessionId = data.id;
if(!sessionId) {
window.location = '/client#/home';
}
promptLeave = true;
$myTracksContainer.empty();
displayDoneRecording(); // assumption is that you can't join a recording session, so this should be safe
var shareDialog = new JK.ShareDialog(context.JK.app, sessionId, "session");
shareDialog.initialize(context.JK.FacebookHelperInstance);
}
function beforeDisconnect() {
return { freezeInteraction: true };
}
function initializeSession() {
// Subscribe for callbacks on audio events
context.jamClient.SessionRegisterCallback("JK.HandleBridgeCallback");
context.jamClient.RegisterRecordingCallbacks("JK.HandleRecordingStartResult", "JK.HandleRecordingStopResult", "JK.HandleRecordingStarted", "JK.HandleRecordingStopped", "JK.HandleRecordingAborted");
context.jamClient.SessionSetConnectionStatusRefreshRate(1000);
// If you load this page directly, the loading of the current user
// is happening in parallel. We can't join the session until the
// current user has been completely loaded. Poll for the current user
// before proceeding with session joining.
function checkForCurrentUser() {
if (context.JK.userMe) {
afterCurrentUserLoaded();
} else {
context.setTimeout(checkForCurrentUser, 100);
}
}
checkForCurrentUser();
}
function afterShow(data) {
if(!context.JK.JamServer.connected) {
promptLeave = false;
app.notifyAlert("Not Connected", 'To create or join a session, you must be connected to the server.');
window.location = '/client#/home'
return;
}
// The SessionModel is a singleton.
// a client can only be in one session at a time,
// and other parts of the code want to know at any certain times
// about the current session, if any (for example, reconnect logic)
if(context.JK.CurrentSessionModel) {
context.JK.CurrentSessionModel.ensureEnded();
}
context.JK.CurrentSessionModel = sessionModel = new context.JK.SessionModel(
context.JK.app,
context.JK.JamServer,
context.jamClient,
self
);
sessionModel.start(sessionId);
// indicate that the screen is active, so that
// body-scoped drag handlers can go active
screenActive = true;
gearUtils.guardAgainstInvalidConfiguration(app)
.fail(function() {
promptLeave = false;
window.location = '/client#/home'
})
.done(function(){
var result = sessionUtils.SessionPageEnter();
gearUtils.guardAgainstActiveProfileMissing(app, result)
.fail(function(data) {
promptLeave = false;
if(data && data.reason == 'handled') {
if(data.nav == 'BACK') {
window.history.go(-1);
}
else {
window.location = data.nav;
}
}
else {
window.location = '/client#/home';
}
})
.done(function(){
sessionModel.waitForSessionPageEnterDone()
.done(function(userTracks) {
context.JK.CurrentSessionModel.setUserTracks(userTracks);
initializeSession();
})
.fail(function(data) {
if(data == "timeout") {
context.JK.alertSupportedNeeded('The audio system has not reported your configured tracks in a timely fashion.')
}
else if(data == 'session_over') {
// do nothing; session ended before we got the user track info. just bail
}
else {
contetx.JK.alertSupportedNeeded('Unable to determine configured tracks due to reason: ' + data)
}
promptLeave = false;
window.location = '/client#/home'
});
})
})
}
function notifyWithUserInfo(title , text, clientId) {
sessionModel.findUserBy({clientId: clientId})
.done(function(user) {
app.notify({
"title": title,
"text": user.name + " " + text,
"icon_url": context.JK.resolveAvatarUrl(user.photo_url)
});
})
.fail(function() {
app.notify({
"title": title,
"text": 'Someone ' + text,
"icon_url": "/assets/content/icon_alert_big.png"
});
});
}
function afterCurrentUserLoaded() {
var sessionModel = context.JK.CurrentSessionModel;
$(sessionModel.recordingModel)
.on('startingRecording', function(e, data) {
displayStartingRecording();
})
.on('startedRecording', function(e, data) {
if(data.reason) {
var reason = data.reason;
var detail = data.detail;
var title = "Could Not Start Recording";
if(data.reason == 'client-no-response') {
notifyWithUserInfo(title, 'did not respond to the start signal.', detail);
}
else if(data.reason == 'empty-recording-id') {
app.notifyAlert(title, "No recording ID specified.");
}
else if(data.reason == 'missing-client') {
notifyWithUserInfo(title, 'could not be signalled to start recording.', detail);
}
else if(data.reason == 'already-recording') {
app.notifyAlert(title, 'Already recording. If this appears incorrect, try restarting JamKazam.');
}
else if(data.reason == 'recording-engine-unspecified') {
notifyWithUserInfo(title, 'had a problem writing recording data to disk.', detail);
}
else if(data.reason == 'recording-engine-create-directory') {
notifyWithUserInfo(title, 'had a problem creating a recording folder.', detail);
}
else if(data.reason == 'recording-engine-create-file') {
notifyWithUserInfo(title, 'had a problem creating a recording file.', detail);
}
else if(data.reason == 'recording-engine-sample-rate') {
notifyWithUserInfo(title, 'had a problem recording at the specified sample rate.', detail);
}
else if(data.reason == 'rest') {
var jqXHR = detail[0];
app.notifyServerError(jqXHR);
}
else {
notifyWithUserInfo(title, 'Error Reason: ' + reason);
}
displayDoneRecording();
}
else
{
displayStartedRecording();
displayWhoCreated(data.clientId);
}
})
.on('stoppingRecording', function(e, data) {
displayStoppingRecording(data);
})
.on('stoppedRecording', function(e, data) {
if(data.reason) {
logger.warn("Recording Discarded: ", data);
var reason = data.reason;
var detail = data.detail;
var title = "Recording Discarded";
if(data.reason == 'client-no-response') {
notifyWithUserInfo(title, 'did not respond to the stop signal.', detail);
}
else if(data.reason == 'missing-client') {
notifyWithUserInfo(title, 'could not be signalled to stop recording.', detail);
}
else if(data.reason == 'empty-recording-id') {
app.notifyAlert(title, "No recording ID specified.");
}
else if(data.reason == 'wrong-recording-id') {
app.notifyAlert(title, "Wrong recording ID specified.");
}
else if(data.reason == 'not-recording') {
app.notifyAlert(title, "Not currently recording.");
}
else if(data.reason == 'already-stopping') {
app.notifyAlert(title, "Already stopping the current recording.");
}
else if(data.reason == 'start-before-stop') {
notifyWithUserInfo(title, 'asked that we start a new recording; cancelling the current one.', detail);
}
else {
app.notifyAlert(title, "Error reason: " + reason);
}
displayDoneRecording();
}
else {
displayDoneRecording();
promptUserToSave(data.recordingId);
}
})
.on('abortedRecording', function(e, data) {
var reason = data.reason;
var detail = data.detail;
var title = "Recording Cancelled";
if(data.reason == 'client-no-response') {
notifyWithUserInfo(title, 'did not respond to the start signal.', detail);
}
else if(data.reason == 'missing-client') {
notifyWithUserInfo(title, 'could not be signalled to start recording.', detail);
}
else if(data.reason == 'populate-recording-info') {
notifyWithUserInfo(title, 'could not synchronize with the server.', detail);
}
else if(data.reason == 'recording-engine-unspecified') {
notifyWithUserInfo(title, 'had a problem writing recording data to disk.', detail);
}
else if(data.reason == 'recording-engine-create-directory') {
notifyWithUserInfo(title, 'had a problem creating a recording folder.', detail);
}
else if(data.reason == 'recording-engine-create-file') {
notifyWithUserInfo(title, 'had a problem creating a recording file.', detail);
}
else if(data.reason == 'recording-engine-sample-rate') {
notifyWithUserInfo(title, 'had a problem recording at the specified sample rate.', detail);
}
else {
app.notifyAlert(title, "Error reason: " + reason);
}
displayDoneRecording();
})
sessionModel.subscribe('sessionScreen', sessionChanged);
sessionModel.joinSession(sessionId)
.fail(function(xhr, textStatus, errorMessage) {
if(xhr.status == 404) {
// we tried to join the session, but it's already gone. kick user back to join session screen
promptLeave = false;
context.window.location = "/client#/findSession";
app.notify(
{ title: "Unable to Join Session",
text: "The session you attempted to join is over."
},
null,
true);
}
else if(xhr.status == 422) {
var response = JSON.parse(xhr.responseText);
if(response["errors"] && response["errors"]["tracks"] && response["errors"]["tracks"][0] == "Please select at least one track") {
app.notifyAlert("No Inputs Configured", $('<span>You will need to reconfigure your audio device.</span>'));
}
else if(response["errors"] && response["errors"]["music_session"] && response["errors"]["music_session"][0] == ["is currently recording"]) {
promptLeave = false;
context.window.location = "/client#/findSession";
app.notify( { title: "Unable to Join Session", text: "The session is currently recording." }, null, true);
}
else {
app.notifyServerError(xhr, 'Unable to Join Session');
}
}
else {
app.notifyServerError(xhr, 'Unable to Join Session');
}
});
}
// not leave session but leave screen
function beforeLeave(data) {
if(promptLeave) {
var leaveSessionWarningDialog = new context.JK.LeaveSessionWarningDialog(context.JK.app,
function() { promptLeave = false; context.location.hash = data.hash });
leaveSessionWarningDialog.initialize();
app.layout.showDialog('leave-session-warning');
return false;
}
return true;
}
function beforeHide(data) {
if(screenActive) {
// this path is possible if FTUE is invoked on session page, and they cancel
sessionModel.leaveCurrentSession()
.fail(function(jqXHR) {
if(jqXHR.status != 404) {
logger.debug("leave session failed");
app.ajaxError(arguments)
}
});
}
screenActive = false;
sessionUtils.SessionPageLeave();
}
function handleTransitionsInRecordingPlayback() {
// let's see if we detect a transition to start playback or stop playback
var currentSession = sessionModel.getCurrentSession();
if(claimedRecording == null && (currentSession && currentSession.claimed_recording != null)) {
// this is a 'started with a claimed_recording' transition.
// we need to start a timer to watch for the state of the play session
playbackControls.startMonitor();
}
else if(claimedRecording && (currentSession == null || currentSession.claimed_recording == null)) {
playbackControls.stopMonitor();
}
claimedRecording = currentSession == null ? null : currentSession.claimed_recording;
}
function sessionChanged() {
handleTransitionsInRecordingPlayback();
// TODO - in the specific case of a user changing their tracks using the configureTrack dialog,
// this event appears to fire before the underlying mixers have updated. I have no event to
// know definitively when the underlying mixers are up to date, so for now, we just delay slightly.
// This obviously has the possibility of introducing time-based bugs.
context.setTimeout(renderSession, RENDER_SESSION_DELAY);
}
/**
* the mixers object is a list. In order to find one by key,
* you must iterate. Convenience method to locate a particular
* mixer by id.
*/
function getMixer(mixerId) {
return allMixers[mixerId];
}
function getMixerByResourceId(resourceId, mode) {
var mixerPair = mixersByResourceId[resourceId];
if(!mixerPair) {return null;}
if(mode === undefined) {
return mixerPair;
}
else {
if(mode == MIX_MODES.MASTER) {
return mixerPair.master;
}
else {
return mixerPair.personal;
}
}
}
function getMixerByTrackId(trackId, mode) {
var mixerPair = mixersByTrackId[trackId];
if(!mixerPair) {return null;}
if(mode === undefined) {
return mixerPair;
}
else {
if(mode == MIX_MODES.MASTER) {
return mixerPair.master;
}
else {
return mixerPair.personal;
}
}
}
function resetOtherAudioContent() {
if ($('.session-recordings .track').length === 0 && $('.session-recordings .download-jamtrack').length === 0) {
$('.session-recordings .when-empty').show();
$('.session-recording-name-wrapper').hide();
$('.session-recordings .recording-controls').hide();
$('.session-recordings .session-recording-name').text('(No audio loaded)')
}
}
function renderSession() {
$myTracksContainer.empty();
$('.session-track').remove(); // Remove previous tracks
var $voiceChat = $('#voice-chat');
$voiceChat.hide();
_updateMixers();
_renderTracks();
_renderLocalMediaTracks();
_wireTopVolume();
_wireTopMix();
_addVoiceChat();
_initDialogs();
if ($('.session-livetracks .track').length === 0) {
$('.session-livetracks .when-empty').show();
}
resetOtherAudioContent();
}
function _initDialogs() {
configureTrackDialog.initialize();
addNewGearDialog.initialize();
}
// Get the latest list of underlying audio mixer channels, and populates:
// * mixersByResourceId - a hash of resourceId / { master: mixer, personal: mixer } personal: can be null in case of PeerAudioInputMusicGroup
// * mixersByTrackId - a hash of track id / {master: mixer, personal: mixer}.
// * allMixers - a hash of mixer.id / mixer
// * masterMixers - array of master mode mixers
// * personalMixers - array of personal mode mixers
function _updateMixers() {
masterMixers = context.jamClient.SessionGetAllControlState(true);
personalMixers = context.jamClient.SessionGetAllControlState(false);
//logger.debug("masterMixers", masterMixers)
//logger.debug("personalMixers", personalMixers)
mixersByResourceId = {}
mixersByTrackId = {}
allMixers = {}
var i;
for(i = 0; i < masterMixers.length; i++) {
var masterMixer = masterMixers[i];
allMixers[masterMixer.id] = masterMixer; // populate allMixers by mixer.id
// populate mixer pair
var mixerPair = {}
mixersByResourceId[masterMixer.rid] = mixerPair
mixersByTrackId[masterMixer.id] = mixerPair
mixerPair.master = masterMixer;
}
for(i = 0; i < personalMixers.length; i++) {
var personalMixer = personalMixers[i];
if(personalMixer.group_id == ChannelGroupIds.MediaTrackGroup) {
continue;
}
allMixers[personalMixer.id] = personalMixer
// populate other side of mixer pair
var mixerPair = mixersByResourceId[personalMixer.rid]
if(!mixerPair) {
if(personalMixer.group_id != ChannelGroupIds.MonitorGroup) {
logger.warn("there is no master version of ", personalMixer)
}
mixerPair = {}
mixersByResourceId[personalMixer.rid] = mixerPair
}
mixersByTrackId[personalMixer.id] = mixerPair;
mixerPair.personal = personalMixer;
}
// Always add a hard-coded simplified 'mixer' for the L2M mix
/**
var l2m_mixer = {
id: '__L2M__',
range_low: -80,
range_high: 20,
volume_left: context.jamClient.SessionGetMasterLocalMix()
};
mixers.push(l2m_mixer);*/
}
function _mixersForGroupId(groupId, mixMode) {
var foundMixers = [];
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
$.each(mixers, function(index, mixer) {
if ( mixer.group_id === groupId) {
foundMixers.push(mixer);
}
});
return foundMixers;
}
function _mixersForGroupIds(groupIds, mixMode) {
var foundMixers = [];
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
$.each(mixers, function(index, mixer) {
var groupIdLen = groupIds.length;
for (var i = 0; i < groupIdLen; i++) {
if ( mixer.group_id === groupIds[i]) {
foundMixers.push(mixer);
}
}
});
return foundMixers;
}
function _getMyVoiceChatMixers() {
var mixers = _mixersForGroupId(ChannelGroupIds.AudioInputChatGroup, sessionModel.getMixMode());
if (mixers.length == 0) { return null; }
var oppositeMixers = _mixersForGroupId(ChannelGroupIds.AudioInputChatGroup, !sessionModel.getMixMode());
if(oppositeMixers.length == 0) {
logger.warn("unable to find opposite mixer for voice chat");
return null;
}
var mixer = mixers[0];
var oppositeMixer = oppositeMixers[0];
return {
mixer: mixer,
oppositeMixer: oppositeMixer,
vuMixer: mixer,
muteMixer: mixer
}
}
function _clientIdForUserInputMixer(mixerId, mixMode) {
var found = null;
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
$.each(mixers, function(index, mixer) {
if (mixer.group_id === ChannelGroupIds.UserMusicInputGroup && mixer.id == mixerId) {
found = mixer.client_id;
return false;
}
});
return found;
}
// TODO FIXME - This needs to support multiple tracks for an individual
// client id and group.
function _mixerForClientId(clientId, groupIds, usedMixers) {
//logger.debug("clientId", clientId, "groupIds", groupIds, "mixers", mixers)
var foundMixer = null;
$.each(mixers, function(index, mixer) {
if (mixer.client_id === clientId) {
for (var i=0; i<groupIds.length; i++) {
if (mixer.group_id === groupIds[i]) {
if (!(mixer.id in usedMixers)) {
foundMixer = mixer;
return false;
}
}
}
}
});
return foundMixer;
}
function _groupedMixersForClientId(clientId, groupIds, usedMixers, mixMode) {
//logger.debug("clientId", clientId, "groupIds", groupIds, "mixers", mixers)
var foundMixers = {};
var mixers = mixMode == MIX_MODES.MASTER ? masterMixers : personalMixers;
// console.log("_groupedMixersForClientId", mixers)
$.each(mixers, function(index, mixer) {
if (mixer.client_id === clientId) {
for (var i=0; i<groupIds.length; i++) {
if (mixer.group_id === groupIds[i]) {
if ((mixer.groupId != ChannelGroupIds.UserMusicInputGroup) && !(mixer.id in usedMixers)) {
var mixers = foundMixers[mixer.group_id]
if(!mixers) {
mixers = []
foundMixers[mixer.group_id] = mixers;
}
mixers.push(mixer)
}
}
}
}
});
return foundMixers;
}
function _wireTopVolume() {
var gainPercent = 0;
var mixerIds = [];
var mixers = sessionModel.isMasterMixMode() ? masterMixers : personalMixers;
$.each(mixers, function(index, mixer) {
if (sessionModel.isMasterMixMode() && mixer.group_id === ChannelGroupIds.MasterGroup) {
mixerIds.push(mixer.id);
gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
}
else if (!sessionModel.isMasterMixMode() && mixer.group_id === ChannelGroupIds.MonitorGroup) {
mixerIds.push(mixer.id);
gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
}
});
if(mixerIds.length == 0) {
logger.debug("did not find master/monitor volume", mixers)
}
var faderId = mixerIds.join(',');
var $volume = $('#volume');
$volume.attr('mixer-id', faderId);
var faderOpts = {
faderId: faderId,
faderType: "horizontal",
width: 50,
style: {
"background-image": "none",
"background-repeat":"no-repeat",
"height": "24px"
}
};
context.JK.FaderHelpers.renderFader($volume, faderOpts);
$volume.on('fader_change', faderChanged);
// Visually update fader to underlying mixer start value.
// Always do this, even if gainPercent is zero.
context.JK.FaderHelpers.setFaderValue(faderId, gainPercent);
}
/**
* This control has it's own Set/Get methods, so we don't need to
* line it up with some mixer later. We'll use a special mixer-id value
* to let us know we're dealing with the mix control.
*/
function _wireTopMix() {
var $mixSlider = $('#l2m');
var l2m_mixer = {
range_low: -80,
range_high: 20,
volume_left: context.jamClient.SessionGetMasterLocalMix()
};
// var gainPercent = percentFromMixerValue(
// l2m_mixer.range_low, l2m_mixer.range_high, l2m_mixer.volume_left);
var faderId = '#l2m'; // also the selector for renderFader
var faderOpts = {
faderId: faderId,
faderType: "horizontal",
width: 70,
style: {
"background-image": "none",
"background-repeat":"no-repeat",
"height": "24px"
}
};
context.JK.FaderHelpers.renderFader($mixSlider, faderOpts);
$mixSlider.on('fader_change', l2mChanged);
var value = context.jamClient.SessionGetMasterLocalMix();
context.JK.FaderHelpers.setFaderValue(faderId, percentFromMixerValue(-80, 20, value));
}
/**
* This has a specialized jamClient call, so custom handler.
*/
function l2mChanged(e, data) {
//var dbValue = context.JK.FaderHelpers.convertLinearToDb(newValue);
context.jamClient.SessionSetMasterLocalMix(data.percentage - 80);
}
function _addVoiceChat() {
// If, and only if, there is a mixer in group 3 (voice chat)
// Add the voice chat controls below my tracks, and hook up the mixer.
// Assumption is that there is only ever one, so we just take the first one.
var voiceChatMixers = _getMyVoiceChatMixers();
if(voiceChatMixers) {
var mixer = voiceChatMixers.mixer;
var $voiceChat = $('#voice-chat');
$voiceChat.show();
$voiceChat.attr('mixer-id', mixer.id);
var $voiceChatGain = $voiceChat.find('.voicechat-gain');
$voiceChatGain.attr('mixer-id', mixer.id);
var $voiceChatMute = $voiceChat.find('.voicechat-mute').attr('mixer-id', mixer.id).data('mixer', mixer).data('opposite-mixer', voiceChatMixers.oppositeMixer)
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var faderOpts = {
faderId: mixer.id,
faderType: "horizontal",
width: 50
};
context.JK.FaderHelpers.renderFader($voiceChatGain, faderOpts);
$voiceChatGain.on('fader_change', faderChanged);
context.JK.FaderHelpers.setFaderValue(mixer.id, gainPercent);
if (mixer.mute) {
_toggleVisualMuteControl($voiceChatMute, mixer.mute);
}
}
}
function _renderLocalMediaTracks() {
// local media mixers come in different groups (MediaTrack, JamTrack, Metronome), but peer mixers are always PeerMediaTrackGroup
var localMediaMixers = _mixersForGroupIds([ChannelGroupIds.MediaTrackGroup, ChannelGroupIds.JamTrackGroup, ChannelGroupIds.MetronomeGroup], MIX_MODES.MASTER);
var peerLocalMediaMixers = _mixersForGroupId(ChannelGroupIds.PeerMediaTrackGroup, MIX_MODES.MASTER);
// with mixer info, we use these to decide what kind of tracks are open in the backend
// each mixer has a media_type field, which describes the type of media track it is.
// * JamTrack
// * BackingTrack
// * RecordingTrack
// * MetronomeTrack
// * "" - adhoc track (not supported visually)
// it is supposed to be the case that there are only one type of track open at a time, however, that's a business policy/logic
// constraint; and may be buggy. **So, we should render whatever we have, so that it's obvious what's really going on.**
// so, let's group up all mixers by type, and then ask them to be rendered
var recordingTrackMixers = [];
var backingTrackMixers = [];
var jamTrackMixers = [];
var metronomeTrackMixers = [];
var adhocTrackMixers = [];
function groupByType(mixers) {
context._.each(mixers, function(mixer) {
var mediaType = mixer.media_type;
if(mediaType == 'RecordingTrack') {
recordingTrackMixers.push(mixer)
}
else if(mediaType == 'BackingTrack') {
backingTrackMixers.push(mixer);
}
else if(mediaType == 'MetronomeTrack') {
metronomeTrackMixers.push(mixer);
}
else if(mediaType == 'JamTrack') {
jamTrackMixers.push(mixer);
}
else {
adhocTrackMixers.push(mixer);
}
});
}
groupByType(localMediaMixers);
groupByType(peerLocalMediaMixers);
if(recordingTrackMixers.length > 0) {
renderRecordingTracks(recordingTrackMixers)
}
if(backingTrackMixers.length > 0) {
renderBackingTracks(backingTrackMixers)
}
if(jamTrackMixers.length > 0) {
renderJamTracks(jamTrackMixers);
}
if(metronomeTrackMixers.length > 0) {
renderMetronomeTracks(jamTrackMixers);
}
if(adhocTrackMixers.length > 0) {
logger.warn("some tracks are open that we don't know how to show")
}
}
function renderBackingTracks(backingTrackMixers) {
logger.error("do not know how to draw backing tracks yet")
}
function renderJamTracks(jamTrackMixers) {
logger.debug("rendering jam tracks")
var jamTracks = sessionModel.jamTracks();
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
// if it's a locally opened track (JamTrackGroup), then we can say this person is the opener
var isOpener = jamTrackMixers[0].group_id == ChannelGroupIds.JamTrackGroup;
// using the server's info in conjuction with the client's, draw the recording tracks
if(jamTracks) {
$('.session-recording-name').text(sessionModel.getCurrentSession().jam_track.name);
var noCorrespondingTracks = false;
$.each(jamTrackMixers, function(index, mixer) {
var preMasteredClass = "";
// find the track or tracks that correspond to the mixer
var correspondingTracks = []
console.log("mixer", mixer)
$.each(jamTracks, function(i, jamTrack) {
if(mixer.id.indexOf("L") == 0) {
if(mixer.id.substring(1) == jamTrack.id) {
correspondingTracks.push(jamTrack);
}
else {
// this should not be possible
alert("Invalid state: the recorded track had neither persisted_track_id or persisted_client_id");
}
}
});
if(correspondingTracks.length == 0) {
noCorrespondingTracks = true;
app.notify({
title: "Unable to Open JamTrack",
text: "Could not correlate server and client tracks",
icon_url: "/assets/content/icon_alert_big.png"});
return false;
}
// prune found recorded tracks
jamTracks = $.grep(jamTracks, function(value) {
return $.inArray(value, correspondingTracks) < 0;
});
var oneOfTheTracks = correspondingTracks[0];
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
var photoUrl = "/assets/content/icon_recording.png";
var name = oneOfTheTracks.part
if (!name) {
name = oneOfTheTracks.instrument;
}
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: oneOfTheTracks.id,
clientId: oneOfTheTracks.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass : 'avatar-recording',
preMasteredClass: ""
};
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
if(sessionModel.isPersonalMixMode() || !isOpener) {
trackData.mediaControlsDisabled = true;
trackData.mediaTrackOpener = isOpener;
}
_addRecordingTrack(trackData);
});
if(!noCorrespondingTracks && jamTracks.length > 0) {
logger.error("unable to find all jam tracks against client tracks");
app.notify({title:"All tracks not found",
text: "Some tracks in the jam tracks are not present in the playback",
icon_url: "/assets/content/icon_alert_big.png"})
}
}
}
function renderMetronomeTracks(metronomeTrackMixers) {
logger.error("do not know how to draw metronome tracks yet")
}
function renderRecordingTracks(recordingMixers) {
// get the server's info for the recording
var recordedTracks = sessionModel.recordedTracks();
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between Local vs Peer)
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
var isOpener = recordingMixers[0].group_id == ChannelGroupIds.MediaTrackGroup;
// using the server's info in conjuction with the client's, draw the recording tracks
if(recordedTracks) {
$('.session-recording-name').text(sessionModel.getCurrentSession().claimed_recording.name);
var noCorrespondingTracks = false;
$.each(recordingMixers, function(index, mixer) {
var preMasteredClass = "";
// find the track or tracks that correspond to the mixer
var correspondingTracks = []
$.each(recordedTracks, function(i, recordedTrack) {
if(mixer.id.indexOf("L") == 0) {
if(mixer.id.substring(1) == recordedTrack.client_track_id) {
correspondingTracks.push(recordedTrack);
}
}
else if(mixer.id.indexOf("C") == 0) {
if(mixer.id.substring(1) == recordedTrack.client_id) {
correspondingTracks.push(recordedTrack);
preMasteredClass = "pre-mastered-track";
}
}
else {
// this should not be possible
alert("Invalid state: the recorded track had neither persisted_track_id or persisted_client_id");
}
});
if(correspondingTracks.length == 0) {
noCorrespondingTracks = true;
app.notify({
title: "Unable to Open Recording",
text: "Could not correlate server and client tracks",
icon_url: "/assets/content/icon_alert_big.png"});
return false;
}
// prune found recorded tracks
recordedTracks = $.grep(recordedTracks, function(value) {
return $.inArray(value, correspondingTracks) < 0;
});
var oneOfTheTracks = correspondingTracks[0];
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
var photoUrl = "/assets/content/icon_recording.png";
var name = oneOfTheTracks.user.name;
if (!(name)) {
name = oneOfTheTracks.user.first_name + ' ' + oneOfTheTracks.user.last_name;
}
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: oneOfTheTracks.id,
clientId: oneOfTheTracks.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass : 'avatar-recording',
preMasteredClass: preMasteredClass
};
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
if(sessionModel.isPersonalMixMode() || !isOpener) {
trackData.mediaControlsDisabled = true;
trackData.mediaTrackOpener = isOpener;
}
_addRecordingTrack(trackData);
});
if(!noCorrespondingTracks && recordedTracks.length > 0) {
logger.error("unable to find all recorded tracks against client tracks");
app.notify({title:"All tracks not found",
text: "Some tracks in the recording are not present in the playback",
icon_url: "/assets/content/icon_alert_big.png"})
}
}
}
function trackMuteSelected(e, data) {
var muteOption = data.muteOption; // muteOption is going to be either 'master' or 'personal'. We mute the correct one, based on track info
var $muteControl = $(this);
// mixer is the mixer object returned from the backend corresponding to the mixer in this particular mode
// oppositeMixer is the mixer correspond to the opposite mode.
// Note that oppositeMixer is not ever set for ChannelGroupIds.AudioInputMusicGroup or ChannelGroupIds.MediaTrackGroup
var mixer = $muteControl.data('mixer')
var oppositeMixer = $muteControl.data('opposite-mixer')
logger.debug("muting tracks. current mixer id=" + mixer.id + ", opposite mixer id=" + oppositeMixer.id)
var mixerPair = {}
if(sessionModel.isMasterMixMode()) {
mixerPair.master = mixer;
mixerPair.personal = oppositeMixer;
}
else {
mixerPair.master = oppositeMixer;
mixerPair.personal = mixer;
}
if(muteOption == 'master') {
_toggleAudioMute(mixerPair.master.id, true, mixerPair.master.mode);
_toggleAudioMute(mixerPair.personal.id, true, mixerPair.personal.mode);
}
else {
_toggleAudioMute(mixerPair.personal.id, true, mixerPair.personal.mode);
_toggleAudioMute(mixerPair.master.id, false, mixerPair.master.mode);
}
_toggleVisualMuteControl($muteControl, true);
}
// find backend mixer based on track data, and target client_id
function findMixerForTrack(client_id, track, myTrack) {
var mixer = null; // what is the best mixer for this track/client ID?
var oppositeMixer = null; // what is the corresponding mixer in the opposite mode?
var vuMixer = null;
var muteMixer = null;
var mixMode = sessionModel.getMixMode();
if(myTrack) {
// when it's your track, look it up by the backend resource ID
mixer = getMixerByTrackId(track.client_track_id, mixMode)
vuMixer = mixer;
muteMixer = mixer;
// sanity checks
if(mixer && (mixer.group_id != ChannelGroupIds.AudioInputMusicGroup)) { logger.error("found local mixer that was not of groupID: AudioInputMusicGroup", mixer) }
if(mixer) {
// find the matching AudioInputMusicGroup for the opposite mode
oppositeMixer = getMixerByTrackId(track.client_track_id, !mixMode)
if(mixMode == MIX_MODES.PERSONAL) {
muteMixer = oppositeMixer; // make the master mixer the mute mixer
}
// sanity checks
if(!oppositeMixer) {logger.error("unable to find opposite mixer for local mixer", mixer)}
else if(oppositeMixer.group_id != ChannelGroupIds.AudioInputMusicGroup) { logger.error("found local mixer in opposite mode that was not of groupID: AudioInputMusicGroup", mixer, oppositeMixer)}
}
else {
logger.debug("local track is not present: ", track)
}
}
else {
if(mixMode === MIX_MODES.MASTER) {
// when it's a remote track and in master mode, we should find the PeerAudioInputMusicGroup
mixer = getMixerByTrackId(track.client_track_id, MIX_MODES.MASTER)
if(mixer && (mixer.group_id != ChannelGroupIds.PeerAudioInputMusicGroup)) { logger.error("found remote mixer that was not of groupID: PeerAudioInputMusicGroup", mixer) }
vuMixer = mixer;
muteMixer = mixer;
if(mixer) {
// we should be able to find a UserMusicInputGroup for this clientId in personal mode
var oppositeMixers = _groupedMixersForClientId(client_id, [ ChannelGroupIds.UserMusicInputGroup], {}, MIX_MODES.PERSONAL);
if (oppositeMixers[ChannelGroupIds.UserMusicInputGroup]) { oppositeMixer = oppositeMixers[ChannelGroupIds.UserMusicInputGroup][0]; }
if(!oppositeMixer) {logger.error("unable to find UserMusicInputGroup corresponding to PeerAudioInputMusicGroup mixer", mixer ) }
}
}
else {
// when it's a remote track and in personal mode, we want the 'Peer Stream', which is UserMusicInputGroup
// this spans N tracks for the remote user
var mixers = _groupedMixersForClientId(client_id, [ ChannelGroupIds.UserMusicInputGroup], {}, MIX_MODES.PERSONAL);
if (mixers[ChannelGroupIds.UserMusicInputGroup]) { mixer = mixers[ChannelGroupIds.UserMusicInputGroup][0]; }
vuMixer = mixer;
muteMixer = mixer;
if(mixer) {
// now grab the PeerAudioInputMusicGroup in master mode to satisfy the 'opposite' mixer
oppositeMixer = getMixerByTrackId(track.client_track_id, MIX_MODES.MASTER)
if(!oppositeMixer) {logger.debug("unable to find a PeerAudioInputMusicGroup master mixer matching a UserMusicInput", track.client_track_id, mixersByTrackId)}
else if(oppositeMixer.group_id != ChannelGroupIds.PeerAudioInputMusicGroup) { logger.error("found remote mixer that was not of groupID: PeerAudioInputMusicGroup", mixer) }
vuMixer = oppositeMixer; // for personal mode, use the PeerAudioInputMusicGroup's VUs
}
}
}
return {
mixer: mixer,
oppositeMixer: oppositeMixer,
vuMixer: vuMixer,
muteMixer: muteMixer
}
}
function _renderTracks() {
myTracks = [];
// Participants are here now, but the mixers don't update right away.
// Draw tracks from participants, then setup timers to look for the
// mixers that go with those participants, if they're missing.
lookingForMixers = [] // clear this back out as we are restarting from scratch
lookingForMixersCount = 0;
$.each(sessionModel.participants(), function(index, participant) {
var name = participant.user.name;
if (!(name)) {
name = participant.user.first_name + ' ' + participant.user.last_name;
}
var myTrack = app.clientId == participant.client_id;
// loop through all tracks for each participant
$.each(participant.tracks, function(index, track) {
var instrumentIcon = context.JK.getInstrumentIcon45(track.instrument_id);
var photoUrl = context.JK.resolveAvatarUrl(participant.user.photo_url);
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: track.id,
connection_id: track.connection_id,
client_track_id: track.client_track_id,
client_resource_id: track.client_resource_id,
clientId: participant.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass: 'avatar-med',
preMasteredClass: "",
myTrack: myTrack
};
var mixerData = findMixerForTrack(participant.client_id, track, myTrack)
var mixer = mixerData.mixer;
var vuMixer = mixerData.vuMixer;
var muteMixer = mixerData.muteMixer;
var oppositeMixer = mixerData.oppositeMixer;
if (mixer && oppositeMixer) {
myTrack = (mixer.group_id === ChannelGroupIds.AudioInputMusicGroup);
if(!myTrack) {
// it only makes sense to track 'audio established' for tracks that don't belong to you
sessionModel.setAudioEstablished(participant.client_id, true);
}
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id;
trackData.vuMixerId = vuMixer.id;
trackData.oppositeMixer = oppositeMixer;
trackData.muteMixerId = muteMixer.id;
trackData.noaudio = false;
trackData.group_id = mixer.group_id;
context.jamClient.SessionSetUserName(participant.client_id,name);
} else { // No mixer to match, yet
lookingForMixers.push({track: track, clientId: participant.client_id})
trackData.noaudio = true;
if (!(lookingForMixersTimer)) {
logger.debug("waiting for mixer to show up for track: " + track.id)
lookingForMixersTimer = context.setInterval(lookForMixers, 500);
}
}
var allowDelete = myTrack && index > 0;
_addTrack(allowDelete, trackData, mixer, oppositeMixer);
// Show settings icons only for my tracks
if (myTrack) {
myTracks.push(trackData);
}
});
});
configureTrackDialog = new context.JK.ConfigureTrackDialog(app, myTracks, sessionId, sessionModel);
addNewGearDialog = new context.JK.AddNewGearDialog(app, self);
}
function connectTrackToMixer(trackSelector, track, mixerId, gainPercent, groupId) {
var vuOpts = $.extend({}, trackVuOpts);
var faderOpts = $.extend({}, trackFaderOpts);
faderOpts.faderId = mixerId;
var vuLeftSelector = trackSelector + " .track-vu-left";
var vuRightSelector = trackSelector + " .track-vu-right";
var faderSelector = trackSelector + " .track-gain";
var $fader = $(faderSelector).attr('mixer-id', mixerId).data('groupId', groupId)
if(track.mediaControlsDisabled) {
$fader.data('media-controls-disabled', true).data('media-track-opener', track.mediaTrackOpener) // this we be applied later to the fader handle $element
}
var $track = $(trackSelector);
// Set mixer-id attributes and render VU/Fader
context.JK.VuHelpers.renderVU(vuLeftSelector, vuOpts);
$track.find('.track-vu-left').attr('mixer-id', track.vuMixerId + '_vul').data('groupId', groupId)
context.JK.VuHelpers.renderVU(vuRightSelector, vuOpts);
$track.find('.track-vu-right').attr('mixer-id', track.vuMixerId + '_vur').data('groupId', groupId)
context.JK.FaderHelpers.renderFader($fader, faderOpts);
// Set gain position
context.JK.FaderHelpers.setFaderValue(mixerId, gainPercent);
$fader.on('fader_change', faderChanged);
return $track;
}
// Function called on an interval when participants change. Mixers seem to
// show up later, so we render the tracks from participants, but keep track
// of the ones there weren't any mixers for, and continually try to find them
// and get them connected to the mixers underneath.
function lookForMixers() {
lookingForMixersCount++;
_updateMixers();
var usedMixers = {};
var keysToDelete = [];
context._.each(lookingForMixers, function(data) {
var clientId = data.clientId;
var track = data.track;
var myTrack = app.clientId == clientId;
var mixerData = findMixerForTrack(clientId, track, myTrack)
var mixer = mixerData.mixer;
var oppositeMixer = mixerData.oppositeMixer;
var vuMixer = mixerData.vuMixer;
var muteMixer = mixerData.muteMixer;
if (mixer && oppositeMixer) {
if(!myTrack) {
// it only makes sense to track 'audio established' for tracks that don't belong to you
sessionModel.setAudioEstablished(clientId, true);
}
var participant = (sessionModel.getParticipant(clientId) || {name:'unknown'}).name;
logger.debug("found mixer=" + mixer.id + ", participant=" + participant)
usedMixers[mixer.id] = true;
keysToDelete.push(data);
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var trackSelector = 'div.track[track-id="' + track.id + '"]';
connectTrackToMixer(trackSelector, track, mixer.id, gainPercent, mixer.group_id);
var $track = $('div.track[client-id="' + clientId + '"]');
var $trackIconMute = $track.find('.track-icon-mute')
$trackIconMute.attr('mixer-id', muteMixer.id).data('mixer', mixer).data('opposite-mixer', oppositeMixer)
$trackIconMute.muteSelector().on(EVENTS.MUTE_SELECTED, trackMuteSelected)
// hide overlay for all tracks associated with this client id (if one mixer is present, then all tracks are valid)
$('.disabled-track-overlay', $track).hide();
$('.track-connection', $track).removeClass('red yellow green').addClass('grey');
// Set mute state
_toggleVisualMuteControl($trackIconMute, mixer.mute || oppositeMixer.mute);
}
else {
// if 1 second has gone by and still no mixer, then we gray the participant's tracks
if(lookingForMixersCount == 2) {
var $track = $('div.track[client-id="' + clientId + '"]');
$('.disabled-track-overlay', $track).show();
$('.track-connection', $track).removeClass('red yellow green').addClass('red');
}
// if 5 seconds have gone by and no mixer, then we tell the server failed to establish audio
else if(lookingForMixersCount == 10) {
if(!myTrack) {
// it only makes sense to track 'audio established' for tracks that don't belong to you
sessionModel.setAudioEstablished(clientId, false);
}
}
var participant = (sessionModel.getParticipant(clientId) || { user: {name: 'unknown'}}).user.name;
logger.debug("still looking for mixer for participant=" + participant + ", clientId=" + clientId)
}
})
for (var i=0; i<keysToDelete.length; i++) {
var index = lookingForMixers.indexOf(keysToDelete[i]);
lookingForMixers.splice(index, 1);
}
if (lookingForMixers.length === 0 ||
lookingForMixersCount > 20) {
lookingForMixersCount = 0;
lookingForMixers = []
context.clearTimeout(lookingForMixersTimer);
lookingForMixersTimer = null;
}
}
// Given a mixerID and a value between 0.0-1.0,
// light up the proper VU lights.
function _updateVU(mixerId, value, isClipping) {
// Special-case for mono tracks. If mono, and it's a _vul id,
// update both sides, otherwise do nothing.
// If it's a stereo track, just do the normal thing.
var selector;
var pureMixerId = mixerId.replace("_vul", "");
pureMixerId = pureMixerId.replace("_vur", "");
var mixer = getMixer(pureMixerId);
if (mixer) {
if (!(mixer.stereo)) { // mono track
if (mixerId.substr(-4) === "_vul") {
// Do the left
selector = $('#tracks [mixer-id="' + pureMixerId + '_vul"]');
context.JK.VuHelpers.updateVU(selector, value);
// Do the right
selector = $('#tracks [mixer-id="' + pureMixerId + '_vur"]');
context.JK.VuHelpers.updateVU(selector, value);
} // otherwise, it's a mono track, _vur event - ignore.
} else { // stereo track
selector = $('#tracks [mixer-id="' + mixerId + '"]');
context.JK.VuHelpers.updateVU(selector, value);
}
}
}
function _addTrack(allowDelete, trackData, mixer, oppositeMixer) {
var $destination = $myTracksContainer;
if (trackData.clientId !== app.clientId) {
$destination = $liveTracksContainer
$('.session-livetracks .when-empty').hide();
}
var template = $('#template-session-track').html();
var newTrack = $(context.JK.fillTemplate(template, trackData));
var audioOverlay = $('.disabled-track-overlay', newTrack);
var $trackIconMute = newTrack.find('.track-icon-mute')
$trackIconMute.muteSelector().on(EVENTS.MUTE_SELECTED, trackMuteSelected)
$trackIconMute.data('mixer', mixer).data('opposite-mixer', oppositeMixer)
audioOverlay.hide(); // always start with overlay hidden, and only show if no audio persists
$destination.append(newTrack);
// Render VU meters and gain fader
var trackSelector = $destination.selector + ' .session-track[track-id="' + trackData.trackId + '"]';
var gainPercent = trackData.gainPercent || 0;
connectTrackToMixer(trackSelector, trackData, trackData.mixerId, gainPercent, trackData.group_id);
var $closeButton = $('#div-track-close', 'div[track-id="' + trackData.trackId + '"]');
if (!allowDelete) {
$closeButton.hide();
}
else {
$closeButton.click(deleteTrack);
}
// is this used?
tracks[trackData.trackId] = new context.JK.SessionTrack(trackData.clientId);
}
// something is being shown now in the other audio area
function otherAudioFilled() {
$('.session-recordings .when-empty').hide();
$('.session-recording-name-wrapper').show();
}
function _addRecordingTrack(trackData) {
otherAudioFilled();
$('.session-recordings .recording-controls').show();
var template = $('#template-session-track').html();
var newTrack = $(context.JK.fillTemplate(template, trackData));
$otherAudioContainer.append(newTrack);
if(trackData.preMasteredClass) {
context.JK.helpBubble($('.track-instrument', newTrack), 'pre-processed-track', {}, {offsetParent: newTrack.closest('.content-body')});
}
// Render VU meters and gain fader
var trackSelector = $otherAudioContainer.selector + ' .session-track[track-id="' + trackData.trackId + '"]';
var gainPercent = trackData.gainPercent || 0;
var $track = connectTrackToMixer(trackSelector, trackData, trackData.mixerId, gainPercent, null);
var $trackIconMute = $track.find('.track-icon-mute')
if(trackData.mediaControlsDisabled) {
$trackIconMute.data('media-controls-disabled', true).data('media-track-opener', trackData.mediaTrackOpener)
}
// is this used?
tracks[trackData.trackId] = new context.JK.SessionTrack(trackData.clientId);
}
/**
* Will be called when fader changes. The fader id (provided at subscribe time),
* the new value (0-100) and whether the fader is still being dragged are passed.
*/
function faderChanged(e, data) {
var $target = $(this);
var faderId = $target.attr('mixer-id');
var groupId = $target.data('groupId');
var mixerIds = faderId.split(',');
$.each(mixerIds, function(i,v) {
var broadcast = !(data.dragging); // If fader is still dragging, don't broadcast
fillTrackVolumeObject(v, broadcast);
setMixerVolume(v, data.percentage);
if(groupId == ChannelGroupIds.UserMusicInputGroup) {
// there may be other mixers with this same ID in the case of a Peer Music Stream, so update them as well
context.JK.FaderHelpers.setFaderValue(v, data.percentage);
}
});
}
function handleVolumeChangeCallback(mixerId, isLeft, value, isMuted) {
// Visually update mixer
// There is no need to actually set the back-end mixer value as the
// back-end will already have updated the audio mixer directly prior to sending
// me this event. I simply need to visually show the new fader position.
// TODO: Use mixer's range
var faderValue = percentFromMixerValue(-80, 20, value);
context.JK.FaderHelpers.setFaderValue(mixerId, faderValue);
var $muteControl = $('[control="mute"][mixer-id="' + mixerId + '"]');
_toggleVisualMuteControl($muteControl, isMuted);
}
function handleBridgeCallback(vuData) {
var j;
var eventName = null;
var mixerId = null;
var value = null;
var vuInfo = null;
for (j = 0; j < vuData.length; j++) {
vuInfo = vuData[j];
var eventName = vuInfo[0];
var vuVal = 0.0;
if(eventName === "vu") {
var mixerId = vuInfo[1];
var leftValue = vuInfo[2];
var leftClipping = vuInfo[3];
var rightValue = vuInfo[4];
var rightClipping = vuInfo[5];
// TODO - no guarantee range will be -80 to 20. Get from the
// GetControlState for this mixer which returns min/max
// value is a DB value from -80 to 20. Convert to float from 0.0-1.0
_updateVU(mixerId + "_vul", (leftValue + 80) / 100, leftClipping);
_updateVU(mixerId + "_vur", (rightValue + 80) / 100, rightClipping);
}
else if(eventName === 'connection_status') {
var mixerId = vuInfo[1];
var value = vuInfo[2];
// Connection Quality Change
var connectionClass = 'green';
if (value < 7) {
connectionClass = 'yellow';
}
if (value < 4) {
connectionClass = 'red';
}
var mixerPair = getMixerByTrackId(mixerId);
var clientId = mixerPair ? mixerPair.master.client_id : null;
if(clientId) {
var $connection = $('.session-track[client-id="' + clientId + '"] .track-connection');
if($connection.length == 0) {
logger.debug("connection status: looking for clientId: " + clientId + ", mixer: " + mixerId)
}
else {
$connection.removeClass('red yellow green grey');
$connection.addClass(connectionClass);
}
}
}
else if(eventName === 'add' || eventName === 'remove') {
// TODO - _renderSession. Note I get streams of these in
// sequence, so have Nat fix, or buffer/spam protect
// Note - this is already handled from websocket events.
// However, there may be use of these two events to avoid
// the polling-style check for when a mixer has been added
// to match a participant track.
}
else {
logger.debug('non-vu event: ' + JSON.stringify(vuInfo));
}
}
}
function deleteSession(evt) {
var sessionId = $(evt.currentTarget).attr("action-id");
if (sessionId) {
$.ajax({
type: "DELETE",
url: "/api/sessions/" + sessionId,
success: function(response) {
context.location="/client#/home";
},
error: function(jqXHR, textStatus, errorThrown) {
logger.error("Error deleting session " + sessionId);
}
});
}
}
function deleteTrack(evt) {
var trackId = $(evt.currentTarget).attr("track-id");
sessionModel.deleteTrack(sessionId, trackId);
}
function _toggleVisualMuteControl($control, mute) {
if (mute) {
$control.removeClass('enabled');
$control.addClass('muted');
} else {
$control.removeClass('muted');
$control.addClass('enabled');
}
}
function _toggleAudioMute(mixerId, muting, mode) {
fillTrackVolumeObject(mixerId);
context.trackVolumeObject.mute = muting;
if(mode === undefined) {
mode = sessionModel.getMixMode();
}
context.jamClient.SessionSetControlState(mixerId, mode);
}
function showMuteDropdowns($control) {
$control.btOn();
}
function toggleMute(evt) {
var $control = $(evt.currentTarget);
var muting = ($control.hasClass('enabled'));
var mixerIds = $control.attr('mixer-id').split(',');
// track icons have a special mute behavior
if($control.is('.track-icon-mute')) {
var mediaControlsDisabled = $control.data('media-controls-disabled');
if(mediaControlsDisabled) {
var mediaTrackOpener = $control.data('media-track-opener');
context.JK.prodBubble($control, 'media-controls-disabled', {mediaTrackOpener:mediaTrackOpener}, {positions:['bottom'], offsetParent: $control.closest('.screen')})
return false;
}
$.each(mixerIds, function(i,v) {
var mixerId = v;
// behavior: if this is the user's track in personal mode, then we mute the track globally
// otherwise, for any other track (user+master mode, or remote track in any mode)
// we just mute the type of track for that mode
var mixer = $control.data('mixer');
var oppositeMixer = $control.data('opposite-mixer')
if(mixer && oppositeMixer && mixer.group_id == ChannelGroupIds.AudioInputMusicGroup) {
// this is the user's local track; mute both personal and master mode
_toggleAudioMute(mixer.id, muting, getMixer(mixer.id).mode)
_toggleAudioMute(oppositeMixer.id, muting, getMixer(oppositeMixer.id).mode)
}
else {
_toggleAudioMute(mixer.id, muting, getMixer(mixer.id).mode)
}
// look for all controls matching this mixer id (important when it's personal mode + UserMusicInputGroup)
var $controls = $screen.find('.track-icon-mute[mixer-id=' + mixerId +']');
_toggleVisualMuteControl($controls, muting);
});
}
else {
// this path is taken for voice chat, but maybe others eventually
$.each(mixerIds, function(i,v) {
var mixerId = v;
var mixer = $control.data('mixer');
var oppositeMixer = $control.data('opposite-mixer')
if(mixer && oppositeMixer && mixer.group_id == ChannelGroupIds.AudioInputChatGroup) {
_toggleAudioMute(mixer.id, muting, mixer.mode);
_toggleAudioMute(oppositeMixer.id, muting, oppositeMixer.mode);
}
else {
_toggleAudioMute(mixerId, muting);
}
});
_toggleVisualMuteControl($control, muting);
}
}
function fillTrackVolumeObject(mixerId, broadcast) {
_updateMixers();
var _broadcast = true;
if (broadcast !== undefined) {
_broadcast = broadcast;
}
var mixer = getMixer(mixerId);
context.trackVolumeObject.clientID = mixer.client_id;
context.trackVolumeObject.broadcast = _broadcast;
context.trackVolumeObject.master = mixer.master;
context.trackVolumeObject.monitor = mixer.monitor;
context.trackVolumeObject.mute = mixer.mute;
context.trackVolumeObject.name = mixer.name;
context.trackVolumeObject.record = mixer.record;
context.trackVolumeObject.volL = mixer.volume_left;
context.trackVolumeObject.volR = mixer.volume_right;
// trackVolumeObject doesn't have a place for range min/max
currentMixerRangeMin = mixer.range_low;
currentMixerRangeMax = mixer.range_high;
}
// Given a mixer's min/max and current value, return it as
// a percent from 0-100. Return an integer.
function percentFromMixerValue(min, max, value) {
try {
var range = Math.abs(max - min);
var magnitude = value - min;
var percent = Math.round(100*(magnitude/range));
return percent;
} catch(err) {
return 0;
}
}
// Given a mixer's min/max and a percent value, return it as
// the mixer's value. Returns an integer.
function percentToMixerValue(min, max, percent) {
var range = Math.abs(max - min);
var multiplier = percent/100; // Change 85 into 0.85
var value = min + (multiplier * range);
// Protect against percents < 0 and > 100
if (value < min) {
value = min;
}
if (value > max) {
value = max;
}
return value;
}
// Given a volume percent (0-100), set the underlying
// audio volume level of the passed mixerId to the correct
// value.
function setMixerVolume(mixerId, volumePercent) {
// The context.trackVolumeObject has been filled with the mixer values
// that go with mixerId, and the range of that mixer
// has been set in currentMixerRangeMin-Max.
// All that needs doing is to translate the incoming percent
// into the real value ont the sliders range. Set Left/Right
// volumes on trackVolumeObject, and call SetControlState to stick.
var sliderValue = percentToMixerValue(
currentMixerRangeMin, currentMixerRangeMax, volumePercent);
context.trackVolumeObject.volL = context.JK.FaderHelpers.convertPercentToAudioTaper(volumePercent);
context.trackVolumeObject.volR = context.JK.FaderHelpers.convertPercentToAudioTaper(volumePercent);
// Special case for L2M mix:
if (mixerId === '__L2M__') {
logger.debug("L2M volumePercent=" + volumePercent);
var dbValue = context.JK.FaderHelpers.convertLinearToDb(volumePercent);
context.jamClient.SessionSetMasterLocalMix(dbValue);
// context.jamClient.SessionSetMasterLocalMix(sliderValue);
} else {
context.jamClient.SessionSetControlState(mixerId, sessionModel.getMixMode());
}
}
function bailOut() {
promptLeave = false;
context.window.location = '/client#/home';
}
function sessionLeave(evt) {
evt.preventDefault();
rateSession();
bailOut();
return false;
}
function rateSession() {
if (rateSessionDialog === null) {
rateSessionDialog = new context.JK.RateSessionDialog(context.JK.app);
rateSessionDialog.initialize();
}
rateSessionDialog.showDialog();
return true;
}
function sessionResync(evt) {
evt.preventDefault();
var response = context.jamClient.SessionAudioResync();
if (response) {
app.notify({
"title": "Error",
"text": response,
"icon_url": "/assets/content/icon_alert_big.png"});
}
return false;
}
// http://stackoverflow.com/questions/2604450/how-to-create-a-jquery-clock-timer
function updateRecordingTimer() {
function pretty_time_string(num) {
return ( num < 10 ? "0" : "" ) + num;
}
var total_seconds = (new Date - startTimeDate) / 1000;
var hours = Math.floor(total_seconds / 3600);
total_seconds = total_seconds % 3600;
var minutes = Math.floor(total_seconds / 60);
total_seconds = total_seconds % 60;
var seconds = Math.floor(total_seconds);
hours = pretty_time_string(hours);
minutes = pretty_time_string(minutes);
seconds = pretty_time_string(seconds);
if(hours > 0) {
var currentTimeString = hours + ":" + minutes + ":" + seconds;
}
else {
var currentTimeString = minutes + ":" + seconds;
}
$recordingTimer.text('(' + currentTimeString + ')');
}
function displayStartingRecording() {
$('#recording-start-stop').addClass('currently-recording');
$('#recording-status').text("Starting...")
}
function displayStartedRecording() {
startTimeDate = new Date;
$recordingTimer = $("<span id='recording-timer'>(0:00)</span>");
var $recordingStatus = $('<span></span>').append("<span>Stop Recording</span>").append($recordingTimer);
$('#recording-status').html( $recordingStatus );
recordingTimerInterval = setInterval(updateRecordingTimer, 1000);
}
function displayStoppingRecording(data) {
if(data) {
if(data.reason) {
app.notify({
"title": "Recording Aborted",
"text": "The recording was aborted due to '" + data.reason + '"',
"icon_url": "/assets/content/icon_alert_big.png"
});
}
}
$('#recording-status').text("Stopping...");
}
function displayDoneRecording() {
if(recordingTimerInterval) {
clearInterval(recordingTimerInterval);
recordingTimerInterval = null;
startTimeDate = null;
}
$recordingTimer = null;
$('#recording-start-stop').removeClass('currently-recording');
$('#recording-status').text("Make a Recording");
}
function displayWhoCreated(clientId) {
if(app.clientId != clientId) { // don't show to creator
sessionModel.findUserBy({clientId: clientId})
.done(function(user) {
app.notify({
"title": "Recording Started",
"text": user.name + " started a recording",
"icon_url": context.JK.resolveAvatarUrl(user.photo_url)
});
})
.fail(function() {
app.notify({
"title": "Recording Started",
"text": "Oops! Can't determine who started this recording",
"icon_url": "/assets/content/icon_alert_big.png"
});
})
}
}
function promptUserToSave(recordingId) {
rest.getRecording( {id: recordingId} )
.done(function(recording) {
recordingFinishedDialog.setRecording(recording);
app.layout.showDialog('recordingFinished').one(EVENTS.DIALOG_CLOSED, function(e, data) {
if(data.result && data.result.keep){
context.JK.prodBubble($recordingManagerViewer, 'file-manager-poke', {}, {positions:['top', 'left', 'right', 'bottom'], offsetParent: $screen.parent()})
}
})
})
.fail(app.ajaxError);
}
function openJamTrack(e) {
// just ignore the click if they are currently recording for now
if(sessionModel.recordingModel.isRecording()) {
app.notify({
"title": "Currently Recording",
"text": "You can't open a jam track while creating a recording.",
"icon_url": "/assets/content/icon_alert_big.png"
});
return false;
}
app.layout.showDialog('open-jam-track-dialog').one(EVENTS.DIALOG_CLOSED, function(e, data) {
// once the dialog is closed, see if the user has a jamtrack selected
if(!data.canceled && data.result.jamTrack) {
var jamTrack = data.result.jamTrack;
// hide 'other audio' placeholder
otherAudioFilled();
if(downloadJamTrack) {
// if there was one showing before somehow, destroy it.
logger.warn("destroying existing JamTrack")
downloadJamTrack.root.remove();
downloadJamTrack.destroy();
downloadJamTrack = null
}
downloadJamTrack = new context.JK.DownloadJamTrack(app, jamTrack.id, 'large');
// the widget indicates when it gets to any transition; we can hide it once it reaches completion
$(downloadJamTrack).on(EVENTS.JAMTRACK_DOWNLOADER_STATE_CHANGED, function(e, data) {
if(data.state == downloadJamTrack.states.synchronized) {
logger.debug("jamtrack synchronized; hide widget and show tracks")
downloadJamTrack.root.remove();
downloadJamTrack.destroy();
downloadJamTrack = null;
// XXX: test with this removed; it should be unnecessary
context.jamClient.JamTrackStopPlay();
// JamTrackPlay means 'load'
var result = context.jamClient.JamTrackPlay(jamTrack.id);
if(!result) {
app.notify(
{ title: "JamTrack Can Not Open",
text: "Unable to open your JamTrack. Please contact support@jamkazam.com"
}, null, true);
}
}
})
// show it on the page
$otherAudioContainer.append(downloadJamTrack.root)
// kick off the download JamTrack process
downloadJamTrack.init()
}
else {
logger.debug("OpenJamTrack dialog closed with no selection; ignoring", data)
}
})
return false;
}
function openRecording(e) {
// just ignore the click if they are currently recording for now
if(sessionModel.recordingModel.isRecording()) {
app.notify({
"title": "Currently Recording",
"text": "You can't open a recording while creating a recording.",
"icon_url": "/assets/content/icon_alert_big.png"
});
return false;
}
if(!localRecordingsDialog.isShowing()) {
app.layout.showDialog('localRecordings');
}
return false;
}
function closeOpenMedia() {
if(sessionModel.recordedTracks()) {
closeRecording();
}
else if(sessionModel.jamTracks() || downloadJamTrack) {
closeJamTrack();
}
else {
logger.error("don't know how to close open media (backing track maybe?)");
}
return false;
}
function closeJamTrack() {
logger.debug("closing recording");
if(downloadJamTrack) {
logger.debug("closing DownloadJamTrack widget")
downloadJamTrack.root.remove();
downloadJamTrack.destroy();
downloadJamTrack = null;
// this is necessary because a syncing widget means no jamtracks are loaded;
// so removing the widget will not cause a backend media change event (and so renderSession will not be called, ultimately)
resetOtherAudioContent();
}
rest.closeJamTrack({id: sessionModel.id()})
.done(function() {
sessionModel.refreshCurrentSession();
})
.fail(function(jqXHR) {
app.notify({
"title": "Couldn't Close JamTrack",
"text": "Couldn't inform the server to close JamTrack. msg=" + jqXHR.responseText,
"icon_url": "/assets/content/icon_alert_big.png"
});
});
context.jamClient.JamTrackStopPlay();
return false;
}
function closeRecording() {
logger.debug("closing recording");
rest.stopPlayClaimedRecording({id: sessionModel.id(), claimed_recording_id: sessionModel.getCurrentSession().claimed_recording.id})
.done(function() {
sessionModel.refreshCurrentSession();
})
.fail(function(jqXHR) {
app.notify({
"title": "Couldn't Stop Recording Playback",
"text": "Couldn't inform the server to stop playback. msg=" + jqXHR.responseText,
"icon_url": "/assets/content/icon_alert_big.png"
});
});
context.jamClient.CloseRecording();
return false;
}
function onPause() {
logger.debug("calling jamClient.SessionStopPlay");
context.jamClient.SessionStopPlay();
}
function onPlay(e, data) {
logger.debug("calling jamClient.SessionStartPlay");
context.jamClient.SessionStartPlay(data.playbackMode);
}
function onChangePlayPosition(e, data){
logger.debug("calling jamClient.SessionTrackSeekMs(" + data.positionMs + ")");
context.jamClient.SessionTrackSeekMs(data.positionMs);
}
function startStopRecording() {
if(sessionModel.recordingModel.isRecording()) {
sessionModel.recordingModel.stopRecording();
}
else {
sessionModel.recordingModel.startRecording();
}
}
function inviteMusicians() {
friendInput = inviteMusiciansUtil.inviteSessionUpdate('#update-session-invite-musicians',
sessionId);
inviteMusiciansUtil.loadFriends();
$(friendInput).show();
}
function onMixerModeChanged(e, data)
{
$mixModeDropdown.easyDropDown('select', data.mode, true);
setTimeout(renderSession, 1);
}
function onUserChangeMixMode(e) {
var mode = $mixModeDropdown.val() == "master" ? MIX_MODES.MASTER : MIX_MODES.PERSONAL;
context.jamClient.SetMixerMode(mode)
modUtils.shouldShow(NAMED_MESSAGES.MASTER_VS_PERSONAL_MIX).done(function(shouldShow) {
if(shouldShow) {
var modeChangeHtml = $($templateMixerModeChange.html());
context.JK.Banner.show({title: 'Master vs. Personal Mix', text: modeChangeHtml, no_show: NAMED_MESSAGES.MASTER_VS_PERSONAL_MIX});
}
})
return true;
}
function events() {
$('#session-leave').on('click', sessionLeave);
$('#session-resync').on('click', sessionResync);
$('#session-contents').on("click", '[action="delete"]', deleteSession);
$('#tracks').on('click', 'div[control="mute"]', toggleMute);
$('#recording-start-stop').on('click', startStopRecording);
$('#open-a-recording').on('click', openRecording);
$('#open-a-jamtrack').on('click', openJamTrack);
$('#session-invite-musicians').on('click', inviteMusicians);
$('#session-invite-musicians2').on('click', inviteMusicians);
$('#track-settings').click(function() {
configureTrackDialog.refresh();
configureTrackDialog.showVoiceChatPanel(true);
configureTrackDialog.showMusicAudioPanel(true);
});
$('#close-playback-recording').on('click', closeOpenMedia);
$(playbackControls)
.on('pause', onPause)
.on('play', onPlay)
.on('change-position', onChangePlayPosition);
$(friendInput).focus(function() { $(this).val(''); })
$(document).on(EVENTS.MIXER_MODE_CHANGED, onMixerModeChanged)
$mixModeDropdown.change(onUserChangeMixMode)
}
this.initialize = function(localRecordingsDialogInstance, recordingFinishedDialogInstance, friendSelectorDialog) {
inviteMusiciansUtil = new JK.InviteMusiciansUtil(JK.app);
inviteMusiciansUtil.initialize(friendSelectorDialog);
localRecordingsDialog = localRecordingsDialogInstance;
recordingFinishedDialog = recordingFinishedDialogInstance;
context.jamClient.SetVURefreshRate(150);
context.jamClient.RegisterVolChangeCallBack("JK.HandleVolumeChangeCallback");
playbackControls = new context.JK.PlaybackControls($('.session-recordings .recording-controls'));
var screenBindings = {
'beforeShow': beforeShow,
'afterShow': afterShow,
'beforeHide': beforeHide,
'beforeLeave' : beforeLeave,
'beforeDisconnect' : beforeDisconnect,
};
app.bindScreen('session', screenBindings);
$recordingManagerViewer = $('#recording-manager-viewer');
$screen = $('#session-screen');
$mixModeDropdown = $screen.find('select.monitor-mode')
$templateMixerModeChange = $('#template-mixer-mode-change');
$otherAudioContainer = $('#session-recordedtracks-container');
$myTracksContainer = $('#session-mytracks-container')
$liveTracksContainer = $('#session-livetracks-container');
events();
// make sure no previous plays are still going on by accident
context.jamClient.SessionStopPlay();
if(context.jamClient.SessionRemoveAllPlayTracks) {
// upgrade guard
context.jamClient.SessionRemoveAllPlayTracks();
}
};
this.tracks = tracks;
this.getCurrentSession = function() {
return sessionModel.getCurrentSession();
};
this.refreshCurrentSession = function(force) {
sessionModel.refreshCurrentSession(force);
};
this.setPromptLeave = function(_promptLeave) {
promptLeave = _promptLeave;
}
context.JK.HandleVolumeChangeCallback = handleVolumeChangeCallback;
context.JK.HandleBridgeCallback = handleBridgeCallback;
};
})(window,jQuery);