VRFS-2498, VRFS-2499 : Flesh out backing track/metronome track support at necessary layers rest/api/UI/etc. Implement necessary methods in fakeJamClient for quicker iterating.

This commit is contained in:
Steven Miers 2015-01-20 22:14:48 -06:00
parent fe64995f54
commit ff95fe1c35
10 changed files with 344 additions and 120 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

View File

@ -82,18 +82,18 @@
// tell the server we are about to open a backing track:
rest.openBackingTrack({id: context.JK.CurrentSessionModel.id(), backing_track_path: backingTrack.path})
.done(function(response) {
// TODO: Client stuff available?:
// context.jamClient.BackingTrackStopPlay();
// var result = context.jamClient.BackingTrackPlay('t');
var result = context.jamClient.SessionOpenBackingTrackFile(backingTrack.path);
console.log("BackingTrackPlay response: %o", result);
// logger.debug("BackingTrackPlay response: %o", result);
// if(result) {
// app.layout.closeDialog('open-backing-track-dialog');
// TODO: Possibly actually check the result. Investigate
// what real client returns:
// // if(result) {
app.layout.closeDialog('open-backing-track-dialog');
// }
// else {
// logger.error("unable to open backing track")
// }
context.JK.CurrentSessionModel.refreshCurrentSession(true);
})
.fail(function(jqXHR) {

View File

@ -21,6 +21,12 @@
var frameSize = 2.5;
var fakeJamClientRecordings = null;
var p2pCallbacks = null;
var metronomeActive=false;
var metronomeBPM=false;
var metronomeSound=false;
var metronomeMeter=0;
var backingTrackPath="";
var backingTrackLoop=false;
function dbg(msg) { logger.debug('FakeJamClient: ' + msg); }
@ -398,21 +404,47 @@
}
function SessionGetControlState(mixerIds, isMasterOrPersonal) {
dbg("SessionGetControlState");
var groups = [0, 1, 2, 3, 7, 9];
var groups = [0, 1, 2, 2, 3, 7, 8, 10, 11, 12];
var names = [
"FW AP Multi",
"FW AP Multi",
"FW AP Multi",
"FW AP Multi",
"",
"",
"",
"",
"",
"",
"",
""
];
var media_types = [
"Master",
"Monitor",
"AudioInputMusic",
"AudioInputChat",
"StreamOutMusic",
"UserMusicInput",
"PeerAudioInputMusic",
"PeerMediaTrack",
"JamTrack",
"MetronomeTrack"
]
var clientIds = [
"",
"",
"",
"",
"3933ebec-913b-43ab-a4d3-f21dc5f8955b",
"",
"",
"",
"",
"",
"",
"",
""
];
var response = [];
@ -422,6 +454,7 @@
group_id: groups[i],
id: mixerIds[i] + (isMasterOrPersonal ? 'm' : 'p'),
master: isMasterOrPersonal,
media_type: media_types[i],
monitor: !isMasterOrPersonal,
mute: false,
name: names[i],
@ -445,7 +478,14 @@
"FW AP Multi_2_10200",
"FW AP Multi_3_10500",
"User@208.191.152.98#",
"User@208.191.152.98_*"
"User@208.191.152.98_*",
"",
"",
"",
"",
"",
"",
"",
];
}
@ -686,6 +726,32 @@
function GetScoreWorkTimingInterval() { return {interval: 1000, backoff:60000} }
function SetScoreWorkTimingInterval(knobs) {return true;}
// Fake client stub: pretend to open a backing-track file on the native client.
// path - file path of the backing track to open
// loop - whether playback should loop when the track ends
// Returns true so callers that inspect the result (the open-backing-track
// dialog captures it) can treat the fake open as successful, matching the
// convention of other fake methods (e.g. SetScoreWorkTimingInterval).
function SessionOpenBackingTrackFile(path, loop) {
    backingTrackPath = path;
    backingTrackLoop = loop;
    return true;
}
// Fake client stub: pretend to close the currently open backing-track file.
// `path` is accepted for signature parity with the real client but is unused.
// Also clears the loop flag so stale loop state does not leak into the next
// SessionOpenBackingTrackFile call that omits `loop`.
function SessionCloseBackingTrackFile(path) {
    backingTrackPath = "";
    backingTrackLoop = false;
    return true;
}
// Fake client stub: pretend to start the metronome on the native client.
// bpm   - beats per minute
// click - click-sound identifier
// meter - beats per measure
// mode  - output routing (0 = mono, 1 = left ear, 2 = right ear); currently
//         not recorded by the fake client — TODO: track it if tests need it
// Returns true for consistency with the other fake client methods.
function SessionOpenMetronome(bpm, click, meter, mode) {
    metronomeActive = true;
    metronomeBPM = bpm;
    metronomeSound = click;
    metronomeMeter = meter;
    return true;
}
// Change the metronome settings while it is running. mode: 0 = mono,
// 1 = left ear, 2 = right ear. In the fake client this is identical to
// (re)opening the metronome. (Fixes the `bmp` -> `bpm` parameter typo.)
function SessionSetMetronome(bpm, click, meter, mode) {
    return SessionOpenMetronome(bpm, click, meter, mode);
}
// Stop the metronome for the whole session. The fake client only flips the
// active flag; BPM/sound/meter are left intact so a reopen can observe them.
function SessionCloseMetronome() {
    metronomeActive = false;
    return true;
}
// stun
function NetworkTestResult() { return {remote_udp_blocked: false} }
@ -977,6 +1043,13 @@
this.GetScoreWorkTimingInterval = GetScoreWorkTimingInterval;
this.SetScoreWorkTimingInterval = SetScoreWorkTimingInterval;
this.SessionCloseBackingTrackFile = SessionCloseBackingTrackFile;
this.SessionOpenBackingTrackFile = SessionOpenBackingTrackFile;
this.SessionCloseMetronome = SessionCloseMetronome;
this.SessionOpenMetronome = SessionOpenMetronome;
this.SessionSetMetronome = SessionSetMetronome;
// Client Update
this.IsAppInWritableVolume = IsAppInWritableVolume;
this.ClientUpdateVersion = ClientUpdateVersion;

View File

@ -1211,6 +1211,32 @@
})
}
// POST /api/sessions/:id/metronome/open — tell the server this session's
// metronome was opened. options.id is the session id; any remaining keys are
// forwarded as the JSON request body. Returns the jqXHR promise.
function openMetronome(options) {
    var musicSessionId = options["id"];
    // copy before stripping the id so the caller's object is not mutated
    var payload = $.extend({}, options);
    delete payload["id"];
    return $.ajax({
        type: "POST",
        dataType: "json",
        contentType: 'application/json',
        url: "/api/sessions/" + musicSessionId + "/metronome/open",
        data: JSON.stringify(payload)
    });
}
// POST /api/sessions/:id/metronome/close — tell the server this session's
// metronome was closed. options.id is the session id; any remaining keys are
// forwarded as the JSON request body. Returns the jqXHR promise.
function closeMetronome(options) {
    var musicSessionId = options["id"];
    // copy before stripping the id so the caller's object is not mutated
    var payload = $.extend({}, options);
    delete payload["id"];
    return $.ajax({
        type: "POST",
        dataType: "json",
        contentType: 'application/json',
        url: "/api/sessions/" + musicSessionId + "/metronome/close",
        data: JSON.stringify(payload)
    });
}
function discardRecording(options) {
var recordingId = options["id"];
@ -1598,9 +1624,13 @@
this.claimRecording = claimRecording;
this.startPlayClaimedRecording = startPlayClaimedRecording;
this.stopPlayClaimedRecording = stopPlayClaimedRecording;
this.openJamTrack = openJamTrack;
this.openBackingTrack = openBackingTrack;
this.openJamTrack = openJamTrack
this.openBackingTrack = openBackingTrack
this.closeBackingTrack = closeBackingTrack
this.closeMetronome = closeMetronome;
this.closeJamTrack = closeJamTrack;
this.openMetronome = openMetronome;
this.closeMetronome = closeMetronome;
this.discardRecording = discardRecording;
this.putTrackSyncChange = putTrackSyncChange;
this.createBand = createBand;

View File

@ -500,6 +500,7 @@
}
function renderSession() {
console.log("RENDERING SESSION")
$('#session-mytracks-container').empty();
$('.session-track').remove(); // Remove previous tracks
var $voiceChat = $('#voice-chat');
@ -535,6 +536,7 @@
function _updateMixers() {
masterMixers = context.jamClient.SessionGetAllControlState(true);
personalMixers = context.jamClient.SessionGetAllControlState(false);
context.jamClient
//logger.debug("masterMixers", masterMixers)
//logger.debug("personalMixers", personalMixers)
@ -811,7 +813,7 @@
}
function _renderLocalMediaTracks() {
console.log("_renderLocalMediaTracks!!!")
// local media mixers come in different groups (MediaTrack, JamTrack, Metronome), but peer mixers are always PeerMediaTrackGroup
var localMediaMixers = _mixersForGroupIds([ChannelGroupIds.MediaTrackGroup, ChannelGroupIds.JamTrackGroup, ChannelGroupIds.MetronomeGroup], MIX_MODES.MASTER);
var peerLocalMediaMixers = _mixersForGroupId(ChannelGroupIds.PeerMediaTrackGroup, MIX_MODES.MASTER);
@ -843,7 +845,8 @@
if(mediaType == 'RecordingTrack') {
recordingTrackMixers.push(mixer)
}
else if(mediaType == 'BackingTrack') {
else if(mediaType == 'PeerMediaTrack') {
// BackingTrack
backingTrackMixers.push(mixer);
}
else if(mediaType == 'MetronomeTrack') {
@ -853,6 +856,7 @@
jamTrackMixers.push(mixer);
}
else {
console.log("Unknown track type: " + mediaType)
adhocTrackMixers.push(mixer);
}
});
@ -871,7 +875,7 @@
renderJamTracks(jamTrackMixers);
}
if(metronomeTrackMixers.length > 0) {
renderMetronomeTracks(jamTrackMixers);
renderMetronomeTracks(metronomeTrackMixers);
}
if(adhocTrackMixers.length > 0) {
logger.warn("some tracks are open that we don't know how to show")
@ -880,100 +884,86 @@
function renderBackingTracks(backingTrackMixers) {
log.debug("rendering backing tracks")
var backingTracksPath = sessionModel.backingTrack();
var backingTrack = sessionModel.backingTrack()
var backingTrackPath = backingTrack ? backingTrack.path : null
console.log("rendering backing track",backingTrackPath)
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
var isOpener = backingTrackMixers[0].group_id == ChannelGroupIds.MediaTrackGroup;
// using the server's info in conjunction with the client's, draw the recording tracks
if(backingTracksPath) {
$('.session-recording-name').text(sessionModel.getCurrentSession().backing_track_path);
if(backingTrackPath && backingTrackMixers.length > 0) {
var backingTrack = {path: backingTrackPath}
var name = sessionModel.getCurrentSession().backing_track_path
$('.session-recording-name').text(name);
var noCorrespondingTracks = false;
$.each(backingTrackMixers, function(index, mixer) {
var preMasteredClass = "";
// find the track or tracks that correspond to the mixer
var correspondingTracks = []
console.log("mixer", mixer)
if(mixer.id.indexOf("L") == 0) {
if(mixer.id.substring(1) == backingTrack.id) {
correspondingTracks.push(backingTrack);
} else {
// this should not be possible
alert("Invalid state: the recorded track had neither persisted_track_id or persisted_client_id");
}
}
if(correspondingTracks.length == 0) {
noCorrespondingTracks = true;
app.notify({
title: "Unable to Open BackingTrack",
text: "Could not correlate server and client tracks",
icon_url: "/assets/content/icon_alert_big.png"});
return false;
}
// prune found recorded tracks
backingTracks = $.grep(backingTracks, function(value) {
return $.inArray(value, correspondingTracks) < 0;
});
var oneOfTheTracks = correspondingTracks[0];
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
var photoUrl = "/assets/content/icon_recording.png";
var name = oneOfTheTracks.part
if (!name) {
name = oneOfTheTracks.instrument;
}
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: oneOfTheTracks.id,
clientId: oneOfTheTracks.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass : 'avatar-recording',
preMasteredClass: ""
};
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
if(sessionModel.isPersonalMixMode() || !isOpener) {
trackData.mediaControlsDisabled = true;
trackData.mediaTrackOpener = isOpener;
}
_addRecordingTrack(trackData);
});
if(!noCorrespondingTracks && backingTracks.length > 0) {
logger.error("unable to find all backing tracks against client tracks");
app.notify({title:"All tracks not found",
text: "Some tracks in the backing tracks are not present in the playback",
icon_url: "/assets/content/icon_alert_big.png"})
var mixer = backingTrackMixers[0]
var preMasteredClass = "";
// find the track or tracks that correspond to the mixer
var correspondingTracks = []
console.log("mixer", mixer)
correspondingTracks.push(backingTrack);
if(correspondingTracks.length == 0) {
noCorrespondingTracks = true;
app.notify({
title: "Unable to Open BackingTrack",
text: "Could not correlate server and client tracks",
icon_url: "/assets/content/icon_alert_big.png"});
return false;
}
}
// prune found recorded tracks
// backingTracks = $.grep(backingTracks, function(value) {
// return $.inArray(value, correspondingTracks) < 0;
// });
var oneOfTheTracks = correspondingTracks[0];
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
var photoUrl = "/assets/content/icon_recording.png";
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: oneOfTheTracks.id,
clientId: oneOfTheTracks.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass : 'avatar-recording',
preMasteredClass: ""
};
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
if(sessionModel.isPersonalMixMode() || !isOpener) {
trackData.mediaControlsDisabled = true;
trackData.mediaTrackOpener = isOpener;
}
_addRecordingTrack(trackData);
}// if
}
function renderJamTracks(jamTrackMixers) {
log.debug("rendering jam tracks")
console.log("rendering jam tracks")
var jamTracks = sessionModel.jamTracks();
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
@ -1069,7 +1059,80 @@
}
function renderMetronomeTracks(metronomeTrackMixers) {
logger.error("do not know how to draw metronome tracks yet")
var metronomeActive = sessionModel.metronomeActive();
console.log("rendering metronome track",metronomeActive)
// pluck the 1st mixer, and assume that all other mixers in this group are of the same type (between JamTrack vs Peer)
// if it's a locally opened track (MediaTrackGroup), then we can say this person is the opener
var isOpener = metronomeTrackMixers[0].group_id == ChannelGroupIds.MediaTrackGroup;
var name = "Metronome"
// using the server's info in conjunction with the client's, draw the recording tracks
if(metronomeActive && metronomeTrackMixers.length > 0) {
console.log("Rendering active metronome.");
var metronome = {active: metronomeActive}
$('.session-recording-name').text(name);//sessionModel.getCurrentSession().backing_track_path);
var noCorrespondingTracks = false;
var mixer = metronomeTrackMixers[0]
var preMasteredClass = "";
// find the track or tracks that correspond to the mixer
var correspondingTracks = []
console.log("mixer", mixer)
correspondingTracks.push(metronome);
if(correspondingTracks.length == 0) {
noCorrespondingTracks = true;
app.notify({
title: "Unable to Open Metronome",
text: "Could not correlate server and client tracks",
icon_url: "/assets/content/icon_metronome_small.png"});
return false;
}
// prune found recorded tracks
// Metronomes = $.grep(Metronomes, function(value) {
// return $.inArray(value, correspondingTracks) < 0;
// });
var oneOfTheTracks = correspondingTracks[0];
var instrumentIcon = context.JK.getInstrumentIcon45(oneOfTheTracks.instrument_id);
var photoUrl = "/assets/content/icon_metronome_small.png";
// Default trackData to participant + no Mixer state.
var trackData = {
trackId: oneOfTheTracks.id,
clientId: oneOfTheTracks.client_id,
name: name,
instrumentIcon: instrumentIcon,
avatar: photoUrl,
latency: "good",
gainPercent: 0,
muteClass: 'muted',
mixerId: "",
avatarClass : 'avatar-recording',
preMasteredClass: ""
};
var gainPercent = percentFromMixerValue(
mixer.range_low, mixer.range_high, mixer.volume_left);
var muteClass = "enabled";
if (mixer.mute) {
muteClass = "muted";
}
trackData.gainPercent = gainPercent;
trackData.muteClass = muteClass;
trackData.mixerId = mixer.id; // the master mixer controls the volume control for recordings (no personal controls in either master or personal mode)
trackData.vuMixerId = mixer.id; // the master mixer controls the VUs for recordings (no personal controls in either master or personal mode)
trackData.muteMixerId = mixer.id; // the master mixer controls the mute for recordings (no personal controls in either master or personal mode)
if(sessionModel.isPersonalMixMode() || !isOpener) {
trackData.mediaControlsDisabled = true;
trackData.mediaTrackOpener = isOpener;
}
_addRecordingTrack(trackData);
}// if
}
@ -1882,6 +1945,7 @@
}
function sessionResync(evt) {
console.log("Calling session resync...")
evt.preventDefault();
var response = context.jamClient.SessionAudioResync();
if (response) {
@ -2031,23 +2095,53 @@
return false;
}
function openBackingTrackFile(e) {
// just ignore the click if they are currently recording for now
console.log("opening backing track file")
if(sessionModel.recordingModel.isRecording()) {
app.notify({
"title": "Currently Recording",
"text": "You can't open a backing track while creating a recording.",
"icon_url": "/assets/content/icon_alert_big.png"
});
return false;
} else {
context.jamClient.openBackingTrackFile(sessionModel.backing_track)
context.JK.CurrentSessionModel.refreshCurrentSession(true);
}
return false;
}
function openMetronome(e) {
// just ignore the click if they are currently recording for now
if(sessionModel.recordingModel.isRecording()) {
app.notify({
"title": "Currently Recording",
"text": "You can't open a metronome while creating a recording.",
"icon_url": "/assets/content/icon_alert_big.png"
});
app.notify({
"title": "Currently Recording",
"text": "You can't open a metronome while creating a recording.",
"icon_url": "/assets/content/icon_alert_big.png"
});
return false;
} else {
rest.openMetronome({id: sessionModel.id()})
.done(function() {
context.jamClient.SessionSetMetronome(120, "click");
context.JK.CurrentSessionModel.refreshCurrentSession(true);
})
.fail(function(jqXHR) {
console.log(jqXHR, jqXHR)
app.notify({
"title": "Couldn't open metronome",
"text": "Couldn't inform open metronome. msg=" + jqXHR.responseText,
"icon_url": "/assets/content/icon_alert_big.png"
});
});
return false;
}
// TODO:
// Start metronome:
return false;
}
function openRecording(e) {
// just ignore the click if they are currently recording for now
if(sessionModel.recordingModel.isRecording()) {
@ -2073,6 +2167,12 @@
else if(sessionModel.jamTracks()) {
closeJamTrack();
}
else if(sessionModel.backingTrack() && sessionModel.backingTrack().path) {
closeBackingTrack();
}
else if(sessionModel.metronomeActive()) {
closeMetronomeTrack();
}
else {
logger.error("don't know how to close open media (backing track?)");
}
@ -2081,7 +2181,7 @@
function closeBackingTrack() {
rest.closeBackingTrack({id: sessionModel.id()})
.done(function() {
sessionModel.refreshCurrentSession();
sessionModel.refreshCurrentSession(true);
})
.fail(function(jqXHR) {
app.notify({
@ -2094,14 +2194,12 @@
context.jamClient.closeBackingTrackFile();
return false;
}
}
function closeJamTrack() {
rest.closeJamTrack({id: sessionModel.id()})
.done(function() {
sessionModel.refreshCurrentSession();
sessionModel.refreshCurrentSession(true);
})
.fail(function(jqXHR) {
app.notify({
@ -2116,12 +2214,26 @@
return false;
}
// Close the session's metronome: notify the server first, then tear down the
// local jam-client metronome and force-refresh the session so the UI
// re-renders without the metronome track. On failure, surface the server's
// error text to the user. Always returns false (click-handler convention).
function closeMetronomeTrack() {
    var request = rest.closeMetronome({id: sessionModel.id()});
    request
        .done(function() {
            context.jamClient.SessionCloseMetronome();
            sessionModel.refreshCurrentSession(true);
        })
        .fail(function(jqXHR) {
            app.notify({
                "title": "Couldn't Close MetronomeTrack",
                "text": "Couldn't inform the server to close MetronomeTrack. msg=" + jqXHR.responseText,
                "icon_url": "/assets/content/icon_alert_big.png"
            });
        });
    return false;
}
function closeRecording() {
rest.stopPlayClaimedRecording({id: sessionModel.id(), claimed_recording_id: sessionModel.getCurrentSession().claimed_recording.id})
.done(function() {
sessionModel.refreshCurrentSession();
sessionModel.refreshCurrentSession(true);
})
.fail(function(jqXHR) {
app.notify({

View File

@ -90,7 +90,10 @@
function backingTrack() {
if(currentSession) {
return currentSession.backing_track_path
// TODO: objectize this for VRFS-2665, VRFS-2666, VRFS-2667, VRFS-2668
return {
path: currentSession.backing_track_path
}
}
else {
return null;
@ -747,6 +750,8 @@
// Public interface
this.id = id;
this.start = start;
this.backingTrack = backingTrack;
this.metronomeActive = metronomeActive;
this.setUserTracks = setUserTracks;
this.recordedTracks = recordedTracks;
this.jamTracks = jamTracks;

View File

@ -13,7 +13,7 @@ if !current_user
}
else
attributes :id, :name, :description, :musician_access, :approval_required, :fan_access, :fan_chat, :band_id, :user_id, :claimed_recording_initiator_id, :track_changes_counter, :max_score
attributes :id, :name, :description, :musician_access, :approval_required, :fan_access, :fan_chat, :band_id, :user_id, :claimed_recording_initiator_id, :track_changes_counter, :max_score, :backing_track_path, :metronome_active
node :can_join do |session|
session.can_join?(current_user, true)

View File

@ -125,7 +125,7 @@
<br clear="all" />
<div class="when-empty use-metronome-header">
<%= image_tag "content/icon_metronome.png", {width:22, height:20} %>
<a href="#" id="use-metronome">Use Metronome</a>
<a href="#" id="open-a-metronome">Use Metronome</a>
</div>

View File

@ -180,8 +180,8 @@ SampleApp::Application.routes.draw do
match '/sessions/:id/jam_tracks/close' => 'api_music_sessions#jam_track_close', :via => :post
match '/sessions/:id/backing_tracks/open' => 'api_music_sessions#backing_track_open', :via => :post
match '/sessions/:id/backing_tracks/close' => 'api_music_sessions#backing_track_close', :via => :post
match '/sessions/:id/jam_tracks/:metronome_id/open' => 'api_music_sessions#metronome_open', :via => :post
match '/sessions/:id/metronomes/close' => 'api_music_sessions#metronome_close', :via => :post
match '/sessions/:id/metronome/open' => 'api_music_sessions#metronome_open', :via => :post
match '/sessions/:id/metronome/close' => 'api_music_sessions#metronome_close', :via => :post
# music session tracks
match '/sessions/:id/tracks' => 'api_music_sessions#track_create', :via => :post

View File

@ -706,6 +706,10 @@ module JamWebsockets
unless music_session_upon_reentry.nil? || music_session_upon_reentry.destroyed?
if music_session_upon_reentry.backing_track_initiator == user
music_session_upon_reentry.close_backing_track
end
# if a jamtrack is open and this user is no longer in the session, close it
if music_session_upon_reentry.jam_track_initiator == user
music_session_upon_reentry.close_jam_track