// jam-cloud/jam-ui/src/components/client/JKSessionAudioInputs.js
import React, { memo } from 'react';
import JKSessionMyTrack from './JKSessionMyTrack.js';
import { getInstrumentIcon45, convertClientInstrumentToServer } from '../../helpers/utils';
const JKSessionAudioInputs = memo(function JKSessionAudioInputs({ myTracks, chat, mixerHelper, isRemote = false, mixType = 'default' }) {
return (
<div className='d-flex' style={{ gap: '0.5rem' }}>
<div>
{myTracks.length === 0 && !chat ? (
<div>No tracks available</div>
) : (
<>
{myTracks.map((track, index) => {
// Determine the server instrument string for icon lookup
// - On page load: instrument_id may be a string from server ("piano")
// - After selection: instrument_id is numeric client_id (61) that needs conversion
const instrumentId = track.track?.instrument_id;
const serverInstrument = typeof instrumentId === 'number'
? convertClientInstrumentToServer(instrumentId)
: (instrumentId || track.track?.instrument);
//const instrumentIcon = getInstrumentIcon45(serverInstrument);
// Select the appropriate mixer based on mixType:
// - 'personal': Audio Mix view (what I hear) - uses personalMixers
// - 'master': Session Mix view (what goes out) - uses masterMixers
// - 'default': use the mixers based on current mixMode (legacy behavior)
let selectedMixers = track.mixers;
if (mixType === 'personal' && track.personalMixers) {
selectedMixers = track.personalMixers;
} else if (mixType === 'master' && track.masterMixers) {
selectedMixers = track.masterMixers;
}
return (
<div key={track.track.client_track_id || index}>
<JKSessionMyTrack
{...track}
mixers={selectedMixers}
instrument={serverInstrument}
mode={mixerHelper.mixMode}
isRemote={isRemote}
/></div>
);
})}
{chat && (
<JKSessionMyTrack
key="chat"
{...chat}
trackName="Chat"
instrument="headphones"
hasMixer={true}
isChat={true}
isRemote={isRemote}
/>
)}
</>
)}
</div>
</div>
);
});
JKSessionAudioInputs.displayName = 'JKSessionAudioInputs';
export default JKSessionAudioInputs;