I have a Node.js application with TypeScript that uses the AWS Kinesis Video Streams WebRTC SDK. Link: https://github.com/awslabs/amazon-kinesis-video-streams-webrtc-sdk-js

In this application I have a chat module (dataChannel) with two connections (Master and Viewer).

Following is my code in master.ts:

const master: any = {
    kinesisVideoClient: null,
    signalingClient: null,
    channelARN: null,
    peerConnectionByClientId: {},
    dataChannelByClientId: {},
    localStream: null,
    remoteStreams: [],
    peerConnectionStatsInterval: null
};

export const startMaster = async (localView, remoteView, formValues, onStatsReport, onRemoteDataMessage) => {
master.localView = localView;
master.remoteView = remoteView;

// Create KVS client
const kinesisVideoClient = new AWS.KinesisVideo({
    region: AWSCredentials.REGION,
    accessKeyId: AWSCredentials.ACCESSKEYID,
    secretAccessKey: AWSCredentials.SECRETACCESSKEY,
    sessionToken: '',
    endpoint: formValues.endpoint,
    correctClockSkew: true,
});

master.kinesisVideoClient = kinesisVideoClient;

// Get signaling channel ARN
const describeSignalingChannelResponse = await kinesisVideoClient
    .describeSignalingChannel({
        ChannelName: AWSCredentials.CHANNELNAME,
    })
    .promise();
const channelARN = describeSignalingChannelResponse.ChannelInfo.ChannelARN;
console.log('[MASTER] Channel ARN: ', channelARN);

master.channelARN = channelARN;

// Get signaling channel endpoints
const getSignalingChannelEndpointResponse = await kinesisVideoClient
    .getSignalingChannelEndpoint({
        ChannelARN: channelARN,
        SingleMasterChannelEndpointConfiguration: {
            Protocols: ['WSS', 'HTTPS'],
            Role: Role.MASTER,
        },
    })
    .promise();
const endpointsByProtocol = getSignalingChannelEndpointResponse.ResourceEndpointList.reduce((endpoints: any, endpoint: any) => {
    endpoints[endpoint.Protocol] = endpoint.ResourceEndpoint;
    return endpoints;
}, {});
console.log('[MASTER] Endpoints: ', endpointsByProtocol);

// Create Signaling Client
master.signalingClient = new SignalingClient({
    channelARN,
    channelEndpoint: endpointsByProtocol.WSS,
    role: Role.MASTER,
    region: AWSCredentials.REGION,
    credentials: {
        accessKeyId: AWSCredentials.ACCESSKEYID,
        secretAccessKey: AWSCredentials.SECRETACCESSKEY,
        sessionToken: '',
    },
    systemClockOffset: kinesisVideoClient.config.systemClockOffset,
});

// Get ICE server configuration
const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
    region: AWSCredentials.REGION,
    accessKeyId: AWSCredentials.ACCESSKEYID,
    secretAccessKey: AWSCredentials.SECRETACCESSKEY,
    sessionToken: '',
    endpoint: endpointsByProtocol.HTTPS,
    correctClockSkew: true,
});
const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
    .getIceServerConfig({
        ChannelARN: channelARN,
    })
    .promise();
const iceServers = [];
// if (!formValues.natTraversalDisabled && !formValues.forceTURN) {
    iceServers.push({ urls: `stun:stun.kinesisvideo.${AWSCredentials.REGION}.amazonaws.com:443` });
// }
// if (!formValues.natTraversalDisabled) {
    getIceServerConfigResponse.IceServerList.forEach(iceServer =>
        iceServers.push({
            urls: iceServer.Uris,
            username: iceServer.Username,
            credential: iceServer.Password,
        }),
    );
// }
console.log('[MASTER] ICE servers: ', iceServers);

const configuration = {
    iceServers,
    // iceTransportPolicy: formValues.forceTURN ? 'relay' : 'all',
};

const resolution = formValues.widescreen ? { width: { ideal: 1280 }, height: { ideal: 720 } } : { width: { ideal: 640 }, height: { ideal: 480 } };
const constraints = {
    video: formValues.sendVideo ? resolution : false,
    audio: formValues.sendAudio,
};

// Get a stream from the webcam and display it in the local view.
// If no video/audio needed, no need to request for the sources.
// Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
if (formValues.sendVideo || formValues.sendAudio) {
    try {
        master.localStream = await navigator.mediaDevices.getUserMedia(constraints);
        localView.srcObject = master.localStream;
    } catch (e) {
        console.error('[MASTER] Could not find webcam');
    }
}

master.signalingClient.on('open', async () => {
    console.log('[MASTER] Connected to signaling service');
});

master.signalingClient.on('sdpOffer', async (offer, remoteClientId) => {
    printSignalingLog('[MASTER] Received SDP offer from client', remoteClientId);

    // Create a new peer connection using the offer from the given client
    const peerConnection = new RTCPeerConnection(configuration);
    master.peerConnectionByClientId[remoteClientId] = peerConnection;

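    // Create a data channel for this client; messages sent back by the remote peer arrive on the channel delivered via ondatachannel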
    if (formValues.openDataChannel) {
        master.dataChannelByClientId[remoteClientId] = peerConnection.createDataChannel('kvsDataChannel');
        peerConnection.ondatachannel = event => {
            event.channel.onmessage = onRemoteDataMessage;
        };
    }

    // Poll for connection stats
    if (!master.peerConnectionStatsInterval) {
        master.peerConnectionStatsInterval = setInterval(() => peerConnection.getStats().then(onStatsReport), 1000);
    }

    // Send any ICE candidates to the other peer
    peerConnection.addEventListener('icecandidate', ({ candidate }) => {
        if (candidate) {
            printSignalingLog('[MASTER] Generated ICE candidate for client', remoteClientId);

            // When trickle ICE is enabled, send the ICE candidates as they are generated.
            if (formValues.useTrickleICE) {
                printSignalingLog('[MASTER] Sending ICE candidate to client', remoteClientId);
                master.signalingClient.sendIceCandidate(candidate, remoteClientId);
            }
        } else {
            printSignalingLog('[MASTER] All ICE candidates have been generated for client', remoteClientId);

            // When trickle ICE is disabled, send the answer now that all the ICE candidates have been generated.
            if (!formValues.useTrickleICE) {
                printSignalingLog('[MASTER] Sending SDP answer to client', remoteClientId);
                master.signalingClient.sendSdpAnswer(peerConnection.localDescription, remoteClientId);
            }
        }
    });

    // As remote tracks are received, add them to the remote view
    peerConnection.addEventListener('track', event => {
        printSignalingLog('[MASTER] Received remote track from client', remoteClientId);
        if (remoteView.srcObject) {
            return;
        }
        remoteView.srcObject = event.streams[0];
    });

    // If there's no video/audio, master.localStream will be null. So, we should skip adding the tracks from it.
    if (master.localStream) {
        master.localStream.getTracks().forEach(track => peerConnection.addTrack(track, master.localStream));
    }
    await peerConnection.setRemoteDescription(offer);

    // Create an SDP answer to send back to the client
    printSignalingLog('[MASTER] Creating SDP answer for client', remoteClientId);
    await peerConnection.setLocalDescription(
        await peerConnection.createAnswer({
            offerToReceiveAudio: true,
            offerToReceiveVideo: true,
        }),
    );

    // When trickle ICE is enabled, send the answer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
    if (formValues.useTrickleICE) {
        printSignalingLog('[MASTER] Sending SDP answer to client', remoteClientId);
        master.signalingClient.sendSdpAnswer(peerConnection.localDescription, remoteClientId);
    }
    printSignalingLog('[MASTER] Generating ICE candidates for client', remoteClientId);
});

master.signalingClient.on('iceCandidate', async (candidate, remoteClientId) => {
    printSignalingLog('[MASTER] Received ICE candidate from client', remoteClientId);

    // Add the ICE candidate received from the client to the peer connection
    const peerConnection = master.peerConnectionByClientId[remoteClientId];
    peerConnection.addIceCandidate(candidate);
});

master.signalingClient.on('close', () => {
    console.log('[MASTER] Disconnected from signaling channel');
});

master.signalingClient.on('error', () => {
    console.error('[MASTER] Signaling client error');
});

console.log('[MASTER] Starting master connection');
master.signalingClient.open();

}

The issue is: when I send a dataChannel message from the viewer to the master, it works and the master receives the message. But when I try to send a dataChannel message from the master, it always throws the following error: InvalidStateError: Failed to execute 'send' on 'RTCDataChannel': RTCDataChannel.readyState is not 'open'
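
For context, the master-side send looks roughly like the sketch below (sendMasterMessage and the iteration over dataChannelByClientId are just an illustration of my setup, not the exact code):

// Illustrative sketch only: send a chat message on every data channel the master has created, one per connected viewer.
const sendMasterMessage = (message: string) => {
    Object.keys(master.dataChannelByClientId).forEach(remoteClientId => {
        const dataChannel = master.dataChannelByClientId[remoteClientId];
        // send() is where the InvalidStateError is thrown when readyState is not 'open'
        dataChannel.send(message);
    });
};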

I am listening for the open, sdpOffer, iceCandidate, and track events for both the master and the viewers. So why am I getting this error? And when will the value of readyState be 'open'?
