Enable Discovery debug logging for production troubleshooting

- Add DISCOVERY_LOG_LEVEL=debug
- Add DISCOVERY_SHOW_PROGRESS=true
- Temporary changes for debugging the InitializerProcessor fixes in production
2025-08-11 20:13:26 +02:00
parent 59fd3dd3b1
commit 55a330b223
3683 changed files with 2956207 additions and 16948 deletions

@@ -0,0 +1,554 @@
// modules/api-manager/MediaManager.js
import { Logger } from '../../core/logger.js';

/**
 * Media APIs Manager - Camera, Microphone, WebRTC, Audio, Recording
 */
export class MediaManager {
  constructor(config = {}) {
    this.config = config;
    this.activeStreams = new Map();
    this.activeConnections = new Map();
    this.audioContext = null;
    // Check API support
    this.support = {
      mediaDevices: navigator.mediaDevices !== undefined,
      webRTC: 'RTCPeerConnection' in window,
      webAudio: 'AudioContext' in window || 'webkitAudioContext' in window,
      mediaRecorder: 'MediaRecorder' in window,
      screenShare: navigator.mediaDevices?.getDisplayMedia !== undefined
    };
    Logger.info('[MediaManager] Initialized with support:', this.support);
  }

  /**
   * Get user camera stream
   */
  async getUserCamera(constraints = {}) {
    if (!this.support.mediaDevices) {
      throw new Error('MediaDevices API not supported');
    }
    const defaultConstraints = {
      video: {
        width: { ideal: 1280 },
        height: { ideal: 720 },
        facingMode: 'user'
      },
      audio: false,
      ...constraints
    };
    try {
      const stream = await navigator.mediaDevices.getUserMedia(defaultConstraints);
      const streamId = this.generateId('camera');
      this.activeStreams.set(streamId, {
        stream,
        type: 'camera',
        constraints: defaultConstraints,
        tracks: stream.getTracks()
      });
      Logger.info(`[MediaManager] Camera stream acquired: ${streamId}`);
      return {
        id: streamId,
        stream,
        video: stream.getVideoTracks()[0],
        audio: stream.getAudioTracks()[0],
        stop: () => this.stopStream(streamId),
        switchCamera: () => this.switchCamera(streamId),
        takePhoto: (canvas) => this.takePhoto(stream, canvas),
        applyFilter: (filter) => this.applyVideoFilter(streamId, filter)
      };
    } catch (error) {
      Logger.warn('[MediaManager] Camera access failed:', error.message);
      throw error;
    }
  }
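
  // Usage sketch (illustrative, not part of the original commit): acquire the
  // camera from an async context, preview it, take a photo, then release it.
  // `previewVideo` is a hypothetical <video> element.
  //
  //   const media = new MediaManager();
  //   const cam = await media.getUserCamera({ audio: true });
  //   previewVideo.srcObject = cam.stream;
  //   const photo = await cam.takePhoto();   // { canvas, blob, url, dataURL, download }
  //   cam.stop();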

  /**
   * Get user microphone stream
   */
  async getUserMicrophone(constraints = {}) {
    if (!this.support.mediaDevices) {
      throw new Error('MediaDevices API not supported');
    }
    // Merge caller-supplied audio options into the defaults; spread the remaining
    // constraints separately so a caller-supplied `audio` object does not clobber
    // the merged defaults.
    const { audio: audioOverrides, ...otherConstraints } = constraints;
    const defaultConstraints = {
      audio: {
        echoCancellation: true,
        noiseSuppression: true,
        autoGainControl: true,
        ...audioOverrides
      },
      video: false,
      ...otherConstraints
    };
    try {
      const stream = await navigator.mediaDevices.getUserMedia(defaultConstraints);
      const streamId = this.generateId('microphone');
      this.activeStreams.set(streamId, {
        stream,
        type: 'microphone',
        constraints: defaultConstraints,
        tracks: stream.getTracks()
      });
      Logger.info(`[MediaManager] Microphone stream acquired: ${streamId}`);
      return {
        id: streamId,
        stream,
        audio: stream.getAudioTracks()[0],
        stop: () => this.stopStream(streamId),
        getVolume: () => this.getAudioLevel(stream),
        startRecording: (options) => this.startRecording(stream, options)
      };
    } catch (error) {
      Logger.warn('[MediaManager] Microphone access failed:', error.message);
      throw error;
    }
  }
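
  // Usage sketch (illustrative): capture the microphone and read a rough level.
  // getVolume() returns a single snapshot; see getAudioLevel() below.
  //
  //   const media = new MediaManager();
  //   const mic = await media.getUserMicrophone();
  //   console.log(mic.getVolume());            // 0-255 average across frequency bins
  //   const rec = await mic.startRecording();  // MediaRecorder wrapper, see startRecording()
  //   mic.stop();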

  /**
   * Get screen share stream
   */
  async getScreenShare(constraints = {}) {
    if (!this.support.screenShare) {
      throw new Error('Screen sharing not supported');
    }
    const defaultConstraints = {
      video: {
        cursor: 'always'
      },
      audio: false,
      ...constraints
    };
    try {
      const stream = await navigator.mediaDevices.getDisplayMedia(defaultConstraints);
      const streamId = this.generateId('screen');
      this.activeStreams.set(streamId, {
        stream,
        type: 'screen',
        constraints: defaultConstraints,
        tracks: stream.getTracks()
      });
      // Auto-cleanup when user stops sharing
      stream.getTracks().forEach(track => {
        track.addEventListener('ended', () => {
          this.stopStream(streamId);
        });
      });
      Logger.info(`[MediaManager] Screen share acquired: ${streamId}`);
      return {
        id: streamId,
        stream,
        video: stream.getVideoTracks()[0],
        audio: stream.getAudioTracks()[0],
        stop: () => this.stopStream(streamId)
      };
    } catch (error) {
      Logger.warn('[MediaManager] Screen share failed:', error.message);
      throw error;
    }
  }
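
  // Usage sketch (illustrative): share the screen; if the user ends the share from
  // the browser UI, the 'ended' listener above cleans the stream up automatically.
  // `previewVideo` is a hypothetical <video> element.
  //
  //   const media = new MediaManager();
  //   const screen = await media.getScreenShare({ audio: true });
  //   previewVideo.srcObject = screen.stream;
  //   screen.stop();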

  /**
   * Start recording media stream
   */
  async startRecording(stream, options = {}) {
    if (!this.support.mediaRecorder) {
      throw new Error('MediaRecorder API not supported');
    }
    const defaultOptions = {
      mimeType: 'video/webm;codecs=vp9',
      videoBitsPerSecond: 2000000,
      audioBitsPerSecond: 128000,
      ...options
    };
    // Find supported MIME type
    const mimeType = this.getSupportedMimeType([
      'video/webm;codecs=vp9',
      'video/webm;codecs=vp8',
      'video/webm',
      'video/mp4'
    ]) || defaultOptions.mimeType;
    const recorder = new MediaRecorder(stream, {
      ...defaultOptions,
      mimeType
    });
    const recordingId = this.generateId('recording');
    const chunks = [];
    recorder.ondataavailable = (event) => {
      if (event.data.size > 0) {
        chunks.push(event.data);
      }
    };
    recorder.onstop = () => {
      const blob = new Blob(chunks, { type: mimeType });
      this.onRecordingComplete(recordingId, blob);
    };
    recorder.start();
    Logger.info(`[MediaManager] Recording started: ${recordingId}`);
    return {
      id: recordingId,
      recorder,
      stop: () => {
        recorder.stop();
        return new Promise(resolve => {
          // This replaces the default onstop handler above, so notify
          // onRecordingComplete here as well before resolving.
          recorder.onstop = () => {
            const blob = new Blob(chunks, { type: mimeType });
            this.onRecordingComplete(recordingId, blob);
            resolve({
              blob,
              url: URL.createObjectURL(blob),
              size: blob.size,
              type: blob.type,
              download: (filename = `recording-${Date.now()}.webm`) => {
                this.downloadBlob(blob, filename);
              }
            });
          };
        });
      },
      pause: () => recorder.pause(),
      resume: () => recorder.resume(),
      get state() { return recorder.state; }
    };
  }
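
  // Usage sketch (illustrative): record a camera stream for five seconds and
  // offer the result as a download.
  //
  //   const media = new MediaManager();
  //   const cam = await media.getUserCamera({ audio: true });
  //   const recording = await media.startRecording(cam.stream);
  //   setTimeout(async () => {
  //     const result = await recording.stop();  // { blob, url, size, type, download }
  //     result.download('demo.webm');
  //     cam.stop();
  //   }, 5000);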

  /**
   * Take photo from video stream
   */
  takePhoto(stream, canvas) {
    const video = document.createElement('video');
    video.srcObject = stream;
    video.autoplay = true;
    video.muted = true;
    video.playsInline = true;
    return new Promise((resolve) => {
      // Wait for the first frame (loadeddata) rather than loadedmetadata,
      // otherwise the canvas may capture a blank frame.
      video.onloadeddata = () => {
        if (!canvas) {
          canvas = document.createElement('canvas');
        }
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        const ctx = canvas.getContext('2d');
        ctx.drawImage(video, 0, 0);
        canvas.toBlob((blob) => {
          resolve({
            canvas,
            blob,
            url: URL.createObjectURL(blob),
            dataURL: canvas.toDataURL('image/jpeg', 0.9),
            download: (filename = `photo-${Date.now()}.jpg`) => {
              this.downloadBlob(blob, filename);
            }
          });
        }, 'image/jpeg', 0.9);
        video.remove();
      };
    });
  }
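
  // Usage sketch (illustrative): grab a still frame from an existing camera stream.
  // `imgElement` is a hypothetical <img> element.
  //
  //   const media = new MediaManager();
  //   const cam = await media.getUserCamera();
  //   const shot = await media.takePhoto(cam.stream);
  //   imgElement.src = shot.dataURL;
  //   shot.download('photo.jpg');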

  /**
   * Web Audio Context setup
   */
  getAudioContext() {
    if (!this.audioContext) {
      if (this.support.webAudio) {
        this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
        Logger.info('[MediaManager] Audio context created');
      } else {
        Logger.warn('[MediaManager] Web Audio API not supported');
        return null;
      }
    }
    return this.audioContext;
  }

  /**
   * Create audio analyzer for visualizations
   */
  createAudioAnalyzer(stream, options = {}) {
    const audioContext = this.getAudioContext();
    if (!audioContext) return null;
    const source = audioContext.createMediaStreamSource(stream);
    const analyzer = audioContext.createAnalyser();
    analyzer.fftSize = options.fftSize || 256;
    // Use ?? so an explicit smoothing value of 0 is respected.
    analyzer.smoothingTimeConstant = options.smoothing ?? 0.8;
    source.connect(analyzer);
    const bufferLength = analyzer.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    return {
      analyzer,
      bufferLength,
      dataArray,
      getFrequencyData: () => {
        analyzer.getByteFrequencyData(dataArray);
        return Array.from(dataArray);
      },
      getTimeDomainData: () => {
        analyzer.getByteTimeDomainData(dataArray);
        return Array.from(dataArray);
      },
      getAverageVolume: () => {
        analyzer.getByteFrequencyData(dataArray);
        return dataArray.reduce((sum, value) => sum + value, 0) / bufferLength;
      }
    };
  }
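
  // Usage sketch (illustrative): drive a simple volume readout from a microphone
  // stream inside a requestAnimationFrame loop.
  //
  //   const media = new MediaManager();
  //   const mic = await media.getUserMicrophone();
  //   const meter = media.createAudioAnalyzer(mic.stream, { fftSize: 512 });
  //   (function draw() {
  //     console.log(meter.getAverageVolume());  // 0-255
  //     requestAnimationFrame(draw);
  //   })();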

  /**
   * Simple WebRTC peer connection setup
   */
  async createPeerConnection(config = {}) {
    if (!this.support.webRTC) {
      throw new Error('WebRTC not supported');
    }
    const defaultConfig = {
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' },
        { urls: 'stun:stun1.l.google.com:19302' }
      ],
      ...config
    };
    const pc = new RTCPeerConnection(defaultConfig);
    const connectionId = this.generateId('rtc');
    this.activeConnections.set(connectionId, pc);
    // Enhanced peer connection with event handling
    const enhancedPC = {
      id: connectionId,
      connection: pc,
      // Event handlers
      onTrack: (callback) => pc.addEventListener('track', callback),
      onIceCandidate: (callback) => pc.addEventListener('icecandidate', callback),
      onConnectionStateChange: (callback) => pc.addEventListener('connectionstatechange', callback),
      // Methods
      addStream: (stream) => {
        stream.getTracks().forEach(track => {
          pc.addTrack(track, stream);
        });
      },
      createOffer: () => pc.createOffer(),
      createAnswer: () => pc.createAnswer(),
      setLocalDescription: (desc) => pc.setLocalDescription(desc),
      setRemoteDescription: (desc) => pc.setRemoteDescription(desc),
      addIceCandidate: (candidate) => pc.addIceCandidate(candidate),
      close: () => {
        pc.close();
        this.activeConnections.delete(connectionId);
      },
      get connectionState() { return pc.connectionState; },
      get iceConnectionState() { return pc.iceConnectionState; }
    };
    Logger.info(`[MediaManager] Peer connection created: ${connectionId}`);
    return enhancedPC;
  }
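
  // Usage sketch (illustrative): minimal offer side of a call. The signaling
  // channel (`signaling.send`) and `remoteVideo` element are placeholders and are
  // not provided by this class.
  //
  //   const media = new MediaManager();
  //   const pc = await media.createPeerConnection();
  //   const cam = await media.getUserCamera({ audio: true });
  //   pc.addStream(cam.stream);
  //   pc.onIceCandidate((e) => { if (e.candidate) signaling.send({ candidate: e.candidate }); });
  //   pc.onTrack((e) => { remoteVideo.srcObject = e.streams[0]; });
  //   const offer = await pc.createOffer();
  //   await pc.setLocalDescription(offer);
  //   signaling.send({ offer });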

  /**
   * Get available media devices
   */
  async getDevices() {
    if (!this.support.mediaDevices) {
      return { cameras: [], microphones: [], speakers: [] };
    }
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      return {
        cameras: devices.filter(d => d.kind === 'videoinput'),
        microphones: devices.filter(d => d.kind === 'audioinput'),
        speakers: devices.filter(d => d.kind === 'audiooutput'),
        all: devices
      };
    } catch (error) {
      Logger.warn('[MediaManager] Device enumeration failed:', error);
      return { cameras: [], microphones: [], speakers: [] };
    }
  }
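
  // Usage sketch (illustrative): list cameras to populate a device picker. Device
  // labels are usually empty until the user has granted camera/microphone access.
  //
  //   const media = new MediaManager();
  //   const { cameras } = await media.getDevices();
  //   cameras.forEach(d => console.log(d.deviceId, d.label));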

  /**
   * Check device permissions
   */
  async checkPermissions() {
    const permissions = {};
    try {
      if (navigator.permissions) {
        // 'camera' / 'microphone' permission names are not recognized by every
        // browser; unsupported queries fall through to the catch below.
        const camera = await navigator.permissions.query({ name: 'camera' });
        const microphone = await navigator.permissions.query({ name: 'microphone' });
        permissions.camera = camera.state;
        permissions.microphone = microphone.state;
      }
    } catch (error) {
      Logger.warn('[MediaManager] Permission check failed:', error);
    }
    return permissions;
  }
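
  // Usage sketch (illustrative): consult the permission state before prompting.
  //
  //   const media = new MediaManager();
  //   const perms = await media.checkPermissions();
  //   if (perms.camera !== 'denied') {
  //     await media.getUserCamera();
  //   }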

  // Helper methods
  stopStream(streamId) {
    const streamData = this.activeStreams.get(streamId);
    if (streamData) {
      streamData.tracks.forEach(track => track.stop());
      this.activeStreams.delete(streamId);
      Logger.info(`[MediaManager] Stream stopped: ${streamId}`);
    }
  }

  stopAllStreams() {
    this.activeStreams.forEach((streamData, id) => {
      streamData.tracks.forEach(track => track.stop());
    });
    this.activeStreams.clear();
    Logger.info('[MediaManager] All streams stopped');
  }

  async switchCamera(streamId) {
    const streamData = this.activeStreams.get(streamId);
    if (!streamData || streamData.type !== 'camera') return null;
    const currentFacing = streamData.constraints.video.facingMode;
    const newFacing = currentFacing === 'user' ? 'environment' : 'user';
    // Stop current stream
    this.stopStream(streamId);
    // Get new stream with switched camera
    return this.getUserCamera({
      video: {
        ...streamData.constraints.video,
        facingMode: newFacing
      }
    });
  }

  getAudioLevel(stream) {
    const audioContext = this.getAudioContext();
    if (!audioContext) return 0;
    const source = audioContext.createMediaStreamSource(stream);
    const analyzer = audioContext.createAnalyser();
    source.connect(analyzer);
    const dataArray = new Uint8Array(analyzer.frequencyBinCount);
    // Note: this samples a freshly created analyser, so the very first reading
    // can be near zero; poll repeatedly (or use createAudioAnalyzer) for a
    // meaningful level.
    analyzer.getByteFrequencyData(dataArray);
    return dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
  }

  getSupportedMimeType(types) {
    return types.find(type => MediaRecorder.isTypeSupported(type));
  }

  downloadBlob(blob, filename) {
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = filename;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    URL.revokeObjectURL(url);
  }

  generateId(prefix = 'media') {
    return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  onRecordingComplete(id, blob) {
    Logger.info(`[MediaManager] Recording completed: ${id}, size: ${blob.size} bytes`);
    // Custom completion handling can be added here
  }

  /**
   * Apply video filters using CSS filters
   */
  applyVideoFilter(streamId, filterName) {
    const streamData = this.activeStreams.get(streamId);
    if (!streamData || streamData.type !== 'camera') return;
    const filters = {
      none: 'none',
      blur: 'blur(2px)',
      brightness: 'brightness(1.2)',
      contrast: 'contrast(1.3)',
      grayscale: 'grayscale(1)',
      sepia: 'sepia(1)',
      invert: 'invert(1)',
      vintage: 'sepia(0.8) contrast(1.4) brightness(1.1)',
      cool: 'hue-rotate(180deg) saturate(1.5)',
      warm: 'hue-rotate(25deg) saturate(1.2)'
    };
    return {
      filter: filters[filterName] || filterName,
      apply: (videoElement) => {
        videoElement.style.filter = filters[filterName] || filterName;
      }
    };
  }
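
  // Usage sketch (illustrative): apply a named CSS filter to the element showing
  // a camera stream. `videoEl` is a hypothetical <video> element.
  //
  //   const media = new MediaManager();
  //   const cam = await media.getUserCamera();
  //   videoEl.srcObject = cam.stream;
  //   cam.applyFilter('vintage').apply(videoEl);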

  /**
   * Get current status of all media operations
   */
  getStatus() {
    return {
      activeStreams: this.activeStreams.size,
      activeConnections: this.activeConnections.size,
      audioContextState: this.audioContext?.state || 'none',
      support: this.support,
      streams: Array.from(this.activeStreams.entries()).map(([id, data]) => ({
        id,
        type: data.type,
        tracks: data.tracks.length,
        active: data.tracks.some(track => track.readyState === 'live')
      }))
    };
  }
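
  // Usage sketch (illustrative): inspect live resources and release everything,
  // e.g. before tearing down a page or component.
  //
  //   const media = new MediaManager();
  //   console.log(media.getStatus());   // { activeStreams, activeConnections, ... }
  //   media.stopAllStreams();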
}