fix: add backend upload proxy endpoint

This commit is contained in:
2026-04-18 14:35:00 +01:00
parent 9b69be299f
commit b03ee8ddb5
2 changed files with 148 additions and 0 deletions

View File

@@ -28,6 +28,10 @@ const downloadUrlSchema = z.object({
objectKey: z.string().trim().min(1), objectKey: z.string().trim().min(1),
}); });
// Route params for the backend upload proxy: the :recordingId segment
// must be a UUID (matches the recordings.id column used in the lookup).
const uploadProxyParamsSchema = z.object({
recordingId: z.string().uuid(),
});
const listSchema = z.object({ const listSchema = z.object({
prefix: z.string().trim().optional(), prefix: z.string().trim().optional(),
limit: z.coerce.number().int().min(1).max(100).default(20), limit: z.coerce.number().int().min(1).max(100).default(20),
@@ -44,6 +48,105 @@ const buildObjectKey = (userId: string, fileName: string, prefix?: string): stri
router.use(requireAuth); router.use(requireAuth);
/**
 * PUT /upload/:recordingId
 * Streams the raw request body through the backend into MinIO, as an
 * alternative to presigned-URL uploads. The recording must belong to the
 * authenticated user, already have a bucket/objectKey assigned, and be in
 * the 'awaiting_upload' status.
 *
 * Responses:
 *   201 — object stored; echoes bucket/objectKey/etag/versionId/size
 *   400 — invalid recordingId param, or malformed Content-Length header
 *   401 — no authenticated session on the request
 *   404 — recording missing or owned by a different user
 *   409 — no storage target yet, or status is not 'awaiting_upload'
 *   500 — bucket provisioning or MinIO upload failed
 */
router.put('/upload/:recordingId', async (req, res) => {
  const parsedParams = uploadProxyParamsSchema.safeParse(req.params);
  if (!parsedParams.success) {
    res.status(400).json({ message: 'Invalid recordingId', errors: parsedParams.error.flatten() });
    return;
  }

  const authSession = req.auth;
  if (!authSession?.user) {
    res.status(401).json({ message: 'Unauthorized' });
    return;
  }

  // Ownership is part of the query, so another user's recording yields the
  // same 404 as a nonexistent one (no existence leak).
  const recording = await db.query.recordings.findFirst({
    where: and(eq(recordings.id, parsedParams.data.recordingId), eq(recordings.ownerUserId, authSession.user.id)),
  });
  if (!recording) {
    res.status(404).json({ message: 'Recording not found' });
    return;
  }
  if (!recording.bucket || !recording.objectKey) {
    res.status(409).json({ message: 'Recording does not have a storage target yet' });
    return;
  }
  if (recording.status !== 'awaiting_upload') {
    res.status(409).json({ message: `Recording is not awaiting upload (current status: ${recording.status})` });
    return;
  }

  // Fall back to octet-stream when the client sends no usable Content-Type.
  const contentType = typeof req.headers['content-type'] === 'string' && req.headers['content-type'].trim()
    ? req.headers['content-type'].trim()
    : 'application/octet-stream';

  // Content-Length is optional (e.g. chunked transfer); when present it must
  // be a non-negative integer. Was Number.isFinite, which wrongly accepted
  // fractional values such as "12.5".
  const rawContentLength = Array.isArray(req.headers['content-length'])
    ? req.headers['content-length'][0]
    : req.headers['content-length'];
  const parsedSize = rawContentLength ? Number(rawContentLength) : undefined;
  if (parsedSize !== undefined && (!Number.isInteger(parsedSize) || parsedSize < 0)) {
    res.status(400).json({ message: 'Invalid Content-Length header' });
    return;
  }

  try {
    await ensureMinioBucket();
    console.info('[recording.proxy-upload] streaming upload via backend', {
      ownerUserId: authSession.user.id,
      recordingId: recording.id,
      deviceId: recording.cameraDeviceId,
      bucket: recording.bucket,
      objectKey: recording.objectKey,
      contentType,
      sizeBytes: parsedSize ?? null,
    });

    // `req` is a readable stream; MinIO consumes it directly so the body is
    // never buffered whole in memory.
    const uploadResult = await minioClient.putObject(
      recording.bucket,
      recording.objectKey,
      req,
      parsedSize,
      { 'Content-Type': contentType },
    );

    console.info('[recording.proxy-upload] upload complete', {
      ownerUserId: authSession.user.id,
      recordingId: recording.id,
      bucket: recording.bucket,
      objectKey: recording.objectKey,
      etag: uploadResult.etag,
      versionId: uploadResult.versionId ?? null,
      sizeBytes: parsedSize ?? null,
    });

    res.status(201).json({
      message: 'Recording uploaded via backend proxy',
      recordingId: recording.id,
      bucket: recording.bucket,
      objectKey: recording.objectKey,
      etag: uploadResult.etag,
      versionId: uploadResult.versionId ?? null,
      sizeBytes: parsedSize ?? null,
    });
  } catch (error) {
    console.error('[recording.proxy-upload] failed', {
      ownerUserId: authSession.user.id,
      recordingId: recording.id,
      bucket: recording.bucket,
      objectKey: recording.objectKey,
      error: error instanceof Error ? error.message : String(error),
    });
    // Previously rethrew. In an Express 4 async handler a rethrown rejection
    // bypasses error middleware entirely (unhandled rejection) and the client
    // never gets a response. Answer with 500 instead.
    // NOTE(review): if this app runs Express 5, rethrowing would reach error
    // middleware — confirm the Express major version before relying on that.
    if (!res.headersSent) {
      res.status(500).json({ message: 'Failed to store recording upload' });
    }
  }
});
router.post('/upload-url', async (req, res) => { router.post('/upload-url', async (req, res) => {
const parsed = uploadUrlSchema.safeParse(req.body); const parsed = uploadUrlSchema.safeParse(req.body);

View File

@@ -13,6 +13,17 @@ const toBackendUrl = (path) => {
return `${backendUrl}${path.startsWith('/') ? path : `/${path}`}`; return `${backendUrl}${path.startsWith('/') ? path : `/${path}`}`;
}; };
/**
 * Best-effort parser for a fetch response body.
 * JSON content types are parsed as-is; any other body is wrapped as
 * { message: text }. Read/parse failures never throw — an empty object
 * is returned instead so callers can always destructure safely.
 */
const parseResponseBody = async (response) => {
  const contentTypeHeader = response.headers.get('content-type') || '';
  const looksLikeJson = contentTypeHeader.includes('application/json');

  if (looksLikeJson) {
    try {
      return await response.json();
    } catch (_err) {
      return {};
    }
  }

  let bodyText = '';
  try {
    bodyText = await response.text();
  } catch (_err) {
    // Unreadable body — treat as empty.
  }

  if (!bodyText) {
    return {};
  }
  return { message: bodyText };
};
const request = async (path, options = {}) => { const request = async (path, options = {}) => {
const { deviceToken } = getAppState(); const { deviceToken } = getAppState();
const headers = { 'Content-Type': 'application/json' }; const headers = { 'Content-Type': 'application/json' };
@@ -38,6 +49,36 @@ const request = async (path, options = {}) => {
return data; return data;
}; };
/**
 * Send a raw binary payload (Blob / ArrayBuffer / stream) to the backend.
 * Unlike the JSON `request` helper, no Content-Type is forced: it is set only
 * when options.contentType is provided, and options.headers can override
 * anything. Throws an Error built from the response body on non-2xx status.
 *
 * @param {string} path backend-relative path
 * @param {*} body fetch-compatible request body
 * @param {{ method?: string, contentType?: string, headers?: object }} [options]
 * @returns {Promise<object>} parsed response body
 */
const uploadBinary = async (path, body, options = {}) => {
  const { deviceToken } = getAppState();

  const baseHeaders = {};
  if (deviceToken) {
    baseHeaders.Authorization = `Bearer ${deviceToken}`;
  }
  if (options.contentType) {
    baseHeaders['Content-Type'] = options.contentType;
  }

  const requestInit = {
    method: options.method || 'PUT',
    body,
    credentials: 'include',
    headers: {
      ...baseHeaders,
      ...(options.headers || {}),
    },
  };

  const response = await fetch(toBackendUrl(path), requestInit);
  const data = await parseResponseBody(response);

  if (response.ok) {
    return data;
  }
  const reason = data.message || data.error || response.statusText || 'Request failed';
  throw new Error(reason);
};
export const getBackendUrl = () => backendUrl; export const getBackendUrl = () => backendUrl;
export const api = { export const api = {
@@ -83,5 +124,9 @@ export const api = {
}, },
pushNotifications: { pushNotifications: {
markRead: (notificationId) => request(`/push-notifications/${notificationId}/read`, { method: 'POST', body: JSON.stringify({}) }) markRead: (notificationId) => request(`/push-notifications/${notificationId}/read`, { method: 'POST', body: JSON.stringify({}) })
},
// Binary endpoints — these go through uploadBinary, not the JSON `request`
// helper, so the body is sent raw rather than JSON-stringified.
uploads: {
// PUTs a recorded blob to the backend proxy route; content type defaults
// to application/octet-stream when the caller does not supply one.
uploadRecordingBlob: (recordingId, blob, contentType = 'application/octet-stream') =>
uploadBinary(`/videos/upload/${recordingId}`, blob, { method: 'PUT', contentType })
} }
}; };