diff --git a/index.js b/index.js
index eb1d8cd..482ece9 100644
--- a/index.js
+++ b/index.js
@@ -2,6 +2,9 @@ const dotenv = require('dotenv');
dotenv.config();
const logger = require('./src/config/logger');
+
+logger.info('Starting CDN application 🚀');
+
const {App} = require('@slack/bolt');
const fileUpload = require('./src/fileUpload');
const express = require('express');
@@ -28,7 +31,12 @@ expressApp.use('/api', apiRoutes);
// Error handling middleware
expressApp.use((err, req, res, next) => {
- logger.error('API Error:', err);
+ logger.error('API Error:', {
+ error: err.message,
+ stack: err.stack,
+ path: req.path,
+ method: req.method
+ });
res.status(500).json({ error: 'Internal server error' });
});
@@ -40,40 +48,34 @@ expressApp.use((req, res, next) => {
// Event listener for file_shared events
app.event('file_shared', async ({event, client}) => {
- logger.debug(`Received file_shared event: ${JSON.stringify(event)}`);
-
- if (parseFloat(event.event_ts) < BOT_START_TIME) {
- logger.debug(`Ignoring file event from before bot start: ${new Date(parseFloat(event.event_ts) * 1000).toISOString()}`);
- return;
- }
-
- const targetChannelId = process.env.SLACK_CHANNEL_ID;
- const channelId = event.channel_id;
-
- if (channelId !== targetChannelId) {
- logger.debug(`Ignoring file shared in channel: ${channelId}`);
- return;
- }
+ if (parseFloat(event.event_ts) < BOT_START_TIME) return;
+ if (event.channel_id !== process.env.SLACK_CHANNEL_ID) return;
try {
await fileUpload.handleFileUpload(event, client);
} catch (error) {
- logger.error(`Error processing file upload: ${error.message}`);
+ logger.error(`Upload failed: ${error.message}`);
}
});
-// Slack bot and API server
+// Startup
(async () => {
try {
await fileUpload.initialize();
await app.start();
const port = parseInt(process.env.PORT || '4553', 10);
expressApp.listen(port, () => {
- logger.info(`⚡️ Slack app is running in Socket Mode!`);
- logger.info(`🚀 API server is running on port ${port}`);
+ logger.info('CDN started successfully 🔥', {
+ slackMode: 'Socket Mode',
+ apiPort: port,
+ startTime: new Date().toISOString()
+ });
});
} catch (error) {
- logger.error('Failed to start:', error);
+ logger.error('Failed to start application:', {
+ error: error.message,
+ stack: error.stack
+ });
process.exit(1);
}
})();
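The structured `API Error` entry above is only produced for errors a route hands to `next(err)` (or throws synchronously inside a handler). A self-contained sketch of that flow; the `/boom` route is hypothetical and not part of this diff:

```js
const express = require('express');
const logger = require('./src/config/logger');

const app = express();

// A route that forwards an error; only then does the error middleware run.
app.get('/boom', (req, res, next) => next(new Error('kaboom')));

// Same shape as the middleware in this patch.
app.use((err, req, res, next) => {
  logger.error('API Error:', {
    error: err.message,
    stack: err.stack,
    path: req.path,   // '/boom'
    method: req.method // 'GET'
  });
  res.status(500).json({ error: 'Internal server error' });
});

app.listen(0);
```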
diff --git a/package.json b/package.json
index 8ad828c..9003d87 100644
--- a/package.json
+++ b/package.json
@@ -1,15 +1,15 @@
{
"name": "cdn-v2-hackclub",
"version": "1.0.0",
- "description": "Slack app and API to upload files to Backblaze B2 with unique URLs",
+ "description": "Slack app and API to upload files to S3-compatible storage with unique URLs",
"main": "index.js",
"scripts": {
"start": "node index.js"
},
"dependencies": {
+ "@aws-sdk/client-s3": "^3.478.0",
"@slack/bolt": "^4.2.0",
"@slack/web-api": "^7.8.0",
- "backblaze-b2": "^1.3.0",
"cors": "^2.8.5",
"dotenv": "^10.0.0",
"multer": "^1.4.5-lts.1",
diff --git a/src/api/deploy.js b/src/api/deploy.js
index a2014bc..5c5b4f1 100644
--- a/src/api/deploy.js
+++ b/src/api/deploy.js
@@ -6,6 +6,7 @@ const deployEndpoint = async (files) => {
const deployedFiles = files.map(file => ({
deployedUrl: generateApiUrl('v3', file.file),
cdnUrl: getCdnUrl(),
+ contentType: file.contentType || 'application/octet-stream',
...file
}));
@@ -15,7 +16,7 @@ const deployEndpoint = async (files) => {
cdnBase: getCdnUrl()
};
} catch (error) {
- logger.error('Deploy error:', error);
+ logger.error('S3 deploy error:', error);
return {
status: 500,
files: []
diff --git a/src/api/index.js b/src/api/index.js
index a5e2342..4d2bf25 100644
--- a/src/api/index.js
+++ b/src/api/index.js
@@ -77,8 +77,8 @@ router.post('/upload', async (req, res) => {
const result = await handleUpload(req);
res.status(result.status).json(result.body);
} catch (error) {
- logger.error('Upload handler error:', error);
- res.status(500).json({error: 'Internal server error'});
+ logger.error('S3 upload handler error:', error);
+ res.status(500).json({error: 'Storage upload failed'});
}
});
diff --git a/src/api/upload.js b/src/api/upload.js
index c47bf45..dbdd1e2 100644
--- a/src/api/upload.js
+++ b/src/api/upload.js
@@ -1,6 +1,6 @@
const fetch = require('node-fetch');
const crypto = require('crypto');
-const {uploadToBackblaze} = require('../backblaze');
+const {uploadToStorage} = require('../storage');
const {generateUrl, getCdnUrl} = require('./utils');
const logger = require('../config/logger');
@@ -13,15 +13,19 @@ function sanitizeFileName(fileName) {
return sanitizedFileName;
}
-// Handle remote file upload to B2 storage
+// Handle remote file upload to S3 storage
const uploadEndpoint = async (url, authorization = null) => {
try {
- logger.debug(`Downloading: ${url}`);
+ logger.debug('Starting download', { url });
const response = await fetch(url, {
headers: authorization ? {'Authorization': authorization} : {}
});
- if (!response.ok) throw new Error(`Download failed: ${response.statusText}`);
+ if (!response.ok) {
+ const error = new Error(`Download failed: ${response.statusText}`);
+ error.statusCode = response.status;
+ throw error;
+ }
// Generate unique filename using SHA1 (hash) of file contents
const buffer = await response.buffer();
@@ -30,27 +34,60 @@ const uploadEndpoint = async (url, authorization = null) => {
const sanitizedFileName = sanitizeFileName(originalName);
const fileName = `${sha}_${sanitizedFileName}`;
- // Upload to B2 storage
+ // Upload to S3 storage
logger.debug(`Uploading: ${fileName}`);
- const uploaded = await uploadToBackblaze('s/v3', fileName, buffer);
- if (!uploaded) throw new Error('Storage upload failed');
+ const uploadResult = await uploadToStorage('s/v3', fileName, buffer, response.headers.get('content-type'));
+    if (!uploadResult) {
+      throw new Error('Storage upload failed');
+    }
return {
url: generateUrl('s/v3', fileName),
sha,
- size: buffer.length
+ size: buffer.length,
+ type: response.headers.get('content-type')
};
} catch (error) {
- logger.error('Upload failed:', error);
- throw error;
+ logger.error('Upload process failed', {
+ url,
+ error: error.message,
+ statusCode: error.statusCode,
+ stack: error.stack
+ });
+
+    // Normalize the failure into a structured error response
+ const statusCode = error.statusCode || 500;
+ const errorResponse = {
+ error: {
+ message: error.message,
+ code: error.code || 'INTERNAL_ERROR',
+ details: error.details || null
+ },
+ success: false
+ };
+
+ throw { statusCode, ...errorResponse };
}
};
// Express request handler for file uploads
const handleUpload = async (req) => {
- const url = req.body || await req.text();
- const result = await uploadEndpoint(url, req.headers?.authorization);
- return {status: 200, body: result};
+ try {
+ const url = req.body || await req.text();
+ const result = await uploadEndpoint(url, req.headers?.authorization);
+ return { status: 200, body: result };
+ } catch (error) {
+ return {
+ status: error.statusCode || 500,
+ body: {
+ error: error.error || {
+ message: 'Internal server error',
+ code: 'INTERNAL_ERROR'
+ },
+ success: false
+ }
+ };
+ }
};
module.exports = {uploadEndpoint, handleUpload};
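With this change `uploadEndpoint` throws a plain `{statusCode, error, success}` object rather than an `Error`, and `handleUpload` folds every failure into a `{status, body}` result instead of rethrowing, which matches how `src/api/index.js` calls it. A minimal standalone sketch of a caller, assuming an `express.text` body parser (the `/upload` route here is illustrative):

```js
const express = require('express');
const {handleUpload} = require('./src/api/upload');

const app = express();
app.use(express.text({type: '*/*'})); // req.body arrives as the raw URL string

app.post('/upload', async (req, res) => {
  // handleUpload no longer throws; failures come back as {status, body}.
  const result = await handleUpload(req);
  res.status(result.status).json(result.body);
});

app.listen(0);
```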
diff --git a/src/api/utils.js b/src/api/utils.js
index 53b3d2f..4ce7c20 100644
--- a/src/api/utils.js
+++ b/src/api/utils.js
@@ -1,6 +1,6 @@
const logger = require('../config/logger');
-const getCdnUrl = () => process.env.B2_CDN_URL;
+const getCdnUrl = () => process.env.AWS_CDN_URL;
const generateUrl = (version, fileName) => {
return `${getCdnUrl()}/${version}/${fileName}`;
diff --git a/src/config/logger.js b/src/config/logger.js
index 6a543f2..6b9ca0a 100644
--- a/src/config/logger.js
+++ b/src/config/logger.js
@@ -1,23 +1,19 @@
const winston = require('winston');
-const consoleFormat = winston.format.combine(
- winston.format.colorize(),
- winston.format.timestamp(),
- winston.format.printf(({level, message, timestamp}) => {
- return `${timestamp} ${level}: ${message}`;
- })
-);
-
const logger = winston.createLogger({
- level: process.env.LOG_LEVEL || 'info',
- format: consoleFormat,
- transports: [
- new winston.transports.Console()
- ]
-});
-
-logger.on('error', error => {
- console.error('Logger error:', error);
+  level: process.env.LOG_LEVEL || 'info',
+ format: winston.format.combine(
+ winston.format.timestamp(),
+ winston.format.colorize(),
+ winston.format.printf(({ level, message, timestamp, ...meta }) => {
+ let output = `${timestamp} ${level}: ${message}`;
+ if (Object.keys(meta).length > 0) {
+ output += ` ${JSON.stringify(meta)}`;
+ }
+ return output;
+ })
+ ),
+ transports: [new winston.transports.Console()]
});
module.exports = logger;
\ No newline at end of file
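With the rewritten format, any metadata object passed as the second argument is JSON-serialized inline after the message. A quick sketch of the resulting console output (timestamp is illustrative, and `colorize` wraps the level in ANSI codes omitted here):

```js
const logger = require('./src/config/logger');

logger.info('CDN started successfully 🔥', {slackMode: 'Socket Mode', apiPort: 4553});
// 2025-02-20T01:12:28.000Z info: CDN started successfully 🔥 {"slackMode":"Socket Mode","apiPort":4553}
```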
diff --git a/src/fileUpload.js b/src/fileUpload.js
index 407f094..458f894 100644
--- a/src/fileUpload.js
+++ b/src/fileUpload.js
@@ -1,16 +1,13 @@
const fetch = require('node-fetch');
-const path = require('path');
const crypto = require('crypto');
const logger = require('./config/logger');
-const {uploadToBackblaze} = require('./backblaze');
+const storage = require('./storage');
const {generateFileUrl} = require('./utils');
const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes
const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages)
-// processed messages
const processedMessages = new Map();
-
let uploadLimit;
async function initialize() {
@@ -18,16 +15,12 @@ async function initialize() {
uploadLimit = pLimit(CONCURRENT_UPLOADS);
}
-// Check if the message is older than 24 hours for when the bot was offline
+// True if the event is older than 24 hours (e.g. the bot was offline)
function isMessageTooOld(eventTs) {
const eventTime = parseFloat(eventTs) * 1000;
- const currentTime = Date.now();
- const timeDifference = currentTime - eventTime;
- const maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
- return timeDifference > maxAge;
+ return (Date.now() - eventTime) > 24 * 60 * 60 * 1000;
}
-// check if the message has already been processed
function isMessageProcessed(messageTs) {
return processedMessages.has(messageTs);
}
@@ -36,7 +29,63 @@ function markMessageAsProcessing(messageTs) {
processedMessages.set(messageTs, true);
}
-// Processing reaction
+// File processing
+function sanitizeFileName(fileName) {
+ let sanitized = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
+ return sanitized || `upload_${Date.now()}`;
+}
+
+function generateUniqueFileName(fileName) {
+ return `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizeFileName(fileName)}`;
+}
+
+// upload functionality
+async function processFiles(fileMessage, client) {
+ const uploadedFiles = [];
+ const failedFiles = [];
+
+ logger.info(`Processing ${fileMessage.files?.length || 0} files`);
+
+ for (const file of fileMessage.files || []) {
+ try {
+ if (file.size > MAX_FILE_SIZE) {
+ failedFiles.push(file.name);
+ continue;
+ }
+
+ const response = await fetch(file.url_private, {
+ headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
+ });
+
+ if (!response.ok) throw new Error('Download failed');
+
+ const buffer = await response.buffer();
+ const uniqueFileName = generateUniqueFileName(file.name);
+ const userDir = `s/${fileMessage.user}`;
+
+ const success = await uploadLimit(() =>
+ storage.uploadToStorage(userDir, uniqueFileName, buffer, file.mimetype)
+ );
+
+ if (!success) throw new Error('Upload failed');
+
+ uploadedFiles.push({
+ name: uniqueFileName,
+ url: generateFileUrl(userDir, uniqueFileName),
+ contentType: file.mimetype
+ });
+
+ } catch (error) {
+ logger.error(`Failed: ${file.name} - ${error.message}`);
+ failedFiles.push(file.name);
+ }
+ }
+
+ logger.info(`Completed: ${uploadedFiles.length} ok, ${failedFiles.length} failed`);
+ return {uploadedFiles, failedFiles};
+}
+
+// Slack interaction
async function addProcessingReaction(client, event, fileMessage) {
try {
await client.reactions.add({
@@ -45,63 +94,10 @@ async function addProcessingReaction(client, event, fileMessage) {
channel: event.channel_id
});
} catch (error) {
- logger.error('Failed to add processing reaction:', error.message);
+ logger.error('Failed to add reaction:', error.message);
}
}
-// sanitize file names and ensure it's not empty (I don't even know if that's possible but let's be safe)
-function sanitizeFileName(fileName) {
- let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
- if (!sanitizedFileName) {
- sanitizedFileName = 'upload_' + Date.now();
- }
- return sanitizedFileName;
-}
-
-// Generate a unique, non-guessable file name
-function generateUniqueFileName(fileName) {
- const sanitizedFileName = sanitizeFileName(fileName);
- const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`;
- return uniqueFileName;
-}
-
-// upload files to the /s/ directory
-async function processFiles(fileMessage, client) {
- const uploadedFiles = [];
- const failedFiles = [];
-
- const files = fileMessage.files || [];
- for (const file of files) {
- if (file.size > MAX_FILE_SIZE) {
- failedFiles.push(file.name);
- continue;
- }
-
- try {
- const buffer = await fetch(file.url_private, {
- headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
- }).then(res => res.buffer());
-
- const uniqueFileName = generateUniqueFileName(file.name);
- const userDir = `s/${fileMessage.user}`;
-
- const success = await uploadLimit(() => uploadToBackblaze(userDir, uniqueFileName, buffer));
- if (success) {
- const url = generateFileUrl(userDir, uniqueFileName);
- uploadedFiles.push({name: uniqueFileName, url});
- } else {
- failedFiles.push(file.name);
- }
- } catch (error) {
- logger.error(`Failed to process file ${file.name}:`, error.message);
- failedFiles.push(file.name);
- }
- }
-
- return {uploadedFiles, failedFiles};
-}
-
-// update reactions based on success
async function updateReactions(client, event, fileMessage, success) {
try {
await client.reactions.remove({
@@ -119,7 +115,6 @@ async function updateReactions(client, event, fileMessage, success) {
}
}
-// find a file message
async function findFileMessage(event, client) {
try {
const fileInfo = await client.files.info({
@@ -217,11 +212,10 @@ async function handleFileUpload(event, client) {
const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
-
await updateReactions(client, event, fileMessage, failedFiles.length === 0);
} catch (error) {
- logger.error('Upload failed:', error.message);
+ logger.error(`Upload failed: ${error.message}`);
await handleError(client, event.channel_id, fileMessage, reactionAdded);
throw error;
}
diff --git a/src/storage.js b/src/storage.js
new file mode 100644
index 0000000..dbb755f
--- /dev/null
+++ b/src/storage.js
@@ -0,0 +1,310 @@
+const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
+const fetch = require('node-fetch');
+const path = require('path');
+const crypto = require('crypto');
+const logger = require('./config/logger');
+const {generateFileUrl} = require('./utils');
+
+const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes
+const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages)
+
+// processed messages
+const processedMessages = new Map();
+
+let uploadLimit;
+
+async function initialize() {
+ const pLimit = (await import('p-limit')).default;
+ uploadLimit = pLimit(CONCURRENT_UPLOADS);
+}
+
+// Check if the message is older than 24 hours for when the bot was offline
+function isMessageTooOld(eventTs) {
+ const eventTime = parseFloat(eventTs) * 1000;
+ const currentTime = Date.now();
+ const timeDifference = currentTime - eventTime;
+ const maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
+ return timeDifference > maxAge;
+}
+
+// check if the message has already been processed
+function isMessageProcessed(messageTs) {
+ return processedMessages.has(messageTs);
+}
+
+function markMessageAsProcessing(messageTs) {
+ processedMessages.set(messageTs, true);
+}
+
+// Processing reaction
+async function addProcessingReaction(client, event, fileMessage) {
+ try {
+ await client.reactions.add({
+ name: 'beachball',
+ timestamp: fileMessage.ts,
+ channel: event.channel_id
+ });
+ } catch (error) {
+ logger.error('Failed to add processing reaction:', error.message);
+ }
+}
+
+// sanitize file names and ensure it's not empty (I don't even know if that's possible but let's be safe)
+function sanitizeFileName(fileName) {
+ let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
+ if (!sanitizedFileName) {
+ sanitizedFileName = 'upload_' + Date.now();
+ }
+ return sanitizedFileName;
+}
+
+// Generate a unique, non-guessable file name
+function generateUniqueFileName(fileName) {
+ const sanitizedFileName = sanitizeFileName(fileName);
+ const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`;
+ return uniqueFileName;
+}
+
+// upload files to the /s/ directory
+async function processFiles(fileMessage, client) {
+ const uploadedFiles = [];
+ const failedFiles = [];
+
+ logger.debug('Starting file processing', {
+ userId: fileMessage.user,
+ fileCount: fileMessage.files?.length || 0
+ });
+
+ const files = fileMessage.files || [];
+ for (const file of files) {
+ logger.debug('Processing file', {
+ name: file.name,
+ size: file.size,
+ type: file.mimetype,
+ id: file.id
+ });
+
+ if (file.size > MAX_FILE_SIZE) {
+ logger.warn('File exceeds size limit', {
+ name: file.name,
+ size: file.size,
+ limit: MAX_FILE_SIZE
+ });
+ failedFiles.push(file.name);
+ continue;
+ }
+
+ try {
+ logger.debug('Fetching file from Slack', {
+ name: file.name,
+ url: file.url_private
+ });
+
+ const response = await fetch(file.url_private, {
+ headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
+ });
+
+ if (!response.ok) {
+ throw new Error(`Slack download failed: ${response.status} ${response.statusText}`);
+ }
+
+ const buffer = await response.buffer();
+ const contentType = file.mimetype || 'application/octet-stream';
+ const uniqueFileName = generateUniqueFileName(file.name);
+ const userDir = `s/${fileMessage.user}`;
+
+ const uploadResult = await uploadLimit(() =>
+ uploadToStorage(userDir, uniqueFileName, buffer, contentType)
+ );
+
+      if (!uploadResult) {
+        throw new Error('Storage upload failed');
+      }
+
+ const url = generateFileUrl(userDir, uniqueFileName);
+ uploadedFiles.push({
+ name: uniqueFileName,
+ url,
+ contentType
+ });
+ } catch (error) {
+ logger.error('File processing failed', {
+ fileName: file.name,
+ error: error.message,
+ stack: error.stack,
+ slackFileId: file.id,
+ userId: fileMessage.user
+ });
+ failedFiles.push(file.name);
+ }
+ }
+
+ logger.debug('File processing complete', {
+ successful: uploadedFiles.length,
+ failed: failedFiles.length
+ });
+
+ return {uploadedFiles, failedFiles};
+}
+
+// update reactions based on success
+async function updateReactions(client, event, fileMessage, success) {
+ try {
+ await client.reactions.remove({
+ name: 'beachball',
+ timestamp: fileMessage.ts,
+ channel: event.channel_id
+ });
+ await client.reactions.add({
+ name: success ? 'white_check_mark' : 'x',
+ timestamp: fileMessage.ts,
+ channel: event.channel_id
+ });
+ } catch (error) {
+ logger.error('Failed to update reactions:', error.message);
+ }
+}
+
+// find a file message
+async function findFileMessage(event, client) {
+ try {
+ const fileInfo = await client.files.info({
+ file: event.file_id,
+ include_shares: true
+ });
+
+ if (!fileInfo.ok || !fileInfo.file) {
+ throw new Error('Could not get file info');
+ }
+
+ const channelShare = fileInfo.file.shares?.public?.[event.channel_id] ||
+ fileInfo.file.shares?.private?.[event.channel_id];
+
+ if (!channelShare || !channelShare.length) {
+ throw new Error('No share info found for this channel');
+ }
+
+ // Get the exact message using the ts from share info
+ const messageTs = channelShare[0].ts;
+
+ const messageInfo = await client.conversations.history({
+ channel: event.channel_id,
+ latest: messageTs,
+ limit: 1,
+ inclusive: true
+ });
+
+ if (!messageInfo.ok || !messageInfo.messages.length) {
+ throw new Error('Could not find original message');
+ }
+
+ return messageInfo.messages[0];
+ } catch (error) {
+ logger.error('Error finding file message:', error);
+ return null;
+ }
+}
+
+async function sendResultsMessage(client, channelId, fileMessage, uploadedFiles, failedFiles) {
+ let message = `Hey <@${fileMessage.user}>, `;
+ if (uploadedFiles.length > 0) {
+ message += `here ${uploadedFiles.length === 1 ? 'is your link' : 'are your links'}:\n`;
+ message += uploadedFiles.map(f => `• ${f.name}: ${f.url}`).join('\n');
+ }
+ if (failedFiles.length > 0) {
+ message += `\n\nFailed to process: ${failedFiles.join(', ')}`;
+ }
+
+ await client.chat.postMessage({
+ channel: channelId,
+ thread_ts: fileMessage.ts,
+ text: message
+ });
+}
+
+async function handleError(client, channelId, fileMessage, reactionAdded) {
+ if (fileMessage && reactionAdded) {
+ try {
+ await client.reactions.remove({
+ name: 'beachball',
+ timestamp: fileMessage.ts,
+ channel: channelId
+ });
+ } catch (cleanupError) {
+      if (cleanupError.data?.error !== 'no_reaction') {
+ logger.error('Cleanup error:', cleanupError);
+ }
+ }
+ try {
+ await client.reactions.add({
+ name: 'x',
+ timestamp: fileMessage.ts,
+ channel: channelId
+ });
+ } catch (cleanupError) {
+ logger.error('Cleanup error:', cleanupError);
+ }
+ }
+}
+
+async function handleFileUpload(event, client) {
+ let fileMessage = null;
+ let reactionAdded = false;
+
+ try {
+ if (isMessageTooOld(event.event_ts)) return;
+
+ fileMessage = await findFileMessage(event, client);
+ if (!fileMessage || isMessageProcessed(fileMessage.ts)) return;
+
+ markMessageAsProcessing(fileMessage.ts);
+ await addProcessingReaction(client, event, fileMessage);
+ reactionAdded = true;
+
+ const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
+ await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
+
+ await updateReactions(client, event, fileMessage, failedFiles.length === 0);
+
+ } catch (error) {
+ logger.error('Upload failed:', error.message);
+ await handleError(client, event.channel_id, fileMessage, reactionAdded);
+ throw error;
+ }
+}
+
+const s3Client = new S3Client({
+ region: process.env.AWS_REGION,
+ endpoint: process.env.AWS_ENDPOINT,
+ credentials: {
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+ secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
+ }
+});
+
+async function uploadToStorage(userDir, uniqueFileName, buffer, contentType = 'application/octet-stream') {
+ try {
+ const params = {
+ Bucket: process.env.AWS_BUCKET_NAME,
+ Key: `${userDir}/${uniqueFileName}`,
+ Body: buffer,
+ ContentType: contentType,
+ CacheControl: 'public, immutable, max-age=31536000'
+ };
+
+ logger.info(`Uploading: ${uniqueFileName}`);
+ await s3Client.send(new PutObjectCommand(params));
+ return true;
+ } catch (error) {
+ logger.error(`Upload failed: ${error.message}`, {
+ path: `${userDir}/${uniqueFileName}`,
+ error: error.message
+ });
+ return false;
+ }
+}
+
+module.exports = {
+ handleFileUpload,
+ initialize,
+ uploadToStorage
+};
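`uploadToStorage` resolves to `true` on success and `false` on failure (errors are logged, never thrown), so callers must check the boolean. A minimal usage sketch, assuming the `AWS_*` environment variables are configured:

```js
const {uploadToStorage} = require('./src/storage');

(async () => {
  const ok = await uploadToStorage('s/v3', 'hello.txt', Buffer.from('hello'), 'text/plain');
  if (!ok) throw new Error('Storage upload failed');
  console.log('uploaded as s/v3/hello.txt');
})();
```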
diff --git a/src/upload.js b/src/upload.js
index b390b9f..167d897 100644
--- a/src/upload.js
+++ b/src/upload.js
@@ -1,6 +1,6 @@
const fs = require('fs');
const path = require('path');
-const {uploadToBackblaze} = require('../backblaze');
+const {uploadToStorage} = require('../storage');
const {generateUrl} = require('./utils');
const logger = require('../config/logger');
@@ -9,16 +9,19 @@ const handleUpload = async (file) => {
try {
const buffer = fs.readFileSync(file.path);
const fileName = path.basename(file.originalname);
+ // Add content type detection for S3
+ const contentType = file.mimetype || 'application/octet-stream';
const uniqueFileName = `${Date.now()}-${fileName}`;
- // Upload to B2 storage
+ // Upload to S3 storage with content type
logger.debug(`Uploading: ${uniqueFileName}`);
- const uploaded = await uploadToBackblaze('s/v3', uniqueFileName, buffer);
+ const uploaded = await uploadToStorage('s/v3', uniqueFileName, buffer, contentType);
if (!uploaded) throw new Error('Storage upload failed');
return {
name: fileName,
- url: generateUrl('s/v3', uniqueFileName)
+ url: generateUrl('s/v3', uniqueFileName),
+ contentType
};
} catch (error) {
logger.error('Upload failed:', error);
diff --git a/src/utils.js b/src/utils.js
index 6a0003f..b3fabf8 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -1,7 +1,7 @@
// Make the CDN URL
function generateFileUrl(userDir, uniqueFileName) {
- const cdnUrl = process.env.B2_CDN_URL;
+ const cdnUrl = process.env.AWS_CDN_URL;
return `${cdnUrl}/${userDir}/${uniqueFileName}`;
}
From 0655a0d8a7bca9928feb0800210d28fc4b4c7a22 Mon Sep 17 00:00:00 2001
From: Deployor <129990841+deployor@users.noreply.github.com>
Date: Thu, 20 Feb 2025 01:12:28 +0100
Subject: [PATCH 09/16] Added the Fun lines and error images! Also added
partial error support!
---
.gitignore | 3 +-
src/config/messages.js | 137 +++++++++++++++++++++++++++++++++++++++++
src/fileUpload.js | 126 +++++++++++++++++++++++++++++--------
src/storage.js | 2 +-
src/upload.js | 4 +-
5 files changed, 244 insertions(+), 28 deletions(-)
create mode 100644 src/config/messages.js
diff --git a/.gitignore b/.gitignore
index 5cc472e..a3b3a54 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@
/.idea/
/.env
/bun.lockb
-/package-lock.json
\ No newline at end of file
+/package-lock.json
+/.history
\ No newline at end of file
diff --git a/src/config/messages.js b/src/config/messages.js
new file mode 100644
index 0000000..cb579e3
--- /dev/null
+++ b/src/config/messages.js
@@ -0,0 +1,137 @@
+const messages = {
+ success: {
+ singleFile: "Hey <@{userId}>, here's your link:",
+ multipleFiles: "Hey <@{userId}>, here are your links:",
+ alternateSuccess: [
+ "thanks!",
+ "thanks, i'm gonna sell these to adfly!",
+ "tysm!",
+ "file away!"
+ ]
+ },
+ fileTypes: {
+ gif: [
+ "_gif_ that file to me and i'll upload it",
+      "_gif_ me all your files!"
+ ],
+ heic: [
+ "What the heic???"
+ ],
+ mov: [
+ "I'll _mov_ that to a permanent link for you"
+ ],
+ html: [
+ "Oh, launching a new website?",
+ "uwu, what's this site?",
+ "WooOOAAah hey! Are you serving a site?",
+ "h-t-m-ello :wave:"
+ ],
+ rar: [
+ ".rawr xD",
+ "i also go \"rar\" sometimes!"
+ ]
+ },
+ errors: {
+ tooBig: {
+ messages: [
+ "File too big!",
+ "That's a chonky file!",
+ "_orpheus struggles to lift the massive file_",
+ "Sorry, that file's too thicc for me to handle!"
+ ],
+ images: [
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/2too_big_4.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/3too_big_2.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/4too_big_1.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/6too_big_5.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/7too_big_3.png"
+ ]
+ },
+ generic: {
+ messages: [
+ "_orpheus sneezes and drops the files on the ground before blowing her nose on a blank jpeg._",
+ "_orpheus trips and your files slip out of her hands and into an inconveniently placed sewer grate._",
+ "_orpheus accidentally slips the files into a folder in her briefcase labeled \"homework\". she starts sweating profusely._"
+ ],
+ images: [
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/0generic_3.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/1generic_2.png",
+ "https://cloud-3tq9t10za-hack-club-bot.vercel.app/5generic_1.png"
+ ]
+ }
+ }
+};
+
+function getRandomItem(array) {
+ return array[Math.floor(Math.random() * array.length)];
+}
+
+function getFileTypeMessage(fileExtension) {
+ const ext = fileExtension.toLowerCase();
+ return messages.fileTypes[ext] ? getRandomItem(messages.fileTypes[ext]) : null;
+}
+
+function formatErrorMessage(failedFiles, isSizeError = false) {
+ const errorType = isSizeError ? messages.errors.tooBig : messages.errors.generic;
+ const errorMessage = getRandomItem(errorType.messages);
+ const errorImage = getRandomItem(errorType.images);
+
+ return [
+ errorMessage,
+ `Failed files: ${failedFiles.join(', ')}`,
+ '',
+ `<${errorImage}|image>`
+ ].join('\n');
+}
+
+function formatSuccessMessage(userId, files, failedFiles = [], sizeFailedFiles = []) {
+ const messageLines = [];
+
+ const baseMessage = files.length === 1 ?
+ messages.success.singleFile :
+ messages.success.multipleFiles;
+ messageLines.push(baseMessage.replace('{userId}', userId), '');
+
+ const fileGroups = new Map();
+ files.forEach(file => {
+ const ext = file.originalName.split('.').pop();
+ const typeMessage = getFileTypeMessage(ext);
+ const key = typeMessage || 'noType';
+
+ if (!fileGroups.has(key)) {
+ fileGroups.set(key, []);
+ }
+ fileGroups.get(key).push(file);
+ });
+
+ fileGroups.forEach((groupFiles, typeMessage) => {
+ if (typeMessage !== 'noType') {
+ messageLines.push('', typeMessage);
+ }
+
+ groupFiles.forEach(file => {
+ messageLines.push(`• ${file.originalName}: ${file.url}`);
+ });
+ });
+
+ if (sizeFailedFiles.length > 0) {
+ messageLines.push(formatErrorMessage(sizeFailedFiles, true));
+ }
+ if (failedFiles.length > 0) {
+ messageLines.push(formatErrorMessage(failedFiles, false));
+ }
+
+ if (files.length > 0) {
+ messageLines.push('', `_${getRandomItem(messages.success.alternateSuccess)}_`);
+ }
+
+ return messageLines.join('\n');
+}
+
+module.exports = {
+ messages,
+ getFileTypeMessage,
+ formatSuccessMessage,
+ formatErrorMessage,
+ getRandomItem
+};
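`formatSuccessMessage` groups uploads under extension-specific quips, then appends error blocks for failures and a random sign-off. A sketch of a call with a made-up user ID, file names, and URLs:

```js
const {formatSuccessMessage} = require('./src/config/messages');

const text = formatSuccessMessage('U012ABCDEF', [
  {originalName: 'site.html', url: 'https://cdn.beans.com/s/U012ABCDEF/site.html'},
  {originalName: 'notes.txt', url: 'https://cdn.beans.com/s/U012ABCDEF/notes.txt'}
], ['broken.bin']);

// Greeting, an html quip above site.html's bullet, a plain bullet for
// notes.txt, a random generic-error block for broken.bin, then a sign-off.
console.log(text);
```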
diff --git a/src/fileUpload.js b/src/fileUpload.js
index 458f894..3a3e616 100644
--- a/src/fileUpload.js
+++ b/src/fileUpload.js
@@ -3,8 +3,15 @@ const crypto = require('crypto');
const logger = require('./config/logger');
const storage = require('./storage');
const {generateFileUrl} = require('./utils');
+const path = require('path');
+const {
+ messages,
+ formatSuccessMessage,
+ formatErrorMessage,
+ getFileTypeMessage
+} = require('./config/messages');
-const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes
+const MAX_FILE_SIZE = 50 * 1024 * 1024; // 50MB in bytes
const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages)
const processedMessages = new Map();
@@ -43,16 +50,23 @@ function generateUniqueFileName(fileName) {
async function processFiles(fileMessage, client) {
const uploadedFiles = [];
const failedFiles = [];
+ const sizeFailedFiles = [];
+ const fileTypeResponses = new Set();
logger.info(`Processing ${fileMessage.files?.length || 0} files`);
for (const file of fileMessage.files || []) {
try {
if (file.size > MAX_FILE_SIZE) {
- failedFiles.push(file.name);
+ sizeFailedFiles.push(file.name);
continue;
}
+ // Get file extension message if applicable
+ const ext = path.extname(file.name).slice(1);
+ const typeMessage = getFileTypeMessage(ext);
+ if (typeMessage) fileTypeResponses.add(typeMessage);
+
const response = await fetch(file.url_private, {
headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
});
@@ -71,6 +85,7 @@ async function processFiles(fileMessage, client) {
uploadedFiles.push({
name: uniqueFileName,
+ originalName: file.name,
url: generateFileUrl(userDir, uniqueFileName),
contentType: file.mimetype
});
@@ -81,8 +96,12 @@ async function processFiles(fileMessage, client) {
}
}
- logger.info(`Completed: ${uploadedFiles.length} ok, ${failedFiles.length} failed`);
- return {uploadedFiles, failedFiles};
+ return {
+ uploadedFiles,
+ failedFiles,
+ sizeFailedFiles,
+ isSizeError: sizeFailedFiles.length > 0
+ };
}
// Slack interaction
@@ -98,15 +117,26 @@ async function addProcessingReaction(client, event, fileMessage) {
}
}
-async function updateReactions(client, event, fileMessage, success) {
+async function updateReactions(client, event, fileMessage, totalFiles, failedCount) {
try {
await client.reactions.remove({
name: 'beachball',
timestamp: fileMessage.ts,
channel: event.channel_id
});
+
+      // Choose a reaction based on how many files failed or succeeded
+ let reactionName;
+ if (failedCount === totalFiles) {
+ reactionName = 'x'; // All files failed
+ } else if (failedCount > 0) {
+ reactionName = 'warning'; // Some files failed
+ } else {
+ reactionName = 'white_check_mark'; // All files succeeded
+ }
+
await client.reactions.add({
- name: success ? 'white_check_mark' : 'x',
+ name: reactionName,
timestamp: fileMessage.ts,
channel: event.channel_id
});
@@ -133,7 +163,7 @@ async function findFileMessage(event, client) {
throw new Error('No share info found for this channel');
}
- // Get the exact message using the ts from share info
+ // Get the EXACT message using the ts from share info (channelShare)
const messageTs = channelShare[0].ts;
const messageInfo = await client.conversations.history({
@@ -154,21 +184,51 @@ async function findFileMessage(event, client) {
}
}
-async function sendResultsMessage(client, channelId, fileMessage, uploadedFiles, failedFiles) {
- let message = `Hey <@${fileMessage.user}>, `;
- if (uploadedFiles.length > 0) {
- message += `here ${uploadedFiles.length === 1 ? 'is your link' : 'are your links'}:\n`;
- message += uploadedFiles.map(f => `• ${f.name}: ${f.url}`).join('\n');
- }
- if (failedFiles.length > 0) {
- message += `\n\nFailed to process: ${failedFiles.join(', ')}`;
- }
+async function sendResultsMessage(client, channelId, fileMessage, uploadedFiles, failedFiles, sizeFailedFiles) {
+ try {
+ let message;
+ if (uploadedFiles.length === 0 && (failedFiles.length > 0 || sizeFailedFiles.length > 0)) {
+ // All files failed - use appropriate error type
+ message = formatErrorMessage(
+ [...failedFiles, ...sizeFailedFiles],
+        sizeFailedFiles.length > 0 && failedFiles.length === 0 // Only use the size error when every failure is size-related
+ );
+ } else {
+ // Mixed success/failure or all success
+ message = formatSuccessMessage(
+ fileMessage.user,
+ uploadedFiles,
+ failedFiles,
+ sizeFailedFiles
+ );
+ }
- await client.chat.postMessage({
- channel: channelId,
- thread_ts: fileMessage.ts,
- text: message
- });
+ const lines = message.split('\n');
+ const attachments = [];
+ let textBuffer = '';
+
+ for (const line of lines) {
+ if (line.match(/^<.*\|image>$/)) {
+ const imageUrl = line.replace(/^<|>$/g, '').replace('|image', '');
+ attachments.push({
+ image_url: imageUrl,
+ fallback: 'Error image'
+ });
+ } else {
+ textBuffer += line + '\n';
+ }
+ }
+
+ await client.chat.postMessage({
+ channel: channelId,
+ thread_ts: fileMessage.ts,
+ text: textBuffer.trim(),
+ attachments: attachments.length > 0 ? attachments : undefined
+ });
+ } catch (error) {
+ logger.error('Failed to send results message:', error);
+ throw error;
+ }
}
async function handleError(client, channelId, fileMessage, reactionAdded) {
@@ -210,9 +270,27 @@ async function handleFileUpload(event, client) {
await addProcessingReaction(client, event, fileMessage);
reactionAdded = true;
- const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
- await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
- await updateReactions(client, event, fileMessage, failedFiles.length === 0);
+ const {uploadedFiles, failedFiles, sizeFailedFiles} = await processFiles(fileMessage, client);
+
+ const totalFiles = uploadedFiles.length + failedFiles.length + sizeFailedFiles.length;
+ const failedCount = failedFiles.length + sizeFailedFiles.length;
+
+ await sendResultsMessage(
+ client,
+ event.channel_id,
+ fileMessage,
+ uploadedFiles,
+ failedFiles,
+ sizeFailedFiles
+ );
+
+ await updateReactions(
+ client,
+ event,
+ fileMessage,
+ totalFiles,
+ failedCount
+ );
} catch (error) {
logger.error(`Upload failed: ${error.message}`);
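`sendResultsMessage` now splits the formatted message on the `<url|image>` lines emitted by `formatErrorMessage`, posting the text once and each image as a Slack attachment. A standalone sketch of that split, with an illustrative input:

```js
const lines = [
  'File too big!',
  'Failed files: huge.mov',
  '<https://example.com/too_big.png|image>'
];

const attachments = [];
let textBuffer = '';
for (const line of lines) {
  if (line.match(/^<.*\|image>$/)) {
    // '<https://...|image>' -> 'https://...'
    const imageUrl = line.replace(/^<|>$/g, '').replace('|image', '');
    attachments.push({image_url: imageUrl, fallback: 'Error image'});
  } else {
    textBuffer += line + '\n';
  }
}
// textBuffer  === 'File too big!\nFailed files: huge.mov\n'
// attachments === [{image_url: 'https://example.com/too_big.png', fallback: 'Error image'}]
```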
diff --git a/src/storage.js b/src/storage.js
index dbb755f..0968212 100644
--- a/src/storage.js
+++ b/src/storage.js
@@ -57,7 +57,7 @@ function sanitizeFileName(fileName) {
return sanitizedFileName;
}
-// Generate a unique, non-guessable file name
+// Generate a unique file name
function generateUniqueFileName(fileName) {
const sanitizedFileName = sanitizeFileName(fileName);
const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`;
diff --git a/src/upload.js b/src/upload.js
index 167d897..fa73ab2 100644
--- a/src/upload.js
+++ b/src/upload.js
@@ -9,11 +9,11 @@ const handleUpload = async (file) => {
try {
const buffer = fs.readFileSync(file.path);
const fileName = path.basename(file.originalname);
- // Add content type detection for S3
+ // content type detection for S3
const contentType = file.mimetype || 'application/octet-stream';
const uniqueFileName = `${Date.now()}-${fileName}`;
- // Upload to S3 storage with content type
+ // Upload to S3
logger.debug(`Uploading: ${uniqueFileName}`);
const uploaded = await uploadToStorage('s/v3', uniqueFileName, buffer, contentType);
if (!uploaded) throw new Error('Storage upload failed');
From d9901a4285905690908e6d15f914eab89c2facd2 Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 20:33:38 -0500
Subject: [PATCH 10/16] Create Dockerfile
---
Dockerfile | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
create mode 100644 Dockerfile
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..5cd891d
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,20 @@
+# Use the official Bun image as base
+FROM oven/bun:1
+
+# Set working directory
+WORKDIR /app
+
+# Copy package.json and bun.lockb (if exists)
+COPY package*.json bun.lockb* ./
+
+# Install dependencies
+RUN bun install
+
+# Copy the rest of the application
+COPY . .
+
+# Expose the port your Express server runs on
+EXPOSE 3000
+
+# Start the server
+CMD ["bun", "run", "start"]
From d09d2934a30056f52e8beb5c17c2ebe06c54c327 Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 20:33:51 -0500
Subject: [PATCH 11/16] Setup .env.example
---
.env.example | 17 +++++++++++++++++
README.md | 29 +++++------------------------
2 files changed, 22 insertions(+), 24 deletions(-)
create mode 100644 .env.example
diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..77c617d
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,17 @@
+# Slack
+SLACK_BOT_TOKEN=xoxb- # From OAuth & Permissions
+SLACK_SIGNING_SECRET= # From Basic Information
+SLACK_APP_TOKEN=xapp- # From Basic Information (for Socket Mode)
+SLACK_CHANNEL_ID=channel-id # Channel where bot operates
+
+# S3 Config CF in this example
+AWS_ACCESS_KEY_ID=1234567890abcdef
+AWS_SECRET_ACCESS_KEY=abcdef1234567890
+AWS_BUCKET_NAME=my-cdn-bucket
+AWS_REGION=auto
+AWS_ENDPOINT=https://.r2.cloudflarestorage.com
+AWS_CDN_URL=https://cdn.beans.com
+
+# API
+API_TOKEN=beans # Set a secure random string
+PORT=3000
\ No newline at end of file
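Nothing in the patch validates these variables at startup; a missing one only surfaces when a request reaches the S3 client or URL builder. A sketch of an optional startup guard (hypothetical helper, not part of this patch):

```js
// Fail fast if required configuration from .env.example is absent.
const required = [
  'SLACK_BOT_TOKEN', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY',
  'AWS_BUCKET_NAME', 'AWS_ENDPOINT', 'AWS_CDN_URL'
];
const missing = required.filter(name => !process.env[name]);
if (missing.length) {
  throw new Error(`Missing required env vars: ${missing.join(', ')}`);
}
```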
diff --git a/README.md b/README.md
index 1a6e014..b136e0b 100644
--- a/README.md
+++ b/README.md
@@ -77,26 +77,7 @@ This CDN supports any S3-compatible storage service. Here's how to set it up usi
### 3. Environment Setup
-Create a `.env` file with:
-```env
-# Slack
-SLACK_BOT_TOKEN=xoxb- # From OAuth & Permissions
-SLACK_SIGNING_SECRET= # From Basic Information
-SLACK_APP_TOKEN=xapp- # From Basic Information (for Socket Mode)
-SLACK_CHANNEL_ID=channel-id # Channel where bot operates
-
-# S3 Config CF in this example
-AWS_ACCESS_KEY_ID=1234567890abcdef
-AWS_SECRET_ACCESS_KEY=abcdef1234567890
-AWS_BUCKET_NAME=my-cdn-bucket
-AWS_REGION=auto
-AWS_ENDPOINT=https://.r2.cloudflarestorage.com
-AWS_CDN_URL=https://cdn.beans.com
-
-# API
-API_TOKEN=beans # Set a secure random string
-PORT=3000
-```
+Check out the `.env.example` file to get started!
### **4. Installation & Running**
@@ -145,7 +126,7 @@ pm2 startup
### V3 API (Latest)
-**Endpoint:** `POST https://e2.example.hackclub.app/api/v3/new`
+**Endpoint:** `POST https://cdn.hackclub.com/api/v3/new`
**Headers:**
```
@@ -155,7 +136,7 @@ Content-Type: application/json
**Request Example:**
```bash
-curl --location 'https://e2.example.hackclub.app/api/v3/new' \
+curl --location 'https://cdn.hackclub.com/api/v3/new' \
--header 'Authorization: Bearer beans' \
--header 'Content-Type: application/json' \
--data '[
@@ -203,7 +184,7 @@ curl --location 'https://e2.example.hackclub.app/api/v3/new' \
-**Endpoint:** `POST https://e2.example.hackclub.app/api/v2/new`
+**Endpoint:** `POST https://cdn.hackclub.com/api/v2/new`
**Headers:**
```
@@ -235,7 +216,7 @@ Content-Type: application/json
-**Endpoint:** `POST https://e2.example.hackclub.app/api/v1/new`
+**Endpoint:** `POST https://cdn.hackclub.com/api/v1/new`
**Headers:**
```
From a521298112be19d217a6ac8314b68249f507c7f9 Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 21:06:38 -0500
Subject: [PATCH 12/16] Add root redirect to repo
---
index.js | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/index.js b/index.js
index 482ece9..9b32503 100644
--- a/index.js
+++ b/index.js
@@ -29,6 +29,11 @@ expressApp.use(express.urlencoded({ extended: true }));
// Mount API for all versions
expressApp.use('/api', apiRoutes);
+// redirect route to "https://github.com/hackclub/cdn"
+expressApp.get('/', (req, res) => {
+ res.redirect('https://github.com/hackclub/cdn');
+});
+
// Error handling middleware
expressApp.use((err, req, res, next) => {
logger.error('API Error:', {
From e4835328dcdb971ef66108aed18d11041e9814d3 Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 21:08:14 -0500
Subject: [PATCH 13/16] Fix dockerfile for coolify
---
Dockerfile | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Dockerfile b/Dockerfile
index 5cd891d..2c87544 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,9 @@
# Use the official Bun image as base
FROM oven/bun:1
+# install wget for coolify healthcheck
+RUN apt-get update && apt-get install -y wget
+
# Set working directory
WORKDIR /app
From cd71eb3b620fe5e99c93a10b296fe1be1352e85e Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 21:11:50 -0500
Subject: [PATCH 14/16] Temporarily turn off slack bot while attempting
 deploy
---
index.js | 34 +++++++++++++++++-----------------
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/index.js b/index.js
index 9b32503..5893217 100644
--- a/index.js
+++ b/index.js
@@ -5,7 +5,7 @@ const logger = require('./src/config/logger');
logger.info('Starting CDN application 🚀');
-const {App} = require('@slack/bolt');
+// const {App} = require('@slack/bolt');
const fileUpload = require('./src/fileUpload');
const express = require('express');
const cors = require('cors');
@@ -13,12 +13,12 @@ const apiRoutes = require('./src/api/index.js');
const BOT_START_TIME = Date.now() / 1000;
-const app = new App({
- token: process.env.SLACK_BOT_TOKEN,
- signingSecret: process.env.SLACK_SIGNING_SECRET,
- socketMode: true,
- appToken: process.env.SLACK_APP_TOKEN
-});
+// const app = new App({
+// token: process.env.SLACK_BOT_TOKEN,
+// signingSecret: process.env.SLACK_SIGNING_SECRET,
+// socketMode: false,
+// appToken: process.env.SLACK_APP_TOKEN
+// });
// API server
const expressApp = express();
@@ -52,22 +52,22 @@ expressApp.use((req, res, next) => {
});
// Event listener for file_shared events
-app.event('file_shared', async ({event, client}) => {
- if (parseFloat(event.event_ts) < BOT_START_TIME) return;
- if (event.channel_id !== process.env.SLACK_CHANNEL_ID) return;
+// app.event('file_shared', async ({event, client}) => {
+// if (parseFloat(event.event_ts) < BOT_START_TIME) return;
+// if (event.channel_id !== process.env.SLACK_CHANNEL_ID) return;
- try {
- await fileUpload.handleFileUpload(event, client);
- } catch (error) {
- logger.error(`Upload failed: ${error.message}`);
- }
-});
+// try {
+// await fileUpload.handleFileUpload(event, client);
+// } catch (error) {
+// logger.error(`Upload failed: ${error.message}`);
+// }
+// });
// Startup
(async () => {
try {
await fileUpload.initialize();
- await app.start();
+ // await app.start();
const port = parseInt(process.env.PORT || '4553', 10);
expressApp.listen(port, () => {
logger.info('CDN started successfully 🔥', {
From 826aec6878cdd0af6141f4d4ccb56641882611cb Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 21:13:37 -0500
Subject: [PATCH 15/16] Switch wget -> curl
---
Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 2c87544..aca2a62 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,8 +1,8 @@
# Use the official Bun image as base
FROM oven/bun:1
-# install wget for coolify healthcheck
-RUN apt-get update && apt-get install -y wget
+# install curl for coolify healthcheck
+RUN apt-get update && apt-get install -y curl
# Set working directory
WORKDIR /app
From 264399b93ea20093fa2d80b4d3e1ecae08a93c86 Mon Sep 17 00:00:00 2001
From: Max Wofford
Date: Mon, 24 Feb 2025 21:16:00 -0500
Subject: [PATCH 16/16] Try curl + wget
---
Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index aca2a62..737842c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@
FROM oven/bun:1
# install curl for coolify healthcheck
-RUN apt-get update && apt-get install -y curl
+RUN apt-get update && apt-get install -y curl wget
# Set working directory
WORKDIR /app