Migrated Fully to S3 API 2

This commit is contained in:
Tom (Whity) 2025-01-23 15:19:24 +01:00
parent 495d1410a9
commit 16c690583d
No known key found for this signature in database
GPG key ID: 5F3E3278F02734C2
12 changed files with 526 additions and 169 deletions

View file

@ -44,19 +44,36 @@
4. Enable Event Subscriptions and subscribe to `file_shared` event
5. Install the app to your workspace
### 2. CDN Configuration (Cloudflare + Backblaze)
### 2. Storage Configuration
1. Create a Backblaze B2 bucket
2. Set up Cloudflare DNS:
- Add a CNAME record pointing to your B2 bucket endpoint (e.g., `f003.backblazeb2.com`); you can find this by uploading a file and checking its file info
- Enable Cloudflare proxy
3. Configure SSL/TLS:
- Set SSL mode to "Full (strict)"
- ⚠️ **WARNING**: This setting may break other configurations on your domain! Consider using a separate domain instead.
4. Create a Transform Rule:
- Filter: `hostname equals "your-cdn.example.com"`
- Rewrite to: `concat("/file/(bucket name)", http.request.uri.path)` (make sure you substitute your actual bucket name)
- Preserve query string
This CDN supports any S3-compatible storage service. Here's how to set it up using Cloudflare R2 as an example:
#### Setting up Cloudflare R2 (Example)
1. **Create R2 Bucket**
- Go to Cloudflare Dashboard > R2
- Click "Create Bucket"
- Name your bucket
- Enable public access
2. **Generate API Credentials**
- Go to R2
- Click "Manage API tokens" in API
- Click "Create API Token"
- Permissions: "Object Read & Write"
- Save both the Access Key ID and Secret Access Key (the S3 credentials)
3. **Get Your URL**
- Go to R2
- Click "Use R2 with APIs" in API
- Select S3 Compatible API
- The URL is your Endpoint
4. **Configure Custom Domain (Optional)**
- Go to R2 > Bucket Settings > Custom Domains
- Add your domain (e.g., cdn.beans.com)
- Follow DNS configuration steps
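
Once public access or the custom domain is in place, it is worth confirming that objects actually resolve over HTTP before wiring up the bot. A minimal sketch (not part of this repo) using `node-fetch`; the object key and domain below are hypothetical placeholders:

```javascript
// Hedged sanity check: HEAD an object through the public/custom domain.
// The object key is hypothetical — upload any test file first and use its key.
const fetch = require('node-fetch');

(async () => {
  const url = 'https://cdn.beans.com/test-object.txt'; // example custom domain from step 4
  const res = await fetch(url, { method: 'HEAD' });
  console.log(res.status, res.headers.get('content-type'));
})();
```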
### 3. Environment Setup
@ -68,21 +85,19 @@ SLACK_SIGNING_SECRET= # From Basic Information
SLACK_APP_TOKEN=xapp- # From Basic Information (for Socket Mode)
SLACK_CHANNEL_ID=channel-id # Channel where bot operates
# Backblaze (Public Bucket)
B2_APP_KEY_ID=key-id # From B2 Application Keys
B2_APP_KEY=app-key # From B2 Application Keys
B2_BUCKET_ID=bucket-id # From B2 Bucket Settings
B2_CDN_URL=https://cdn.example.com
# S3 config (Cloudflare R2 in this example)
AWS_ACCESS_KEY_ID=1234567890abcdef
AWS_SECRET_ACCESS_KEY=abcdef1234567890
AWS_BUCKET_NAME=my-cdn-bucket
AWS_REGION=auto
AWS_ENDPOINT=https://<accountid>.r2.cloudflarestorage.com
AWS_CDN_URL=https://cdn.beans.com
# API
API_TOKEN=beans # Set a secure random string
PORT=3000
PORT=3000
```
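
With these variables set, you can construct the S3 client and verify the bucket before starting the bot. This is a minimal sketch (not part of the repo) that assumes the `AWS_*` variables above; `HeadBucketCommand` from `@aws-sdk/client-s3` simply checks that the bucket exists and the credentials can reach it:

```javascript
// check-storage.js — hedged sketch: verify the S3-compatible endpoint and credentials
require('dotenv').config();
const { S3Client, HeadBucketCommand } = require('@aws-sdk/client-s3');

const s3 = new S3Client({
  region: process.env.AWS_REGION,
  endpoint: process.env.AWS_ENDPOINT,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
  }
});

(async () => {
  try {
    await s3.send(new HeadBucketCommand({ Bucket: process.env.AWS_BUCKET_NAME }));
    console.log('Bucket reachable with the configured credentials');
  } catch (err) {
    console.error('Bucket check failed:', err.name, err.message);
    process.exit(1);
  }
})();
```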
---
### **4. Installation & Running**
#### **Install Dependencies**
@ -130,7 +145,7 @@ pm2 startup
### V3 API (Latest)
<img alt="Version 3" src="https://files.catbox.moe/e3ravk.png" align="right" width="300">
**Endpoint:** `POST https://e2.deployor.hackclub.app/api/v3/new`
**Endpoint:** `POST https://e2.example.hackclub.app/api/v3/new`
**Headers:**
```
@ -140,7 +155,7 @@ Content-Type: application/json
**Request Example:**
```bash
curl --location 'https://e2.deployor.hackclub.app/api/v3/new' \
curl --location 'https://e2.example.hackclub.app/api/v3/new' \
--header 'Authorization: Bearer beans' \
--header 'Content-Type: application/json' \
--data '[
@ -155,31 +170,31 @@ curl --location 'https://e2.deployor.hackclub.app/api/v3/new' \
{
"files": [
{
"deployedUrl": "https://cdn.deployor.dev/s/v3/3e48b91a4599a3841c028e9a683ef5ce58cea372_flag-standalone.svg",
"deployedUrl": "https://cdn.example.dev/s/v3/3e48b91a4599a3841c028e9a683ef5ce58cea372_flag-standalone.svg",
"file": "0_16361167e11b0d172a47e726b40d70e9873c792b_upload_1736985095691",
"sha": "16361167e11b0d172a47e726b40d70e9873c792b",
"size": 90173
},
{
"deployedUrl": "https://cdn.deployor.dev/s/v3/4e48b91a4599a3841c028e9a683ef5ce58cea372_flag-orpheus-left.png",
"deployedUrl": "https://cdn.example.dev/s/v3/4e48b91a4599a3841c028e9a683ef5ce58cea372_flag-orpheus-left.png",
"file": "1_16361167e11b0d172a47e726b40d70e9873c792b_upload_1736985095692",
"sha": "16361167e11b0d172a47e726b40d70e9873c792b",
"size": 80234
},
{
"deployedUrl": "https://cdn.deployor.dev/s/v3/5e48b91a4599a3841c028e9a683ef5ce58cea372_icon-progress-marker.svg",
"deployedUrl": "https://cdn.example.dev/s/v3/5e48b91a4599a3841c028e9a683ef5ce58cea372_icon-progress-marker.svg",
"file": "2_16361167e11b0d172a47e726b40d70e9873c792b_upload_1736985095693",
"sha": "16361167e11b0d172a47e726b40d70e9873c792b",
"size": 70345
},
{
"deployedUrl": "https://cdn.deployor.dev/s/v3/6e48b91a4599a3841c028e9a683ef5ce58cea372_flag-orpheus-right.png",
"deployedUrl": "https://cdn.example.dev/s/v3/6e48b91a4599a3841c028e9a683ef5ce58cea372_flag-orpheus-right.png",
"file": "3_16361167e11b0d172a47e726b40d70e9873c792b_upload_1736985095694",
"sha": "16361167e11b0d172a47e726b40d70e9873c792b",
"size": 60456
}
],
"cdnBase": "https://cdn.deployor.dev"
"cdnBase": "https://cdn.example.dev"
}
```
@ -188,7 +203,7 @@ curl --location 'https://e2.deployor.hackclub.app/api/v3/new' \
<img alt="Version 2" src="https://files.catbox.moe/uuk1vm.png" align="right" width="300">
**Endpoint:** `POST https://e2.deployor.hackclub.app/api/v2/new`
**Endpoint:** `POST https://e2.example.hackclub.app/api/v2/new`
**Headers:**
```
@ -208,9 +223,9 @@ Content-Type: application/json
**Response:**
```json
{
"flag-standalone.svg": "https://cdn.deployor.dev/s/v2/flag-standalone.svg",
"flag-orpheus-left.png": "https://cdn.deployor.dev/s/v2/flag-orpheus-left.png",
"icon-progress-marker.svg": "https://cdn.deployor.dev/s/v2/icon-progress-marker.svg"
"flag-standalone.svg": "https://cdn.example.dev/s/v2/flag-standalone.svg",
"flag-orpheus-left.png": "https://cdn.example.dev/s/v2/flag-orpheus-left.png",
"icon-progress-marker.svg": "https://cdn.example.dev/s/v2/icon-progress-marker.svg"
}
```
</details>
@ -220,7 +235,7 @@ Content-Type: application/json
<img alt="Version 1" src="https://files.catbox.moe/tnzdfe.png" align="right" width="300">
**Endpoint:** `POST https://e2.deployor.hackclub.app/api/v1/new`
**Endpoint:** `POST https://e2.example.hackclub.app/api/v1/new`
**Headers:**
```
@ -240,9 +255,9 @@ Content-Type: application/json
**Response:**
```json
[
"https://cdn.deployor.dev/s/v1/0_flag-standalone.svg",
"https://cdn.deployor.dev/s/v1/1_flag-orpheus-left.png",
"https://cdn.deployor.dev/s/v1/2_icon-progress-marker.svg"
"https://cdn.example.dev/s/v1/0_flag-standalone.svg",
"https://cdn.example.dev/s/v1/1_flag-orpheus-left.png",
"https://cdn.example.dev/s/v1/2_icon-progress-marker.svg"
]
```
</details>
@ -264,7 +279,7 @@ Content-Type: application/json
- **Storage Structure:** `/s/v3/{HASH}_{filename}`
- **File Naming:** `/s/{slackUserId}/{unix}_{sanitizedFilename}`
- **Cost Efficiency:** Uses B2 storage for significant cost savings
- **Cost Efficiency:** Uses object storage for significant cost savings
- **Security:** Token-based authentication for API access
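
To make the two naming schemes above concrete, here is a sketch of how object keys are derived, mirroring `sanitizeFileName`, the SHA-1 naming in the API upload path, and `generateUniqueFileName` for Slack uploads (helper names are illustrative, not the exact repo code):

```javascript
// Sketch of the object-key naming rules described above (illustrative only).
const crypto = require('crypto');

const sanitize = name => name.replace(/[^a-zA-Z0-9.-]/g, '_') || `upload_${Date.now()}`;

// API uploads land under /s/v3/: SHA-1 of the file contents, then the sanitized name.
function v3Key(buffer, originalName) {
  const sha = crypto.createHash('sha1').update(buffer).digest('hex');
  return `s/v3/${sha}_${sanitize(originalName)}`;
}

// Slack uploads land under /s/{slackUserId}/: timestamp, random bytes, sanitized name.
function slackKey(slackUserId, originalName) {
  return `s/${slackUserId}/${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitize(originalName)}`;
}
```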
## 💻 Slack Bot Behavior
@ -279,8 +294,7 @@ Content-Type: application/json
## 💰 Cost Optimization
- Uses Cloudflare CDN with Backblaze B2 storage
- Free egress thanks to Cloudflare-Backblaze Alliance
- Uses S3-compatible object storage
- 87-98% cost reduction compared to Vercel CDN
<div align="center">

View file

@ -2,6 +2,9 @@ const dotenv = require('dotenv');
dotenv.config();
const logger = require('./src/config/logger');
logger.info('Starting CDN application 🚀');
const {App} = require('@slack/bolt');
const fileUpload = require('./src/fileUpload');
const express = require('express');
@ -28,7 +31,12 @@ expressApp.use('/api', apiRoutes);
// Error handling middleware
expressApp.use((err, req, res, next) => {
logger.error('API Error:', err);
logger.error('API Error:', {
error: err.message,
stack: err.stack,
path: req.path,
method: req.method
});
res.status(500).json({ error: 'Internal server error' });
});
@ -40,40 +48,34 @@ expressApp.use((req, res, next) => {
// Event listener for file_shared events
app.event('file_shared', async ({event, client}) => {
logger.debug(`Received file_shared event: ${JSON.stringify(event)}`);
if (parseFloat(event.event_ts) < BOT_START_TIME) {
logger.debug(`Ignoring file event from before bot start: ${new Date(parseFloat(event.event_ts) * 1000).toISOString()}`);
return;
}
const targetChannelId = process.env.SLACK_CHANNEL_ID;
const channelId = event.channel_id;
if (channelId !== targetChannelId) {
logger.debug(`Ignoring file shared in channel: ${channelId}`);
return;
}
if (parseFloat(event.event_ts) < BOT_START_TIME) return;
if (event.channel_id !== process.env.SLACK_CHANNEL_ID) return;
try {
await fileUpload.handleFileUpload(event, client);
} catch (error) {
logger.error(`Error processing file upload: ${error.message}`);
logger.error(`Upload failed: ${error.message}`);
}
});
// Slack bot and API server
// Application startup
(async () => {
try {
await fileUpload.initialize();
await app.start();
const port = parseInt(process.env.PORT || '4553', 10);
expressApp.listen(port, () => {
logger.info(`⚡️ Slack app is running in Socket Mode!`);
logger.info(`🚀 API server is running on port ${port}`);
logger.info('CDN started successfully 🔥', {
slackMode: 'Socket Mode',
apiPort: port,
startTime: new Date().toISOString()
});
});
} catch (error) {
logger.error('Failed to start:', error);
logger.error('Failed to start application:', {
error: error.message,
stack: error.stack
});
process.exit(1);
}
})();

View file

@ -1,15 +1,15 @@
{
"name": "cdn-v2-hackclub",
"version": "1.0.0",
"description": "Slack app and API to upload files to Backblaze B2 with unique URLs",
"description": "Slack app and API to upload files to S3-compatible storage with unique URLs",
"main": "index.js",
"scripts": {
"start": "node index.js"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.478.0",
"@slack/bolt": "^4.2.0",
"@slack/web-api": "^7.8.0",
"backblaze-b2": "^1.3.0",
"cors": "^2.8.5",
"dotenv": "^10.0.0",
"multer": "^1.4.5-lts.1",

View file

@ -6,6 +6,7 @@ const deployEndpoint = async (files) => {
const deployedFiles = files.map(file => ({
deployedUrl: generateApiUrl('v3', file.file),
cdnUrl: getCdnUrl(),
contentType: file.contentType || 'application/octet-stream',
...file
}));
@ -15,7 +16,7 @@ const deployEndpoint = async (files) => {
cdnBase: getCdnUrl()
};
} catch (error) {
logger.error('Deploy error:', error);
logger.error('S3 deploy error:', error);
return {
status: 500,
files: []

View file

@ -77,8 +77,8 @@ router.post('/upload', async (req, res) => {
const result = await handleUpload(req);
res.status(result.status).json(result.body);
} catch (error) {
logger.error('Upload handler error:', error);
res.status(500).json({error: 'Internal server error'});
logger.error('S3 upload handler error:', error);
res.status(500).json({error: 'Storage upload failed'});
}
});

View file

@ -1,6 +1,6 @@
const fetch = require('node-fetch');
const crypto = require('crypto');
const {uploadToBackblaze} = require('../backblaze');
const {uploadToStorage} = require('../storage');
const {generateUrl, getCdnUrl} = require('./utils');
const logger = require('../config/logger');
@ -13,15 +13,19 @@ function sanitizeFileName(fileName) {
return sanitizedFileName;
}
// Handle remote file upload to B2 storage
// Handle remote file upload to S3 storage
const uploadEndpoint = async (url, authorization = null) => {
try {
logger.debug(`Downloading: ${url}`);
logger.debug('Starting download', { url });
const response = await fetch(url, {
headers: authorization ? {'Authorization': authorization} : {}
});
if (!response.ok) throw new Error(`Download failed: ${response.statusText}`);
if (!response.ok) {
const error = new Error(`Download failed: ${response.statusText}`);
error.statusCode = response.status;
throw error;
}
// Generate unique filename using SHA1 (hash) of file contents
const buffer = await response.buffer();
@ -30,27 +34,60 @@ const uploadEndpoint = async (url, authorization = null) => {
const sanitizedFileName = sanitizeFileName(originalName);
const fileName = `${sha}_${sanitizedFileName}`;
// Upload to B2 storage
// Upload to S3 storage
logger.debug(`Uploading: ${fileName}`);
const uploaded = await uploadToBackblaze('s/v3', fileName, buffer);
if (!uploaded) throw new Error('Storage upload failed');
const uploadResult = await uploadToStorage('s/v3', fileName, buffer, response.headers.get('content-type'));
if (uploadResult.success === false) {
throw new Error(`Storage upload failed: ${uploadResult.error}`);
}
return {
url: generateUrl('s/v3', fileName),
sha,
size: buffer.length
size: buffer.length,
type: response.headers.get('content-type')
};
} catch (error) {
logger.error('Upload failed:', error);
throw error;
logger.error('Upload process failed', {
url,
error: error.message,
statusCode: error.statusCode,
stack: error.stack
});
// Build a structured error response
const statusCode = error.statusCode || 500;
const errorResponse = {
error: {
message: error.message,
code: error.code || 'INTERNAL_ERROR',
details: error.details || null
},
success: false
};
throw { statusCode, ...errorResponse };
}
};
// Express request handler for file uploads
const handleUpload = async (req) => {
const url = req.body || await req.text();
const result = await uploadEndpoint(url, req.headers?.authorization);
return {status: 200, body: result};
try {
const url = req.body || await req.text();
const result = await uploadEndpoint(url, req.headers?.authorization);
return { status: 200, body: result };
} catch (error) {
return {
status: error.statusCode || 500,
body: {
error: error.error || {
message: 'Internal server error',
code: 'INTERNAL_ERROR'
},
success: false
}
};
}
};
module.exports = {uploadEndpoint, handleUpload};

View file

@ -1,6 +1,6 @@
const logger = require('../config/logger');
const getCdnUrl = () => process.env.B2_CDN_URL;
const getCdnUrl = () => process.env.AWS_CDN_URL;
const generateUrl = (version, fileName) => {
return `${getCdnUrl()}/${version}/${fileName}`;

View file

@ -1,23 +1,19 @@
const winston = require('winston');
const consoleFormat = winston.format.combine(
winston.format.colorize(),
winston.format.timestamp(),
winston.format.printf(({level, message, timestamp}) => {
return `${timestamp} ${level}: ${message}`;
})
);
const logger = winston.createLogger({
level: process.env.LOG_LEVEL || 'info',
format: consoleFormat,
transports: [
new winston.transports.Console()
]
});
logger.on('error', error => {
console.error('Logger error:', error);
level: 'info',
format: winston.format.combine(
winston.format.timestamp(),
winston.format.colorize(),
winston.format.printf(({ level, message, timestamp, ...meta }) => {
let output = `${timestamp} ${level}: ${message}`;
if (Object.keys(meta).length > 0) {
output += ` ${JSON.stringify(meta)}`;
}
return output;
})
),
transports: [new winston.transports.Console()]
});
module.exports = logger;

View file

@ -1,16 +1,13 @@
const fetch = require('node-fetch');
const path = require('path');
const crypto = require('crypto');
const logger = require('./config/logger');
const {uploadToBackblaze} = require('./backblaze');
const storage = require('./storage');
const {generateFileUrl} = require('./utils');
const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes
const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages)
// processed messages
const processedMessages = new Map();
let uploadLimit;
async function initialize() {
@ -18,16 +15,12 @@ async function initialize() {
uploadLimit = pLimit(CONCURRENT_UPLOADS);
}
// Check if the message is older than 24 hours for when the bot was offline
// Reject messages older than 24 hours (e.g., backlog from when the bot was offline)
function isMessageTooOld(eventTs) {
const eventTime = parseFloat(eventTs) * 1000;
const currentTime = Date.now();
const timeDifference = currentTime - eventTime;
const maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
return timeDifference > maxAge;
return (Date.now() - eventTime) > 24 * 60 * 60 * 1000;
}
// check if the message has already been processed
function isMessageProcessed(messageTs) {
return processedMessages.has(messageTs);
}
@ -36,7 +29,63 @@ function markMessageAsProcessing(messageTs) {
processedMessages.set(messageTs, true);
}
// Processing reaction
// File processing
function sanitizeFileName(fileName) {
let sanitized = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
return sanitized || `upload_${Date.now()}`;
}
function generateUniqueFileName(fileName) {
return `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizeFileName(fileName)}`;
}
// upload functionality
async function processFiles(fileMessage, client) {
const uploadedFiles = [];
const failedFiles = [];
logger.info(`Processing ${fileMessage.files?.length || 0} files`);
for (const file of fileMessage.files || []) {
try {
if (file.size > MAX_FILE_SIZE) {
failedFiles.push(file.name);
continue;
}
const response = await fetch(file.url_private, {
headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
});
if (!response.ok) throw new Error('Download failed');
const buffer = await response.buffer();
const uniqueFileName = generateUniqueFileName(file.name);
const userDir = `s/${fileMessage.user}`;
const success = await uploadLimit(() =>
storage.uploadToStorage(userDir, uniqueFileName, buffer, file.mimetype)
);
if (!success) throw new Error('Upload failed');
uploadedFiles.push({
name: uniqueFileName,
url: generateFileUrl(userDir, uniqueFileName),
contentType: file.mimetype
});
} catch (error) {
logger.error(`Failed: ${file.name} - ${error.message}`);
failedFiles.push(file.name);
}
}
logger.info(`Completed: ${uploadedFiles.length} ok, ${failedFiles.length} failed`);
return {uploadedFiles, failedFiles};
}
// Slack interaction
async function addProcessingReaction(client, event, fileMessage) {
try {
await client.reactions.add({
@ -45,63 +94,10 @@ async function addProcessingReaction(client, event, fileMessage) {
channel: event.channel_id
});
} catch (error) {
logger.error('Failed to add processing reaction:', error.message);
logger.error('Failed to add reaction:', error.message);
}
}
// sanitize file names and ensure it's not empty (I don't even know if that's possible but let's be safe)
function sanitizeFileName(fileName) {
let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
if (!sanitizedFileName) {
sanitizedFileName = 'upload_' + Date.now();
}
return sanitizedFileName;
}
// Generate a unique, non-guessable file name
function generateUniqueFileName(fileName) {
const sanitizedFileName = sanitizeFileName(fileName);
const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`;
return uniqueFileName;
}
// upload files to the /s/ directory
async function processFiles(fileMessage, client) {
const uploadedFiles = [];
const failedFiles = [];
const files = fileMessage.files || [];
for (const file of files) {
if (file.size > MAX_FILE_SIZE) {
failedFiles.push(file.name);
continue;
}
try {
const buffer = await fetch(file.url_private, {
headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
}).then(res => res.buffer());
const uniqueFileName = generateUniqueFileName(file.name);
const userDir = `s/${fileMessage.user}`;
const success = await uploadLimit(() => uploadToBackblaze(userDir, uniqueFileName, buffer));
if (success) {
const url = generateFileUrl(userDir, uniqueFileName);
uploadedFiles.push({name: uniqueFileName, url});
} else {
failedFiles.push(file.name);
}
} catch (error) {
logger.error(`Failed to process file ${file.name}:`, error.message);
failedFiles.push(file.name);
}
}
return {uploadedFiles, failedFiles};
}
// update reactions based on success
async function updateReactions(client, event, fileMessage, success) {
try {
await client.reactions.remove({
@ -119,7 +115,6 @@ async function updateReactions(client, event, fileMessage, success) {
}
}
// find a file message
async function findFileMessage(event, client) {
try {
const fileInfo = await client.files.info({
@ -217,11 +212,10 @@ async function handleFileUpload(event, client) {
const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
await updateReactions(client, event, fileMessage, failedFiles.length === 0);
} catch (error) {
logger.error('Upload failed:', error.message);
logger.error(`Upload failed: ${error.message}`);
await handleError(client, event.channel_id, fileMessage, reactionAdded);
throw error;
}

src/storage.js (new file, 310 lines added)
View file

@ -0,0 +1,310 @@
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const fetch = require('node-fetch'); // needed for response.buffer() below
const path = require('path');
const crypto = require('crypto');
const logger = require('./config/logger');
const {generateFileUrl} = require('./utils');
const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes
const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages)
// processed messages
const processedMessages = new Map();
let uploadLimit;
async function initialize() {
const pLimit = (await import('p-limit')).default;
uploadLimit = pLimit(CONCURRENT_UPLOADS);
}
// Check if the message is older than 24 hours for when the bot was offline
function isMessageTooOld(eventTs) {
const eventTime = parseFloat(eventTs) * 1000;
const currentTime = Date.now();
const timeDifference = currentTime - eventTime;
const maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
return timeDifference > maxAge;
}
// check if the message has already been processed
function isMessageProcessed(messageTs) {
return processedMessages.has(messageTs);
}
function markMessageAsProcessing(messageTs) {
processedMessages.set(messageTs, true);
}
// Processing reaction
async function addProcessingReaction(client, event, fileMessage) {
try {
await client.reactions.add({
name: 'beachball',
timestamp: fileMessage.ts,
channel: event.channel_id
});
} catch (error) {
logger.error('Failed to add processing reaction:', error.message);
}
}
// Sanitize file names and ensure the result is not empty (unlikely, but defensive)
function sanitizeFileName(fileName) {
let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_');
if (!sanitizedFileName) {
sanitizedFileName = 'upload_' + Date.now();
}
return sanitizedFileName;
}
// Generate a unique, non-guessable file name
function generateUniqueFileName(fileName) {
const sanitizedFileName = sanitizeFileName(fileName);
const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`;
return uniqueFileName;
}
// upload files to the /s/ directory
async function processFiles(fileMessage, client) {
const uploadedFiles = [];
const failedFiles = [];
logger.debug('Starting file processing', {
userId: fileMessage.user,
fileCount: fileMessage.files?.length || 0
});
const files = fileMessage.files || [];
for (const file of files) {
logger.debug('Processing file', {
name: file.name,
size: file.size,
type: file.mimetype,
id: file.id
});
if (file.size > MAX_FILE_SIZE) {
logger.warn('File exceeds size limit', {
name: file.name,
size: file.size,
limit: MAX_FILE_SIZE
});
failedFiles.push(file.name);
continue;
}
try {
logger.debug('Fetching file from Slack', {
name: file.name,
url: file.url_private
});
const response = await fetch(file.url_private, {
headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`}
});
if (!response.ok) {
throw new Error(`Slack download failed: ${response.status} ${response.statusText}`);
}
const buffer = await response.buffer();
const contentType = file.mimetype || 'application/octet-stream';
const uniqueFileName = generateUniqueFileName(file.name);
const userDir = `s/${fileMessage.user}`;
const uploadResult = await uploadLimit(() =>
uploadToStorage(userDir, uniqueFileName, buffer, contentType)
);
if (uploadResult.success === false) {
throw new Error(uploadResult.error);
}
const url = generateFileUrl(userDir, uniqueFileName);
uploadedFiles.push({
name: uniqueFileName,
url,
contentType
});
} catch (error) {
logger.error('File processing failed', {
fileName: file.name,
error: error.message,
stack: error.stack,
slackFileId: file.id,
userId: fileMessage.user
});
failedFiles.push(file.name);
}
}
logger.debug('File processing complete', {
successful: uploadedFiles.length,
failed: failedFiles.length
});
return {uploadedFiles, failedFiles};
}
// update reactions based on success
async function updateReactions(client, event, fileMessage, success) {
try {
await client.reactions.remove({
name: 'beachball',
timestamp: fileMessage.ts,
channel: event.channel_id
});
await client.reactions.add({
name: success ? 'white_check_mark' : 'x',
timestamp: fileMessage.ts,
channel: event.channel_id
});
} catch (error) {
logger.error('Failed to update reactions:', error.message);
}
}
// find a file message
async function findFileMessage(event, client) {
try {
const fileInfo = await client.files.info({
file: event.file_id,
include_shares: true
});
if (!fileInfo.ok || !fileInfo.file) {
throw new Error('Could not get file info');
}
const channelShare = fileInfo.file.shares?.public?.[event.channel_id] ||
fileInfo.file.shares?.private?.[event.channel_id];
if (!channelShare || !channelShare.length) {
throw new Error('No share info found for this channel');
}
// Get the exact message using the ts from share info
const messageTs = channelShare[0].ts;
const messageInfo = await client.conversations.history({
channel: event.channel_id,
latest: messageTs,
limit: 1,
inclusive: true
});
if (!messageInfo.ok || !messageInfo.messages.length) {
throw new Error('Could not find original message');
}
return messageInfo.messages[0];
} catch (error) {
logger.error('Error finding file message:', error);
return null;
}
}
async function sendResultsMessage(client, channelId, fileMessage, uploadedFiles, failedFiles) {
let message = `Hey <@${fileMessage.user}>, `;
if (uploadedFiles.length > 0) {
message += `here ${uploadedFiles.length === 1 ? 'is your link' : 'are your links'}:\n`;
message += uploadedFiles.map(f => `${f.name}: ${f.url}`).join('\n');
}
if (failedFiles.length > 0) {
message += `\n\nFailed to process: ${failedFiles.join(', ')}`;
}
await client.chat.postMessage({
channel: channelId,
thread_ts: fileMessage.ts,
text: message
});
}
async function handleError(client, channelId, fileMessage, reactionAdded) {
if (fileMessage && reactionAdded) {
try {
await client.reactions.remove({
name: 'beachball',
timestamp: fileMessage.ts,
channel: channelId
});
} catch (cleanupError) {
if (cleanupError.data?.error !== 'no_reaction') {
logger.error('Cleanup error:', cleanupError);
}
}
try {
await client.reactions.add({
name: 'x',
timestamp: fileMessage.ts,
channel: channelId
});
} catch (cleanupError) {
logger.error('Cleanup error:', cleanupError);
}
}
}
async function handleFileUpload(event, client) {
let fileMessage = null;
let reactionAdded = false;
try {
if (isMessageTooOld(event.event_ts)) return;
fileMessage = await findFileMessage(event, client);
if (!fileMessage || isMessageProcessed(fileMessage.ts)) return;
markMessageAsProcessing(fileMessage.ts);
await addProcessingReaction(client, event, fileMessage);
reactionAdded = true;
const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
await updateReactions(client, event, fileMessage, failedFiles.length === 0);
} catch (error) {
logger.error('Upload failed:', error.message);
await handleError(client, event.channel_id, fileMessage, reactionAdded);
throw error;
}
}
const s3Client = new S3Client({
region: process.env.AWS_REGION,
endpoint: process.env.AWS_ENDPOINT,
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
}
});
async function uploadToStorage(userDir, uniqueFileName, buffer, contentType = 'application/octet-stream') {
try {
const params = {
Bucket: process.env.AWS_BUCKET_NAME,
Key: `${userDir}/${uniqueFileName}`,
Body: buffer,
ContentType: contentType,
CacheControl: 'public, immutable, max-age=31536000'
};
logger.info(`Uploading: ${uniqueFileName}`);
await s3Client.send(new PutObjectCommand(params));
return true;
} catch (error) {
logger.error(`Upload failed: ${error.message}`, {
path: `${userDir}/${uniqueFileName}`,
error: error.message
});
return false;
}
}
module.exports = {
handleFileUpload,
initialize,
uploadToStorage
};

View file

@ -1,6 +1,6 @@
const fs = require('fs');
const path = require('path');
const {uploadToBackblaze} = require('../backblaze');
const {uploadToStorage} = require('../storage');
const {generateUrl} = require('./utils');
const logger = require('../config/logger');
@ -9,16 +9,19 @@ const handleUpload = async (file) => {
try {
const buffer = fs.readFileSync(file.path);
const fileName = path.basename(file.originalname);
// Add content type detection for S3
const contentType = file.mimetype || 'application/octet-stream';
const uniqueFileName = `${Date.now()}-${fileName}`;
// Upload to B2 storage
// Upload to S3 storage with content type
logger.debug(`Uploading: ${uniqueFileName}`);
const uploaded = await uploadToBackblaze('s/v3', uniqueFileName, buffer);
const uploaded = await uploadToStorage('s/v3', uniqueFileName, buffer, contentType);
if (!uploaded) throw new Error('Storage upload failed');
return {
name: fileName,
url: generateUrl('s/v3', uniqueFileName)
url: generateUrl('s/v3', uniqueFileName),
contentType
};
} catch (error) {
logger.error('Upload failed:', error);

View file

@ -1,7 +1,7 @@
// Make the CDN URL
function generateFileUrl(userDir, uniqueFileName) {
const cdnUrl = process.env.B2_CDN_URL;
const cdnUrl = process.env.AWS_CDN_URL;
return `${cdnUrl}/${userDir}/${uniqueFileName}`;
}