# cdn/app/models/upload.rb
# Last commit: "batchy yay" (53f80ef244) by End Nightshade, 2026-02-10 11:14:41 -07:00
# 167 lines, 4.7 KiB, Ruby
# frozen_string_literal: true
require "open-uri"
# An uploaded file owned by a user, backed by an ActiveStorage blob.
# Files may arrive via the web UI, the API, Slack, or a rescue/import
# process (see +provenance+). Full-text search over filenames and
# uploader identity is provided by pg_search.
class Upload < ApplicationRecord
  include PgSearch::Model

  belongs_to :user
  belongs_to :blob, class_name: "ActiveStorage::Blob"

  # Remove the underlying S3 object when the record is destroyed.
  after_destroy :purge_blob

  delegate :filename, :byte_size, :content_type, :checksum, to: :blob

  # Prefix search on the blob's filename only (e.g. "rep" matches "report.pdf").
  pg_search_scope :search_by_filename,
                  associated_against: {
                    blob: :filename
                  },
                  using: {
                    tsearch: { prefix: true }
                  }

  # Broad prefix search across the original source URL, the filename,
  # and the uploader's email/name.
  pg_search_scope :search,
                  against: [ :original_url ],
                  associated_against: {
                    blob: :filename,
                    user: [ :email, :name ]
                  },
                  using: { tsearch: { prefix: true } }

  alias_method :file_size, :byte_size
  alias_method :mime_type, :content_type

  # Where the upload came from; persisted as a string column.
  enum :provenance, {
    slack: "slack",
    web: "web",
    api: "api",
    rescued: "rescued"
  }, validate: true
  validates :provenance, presence: true

  scope :recent, -> { order(created_at: :desc) }
  scope :by_user, ->(user) { where(user: user) }
  scope :today, -> { where("created_at >= ?", Time.zone.now.beginning_of_day) }
  scope :this_week, -> { where("created_at >= ?", Time.zone.now.beginning_of_week) }
  scope :this_month, -> { where("created_at >= ?", Time.zone.now.beginning_of_month) }

  # Human-readable file size, e.g. "4.7 KB".
  def human_file_size
    ActiveSupport::NumberHelper.number_to_human_size(byte_size)
  end

  # Direct URL to the object on the assets CDN host (keyed by blob key).
  def assets_url
    host = ENV.fetch("CDN_ASSETS_HOST", "cdn.hackclub-assets.com")
    "https://#{host}/#{blob.key}"
  end

  # Public CDN URL served through the Rails external_upload route.
  def cdn_url
    Rails.application.routes.url_helpers.external_upload_url(
      id:,
      filename:,
      host: ENV["CDN_HOST"] || "cdn.hackclub.com"
    )
  end

  # Rename the display filename (storage key stays the same).
  # Preserves the original extension if the new name doesn't provide one.
  def rename!(new_filename)
    sanitized = ActiveStorage::Filename.new(new_filename).sanitized
    original_ext = File.extname(blob.filename.to_s)
    new_ext = File.extname(sanitized)
    sanitized = "#{sanitized}#{original_ext}" if new_ext.blank? && original_ext.present?
    blob.update!(filename: sanitized)
  end

  # Downloads +url+ and creates an Upload (plus its backing blob) for +user+.
  #
  # Quota/size limits are enforced twice:
  #   1. A best-effort HEAD pre-check, so oversized files are rejected
  #      before downloading (skipped when HEAD fails or Content-Length
  #      is missing).
  #   2. An authoritative check on the actual downloaded byte size —
  #      this closes the bypass where a server fails HEAD, omits
  #      Content-Length (e.g. chunked responses), or lies about size.
  #
  # Raises RuntimeError on download failure, oversized files, or
  # exceeded storage quota.
  def self.create_from_url(url, user:, provenance:, original_url: nil, authorization: nil, filename: nil)
    conn = build_http_client
    headers = {}
    headers["Authorization"] = authorization if authorization.present?

    pre_check_quota_via_head(conn, url, headers, user)

    response = conn.get(url, nil, headers)
    # follow_redirects handles up to 5 hops; a 3xx here means redirection
    # could not be completed (e.g. missing/unfollowable Location).
    if response.status.between?(300, 399)
      location = response.headers["location"]
      raise "Failed to download: #{response.status} redirect to #{location}"
    end
    raise "Failed to download: #{response.status}" unless response.success?

    filename ||= extract_filename_from_url(url)
    body = response.body

    # Authoritative limit enforcement on the bytes we actually received.
    enforce_quota!(user, body.bytesize)

    # Sniff content type from the bytes; fall back to the server header,
    # then to a generic binary type.
    content_type = Marcel::MimeType.for(StringIO.new(body), name: filename) ||
      response.headers["content-type"] ||
      "application/octet-stream"

    upload_id = SecureRandom.uuid_v7
    sanitized_filename = ActiveStorage::Filename.new(filename).sanitized
    # Key includes the upload id so renames never have to move the object.
    storage_key = "#{upload_id}/#{sanitized_filename}"
    blob = ActiveStorage::Blob.create_and_upload!(
      io: StringIO.new(body),
      filename: filename,
      content_type: content_type,
      identify: false,
      key: storage_key
    )
    create!(
      id: upload_id,
      user: user,
      blob: blob,
      provenance: provenance,
      original_url: original_url
    )
  end

  class << self
    private

    # Faraday client with TLS verification, redirect following (max 5),
    # 30s connect / 120s total timeouts.
    def build_http_client
      Faraday.new(ssl: { verify: true, verify_mode: OpenSSL::SSL::VERIFY_PEER }) do |f|
        f.response :follow_redirects, limit: 5
        f.adapter Faraday.default_adapter
      end.tap do |conn|
        conn.options.open_timeout = 30
        conn.options.timeout = 120
      end
    end

    # Raises if +size+ bytes would exceed the user's per-file limit or
    # remaining storage quota. Shared by the HEAD pre-check and the
    # post-download check so both enforce identical rules.
    def enforce_quota!(user, size)
      quota_service = QuotaService.new(user)
      policy = quota_service.current_policy
      if size > policy.max_file_size
        raise "File too large: #{ActiveSupport::NumberHelper.number_to_human_size(size)} " \
          "exceeds limit of #{ActiveSupport::NumberHelper.number_to_human_size(policy.max_file_size)}"
      end
      return if quota_service.can_upload?(size)
      raise "File would exceed storage quota"
    end

    # Best-effort early rejection using the Content-Length from a HEAD
    # request. Deliberately silent on transport errors (the GET that
    # follows will surface them); quota/size violations still raise.
    def pre_check_quota_via_head(conn, url, headers, user)
      head_response = conn.head(url, nil, headers)
      return unless head_response.success?
      content_length = head_response.headers["content-length"]&.to_i
      return unless content_length && content_length > 0
      enforce_quota!(user, content_length)
    rescue Faraday::Error
      nil
    end

    # Last path segment of the URL, or "download" when the path is empty.
    def extract_filename_from_url(url)
      File.basename(URI.parse(url).path).presence || "download"
    end
  end

  private

  # Blob cleanup after destroy; tolerate the object already being gone.
  def purge_blob
    blob.purge
  rescue Aws::S3::Errors::NoSuchKey
    Rails.logger.info("Blob #{blob.key} already deleted from S3, skipping purge")
  end
end