0.8.0: Single SQL queries, video thumbnails, UNIX socket permissions fixed.
- Same 'checksum' variable name everywhere
- Use GMT timezone for uploaded files
- More efficient checksum method
parent 8bbb33a77f
commit fe1417180a
9 changed files with 132 additions and 45 deletions

@@ -9,6 +9,7 @@ Already replaced lol.
 - Temporary file uploads like Uguu
 - File deletion link (not available in frontend for now)
 - Chatterino and ShareX support
+- Video Thumbnails for Chatterino and FrankerFaceZ
 - Unix socket support if you don't want to deal with all the TCP overhead
 - Automatic protocol detection (HTTPS or HTTP)
 - Low memory usage: between 6MB at idle and 25MB while a file is being uploaded or retrieved. It will depend on your traffic.

@@ -6,7 +6,7 @@ filename_length: 3
 size_limit: 512
 port: 8080
 # If you define the unix socket, it will only listen on the socket and not the port.
-unix_socket: "/tmp/file-uploader.sock"
+#unix_socket: "/tmp/file-uploader.sock"
 # In days
 delete_files_after: 7
 # In seconds

@@ -1,5 +1,5 @@
 name: file-uploader
-version: 0.7.0
+version: 0.8.0

 authors:
   - Fijxu <fijxu@nadeko.net>

@@ -4,6 +4,7 @@ class Config
   include YAML::Serializable

   property files : String = "./files"
+  property thumbnails : String = "./thumbnails"
   property db : String = "./db.sqlite3"
   property db_table_name : String = "files"
   property filename_length : Int8 = 3

@@ -15,12 +15,13 @@ require "./lib/**"

 CONFIG = Config.load
 Kemal.config.port = CONFIG.port
-SQL = DB.open("sqlite3://#{CONFIG.db}")
 Kemal.config.shutdown_message = false
 Kemal.config.app_name = "file-uploader-crystal"
 # https://github.com/iv-org/invidious/blob/90e94d4e6cc126a8b7a091d12d7a5556bfe369d5/src/invidious.cr#L136C1-L136C61
 # OUTPUT = File.open(File::NULL, "w")
 LOGGER = LogHandler.new(STDOUT, CONFIG.log_level)
 # Give me a 128 bit CPU
 # MAX_FILES = 58**CONFIG.filename_length
+SQL = DB.open("sqlite3://#{CONFIG.db}")
+
 # https://github.com/iv-org/invidious/blob/90e94d4e6cc126a8b7a091d12d7a5556bfe369d5/src/invidious.cr#L78
 CURRENT_BRANCH = {{ "#{`git branch | sed -n '/* /s///p'`.strip}" }}

@@ -41,8 +42,12 @@ Jobs.run
 # Set permissions to 777 so NGINX can read and write to it (BROKEN)
 if !CONFIG.unix_socket.nil?
+  sleep 1.second
-  LOGGER.info "Setting sock permissions to 777"
+  LOGGER.info "Changing socket permissions to 777"
+  begin
     File.chmod("#{CONFIG.unix_socket}", File::Permissions::All)
+  rescue ex
+    LOGGER.fatal "#{ex.message}"
+  end
 end

 sleep

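Read together with the bind_unix call in src/jobs.cr further down, the intent of this hunk is roughly the following (a sketch using the same CONFIG and LOGGER names, not the literal file contents): Kemal creates the socket in another fiber, so the code waits briefly and then relaxes the permissions so NGINX can read from and write to it.

    # Sketch: wait for Kemal to create the socket, then open it up for NGINX.
    if socket = CONFIG.unix_socket
      sleep 1.second                                # socket is created in another fiber
      begin
        File.chmod(socket, File::Permissions::All)  # equivalent to chmod 0o777
      rescue ex
        LOGGER.fatal "#{ex.message}"
      end
    end
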
@@ -17,12 +17,14 @@ module Handling
     extension = ""
     original_filename = ""
     uploaded_at = ""
-    file_hash = ""
+    checksum = ""
     ip_address = ""
     delete_key = nil
     # TODO: Return the file that matches a checksum inside the database
     HTTP::FormData.parse(env.request) do |upload|
       next if upload.filename.nil? || upload.filename.to_s.empty?
       # TODO: upload.body is emptied when it is copied or read
       # Utils.check_duplicate(upload.dup)
       extension = File.extname("#{upload.filename}")
       if CONFIG.blocked_extensions.includes?(extension.split(".")[1])
         error401("Extension '#{extension}' is not allowed")

@@ -33,8 +35,8 @@
         IO.copy(upload.body, file)
       end
       original_filename = upload.filename
-      uploaded_at = Time.utc
-      file_hash = Utils.hash_file(file_path)
+      uploaded_at = Time::Format::HTTP_DATE.format(Time.utc)
+      checksum = Utils.hash_file(file_path)
       # X-Forwarded-For if behind a reverse proxy and the header is set in the reverse
       # proxy configuration.
       ip_address = env.request.headers.try &.["X-Forwarded-For"]? ? env.request.headers["X-Forwarded-For"] : env.request.remote_address.to_s.split(":").first

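Storing uploaded_at with Time::Format::HTTP_DATE is the "GMT timezone" change from the commit message: the value is an RFC 7231 date in GMT, so it can later be sent back verbatim in the Last-Modified header (as retrieve_file does below). A quick illustration:

    # Produces something like "Sun, 06 Nov 1994 08:49:37 GMT"
    uploaded_at = Time::Format::HTTP_DATE.format(Time.utc)
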
@@ -49,7 +51,7 @@
         j.field "id", filename
         j.field "ext", extension
         j.field "name", original_filename
-        j.field "checksum", file_hash
+        j.field "checksum", checksum
         if CONFIG.delete_key_length > 0
           delete_key = Random.base58(CONFIG.delete_key_length)
           j.field "deleteKey", delete_key

@@ -57,14 +59,21 @@
           end
         end
       end
+      begin
+        LOGGER.debug "Generating thumbnail in background"
+        spawn { Utils.generate_thumbnail(filename, extension) }
+      rescue ex
+        LOGGER.error "An error occurred when trying to generate a thumbnail: #{ex.message}"
+      end
       begin
         # Insert SQL data just before returning the upload information
-        SQL.exec "INSERT INTO #{CONFIG.db_table_name} VALUES (?, ?, ?, ?, ?, ?, ?)",
-          original_filename, filename, extension, uploaded_at, file_hash, ip_address, delete_key
+        SQL.exec "INSERT INTO #{CONFIG.db_table_name} VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+          original_filename, filename, extension, uploaded_at, checksum, ip_address, delete_key, nil
       rescue ex
         LOGGER.error "An error occurred when trying to insert the data into the DB: #{ex.message}"
         error500("An error occurred when trying to insert the data into the DB")
       end

       return json
     else
       LOGGER.debug "No file provided by the user"

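One caveat about the thumbnail block above: a begin/rescue wrapped around spawn only covers the call that creates the fiber, so an exception raised inside Utils.generate_thumbnail is not caught there. A variant that rescues inside the fiber would look roughly like this (a sketch, not part of the diff):

    spawn do
      begin
        Utils.generate_thumbnail(filename, extension)
      rescue ex
        LOGGER.error "An error occurred when trying to generate a thumbnail: #{ex.message}"
      end
    end
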
@@ -73,20 +82,55 @@ module Handling
   end

   def retrieve_file(env)
+    protocol = env.request.headers.try &.["X-Forwarded-Proto"]? ? env.request.headers["X-Forwarded-Proto"] : "http"
+    host = env.request.headers.try &.["X-Forwarded-Host"]? ? env.request.headers["X-Forwarded-Host"] : env.request.headers["Host"]
     begin
       LOGGER.debug "#{env.request.headers["X-Forwarded-For"]} /#{env.params.url["filename"]}"
     rescue
       LOGGER.debug "NO X-Forwarded-For @ /#{env.params.url["filename"]}"
+    fileinfo = SQL.query_all("SELECT filename, original_filename, uploaded_at, extension, checksum
+                              FROM #{CONFIG.db_table_name}
+                              WHERE filename = ?",
+                             env.params.url["filename"],
+                             as: {filename: String, ofilename: String, up_at: String, ext: String, checksum: String})[0]

+    headers(env, {"Content-Disposition" => "inline; filename*=UTF-8''#{fileinfo[:ofilename]}"})
+    headers(env, {"Last-Modified" => "#{fileinfo[:up_at]}"})
+    headers(env, {"ETag" => "#{fileinfo[:checksum]}"})

+    if env.request.headers.try &.["User-Agent"].includes?("chatterino-api-cache/") || env.request.headers.try &.["User-Agent"].includes?("FFZBot/")
+      env.response.content_type = "text/html"
+      return %(
+        <!DOCTYPE html>
+        <html lang="en">
+        <head>
+          <meta charset="UTF-8">
+          <meta property="og:title" content="#{fileinfo[:ofilename]}">
+          <meta property="og:image" content="#{protocol}://#{host}#{CONFIG.thumbnails.split(".")[1]}/#{fileinfo[:filename]}.jpg">
+        </head>
+        </html>
+      )
+    end
     begin
-      filename = SQL.query_one "SELECT filename FROM #{CONFIG.db_table_name} WHERE filename = ?", env.params.url["filename"].to_s.split(".").first, as: String
-      original_filename = SQL.query_one "SELECT original_filename FROM #{CONFIG.db_table_name} WHERE filename = ?", env.params.url["filename"].to_s.split(".").first, as: String
-      extension = SQL.query_one "SELECT extension FROM #{CONFIG.db_table_name} WHERE filename = ?", filename, as: String
-      headers(env, {"Content-Disposition" => "inline; filename*=UTF-8''#{original_filename}"})
-      send_file env, "#{CONFIG.files}/#{filename}#{extension}"
+      send_file env, "#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:ext]}"
     rescue ex
-      LOGGER.debug "File #{filename} does not exist: #{ex.message}"
-      error403("File #{filename} does not exist")
+      LOGGER.debug "File '#{env.params.url["filename"]}' does not exist: #{ex.message}"
+      error403("File '#{env.params.url["filename"]}' does not exist")
     end
   end

+  def retrieve_thumbnail(env)
+    begin
+      # fileinfo = SQL.query_all("SELECT filename, original_filename, uploaded_at, extension, checksum
+      #                           FROM #{CONFIG.db_table_name}
+      #                           WHERE filename = ?",
+      #                          env.params.url["filename"],
+      #                          as: {filename: String, ofilename: String, up_at: String, ext: String, checksum: String})[0]

+      # headers(env, {"Content-Disposition" => "inline; filename*=UTF-8''#{fileinfo[:ofilename]}"})
+      # headers(env, {"Last-Modified" => "#{fileinfo[:up_at]}"})
+      # headers(env, {"ETag" => "#{fileinfo[:checksum]}"})

+      send_file env, "#{CONFIG.thumbnails}/#{env.params.url["thumbnail"]}"
+    rescue ex
+      LOGGER.debug "Thumbnail '#{env.params.url["thumbnail"]}' does not exist: #{ex.message}"
+      error403("Thumbnail '#{env.params.url["thumbnail"]}' does not exist")
+    end
+  end

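The retrieve_file rewrite is the "single SQL queries" part of this release: three separate query_one round-trips are folded into one query_all call that decodes the row into a NamedTuple, which then feeds the response headers, the Open Graph page, and send_file. The core of the pattern, using the table and column names from this diff:

    # One query, decoded straight into a NamedTuple; [0] picks the first row.
    fileinfo = SQL.query_all("SELECT filename, extension FROM #{CONFIG.db_table_name} WHERE filename = ?",
                             env.params.url["filename"],
                             as: {filename: String, ext: String})[0]
    send_file env, "#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:ext]}"
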
@@ -113,12 +157,16 @@ module Handling
   def delete_file(env)
     if SQL.query_one "SELECT EXISTS(SELECT 1 FROM #{CONFIG.db_table_name} WHERE delete_key = ?)", env.params.query["key"], as: Bool
       begin
-        file_to_delete = SQL.query_one "SELECT filename FROM #{CONFIG.db_table_name} WHERE delete_key = ?", env.params.query["key"], as: String
-        file_extension = SQL.query_one "SELECT extension FROM #{CONFIG.db_table_name} WHERE delete_key = ?", env.params.query["key"], as: String
-        File.delete("#{CONFIG.files}/#{file_to_delete}#{file_extension}")
+        fileinfo = SQL.query_all("SELECT filename, extension
+                                  FROM #{CONFIG.db_table_name}
+                                  WHERE delete_key = ?",
+                                 env.params.query["key"],
+                                 as: {filename: String, extension: String})[0]
+
+        File.delete("#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:extension]}")
         SQL.exec "DELETE FROM #{CONFIG.db_table_name} WHERE delete_key = ?", env.params.query["key"]
-        LOGGER.debug "File '#{file_to_delete}' was deleted using key '#{env.params.query["key"]}'"
-        msg("File '#{file_to_delete}' deleted successfully")
+        LOGGER.debug "File '#{fileinfo[:filename]}' was deleted using key '#{env.params.query["key"]}'"
+        msg("File '#{fileinfo[:filename]}' deleted successfully")
       rescue ex
         LOGGER.error("Unknown error: #{ex.message}")
         error500("Unknown error")

src/jobs.cr: 11 changed lines

@@ -5,17 +5,16 @@ module Jobs
       LOGGER.info "File deletion is disabled"
       return
     end
-    fiber = Fiber.new do
+    spawn do
       loop do
        Utils.check_old_files
        sleep CONFIG.delete_files_after_check_seconds
      end
    end
-    return fiber
   end

   def self.kemal
-    fiber = Fiber.new do
+    spawn do
       if !CONFIG.unix_socket.nil?
         Kemal.run do |config|
           config.server.not_nil!.bind_unix "#{CONFIG.unix_socket}"

@@ -24,12 +23,10 @@ module Jobs
         Kemal.run
       end
     end
-    return fiber
   end

   def self.run
-    # Tries to run the .enqueue method, if it is not able to it will just not execute.
-    check_old_files.try &.enqueue
-    kemal.try &.enqueue
+    check_old_files
+    kemal
   end
 end

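The jobs changes swap Fiber.new plus a later .enqueue for spawn, which creates the fiber and schedules it in one step, so Jobs.run can simply call the helpers. A minimal illustration of the difference:

    # `spawn` both creates and schedules the fiber; no .enqueue needed afterwards.
    spawn { puts "background work" }
    Fiber.yield # let the scheduled fiber run
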
@@ -20,6 +20,10 @@ module Routing
       Handling.retrieve_file(env)
     end

+    get "/thumbnails/:thumbnail" do |env|
+      Handling.retrieve_thumbnail(env)
+    end
+
     get "/delete" do |env|
       Handling.delete_file(env)
     end

src/utils.cr: 45 changed lines

@@ -6,7 +6,7 @@ module Utils
     LOGGER.info "Creating sqlite3 database at '#{CONFIG.db}'"
     begin
       SQL.exec "CREATE TABLE IF NOT EXISTS #{CONFIG.db_table_name}
-      (original_filename text, filename text, extension text, uploaded_at text, hash text, ip text, delete_key text)"
+      (original_filename text, filename text, extension text, uploaded_at text, checksum text, ip text, delete_key text, thumbnail text)"
     rescue ex
       LOGGER.fatal "#{ex.message}"
       exit(1)

@@ -47,14 +47,27 @@ module Utils
     dir.close
   end

+  # TODO:
+  # def check_duplicate(upload)
+  #   file_checksum = SQL.query_all("SELECT checksum FROM #{CONFIG.db_table_name} WHERE original_filename = ?", upload.filename, as: String).try &.[0]?
+  #   if file_checksum.nil?
+  #     return
+  #   else
+  #     uploaded_file_checksum = hash_io(upload.body)
+  #     pp file_checksum
+  #     pp uploaded_file_checksum
+  #     if file_checksum == uploaded_file_checksum
+  #       puts "Dupl"
+  #     end
+  #   end
+  # end

   def hash_file(file_path : String)
-    File.open(file_path, "r") do |file|
-      # https://crystal-lang.org/api/master/IO/Digest.html
-      buffer = Bytes.new(256)
-      io = IO::Digest.new(file, Digest::SHA1.new)
-      io.read(buffer)
-      return io.final.hexstring
+    Digest::SHA1.hexdigest &.file(file_path)
   end

+  def hash_io(file_path : IO)
+    Digest::SHA1.hexdigest &.update(file_path)
+  end

   # TODO: Check if there are no other possibilities to get a random filename and exit

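The one-line hash_file is the "more efficient checksum method" from the commit message: the old version read a single 256-byte buffer through IO::Digest before finalizing, so it effectively hashed only the start of the file, while Digest#file streams the whole file through the digest. A small illustration ("file.bin" is a placeholder path):

    require "digest/sha1"

    checksum = Digest::SHA1.hexdigest &.file("file.bin") # hex SHA-1 of the entire file
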
@@ -70,6 +83,24 @@ module Utils
     end
   end

+  # TODO: Thumbnail generation for videos. Done but error checking IS NOT DONE
+  def generate_thumbnail(filename, extension)
+    Process.run("ffmpeg",
+      [
+        "-hide_banner",
+        "-i",
+        "#{CONFIG.files}/#{filename + extension}",
+        "-movflags", "faststart",
+        "-f", "mjpeg",
+        "-q:v", "2",
+        "-vf", "scale='min(350,iw)':'min(350,ih)':force_original_aspect_ratio=decrease, thumbnail=100",
+        "-frames:v", "1",
+        "-update", "1",
+        "#{CONFIG.thumbnails}/#{filename}.jpg",
+      ])
+    SQL.exec "UPDATE #{CONFIG.db_table_name} SET thumbnail = ? WHERE filename = ?", filename + ".jpg", filename
+  end
+
   # Delete the socket if it has not been previously cleaned up by the server (due to unclean exits, crashes, etc.)
   def delete_socket
     if File.exists?("#{CONFIG.unix_socket}")