Compare commits
1 commit
Commit: c049642ffb
14 changed files with 207 additions and 215 deletions
.gitignore (vendored): 2 changes

@@ -5,5 +5,3 @@
 *.dwarf
 data
 torexitnodes.txt
-files
-thumbnails
@@ -1,5 +1,5 @@
 # Based on https://github.com/iv-org/invidious/blob/master/docker/Dockerfile
-FROM crystallang/crystal:1.14.0-alpine AS builder
+FROM crystallang/crystal:1.13.2-alpine AS builder
 
 RUN apk add --no-cache sqlite-static yaml-static
 

@@ -19,8 +19,8 @@ RUN crystal build ./src/file-uploader-crystal.cr \
     --release \
     --static --warnings all
 
-FROM alpine:3.20
-RUN apk add --no-cache tini ffmpeg
+FROM alpine:3.18
+RUN apk add --no-cache tini
 WORKDIR /file-uploader-crystal
 RUN addgroup -g 1000 -S file-uploader-crystal && \
     adduser -u 1000 -S file-uploader-crystal -G file-uploader-crystal
@@ -85,4 +85,5 @@ WantedBy=default.target
 - Small CLI to upload files (like `rpaste` from rustypaste)
 - Add more endpoints to Admin API
 
--
+- Image filters https://github.com/HaschekSolutions/pictshare/blob/master/rtfm/IMAGEFILTERS.md using imagemagick or ffmpeg
+- Strip exif
@@ -1,8 +1,8 @@
-colorize_logs: true
 files: "./files"
 thumbnails: "./thumbnails"
 generateThumbnails: true
-db: "./db/db.sqlite3"
+db: "./db.sqlite3"
+dbTableName: "files"
 adminEnabled: true
 adminApiKey: "asd"
 fileameLength: 3

@@ -15,21 +15,21 @@ torExitNodesCheck: 1600
 torExitNodesUrl: "https://check.torproject.org/exit-addresses"
 torExitNodesFile: "./torexitnodes.txt"
 torMessage: "TOR IS BLOCKED!"
-# Set this to 0 to disable rate limiting
 filesPerIP: 2
+ipTableName: "ips"
 rateLimitPeriod: 20
 rateLimitMessage: ""
 # If you define the unix socket, it will only listen on the socket and not the port.
 #unix_socket: "/tmp/file-uploader.sock"
 # In days
-deleteFilesAfter: 7
+deleteFilesAfter: 1
 # In seconds
 deleteFilesCheck: 1600
 deleteKeyLength: 4
 siteInfo: "Whatever you want to put here"
 siteWarning: "WARNING!"
 log_level: "debug"
 
 blockedExtensions:
   - "exe"
 

@@ -38,9 +38,7 @@ opengraphUseragents:
   - "chatterino-api-cache/"
   - "FFZBot/"
   - "Twitterbot/"
-  - "Synapse/"
-  - "Mastodon/"
 
-# You can leave it empty, or add your own domains.
 alternativeDomains:
-  - "example.com"
+  - "ayaya.beauty"
+  - "lamartina.gay"
@@ -1,17 +0,0 @@
-services:
-  file-uploader:
-    image: git.nadeko.net/fijxu/file-uploader-crystal:latest
-    # This program should never use that many memory and more than 50% of the CPU
-    mem_limit: 512MB
-    cpus: 0.5
-    # If you want to use a custom config file, you can mount it here.
-    volumes:
-      # - ./config/config.yml:/file-uploader-crystal/config/config.yml
-      - ./public:/file-uploader-crystal/public
-      - ./files:/file-uploader-crystal/files
-      - ./thumbnails:/file-uploader-crystal/thumbnails
-      - ./db:/file-uploader-crystal/db
-      - ./torexitnodes.txt:/file-uploader-crystal/torexitnodes.txt
-    ports:
-      - 127.0.0.1:8080:8080
-
@@ -3,8 +3,6 @@ require "yaml"
 class Config
   include YAML::Serializable
 
-  # Colorize logs
-  property colorize_logs : Bool = true
   # Where the uploaded files will be located
   property files : String = "./files"
   # Where the thumbnails will be located when they are successfully generated

@@ -14,6 +12,8 @@ class Config
   property generateThumbnails : Bool = false
   # Where the SQLITE3 database will be located
   property db : String = "./db.sqlite3"
+  # Name of the table that will be used for file information
+  property dbTableName : String = "files"
   # Enable or disable the admin API
   property adminEnabled : Bool = false
   # The API key for admin routes. It's passed as a "X-Api-Key" header to the

@@ -25,10 +25,8 @@ class Config
   property fileameLength : Int32 = 3
   # In MiB
   property size_limit : Int16 = 512
-  # Port on which the uploader will bind
+  # TCP port
   property port : Int32 = 8080
-  # IP address on which the uploader will bind
-  property host : String = "127.0.0.1"
   # A file path where do you want to place a unix socket (THIS WILL DISABLE ACCESS
   # BY IP ADDRESS)
   property unix_socket : String?

@@ -47,6 +45,8 @@ class Config
   property torMessage : String? = "Tor is blocked!"
   # How many files an IP address can upload to the server
   property filesPerIP : Int32 = 32
+  # Name of the table that will be used for rate limit information
+  property ipTableName : String = "ips"
   # How often is the file limit per IP reset? (in seconds)
   property rateLimitPeriod : Int32 = 600
   # TODO: UNUSED CONSTANT

@@ -87,12 +87,5 @@ class Config
       puts "Config: fileameLength cannot be #{config.fileameLength}"
       exit(1)
     end
-
-    if config.files.ends_with?('/')
-      config.files = config.files.chomp('/')
-    end
-    if config.thumbnails.ends_with?('/')
-      config.thumbnails = config.thumbnails.chomp('/')
-    end
   end
 end
@@ -15,11 +15,10 @@ require "./lib/**"
 
 CONFIG = Config.load
 Kemal.config.port = CONFIG.port
-Kemal.config.host_binding = CONFIG.host
 Kemal.config.shutdown_message = false
 Kemal.config.app_name = "file-uploader-crystal"
 # https://github.com/iv-org/invidious/blob/90e94d4e6cc126a8b7a091d12d7a5556bfe369d5/src/invidious.cr#L136C1-L136C61
-LOGGER = LogHandler.new(STDOUT, CONFIG.log_level, CONFIG.colorize_logs)
+LOGGER = LogHandler.new(STDOUT, CONFIG.log_level)
 # Give me a 128 bit CPU
 # MAX_FILES = 58**CONFIG.fileameLength
 SQL = DB.open("sqlite3://#{CONFIG.db}")
@@ -17,7 +17,7 @@ module Handling::Admin
       file = file.to_s
       begin
         fileinfo = SQL.query_one("SELECT filename, extension, thumbnail
-                                  FROM files
+                                  FROM #{CONFIG.dbTableName}
                                   WHERE filename = ?",
                                   file,
                                   as: {filename: String, extension: String, thumbnail: String | Nil})

@@ -29,7 +29,7 @@ module Handling::Admin
           File.delete("#{CONFIG.thumbnails}/#{fileinfo[:thumbnail]}")
         end
         # Delete entry from db
-        SQL.exec "DELETE FROM files WHERE filename = ?", file
+        SQL.exec "DELETE FROM #{CONFIG.dbTableName} WHERE filename = ?", file
         LOGGER.debug "File '#{fileinfo[:filename]}' was deleted"
         successfull_files << file
       rescue ex : DB::NoResultsError

@@ -37,7 +37,7 @@ module Handling::Admin
         failed_files << file
       rescue ex
         LOGGER.error "Unknown error: #{ex.message}"
-        http_error 500,"Unknown error: #{ex.message}"
+        error500 "Unknown error: #{ex.message}"
       end
     end
     json = JSON.build do |j|

@@ -61,7 +61,7 @@ module Handling::Admin
       item = item.to_s
       begin
         # Delete entry from db
-        SQL.exec "DELETE FROM ips WHERE ip = ?", item
+        SQL.exec "DELETE FROM #{CONFIG.ipTableName} WHERE ip = ?", item
         LOGGER.debug "Rate limit for '#{item}' was deleted"
         successfull << item
       rescue ex : DB::NoResultsError

@@ -69,7 +69,7 @@ module Handling::Admin
         failed << item
       rescue ex
         LOGGER.error "Unknown error: #{ex.message}"
-        http_error 500, "Unknown error: #{ex.message}"
+        error500 "Unknown error: #{ex.message}"
       end
     end
     json = JSON.build do |j|

@@ -95,7 +95,7 @@ module Handling::Admin
       begin
         fileinfo = SQL.query_one("SELECT original_filename, filename, extension,
                                   uploaded_at, checksum, ip, delete_key, thumbnail
-                                  FROM files
+                                  FROM #{CONFIG.dbTableName}
                                   WHERE filename = ?",
                                   item,
                                   as: {original_filename: String, filename: String, extension: String,

@@ -107,7 +107,7 @@ module Handling::Admin
         failed << item
       rescue ex
         LOGGER.error "Unknown error: #{ex.message}"
-        http_error 500,"Unknown error: #{ex.message}"
+        error500 "Unknown error: #{ex.message}"
       end
     end
     json = JSON.build do |j|

@@ -152,13 +152,13 @@ module Handling::Admin
 
   # /api/admin/whitelist
   # curl -X GET -H "X-Api-Key: asd" http://localhost:8080/api/admin/torexitnodes | jq
   # def add_ip_to_whitelist(env, nodes)
   #   json = JSON.build do |j|
   #     j.object do
   #       j.field "ips", nodes
   #     end
   #   end
   # end
 
   # /api/admin/blacklist
   # curl -X GET -H "X-Api-Key: asd" http://localhost:8080/api/admin/torexitnodes | jq
@@ -1,8 +1,7 @@
 require "../http-errors"
 require "http/client"
 require "benchmark"
-
-# require "../filters"
+require "../filters"
 
 module Handling
   extend self

@@ -12,13 +11,13 @@ module Handling
     ip_address = Utils.ip_address(env)
     protocol = Utils.protocol(env)
     host = Utils.host(env)
-    # filter = env.params.query["filter"]?
+    filter = env.params.query["filter"]?
     # You can modify this if you want to allow files smaller than 1MiB.
     # This is generally a good way to check the filesize but there is a better way to do it
     # which is inspecting the file directly (If I'm not wrong).
     if CONFIG.size_limit > 0
       if env.request.headers["Content-Length"].to_i > 1048576*CONFIG.size_limit
-        return http_error 413, "File is too big. The maximum size allowed is #{CONFIG.size_limit}MiB"
+        return error413("File is too big. The maximum size allowed is #{CONFIG.size_limit}MiB")
       end
     end
     filename = ""

@@ -33,26 +32,26 @@ module Handling
     HTTP::FormData.parse(env.request) do |upload|
       if upload.filename.nil? || upload.filename.to_s.empty?
         LOGGER.debug "No file provided by the user"
-        return http_error 403, "No file provided"
+        return error403("No file provided")
       end
       # TODO: upload.body is emptied when is copied or read
       # Utils.check_duplicate(upload.dup)
       extension = File.extname("#{upload.filename}")
       if CONFIG.blockedExtensions.includes?(extension.split(".")[1])
-        return http_error 401, "Extension '#{extension}' is not allowed"
+        return error401("Extension '#{extension}' is not allowed")
       end
       filename = Utils.generate_filename
-      file_path = "#{CONFIG.files}/#{filename}#{extension}"
+      file_path = ::File.join ["#{CONFIG.files}", filename + extension]
       File.open(file_path, "w") do |output|
         IO.copy(upload.body, output)
       end
       original_filename = upload.filename
       uploaded_at = Time.utc
       checksum = Utils.hash_file(file_path)
-      # TODO: Apply filters
-      # if filter
-      #   Filters.apply_filter(file_path, filter)
-      # end
+      # Applies filter
+      if filter
+        Filters.apply_filter(file_path, filter)
+      end
     end
     # X-Forwarded-For if behind a reverse proxy and the header is set in the reverse
     # proxy configuration.

@@ -63,14 +62,14 @@ module Handling
     end
     begin
       # Insert SQL data just before returning the upload information
-      SQL.exec "INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+      SQL.exec "INSERT INTO #{CONFIG.dbTableName} VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
         original_filename, filename, extension, uploaded_at, checksum, ip_address, delete_key, nil
-      SQL.exec "INSERT OR IGNORE INTO ips (ip, date) VALUES (?, ?)", ip_address, Time.utc.to_unix
-      # SQL.exec "INSERT OR IGNORE INTO ips (ip) VALUES ('#{ip_address}')"
-      SQL.exec "UPDATE ips SET count = count + 1 WHERE ip = ('#{ip_address}')"
+      SQL.exec "INSERT OR IGNORE INTO #{CONFIG.ipTableName} (ip, date) VALUES (?, ?)", ip_address, Time.utc.to_unix
+      # SQL.exec "INSERT OR IGNORE INTO #{CONFIG.ipTableName} (ip) VALUES ('#{ip_address}')"
+      SQL.exec "UPDATE #{CONFIG.ipTableName} SET count = count + 1 WHERE ip = ('#{ip_address}')"
     rescue ex
       LOGGER.error "An error ocurred when trying to insert the data into the DB: #{ex.message}"
-      return http_error 500, "An error ocurred when trying to insert the data into the DB"
+      return error500("An error ocurred when trying to insert the data into the DB")
     end
     json = JSON.build do |j|
       j.object do

@@ -99,10 +98,10 @@ module Handling
       files = env.params.json["files"].as((Array(JSON::Any)))
     rescue ex : JSON::ParseException
       LOGGER.error "Body malformed: #{ex.message}"
-      return http_error 400, "Body malformed: #{ex.message}"
+      return error400 "Body malformed: #{ex.message}"
     rescue ex
       LOGGER.error "Unknown error: #{ex.message}"
-      return http_error 500, "Unknown error"
+      return error500 "Unknown error"
     end
     successfull_files = [] of NamedTuple(filename: String, extension: String, original_filename: String, checksum: String, delete_key: String | Nil)
     failed_files = [] of String

@@ -119,7 +118,7 @@ module Handling
       if CONFIG.deleteKeyLength > 0
         delete_key = Random.base58(CONFIG.deleteKeyLength)
       end
-      file_path = "#{CONFIG.files}/#{filename}#{extension}"
+      file_path = ::File.join ["#{CONFIG.files}", filename + extension]
       File.open(file_path, "w") do |output|
         begin
           HTTP::Client.get(url) do |res|

@@ -127,7 +126,7 @@ module Handling
           end
         rescue ex
           LOGGER.debug "Failed to download file '#{url}': #{ex.message}"
-          return http_error 403, "Failed to download file '#{url}'"
+          return error403("Failed to download file '#{url}'")
           failed_files << url
         end
       end

@@ -136,7 +135,7 @@ module Handling
       if extension.empty?
         extension = Utils.detect_extension(file_path)
         File.rename(file_path, file_path + extension)
-        file_path = "#{CONFIG.files}/#{filename}#{extension}"
+        file_path = ::File.join ["#{CONFIG.files}", filename + extension]
       end
       # The second one is faster and it uses less memory
       # original_filename = URI.parse("https://ayaya.beauty/PqC").path.split("/").last

@@ -149,7 +148,7 @@ module Handling
       end
       begin
         # Insert SQL data just before returning the upload information
-        SQL.exec("INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+        SQL.exec("INSERT INTO #{CONFIG.dbTableName} VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
           original_filename, filename, extension, uploaded_at, checksum, ip_address, delete_key, nil)
         successfull_files << {filename: filename,
                               original_filename: original_filename,

@@ -158,7 +157,7 @@ module Handling
                               checksum: checksum}
       rescue ex
         LOGGER.error "An error ocurred when trying to insert the data into the DB: #{ex.message}"
-        return http_error 500, "An error ocurred when trying to insert the data into the DB"
+        return error500("An error ocurred when trying to insert the data into the DB")
       end
     end
     json = JSON.build do |j|

@@ -183,6 +182,7 @@ module Handling
     json
   end
 
+  # TODO: If the user
   def upload_url(env)
     env.response.content_type = "application/json"
     ip_address = Utils.ip_address(env)

@@ -202,24 +202,24 @@ module Handling
       if CONFIG.deleteKeyLength > 0
         delete_key = Random.base58(CONFIG.deleteKeyLength)
       end
-      file_path = "#{CONFIG.files}/#{filename}#{extension}"
+      file_path = ::File.join ["#{CONFIG.files}", filename + extension]
       File.open(file_path, "w") do |output|
         begin
-          # TODO: Connect timeout to prevent possible Denial of Service to the external website spamming requests
+          # TODO: Connect timeout to prevent possible Denial of Service spamming requests
           # https://crystal-lang.org/api/1.13.2/HTTP/Client.html#connect_timeout
           HTTP::Client.get(url) do |res|
             IO.copy(res.body_io, output)
           end
         rescue ex
           LOGGER.debug "Failed to download file '#{url}': #{ex.message}"
-          return http_error 403, "Failed to download file '#{url}': #{ex.message}"
+          return error403("Failed to download file '#{url}': #{ex.message}")
           failed_files << url
         end
       end
       if extension.empty?
         extension = Utils.detect_extension(file_path)
         File.rename(file_path, file_path + extension)
-        file_path = "#{CONFIG.files}/#{filename}#{extension}"
+        file_path = ::File.join ["#{CONFIG.files}", filename + extension]
       end
       # The second one is faster and it uses less memory
       # original_filename = URI.parse("https://ayaya.beauty/PqC").path.split("/").last

@@ -232,7 +232,7 @@ module Handling
       end
       begin
         # Insert SQL data just before returning the upload information
-        SQL.exec("INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+        SQL.exec("INSERT INTO #{CONFIG.dbTableName} VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
          original_filename, filename, extension, uploaded_at, checksum, ip_address, delete_key, nil)
         successfull_files << {filename: filename,
                               original_filename: original_filename,

@@ -241,7 +241,7 @@ module Handling
                               checksum: checksum}
       rescue ex
         LOGGER.error "An error ocurred when trying to insert the data into the DB: #{ex.message}"
-        return http_error 500, "An error ocurred when trying to insert the data into the DB"
+        return error500("An error ocurred when trying to insert the data into the DB")
       end
       json = JSON.build do |j|
         j.array do

@@ -266,30 +266,34 @@ module Handling
   end
 
   def retrieve_file(env)
-    protocol = Utils.protocol(env)
-    host = Utils.host(env)
     begin
-      fileinfo = SQL.query_one?("SELECT filename, original_filename, uploaded_at, extension, checksum, thumbnail
-                                 FROM files
+      protocol = Utils.protocol(env)
+      host = Utils.host(env)
+      fileinfo = SQL.query_all("SELECT filename, original_filename, uploaded_at, extension, checksum, thumbnail
+                                FROM #{CONFIG.dbTableName}
                                 WHERE filename = ?",
                                 env.params.url["filename"].split(".").first,
-                                as: {filename: String, ofilename: String, up_at: String, ext: String, checksum: String, thumbnail: String | Nil})
-      if fileinfo.nil?
-        # TODO: Switch this to 404, if I use 404, it will use the kemal error page (ANOYING!)
-        return http_error 418, "File '#{env.params.url["filename"]}' does not exist"
-      end
-    rescue ex
-      LOGGER.debug "Error when retrieving file '#{env.params.url["filename"]}': #{ex.message}"
-      return http_error 500, "Error when retrieving file '#{env.params.url["filename"]}'"
-    end
-    env.response.headers["Content-Disposition"] = "inline; filename*=UTF-8''#{fileinfo[:ofilename]}"
-    # env.response.headers["Last-Modified"] = "#{fileinfo[:up_at]}"
-    env.response.headers["ETag"] = "#{fileinfo[:checksum]}"
+                                as: {filename: String, ofilename: String, up_at: String, ext: String, checksum: String, thumbnail: String | Nil})[0]
+      # Benchmark.ips do |x|
+      #   x.report("header multiple") { headers(env, {"Content-Disposition" => "inline; filename*=UTF-8''#{fileinfo[:ofilename]}",
+      #                                               "Last-Modified" => "#{fileinfo[:up_at]}",
+      #                                               "ETag" => "#{fileinfo[:checksum]}"}) }
+      #   x.report("shorter sleep") do
+      #     env.response.headers["Content-Disposition"] = "inline; filename*=UTF-8''#{fileinfo[:ofilename]}"
+      #     env.response.headers["Last-Modified"] = "#{fileinfo[:up_at]}"
+      #     env.response.headers["ETag"] = "#{fileinfo[:checksum]}"
+      #   end
+      # end
+      # `env.response.headers` is faster than `headers(env, Hash(String, String))`
+      # https://github.com/kemalcr/kemal/blob/3243b8e0e03568ad3bd9f0ad6f445c871605b821/src/kemal/helpers/helpers.cr#L102C1-L104C4
+      env.response.headers["Content-Disposition"] = "inline; filename*=UTF-8''#{fileinfo[:ofilename]}"
+      # env.response.headers["Last-Modified"] = "#{fileinfo[:up_at]}"
+      env.response.headers["ETag"] = "#{fileinfo[:checksum]}"
 
     CONFIG.opengraphUseragents.each do |useragent|
       if env.request.headers.try &.["User-Agent"].includes?(useragent)
         env.response.content_type = "text/html"
         return %(
         <!DOCTYPE html>
         <html lang="en">
         <head>

@@ -302,9 +306,13 @@ module Handling
         </head>
         </html>
         )
+        end
       end
+      send_file env, "#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:ext]}"
+    rescue ex
+      LOGGER.debug "File '#{env.params.url["filename"]}' does not exist: #{ex.message}"
+      return error403("File '#{env.params.url["filename"]}' does not exist")
     end
-    send_file env, "#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:ext]}"
   end
 
   def retrieve_thumbnail(env)

@@ -312,7 +320,7 @@ module Handling
       send_file env, "#{CONFIG.thumbnails}/#{env.params.url["thumbnail"]}"
     rescue ex
       LOGGER.debug "Thumbnail '#{env.params.url["thumbnail"]}' does not exist: #{ex.message}"
-      return http_error 403, "Thumbnail '#{env.params.url["thumbnail"]}' does not exist"
+      return error403("Thumbnail '#{env.params.url["thumbnail"]}' does not exist")
     end
   end
 

@@ -323,7 +331,7 @@ module Handling
       json.object do
         json.field "stats" do
           json.object do
-            json.field "filesHosted", SQL.query_one? "SELECT COUNT (filename) FROM files", as: Int32
+            json.field "filesHosted", SQL.query_one "SELECT COUNT (filename) FROM #{CONFIG.dbTableName}", as: Int32
             json.field "maxUploadSize", CONFIG.size_limit
             json.field "thumbnailGeneration", CONFIG.generateThumbnails
             json.field "filenameLength", CONFIG.fileameLength

@@ -334,16 +342,16 @@ module Handling
       end
     rescue ex
       LOGGER.error "Unknown error: #{ex.message}"
-      return http_error 500, "Unknown error"
+      return error500("Unknown error")
     end
     json_data
   end
 
   def delete_file(env)
-    if SQL.query_one "SELECT EXISTS(SELECT 1 FROM files WHERE delete_key = ?)", env.params.query["key"], as: Bool
+    if SQL.query_one "SELECT EXISTS(SELECT 1 FROM #{CONFIG.dbTableName} WHERE delete_key = ?)", env.params.query["key"], as: Bool
       begin
         fileinfo = SQL.query_all("SELECT filename, extension, thumbnail
-                                  FROM files
+                                  FROM #{CONFIG.dbTableName}
                                   WHERE delete_key = ?",
                                   env.params.query["key"],
                                   as: {filename: String, extension: String, thumbnail: String | Nil})[0]

@@ -355,16 +363,16 @@ module Handling
           File.delete("#{CONFIG.thumbnails}/#{fileinfo[:thumbnail]}")
         end
         # Delete entry from db
-        SQL.exec "DELETE FROM files WHERE delete_key = ?", env.params.query["key"]
+        SQL.exec "DELETE FROM #{CONFIG.dbTableName} WHERE delete_key = ?", env.params.query["key"]
        LOGGER.debug "File '#{fileinfo[:filename]}' was deleted using key '#{env.params.query["key"]}'}"
         return msg("File '#{fileinfo[:filename]}' deleted successfully")
       rescue ex
         LOGGER.error("Unknown error: #{ex.message}")
-        return http_error 500, "Unknown error"
+        return error500("Unknown error")
       end
     else
       LOGGER.debug "Key '#{env.params.query["key"]}' does not exist"
-      return http_error 401, "Delete key '#{env.params.query["key"]}' does not exist. No files were deleted"
+      return error401("Delete key '#{env.params.query["key"]}' does not exist. No files were deleted")
     end
   end
 

@@ -390,7 +398,7 @@ module Handling
   def chatterino_config(env)
     host = Utils.host(env)
     protocol = Utils.protocol(env)
     env.response.content_type = "application/json"
     return %({
       "requestUrl": "#{protocol}://#{host}/upload",
       "formField": "data",

@@ -399,3 +407,4 @@ module Handling
     })
   end
 end
+
@@ -1,9 +1,44 @@
-macro http_error(status_code, message)
+macro error400(message)
   env.response.content_type = "application/json"
-  env.response.status_code = {{status_code}}
+  env.response.status_code = 400
   error_message = {"error" => {{message}}}.to_json
   error_message
 end
 
+macro error401(message)
+  env.response.content_type = "application/json"
+  env.response.status_code = 401
+  error_message = {"error" => {{message}}}.to_json
+  error_message
+end
+
+macro error403(message)
+  env.response.content_type = "application/json"
+  env.response.status_code = 403
+  error_message = {"error" => {{message}}}.to_json
+  error_message
+end
+
+macro error404(message)
+  env.response.content_type = "application/json"
+  env.response.status_code = 404
+  error_message = {"error" => {{message}}}.to_json
+  error_message
+end
+
+macro error413(message)
+  env.response.content_type = "application/json"
+  env.response.status_code = 413
+  error_message = {"error" => {{message}}}.to_json
+  error_message
+end
+
+macro error500(message)
+  env.response.content_type = "application/json"
+  env.response.status_code = 500
+  error_message = {"error" => {{message}}}.to_json
+  error_message
+end
+
 macro msg(message)
   env.response.content_type = "application/json"
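Not part of the diff: a rough sketch of how the two macro styles above are invoked from a Kemal route handler, assuming `env` is in scope as in the rest of the codebase; the route path and message below are made up for illustration.

require "kemal"
# Assumes the macros above are in scope (e.g. via require "../http-errors").

get "/example" do |env|
  # Old helper (removed side): the status code is passed as an argument.
  #   http_error 403, "No file provided"
  # New helpers (added side): one macro per status code, message only.
  # The macro sets the status and content type and returns the JSON body.
  error403("No file provided")
end

Kemal.run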
@@ -8,7 +8,7 @@ module Jobs
     spawn do
       loop do
         Utils.check_old_files
-        sleep CONFIG.deleteFilesCheck.seconds
+        sleep CONFIG.deleteFilesCheck
       end
     end
   end

@@ -17,13 +17,12 @@ module Jobs
     if !CONFIG.blockTorAddresses
       return
     end
-    LOGGER.info("Blocking Tor exit nodes")
     spawn do
       loop do
         Utils.retrieve_tor_exit_nodes
         # Updates the @@exit_nodes array instantly
         Routing.reload_exit_nodes
-        sleep CONFIG.torExitNodesCheck.seconds
+        sleep CONFIG.torExitNodesCheck
       end
     end
   end
@@ -1,6 +1,4 @@
 # https://github.com/iv-org/invidious/blob/master/src/invidious/helpers/logger.cr
-require "colorize"
-
 enum LogLevel
   All = 0
   Trace = 1

@@ -13,9 +11,7 @@ enum LogLevel
 end
 
 class LogHandler < Kemal::BaseLogHandler
-  def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
-    Colorize.enabled = use_color
-    Colorize.on_tty_only!
+  def initialize(@io : IO = STDOUT, @level = LogLevel::Debug)
   end
 
   def call(context : HTTP::Server::Context)

@@ -39,27 +35,28 @@ class LogHandler < Kemal::BaseLogHandler
     context
   end
 
+  def puts(message : String)
+    @io << message << '\n'
+    @io.flush
+  end
+
   def write(message : String)
     @io << message
     @io.flush
   end
 
-  def color(level)
-    case level
-    when LogLevel::Trace then :cyan
-    when LogLevel::Debug then :green
-    when LogLevel::Info then :white
-    when LogLevel::Warn then :yellow
-    when LogLevel::Error then :red
-    when LogLevel::Fatal then :magenta
-    else :default
-    end
+  def set_log_level(level : String)
+    @level = LogLevel.parse(level)
+  end
+
+  def set_log_level(level : LogLevel)
+    @level = level
   end
 
   {% for level in %w(trace debug info warn error fatal) %}
     def {{level.id}}(message : String)
       if LogLevel::{{level.id.capitalize}} >= @level
-        puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
+        puts("#{Time.utc} [{{level.id}}] #{message}")
      end
     end
   {% end %}
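Not part of the diff: a minimal sketch of driving the reworked logger, using only the names defined in the hunks above; the values are illustrative.

logger = LogHandler.new(STDOUT, LogLevel::Info)
logger.set_log_level("debug")          # String overload, parsed with LogLevel.parse
logger.set_log_level(LogLevel::Debug)  # LogLevel overload, equivalent
logger.info "printed, since Info is not below the configured Debug level"
logger.trace "suppressed, since Trace is below the configured Debug level"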
@@ -7,36 +7,12 @@ module Routing
   def reload_exit_nodes
     LOGGER.debug "Updating Tor exit nodes array"
     @@exit_nodes = Utils.load_tor_exit_nodes
-    LOGGER.debug "IPs inside the Tor exit nodes array: #{@@exit_nodes.size}"
+    LOGGER.debug "IPs inside the exit nodes array: #{@@exit_nodes.size}"
   end
 
   before_post "/api/admin/*" do |env|
     if env.request.headers.try &.["X-Api-Key"]? != CONFIG.adminApiKey || nil
-      halt env, status_code: 401, response: http_error 401, "Wrong API Key"
-    end
-  end
-
-  before_post "/upload" do |env|
-    begin
-      ip_info = SQL.query_one?("SELECT ip, count, date FROM ips WHERE ip = ?", Utils.ip_address(env), as: {ip: String, count: Int32, date: Int32})
-    rescue ex
-      LOGGER.error "Error when trying to enforce rate limits: #{ex.message}"
-      next
-    end
-
-    if ip_info.nil?
-      next
-    end
-
-    time_since_first_upload = Time.utc.to_unix - ip_info[:date]
-    time_until_unban = ip_info[:date] - Time.utc.to_unix + CONFIG.rateLimitPeriod
-    if time_since_first_upload > CONFIG.rateLimitPeriod
-      SQL.exec "DELETE FROM ips WHERE ip = ?", ip_info[:ip]
-    end
-    if CONFIG.filesPerIP > 0
-      if ip_info[:count] >= CONFIG.filesPerIP && time_since_first_upload < CONFIG.rateLimitPeriod
-        halt env, status_code: 401, response: http_error 401, "Rate limited! Try again in #{time_until_unban} seconds"
-      end
+      halt env, status_code: 401, response: error401("Wrong API Key")
     end
   end
 

@@ -46,20 +22,38 @@ module Routing
       next
     end
     if CONFIG.blockTorAddresses && @@exit_nodes.includes?(Utils.ip_address(env))
-      halt env, status_code: 401, response: http_error 401, CONFIG.torMessage
+      halt env, status_code: 401, response: error401(CONFIG.torMessage)
+    end
+    # There is a better way to do this
+    if env.request.resource == "/upload"
+      begin
+        ip_info = SQL.query_all("SELECT ip, count, date FROM #{CONFIG.ipTableName} WHERE ip = ?", Utils.ip_address(env), as: {ip: String, count: Int32, date: Int32})[0]
+        time_since_first_upload = Time.utc.to_unix - ip_info[:date]
+        time_until_unban = ip_info[:date] - Time.utc.to_unix + CONFIG.rateLimitPeriod
+        if time_since_first_upload > CONFIG.rateLimitPeriod
+          SQL.exec "DELETE FROM #{CONFIG.ipTableName} WHERE ip = ?", ip_info[:ip]
+        end
+        if ip_info[:count] >= CONFIG.filesPerIP && time_since_first_upload < CONFIG.rateLimitPeriod
+          halt env, status_code: 401, response: error401("Rate limited! Try again in #{time_until_unban} seconds")
+        end
+      rescue ex
+        LOGGER.error "Error when trying to enforce rate limits: #{ex.message}"
+        next
+      end
     end
   end
 
   def register_all
     get "/" do |env|
       host = Utils.host(env)
-      files_hosted = SQL.query_one "SELECT COUNT (filename) FROM files", as: Int32
+      files_hosted = SQL.query_one "SELECT COUNT (filename) FROM #{CONFIG.dbTableName}", as: Int32
       render "src/views/index.ecr"
     end
 
     get "/chatterino" do |env|
       host = Utils.host(env)
       protocol = Utils.protocol(env)
+      files_hosted = SQL.query_one "SELECT COUNT (filename) FROM #{CONFIG.dbTableName}", as: Int32
       render "src/views/chatterino.ecr"
     end
 

@@ -95,7 +89,7 @@ module Routing
       Handling.sharex_config(env)
     end
 
     get "/chatterinoconfig" do |env|
       Handling.chatterino_config(env)
     end
 

@@ -124,8 +118,4 @@ module Routing
     get "/api/admin/torexitnodes" do |env|
       Handling::Admin.retrieve_tor_exit_nodes(env, @@exit_nodes)
     end
-
-    error 404 do
-      "File not found"
-    end
   end
 end
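Not part of the diff: a worked example of the rate-limit arithmetic used in the new "/upload" branch above, with made-up numbers.

# Suppose rateLimitPeriod is 600 seconds and an IP's first upload in the
# current window was recorded at date = 1_700_000_000, checked 450s later.
now  = 1_700_000_450_i64
date = 1_700_000_000_i64
rate_limit_period = 600

time_since_first_upload = now - date                      # => 450
time_until_unban        = date - now + rate_limit_period  # => 150

# 450 < 600, so an IP that already reached filesPerIP uploads is halted with
# "Rate limited! Try again in 150 seconds"; once 600 seconds have passed,
# its row is deleted and the per-IP counter effectively starts over.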
src/utils.cr: 60 changes

@@ -2,13 +2,13 @@ module Utils
   extend self
 
   def create_db
-    if !SQL.query_one "SELECT EXISTS (SELECT 1 FROM sqlite_schema WHERE type='table' AND name='files')
-                       AND EXISTS (SELECT 1 FROM sqlite_schema WHERE type='table' AND name='ips');", as: Bool
+    if !SQL.query_one "SELECT EXISTS (SELECT 1 FROM sqlite_schema WHERE type='table' AND name='#{CONFIG.dbTableName}')
+                       AND EXISTS (SELECT 1 FROM sqlite_schema WHERE type='table' AND name='#{CONFIG.ipTableName}');", as: Bool
       LOGGER.info "Creating sqlite3 database at '#{CONFIG.db}'"
       begin
-        SQL.exec "CREATE TABLE IF NOT EXISTS files
+        SQL.exec "CREATE TABLE IF NOT EXISTS #{CONFIG.dbTableName}
                   (original_filename text, filename text, extension text, uploaded_at text, checksum text, ip text, delete_key text, thumbnail text)"
-        SQL.exec "CREATE TABLE IF NOT EXISTS ips
+        SQL.exec "CREATE TABLE IF NOT EXISTS #{CONFIG.ipTableName}
                   (ip text UNIQUE, count integer DEFAULT 0, date integer)"
       rescue ex
         LOGGER.fatal "#{ex.message}"

@@ -45,25 +45,23 @@ module Utils
 
   def check_old_files
     LOGGER.info "Deleting old files"
-    fileinfo = SQL.query_all("SELECT filename, extension, thumbnail
-                              FROM files
-                              WHERE uploaded_at < datetime('now', '-#{CONFIG.deleteFilesAfter} days')",
-                              as: {filename: String, extension: String, thumbnail: String | Nil})
-
-    fileinfo.each do |file|
-      LOGGER.debug "Deleting file '#{file[:filename]}#{file[:extension]}'"
+    dir = Dir.new("#{CONFIG.files}")
+    # Delete entries from DB
+    SQL.exec "DELETE FROM #{CONFIG.dbTableName} WHERE uploaded_at < date('now', '-#{CONFIG.deleteFilesAfter} days');"
+    # Delete files
+    dir.each_child do |file|
+      if (Time.utc - File.info("#{CONFIG.files}/#{file}").modification_time).days >= CONFIG.deleteFilesAfter
+        LOGGER.debug "Deleting file '#{file}'"
         begin
-          File.delete("#{CONFIG.files}/#{file[:filename]}#{file[:extension]}")
-          if file[:thumbnail]
-            File.delete("#{CONFIG.thumbnails}/#{file[:thumbnail]}")
+          File.delete("#{CONFIG.files}/#{file}")
+        rescue ex
+          LOGGER.error "#{ex.message}"
         end
-          SQL.exec "DELETE FROM files WHERE filename = ?", file[:filename]
-        rescue ex
-          LOGGER.error "#{ex.message}"
-          # Also delete the file entry from the DB if it doesn't exist.
-          SQL.exec "DELETE FROM files WHERE filename = ?", file[:filename]
       end
     end
+    # Close directory to prevent `Too many open files (File::Error)` error.
+    # This is because the directory class is still saved on memory for some reason.
+    dir.close
   end
 
   def check_dependencies

@@ -71,7 +69,7 @@ module Utils
     dependencies.each do |dep|
       next if !CONFIG.generateThumbnails
       if !Process.find_executable(dep)
-        LOGGER.fatal("'#{dep}' was not found.")
+        LOGGER.fatal("'#{dep}' was not found, this is necessary to")
         exit(1)
       end
     end

@@ -79,7 +77,7 @@ module Utils
 
   # TODO:
   # def check_duplicate(upload)
-  #   file_checksum = SQL.query_all("SELECT checksum FROM files WHERE original_filename = ?", upload.filename, as:String).try &.[0]?
+  #   file_checksum = SQL.query_all("SELECT checksum FROM #{CONFIG.dbTableName} WHERE original_filename = ?", upload.filename, as:String).try &.[0]?
   #   if file_checksum.nil?
   #     return
   #   else

@@ -103,9 +101,8 @@ module Utils
   # TODO: Check if there are no other possibilities to get a random filename and exit
   def generate_filename
     filename = Random.base58(CONFIG.fileameLength)
-
     loop do
-      if SQL.query_one("SELECT COUNT(filename) FROM files WHERE filename = ?", filename, as: Int32) == 0
+      if SQL.query_one("SELECT COUNT(filename) FROM #{CONFIG.dbTableName} WHERE filename = ?", filename, as: Int32) == 0
         return filename
       else
         LOGGER.debug "Filename collision! Generating a new filename"

@@ -115,12 +112,6 @@ module Utils
   end
 
   def generate_thumbnail(filename, extension)
-    exts = [".jpg", ".jpeg", ".png", ".gif", ".bmp", ".tiff", ".webp", ".heic", ".jxl", ".avif", ".crw", ".dng",
-            ".mp4", ".mkv", ".webm", ".avi", ".wmv", ".flv", "m4v", ".mov", ".amv", ".3gp", ".mpg", ".mpeg", ".yuv"]
-    # To prevent thumbnail generation on non image extensions
-    return if exts.none? do |ext|
-      extension.downcase.includes?(ext)
-    end
     # Disable generation if false
     return if !CONFIG.generateThumbnails || !CONFIG.thumbnails
     LOGGER.debug "Generating thumbnail for #{filename + extension} in background"

@@ -139,9 +130,8 @@ module Utils
     ])
     if process.exit_code == 0
       LOGGER.debug "Thumbnail for #{filename + extension} generated successfully"
-      SQL.exec "UPDATE files SET thumbnail = ? WHERE filename = ?", filename + ".jpg", filename
+      SQL.exec "UPDATE #{CONFIG.dbTableName} SET thumbnail = ? WHERE filename = ?", filename + ".jpg", filename
     else
-      # TODO: Add some sort of message when the thumbnail is not generated
     end
   end
 

@@ -169,11 +159,11 @@ module Utils
       # Delete file
       File.delete("#{CONFIG.files}/#{fileinfo[:filename]}#{fileinfo[:extension]}")
       if fileinfo[:thumbnail]
+        # Delete thumbnail
         File.delete("#{CONFIG.thumbnails}/#{fileinfo[:thumbnail]}")
       end
       # Delete entry from db
-      SQL.exec "DELETE FROM files WHERE delete_key = ?", env.params.query["key"]
-
+      SQL.exec "DELETE FROM #{CONFIG.dbTableName} WHERE delete_key = ?", env.params.query["key"]
       LOGGER.debug "File '#{fileinfo[:filename]}' was deleted using key '#{env.params.query["key"]}'}"
       msg("File '#{fileinfo[:filename]}' deleted successfully")
     end

@@ -230,7 +220,7 @@ module Utils
       begin
        File.open(CONFIG.torExitNodesFile, "w") { |output| IO.copy(res.body_io, output) }
       rescue ex
-        LOGGER.error "Failed to save exit nodes list: #{ex.message}"
+        LOGGER.error "Failed to write to file: #{ex.message}"
       end
     else
       LOGGER.error "Failed to retrieve exit nodes list. Status Code: #{res.status_code}"