This repository has been archived on 2023-11-26. You can view files and clone it, but cannot push or open issues or pull requests.
bdfr-browser/lib/bdfr_browser/http/plug.ex

314 lines
8.6 KiB
Elixir
Raw Normal View History

2023-05-17 22:13:55 +00:00
defmodule BdfrBrowser.HTTP.Plug do
  # HTTP router for browsing a bdfr (Bulk Downloader for Reddit) archive:
  # index, subreddit/post listings, chats, users, search, media serving and
  # import/maintenance hooks.
  use Plug.Router

  alias BdfrBrowser.{Chat, Comment, Importer, Message, Repo, Post, Subreddit}

  plug :match
  plug :dispatch
# Landing page: lists archived subreddits/users, simulated multireddits and
# saved searches from the cached config file.
get "/" do
  config_file = Application.fetch_env!(:bdfr_browser, :config_file)

  # Case-insensitive sort so "AskReddit" and "askreddit" sort together.
  archived_subreddits = Enum.sort_by(config_file["archived_subreddits"], &String.downcase/1)
  archived_users = Enum.sort_by(config_file["archived_users"], &String.downcase/1)
  simulated_multireddits = Enum.sort_by(config_file["simulated_multireddits"], fn {t, _} -> String.downcase(t) end)
  saved_searches = Enum.sort_by(config_file["saved_searches"], &String.downcase/1)

  tpl_args = [
    subreddits: archived_subreddits,
    users: archived_users,
    multireddits: simulated_multireddits,
    searches: saved_searches
  ]

  content = render_template("index", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Lists every subreddit known to the database, flagging which ones are in the
# configured archive list.
get "/all/subreddits" do
  config_file = Application.fetch_env!(:bdfr_browser, :config_file)
  # MapSet gives the template O(1) membership checks.
  archived_subreddits = MapSet.new(config_file["archived_subreddits"])
  tpl_args = [subreddits: Subreddit.names() |> Repo.all(), archived_subreddits: archived_subreddits]

  content = render_template("all_subreddits", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Subreddit overview: months that contain posts, plus per-subreddit stats.
get "/r/:subreddit" do
  # NOTE(review): Repo.get_by/2 returns nil for unknown subreddit names —
  # presumably Post.date_listing/1 then yields no rows; confirm it doesn't raise.
  subreddit_record = Repo.get_by(Subreddit, name: subreddit)

  tpl_args = [
    subreddit: subreddit,
    dates: subreddit_record |> Post.date_listing() |> Repo.all(),
    statistics: subreddit |> Subreddit.statistics() |> Repo.all()
  ]

  content = render_template("subreddit", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Posts of one subreddit for a given month.
get "/r/:subreddit/:date" do
  subreddit_record = Repo.get_by(Subreddit, name: subreddit)

  tpl_args = [
    subreddit: subreddit,
    date: date,
    posts: subreddit_record |> Post.during_month(date) |> Repo.all()
  ]

  content = render_template("subreddit_posts", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Single post page with its comments and any media files found on disk.
get "/r/:subreddit/:date/:id" do
  # NOTE(review): post_record is nil for an unknown id, so .filename below
  # raises instead of returning 404 — confirm whether that is intended.
  post_record = id |> Post.get_full() |> Repo.one()

  tpl_args = [
    subreddit: subreddit,
    date: date,
    post: post_record,
    media: post_media(post_record.filename, paths: [subreddit, date]),
    comment_template: Application.app_dir(:bdfr_browser, "priv/templates/http/_comment.eex")
  ]

  content = render_template("post", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Simulated multireddit overview; subreddits are "+"-separated in the URL
# (e.g. /m/foo+bar), mirroring Reddit's multireddit URL scheme.
get "/m/:subreddits" do
  subreddit_names = String.split(subreddits, "+")
  subreddit_records = Subreddit.multiple_names(subreddit_names) |> Repo.all()

  tpl_args = [
    subreddit: subreddit_names,
    dates: subreddit_records |> Post.date_listing() |> Repo.all(),
    statistics: subreddit_names |> Subreddit.statistics() |> Repo.all()
  ]

  content = render_template("subreddit", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Posts of a "+"-separated multireddit for a given month.
get "/m/:subreddits/:date" do
  names = String.split(subreddits, "+")
  records = names |> Subreddit.multiple_names() |> Repo.all()
  posts = records |> Post.during_month(date) |> Repo.all()

  content = render_template("subreddit_posts", subreddit: names, date: date, posts: posts)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Overview of all archived chats.
get "/chats" do
  chats = Chat.listing() |> Repo.all()
  content = render_template("chats", chats: chats)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Single chat with all of its messages.
get "/chats/:id" do
  chat = Repo.get(Chat, id)
  messages = chat |> Message.listing() |> Repo.all()

  content = render_template("chat", chat: chat, messages: messages)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Per-user page: everything authored by `name` (posts, comments, chats).
get "/user/:name" do
  tpl_args = [
    name: name,
    posts: name |> Post.by_author() |> Repo.all(),
    comments: name |> Comment.by_author() |> Repo.all(),
    chats: name |> Chat.by_author() |> Repo.all()
  ]

  content = render_template("user", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Full-text search over posts and comments, optionally scoped to subreddits
# via the "subreddit" query parameter.
get "/search" do
  conn = Plug.Conn.fetch_query_params(conn)
  params = conn.query_params

  # NOTE(review): no defaults here — a request without ?search= passes nil to
  # Post.search/2 and Comment.search/2; confirm those handle nil gracefully.
  search = params["search"]
  subreddits = params["subreddit"]

  tpl_args = [
    search: search,
    posts: search |> Post.search(subreddits) |> Repo.all(),
    comments: search |> Comment.search(subreddits) |> Repo.all()
  ]

  content = render_template("search", tpl_args)

  conn
  |> put_resp_header("content-type", "text/html; charset=utf-8")
  |> send_resp(200, content)
end
# Serves bundled static assets from priv/static. The path segments come from
# the URL (untrusted), so the resolved path is expanded and must stay inside
# the static root — anything escaping it (e.g. via "..") gets a 404.
get "/static/*path" do
  static_root = Application.app_dir(:bdfr_browser, "priv/static")
  file_path = Path.expand(Path.join(path), static_root)

  if String.starts_with?(file_path, static_root <> "/") and File.exists?(file_path) do
    {:ok, file} = File.read(file_path)

    conn
    |> put_resp_header("content-type", mime_from_ext(file_path))
    |> send_resp(200, file)
  else
    send_resp(conn, 404, "Not Found")
  end
end
# Serves downloaded post media from the archive's base directory. The path
# segments come from the URL (untrusted), so the resolved path is expanded
# and must stay inside base_directory — traversal attempts get a 404.
get "/media/*path" do
  base_directory = Application.fetch_env!(:bdfr_browser, :base_directory)
  file_path = Path.expand(Path.join(path), base_directory)

  if String.starts_with?(file_path, base_directory <> "/") and File.exists?(file_path) do
    {:ok, media} = File.read(file_path)

    conn
    |> put_resp_header("content-type", mime_from_ext(file_path))
    |> send_resp(200, media)
  else
    send_resp(conn, 404, "Not Found")
  end
end
# Serves chat image attachments from <chat_directory>/images. Same traversal
# guard as the other file-serving routes: the expanded path must remain
# inside the images root or the request gets a 404.
get "/chat_media/*path" do
  chat_directory = Application.fetch_env!(:bdfr_browser, :chat_directory)
  images_root = Path.join(chat_directory, "images")
  file_path = Path.expand(Path.join(path), images_root)

  if String.starts_with?(file_path, images_root <> "/") and File.exists?(file_path) do
    {:ok, media} = File.read(file_path)

    conn
    |> put_resp_header("content-type", mime_from_ext(file_path))
    |> send_resp(200, media)
  else
    send_resp(conn, 404, "Not Found")
  end
end
# Kicks off a full archive import in the background; responds immediately.
post "/_import" do
  :ok = Importer.background_import()
  send_resp(conn, 200, "IMPORTING")
end
# Imports only changed entries in the background; responds immediately.
post "/_import_changes" do
  :ok = Importer.background_import_changes()
  send_resp(conn, 200, "IMPORTING CHANGES")
end
# Re-reads the YAML config from disk and replaces the cached copy in the
# application environment.
post "/_reload" do
  config_file = YamlElixir.read_from_file!(System.fetch_env!("BDFR_BROWSER_CONFIG_FILE"))
  :ok = Application.put_env(:bdfr_browser, :config_file, config_file)
  send_resp(conn, 200, "RELOAD")
end
# Best-effort message cleanup; the result is deliberately ignored.
post "/_cleanup" do
  _ = Importer.cleanup_messages()
  send_resp(conn, 200, "CLEANED UP")
end
# Liveness probe.
get "/_ping" do
  send_resp(conn, 200, "PONG")
end
# Catch-all for unmatched routes.
match _ do
  send_resp(conn, 404, "Not Found")
end
# Helper

# Renders `priv/templates/http/#{name}.eex` inside the shared application
# layout and returns the resulting HTML string. `args` is the keyword list
# exposed to the embedded template as `embedded_args`.
defp render_template(name, args) do
  tpl_file = Application.app_dir(:bdfr_browser, "priv/templates/http/application.eex")
  embedded_tpl = Application.app_dir(:bdfr_browser, "priv/templates/http/#{name}.eex")
  EEx.eval_file(tpl_file, embedded_template: embedded_tpl, embedded_args: args)
end
# Builds %{images: [...], videos: [...]} of /media/ URLs for a post by
# globbing the post's directory for files whose names start with `post`
# (the post's base filename). `args` must contain :paths, the path segments
# below the archive base directory (e.g. [subreddit, date]).
defp post_media(post, args) do
  base_directory = Application.fetch_env!(:bdfr_browser, :base_directory)
  post_dir = Path.join([base_directory | Keyword.fetch!(args, :paths)])

  # Both lowercase and uppercase extensions are matched explicitly because
  # Path.wildcard/1 is case-sensitive.
  post_img = "#{post}*.{jpg,JPG,jpeg,JPEG,png,PNG,gif,GIF,webp,WEBP}"
  post_vid = "#{post}*.{mp4,MP4,webm,WEBM}"

  %{
    images: post_media_for_type(post_dir, post_img),
    videos: post_media_for_type(post_dir, post_vid)
  }
end
# Expands a glob pattern under `post_dir` and maps each match to a sorted
# list of /media/ URLs.
defp post_media_for_type(post_dir, post_type) do
  [post_dir, post_type]
  |> Path.join()
  |> Path.wildcard()
  |> Enum.map(&media_path/1)
  |> Enum.sort()
end
# Converts an absolute file path below the archive base directory into a URL
# under /media/, percent-encoding each path segment individually so the "/"
# separators survive.
defp media_path(full_path) do
  base_directory = Application.fetch_env!(:bdfr_browser, :base_directory)

  full_path
  |> String.replace("#{base_directory}/", "/media/")
  |> String.split("/")
  |> Enum.map_join("/", fn segment -> URI.encode(segment, &URI.char_unreserved?/1) end)
end
# Maps a file's extension (case-insensitive) to its MIME type. Unknown
# extensions fall back to "application/octet-stream" instead of raising a
# CaseClauseError, so serving an unexpected file type no longer crashes the
# request.
defp mime_from_ext(path) do
  normalized_path = String.downcase(path)

  case Path.extname(normalized_path) do
    ".jpg" -> "image/jpeg"
    ".jpeg" -> "image/jpeg"
    ".png" -> "image/png"
    ".gif" -> "image/gif"
    ".mp4" -> "video/mp4"
    ".webp" -> "image/webp"
    ".webm" -> "video/webm"
    ".js" -> "text/javascript"
    ".css" -> "text/css"
    _ -> "application/octet-stream"
  end
end
end