2 changes: 1 addition & 1 deletion src/client.rs
@@ -356,7 +356,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
.boxed()
}

-// Make a request to a Reddit API and parse the JSON response
+/// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
// Closure to quickly build errors
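Aside (not part of the diff): the `#[cached(size = 100, time = 30, result = true)]` attribute on `json` comes from the `cached` crate and memoizes up to 100 successful responses for 30 seconds each. A minimal sketch of that attribute on a hypothetical function, assuming `cached` as a dependency:

use cached::proc_macro::cached;

// Hypothetical stand-in for an expensive fetch; with `result = true` only
// `Ok` values are cached, `size` bounds the cache and `time` is the TTL in seconds.
#[cached(size = 100, time = 30, result = true)]
fn fetch_cached(path: String) -> Result<String, String> {
    Ok(format!("response body for {path}"))
}

fn main() {
    // A second call within 30 seconds returns the memoized Ok value.
    let first = fetch_cached("/r/rust/hot.json".to_string());
    let second = fetch_cached("/r/rust/hot.json".to_string());
    assert_eq!(first, second);
}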
9 changes: 4 additions & 5 deletions src/config.rs
@@ -4,13 +4,12 @@ use std::{env::var, fs::read_to_string};

// Waiting for https://github.com/rust-lang/rust/issues/74465 to land, so we
// can reduce reliance on once_cell.
-//
-// This is the local static that is initialized at runtime (technically at
-// first request) and contains the instance settings.
+/// This is the local static that is initialized at runtime (technically at
+/// first request) and contains the instance settings.
pub static CONFIG: Lazy<Config> = Lazy::new(Config::load);

-// This serves as the frontend for an archival API - on removed comments, this URL
-// will be the base of a link, to display removed content (on another site).
+/// This serves as the frontend for an archival API - on removed comments, this URL
+/// will be the base of a link, to display removed content (on another site).
pub const DEFAULT_PUSHSHIFT_FRONTEND: &str = "undelete.pullpush.io";

/// Stores the configuration parsed from the environment variables and the
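Aside (not part of the diff): `CONFIG` relies on `once_cell::sync::Lazy`, so `Config::load` only runs on first access, i.e. on the first request that reads a setting. A small sketch of that pattern with a hypothetical config type and environment variable:

use once_cell::sync::Lazy;

// Hypothetical config; the real Config::load reads many more settings.
struct AppConfig {
    pushshift_frontend: String,
}

impl AppConfig {
    fn load() -> Self {
        Self {
            // Hypothetical variable name, shown only to illustrate the fallback.
            pushshift_frontend: std::env::var("PUSHSHIFT_FRONTEND")
                .unwrap_or_else(|_| "undelete.pullpush.io".to_string()),
        }
    }
}

// load() runs the first time APP_CONFIG is dereferenced, not at program start.
static APP_CONFIG: Lazy<AppConfig> = Lazy::new(AppConfig::load);

fn main() {
    println!("{}", APP_CONFIG.pushshift_frontend);
}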
2 changes: 1 addition & 1 deletion src/duplicates.rs
@@ -1,4 +1,4 @@
-// Handler for post duplicates.
+//! Handler for post duplicates.

use crate::client::json;
use crate::server::RequestExt;
6 changes: 3 additions & 3 deletions src/instance_info.rs
@@ -10,9 +10,9 @@ use rinja::Template;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

-// This is the local static that is intialized at runtime (technically at
-// the first request to the info endpoint) and contains the data
-// retrieved from the info endpoint.
+/// This is the local static that is initialized at runtime (technically at
+/// the first request to the info endpoint) and contains the data
+/// retrieved from the info endpoint.
pub static INSTANCE_INFO: Lazy<InstanceInfo> = Lazy::new(InstanceInfo::new);

/// Handles instance info endpoint
2 changes: 1 addition & 1 deletion src/search.rs
@@ -51,7 +51,7 @@ struct SearchTemplate {
no_posts: bool,
}

-// Regex matched against search queries to determine if they are reddit urls.
+/// Regex matched against search queries to determine if they are reddit urls.
static REDDIT_URL_MATCH: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap());

// SERVICES
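Aside (not part of the diff): the `REDDIT_URL_MATCH` pattern accepts an http or https scheme followed by any chain of subdomains ending in reddit.com, so a search query that is really a Reddit URL can be redirected rather than searched. A quick illustration using the same pattern, assuming the `regex` crate:

use regex::Regex;

fn main() {
    let reddit_url_match = Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap();

    // Reddit URLs, with or without subdomains, are recognized...
    assert!(reddit_url_match.is_match("https://www.reddit.com/r/rust/"));
    assert!(reddit_url_match.is_match("http://old.reddit.com/r/rust/comments/abc/"));

    // ...while ordinary search terms are not.
    assert!(!reddit_url_match.is_match("rust lifetimes"));
}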
6 changes: 3 additions & 3 deletions src/settings.rs
@@ -48,7 +48,7 @@ const PREFS: [&str; 19] = [

// FUNCTIONS

-// Retrieve cookies from request "Cookie" header
+/// Retrieve cookies from request "Cookie" header
pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
let url = req.uri().to_string();
Ok(template(&SettingsTemplate {
@@ -57,7 +57,7 @@ pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
}))
}

-// Set cookies using response "Set-Cookie" header
+/// Set cookies using response "Set-Cookie" header
pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
// Split the body into parts
let (parts, mut body) = req.into_parts();
@@ -263,7 +263,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body
response
}

-// Set cookies using response "Set-Cookie" header
+/// Set cookies using response "Set-Cookie" header
pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
Ok(set_cookies_method(req, true))
}
50 changes: 25 additions & 25 deletions src/utils.rs
@@ -51,7 +51,7 @@ pub enum ResourceType {
Post,
}

-// Post flair with content, background color and foreground color
+/// Post flair with content, background color and foreground color
#[derive(Serialize)]
pub struct Flair {
pub flair_parts: Vec<FlairPart>,
@@ -60,7 +60,7 @@ pub struct Flair {
pub foreground_color: String,
}

-// Part of flair, either emoji or text
+/// Part of flair, either emoji or text
#[derive(Clone, Serialize)]
pub struct FlairPart {
pub flair_part_type: String,
@@ -167,7 +167,7 @@ impl PollOption {
}
}

-// Post flags with nsfw and stickied
+/// Post flags with NSFW and stickied
#[derive(Serialize)]
pub struct Flags {
pub spoiler: bool,
@@ -323,7 +323,7 @@ impl GalleryMedia {
}
}

-// Post containing content, metadata and media
+/// Post containing content, metadata and media
#[derive(Serialize)]
pub struct Post {
pub id: String,
@@ -355,7 +355,7 @@ pub struct Post {
}

impl Post {
-// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
+/// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
pub async fn fetch(path: &str, quarantine: bool) -> Result<(Vec<Self>, String), String> {
// Send a request to the url
let res = match json(path.to_string(), quarantine).await {
@@ -468,7 +468,7 @@ impl Post {

#[derive(Template)]
#[template(path = "comment.html")]
-// Comment with content, post, score and data/time that it was posted
+/// Comment with content, post, score and data/time that it was posted
pub struct Comment {
pub id: String,
pub kind: String,
@@ -522,8 +522,8 @@ impl std::fmt::Display for Awards {
}
}

-// Convert Reddit awards JSON to Awards struct
impl Awards {
+/// Convert Reddit awards JSON to Awards struct
pub fn parse(items: &Value) -> Self {
let parsed = items.as_array().unwrap_or(&Vec::new()).iter().fold(Vec::new(), |mut awards, item| {
let name = item["name"].as_str().unwrap_or_default().to_string();
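Aside (not part of the diff): `Awards::parse` folds over loosely typed `serde_json::Value`s. A self-contained sketch of the same access pattern with toy JSON and a hypothetical helper:

use serde_json::{json, Value};

// Pull the "name" field out of each award object, defaulting to "" when absent.
fn award_names(items: &Value) -> Vec<String> {
    items
        .as_array()
        .unwrap_or(&Vec::new())
        .iter()
        .map(|item| item["name"].as_str().unwrap_or_default().to_string())
        .collect()
}

fn main() {
    let items = json!([{ "name": "Gold", "count": 2 }, { "name": "Silver" }]);
    assert_eq!(award_names(&items), vec!["Gold", "Silver"]);
}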
@@ -583,7 +583,7 @@ pub struct NSFWLandingTemplate {
}

#[derive(Default)]
-// User struct containing metadata about user
+/// User struct containing metadata about user
pub struct User {
pub name: String,
pub title: String,
@@ -596,7 +596,7 @@ pub struct User {
}

#[derive(Default)]
-// Subreddit struct containing metadata about community
+/// Subreddit struct containing metadata about community
pub struct Subreddit {
pub name: String,
pub title: String,
@@ -610,7 +610,7 @@ pub struct Subreddit {
pub nsfw: bool,
}

-// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
+/// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
pub t: Option<String>,
@@ -696,7 +696,7 @@ where
pub struct ThemeAssets;

impl Preferences {
-// Build preferences from cookies
+/// Build preferences from cookies
pub fn new(req: &Request<Body>) -> Self {
// Read available theme names from embedded css files.
// Always make the default "system" theme available.
@@ -895,7 +895,7 @@ pub async fn parse_post(post: &Value) -> Post {
// FORMATTING
//

-// Grab a query parameter from a url
+/// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
Some(
Url::parse(format!("https://libredd.it/{path}").as_str())
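Aside (not part of the diff): the `Url::parse` call above is the core of `param` - the relative path is grafted onto a dummy base so the `url` crate can split out its query pairs. A sketch of that technique with a hypothetical helper:

use url::Url;

// Hypothetical helper mirroring the approach `param` uses.
fn query_param(path: &str, key: &str) -> Option<String> {
    let url = Url::parse(&format!("https://libredd.it/{path}")).ok()?;
    for (k, v) in url.query_pairs() {
        if k == key {
            return Some(v.into_owned());
        }
    }
    None
}

fn main() {
    assert_eq!(query_param("r/rust/?sort=hot&t=week", "sort").as_deref(), Some("hot"));
    assert_eq!(query_param("r/rust/", "sort"), None);
}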
@@ -908,7 +908,7 @@ pub fn param(path: &str, value: &str) -> Option<String> {
)
}

-// Retrieve the value of a setting by name
+/// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
// Parse a cookie value from request

@@ -979,7 +979,7 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
}
}

-// Retrieve the value of a setting by name or the default value
+/// Retrieve the value of a setting by name or the default value
pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> String {
let value = setting(req, name);
if value.is_empty() {
@@ -989,7 +989,7 @@ pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> S
}
}

-// Detect and redirect in the event of a random subreddit
+/// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
if sub == "random" || sub == "randnsfw" {
Ok(redirect(&format!(
@@ -1018,7 +1018,7 @@ static REGEX_URL_EXTERNAL_PREVIEW: Lazy<Regex> = Lazy::new(|| Regex::new(r"https
static REGEX_URL_STYLES: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://styles\.redditmedia\.com/(.*)").unwrap());
static REGEX_URL_STATIC_MEDIA: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://www\.redditstatic\.com/(.*)").unwrap());

-// Direct urls to proxy if proxy is enabled
+/// Direct urls to proxy if proxy is enabled
pub fn format_url(url: &str) -> String {
if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
String::new()
@@ -1093,7 +1093,7 @@ static REDDIT_EMOJI_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(www
static REDLIB_PREVIEW_LINK_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"/(img|preview/)(pre|external-pre)?/(.*?)>"#).unwrap());
static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());

-// Rewrite Reddit links to Redlib in body of text
+/// Rewrite Reddit links to Redlib in body of text
pub fn rewrite_urls(input_text: &str) -> String {
let mut text1 =
// Rewrite Reddit links to Redlib
@@ -1242,9 +1242,9 @@ pub fn rewrite_emotes(media_metadata: &Value, comment: String) -> String {
rewrite_urls(&comment)
}

-// Format vote count to a string that will be displayed.
-// Append `m` and `k` for millions and thousands respectively, and
-// round to the nearest tenth.
+/// Format vote count to a string that will be displayed.
+/// Append `m` and `k` for millions and thousands respectively, and
+/// round to the nearest tenth.
pub fn format_num(num: i64) -> (String, String) {
let truncated = if num >= 1_000_000 || num <= -1_000_000 {
format!("{:.1}m", num as f64 / 1_000_000.0)
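Aside (not part of the diff): a quick illustration of the rounding rule the doc comment describes. This is a sketch only - the thousands branch lies outside the visible hunk and is assumed symmetric to the millions branch shown above:

fn format_num_sketch(num: i64) -> (String, String) {
    let truncated = if num.abs() >= 1_000_000 {
        format!("{:.1}m", num as f64 / 1_000_000.0)
    } else if num.abs() >= 1_000 {
        format!("{:.1}k", num as f64 / 1_000.0)
    } else {
        num.to_string()
    };
    // Like the real function, return both the rounded label and the exact count.
    (truncated, num.to_string())
}

fn main() {
    assert_eq!(format_num_sketch(1_234_000).0, "1.2m");
    assert_eq!(format_num_sketch(5_678).0, "5.7k");
    assert_eq!(format_num_sketch(42).0, "42");
}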
@@ -1257,7 +1257,7 @@ pub fn format_num(num: i64) -> (String, String) {
(truncated, num.to_string())
}

-// Parse a relative and absolute time from a UNIX timestamp
+/// Parse a relative and absolute time from a UNIX timestamp
pub fn time(created: f64) -> (String, String) {
let time = OffsetDateTime::from_unix_timestamp(created.round() as i64).unwrap_or(OffsetDateTime::UNIX_EPOCH);
let now = OffsetDateTime::now_utc();
@@ -1293,7 +1293,7 @@ pub fn time(created: f64) -> (String, String) {
)
}

-// val() function used to parse JSON from Reddit APIs
+/// val() function used to parse JSON from Reddit APIs
pub fn val(j: &Value, k: &str) -> String {
j["data"][k].as_str().unwrap_or_default().to_string()
}
@@ -1384,7 +1384,7 @@ pub fn disable_indexing() -> bool {
}
}

-// Determines if a request shoud redirect to a nsfw landing gate.
+/// Determines if a request should redirect to a NSFW landing gate.
pub fn should_be_nsfw_gated(req: &Request<Body>, _req_url: &str) -> bool {
(setting(req, "show_nsfw") != "on") || sfw_only()
}
@@ -1419,7 +1419,7 @@ pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Respons
Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}

-// Returns the last (non-empty) segment of a path string
+/// Returns the last (non-empty) segment of a path string
pub fn url_path_basename(path: &str) -> String {
let url_result = Url::parse(format!("https://libredd.it/{path}").as_str());

@@ -1433,7 +1433,7 @@ pub fn url_path_basename(path: &str) -> String {
}
}

-// Returns the URL of a post, as needed by RSS feeds
+/// Returns the URL of a post, as needed by RSS feeds
pub fn get_post_url(post: &Post) -> String {
if let Some(out_url) = &post.out_url {
// Handle cross post