# Compare commits

42 commits:

dd5353d244, 6a68241a7c, 401061b7b7, fc88b70e5e, 24bfd7e5e7, 13a8983437, 8906b66cb3, 4afd963c64, 8b1d6e8a49, 5733197fdd, 3a6b76374e, d632fea6a7, 660c7e600a, dee73dfa33, 7099e7ab33, 010aad4727, 192a2faa00, 5844ba629d, be3a4013db, a19f46ad18, e60b913452, 9ba47f458f, 8ea797f880, b263234dc2, a125b8a43e, 03a87fcd23, 7b90d95a95, 634e6b6354, a0db2144a5, 238ef73ed4, 5f37e8e8ee, 2dd12756df, e2c84879d6, 977d5cfce0, 3c0cb2ecec, 607b3bda4a, 4a9f405947, a9d5993b20, dafa1ab1d5, f5e7c6eabf, 1284de8c13, 94dc7b567c
## .github/FUNDING.yml (2 changes, file deleted)

```diff
@@ -1,2 +0,0 @@
-liberapay: spike
-custom: ['https://www.buymeacoffee.com/spikecodes']
```
## .github/ISSUE_TEMPLATE/feature_parity.md (4 changes)

```diff
@@ -1,6 +1,6 @@
 ---
 name: ✨ Feature parity
-about: Suggest implementing a feature into Libreddit that is found in Reddit.com
+about: Suggest implementing a feature into Ferrit that is found in Reddit.com
 title: '✨ Feature parity: '
 labels: feature parity
 assignees: ''
@@ -12,7 +12,7 @@ assignees: ''
 A clear and concise description of what the feature is.
 -->

-## Describe how this could be implemented into Libreddit
+## Describe how this could be implemented into Ferrit
 <!--
 A clear and concise description of what you want to happen.
 -->
```
## .github/ISSUE_TEMPLATE/feature_request.md (2 changes)

```diff
@@ -1,6 +1,6 @@
 ---
 name: 💡 Feature request
-about: Suggest a feature for Libreddit that is not found in Reddit
+about: Suggest a feature for Ferrit that is not found in Reddit
 title: '💡 Feature request: '
 labels: enhancement
 assignees: ''
```
## .github/workflows/docker-arm.yml (38 changes, file deleted)

```diff
@@ -1,38 +0,0 @@
-name: Docker ARM Build
-
-on:
-  push:
-    paths-ignore:
-      - "**.md"
-    branches:
-      - master
-
-jobs:
-  build-docker:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-        with:
-          platforms: all
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v1
-        with:
-          version: latest
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./Dockerfile.arm
-          platforms: linux/arm64
-          push: true
-          tags: spikecodes/libreddit:arm
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
```
## .github/workflows/docker-armv7.yml (41 changes, file deleted)

```diff
@@ -1,41 +0,0 @@
-name: Docker ARM V7 Build
-
-on:
-  push:
-    paths-ignore:
-      - "**.md"
-    branches:
-      - master
-
-jobs:
-  build-docker:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@v1
-        with:
-          platforms: all
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v1
-        with:
-          version: latest
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push
-        id: build_push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./Dockerfile.armv7
-          platforms: linux/arm/v7
-          push: true
-          tags: spikecodes/libreddit:armv7
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
```
## .github/workflows/docker.yml (38 changes, file deleted)

```diff
@@ -1,38 +0,0 @@
-name: Docker amd64 Build
-
-on:
-  push:
-    paths-ignore:
-      - "**.md"
-    branches:
-      - master
-
-jobs:
-  build-docker:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-        with:
-          platforms: all
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v1
-        with:
-          version: latest
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./Dockerfile
-          platforms: linux/amd64
-          push: true
-          tags: spikecodes/libreddit:latest
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
```
## .github/workflows/rust.yml (63 changes)

```diff
@@ -2,58 +2,29 @@ name: Rust

 on:
   push:
-    paths-ignore:
-      - "**.md"
-    branches:
-      - master
+    branches: [ "master" ]
+  pull_request:
+    branches: [ "master" ]

 env:
   CARGO_TERM_COLOR: always

 jobs:
   build:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
-
-      - name: Cache Packages
-        uses: Swatinem/rust-cache@v1.0.1
-
-      - name: Build
-        run: cargo build --release
-
-      - name: Publish to crates.io
-        continue-on-error: true
-        run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }}
+      - uses: actions/checkout@v3

-      - uses: actions/upload-artifact@v2.2.1
-        name: Upload a Build Artifact
-        with:
-          name: libreddit
-          path: target/release/libreddit
+      - name: Run formatting check
+        run: cargo fmt --check

-      - name: Versions
-        id: version
-        run: |
-          echo "::set-output name=version::$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')"
-          echo "::set-output name=tag::$(git describe --tags)"
-
-      - name: Calculate SHA512 checksum
-        run: sha512sum target/release/libreddit > libreddit.sha512
-
-      - name: Release
-        uses: softprops/action-gh-release@v1
-        if: github.base_ref != 'master'
-        with:
-          tag_name: ${{ steps.version.outputs.version }}
-          name: ${{ steps.version.outputs.version }} - ${{ github.event.head_commit.message }}
-          draft: true
-          files: |
-            target/release/libreddit
-            libreddit.sha512
-          body: |
-            - ${{ github.event.head_commit.message }} ${{ github.sha }}
-          generate_release_notes: true
-        env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
+      - name: Run cargo clippy check
+        run: cargo clippy
+
+      - name: Build
+        run: cargo build --verbose
+
+      - name: Run tests
+        run: cargo test --verbose
```
## .replit (4 changes)

```diff
@@ -1,2 +1,2 @@
-run = "while true; do wget -O libreddit https://github.com/spikecodes/libreddit/releases/latest/download/libreddit;chmod +x libreddit;./libreddit -H 63115200;sleep 1;done"
-language = "bash"
+run = "while :; do set -ex; curl -o./ferrit -fsSL -- https://github.com/ferritreader/ferrit/releases/latest/download/ferrit.x86_64-unknown-linux-gnu ; chmod +x ferrit; set +e; ./ferrit -H 63115200; sleep 1; done"
+language = "bash"
```
## CREDITS (new file, 73 additions)

```diff
@@ -0,0 +1,73 @@
+5trongthany <65565784+5trongthany@users.noreply.github.com>
+674Y3r <87250374+674Y3r@users.noreply.github.com>
+accountForIssues <52367365+accountForIssues@users.noreply.github.com>
+Adrian Lebioda <adrianlebioda@gmail.com>
+alefvanoon <53198048+alefvanoon@users.noreply.github.com>
+alyaeanyx <alexandra.hollmeier@mailbox.org>
+AndreVuillemot160 <84594011+AndreVuillemot160@users.noreply.github.com>
+Andrew Kaufman <57281817+andrew-kaufman@users.noreply.github.com>
+Artemis <51862164+artemislena@users.noreply.github.com>
+Arya K <73596856+gi-yt@users.noreply.github.com>
+Austin Huang <im@austinhuang.me>
+Basti <pred2k@users.noreply.github.com>
+Ben Smith <37027883+smithbm2316@users.noreply.github.com>
+BobIsMyManager <ahoumatt@yahoo.com>
+curlpipe <11898833+curlpipe@users.noreply.github.com>
+dacousb <53299044+dacousb@users.noreply.github.com>
+Daniel Valentine <Daniel-Valentine@users.noreply.github.com>
+dbrennand <52419383+dbrennand@users.noreply.github.com>
+Diego Magdaleno <38844659+DiegoMagdaleno@users.noreply.github.com>
+Dyras <jevwmguf@duck.com>
+Edward <101938856+EdwardLangdon@users.noreply.github.com>
+erdnaxe <erdnaxe@users.noreply.github.com>
+Esmail EL BoB <github.defilable@simplelogin.co>
+FireMasterK <20838718+FireMasterK@users.noreply.github.com>
+George Roubos <cowkingdom@hotmail.com>
+git-bruh <e817509a-8ee9-4332-b0ad-3a6bdf9ab63f@aleeas.com>
+guaddy <67671414+guaddy@users.noreply.github.com>
+Harsh Mishra <erbeusgriffincasper@gmail.com>
+imabritishcow <bcow@protonmail.com>
+Josiah <70736638+fres7h@users.noreply.github.com>
+JPyke3 <pyke.jacob1@gmail.com>
+Kavin <20838718+FireMasterK@users.noreply.github.com>
+Kazi <kzshantonu@users.noreply.github.com>
+Kieran <42723993+EnderDev@users.noreply.github.com>
+Kieran <kieran@dothq.co>
+Kyle Roth <kylrth@gmail.com>
+Laurențiu Nicola <lnicola@users.noreply.github.com>
+Mario A <10923513+Midblyte@users.noreply.github.com>
+Matt <69441971+sigaloid@users.noreply.github.com>
+Matthew Crossman <matt@crossman.page>
+Mennaruuk <52135169+Mennaruuk@users.noreply.github.com>
+mikupls <93015331+mikupls@users.noreply.github.com>
+Nainar <nainar.mb@gmail.com>
+Nathan Moos <moosingin3space@gmail.com>
+Nicholas Christopher <nchristopher@tuta.io>
+Nick Lowery <ClockVapor@users.noreply.github.com>
+Nico <github@dr460nf1r3.org>
+obeho <71698631+obeho@users.noreply.github.com>
+obscurity <z@x4.pm>
+RiversideRocks <59586759+RiversideRocks@users.noreply.github.com>
+robin <8597693+robrobinbin@users.noreply.github.com>
+Robin <8597693+robrobinbin@users.noreply.github.com>
+robrobinbin <>
+robrobinbin <8597693+robrobinbin@users.noreply.github.com>
+robrobinbin <robindepril@gmail.com>
+Ruben Elshof <15641671+rubenelshof@users.noreply.github.com>
+Scoder12 <34356756+Scoder12@users.noreply.github.com>
+Slayer <51095261+GhostSlayer@users.noreply.github.com>
+Soheb <somoso@users.noreply.github.com>
+somini <somini@users.noreply.github.com>
+somoso <github@soheb.anonaddy.com>
+Spike <19519553+spikecodes@users.noreply.github.com>
+spikecodes <19519553+spikecodes@users.noreply.github.com>
+sybenx <syb@duck.com>
+TheCultLeader666 <65368815+TheCultLeader666@users.noreply.github.com>
+TheFrenchGhosty <47571719+TheFrenchGhosty@users.noreply.github.com>
+The TwilightBlood <hwengerstickel@protonmail.com>
+tirz <36501933+tirz@users.noreply.github.com>
+Tsvetomir Bonev <invakid404@riseup.net>
+Walkx <walkxnl@gmail.com>
+Wichai <1482605+Chengings@users.noreply.github.com>
+xatier <xatierlike@gmail.com>
+Zach <72994911+zachjmurphy@users.noreply.github.com>
```
## Cargo.lock (generated file, 676 changes; diff collapsed)
## Cargo.toml (18 changes)

```diff
@@ -1,16 +1,16 @@
 [package]
-name = "libreddit"
+name = "ferrit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
-repository = "https://github.com/spikecodes/libreddit"
-version = "0.22.9"
-authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
+repository = "https://github.com/ferritreader/ferrit"
+version = "0.1.1"
+authors = ["Daniel Valentine <Daniel-Valentine@users.noreply.github.com>", "spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2021"

 [dependencies]
 askama = { version = "0.11.1", default-features = false }
 async-recursion = "1.0.0"
-cached = "0.34.0"
+cached = "0.40.0"
 clap = { version = "3.1.18", default-features = false, features = ["std"] }
 regex = "1.5.6"
 serde = { version = "1.0.137", features = ["derive"] }
@@ -25,3 +25,11 @@ tokio = { version = "1.18.2", features = ["full"] }
 time = "0.3.9"
 url = "2.2.2"
 rust-embed = "6.4.0"
+libflate = "1.2.0"
+brotli = { version = "3.3.4", features = ["std"] }
+toml = "0.5.9"
+once_cell = "1.16.0"
+
+[dev-dependencies]
+lipsum = "0.8.2"
+sealed_test = "1.0.0"
```
## Dockerfile (12 changes)

```diff
@@ -5,7 +5,7 @@ FROM rust:alpine AS builder

 RUN apk add --no-cache musl-dev

-WORKDIR /libreddit
+WORKDIR /ferrit

 COPY . .

@@ -21,16 +21,16 @@ COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
 COPY --from=builder /etc/ssl/certs /etc/ssl/certs

 # Copy our build
-COPY --from=builder /libreddit/target/x86_64-unknown-linux-musl/release/libreddit /usr/local/bin/libreddit
+COPY --from=builder /ferrit/target/x86_64-unknown-linux-musl/release/ferrit /usr/local/bin/ferrit

 # Use an unprivileged user.
-RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
-USER libreddit
+RUN adduser --home /nonexistent --no-create-home --disabled-password ferrit
+USER ferrit

 # Tell Docker to expose port 8080
 EXPOSE 8080

-# Run a healthcheck every minute to make sure Libreddit is functional
+# Run a healthcheck every minute to make sure ferrit is functional
 HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

-CMD ["libreddit"]
+CMD ["ferrit"]
```
## Dockerfile.arm

```diff
@@ -5,7 +5,7 @@ FROM rust:alpine AS builder

 RUN apk add --no-cache g++

-WORKDIR /usr/src/libreddit
+WORKDIR /usr/src/ferrit

 COPY . .

@@ -21,16 +21,16 @@ COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
 COPY --from=builder /etc/ssl/certs /etc/ssl/certs

 # Copy our build
-COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit
+COPY --from=builder /usr/local/cargo/bin/ferrit /usr/local/bin/ferrit

 # Use an unprivileged user.
-RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
-USER libreddit
+RUN adduser --home /nonexistent --no-create-home --disabled-password ferrit
+USER ferrit

 # Tell Docker to expose port 8080
 EXPOSE 8080

-# Run a healthcheck every minute to make sure Libreddit is functional
+# Run a healthcheck every minute to make sure ferrit is functional
 HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

-CMD ["libreddit"]
+CMD ["ferrit"]
```
## Dockerfile.armv7

```diff
@@ -12,7 +12,7 @@ RUN apt-get update && apt-get -y install gcc-arm-linux-gnueabihf \

 RUN rustup target add armv7-unknown-linux-musleabihf

-WORKDIR /libreddit
+WORKDIR /ferrit

 COPY . .

@@ -28,16 +28,16 @@ COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
 COPY --from=builder /etc/ssl/certs /etc/ssl/certs

 # Copy our build
-COPY --from=builder /libreddit/target/armv7-unknown-linux-musleabihf/release/libreddit /usr/local/bin/libreddit
+COPY --from=builder /ferrit/target/armv7-unknown-linux-musleabihf/release/ferrit /usr/local/bin/ferrit

 # Use an unprivileged user.
-RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
-USER libreddit
+RUN adduser --home /nonexistent --no-create-home --disabled-password ferrit
+USER ferrit

 # Tell Docker to expose port 8080
 EXPOSE 8080

-# Run a healthcheck every minute to make sure Libreddit is functional
+# Run a healthcheck every minute to make sure ferrit is functional
 HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

-CMD ["libreddit"]
+CMD ["ferrit"]
```
## README.md (327 changes)

````diff
@@ -1,324 +1,31 @@
-# Libreddit
+# Ferrit

-> An alternative private front-end to Reddit
+**Ferrit** is a front-end for Reddit, written in [Rust](https://www.rust-lang.org/). It is a fork of the [Libreddit project](https://github.com/spikecodes/libreddit) started by [spikecodes](https://spike.codes).

-
+## Why Ferrit?

----
+Ferrit is a fast, light, private, and secure way to browse Reddit. View the hottest takes on **/r/unpopularopinion** without having to enable JavaScript or unwillingly submit any information to Reddit's servers. For more information, see the following pages on our [wiki](https://github.com/ferritreader/ferrit/wiki):

-**10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).
+* [FAQ/What are the advantages to using Ferrit over Reddit?](https://github.com/ferritreader/ferrit/wiki/FAQ#what-are-the-advantages-to-using-ferrit-over-reddit): what Ferrit does and why it is preferable to visiting Reddit directly
+* [FAQ/What are the disadvantages to using Ferrit over Reddit?](https://github.com/ferritreader/ferrit/wiki/FAQ#what-are-the-disadvantages-to-using-ferrit-over-reddit): what Ferrit is _not_
+* [Privacy/Reddit](https://github.com/ferritreader/ferrit/wiki/Privacy#reddit): an accounting of the data Reddit may and will collect from its visitors

-- 🚀 Fast: written in Rust for blazing-fast speeds and memory safety
-- ☁️ Light: no JavaScript, no ads, no tracking, no bloat
-- 🕵 Private: all requests are proxied through the server, including media
-- 🔒 Secure: strong [Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) prevents browser requests to Reddit
+The [whole FAQ](https://github.com/ferritreader/ferrit/wiki/FAQ) may address any other questions you have about Ferrit.

----
+## Instances

-I appreciate any donations! Your support allows me to continue developing Libreddit.
+Visit the [_Instances_ wiki page](https://github.com/ferritreader/ferrit/wiki/Instances) for a list of running Ferrit instances.

-<a href="https://www.buymeacoffee.com/spikecodes" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 40px" ></a>
-<a href="https://liberapay.com/spike/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg" style="height: 40px"></a>
+## Getting and Installing Ferrit

+Visit [Building, Installation, and Deployment](https://github.com/ferritreader/ferrit/wiki/Building,-Installation,-and-Deployment) on the wiki for detailed instructions on how to obtain and set up Ferrit.

-**Bitcoin:** `bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y`
+## Discuss

-**Monero:** `45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR`
+You can talk with the Ferrit maintainers and other users in the official forums. See the [_Discuss_ wiki page](https://github.com/ferritreader/ferrit/wiki/Discuss) for more information.

----
+## Credits

-# Instances
+See the CREDITS file in the repository root for a list of contributors up to and including the commit of the latest release. This file is updated with each release and on occasion in between releases.

-Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new) to have your [selfhosted instance](#deployment) listed here!
-
-🔗 **Want to automatically redirect Reddit links to Libreddit? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!**
-
-| Website | Country | Cloudflare |
-|-|-|-|
-| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
-| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
-| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇩🇪 DE | ✅ |
-| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | |
-| [reddit.invak.id](https://reddit.invak.id) | 🇧🇬 BG | |
-| [reddit.phii.me](https://reddit.phii.me) | 🇺🇸 US | |
-| [lr.riverside.rocks](https://lr.riverside.rocks) | 🇺🇸 US | |
-| [libreddit.strongthany.cc](https://libreddit.strongthany.cc) | 🇺🇸 US | |
-| [libreddit.database.red](https://libreddit.database.red) | 🇺🇸 US | ✅ |
-| [libreddit.privacy.com.de](https://libreddit.privacy.com.de) | 🇩🇪 DE | |
-| [libreddit.domain.glass](https://libreddit.domain.glass) | 🇺🇸 US | ✅ |
-| [libreddit.sugoma.tk](https://libreddit.sugoma.tk) | 🇺🇸 US | |
-| [libreddit.jamiethalacker.dev](https://libreddit.jamiethalacker.dev) | 🇺🇸 US | ✅ |
-| [reddit.artemislena.eu](https://reddit.artemislena.eu) | 🇩🇪 DE | |
-| [r.nf](https://r.nf) | 🇩🇪 DE | ✅ |
-| [libreddit.some-things.org](https://libreddit.some-things.org) | 🇨🇭 CH | |
-| [reddit.stuehieyr.com](https://reddit.stuehieyr.com) | 🇩🇪 DE | |
-| [lr.mint.lgbt](https://lr.mint.lgbt) | 🇨🇦 CA | |
-| [libreddit.igna.rocks](https://libreddit.igna.rocks) | 🇺🇸 US | |
-| [libreddit.autarkic.org](https://libreddit.autarkic.org) | 🇺🇸 US | |
-| [libreddit.flux.industries](https://libreddit.flux.industries) | 🇩🇪 DE | ✅ |
-| [libreddit.drivet.xyz](https://libreddit.drivet.xyz) | 🇵🇱 PL | |
-| [lr.oversold.host](https://lr.oversold.host) | 🇱🇺 LU | |
-| [libreddit.de](https://libreddit.de) | 🇩🇪 DE | |
-| [libreddit.pussthecat.org](https://libreddit.pussthecat.org) | 🇩🇪 DE | |
-| [libreddit.mutahar.rocks](https://libreddit.mutahar.rocks) | 🇫🇷 FR | |
-| [libreddit.northboot.xyz](https://libreddit.northboot.xyz) | 🇩🇪 DE | |
-| [leddit.xyz](https://leddit.xyz) | 🇺🇸 US | |
-| [de.leddit.xyz](https://de.leddit.xyz) | 🇩🇪 DE | |
-| [lr.cowfee.moe](https://lr.cowfee.moe) | 🇺🇸 US | |
-| [libreddit.hu](https://libreddit.hu) | 🇫🇮 FI | ✅ |
-| [libreddit.totaldarkness.net](https://libreddit.totaldarkness.net) | 🇨🇦 CA | |
-| [libreddit.esmailelbob.xyz](https://libreddit.esmailelbob.xyz) | 🇨🇦 CA | |
-| [lr.vern.cc](https://lr.vern.cc) | 🇨🇦 CA | |
-| [libreddit.nl](https://libreddit.nl) | 🇳🇱 NL | |
-| [lr.stilic.ml](https://lr.stilic.ml) | 🇫🇷 FR | ✅ |
-| [reddi.tk](https://reddi.tk) | 🇺🇸 US | ✅ |
-| [libreddit.bus-hit.me](https://libreddit.bus-hit.me) | 🇨🇦 CA | |
-| [libreddit.datatunnel.xyz](https://libreddit.datatunnel.xyz) | 🇫🇮 FI | |
-| [libreddit.crewz.me](https://libreddit.crewz.me) | 🇳🇱 NL | ✅ |
-| [r.walkx.org](https://r.walkx.org) | 🇳🇱 NL | ✅ |
-| [libreddit.kylrth.com](https://libreddit.kylrth.com) | 🇨🇦 CA | |
-| [libreddit.yonalee.eu](https://libreddit.yonalee.eu) | 🇱🇺 LU | ✅ |
-| [libreddit.winscloud.net](https://libreddit.winscloud.net) | 🇹🇭 TH | ✅ |
-| [libreddit.tiekoetter.com](https://libreddit.tiekoetter.com) | 🇩🇪 DE | |
-| [reddit.rtrace.io](https://reddit.rtrace.io) | 🇩🇪 DE | |
-| [libreddit.lunar.icu](https://libreddit.lunar.icu) | 🇩🇪 DE | ✅ |
-| [libreddit.privacydev.net](https://libreddit.privacydev.net) | 🇺🇸 US | |
-| [libreddit.notyourcomputer.net](https://libreddit.notyourcomputer.net) | 🇺🇸 US | |
-| [r.ahwx.org](https://r.ahwx.org) | 🇳🇱 NL | ✅ |
-| [bob.fr.to](https://bob.fr.to) | 🇺🇸 US | |
-| [reddit.beparanoid.de](https://reddit.beparanoid.de) | 🇨🇭 CH | |
-| [libreddit.dcs0.hu](https://libreddit.dcs0.hu) | 🇭🇺 HU | |
-| [reddit.dr460nf1r3.org](https://reddit.dr460nf1r3.org) | 🇩🇪 DE | ✅ |
-| [rd.jae.su](https://rd.jae.su) | 🇫🇮 FI | |
-| [libreddit.mha.fi](https://libreddit.mha.fi) | 🇫🇮 FI | |
-| [libreddit.foss.wtf](https://libreddit.foss.wtf) | 🇩🇪 DE | |
-| [libreddit.encrypted-data.xyz](https://libreddit.encrypted-data.xyz)| 🇫🇷 FR | ✅ |
-| [libreddit.eu.org](https://libreddit.eu.org)| 🇮🇪 IE | ✅ |
-| [l.opnxng.com](https://l.opnxng.com)| 🇸🇬 SG | |
-| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
-| [fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion](http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion) | 🇩🇪 DE | |
-| [kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion](http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion) | 🇳🇱 NL | |
-| [inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion](http://inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion) | 🇨🇭 CH | |
-| [liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion](http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion) | 🇩🇪 DE | |
-| [kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion](http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion) | 🇺🇸 US | |
-| [ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion](http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion) | 🇩🇪 DE | |
-| [ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion](http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion) | 🇺🇸 US | |
-| [libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion](http://libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion) | 🇺🇸 US | |
-| [libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion](http://libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion) | 🇪🇬 EG | |
-| [ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion](http://ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion) | 🇩🇪 DE | |
-| [lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion](http://lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion) | 🇨🇦 CA | |
-| [libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion](http://libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion) | 🇨🇦 CA | |
-| [reddit.prnoid54e44a4bduq5due64jkk7wcnkxcp5kv3juncm7veptjcqudgyd.onion](http://reddit.prnoid54e44a4bduq5due64jkk7wcnkxcp5kv3juncm7veptjcqudgyd.onion) | 🇨🇭 CH | |
-| [inz6tbezfwzexva6dize4cqraj2tjdhygxabmcgysccesvw2pybzhbyd.onion](http://inz6tbezfwzexva6dize4cqraj2tjdhygxabmcgysccesvw2pybzhbyd.onion) | 🇫🇮 FI | |
-| [libreddit.micohauwkjbyw5meacrb4ipicwvwg4xtzl7y7viv53kig2mdcsvwkyyd.onion](http://libreddit.micohauwkjbyw5meacrb4ipicwvwg4xtzl7y7viv53kig2mdcsvwkyyd.onion/)| 🇫🇮 FI | |
-| [lr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion](http://lr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion/) | 🇨🇦 CA | |
-
-A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare.com). The checkmark will not be listed for a site that uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.
-
----
-
-# About
-
-Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/spikecodes/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
-
-## Built with
-
-- [Rust](https://www.rust-lang.org/) - Programming language
-- [Hyper](https://github.com/hyperium/hyper) - HTTP server and client
-- [Askama](https://github.com/djc/askama) - Templating engine
-- [Rustls](https://github.com/ctz/rustls) - TLS library
-
-## Info
-Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram.
-
-Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/spikecodes/libreddit/issues).
-
-## How does it compare to Teddit?
-
-Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product.
-
-If you are looking to compare, the biggest differences I have noticed are:
-- Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective.
-- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation.
-
----
-
-# Comparison
-
-This section outlines how Libreddit compares to Reddit.
-
-## Speed
-
-Lasted tested Jan 17, 2021.
-
-Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Flibredd.it), [Reddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Fwww.reddit.com%2F)).
-
-| | Libreddit | Reddit |
-|------------------------|---------------|------------|
-| Requests | 20 | 70 |
-| Resource Size (card ui)| 1,224 KiB | 1,690 KiB |
-| Time to Interactive | **1.5 s** | **11.2 s** |
-
-## Privacy
-
-### Reddit
-
-**Logging:** According to Reddit's [privacy policy](https://www.redditinc.com/policies/privacy-policy), they "may [automatically] log information" including:
-- IP address
-- User-agent string
-- Browser type
-- Operating system
-- Referral URLs
-- Device information (e.g., device IDs)
-- Device settings
-- Pages visited
-- Links clicked
-- The requested URL
-- Search terms
-
-**Location:** The same privacy policy goes on to describe that location data may be collected through the use of:
-- GPS (consensual)
-- Bluetooth (consensual)
-- Content associated with a location (consensual)
-- Your IP Address
-
-**Cookies:** Reddit's [cookie notice](https://www.redditinc.com/policies/cookies) documents the array of cookies used by Reddit including/regarding:
-- Authentication
-- Functionality
-- Analytics and Performance
-- Advertising
-- Third-Party Cookies
-- Third-Party Site
-
-### Libreddit
-
-For transparency, I hope to describe all the ways Libreddit handles user privacy.
-
-**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs fetched to aid with troubleshooting.
-
-**DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.
-
-**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). These are not cross-site cookies and the cookies hold no personal data.
-
-**Hosting:** The official instances are hosted on [Replit](https://replit.com/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, self-hosting, using unofficial instances, and browsing through Tor are welcomed.
-
----
-
-# Installation
-
-## 1) Cargo
-
-Make sure Rust stable is installed along with `cargo`, Rust's package manager.
-
-```
-cargo install libreddit
-```
-
-## 2) Docker
-
-Deploy the [Docker image](https://hub.docker.com/r/spikecodes/libreddit) of Libreddit:
-```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
-```
-
-Deploy using a different port (in this case, port 80):
-```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
-```
-
-To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.
-
-To deploy on `armv7` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:armv7`.
-
-## 3) AUR
-
-For ArchLinux users, Libreddit is available from the AUR as [`libreddit-git`](https://aur.archlinux.org/packages/libreddit-git).
-
-```
-yay -S libreddit-git
-```
-
-## 4) GitHub Releases
-
-If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
-
-## 5) Replit/Heroku/Glitch
-
-**Note:** These are free hosting options but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
-
-<a href="https://repl.it/github/spikecodes/libreddit"><img src="https://repl.it/badge/github/spikecodes/libreddit" alt="Run on Repl.it" height="32" /></a>
-[](https://heroku.com/deploy?template=https://github.com/spikecodes/libreddit)
-[](https://glitch.com/edit/#!/remix/libreddit)
-
----
-
-# Deployment
-
-Once installed, deploy Libreddit to `0.0.0.0:8080` by running:
-
-```
-libreddit
-```
-
-## Change Default Settings
-
-Assign a default value for each setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
-
-| Name | Possible values | Default value |
-|-------------------------|-----------------------------------------------------------------------------------------------------|---------------|
-| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox"]` | `system` |
-| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` |
-| `LAYOUT` | `["card", "clean", "compact"]` | `card` |
-| `WIDE` | `["on", "off"]` | `off` |
-| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
-| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
-| `SHOW_NSFW` | `["on", "off"]` | `off` |
-| `USE_HLS` | `["on", "off"]` | `off` |
-| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` |
-| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` |
-
-### Examples
-
-```bash
-LIBREDDIT_DEFAULT_SHOW_NSFW=on libreddit
-```
-
-```bash
-LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r
-```
-
-## Proxying using NGINX
-
-**NOTE** If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/spikecodes/libreddit/issues/122#issuecomment-782226853), add
-```nginx
-proxy_http_version 1.1;
-```
-to your NGINX configuration file above your `proxy_pass` line.
-
-## systemd
-
-You can use the systemd service available in `contrib/libreddit.service`
-(install it on `/etc/systemd/system/libreddit.service`).
-
-That service can be optionally configured in terms of environment variables by
-creating a file in `/etc/libreddit.conf`. Use the `contrib/libreddit.conf` as a
-template. You can also add the `LIBREDDIT_DEFAULT__{X}` settings explained
-above.
-
-When "Proxying using NGINX" where the proxy is on the same machine, you should
-guarantee nginx waits for this service to start. Edit
-`/etc/systemd/system/libreddit.service.d/reverse-proxy.conf`:
-
-```conf
-[Unit]
-Before=nginx.service
-```
-
-## Building
-
-```
-git clone https://github.com/spikecodes/libreddit
-cd libreddit
-cargo run
-```
+
+We offer special thanks to **spikecodes** for his foundational work on the Libreddit project.
````
## app.json (23 changes)

```diff
@@ -1,5 +1,5 @@
 {
-  "name": "Libreddit",
+  "name": "Ferrit",
   "description": "Private front-end for Reddit",
   "buildpacks": [
     {
@@ -11,31 +11,34 @@
   ],
   "stack": "container",
   "env": {
-    "LIBREDDIT_DEFAULT_THEME": {
+    "FERRIT_DEFAULT_THEME": {
       "required": false
     },
-    "LIBREDDIT_DEFAULT_FRONT_PAGE": {
+    "FERRIT_DEFAULT_FRONT_PAGE": {
       "required": false
     },
-    "LIBREDDIT_DEFAULT_LAYOUT": {
+    "FERRIT_DEFAULT_LAYOUT": {
       "required": false
     },
-    "LIBREDDIT_DEFAULT_WIDE": {
+    "FERRIT_DEFAULT_WIDE": {
      "required": false
     },
-    "LIBREDDIT_DEFAULT_COMMENT_SORT": {
+    "FERRIT_DEFAULT_COMMENT_SORT": {
       "required": false
     },
-    "LIBREDDIT_DEFAULT_POST_SORT": {
+    "FERRIT_DEFAULT_POST_SORT": {
       "required": false
     },
-    "LIBREDDIT_DEFAULT_SHOW_NSFW": {
+    "FERRIT_DEFAULT_SHOW_NSFW": {
       "required": false
     },
-    "LIBREDDIT_USE_HLS": {
+    "FERRIT_DEFAULT_BLUR_NSFW": {
       "required": false
     },
-    "LIBREDDIT_HIDE_HLS_NOTIFICATION": {
+    "FERRIT_USE_HLS": {
       "required": false
     },
+    "FERRIT_HIDE_HLS_NOTIFICATION": {
+      "required": false
+    }
   }
 }
```
## contrib systemd unit

```diff
@@ -1,5 +1,5 @@
 [Unit]
-Description=libreddit daemon
+Description=Ferrit daemon
 After=network.service

 [Service]
@@ -8,8 +8,8 @@ DynamicUser=yes
 Environment=ADDRESS=0.0.0.0
 Environment=PORT=8080
 # Optional Override
-EnvironmentFile=-/etc/libreddit.conf
-ExecStart=/usr/bin/libreddit -a ${ADDRESS} -p ${PORT}
+EnvironmentFile=-/etc/ferrit.conf
+ExecStart=/usr/bin/ferrit -a ${ADDRESS} -p ${PORT}

 # Hardening
 DeviceAllow=
```
## docker-compose.yml

```diff
@@ -4,7 +4,7 @@ services:
   web:
     build: .
     restart: always
-    container_name: "libreddit"
+    container_name: "ferrit"
     ports:
       - 8080:8080
     healthcheck:
```
## scripts/gen-credits.sh (new executable file, 18 additions)

```diff
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+# This script generates the CREDITS file in the repository root.
+#
+# The generated file contains a list of all contributors to the Ferrit project,
+# including those who contributed to the original Libreddit project
+# (https://github.com/spikecodes/libreddit).
+#
+# We use git-log to surface the names and emails of all authors and committers,
+# and grep will filter any automated commits due to GitHub.
+
+set -o pipefail
+
+cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1
+git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' master \
+  | sort -t'<' -u -k1,1 -k2,2 \
+  | grep -Fv -- 'GitHub <noreply@github.com>' \
+  > CREDITS
```
## src/client.rs (137 changes)

```diff
@@ -1,12 +1,37 @@
 use cached::proc_macro::cached;
 use futures_lite::{future::Boxed, FutureExt};
-use hyper::{body::Buf, client, Body, Request, Response, Uri};
+use hyper::{body, body::Buf, client, header, Body, Method, Request, Response, Uri};
+use libflate::gzip;
 use percent_encoding::{percent_encode, CONTROLS};
 use serde_json::Value;
-use std::result::Result;
+use std::{io, result::Result};

+use crate::dbg_msg;
 use crate::server::RequestExt;

+const REDDIT_URL_BASE: &str = "https://www.reddit.com";
+
+/// Gets the canonical path for a resource on Reddit. On success, a
+/// `Some(Option<String>)` will be returned. If Reddit responds with
+/// anything other than an HTTP 3xx, a `None` will be returned. Any
+/// other error results in an `Err(String)`.
+#[cached(size = 1024, time = 600, result = true)]
+pub async fn canonical_path(path: String) -> Result<Option<String>, String> {
+	let res = reddit_head(path, true).await?;
+
+	if res.status() == 429 {
+		return Err("Too many requests.".to_string());
+	};
+
+	match res.headers().get(header::LOCATION) {
+		None => Ok(None),
+		Some(hdr) => match hdr.to_str() {
+			Ok(val) => Ok(Some(val.to_string().trim_start_matches(REDDIT_URL_BASE).to_string())),
+			Err(e) => Err(e.to_string()),
+		},
+	}
+}
+
 pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
 	let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());

@@ -27,7 +52,7 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
 	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();

 	// Build the hyper client from the HTTPS connector.
-	let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
+	let client: client::Client<_, Body> = client::Client::builder().build(https);

 	let mut builder = Request::get(uri);

@@ -62,20 +87,39 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
 	.map_err(|e| e.to_string())
 }

-fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+/// Makes a GET request to Reddit at `path`. By default, this will honor HTTP
+/// 3xx codes Reddit returns and will automatically redirect.
+fn reddit_get(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	request(&Method::GET, path, true, quarantine)
+}
+
+/// Makes a HEAD request to Reddit at `path`. This will not follow redirects.
+fn reddit_head(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	request(&Method::HEAD, path, false, quarantine)
+}
+
+/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect
+/// will recurse on the URL that Reddit provides in the Location HTTP header
+/// in its response.
+fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	// Build Reddit URL from path.
+	let url = format!("{}{}", REDDIT_URL_BASE, path);
+
 	// Prepare the HTTPS connector.
 	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().build();

 	// Construct the hyper client from the HTTPS connector.
-	let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
+	let client: client::Client<_, Body> = client::Client::builder().build(https);

-	// Build request
+	// Build request to Reddit. When making a GET, request gzip compression
+	// (Reddit doesn't do brotli yet)
 	let builder = Request::builder()
-		.method("GET")
+		.method(method)
 		.uri(&url)
-		.header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
+		.header("User-Agent", format!("web:ferrit:{}", env!("CARGO_PKG_VERSION")))
 		.header("Host", "www.reddit.com")
 		.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
+		.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
 		.header("Accept-Language", "en-US,en;q=0.5")
 		.header("Connection", "keep-alive")
 		.header("Cookie", if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%7D" } else { "" })

@@ -84,9 +128,16 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>>
 	async move {
 		match builder {
 			Ok(req) => match client.request(req).await {
-				Ok(response) => {
+				Ok(mut response) => {
+					// Reddit may respond with a 3xx. Decide whether or not to
+					// redirect based on caller params.
 					if response.status().to_string().starts_with('3') {
-						request(
+						if !redirect {
+							return Ok(response);
+						};
+
+						return request(
+							method,
 							response
 								.headers()
 								.get("Location")

@@ -96,14 +147,65 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>>
 							})
 							.unwrap_or_default()
 							.to_string(),
+							true,
 							quarantine,
 						)
-						.await
-					} else {
-						Ok(response)
-					}
+						.await;
+					};
+
+					match response.headers().get(header::CONTENT_ENCODING) {
+						// Content not compressed.
+						None => Ok(response),
+
+						// Content encoded (hopefully with gzip).
+						Some(hdr) => {
+							match hdr.to_str() {
+								Ok(val) => match val {
+									"gzip" => {}
+									"identity" => return Ok(response),
+									_ => return Err("Reddit response was encoded with an unsupported compressor".to_string()),
+								},
+								Err(_) => return Err("Reddit response was invalid".to_string()),
+							}
+
+							// We get here if the body is gzip-compressed.
+
+							// The body must be something that implements
+							// std::io::Read, hence the conversion to
+							// bytes::buf::Buf and then transformation into a
+							// Reader.
+							let mut decompressed: Vec<u8>;
+							{
+								let mut aggregated_body = match body::aggregate(response.body_mut()).await {
+									Ok(b) => b.reader(),
+									Err(e) => return Err(e.to_string()),
+								};
+
+								let mut decoder = match gzip::Decoder::new(&mut aggregated_body) {
+									Ok(decoder) => decoder,
+									Err(e) => return Err(e.to_string()),
+								};
+
+								decompressed = Vec::<u8>::new();
+								match io::copy(&mut decoder, &mut decompressed) {
+									Ok(_) => {}
+									Err(e) => return Err(e.to_string()),
+								};
+							}
+
+							response.headers_mut().remove(header::CONTENT_ENCODING);
+							response.headers_mut().insert(header::CONTENT_LENGTH, decompressed.len().into());
+							*(response.body_mut()) = Body::from(decompressed);
+
+							Ok(response)
+						}
+					}
 				}
-				Err(e) => Err(e.to_string()),
+				Err(e) => {
+					dbg_msg!("{} {}: {}", method, path, e);
+
+					Err(e.to_string())
+				}
 			},
 			Err(_) => Err("Post url contains non-ASCII characters".to_string()),
 		}

@@ -114,9 +216,6 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>>
 // Make a request to a Reddit API and parse the JSON response
 #[cached(size = 100, time = 30, result = true)]
 pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
-	// Build Reddit url from path
-	let url = format!("https://www.reddit.com{}", path);
-
 	// Closure to quickly build errors
 	let err = |msg: &str, e: String| -> Result<Value, String> {
 		// eprintln!("{} - {}: {}", url, msg, e);

@@ -124,7 +223,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 	};

 	// Fetch the url...
-	match request(url.clone(), quarantine).await {
+	match reddit_get(path.clone(), quarantine).await {
 		Ok(response) => {
 			let status = response.status();

@@ -142,7 +241,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 				.as_str()
 				.unwrap_or_else(|| {
 					json["message"].as_str().unwrap_or_else(|| {
-						eprintln!("{} - Error parsing reddit error", url);
+						eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path);
 						"Error parsing reddit error"
 					})
 				})
```
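The most consequential addition above is `canonical_path`: it resolves Reddit's redirecting URLs (share links, for example) by issuing a HEAD request and reading the `Location` header, never fetching the full body. A minimal sketch of how a caller might use it (the surrounding function and its error handling are hypothetical; only `canonical_path` itself comes from the diff):

```rust
// Illustrative caller for the `canonical_path` helper added above.
// Everything except `canonical_path` is a hypothetical sketch.
async fn resolve_share_link(path: String) -> Result<String, String> {
	match canonical_path(path).await? {
		// Reddit answered with a 3xx; the Location header, trimmed of
		// "https://www.reddit.com", is the on-instance path to serve.
		Some(canonical) => Ok(canonical),
		// No redirect: the path is already canonical (or unknown to Reddit).
		None => Err("not a redirecting Reddit path".to_string()),
	}
}
```

Because the function is wrapped in `#[cached(size = 1024, time = 600, result = true)]`, repeated lookups of the same path within ten minutes are served from the cache rather than hitting Reddit again.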
## src/config.rs (new file, 142 additions)

```diff
@@ -0,0 +1,142 @@
+use once_cell::sync::Lazy;
+use std::env::var;
+
+// Waiting for https://github.com/rust-lang/rust/issues/74465 to land, so we
+// can reduce reliance on once_cell.
+//
+// This is the local static that is initialized at runtime (technically at
+// first request) and contains the instance settings.
+static CONFIG: Lazy<Config> = Lazy::new(Config::load);
+
+/// Stores the configuration parsed from the environment variables and the
+/// config file. `Config::Default()` contains None for each setting.
+#[derive(Default, serde::Deserialize)]
+pub struct Config {
+	#[serde(rename = "FERRIT_SFW_ONLY")]
+	sfw_only: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_THEME")]
+	default_theme: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_FRONT_PAGE")]
+	default_front_page: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_LAYOUT")]
+	default_layout: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_WIDE")]
+	default_wide: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_COMMENT_SORT")]
+	default_comment_sort: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_POST_SORT")]
+	default_post_sort: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_SHOW_NSFW")]
+	default_show_nsfw: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_BLUR_NSFW")]
+	default_blur_nsfw: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_USE_HLS")]
+	default_use_hls: Option<String>,
+
+	#[serde(rename = "FERRIT_DEFAULT_HIDE_HLS_NOTIFICATION")]
+	default_hide_hls_notification: Option<String>,
+}
+
+impl Config {
+	/// Load the configuration from the environment variables and the config file.
+	/// In the case that there are no environment variables set and there is no
+	/// config file, this function returns a Config that contains all None values.
+	pub fn load() -> Self {
+		// Read from ferrit.toml config file. If for any reason, it fails, the
+		// default `Config` is used (all None values)
+		let config: Config = toml::from_str(&std::fs::read_to_string("ferrit.toml").unwrap_or_default()).unwrap_or_default();
+		// This function defines the order of preference - first check for
+		// environment variables with "FERRIT", then check for environment variables
+		// with "LIBREDDIT" for reverse compatibility, then check the config, then if
+		// both are `None`, return a `None` via the `map_or_else` function
+		let parse = |key: &str| -> Option<String> {
+			var(key)
+				.ok()
+				.map_or_else(|| var(key.replace("FERRIT", "LIBREDDIT")).ok(), Some)
+				.map_or_else(|| get_setting_from_config(key, &config), Some)
+		};
+		Self {
+			sfw_only: parse("FERRIT_SFW_ONLY"),
+			default_theme: parse("FERRIT_DEFAULT_THEME"),
+			default_front_page: parse("FERRIT_DEFAULT_FRONT_PAGE"),
+			default_layout: parse("FERRIT_DEFAULT_LAYOUT"),
+			default_post_sort: parse("FERRIT_DEFAULT_POST_SORT"),
+			default_wide: parse("FERRIT_DEFAULT_WIDE"),
+			default_comment_sort: parse("FERRIT_DEFAULT_COMMENT_SORT"),
+			default_show_nsfw: parse("FERRIT_DEFAULT_SHOW_NSFW"),
+			default_blur_nsfw: parse("FERRIT_DEFAULT_BLUR_NSFW"),
+			default_use_hls: parse("FERRIT_DEFAULT_USE_HLS"),
+			default_hide_hls_notification: parse("FERRIT_DEFAULT_HIDE_HLS"),
+		}
+	}
+}
+
+fn get_setting_from_config(name: &str, config: &Config) -> Option<String> {
+	match name {
+		"FERRIT_SFW_ONLY" => config.sfw_only.clone(),
+		"FERRIT_DEFAULT_THEME" => config.default_theme.clone(),
+		"FERRIT_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(),
+		"FERRIT_DEFAULT_LAYOUT" => config.default_layout.clone(),
+		"FERRIT_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(),
+		"FERRIT_DEFAULT_POST_SORT" => config.default_post_sort.clone(),
+		"FERRIT_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(),
+		"FERRIT_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(),
+		"FERRIT_DEFAULT_USE_HLS" => config.default_use_hls.clone(),
+		"FERRIT_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(),
+		"FERRIT_DEFAULT_WIDE" => config.default_wide.clone(),
+		_ => None,
+	}
+}
+
+/// Retrieves setting from environment variable or config file.
+pub(crate) fn get_setting(name: &str) -> Option<String> {
+	get_setting_from_config(name, &CONFIG)
+}
+
+#[cfg(test)]
+use sealed_test::prelude::*;
+
+#[test]
+#[sealed_test(env = [("FERRIT_SFW_ONLY", "1")])]
+fn test_env_var() {
+	assert!(crate::utils::sfw_only())
+}
+
+#[test]
+#[sealed_test(env = [("FERRIT_DEFAULT_COMMENT_SORT", "top"), ("LIBREDDIT_DEFAULT_COMMENT_SORT", "best")])]
+fn test_env_precedence() {
+	assert_eq!(crate::config::get_setting("FERRIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
+}
+
+#[test]
+#[sealed_test]
+fn test_config() {
+	let config_to_write = r#"FERRIT_DEFAULT_COMMENT_SORT = "best""#;
+	std::fs::write("ferrit.toml", config_to_write).unwrap();
+	assert_eq!(crate::config::get_setting("FERRIT_DEFAULT_COMMENT_SORT"), Some("best".into()));
+}
+
+#[test]
+#[sealed_test(env = [("FERRIT_DEFAULT_COMMENT_SORT", "top")])]
+fn test_env_config_precedence() {
+	let config_to_write = r#"FERRIT_DEFAULT_COMMENT_SORT = "best""#;
+	std::fs::write("ferrit.toml", config_to_write).unwrap();
+	assert_eq!(crate::config::get_setting("FERRIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
+}
+
+#[test]
+#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
+fn test_alt_env_config_precedence() {
+	let config_to_write = r#"FERRIT_DEFAULT_COMMENT_SORT = "best""#;
+	std::fs::write("Ferrit.toml", config_to_write).unwrap();
+	assert_eq!(crate::config::get_setting("FERRIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
+}
```
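The part of `Config::load` worth internalizing when deploying is the lookup order: a `FERRIT_*` environment variable wins, then the legacy `LIBREDDIT_*` variable, then the value from `ferrit.toml`. The nested `map_or_else` calls above implement exactly that chain; below is a standalone sketch of the same order using plain `Option::or_else` (an illustration, not the crate's API; the config-file lookup is stubbed as a closure):

```rust
use std::env;

// Standalone illustration of the precedence implemented by `Config::load`:
// FERRIT_* env var, then legacy LIBREDDIT_* env var, then the config file.
fn lookup(key: &str, config_file: impl Fn(&str) -> Option<String>) -> Option<String> {
	env::var(key)
		.ok()
		.or_else(|| env::var(key.replace("FERRIT", "LIBREDDIT")).ok())
		.or_else(|| config_file(key))
}

fn main() {
	// Stubbed config file containing FERRIT_DEFAULT_THEME = "light".
	let config_file = |k: &str| (k == "FERRIT_DEFAULT_THEME").then(|| "light".to_string());

	// With FERRIT_DEFAULT_THEME unset, a legacy LIBREDDIT_DEFAULT_THEME=dark
	// in the environment still wins over the config file entry.
	env::set_var("LIBREDDIT_DEFAULT_THEME", "dark");
	assert_eq!(lookup("FERRIT_DEFAULT_THEME", config_file), Some("dark".to_string()));
}
```

This is the same ordering that the `test_env_precedence` and `test_alt_env_config_precedence` tests above assert against the real implementation.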

236
src/duplicates.rs
Normal file

@ -0,0 +1,236 @@
// Handler for post duplicates.

use crate::client::json;
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, setting, template, Post, Preferences};

use askama::Template;
use hyper::{Body, Request, Response};
use serde_json::Value;
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::vec::Vec;

/// DuplicatesParams contains the parameters in the URL.
struct DuplicatesParams {
	before: String,
	after: String,
	sort: String,
}

/// DuplicatesTemplate defines an Askama template for rendering duplicate
/// posts.
#[derive(Template)]
#[template(path = "duplicates.html")]
struct DuplicatesTemplate {
	/// params contains the relevant request parameters.
	params: DuplicatesParams,

	/// post is the post whose ID is specified in the request URL. Note that
	/// this is not necessarily the "original" post.
	post: Post,

	/// duplicates is the list of posts that, per Reddit, are duplicates of
	/// Post above.
	duplicates: Vec<Post>,

	/// prefs are the user preferences.
	prefs: Preferences,

	/// url is the request URL.
	url: String,

	/// num_posts_filtered counts how many posts were filtered from the
	/// duplicates list.
	num_posts_filtered: u64,

	/// all_posts_filtered is true if every duplicate was filtered. This is an
	/// edge case but can still happen.
	all_posts_filtered: bool,
}

/// Make the GET request to Reddit. It assumes `req` is the appropriate Reddit
/// REST endpoint for enumerating post duplicates.
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
	let path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
	let sub = req.param("sub").unwrap_or_default();
	let quarantined = can_access_quarantine(&req, &sub);

	// Log the request in debugging mode
	#[cfg(debug_assertions)]
	dbg!(req.param("id").unwrap_or_default());

	// Send the GET, and await JSON.
	match json(path, quarantined).await {
		// Process response JSON.
		Ok(response) => {
			let post = parse_post(&response[0]["data"]["children"][0]).await;

			// Return the landing page if Reddit deems this post NSFW but the
			// user has disabled the display of NSFW content, or if the
			// instance is SFW-only.
			if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
				return Ok(nsfw_landing(req).await.unwrap_or_default());
			}

			let filters = get_filters(&req);
			let (duplicates, num_posts_filtered, all_posts_filtered) = parse_duplicates(&response[1], &filters).await;

			// These are the values for the "before=", "after=", and "sort="
			// query params, respectively.
			let mut before: String = String::new();
			let mut after: String = String::new();
			let mut sort: String = String::new();

			// FIXME: We have to perform a kludge to work around a Reddit API
			// bug.
			//
			// The JSON object in "data" will never contain a "before" value, so
			// it is impossible to use it to determine our position in a
			// listing. We'll make do by getting the ID of the first post in
			// the listing, setting that as our "before" value, and asking Reddit
			// to give us a batch of duplicate posts up to that post.
			//
			// Likewise, if we provide a "before" request in the GET, the
			// result won't have an "after" in the JSON, in addition to missing
			// the "before." So we will have to use the final post in the list
			// of duplicates.
			//
			// That being said, we'll also need to capture the value of the
			// "sort=" parameter as well, so we will need to inspect the
			// query key-value pairs anyway.
			let l = duplicates.len();
			if l > 0 {
				// This gets set to true if "before=" is one of the GET params.
				let mut have_before: bool = false;

				// This gets set to true if "after=" is one of the GET params.
				let mut have_after: bool = false;

				// Inspect the query key-value pairs. We will need to record
				// the value of "sort=", along with checking to see if either
				// one of "before=" or "after=" are given.
				//
				// If we're in the middle of the batch (evidenced by the
				// presence of a "before=" or "after=" parameter in the GET),
				// then use the first post as the "before" reference.
				//
				// We'll do this iteratively. Better than with .map_or()
				// since a closure will continue to operate on remaining
				// elements even after we've determined one of "before=" or
				// "after=" (or both) are in the GET request.
				//
				// In practice, there should only ever be one of "before=" or
				// "after=" and never both.
				let query_str = req.uri().query().unwrap_or_default().to_string();

				if !query_str.is_empty() {
					for param in query_str.split('&') {
						let kv: Vec<&str> = param.split('=').collect();
						if kv.len() < 2 {
							// Reject invalid query parameter.
							continue;
						}

						let key: &str = kv[0];
						match key {
							"before" => have_before = true,
							"after" => have_after = true,
							"sort" => {
								let val: &str = kv[1];
								match val {
									"new" | "num_comments" => sort = val.to_string(),
									_ => {}
								}
							}
							_ => {}
						}
					}
				}

				if have_after {
					before = "t3_".to_owned();
					before.push_str(&duplicates[0].id);
				}

				// Address a potentially missing "after". If "before=" is in the
				// GET, then "after" will be null in the JSON (see FIXME
				// above).
				if have_before {
					// The next batch will need to start from one after the
					// last post in the current batch.
					after = "t3_".to_owned();
					after.push_str(&duplicates[l - 1].id);

					// Here is where things get terrible. Notice that we
					// haven't set `before`. In order to do so, we will
					// need to know if there is a batch that exists before
					// this one, and doing so requires actually fetching the
					// previous batch. In other words, we have to do yet one
					// more GET to Reddit. There is no other way to determine
					// whether or not to define `before`.
					//
					// We'll mitigate that by requesting at most one duplicate.
					let new_path: String = format!(
						"{}.json?before=t3_{}&sort={}&limit=1&raw_json=1",
						req.uri().path(),
						&duplicates[0].id,
						if sort.is_empty() { "num_comments".to_string() } else { sort.clone() }
					);
					match json(new_path, true).await {
						Ok(response) => {
							if !response[1]["data"]["children"].as_array().unwrap_or(&Vec::new()).is_empty() {
								before = "t3_".to_owned();
								before.push_str(&duplicates[0].id);
							}
						}
						Err(msg) => {
							// Abort entirely if we couldn't get the previous
							// batch.
							return error(req, msg).await;
						}
					}
				} else {
					after = response[1]["data"]["after"].as_str().unwrap_or_default().to_string();
				}
			}
			let url = req.uri().to_string();

			template(DuplicatesTemplate {
				params: DuplicatesParams { before, after, sort },
				post,
				duplicates,
				prefs: Preferences::new(req),
				url,
				num_posts_filtered,
				all_posts_filtered,
			})
		}

		// Process error.
		Err(msg) => {
			if msg == "quarantined" {
				let sub = req.param("sub").unwrap_or_default();
				quarantine(req, sub)
			} else {
				error(req, msg).await
			}
		}
	}
}

// DUPLICATES
async fn parse_duplicates(json: &serde_json::Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
	let post_duplicates: &Vec<Value> = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned);
	let mut duplicates: Vec<Post> = Vec::new();

	// Process each post and place them in the Vec<Post>.
	for val in post_duplicates.iter() {
		let post: Post = parse_post(val).await;
		duplicates.push(post);
	}

	let (num_posts_filtered, all_posts_filtered) = filter_posts(&mut duplicates, filters);
	(duplicates, num_posts_filtered, all_posts_filtered)
}
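
The FIXME above reduces to a simple rule: when paging forward, the first post in the batch becomes the `before` token; when paging backward, the last post becomes the `after` token, and an extra one-item probe request decides whether `before` should be set at all. The token derivation on its own, as a standalone sketch (a hypothetical helper, not part of the diff):

```rust
// Minimal sketch of the pagination kludge described in the FIXME: Reddit's
// duplicates listing omits "before" (and, when "before=" was sent, "after"),
// so both tokens are rebuilt from the posts themselves.
fn paging_tokens(ids: &[String], have_before: bool, have_after: bool) -> (String, String) {
	let (mut before, mut after) = (String::new(), String::new());
	if let (Some(first), Some(last)) = (ids.first(), ids.last()) {
		if have_after {
			// Link back to the previous batch via the first post.
			before = format!("t3_{}", first);
		}
		if have_before {
			// Link forward to the next batch via the last post.
			after = format!("t3_{}", last);
		}
	}
	(before, after)
}
```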
51
src/main.rs

@ -3,6 +3,8 @@
#![allow(clippy::cmp_owned)]

// Reference local files
mod config;
mod duplicates;
mod post;
mod search;
mod settings;

@ -17,7 +19,7 @@ use futures_lite::FutureExt;
use hyper::{header::HeaderValue, Body, Request, Response};

mod client;
use client::proxy;
use client::{canonical_path, proxy};
use server::RequestExt;
use utils::{error, redirect, ThemeAssets};

@ -104,7 +106,7 @@ async fn style() -> Result<Response<Body>, String> {

#[tokio::main]
async fn main() {
	let matches = Command::new("Libreddit")
	let matches = Command::new("Ferrit")
		.version(env!("CARGO_PKG_VERSION"))
		.about("Private front-end for Reddit written in Rust ")
		.arg(

@ -149,7 +151,7 @@ async fn main() {
	let listener = [address, ":", &port].concat();

	println!("Starting Libreddit...");
	println!("Starting Ferrit...");

	// Begin constructing a server
	let mut app = server::Server::new();

@ -188,7 +190,7 @@ async fn main() {
		.at("/hls.min.js")
		.get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed());

	// Proxy media through Libreddit
	// Proxy media through Ferrit
	app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
	app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed());
	app.at("/img/*path").get(|r| proxy(r, "https://i.redd.it/{path}").boxed());

@ -238,6 +240,16 @@ async fn main() {
	app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
	app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
	app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
	app.at("/comments/:id").get(|r| post::item(r).boxed());
	app.at("/comments/:id/comments").get(|r| post::item(r).boxed());
	app.at("/comments/:id/comments/:comment_id").get(|r| post::item(r).boxed());
	app.at("/comments/:id/:title").get(|r| post::item(r).boxed());
	app.at("/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

	app.at("/r/:sub/duplicates/:id").get(|r| duplicates::item(r).boxed());
	app.at("/r/:sub/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());
	app.at("/duplicates/:id").get(|r| duplicates::item(r).boxed());
	app.at("/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());

	app.at("/r/:sub/search").get(|r| search::find(r).boxed());

@ -254,9 +266,6 @@ async fn main() {
	app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());

	// Comments handler
	app.at("/comments/:id").get(|r| post::item(r).boxed());

	// Front page
	app.at("/").get(|r| subreddit::community(r).boxed());

@ -274,19 +283,31 @@ async fn main() {
	// Handle about pages
	app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());

	app.at("/:id").get(|req: Request<Body>| match req.param("id").as_deref() {
		// Sort front page
		Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).boxed(),
		// Short link for post
		Some(id) if id.len() > 4 && id.len() < 7 => post::item(req).boxed(),
		// Error message for unknown pages
		_ => error(req, "Nothing here".to_string()).boxed(),
	app.at("/:id").get(|req: Request<Body>| {
		Box::pin(async move {
			match req.param("id").as_deref() {
				// Sort front page
				Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,

				// Short link for post
				Some(id) if (5..7).contains(&id.len()) => match canonical_path(format!("/{}", id)).await {
					Ok(path_opt) => match path_opt {
						Some(path) => Ok(redirect(path)),
						None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
					},
					Err(e) => error(req, e).await,
				},

				// Error message for unknown pages
				_ => error(req, "Nothing here".to_string()).await,
			}
		})
	});

	// Default service in case no routes match
	app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed());

	println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener);
	println!("Running Ferrit v{} on {}!", env!("CARGO_PKG_VERSION"), listener);

	let server = app.listen(listener);

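The `/:id` route above is the one handler that can no longer be a plain `.boxed()` match arm, because the short-link branch must `await` `canonical_path` before choosing between a redirect and an error page. The shape of the rewrite, reduced to a self-contained sketch (names and return types are illustrative):

```rust
use futures_lite::future::Boxed;

// Sketch of the handler shape used above: the whole async block is pinned
// and boxed up front so that every branch can freely await inside it.
fn route_handler_sketch(id: Option<String>) -> Boxed<Result<String, String>> {
	Box::pin(async move {
		match id.as_deref() {
			// Reddit short links are 5 or 6 characters long.
			Some(id) if (5..7).contains(&id.len()) => Ok(format!("redirect for {}", id)),
			_ => Err("Nothing here".to_string()),
		}
	})
}
```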
104
src/post.rs

@ -3,7 +3,7 @@ use crate::client::json;
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{
	error, format_num, format_url, get_filters, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
	error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences,
};
use hyper::{Body, Request, Response};

@ -54,7 +54,15 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
		// Otherwise, grab the JSON output from the request
		Ok(response) => {
			// Parse the JSON into Post and Comment structs
			let post = parse_post(&response[0]).await;
			let post = parse_post(&response[0]["data"]["children"][0]).await;

			// Return the landing page if Reddit deems this post NSFW but the
			// user has disabled the display of NSFW content, or if the
			// instance is SFW-only.
			if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
				return Ok(nsfw_landing(req).await.unwrap_or_default());
			}

			let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req));
			let url = req.uri().to_string();

@ -80,92 +88,6 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
	}
}

// POSTS
async fn parse_post(json: &serde_json::Value) -> Post {
	// Retrieve post (as opposed to comments) from JSON
	let post: &serde_json::Value = &json["data"]["children"][0];

	// Grab UTC time as unix timestamp
	let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
	// Parse post score and upvote ratio
	let score = post["data"]["score"].as_i64().unwrap_or_default();
	let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;

	// Determine the type of media along with the media URL
	let (post_type, media, gallery) = Media::parse(&post["data"]).await;

	let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);

	let permalink = val(post, "permalink");

	let body = if val(post, "removed_by_category") == "moderator" {
		format!(
			"<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}\">view removed post</a></p></div>",
			permalink
		)
	} else {
		rewrite_urls(&val(post, "selftext_html"))
	};

	// Build a post using data parsed from Reddit post API
	Post {
		id: val(post, "id"),
		title: val(post, "title"),
		community: val(post, "subreddit"),
		body,
		author: Author {
			name: val(post, "author"),
			flair: Flair {
				flair_parts: FlairPart::parse(
					post["data"]["author_flair_type"].as_str().unwrap_or_default(),
					post["data"]["author_flair_richtext"].as_array(),
					post["data"]["author_flair_text"].as_str(),
				),
				text: val(post, "link_flair_text"),
				background_color: val(post, "author_flair_background_color"),
				foreground_color: val(post, "author_flair_text_color"),
			},
			distinguished: val(post, "distinguished"),
		},
		permalink,
		score: format_num(score),
		upvote_ratio: ratio as i64,
		post_type,
		media,
		thumbnail: Media {
			url: format_url(val(post, "thumbnail").as_str()),
			alt_url: String::new(),
			width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
			height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
			poster: "".to_string(),
		},
		flair: Flair {
			flair_parts: FlairPart::parse(
				post["data"]["link_flair_type"].as_str().unwrap_or_default(),
				post["data"]["link_flair_richtext"].as_array(),
				post["data"]["link_flair_text"].as_str(),
			),
			text: val(post, "link_flair_text"),
			background_color: val(post, "link_flair_background_color"),
			foreground_color: if val(post, "link_flair_text_color") == "dark" {
				"black".to_string()
			} else {
				"white".to_string()
			},
		},
		flags: Flags {
			nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
			stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
		},
		domain: val(post, "domain"),
		rel_time,
		created,
		comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
		gallery,
		awards,
	}
}

// COMMENTS
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>) -> Vec<Comment> {
	// Parse the comment JSON into a Vector of Comments

@ -200,9 +122,9 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
	let id = val(&comment, "id");
	let highlighted = id == highlighted_comment;

	let body = if val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]" {
	let body = if (val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]") || val(&comment, "body") == "[deleted]" {
		format!(
			"<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}{}\">view removed comment</a></p></div>",
			"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}{}\">view removed comment</a></p></div>",
			post_link, id
		)
	} else {

@ -226,7 +148,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
	let is_filtered = filters.contains(&["u_", author.name.as_str()].concat());

	// Many subreddits have a default comment posted about the sub's rules etc.
	// Many libreddit users do not wish to see this kind of comment by default.
	// Many Ferrit users do not wish to see this kind of comment by default.
	// Reddit does not tell us which users are "bots", so a good heuristic is to
	// collapse stickied moderator comments.
	let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator";

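The NSFW guard introduced here is repeated verbatim in post.rs, duplicates.rs, subreddit.rs, and user.rs. Pulled out as a hypothetical predicate (the diff inlines the expression at each call site rather than factoring it out), the shared logic is just:

```rust
// Hypothetical shared predicate; each handler in the diff inlines this
// expression instead of calling a helper.
fn needs_nsfw_landing(resource_is_nsfw: bool, show_nsfw_pref: &str, sfw_only: bool) -> bool {
	resource_is_nsfw && (show_nsfw_pref != "on" || sfw_only)
}
```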
src/search.rs

@ -1,5 +1,5 @@
// CRATES
use crate::utils::{catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
use crate::{
	client::json,
	subreddit::{can_access_quarantine, quarantine},

@ -42,11 +42,18 @@ struct SearchTemplate {
	/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
	/// and all fetched posts being filtered).
	all_posts_filtered: bool,
	/// Whether all posts were hidden because they are NSFW (and the user has disabled showing NSFW posts)
	all_posts_hidden_nsfw: bool,
}

// SERVICES
pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
	let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
	// This ensures that during a search, no NSFW posts are fetched at all
	let nsfw_results = if setting(&req, "show_nsfw") == "on" && !utils::sfw_only() {
		"&include_over_18=on"
	} else {
		""
	};
	let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
	let query = param(&path, "q").unwrap_or_default();

@ -100,12 +107,13 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
			url,
			is_filtered: true,
			all_posts_filtered: false,
			all_posts_hidden_nsfw: false,
		})
	} else {
		match Post::fetch(&path, quarantined).await {
			Ok((mut posts, after)) => {
				let all_posts_filtered = filter_posts(&mut posts, &filters);

				let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
				let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
				template(SearchTemplate {
					posts,
					subreddits,

@ -123,6 +131,7 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
					url,
					is_filtered: false,
					all_posts_filtered,
					all_posts_hidden_nsfw,
				})
			}
			Err(msg) => {

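One subtlety worth noting about the `all_posts_hidden_nsfw` computation above: `Iterator::all` is vacuously true for an empty list, so the flag only means "everything was hidden for NSFW reasons" when read alongside the other emptiness flags. The computation in isolation:

```rust
// Sketch of the flag computed above, on a plain slice of NSFW flags.
// Note: an empty slice makes .all() return true, so callers must also
// consult all_posts_filtered / is_filtered before drawing conclusions.
fn all_hidden_nsfw(nsfw_flags: &[bool], show_nsfw_pref: &str) -> bool {
	nsfw_flags.iter().all(|&nsfw| nsfw) && show_nsfw_pref != "on"
}
```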
556
src/server.rs

@ -1,17 +1,79 @@
use brotli::enc::{BrotliCompress, BrotliEncoderParams};
use cookie::Cookie;
use core::f64;
use futures_lite::{future::Boxed, Future, FutureExt};
use hyper::{
	header::HeaderValue,
	body,
	body::{Buf, HttpBody},
	header,
	service::{make_service_fn, service_fn},
	HeaderMap,
};
use hyper::{Body, Method, Request, Response, Server as HyperServer};
use libflate::gzip;
use route_recognizer::{Params, Router};
use std::{pin::Pin, result::Result};
use std::{
	cmp::Ordering,
	io,
	pin::Pin,
	result::Result,
	str::{from_utf8, Split},
	string::ToString,
};
use time::Duration;

use crate::dbg_msg;

type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;

/// Compressors for the response Body, in ascending order of preference.
#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
enum CompressionType {
	Passthrough,
	Gzip,
	Brotli,
}

/// All browsers support gzip, so if we are given `Accept-Encoding: *`, deliver
/// gzipped content.
///
/// Brotli would be nice universally, but Safari (iOS, iPhone, macOS) reportedly
/// doesn't support it yet.
const DEFAULT_COMPRESSOR: CompressionType = CompressionType::Gzip;

impl CompressionType {
	fn parse(s: &str) -> Option<CompressionType> {
		let c = match s {
			// Compressors we support.
			"gzip" => CompressionType::Gzip,
			"br" => CompressionType::Brotli,

			// The wildcard means that we can choose whatever
			// compression we prefer. In this case, use the
			// default.
			"*" => DEFAULT_COMPRESSOR,

			// Compressor not supported.
			_ => return None,
		};

		Some(c)
	}
}

impl ToString for CompressionType {
	fn to_string(&self) -> String {
		let s: &str = match *self {
			CompressionType::Gzip => "gzip",
			CompressionType::Brotli => "br",

			_ => "",
		};

		s.to_string()
	}
}

pub struct Route<'a> {
	router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
	path: String,

@ -97,7 +159,7 @@ impl ResponseExt for Response<Body> {
	}

	fn insert_cookie(&mut self, cookie: Cookie) {
		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
			self.headers_mut().append("Set-Cookie", val);
		}
	}

@ -106,7 +168,7 @@ impl ResponseExt for Response<Body> {
		let mut cookie = Cookie::named(name);
		cookie.set_path("/");
		cookie.set_max_age(Duration::seconds(1));
		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
			self.headers_mut().append("Set-Cookie", val);
		}
	}

@ -131,7 +193,7 @@ impl Route<'_> {

impl Server {
	pub fn new() -> Self {
		Server {
		Self {
			default_headers: HeaderMap::new(),
			router: Router::new(),
		}

@ -156,10 +218,11 @@ impl Server {
			// let shared_router = router.clone();
			async move {
				Ok::<_, String>(service_fn(move |req: Request<Body>| {
					let headers = default_headers.clone();
					let req_headers = req.headers().clone();
					let def_headers = default_headers.clone();

					// Remove double slashes and decode encoded slashes
					let mut path = req.uri().path().replace("//", "/").replace("%2F","/");
					let mut path = req.uri().path().replace("//", "/").replace("%2F", "/");

					// Remove trailing slashes
					if path != "/" && path.ends_with('/') {

@ -176,26 +239,20 @@ impl Server {
							// Run the route's function
							let func = (found.handler().to_owned().to_owned())(parammed);
							async move {
								let res: Result<Response<Body>, String> = func.await;
								// Add default headers to response
								res.map(|mut response| {
									response.headers_mut().extend(headers);
									response
								})
								match func.await {
									Ok(mut res) => {
										res.headers_mut().extend(def_headers);
										let _ = compress_response(req_headers, &mut res).await;

										Ok(res)
									}
									Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
								}
							}
							.boxed()
						}
						// If there was a routing error
						Err(e) => async move {
							// Return a 404 error
							let res: Result<Response<Body>, String> = Ok(Response::builder().status(404).body(e.into()).unwrap_or_default());
							// Add default headers to response
							res.map(|mut response| {
								response.headers_mut().extend(headers);
								response
							})
						}
						.boxed(),
						Err(e) => async move { new_boilerplate(def_headers, req_headers, 404, e.into()).await }.boxed(),
					}
				}))
			}

@ -213,3 +270,456 @@ impl Server {
		server.boxed()
	}
}

/// Create a boilerplate Response for error conditions. This response will be
/// compressed if requested by the client.
async fn new_boilerplate(
	default_headers: HeaderMap<header::HeaderValue>,
	req_headers: HeaderMap<header::HeaderValue>,
	status: u16,
	body: Body,
) -> Result<Response<Body>, String> {
	match Response::builder().status(status).body(body) {
		Ok(mut res) => {
			let _ = compress_response(req_headers, &mut res).await;

			res.headers_mut().extend(default_headers.clone());
			Ok(res)
		}
		Err(msg) => Err(msg.to_string()),
	}
}

/// Determines the desired compressor based on the Accept-Encoding header.
///
/// This function will honor the [q-value](https://developer.mozilla.org/en-US/docs/Glossary/Quality_values)
/// for each compressor. The q-value is an optional parameter, a decimal value
/// on \[0..1\], to order the compressors by preference. An Accept-Encoding value
/// with no q-values is also accepted.
///
/// Here are [examples](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#examples)
/// of valid Accept-Encoding headers.
///
/// ```http
/// Accept-Encoding: gzip
/// Accept-Encoding: gzip, compress, br
/// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
/// ```
fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
	if accept_encoding.is_empty() {
		return None;
	};

	// Keep track of the compressor candidate based on both the client's
	// preference and our own. Concrete examples:
	//
	// 1. "Accept-Encoding: gzip, br" => assuming we like brotli more than
	//    gzip, and the browser supports brotli, we choose brotli
	//
	// 2. "Accept-Encoding: gzip;q=0.8, br;q=0.3" => the client has stated a
	//    preference for gzip over brotli, so we choose gzip
	//
	// To do this, we need to define a struct which contains the requested
	// compressor (abstracted as a CompressionType enum) and the q-value. If
	// no q-value is defined for the compressor, we assume one of 1.0. We
	// first compare compressor candidates by comparing q-values, and then
	// CompressionTypes. We keep track of whatever is the greatest per our
	// ordering.

	struct CompressorCandidate {
		alg: CompressionType,
		q: f64,
	}

	impl Ord for CompressorCandidate {
		fn cmp(&self, other: &Self) -> Ordering {
			// Compare q-values. Break ties with the
			// CompressionType values.

			match self.q.total_cmp(&other.q) {
				Ordering::Equal => self.alg.cmp(&other.alg),
				ord => ord,
			}
		}
	}

	impl PartialOrd for CompressorCandidate {
		fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
			// Guard against NaN, both on our end and on the other.
			if self.q.is_nan() || other.q.is_nan() {
				return None;
			};

			// f64 and CompressionType are ordered, except in the case
			// where the f64 is NaN (which we checked against), so we
			// can safely return a Some here.
			Some(self.cmp(other))
		}
	}

	impl PartialEq for CompressorCandidate {
		fn eq(&self, other: &Self) -> bool {
			(self.q == other.q) && (self.alg == other.alg)
		}
	}

	impl Eq for CompressorCandidate {}

	// This is the current candidate.
	//
	// Assume no candidate so far. We do this by assigning the sentinel value
	// of negative infinity to the q-value. If this value is negative infinity,
	// that means there was no viable compressor candidate.
	let mut cur_candidate = CompressorCandidate {
		alg: CompressionType::Passthrough,
		q: f64::NEG_INFINITY,
	};

	// This loop reads the requested compressors and keeps track of whichever
	// one has the highest priority per our heuristic.
	for val in accept_encoding.to_string().split(',') {
		let mut q: f64 = 1.0;

		// The compressor and q-value (if the latter is defined)
		// will be delimited by semicolons.
		let mut spl: Split<char> = val.split(';');

		// Get the compressor. For example, in
		//   gzip;q=0.8
		// this grabs "gzip" in the string. It
		// will further validate the compressor against the
		// list of those we support. If it is not supported,
		// we move onto the next one.
		let compressor: CompressionType = match spl.next() {
			// CompressionType::parse will return the appropriate enum given
			// a string. For example, it will return CompressionType::Gzip
			// when given "gzip".
			Some(s) => match CompressionType::parse(s.trim()) {
				Some(candidate) => candidate,

				// We don't support the requested compression algorithm.
				None => continue,
			},

			// We should never get here, but I'm paranoid.
			None => continue,
		};

		// Get the q-value. This might not be defined, in which case assume
		// 1.0.
		if let Some(s) = spl.next() {
			if !(s.len() > 2 && s.starts_with("q=")) {
				// If the q-value is malformed, the header is malformed, so
				// abort.
				return None;
			}

			match s[2..].parse::<f64>() {
				Ok(val) => {
					if (0.0..=1.0).contains(&val) {
						q = val;
					} else {
						// If the value is outside [0..1], the header is malformed.
						// Abort.
						return None;
					};
				}
				Err(_) => {
					// If this isn't an f64, then assume a malformed header
					// value and abort.
					return None;
				}
			}
		};

		// If new_candidate > cur_candidate, make new_candidate the new
		// cur_candidate. But do this safely! It is very possible that
		// someone gave us the string "NAN", which (&str).parse::<f64>
		// will happily translate to f64::NAN.
		let new_candidate = CompressorCandidate { alg: compressor, q };
		if let Some(ord) = new_candidate.partial_cmp(&cur_candidate) {
			if ord == Ordering::Greater {
				cur_candidate = new_candidate;
			}
		};
	}

	if cur_candidate.q != f64::NEG_INFINITY {
		Some(cur_candidate.alg)
	} else {
		None
	}
}
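
A few worked expectations implied by the rules above, consistent with the unit tests at the bottom of this file:

```rust
fn determine_compressor_examples() {
	// Explicit q-values order the candidates.
	assert_eq!(determine_compressor("br;q=1.0, gzip;q=0.8"), Some(CompressionType::Brotli));
	// With no q-values, both default to 1.0; the tie is broken by enum
	// order, which ranks Brotli above Gzip.
	assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
	// A q-value outside [0, 1] marks the whole header as malformed.
	assert_eq!(determine_compressor("gzip;q=2.0"), None);
}
```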

/// Compress the response body, if possible or desirable. The Body will be
/// compressed in place, and a new header Content-Encoding will be set
/// indicating the compression algorithm.
///
/// This function deems a Body eligible for compression if and only if the
/// following conditions are met:
///
/// 1. the HTTP client requests a compression encoding in the Accept-Encoding
///    header (hence the need for the req_headers);
///
/// 2. the content encoding corresponds to a compression algorithm we support;
///
/// 3. the Media type in the Content-Type response header is text with any
///    subtype (e.g. text/plain) or application/json.
///
/// compress_response returns Ok on successful compression, or if not all three
/// conditions above are met. It returns Err if there was a problem decoding
/// any header in either req_headers or res, but res will remain intact.
///
/// This function logs errors to stderr, but only in debug mode. No information
/// is logged in release builds.
async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
	// Check if the data is eligible for compression.
	if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
		match from_utf8(hdr.as_bytes()) {
			Ok(val) => {
				let s = val.to_string();

				// TODO: better determination of what is eligible for compression
				if !(s.starts_with("text/") || s.starts_with("application/json")) {
					return Ok(());
				};
			}
			Err(e) => {
				dbg_msg!(e);
				return Err(e.to_string());
			}
		};
	} else {
		// Response declares no Content-Type. Assume for simplicity that it
		// cannot be compressed.
		return Ok(());
	};

	// Don't bother if the size of the response body will fit
	// within an IP frame (less the bytes that make up the TCP/IP and HTTP
	// headers).
	if res.body().size_hint().lower() < 1452 {
		return Ok(());
	};

	// Quick and dirty closure for extracting a header from the request and
	// returning it as a &str.
	let get_req_header = |k: header::HeaderName| -> Option<&str> {
		match req_headers.get(k) {
			Some(hdr) => match from_utf8(hdr.as_bytes()) {
				Ok(val) => Some(val),

				#[cfg(debug_assertions)]
				Err(e) => {
					dbg_msg!(e);
					None
				}

				#[cfg(not(debug_assertions))]
				Err(_) => None,
			},
			None => None,
		}
	};

	// Check to see which compressor is requested, and if we can use it.
	let accept_encoding: &str = match get_req_header(header::ACCEPT_ENCODING) {
		Some(val) => val,
		None => return Ok(()), // Client requested no compression.
	};

	let compressor: CompressionType = match determine_compressor(accept_encoding) {
		Some(c) => c,
		None => return Ok(()),
	};

	// Perform the compression.
	let compressed: Vec<u8>;
	{
		// Get the body from the response.
		let mut aggregated_body = match body::aggregate(res.body_mut()).await {
			Ok(b) => b.reader(),
			Err(e) => {
				dbg_msg!(e);
				return Err(e.to_string());
			}
		};

		// Compress!
		compressed = match compressor {
			CompressionType::Gzip => {
				let mut gz: gzip::Encoder<Vec<u8>> = match gzip::Encoder::new(Vec::new()) {
					Ok(gz) => gz,
					Err(e) => {
						dbg_msg!(e);
						return Err(e.to_string());
					}
				};

				match io::copy(&mut aggregated_body, &mut gz) {
					Ok(_) => match gz.finish().into_result() {
						Ok(compressed) => compressed,
						Err(e) => {
							dbg_msg!(e);
							return Err(e.to_string());
						}
					},
					Err(e) => {
						dbg_msg!(e);
						return Err(e.to_string());
					}
				}
			}

			CompressionType::Brotli => {
				// We may want to make the compression parameters configurable
				// in the future. For now, the defaults are sufficient.
				let brotli_params = BrotliEncoderParams::default();

				let mut compressed = Vec::<u8>::new();
				match BrotliCompress(&mut aggregated_body, &mut compressed, &brotli_params) {
					Ok(_) => compressed,
					Err(e) => {
						dbg_msg!(e);
						return Err(e.to_string());
					}
				}
			}

			// This arm is for any requested compressor for which we don't yet
			// have an implementation.
			_ => return Ok(()),
		};
	}

	// We get here iff the compression was successful. Replace the body with
	// the compressed payload, and add the appropriate Content-Encoding header
	// in the response.
	res.headers_mut().insert(header::CONTENT_ENCODING, compressor.to_string().parse().unwrap());
	*(res.body_mut()) = Body::from(compressed);

	Ok(())
}

#[cfg(test)]
mod tests {
	use super::*;
	use brotli::Decompressor as BrotliDecompressor;
	use futures_lite::future::block_on;
	use lipsum::lipsum;
	use std::{boxed::Box, io};

	#[test]
	fn test_determine_compressor() {
		// Single compressor given.
		assert_eq!(determine_compressor("unsupported"), None);
		assert_eq!(determine_compressor("gzip"), Some(CompressionType::Gzip));
		assert_eq!(determine_compressor("*"), Some(DEFAULT_COMPRESSOR));

		// Multiple compressors.
		assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
		assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3"), Some(CompressionType::Gzip));
		assert_eq!(determine_compressor("br, gzip"), Some(CompressionType::Brotli));
		assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4"), Some(CompressionType::Gzip));

		// Invalid q-values.
		assert_eq!(determine_compressor("gzip;q=NAN"), None);
	}

	#[test]
	fn test_compress_response() {
		// This macro generates an Accept-Encoding header value given any number of
		// compressors.
		macro_rules! ae_gen {
			($x:expr) => {
				$x.to_string().as_str()
			};

			($x:expr, $($y:expr),+) => {
				format!("{}, {}", $x.to_string(), ae_gen!($($y),+)).as_str()
			};
		}

		for accept_encoding in vec![
			"*",
			ae_gen!(CompressionType::Gzip),
			ae_gen!(CompressionType::Brotli, CompressionType::Gzip),
			ae_gen!(CompressionType::Brotli),
		] {
			// Determine what the expected encoding should be based on the
			// specific encodings we accept.
			let expected_encoding: CompressionType = match determine_compressor(accept_encoding) {
				Some(s) => s,
				None => panic!("determine_compressor(accept_encoding) => None"),
			};

			// Build headers with our Accept-Encoding.
			let mut req_headers = HeaderMap::new();
			req_headers.insert(header::ACCEPT_ENCODING, header::HeaderValue::from_str(accept_encoding).unwrap());

			// Build test response.
			let lorem_ipsum: String = lipsum(10000);
			let expected_lorem_ipsum = Vec::<u8>::from(lorem_ipsum.as_str());
			let mut res = Response::builder()
				.status(200)
				.header(header::CONTENT_TYPE, "text/plain")
				.body(Body::from(lorem_ipsum))
				.unwrap();

			// Perform the compression.
			if let Err(e) = block_on(compress_response(req_headers, &mut res)) {
				panic!("compress_response(req_headers, &mut res) => Err(\"{}\")", e);
			};

			// If the content was compressed, we expect the Content-Encoding
			// header to be modified.
			assert_eq!(
				res
					.headers()
					.get(header::CONTENT_ENCODING)
					.unwrap_or_else(|| panic!("missing content-encoding header"))
					.to_str()
					.unwrap_or_else(|_| panic!("failed to convert Content-Encoding header::HeaderValue to String")),
				expected_encoding.to_string()
			);

			// Decompress the body and make sure it's equal to what we started
			// with.
			//
			// In the case of no compression, just make sure the "new" body in
			// the Response is the same as that with which we started.
			let body_vec = match block_on(body::to_bytes(res.body_mut())) {
				Ok(b) => b.to_vec(),
				Err(e) => panic!("{}", e),
			};

			if expected_encoding == CompressionType::Passthrough {
				assert!(body_vec.eq(&expected_lorem_ipsum));
				continue;
			}

			// This provides an io::Read for the underlying body.
			let mut body_cursor: io::Cursor<Vec<u8>> = io::Cursor::new(body_vec);

			// Match the appropriate decompressor for the given expected_encoding.
			let mut decoder: Box<dyn io::Read> = match expected_encoding {
				CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) {
					Ok(dgz) => Box::new(dgz),
					Err(e) => panic!("{}", e),
				},

				CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),

				_ => panic!("no decompressor for {}", expected_encoding.to_string()),
			};

			let mut decompressed = Vec::<u8>::new();
			match io::copy(&mut decoder, &mut decompressed) {
				Ok(_) => {}
				Err(e) => panic!("{}", e),
			};

			assert!(decompressed.eq(&expected_lorem_ipsum));
		}
	}
}

src/settings.rs

@ -19,7 +19,7 @@ struct SettingsTemplate {

// CONSTANTS

const PREFS: [&str; 10] = [
const PREFS: [&str; 11] = [
	"theme",
	"front_page",
	"layout",

@ -27,6 +27,7 @@ const PREFS: [&str; 10] = [
	"comment_sort",
	"post_sort",
	"show_nsfw",
	"blur_nsfw",
	"use_hls",
	"hide_hls_notification",
	"autoplay_videos",

src/subreddit.rs

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{
	catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
	catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
};
use crate::{client::json, server::ResponseExt, RequestExt};
use askama::Template;

@ -24,6 +24,8 @@ struct SubredditTemplate {
	/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
	/// and all fetched posts being filtered).
	all_posts_filtered: bool,
	/// Whether all posts were hidden because they are NSFW (and the user has disabled showing NSFW posts)
	all_posts_hidden_nsfw: bool,
}

#[derive(Template)]

@ -94,6 +96,12 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
		}
	};

	// Return the landing page if this is an NSFW community but the user
	// has disabled the display of NSFW content, or if the instance is SFW-only.
	if sub.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
		return Ok(nsfw_landing(req).await.unwrap_or_default());
	}

	let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default());
	let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
	let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");

@ -111,12 +119,13 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
			redirect_url,
			is_filtered: true,
			all_posts_filtered: false,
			all_posts_hidden_nsfw: false,
		})
	} else {
		match Post::fetch(&path, quarantined).await {
			Ok((mut posts, after)) => {
				let all_posts_filtered = filter_posts(&mut posts, &filters);

				let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
				let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
				template(SubredditTemplate {
					sub,
					posts,

@ -127,6 +136,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
					redirect_url,
					is_filtered: false,
					all_posts_filtered,
					all_posts_hidden_nsfw,
				})
			}
			Err(msg) => match msg.as_str() {

@ -253,11 +263,7 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,

	// Redirect back to subreddit
	// check for redirect parameter if unsubscribing/unfiltering from outside sidebar
	let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
		format!("/{}", redirect_path)
	} else {
		format!("/r/{}", sub)
	};
	let path = param(&format!("?{}", query), "redirect").map_or_else(|| format!("/r/{}", sub), |redirect_path| format!("/{}", redirect_path));

	let mut response = redirect(path);

@ -416,5 +422,6 @@ async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
		members: format_num(members),
		active: format_num(active),
		wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
		nsfw: res["data"]["over18"].as_bool().unwrap_or_default(),
	})
}
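
The `map_or_else` rewrite in `subscriptions_filters` above is behavior-preserving. On a plain `Option`, the two shapes compare as follows (names here are illustrative, not from the diff):

```rust
// The if-let form and the map_or_else form produce the same path.
fn redirect_target(redirect_param: Option<String>, sub: &str) -> String {
	redirect_param.map_or_else(|| format!("/r/{}", sub), |p| format!("/{}", p))
}
```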
22
src/user.rs

@ -1,7 +1,7 @@
// CRATES
use crate::client::json;
use crate::server::RequestExt;
use crate::utils::{error, filter_posts, format_url, get_filters, param, template, Post, Preferences, User};
use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
use askama::Template;
use hyper::{Body, Request, Response};
use time::{macros::format_description, OffsetDateTime};

@ -24,6 +24,8 @@ struct UserTemplate {
	/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
	/// and all fetched posts being filtered).
	all_posts_filtered: bool,
	/// Whether all posts were hidden because they are NSFW (and the user has disabled showing NSFW posts)
	all_posts_hidden_nsfw: bool,
}

// FUNCTIONS

@ -40,11 +42,20 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
	let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
	let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26");

	// Retrieve other variables from Libreddit request
	// Retrieve other variables from Ferrit request
	let sort = param(&path, "sort").unwrap_or_default();
	let username = req.param("name").unwrap_or_default();

	// Retrieve info from the user's about page.
	let user = user(&username).await.unwrap_or_default();

	// Return the landing page if Reddit deems this user NSFW but the
	// requester has disabled the display of NSFW content, or if the instance
	// is SFW-only.
	if user.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
		return Ok(nsfw_landing(req).await.unwrap_or_default());
	}

	let filters = get_filters(&req);
	if filters.contains(&["u_", &username].concat()) {
		template(UserTemplate {

@ -58,13 +69,14 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
			redirect_url,
			is_filtered: true,
			all_posts_filtered: false,
			all_posts_hidden_nsfw: false,
		})
	} else {
		// Request user posts/comments from Reddit
		match Post::fetch(&path, false).await {
			Ok((mut posts, after)) => {
				let all_posts_filtered = filter_posts(&mut posts, &filters);

				let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
				let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
				template(UserTemplate {
					user,
					posts,

@ -76,6 +88,7 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
					redirect_url,
					is_filtered: false,
					all_posts_filtered,
					all_posts_hidden_nsfw,
				})
			}
			// If there is an error show error page

@ -107,6 +120,7 @@ async fn user(name: &str) -> Result<User, String> {
			created: created.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default(),
			banner: about("banner_img"),
			description: about("public_description"),
			nsfw: res["data"]["subreddit"]["over_18"].as_bool().unwrap_or_default(),
		}
	})
}

217
src/utils.rs

@ -9,10 +9,36 @@ use regex::Regex;
use rust_embed::RustEmbed;
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use std::env;
use std::str::FromStr;
use time::{macros::format_description, Duration, OffsetDateTime};
use url::Url;

/// Write a message to stderr in debug mode. This macro is a no-op in
/// release code.
#[macro_export]
macro_rules! dbg_msg {
	($x:expr) => {
		#[cfg(debug_assertions)]
		eprintln!("{}:{}: {}", file!(), line!(), $x.to_string())
	};

	($($x:expr),+) => {
		#[cfg(debug_assertions)]
		dbg_msg!(format!($($x),+))
	};
}
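
Typical call sites, for illustration; both forms compile away entirely in release builds:

```rust
// Hypothetical usage of the dbg_msg! macro defined above.
fn dbg_msg_usage() {
	dbg_msg!("cache miss");
	// The variadic form forwards its arguments to format!().
	dbg_msg!("request to {} failed with status {}", "r/rust", 429);
}
```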
|
||||
|
||||
/// Identifies whether or not the page is a subreddit, a user page, or a post.
|
||||
/// This is used by the NSFW landing template to determine the mesage to convey
|
||||
/// to the user.
|
||||
#[derive(PartialEq, Eq)]
|
||||
pub enum ResourceType {
|
||||
Subreddit,
|
||||
User,
|
||||
Post,
|
||||
}
|
||||
|
||||
// Post flair with content, background color and foreground color
|
||||
pub struct Flair {
|
||||
pub flair_parts: Vec<FlairPart>,
|
||||
|
@ -210,9 +236,11 @@ pub struct Post {
|
|||
pub domain: String,
|
||||
pub rel_time: String,
|
||||
pub created: String,
|
||||
pub num_duplicates: u64,
|
||||
pub comments: (String, String),
|
||||
pub gallery: Vec<GalleryMedia>,
|
||||
pub awards: Awards,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
impl Post {
|
||||
|
@ -309,9 +337,11 @@ impl Post {
|
|||
permalink: val(post, "permalink"),
|
||||
rel_time,
|
||||
created,
|
||||
num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
|
||||
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
awards,
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -403,6 +433,27 @@ pub struct ErrorTemplate {
|
|||
pub url: String,
|
||||
}
|
||||
|
||||
/// Template for NSFW landing page. The landing page is displayed when a page's
|
||||
/// content is wholly NSFW, but a user has not enabled the option to view NSFW
|
||||
/// posts.
|
||||
#[derive(Template)]
|
||||
#[template(path = "nsfwlanding.html")]
|
||||
pub struct NSFWLandingTemplate {
|
||||
/// Identifier for the resource. This is either a subreddit name or a
|
||||
/// username. (In the case of the latter, set is_user to true.)
|
||||
pub res: String,
|
||||
|
||||
/// Identifies whether or not the resource is a subreddit, a user page,
|
||||
/// or a post.
|
||||
pub res_type: ResourceType,
|
||||
|
||||
/// User preferences.
|
||||
pub prefs: Preferences,
|
||||
|
||||
/// Request URL.
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
// User struct containing metadata about user
|
||||
pub struct User {
|
||||
|
@ -413,6 +464,7 @@ pub struct User {
|
|||
pub created: String,
|
||||
pub banner: String,
|
||||
pub description: String,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
|
@ -427,6 +479,7 @@ pub struct Subreddit {
|
|||
pub members: (String, String),
|
||||
pub active: (String, String),
|
||||
pub wiki: bool,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
|
||||
|
@ -447,6 +500,7 @@ pub struct Preferences {
|
|||
pub layout: String,
|
||||
pub wide: String,
|
||||
pub show_nsfw: String,
|
||||
pub blur_nsfw: String,
|
||||
pub hide_hls_notification: String,
|
||||
pub use_hls: String,
|
||||
pub autoplay_videos: String,
|
||||
|
@ -469,7 +523,7 @@ impl Preferences {
|
|||
let mut themes = vec!["system".to_string()];
|
||||
for file in ThemeAssets::iter() {
|
||||
let chunks: Vec<&str> = file.as_ref().split(".css").collect();
|
||||
themes.push(chunks[0].to_owned())
|
||||
themes.push(chunks[0].to_owned());
|
||||
}
|
||||
Self {
|
||||
available_themes: themes,
|
||||
|
@ -478,6 +532,7 @@ impl Preferences {
|
|||
layout: setting(&req, "layout"),
|
||||
wide: setting(&req, "wide"),
|
||||
show_nsfw: setting(&req, "show_nsfw"),
|
||||
blur_nsfw: setting(&req, "blur_nsfw"),
|
||||
use_hls: setting(&req, "use_hls"),
|
||||
hide_hls_notification: setting(&req, "hide_hls_notification"),
|
||||
autoplay_videos: setting(&req, "autoplay_videos"),
|
||||
|
@@ -494,15 +549,111 @@ pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
 	setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
 }

-/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being a subreddit name or a user name). If a
-/// `Post`'s subreddit or author is found in the filters, it is removed. Returns `true` if _all_ posts were filtered
-/// out, or `false` otherwise.
-pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> bool {
+/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being
+/// a subreddit name or a user name). If a `Post`'s subreddit or author is
+/// found in the filters, it is removed.
+///
+/// The first value of the return tuple is the number of posts filtered. The
+/// second return value is `true` if all posts were filtered.
+pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> (u64, bool) {
+	// This is the length of the Vec<Post> prior to applying the filter.
+	let lb: u64 = posts.len().try_into().unwrap_or(0);
+
 	if posts.is_empty() {
-		false
+		(0, false)
 	} else {
-		posts.retain(|p| !filters.contains(&p.community) && !filters.contains(&["u_", &p.author.name].concat()));
-		posts.is_empty()
+		posts.retain(|p| !(filters.contains(&p.community) || filters.contains(&["u_", &p.author.name].concat())));
+
+		// Get the length of the Vec<Post> after applying the filter.
+		// If lb > la, then at least one post was removed.
+		let la: u64 = posts.len().try_into().unwrap_or(0);
+
+		(lb - la, posts.is_empty())
 	}
 }
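A caller can now distinguish "some posts filtered" from "all posts filtered". A minimal usage sketch, assuming `posts` and `req` come from the surrounding handler (names illustrative):

	let filters = get_filters(&req);
	let (num_posts_filtered, all_posts_filtered) = filter_posts(&mut posts, &filters);
	if all_posts_filtered {
		// Templates render "(All posts have been filtered)".
	} else if num_posts_filtered > 0 {
		// Templates render "(Some posts have been filtered)".
	}

Both values feed the `num_posts_filtered` / `all_posts_filtered` template variables used in templates/duplicates.html below.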

+/// Creates a [`Post`] from a provided JSON.
+pub async fn parse_post(post: &serde_json::Value) -> Post {
+	// Grab UTC time as unix timestamp
+	let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
+	// Parse post score and upvote ratio
+	let score = post["data"]["score"].as_i64().unwrap_or_default();
+	let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
+
+	// Determine the type of media along with the media URL
+	let (post_type, media, gallery) = Media::parse(&post["data"]).await;
+
+	let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
+
+	let permalink = val(post, "permalink");
+
+	let body = if val(post, "removed_by_category") == "moderator" {
+		format!(
+			"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}\">view removed post</a></p></div>",
+			permalink
+		)
+	} else {
+		rewrite_urls(&val(post, "selftext_html"))
+	};
+
+	// Build a post using data parsed from Reddit post API
+	Post {
+		id: val(post, "id"),
+		title: val(post, "title"),
+		community: val(post, "subreddit"),
+		body,
+		author: Author {
+			name: val(post, "author"),
+			flair: Flair {
+				flair_parts: FlairPart::parse(
+					post["data"]["author_flair_type"].as_str().unwrap_or_default(),
+					post["data"]["author_flair_richtext"].as_array(),
+					post["data"]["author_flair_text"].as_str(),
+				),
+				text: val(post, "link_flair_text"),
+				background_color: val(post, "author_flair_background_color"),
+				foreground_color: val(post, "author_flair_text_color"),
+			},
+			distinguished: val(post, "distinguished"),
+		},
+		permalink,
+		score: format_num(score),
+		upvote_ratio: ratio as i64,
+		post_type,
+		media,
+		thumbnail: Media {
+			url: format_url(val(post, "thumbnail").as_str()),
+			alt_url: String::new(),
+			width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
+			height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
+			poster: "".to_string(),
+		},
+		flair: Flair {
+			flair_parts: FlairPart::parse(
+				post["data"]["link_flair_type"].as_str().unwrap_or_default(),
+				post["data"]["link_flair_richtext"].as_array(),
+				post["data"]["link_flair_text"].as_str(),
+			),
+			text: val(post, "link_flair_text"),
+			background_color: val(post, "link_flair_background_color"),
+			foreground_color: if val(post, "link_flair_text_color") == "dark" {
+				"black".to_string()
+			} else {
+				"white".to_string()
+			},
+		},
+		flags: Flags {
+			nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
+			stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
+		},
+		domain: val(post, "domain"),
+		rel_time,
+		created,
+		num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
+		comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
+		gallery,
+		awards,
+		nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
+	}
+}
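For orientation, `parse_post` expects one element of a Reddit API listing, where everything of interest sits under the `data` key. A hedged sketch with made-up values (missing fields fall back to defaults via the `unwrap_or_default` calls above):

	// Run inside an async context; all values are illustrative only.
	let json = serde_json::json!({
		"data": {
			"id": "abc123",
			"title": "Example post",
			"subreddit": "rust",
			"score": 42,
			"upvote_ratio": 0.97,
			"created_utc": 1660000000.0,
			"over_18": false
		}
	});
	let post = parse_post(&json).await;
	assert_eq!(post.community, "rust");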
@@ -529,8 +680,8 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
 	req
 		.cookie(name)
 		.unwrap_or_else(|| {
-			// If there is no cookie for this setting, try receiving a default from an environment variable
-			if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
+			// If there is no cookie for this setting, try receiving a default from the config
+			if let Some(default) = crate::config::get_setting(&format!("FERRIT_DEFAULT_{}", name.to_uppercase())) {
 				Cookie::new(name, default)
 			} else {
 				Cookie::named(name)
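The lookup key is derived from the cookie name, so every preference gains an instance-wide default under the new prefix. For example (the key follows the pattern above; the value is illustrative):

	// A deployment exporting FERRIT_DEFAULT_THEME=doomone makes "doomone"
	// the theme for visitors who have no "theme" cookie set.
	let key = format!("FERRIT_DEFAULT_{}", "theme".to_uppercase());
	assert_eq!(key, "FERRIT_DEFAULT_THEME");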
@@ -615,7 +766,7 @@ pub fn format_url(url: &str) -> String {
 		}
 	}

-// Rewrite Reddit links to Libreddit in body of text
+// Rewrite Reddit links to Ferrit in body of text
 pub fn rewrite_urls(input_text: &str) -> String {
 	let text1 = Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#)
 		.map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string())
@@ -623,7 +774,7 @@ pub fn rewrite_urls(input_text: &str) -> String {
 		.replace("%5C", "")
 		.replace('\\', "");

-	// Rewrite external media previews to Libreddit
+	// Rewrite external media previews to Ferrit
 	Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
 		if re.is_match(&text1) {
 			re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
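The first regex turns absolute Reddit links into instance-relative ones. A quick behavioral sketch (the input HTML is illustrative):

	let html = r#"<a href="https://www.reddit.com/r/rust/comments/abc123/">thread</a>"#;
	// The scheme and host are collapsed into a leading slash:
	assert_eq!(rewrite_urls(html), r#"<a href="/r/rust/comments/abc123/">thread</a>"#);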
@@ -701,10 +852,11 @@ pub fn redirect(path: String) -> Response<Body> {
 		.unwrap_or_default()
 }

-pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
+/// Renders a generic error landing page.
+pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Body>, String> {
 	let url = req.uri().to_string();
 	let body = ErrorTemplate {
-		msg,
+		msg: msg.to_string(),
 		prefs: Preferences::new(req),
 		url,
 	}
@@ -714,6 +866,43 @@ pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
 	Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
 }
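Because `msg` is now `impl ToString`, call sites can pass a string literal directly instead of allocating a `String` first, e.g.:

	// Before: error(req, "Nothing here".to_string()).await
	return error(req, "Nothing here").await;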

+/// Retrieve the `FERRIT_SFW_ONLY` setting from the config, which is sourced
+/// from environment variables and the config file.
+pub fn sfw_only() -> bool {
+	crate::config::get_setting("FERRIT_SFW_ONLY").is_some()
+}
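As written, the check is presence-based: setting `FERRIT_SFW_ONLY` to any value marks the instance SFW-only. A sketch of how a handler might gate an NSFW resource with it (illustrative, not an exact call site from this diff):

	if sub.nsfw && (sfw_only() || setting(&req, "show_nsfw") != "on") {
		// Serve the 403 landing page instead of the subreddit.
		return nsfw_landing(req).await;
	}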

+/// Render the landing page for NSFW content when the user has not enabled
+/// "show NSFW posts" in settings.
+pub async fn nsfw_landing(req: Request<Body>) -> Result<Response<Body>, String> {
+	let res_type: ResourceType;
+	let url = req.uri().to_string();
+
+	// Determine from the request URL if the resource is a subreddit, a user
+	// page, or a post.
+	let res: String = if !req.param("name").unwrap_or_default().is_empty() {
+		res_type = ResourceType::User;
+		req.param("name").unwrap_or_default()
+	} else if !req.param("id").unwrap_or_default().is_empty() {
+		res_type = ResourceType::Post;
+		req.param("id").unwrap_or_default()
+	} else {
+		res_type = ResourceType::Subreddit;
+		req.param("sub").unwrap_or_default()
+	};
+
+	let body = NSFWLandingTemplate {
+		res,
+		res_type,
+		prefs: Preferences::new(req),
+		url,
+	}
+	.render()
+	.unwrap_or_default();
+
+	Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
+}

 #[cfg(test)]
 mod tests {
 	use super::format_num;
(image assets changed: 8 KiB → 5 KiB, 4.2 KiB → 306 KiB, 969 B → 1.1 KiB)

6 static/hls.min.js (vendored)

BIN static/logo.png (7.9 KiB → 24 KiB)

@@ -1,6 +1,6 @@
 {
-	"name": "Libreddit",
-	"short_name": "Libreddit",
+	"name": "Ferrit",
+	"short_name": "Ferrit",
 	"display": "standalone",
 	"background_color": "#1f1f1f",
 	"description": "An alternative private front-end to Reddit",
@@ -20,4 +20,4 @@
 		"sizes": "32x32"
 	}
 	]
-}
+}
112 static/style.css

@@ -94,7 +94,7 @@ nav {
 }

 nav * { color: var(--text); }
-nav #reddit, #code > span { color: var(--accent); }
+nav #ferrit_logo_p2, #code > span { color: var(--accent); }
 nav #code > svg { stroke: var(--accent); }

 nav #logo {
@@ -119,7 +119,7 @@ nav #version {
 	margin-right: 10px;
 }

-nav #libreddit {
+nav #ferrit_logo {
 	vertical-align: -2px;
 }
@@ -712,22 +712,39 @@ a.search_subreddit:hover {
 	font-weight: bold;
 }

-.post_media_image, .post .__NoScript_PlaceHolder__, .post_media_video, .gallery {
+.post_media_content, .post .__NoScript_PlaceHolder__, .gallery {
 	max-width: calc(100% - 40px);
 	grid-area: post_media;
 	margin: 15px auto 5px auto;
 	width: auto;
 	height: auto;
+	overflow: hidden;
 }

-.post_media_video.short {
-	max-height: 512px;
+.post_media_video {
+	width: auto;
+	height: auto;
+	max-width: 100%;
+	max-height: 512px;
+	display: block;
+	margin: auto;
 }

 .post_media_image.short svg, .post_media_image.short img{
-	max-height: 512px;
+	width: auto;
+	height: auto;
+	max-width: 100%;
+	max-height: 512px;
+	display: block;
+	margin: auto;
 }

+.post_nsfw_blur {
+	filter: blur(1.5rem);
+}
+
+.post_nsfw_blur:hover {
+	filter: none;
+}
+
 .post_media_image svg{
@@ -813,6 +830,16 @@ a.search_subreddit:hover {
 	margin-right: 15px;
 }

+#post_links > li.desktop_item {
+	display: auto;
+}
+
+@media screen and (min-width: 480px) {
+	#post_links > li.mobile_item {
+		display: none;
+	}
+}
+
 .post_thumbnail {
 	border-radius: 5px;
 	border: var(--panel-border);
@@ -823,13 +850,25 @@ a.search_subreddit:hover {
 	margin: 5px;
 }

-.post_thumbnail svg {
+.post_thumbnail div {
 	grid-area: 1 / 1 / 2 / 2;
 	width: 100%;
 	height: auto;
 	object-fit: cover;
 	align-self: center;
 	justify-self: center;
+	overflow: hidden;
 }

+.post_thumbnail div svg {
+	width: 100%;
+	height: auto;
+}
+
 .post_thumbnail span {
 	z-index: 0;
 }

+.thumb_nsfw_blur {
+	filter: blur(0.3rem)
+}
+
 .post_thumbnail.no_thumbnail {
@@ -1234,6 +1273,54 @@ td, th {
 #error h3 { opacity: 0.85; }
 #error a { color: var(--accent); }

+/* Messages */
+
+#duplicates_msg h3 {
+	display: inline-block;
+	margin-top: 10px;
+	margin-bottom: 10px;
+	text-align: center;
+	width: 100%;
+}
+
+/* Warnings */
+
+.listing_warn {
+	display: inline-block;
+	margin: 10px;
+	text-align: center;
+	width: 100%;
+}
+
+.listing_warn a {
+	color: var(--accent);
+}
+
+/* NSFW Landing Page */
+
+#nsfw_landing {
+	display: inline-block;
+	text-align: center;
+	width: 100%;
+}
+
+#nsfw_landing h1 {
+	display: inline-block;
+	margin-bottom: 20px;
+	text-align: center;
+	width: 100%;
+}
+
+#nsfw_landing p {
+	display: inline-block;
+	text-align: center;
+	width: 100%;
+}
+
+#nsfw_landing a {
+	color: var(--accent);
+}
+
 /* Mobile */

 @media screen and (max-width: 800px) {
@@ -1334,4 +1421,9 @@ td, th {
 		padding: 7px 0px;
 		margin-right: -5px;
 	}

+	#post_links > li { margin-right: 10px }
+	#post_links > li.desktop_item { display: none }
+	#post_links > li.mobile_item { display: auto }
+	.post_footer > p > span#upvoted { display: none }
 }

13 static/themes/doomone.css (new file)

@@ -0,0 +1,13 @@
+.doomone {
+	--accent: #51afef;
+	--green: #00a229;
+	--text: #bbc2cf;
+	--foreground: #3d4148;
+	--background: #282c34;
+	--outside: #52565c;
+	--post: #24272e;
+	--panel-border: 2px solid #52565c;
+	--highlighted: #686b70;
+	--visited: #969692;
+	--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}
@@ -2,14 +2,14 @@
 <html lang="en">
 <head>
 	{% block head %}
-	<title>{% block title %}Libreddit{% endblock %}</title>
+	<title>{% block title %}Ferrit{% endblock %}</title>
 	<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
-	<meta name="description" content="View on Libreddit, an alternative private front-end to Reddit.">
+	<meta name="description" content="View on Ferrit, an alternative private front-end to Reddit.">
 	<meta name="viewport" content="width=device-width, initial-scale=1.0">
 	<!-- General PWA -->
 	<meta name="theme-color" content="#1F1F1F">
 	<!-- iOS Application -->
-	<meta name="apple-mobile-web-app-title" content="Libreddit">
+	<meta name="apple-mobile-web-app-title" content="Ferrit">
 	<meta name="apple-mobile-web-app-capable" content="yes">
 	<meta name="apple-mobile-web-app-status-bar-style" content="default">
 	<!-- Android -->
@@ -29,8 +29,8 @@
 <!-- NAVIGATION BAR -->
 <nav>
 	<div id="logo">
-		<a id="libreddit" href="/"><span id="lib">lib</span><span id="reddit">reddit.</span></a>
-		<span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span>
+		<a id="ferrit_logo" href="/"><span id="ferrit_logo_p1">ferr</span><span id="ferrit_logo_p2">it.</span></a>
+		<span id="version">v{{ env!("CARGO_PKG_VERSION") }}{% if crate::utils::sfw_only() %} <span title="This instance is SFW-only.">💼</span>{% endif %}</span>
 		{% block subscriptions %}{% endblock %}
 	</div>
 	{% block search %}{% endblock %}
@@ -38,7 +38,11 @@
 	<a id="reddit_link" href="https://www.reddit.com{{ url }}" rel="nofollow">
 		<span>reddit</span>
 		<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-			<path d="M23 12.0737C23 10.7308 21.9222 9.64226 20.5926 9.64226C19.9435 9.64226 19.3557 9.90274 18.923 10.3244C17.2772 9.12492 15.0099 8.35046 12.4849 8.26135L13.5814 3.05002L17.1643 3.8195C17.2081 4.73947 17.9539 5.47368 18.8757 5.47368C19.8254 5.47368 20.5951 4.69626 20.5951 3.73684C20.5951 2.77769 19.8254 2 18.8758 2C18.2001 2 17.6214 2.39712 17.3404 2.96952L13.3393 2.11066C13.2279 2.08679 13.1116 2.10858 13.016 2.17125C12.9204 2.23393 12.8533 2.33235 12.8295 2.44491L11.6051 8.25987C9.04278 8.33175 6.73904 9.10729 5.07224 10.3201C4.63988 9.90099 4.05398 9.64226 3.40757 9.64226C2.0781 9.64226 1 10.7308 1 12.0737C1 13.0618 1.58457 13.9105 2.4225 14.2909C2.38466 14.5342 2.36545 14.78 2.36505 15.0263C2.36505 18.7673 6.67626 21.8 11.9945 21.8C17.3131 21.8 21.6243 18.7673 21.6243 15.0263C21.6243 14.7794 21.6043 14.5359 21.5678 14.2957C22.4109 13.9175 23 13.0657 23 12.0737Z"/>
+			<title>reddit</title>
+			<g transform="matrix(0.1219078,0,0,0.1219078,-2.7657832,-0.46047565)" style="fill:currentColor;fill-opacity:1;stroke:none;stroke-width:2">
+				<path d="m 97.96875,84.774794 q -10.5,-1.78125 -19.875,-1.78125 -15.09375,0 -24.375,11.34375 -9.28125,11.343746 -9.28125,28.874996 v 47.625 h -16.875 v -65.71875 q 0,-7.124996 -1.3125,-16.781246 -1.21875,-9.65625 -3.5625,-18.9375 h 16.03125 q 3.75,12.9375 4.5,23.4375 h 0.46875 q 4.6875,-10.5 9.1875,-15.375 4.5,-4.96875 10.6875,-7.40625 6.1875,-2.53125 15.09375,-2.53125 9.75,0 19.3125,1.59375 z" />
+				<path d="M 125.90625,172.71229 202.875,31.712294 h 16.6875 L 142.96875,172.71229 Z" />
+			</g>
 		</svg>
 	</a>
 	<a id="settings_link" href="/settings">
@@ -48,7 +52,7 @@
 			<circle cx="12" cy="12" r="3"/><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
 		</svg>
 	</a>
-	<a id="code" href="https://github.com/spikecodes/libreddit">
+	<a id="code" href="https://github.com/ferritreader/ferrit">
 		<span>code</span>
 		<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
 			<title>code</title>
107 templates/duplicates.html (new file)

@@ -0,0 +1,107 @@
+{% extends "base.html" %}
+{% import "utils.html" as utils %}
+
+{% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %}
+
+{% block search %}
+	{% call utils::search(["/r/", post.community.as_str()].concat(), "") %}
+{% endblock %}
+
+{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
+{% block head %}
+	{% call super() %}
+{% endblock %}
+
+{% block subscriptions %}
+	{% call utils::sub_list(post.community.as_str()) %}
+{% endblock %}
+
+{% block content %}
+<div id="column_one">
+	{% call utils::post(post) %}
+
+	<!-- DUPLICATES -->
+	{% if post.num_duplicates == 0 %}
+	<span class="listing_warn">(No duplicates found)</span>
+	{% else if post.flags.nsfw && prefs.show_nsfw != "on" %}
+	<span class="listing_warn">(Enable "Show NSFW posts" in <a href="/settings">settings</a> to show duplicates)</span>
+	{% else %}
+	<div id="duplicates_msg"><h3>Duplicates</h3></div>
+	{% if num_posts_filtered > 0 %}
+	<span class="listing_warn">
+		{% if all_posts_filtered %}
+		(All posts have been filtered)
+		{% else %}
+		(Some posts have been filtered)
+		{% endif %}
+	</span>
+	{% endif %}
+
+	<div id="sort">
+		<div id="sort_options">
+			<a {% if params.sort.is_empty() || params.sort.eq("num_comments") %}class="selected"{% endif %} href="?sort=num_comments">
+				Number of comments
+			</a>
+			<a {% if params.sort.eq("new") %}class="selected"{% endif %} href="?sort=new">
+				New
+			</a>
+		</div>
+	</div>
+
+	<div id="posts">
+	{% for post in duplicates -%}
+		{# TODO: utils::post should be reworked to permit a truncated display of a post as below #}
+		{% if !(post.flags.nsfw) || prefs.show_nsfw == "on" %}
+		<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
+			<p class="post_header">
+				{% let community -%}
+				{% if post.community.starts_with("u_") -%}
+					{% let community = format!("u/{}", &post.community[2..]) -%}
+				{% else -%}
+					{% let community = format!("r/{}", post.community) -%}
+				{% endif -%}
+				<a class="post_subreddit" href="/r/{{ post.community }}">{{ post.community }}</a>
+				<span class="dot">•</span>
+				<a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
+				<span class="dot">•</span>
+				<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+				{% if !post.awards.is_empty() %}
+				{% for award in post.awards.clone() %}
+				<span class="award" title="{{ award.name }}">
+					<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
+				</span>
+				{% endfor %}
+				{% endif %}
+			</p>
+			<h2 class="post_title">
+				{% if post.flair.flair_parts.len() > 0 %}
+				<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
+					class="post_flair"
+					style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
+					dir="ltr">{% call utils::render_flair(post.flair.flair_parts) %}</a>
+				{% endif %}
+				<a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
+			</h2>
+
+			<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
+			<div class="post_footer">
+				<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
+			</div>
+
+		</div>
+		{% endif %}
+	{%- endfor %}
+	</div>

+	<footer>
+		{% if params.before != "" %}
+		<a href="?before={{ params.before }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="P">PREV</a>
+		{% endif %}
+
+		{% if params.after != "" %}
+		<a href="?after={{ params.after }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="N">NEXT</a>
+		{% endif %}
+	</footer>
+	{% endif %}
+</div>
+{% endblock %}
20 templates/nsfwlanding.html (new file)

@@ -0,0 +1,20 @@
+{% extends "base.html" %}
+{% block title %}NSFW content gated{% endblock %}
+{% block sortstyle %}{% endblock %}
+{% block content %}
+<div id="nsfw_landing">
+	{% if res_type == crate::utils::ResourceType::Subreddit %}
+	<h1>😱 r/{{ res }} is an NSFW community!</h1>
+	{% else if res_type == crate::utils::ResourceType::User %}
+	<h1>😱 u/{{ res }}'s content is NSFW!</h1>
+	{% else if res_type == crate::utils::ResourceType::Post %}
+	<h1>😱 This post is NSFW!</h1>
+	{% endif %}
+	<br />
+	{% if crate::utils::sfw_only() %}
+	<p>This instance of Ferrit is SFW-only.</p>
+	{% else %}
+	<p>Enable "Show NSFW posts" in <a href="/settings">settings</a> to view this {% if res_type == crate::utils::ResourceType::Subreddit %}subreddit{% else if res_type == crate::utils::ResourceType::User %}user's posts or comments{% else if res_type == crate::utils::ResourceType::Post %}post{% endif %}.</p>
+	{% endif %}
+</div>
+{% endblock %}
@@ -16,12 +16,12 @@
 <meta property="og:type" content="website">
 <meta property="og:url" content="{{ post.permalink }}">
 <meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">
-<meta property="og:description" content="View on Libreddit, an alternative private front-end to Reddit.">
+<meta property="og:description" content="View on Ferrit, an alternative private front-end to Reddit.">
 <meta property="og:image" content="{{ post.thumbnail.url }}">
 <meta property="twitter:card" content="summary_large_image">
 <meta property="twitter:url" content="{{ post.permalink }}">
 <meta property="twitter:title" content="{{ post.title }} - r/{{ post.community }}">
-<meta property="twitter:description" content="View on Libreddit, an alternative private front-end to Reddit.">
+<meta property="twitter:description" content="View on Ferrit, an alternative private front-end to Reddit.">
 <meta property="twitter:image" content="{{ post.thumbnail.url }}">
 {% endblock %}

@@ -31,95 +31,7 @@
 {% block content %}
 <div id="column_one">

-	<!-- POST CONTENT -->
-	<div class="post highlighted">
-		<p class="post_header">
-			<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
-			<span class="dot">•</span>
-			<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
-			{% if post.author.flair.flair_parts.len() > 0 %}
-			<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
-			{% endif %}
-			<span class="dot">•</span>
-			<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
-			{% if !post.awards.is_empty() %}
-			<span class="dot">•</span>
-			<span class="awards">
-				{% for award in post.awards.clone() %}
-				<span class="award" title="{{ award.name }}">
-					<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
-					{{ award.count }}
-				</span>
-				{% endfor %}
-			</span>
-			{% endif %}
-		</p>
-		<h1 class="post_title">
-			{{ post.title }}
-			{% if post.flair.flair_parts.len() > 0 %}
-			<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
-				class="post_flair"
-				style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call utils::render_flair(post.flair.flair_parts) %}</a>
-			{% endif %}
-			{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
-		</h1>
-
-		<!-- POST MEDIA -->
-		<!-- post_type: {{ post.post_type }} -->
-		{% if post.post_type == "image" %}
-		<a href="{{ post.media.url }}" class="post_media_image" >
-			<svg
-				width="{{ post.media.width }}px"
-				height="{{ post.media.height }}px"
-				xmlns="http://www.w3.org/2000/svg">
-				<image width="100%" height="100%" href="{{ post.media.url }}"/>
-				<desc>
-					<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
-				</desc>
-			</svg>
-		</a>
-		{% else if post.post_type == "video" || post.post_type == "gif" %}
-		{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
-		<script src="/hls.min.js"></script>
-		<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
-			<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
-			<source src="{{ post.media.url }}" type="video/mp4" />
-		</video>
-		<script src="/playHLSVideo.js"></script>
-		{% else %}
-		<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
-		{% call utils::render_hls_notification(post.permalink[1..]) %}
-		{% endif %}
-		{% else if post.post_type == "gallery" %}
-		<div class="gallery">
-			{% for image in post.gallery -%}
-			<figure>
-				<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
-				<figcaption>
-					<p>{{ image.caption }}</p>
-					{% if image.outbound_url.len() > 0 %}
-					<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
-					{% endif %}
-				</figcaption>
-			</figure>
-			{%- endfor %}
-		</div>
-		{% else if post.post_type == "link" %}
-		<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
-		{% endif %}
-
-		<!-- POST BODY -->
-		<div class="post_body">{{ post.body|safe }}</div>
-		<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
-		<div class="post_footer">
-			<ul id="post_links">
-				<li><a href="/{{ post.id }}">permalink</a></li>
-				<li><a href="https://reddit.com/{{ post.id }}" rel="nofollow">reddit</a></li>
-			</ul>
-			<p>{{ post.upvote_ratio }}% Upvoted</p>
-		</div>
-	</div>
+	{% call utils::post(post) %}

 	<!-- SORT FORM -->
 	<form id="sort">
@@ -138,7 +50,7 @@
 {% for c in comments -%}
 <div class="thread">
 	{% if single_thread %}
-	<p class="thread_nav"><a href="/{{ post.id }}">View all comments</a></p>
+	<p class="thread_nav"><a href="{{ post.permalink }}">View all comments</a></p>
 	{% if c.parent_kind == "t1" %}
 	<p class="thread_nav"><a href="?context=9999">Show parent comments</a></p>
 	{% endif %}
@@ -1,7 +1,7 @@
 {% extends "base.html" %}
 {% import "utils.html" as utils %}

-{% block title %}Libreddit: search results - {{ params.q }}{% endblock %}
+{% block title %}Ferrit: search results - {{ params.q }}{% endblock %}

 {% block subscriptions %}
 {% call utils::sub_list("") %}
@@ -10,7 +10,7 @@
 {% block content %}
 <div id="column_one">
 	<form id="search_sort">
-		<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q }}" title="Search libreddit">
+		<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q }}" title="Search Ferrit">
 		{% if sub != "" %}
 		<div id="inside">
 			<input type="checkbox" name="restrict_sr" id="restrict_sr" {% if params.restrict_sr != "" %}checked{% endif %}>
@@ -56,10 +56,15 @@
 	</div>
 	{% endif %}
 	{% endif %}

+	{% if all_posts_hidden_nsfw %}
+	<span class="listing_warn">All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</span>
+	{% endif %}
+
 	{% if all_posts_filtered %}
-	<center>(All content on this page has been filtered)</center>
+	<span class="listing_warn">(All content on this page has been filtered)</span>
 	{% else if is_filtered %}
-	<center>(Content from r/{{ sub }} has been filtered)</center>
+	<span class="listing_warn">(Content from r/{{ sub }} has been filtered)</span>
 	{% else if params.typed != "sr_user" %}
 	{% for post in posts %}
 	{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
@@ -1,7 +1,7 @@
 {% extends "base.html" %}
 {% import "utils.html" as utils %}

-{% block title %}Libreddit Settings{% endblock %}
+{% block title %}Ferrit Settings{% endblock %}

 {% block search %}
 {% call utils::search("".to_owned(), "", "") %}
@@ -49,11 +49,18 @@
 				{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
 			</select>
 		</div>
+		{% if !crate::utils::sfw_only() %}
 		<div id="show_nsfw">
 			<label for="show_nsfw">Show NSFW posts:</label>
 			<input type="hidden" value="off" name="show_nsfw">
 			<input type="checkbox" name="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
 		</div>
+		<div id="blur_nsfw">
+			<label for="blur_nsfw">Blur NSFW previews:</label>
+			<input type="hidden" value="off" name="blur_nsfw">
+			<input type="checkbox" name="blur_nsfw" {% if prefs.blur_nsfw == "on" %}checked{% endif %}>
+		</div>
+		{% endif %}
 		<div id="autoplay_videos">
 			<label for="autoplay_videos">Autoplay videos</label>
 			<input type="hidden" value="off" name="autoplay_videos">
@@ -63,7 +70,7 @@
 			<label for="use_hls">Use HLS for videos
 				<details id="feeds">
 					<summary>Why?</summary>
-					<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Libreddit JS-free or utilize this feature.</div>
+					<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Ferrit JS-free or utilize this feature.</div>
 				</details>
 			</label>
 			<input type="hidden" value="off" name="use_hls">
@@ -110,7 +117,11 @@

 	<div id="settings_note">
 		<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
-		<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
+		<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&blur_nsfw={{ prefs.blur_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
+		<br />
+		{% if crate::utils::sfw_only() %}
+		<p>This instance is SFW-only. It will block all NSFW content.</p>
+		{% endif %}
 	</div>
 </div>
@@ -4,7 +4,7 @@
 {% block title %}
 	{% if sub.title != "" %}{{ sub.title }}
 	{% else if sub.name != "" %}{{ sub.name }}
-	{% else %}Libreddit{% endif %}
+	{% else %}Ferrit{% endif %}
 {% endblock %}

 {% block search %}
@@ -46,6 +46,10 @@
 		</form>
 		{% endif %}

+		{% if all_posts_hidden_nsfw %}
+		<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
+		{% endif %}
+
 		{% if all_posts_filtered %}
 		<center>(All content on this page has been filtered)</center>
 		{% else %}
@@ -5,7 +5,7 @@
 {% call utils::search("".to_owned(), "", "") %}
 {% endblock %}

-{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Libreddit{% endblock %}
+{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Ferrit{% endblock %}

 {% block subscriptions %}
 {% call utils::sub_list("") %}
@@ -32,6 +32,10 @@
 		</button>
 	</form>

+	{% if all_posts_hidden_nsfw %}
+	<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
+	{% endif %}
+
 	{% if all_posts_filtered %}
 	<center>(All content on this page has been filtered)</center>
 	{% else %}
@@ -16,7 +16,7 @@

 {% macro search(root, search) -%}
 <form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search" id="searchbox">
-	<input id="search" type="text" name="q" placeholder="Search" title="Search libreddit" value="{{ search }}">
+	<input id="search" type="text" name="q" placeholder="Search" title="Search Ferrit" value="{{ search }}">
 	{% if root != "/r/" && !root.is_empty() %}
 	<div id="inside">
 		<input type="checkbox" name="restrict_sr" id="restrict_sr" checked>
@@ -61,6 +61,109 @@
 	{% endif %}
 {%- endmacro %}

+{% macro post(post) -%}
+<!-- POST CONTENT -->
+<div class="post highlighted">
+	<p class="post_header">
+		<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
+		<span class="dot">•</span>
+		<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
+		{% if post.author.flair.flair_parts.len() > 0 %}
+		<small class="author_flair">{% call render_flair(post.author.flair.flair_parts) %}</small>
+		{% endif %}
+		<span class="dot">•</span>
+		<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+		{% if !post.awards.is_empty() %}
+		<span class="dot">•</span>
+		<span class="awards">
+			{% for award in post.awards.clone() %}
+			<span class="award" title="{{ award.name }}">
+				<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
+				{{ award.count }}
+			</span>
+			{% endfor %}
+		</span>
+		{% endif %}
+	</p>
+	<h1 class="post_title">
+		{{ post.title }}
+		{% if post.flair.flair_parts.len() > 0 %}
+		<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
+			class="post_flair"
+			style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call render_flair(post.flair.flair_parts) %}</a>
+		{% endif %}
+		{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
+	</h1>
+
+	<!-- POST MEDIA -->
+	<!-- post_type: {{ post.post_type }} -->
+	{% if post.post_type == "image" %}
+	<div class="post_media_content">
+		<a href="{{ post.media.url }}" class="post_media_image" >
+			<svg
+				width="{{ post.media.width }}px"
+				height="{{ post.media.height }}px"
+				xmlns="http://www.w3.org/2000/svg">
+				<image width="100%" height="100%" href="{{ post.media.url }}"/>
+				<desc>
+					<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
+				</desc>
+			</svg>
+		</a>
+	</div>
+	{% else if post.post_type == "video" || post.post_type == "gif" %}
+	{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
+	<script src="/hls.min.js"></script>
+	<div class="post_media_content">
+		<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
+			<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
+			<source src="{{ post.media.url }}" type="video/mp4" />
+		</video>
+	</div>
+	<script src="/playHLSVideo.js"></script>
+	{% else %}
+	<div class="post_media_content">
+		<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
+	</div>
+	{% call render_hls_notification(post.permalink[1..]) %}
+	{% endif %}
+	{% else if post.post_type == "gallery" %}
+	<div class="gallery">
+		{% for image in post.gallery -%}
+		<figure>
+			<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
+			<figcaption>
+				<p>{{ image.caption }}</p>
+				{% if image.outbound_url.len() > 0 %}
+				<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
+				{% endif %}
+			</figcaption>
+		</figure>
+		{%- endfor %}
+	</div>
+	{% else if post.post_type == "link" %}
+	<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
+	{% endif %}
+
+	<!-- POST BODY -->
+	<div class="post_body">{{ post.body|safe }}</div>
+	<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
+	<div class="post_footer">
+		<ul id="post_links">
+			<li class="desktop_item"><a href="{{ post.permalink }}">permalink</a></li>
+			<li class="mobile_item"><a href="{{ post.permalink }}">link</a></li>
+			{% if post.num_duplicates > 0 %}
+			<li class="desktop_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">duplicates</a></li>
+			<li class="mobile_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">dupes</a></li>
+			{% endif %}
+			<li class="desktop_item"><a href="https://reddit.com{{ post.permalink }}" rel="nofollow">reddit</a></li>
+			<li class="mobile_item"><a href="https://reddit.com{{ post.permalink }}" rel="nofollow">reddit</a></li>
+		</ul>
+		<p>{{ post.upvote_ratio }}%<span id="upvoted"> Upvoted</span></p>
+	</div>
+</div>
+{%- endmacro %}
+
 {% macro post_in_list(post) -%}
 <div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
 	<p class="post_header">
@@ -94,27 +197,36 @@
 	</h2>
 	<!-- POST MEDIA/THUMBNAIL -->
 	{% if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "image" %}
-	<a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
-		<svg
-			width="{{ post.media.width }}px"
-			height="{{ post.media.height }}px"
-			xmlns="http://www.w3.org/2000/svg">
-			<image width="100%" height="100%" href="{{ post.media.url }}"/>
-			<desc>
-				<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
-			</desc>
-		</svg>
-	</a>
+	<div class="post_media_content">
+		<a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
+			<svg
+				{%if post.flags.nsfw && prefs.blur_nsfw=="on" %}class="post_nsfw_blur"{% endif %}
+				width="{{ post.media.width }}px"
+				height="{{ post.media.height }}px"
+				xmlns="http://www.w3.org/2000/svg">
+				<image width="100%" height="100%" href="{{ post.media.url }}"/>
+				<desc>
+					<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
+				</desc>
+			</svg>
+		</a>
+	</div>
 	{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
-	<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+	<div class="post_media_content">
+		<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+	</div>
 	{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
 	{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
-	<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
-		<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
-		<source src="{{ post.media.url }}" type="video/mp4" />
-	</video>
+	<div class="post_media_content">
+		<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
+			<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
+			<source src="{{ post.media.url }}" type="video/mp4" />
+		</video>
+	</div>
 	{% else %}
-	<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+	<div class="post_media_content">
+		<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+	</div>
 	{% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
 	{% endif %}
 	{% else if post.post_type != "self" %}
@@ -125,12 +237,14 @@
 		<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
 	</svg>
 	{% else %}
-	<svg width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
-		<image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
-		<desc>
-			<img loading="lazy" alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
-		</desc>
-	</svg>
+	<div style="max-width:{{ post.thumbnail.width }}px;max-height:{{ post.thumbnail.height }}px;">
+		<svg {% if post.flags.nsfw && prefs.blur_nsfw=="on" %} class="thumb_nsfw_blur" {% endif %} width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
+			<image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
+			<desc>
+				<img loading="lazy" alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
+			</desc>
+		</svg>
+	</div>
 	{% endif %}
 	<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
 </a>
@@ -3,7 +3,7 @@

 {% block title %}
 	{% if sub != "" %}{{ page }} - {{ sub }}
-	{% else %}Libreddit{% endif %}
+	{% else %}Ferrit{% endif %}
 {% endblock %}

 {% block search %}