Fix #196
parent 43551f70fd
commit 0656756d21
5 changed files with 11 additions and 7 deletions
Cargo.lock (generated, 2 changes)
@@ -603,7 +603,7 @@ checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119"
 
 [[package]]
 name = "libreddit"
-version = "0.20.1"
+version = "0.20.2"
 dependencies = [
  "askama",
  "async-recursion",
Cargo.toml
@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.20.1"
+version = "0.20.2"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2018"
 
@@ -2,7 +2,7 @@ use cached::proc_macro::cached;
 use futures_lite::{future::Boxed, FutureExt};
 use hyper::{body::Buf, client, Body, Request, Response, Uri};
 use serde_json::Value;
-use std::{result::Result, str::FromStr};
+use std::result::Result;
 
 use crate::server::RequestExt;
 
@@ -20,7 +20,7 @@ pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, S
 
 async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
 	// First parameter is target URL (mandatory).
-	let url = Uri::from_str(url).map_err(|_| "Couldn't parse URL".to_string())?;
+	let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
 
 	// Prepare the HTTPS connector.
 	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();
@@ -28,7 +28,7 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
 	// Build the hyper client from the HTTPS connector.
 	let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
 
-	let mut builder = Request::get(url);
+	let mut builder = Request::get(uri);
 
 	// Copy useful headers from original request
 	for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
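Note on the two hunks above: hyper's Uri type implements FromStr, so url.parse::<Uri>() and Uri::from_str(url) produce the same result; switching to .parse() is what lets the use std::str::FromStr import in the first hunk be dropped, and binding the result as uri instead of shadowing the &str parameter url is why the later Request::get call changes. A minimal sketch of that equivalence, assuming only the hyper crate that is already a dependency of this project (the sample URL is illustrative, not part of the commit):

// Sketch only: str::parse::<Uri>() and Uri::from_str() are interchangeable,
// which is why the explicit FromStr import can be removed at the call site.
use std::str::FromStr;

use hyper::Uri;

fn main() {
    let raw = "https://www.reddit.com/r/rust/about.json?raw_json=1";

    // Form used after this commit: no FromStr import needed where .parse() is called.
    let parsed: Uri = raw.parse::<Uri>().expect("valid URI");

    // Form used before this commit.
    let from_str = Uri::from_str(raw).expect("valid URI");

    assert_eq!(parsed, from_str);
    println!("host = {:?}, query = {:?}", parsed.host(), parsed.query());
}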
@@ -89,7 +89,10 @@ fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String
 			response
 				.headers()
 				.get("Location")
-				.map(|val| val.to_str().unwrap_or_default())
+				.map(|val| {
+					let new_url = val.to_str().unwrap_or_default();
+					format!("{}{}raw_json=1", new_url, if new_url.contains("?") { "&" } else { "?" })
+				})
 				.unwrap_or_default()
 				.to_string(),
 			quarantine,
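The hunk above is the core of the fix for #196: when Reddit answers with a redirect, the Location URL was followed without the raw_json=1 parameter, so the redirected response came back with HTML-escaped JSON. The closure now re-appends raw_json=1, choosing "?" or "&" depending on whether the redirect target already has a query string. A standalone sketch of that logic, using a hypothetical helper name and sample URLs that are not part of the commit:

// Sketch of the query-append logic used in the redirect handler above.
// append_raw_json is a hypothetical helper; the commit inlines this in a closure.
fn append_raw_json(new_url: &str) -> String {
    format!("{}{}raw_json=1", new_url, if new_url.contains('?') { "&" } else { "?" })
}

fn main() {
    // Redirect target without a query string: start one with '?'.
    assert_eq!(
        append_raw_json("https://www.reddit.com/r/rust/comments/abc123/title/.json"),
        "https://www.reddit.com/r/rust/comments/abc123/title/.json?raw_json=1"
    );

    // Redirect target that already has a query string: extend it with '&'.
    assert_eq!(
        append_raw_json("https://www.reddit.com/search.json?q=test"),
        "https://www.reddit.com/search.json?q=test&raw_json=1"
    );

    println!("redirect URLs keep raw_json=1");
}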
@@ -47,7 +47,7 @@ struct SearchTemplate {
 // SERVICES
 pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 	let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
-	let path = format!("{}.json?{}{}", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
+	let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
 	let query = param(&path, "q").unwrap_or_default();
 
 	if query.is_empty() {
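Same idea applied to search: the .json path built here now always carries &raw_json=1, so Reddit returns the actual characters rather than HTML entities such as &amp; in titles and self-text. A minimal sketch of the resulting path for an assumed incoming request; the path, query, and nsfw_results values below are made-up examples, not taken from the commit:

// Sketch: how the search path is assembled after this commit.
fn main() {
    let path = "/r/rust/search";                // req.uri().path()
    let query = "q=borrow+checker&sort=new";    // req.uri().query().unwrap_or_default()
    let nsfw_results = "";                      // empty unless show_nsfw is on

    let json_path = format!("{}.json?{}{}&raw_json=1", path, query, nsfw_results);
    assert_eq!(json_path, "/r/rust/search.json?q=borrow+checker&sort=new&raw_json=1");
    println!("{}", json_path);
}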
@@ -465,6 +465,7 @@ aside {
 #wiki {
 	background: var(--foreground);
 	padding: 35px;
+	overflow-wrap: anywhere;
 }
 
 #top {