Compare commits
330 commits
SHA1:

dae9f31b19, fa57233e5d, 765dcbda5c, 14d8bf9fde, 04cac02f3d, cca1115b0f, 729c550688, dc34d51877, b911eaec54, 07370f7238,
5280806b7e, a3d5ca0112, 82e7669310, 5ea1372f91, 94a364daad, 05a8ee5281, ca6b271bc9, ab126b9d70, 2e64fd5cbc, ce5c794ce2,
ac4adab00b, 0715c2226b, fe796c676e, ba78d8e2c8, cac82d1986, ef0ae2f0aa, 4fa9a7491e, e08c0754f8, ee4bc00576, d75e7d07ec,
718e172b6d, 56bfcbba0e, d52da3aeb4, 42c30aaaba, 959d0c52b1, c796ae8bb7, 913ca1b075, 5d59a2c7be, ecc6875a21, d75693ce4e,
e7efca4a4e, 2a4dd07752, 193b4e36db, e582960402, 1883093dc7, 9984ba0d12, 98e817c0a8, 28231d99b8, 4afc07f1d5, f2d2068bcf,
b1bcf41061, 39af9096ef, 4bb6c5e90b, 494ff27b49, 948d20d8fb, 4315221385, 3a1ff0f307, b22d60f166, acee5d892d, 9a5f1c5f44,
ebee1f4a6c, 4847a6eed2, 951060dc45, 2693cd18d2, 838d1b6958, c527897a4d, 2141b88c35, b0c99f25e5, d5c4206afe, 4950106871,
0b48f671cb, 16717bc27d, f51d2e6881, ad5b754741, d5524d7eae, 3c2533f69a, 8225d34a9c, d2954862ea, f55abf934d, 052d9fd167,
bf7e73f9ff, 2f4e4038b1, 5d06cce220, 33363a83ef, 6200c5d53c, bbc49cbf42, 408858a91e, 4993da4d89, 2e50fa4edb, 9058b68f78,
9f90caf262, 1386cd6739, fe959efd0e, 5b6afb0b67, 93fd8f8565, 9efcf0b079, c077e39ae7, b1df4f1154, bb50e8bb25, c584a7d601,
ce4912b9c5, 236e8871c6, f78c6d7660, 9c6729c931, 8779c03afa, 2149e32c9e, 4a990c537b, d110f72c18, 991f3f59de, 8d9b660eb1,
9c51128af2, c494edcaff, 6aab9c85b9, 1d133ed2a8, d020895a51, b7a3a8d855, 11d23fd1b2, 41ab8a2a76, 2df6499fb2, 280c7e2b5e,
8790f5f719, 0f19ade40c, ae5b3370bc, 3d76b1bd86, 6c3d9ecd50, 57267827f5, 1909cc36a8, ca425f9ef5, a92550e050, 669e365913,
b2cbc5eaa5, 851ea314a7, fbf73634ee, 779908cb11, 78858b0e04, 660f85620d, 51214dc23a, 29b76be459, 6b9469e4b3, f5cf5f9151,
c762f9cf8e, 705ba81026, 0f717cc976, b2971c1829, 99f0cf1113, c25cd9c3fe, 388aaf4bfd, 31c9c676ee, ca96a76958, 326131aac4,
efa8efc6c7, 80e950de3b, a47e28587c, d912bff94e, 6e9250c03a, 7d762b3726, 33846cee34, dde117e7e6, 86b0d3d6c9, 36e2ac93be,
3a97a6f621, 92a141c1c5, c73cb838e2, ebd69ff427, af3385d1c2, 649e0db5cb, 62459c68e8, db1115f19e, 50aa52c485, 5020f36c90,
5b4864424a, 4f4cb220cc, 286bcf1bd3, bfeb81270c, ddb10f6584, 61393ba7c4, f2907641d2, 7d42c84aaf, 72da32383e, 2bdddaf928,
0dd25aacb6, 41f3fe7485, 7f84c6346d, d073aa247a, 9f23a1c70b, d8943709c7, 93afb6b8c9, 83c3981697, 40138572be, 7b392b369d,
26aa345f06, b7a23f1826, 34468202f9, 9f5213cf42, 43357493d5, 1d9718798f, 8312d21f9f, 0943b8b8c4, 3b127d26a1, 96ed04c298,
e8f9ad2479, fb46d2c6f2, a66362bce1, 12843414f8, 493696f4e2, b95c3c8afe, c698f4e0ef, 67c3e39d4e, 6d497fcf81, e45122288d,
5e4ed070d2, 21591d2a0d, d61ab883d9, 4f27a5c9de, 94a92ae30c, bb06797dec, 22d0ff789e, 64ca6a30c1, 11c4b8c21d, 3bb7614256,
582f8aee5b, b00f76627b, c59596511f, 0bc96b167c, 19081b72c0, 1a2a833597, 7206e7d6a1, 9a4e450766, c0d2d1ac65, 76419a7353,
0da1b9e1db, 1c5a317c4d, 2b41fb9735, 86b1f62393, b718fb131c, 2c985b8db5, cface54414, 918d142d24, 44f51487c2, c6ae86dbb4,
15b0505b96, 397a805ffd, c02006c297, 1143846ca6, 11166b4876, 62682911fc, 4cd1810527, b68e06c883, 89032e63bd, e8a64f5874,
e1e426c517, fb2b6608fe, f11d35f057, 3ca8f63c1e, 3565dcea39, 9a53329dc6, 6b71e71df8, 1cd336c7dc, 026f7794d1, 15c71cbfba,
5a8d61f231, 3c6632246e, e704c26ed3, 90f010359d, 35dc276fd2, 5e2669b6de, 7e1a80dc7e, 26f73d5611, a142aa75cd, 95c6beeb47,
33507e4c09, 5fa1febf5f, 5367dd39df, 69eb815d25, 9bb1544bd7, abc59b2858, d28cbb96a1, 07bbea8f9b, fc830c4683, a2360d1f65,
f62b821422, ae9fa5b388, e1a837f6b0, 0139fc568b, a46a2231c3, fe74f2eef7, b42adaa5a3, 05bf05b0dd, 89542072c8, dc5fa842c0,
9b1d89404e, 03d649b97b, eee2f110b8, c39d9ff0b0, 686e26ad6d, 5c60d733cd, 141ae26066, 47905f1e22, ca1c72c3dc, c1a5b7086a,
38ba4bd6cb, 64c4d2c23a, 0ec89146c8, beb5e6012a, 0facfdbd56, 67487a51ee, 692ac48708, ad514398fa, 3742893c19, 89ee79cd0f,
3aee141d0e, 3189de6fb9, bbc226829e, 4dd44aec3f, 66669a0cd8, faf9995962, 68a2b955d8, 42f0dc1bc7, 2e0def777c, 4dd455e185,
9d070141ff, 68f701265f, 24fda29358, 9282e30efd, 41c57bd070, bca2ba17b9, 3f367d0b5e, da03037ca4, b123fbbdab, cfe57a430b
**.cspell.json** (20 changes, deleted)

```diff
@@ -1,20 +0,0 @@
-{
-    "ignorePaths": [
-        "**/node_modules/**",
-        "**/vscode-extension/**",
-        "**/.git/**",
-        "**/.pnpm-lock.json",
-        ".vscode",
-        "megalinter",
-        "package-lock.json",
-        "report"
-    ],
-    "language": "en",
-    "noConfigSearch": true,
-    "words": [
-        "megalinter",
-        "oxsecurity",
-        "websurfx"
-    ],
-    "version": "0.2"
-}
```
**.github/workflows/contributors.yml** (48 changes, vendored, deleted)

```diff
@@ -1,48 +0,0 @@
----
-name: Contributors List
-
-on:
-  workflow_dispatch:
-
-  schedule:
-    - cron: "0 1 * * *"
-
-jobs:
-  contributors:
-    permissions:
-      contents: write
-      pull-requests: write
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.repository.default_branch }}
-
-      - name: Update contributors list
-        uses: wow-actions/contributors-list@242b53835016268d20e79eeff6f42193c02be8c8 # v1.2.0
-        with:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          svgPath: images/contributors_list.svg
-          round: true
-          includeBots: false
-          noCommit: true
-
-      - name: Commit & PR
-        uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # v4.2.4
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          add-paths: .github/assets/CONTRIBUTORS.svg
-          commit-message: 'chore: update contributors-list'
-          committer: GitHub <noreply@github.com>
-          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
-          signoff: false
-          branch: workflow/update-contributors-list
-          base: main
-          delete-branch: true
-          title: 'chore: update contributors-list'
-          body: |
-            Automated update to `images/contributors_list.svg`
```
**.github/workflows/docker.yml** (79 changes, vendored, new file)

```diff
@@ -0,0 +1,79 @@
+name: Release stable image
+
+on:
+  push:
+    branches:
+      - "release/stable/**"
+  pull_request:
+    branches:
+      - "release/stable/**"
+    types: [opened, synchronize]
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  release_image:
+    strategy:
+      fail-fast: false
+      matrix:
+        cache:
+          - memory
+          - redis
+          - hybrid
+          - no-cache
+
+    name: Release ${{ matrix.cache }} image
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      # Install buildx
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+      # Set buildx cache
+      - name: Cache register
+        uses: actions/cache@v4
+        with:
+          path: /tmp/.buildx-cache
+          key: buildx-cache
+      # Login to ghcr.io
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: neonmmd
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      # Extract branch info
+      - name: Set info
+        run: |
+          echo "VERSION=$(echo ${GITHUB_REF} | awk -F/ '{print $6}')" >> $GITHUB_ENV
+      # Print info for debug
+      - name: Print Info
+        run: |
+          echo $VERSION
+      # Create buildx multiarch
+      - name: Create buildx multiarch
+        run: docker buildx create --use --name=buildx-multi-arch --driver=docker-container --driver-opt=network=host
+      # Modify cache variable in the dockerfile.
+      - name: Modify Cache variable
+        run: |
+          sed -i "s/ARG CACHE=[a-z]*/ARG CACHE=${{ matrix.cache }}/g" Dockerfile
+      # Publish image
+      - name: Publish image
+        run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neonmmd/websurfx:$VERSION-${{ matrix.cache }} -t neon-mmd/websurfx:${{matrix.cache}} -f Dockerfile .
+      - name: Publish latest
+        if: ${{ matrix.cache }} == 'hybrid'
+        run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neon-mmd/websurfx:latest -f Dockerfile .
+      # Upload it to release
+      - name: Test if release already exists
+        id: release-exists
+        continue-on-error: true
+        run: gh release view $BINARY_NAME-$VERSION
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Create new draft release
+        if: steps.release-exists.outcome == 'failure' && steps.release-exists.conclusion == 'success'
+        run: gh release create -t $VERSION -d $VERSION
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
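To make the matrix flow above concrete, here is a rough sketch of what a single job effectively runs on the runner, with `redis` standing in for `${{ matrix.cache }}` (the `VERSION` value is an illustrative assumption; in the workflow it is parsed out of the `release/stable/**` branch name):

```shell
# Hypothetical expansion of one matrix job (cache = redis).
VERSION=1.21.0

# Rewrite the default cache backend baked into the Dockerfile.
sed -i "s/ARG CACHE=[a-z]*/ARG CACHE=redis/g" Dockerfile

# Build and push the multi-arch image for this cache variant.
docker buildx build --builder=buildx-multi-arch \
    --platform=linux/amd64,linux/arm64 \
    --build-arg CACHE=redis --push \
    -t neonmmd/websurfx:$VERSION-redis -f Dockerfile .
```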
**.github/workflows/issue-lock-unlock.yml** (2 changes, vendored)

```diff
@@ -11,6 +11,6 @@ jobs:
   action:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/label-actions@v3
+      - uses: dessant/label-actions@v4
        with:
          process-only: issues
```
**.github/workflows/labels.yml** (2 changes, vendored)

```diff
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
        with:
          node-version: '14'
      - uses: EddieHubCommunity/gh-action-open-source-labels@main
```
**.github/workflows/mega-linter.yml** (89 changes, vendored, deleted)

```diff
@@ -1,89 +0,0 @@
----
-# MegaLinter GitHub Action configuration file
-# More info at https://megalinter.io
-name: MegaLinter
-
-on:
-  # Trigger mega-linter at every push. Action will also be visible from Pull Requests to rolling
-  push: # Comment this line to trigger action only on pull-requests (not recommended if you don't pay for GH Actions)
-  pull_request:
-    branches: [rolling]
-
-env: # Comment env block if you do not want to apply fixes
-  # Apply linter fixes configuration
-  APPLY_FIXES: all # When active, APPLY_FIXES must also be defined as environment variable (in github/workflows/mega-linter.yml or other CI tool)
-  APPLY_FIXES_EVENT: pull_request # Decide which event triggers application of fixes in a commit or a PR (pull_request, push, all)
-  APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request)
-
-concurrency:
-  group: ${{ github.ref }}-${{ github.workflow }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    name: MegaLinter
-    runs-on: ubuntu-latest
-    permissions:
-      # Give the default GITHUB_TOKEN write permission to commit and push, comment issues & post new PR
-      # Remove the ones you do not need
-      contents: write
-      issues: write
-      pull-requests: write
-    steps:
-      # Git Checkout
-      - name: Checkout Code
-        uses: actions/checkout@v4
-        with:
-          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
-
-      # MegaLinter
-      - name: MegaLinter
-        id: ml
-        # You can override MegaLinter flavor used to have faster performances
-        # More info at https://megalinter.io/flavors/
-        uses: oxsecurity/megalinter/flavors/cupcake@v7.1.0
-        env:
-          # All available variables are described in documentation
-          # https://megalinter.io/configuration/
-          VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} to validate only diff with main branch
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY
-
-      # Upload MegaLinter artifacts
-      - name: Archive production artifacts
-        if: ${{ success() }} || ${{ failure() }}
-        uses: actions/upload-artifact@v3
-        with:
-          name: MegaLinter reports
-          path: |
-            megalinter-reports
-            mega-linter.log
-
-      # Create pull request if applicable (for now works only on PR from same repository, not from forks)
-      - name: Create Pull Request with applied fixes
-        id: cpr
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        uses: peter-evans/create-pull-request@v5
-        with:
-          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
-          commit-message: "[MegaLinter] Apply linters automatic fixes"
-          title: "[MegaLinter] Apply linters automatic fixes"
-          labels: bot
-      - name: Create PR output
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        run: |
-          echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
-          echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
-
-      # Push new commit if applicable (for now works only on PR from same repository, not from forks)
-      - name: Prepare commit
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        run: sudo chown -Rc $UID .git/
-      - name: Commit and push applied linter fixes
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        uses: stefanzweifel/git-auto-commit-action@v4
-        with:
-          branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
-          commit_message: "[MegaLinter] Apply linters fixes"
-          commit_user_name: megalinter-bot
-          commit_user_email: nicolas.vuillamy@ox.security
```
**.github/workflows/pr_labeler.yml** (2 changes, vendored)

```diff
@@ -9,7 +9,7 @@ jobs:
     pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@v4
+      - uses: actions/labeler@v5
        with:
          sync-labels: true
          dot: true
```
**.github/workflows/release.yml** (72 changes, vendored, new file)

```diff
@@ -0,0 +1,72 @@
+name: Bump release version
+on:
+  pull_request:
+    branches: [rolling]
+    types:
+      - closed
+
+permissions:
+  contents: write
+  pull-requests: write
+  repository-projects: write
+
+concurrency: production
+
+jobs:
+  build:
+    name: bump tag version and release
+    if: github.event.pull_request.merged == true
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.sha }}
+          fetch-depth: 0
+      - name: Bump version and push tag
+        id: version-bump
+        uses: hennejg/github-tag-action@v4.4.0
+        with:
+          github_token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+          release_branches: rolling
+      - name: create branch
+        uses: peterjgrainger/action-create-branch@v3.0.0
+        env:
+          GITHUB_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+        with:
+          branch: update-from-${{ github.sha }}
+      - name: update cargo.toml
+        run: |
+          appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
+          sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
+      - run: rustup toolchain install stable --profile minimal
+      - run: rustup update stable && rustup default stable
+      - name: regenerate cargo.lock
+        run: cargo generate-lockfile
+      - name: auto commit
+        uses: stefanzweifel/git-auto-commit-action@v5
+        with:
+          commit_message: "[skip ci] updating app version to ${{ steps.version-bump.outputs.new_tag }}"
+          branch: update-from-${{ github.sha }}
+      # create PR using GitHub CLI
+      - name: create PR with update info
+        id: create-pr
+        run: gh pr create --base rolling --head update-from-${{ github.sha }} --title 'Merge new update into rolling' --body 'Created by Github action'
+        env:
+          GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+      # merge PR using GitHub CLI
+      - name: merge PR with update info
+        id: merge-pr
+        run: gh pr merge --admin --merge --subject 'Merge update info' --delete-branch
+        env:
+          GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+      - name: Create Release
+        uses: softprops/action-gh-release@v2
+        with:
+          token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+          generate_release_notes: true
+          name: ${{ steps.version-bump.outputs.new_tag }}
+          tag_name: ${{ steps.version-bump.outputs.new_tag }}
+          prerelease: false
+        env:
+          GITHUB_REPOSITORY: ${{ github.repository }}
```
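The `update cargo.toml` step above relies on `grep -oE` to strip any tag prefix before patching `Cargo.toml`; a quick sketch of the same transformation in isolation (the `v1.21.0` input is an assumed example tag, not taken from the workflow):

```shell
# Extract the bare semver from a tag like "v1.21.0" and patch Cargo.toml with it.
appversion=$(echo "v1.21.0" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
echo "$appversion"   # prints: 1.21.0
sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
```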
**.github/workflows/rust_format.yml** (2 changes, vendored)

```diff
@@ -33,7 +33,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --all-targets --all
+          args: --all-targets --all-features --all
      - name: Run cargo check
        uses: actions-rs/cargo@v1
        with:
```
**.github/workflows/stale.yml** (2 changes, vendored)

```diff
@@ -19,7 +19,7 @@ jobs:
       pull-requests: write

     steps:
-      - uses: actions/stale@v8
+      - uses: actions/stale@v9
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Stale issue message'
```
**MegaLinter configuration file** (22 changes, deleted)

```diff
@@ -1,22 +0,0 @@
----
-# Configuration file for MegaLinter
-# See all available variables at https://megalinter.io/configuration/ and in linters documentation
-
-APPLY_FIXES: all # all, none, or list of linter keys
-# ENABLE: # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default
-ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default
-  - RUST_CLIPPY
-  - JAVASCRIPT_ES
-  - CSS_STYLELINT
-  - MARKDOWN_MARKDOWNLINT
-  - YAML_YAMLLINT
-  - HTML_DJLINT
-  - ACTION_ACTIONLINT
-  - DOCKERFILE_HADOLINT
-  - SPELL_CSPELL
-# DISABLE:
-# - COPYPASTE # Uncomment to disable checks of excessive copy-pastes
-# - SPELL # Uncomment to disable checks of spelling mistakes
-SHOW_ELAPSED_TIME: true
-FILEIO_REPORTER: false
-# DISABLE_ERRORS: true # Uncomment if you want MegaLinter to detect errors but not block CI to pass
```
**.mergify.yml** (19 changes)

```diff
@@ -1,13 +1,14 @@
-pull_request_rules:
-  - name: Automatic merge on approval
-    conditions:
+queue_rules:
+  - name: default
+    queue_conditions:
       - "#approved-reviews-by>=2"
       - check-success=build (stable)
       - check-success=CodeFactor
       - check-success=Rust project
-    actions:
-      queue:
-        method: squash
+    merge_conditions: []
+    merge_method: squash
+
+pull_request_rules:
   - name: automatic update of pull requests where more 5 commits behind
     conditions:
       - "#commits-behind>5"
@@ -17,4 +18,8 @@ pull_request_rules:
     conditions:
       - merged
     actions:
-      delete_head_branch: {}
+      delete_head_branch: {}
+  - name: Automatic merge on approval
+    conditions: []
+    actions:
+      queue:
```
**Stylelint configuration file**

```diff
@@ -1,9 +1,12 @@
 {
   "extends": "stylelint-config-standard",
   "rules": {
     "alpha-value-notation": "number",
-    "selector-class-pattern": null
+    "selector-class-pattern": null,
+    "no-descending-specificity": null
   },
   "fix": true,
   "cache": true,
   "overrides": [
     {
       "files": ["*.js"],
```
**Contributing guide**

```diff
@@ -6,11 +6,11 @@ Found a typo, or something that isn't as clear as it could be? Maybe I've missed
 
 ## Readme
 
-Found a typo, or the Readme is not very clear as it should be?. Consider Submitting a Pull request to the [Readme](https://github.com/neon-mmd/websurfx/blob/master/README.md) to add to or improve the Readme. This will help future users to better understand the project more clearly.
+Did you find a typo, or the Readme is not as clear as it should be? Consider Submitting a Pull request to the [Readme](https://github.com/neon-mmd/websurfx/blob/master/README.md) to add to or improve the Readme. This will help future users to better understand the project more clearly.
 
-## Help Improve Github Actions
+## Help Improve GitHub Actions
 
-Know how to fix or improve a github action?. Consider Submitting a Pull request to help make automation and testing better.
+Know how to fix or improve a GitHub action? Consider Submitting a Pull request to help make automation and testing better.
 
 ## Source Code
 
@@ -51,4 +51,4 @@ We have a [Discord](https://discord.gg/SWnda7Mw5u) channel, feel free to join and
 The _rolling branch_ is where we intend all contributions should go.
 
-We appreciate any contributions whether be of any size or topic and suggestions to help improve the Websurfx project. Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project than feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel.
+We appreciate any contributions whether of any size or topic and suggestions to help improve the Websurfx project. Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project then feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel.
```
**Cargo.lock** (3234 changes, generated, diff collapsed)
**Cargo.toml** (185 changes)

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.2.5"
+version = "1.21.0"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -13,42 +13,105 @@ bench = false
 path = "src/bin/websurfx.rs"
 
 [dependencies]
-reqwest = {version="0.11.21",default-features = false,features = ["json", "rustls-tls"]}
-tokio = {version="1.32.0",features=["rt-multi-thread","macros"]}
-serde = {version="1.0.188",features=["derive"]}
-handlebars = { version = "4.4.0", features = ["dir_source"] }
-scraper = {version="0.17.1"}
-actix-web = {version="4.4.0", features = ["cookies"]}
-actix-files = {version="0.6.2"}
-actix-cors = {version="0.6.4"}
-serde_json = {version="1.0.105"}
-fake-useragent = {version="0.1.3"}
-env_logger = {version="0.10.0"}
-log = {version="0.4.20"}
-mlua = {version="0.9.1", features=["luajit", "vendored"]}
-redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true}
-md5 = {version="0.7.0"}
-rand={version="0.8.5"}
-error-stack = {version="0.4.0"}
-async-trait = {version="0.1.73"}
-regex = {version="1.9.4", features=["perf"]}
-smallvec = {version="1.11.0", features=["union", "serde"]}
-futures = {version="0.3.28"}
-dhat = {version="0.3.2", optional = true}
-mimalloc = { version = "0.1.38", default-features = false }
-async-once-cell = {version="0.5.3"}
-actix-governor = {version="0.4.1"}
-mini-moka = { version="0.10", optional = true}
+reqwest = { version = "0.12.5", default-features = false, features = [
+    "rustls-tls",
+    "brotli",
+    "gzip",
+    "http2",
+    "socks",
+] }
+tokio = { version = "1.43.0", features = [
+    "rt-multi-thread",
+    "macros",
+    "fs",
+    "io-util",
+], default-features = false }
+serde = { version = "1.0.215", default-features = false, features = ["derive"] }
+serde_json = { version = "1.0.134", default-features = false }
+bincode = {version="1.3.3", default-features=false}
+maud = { version = "0.26.0", default-features = false, features = [
+    "actix-web",
+] }
+scraper = { version = "0.21.0", default-features = false }
+actix-web = { version = "4.9.0", features = [
+    "cookies",
+    "macros",
+    "compress-brotli",
+], default-features = false }
+actix-files = { version = "0.6.5", default-features = false }
+actix-cors = { version = "0.7.0", default-features = false }
+fake-useragent = { version = "0.1.3", default-features = false }
+env_logger = { version = "0.11.6", default-features = false }
+log = { version = "0.4.21", default-features = false }
+mlua = { version = "0.10.2", features = [
+    "luajit",
+    "vendored",
+], default-features = false }
+redis = { version = "0.28.1", features = [
+    "tokio-comp",
+    "connection-manager",
+    "tcp_nodelay"
+], default-features = false, optional = true }
+blake3 = { version = "1.5.4", default-features = false }
+error-stack = { version = "0.4.0", default-features = false, features = [
+    "std",
+] }
+async-trait = { version = "0.1.80", default-features = false }
+regex = { version = "1.11.1", features = ["perf"], default-features = false }
+futures = { version = "0.3.31", default-features = false, features = ["alloc"] }
+dhat = { version = "0.3.2", optional = true, default-features = false }
+mimalloc = { version = "0.1.43", default-features = false }
+async-once-cell = { version = "0.5.4", default-features = false }
+actix-governor = { version = "0.8.0", default-features = false }
+moka = { version = "0.12.8", optional = true, default-features = false, features = [
+    "future",
+] }
+async-compression = { version = "0.4.12", default-features = false, features = [
+    "brotli",
+    "tokio",
+], optional = true }
+chacha20poly1305 = { version = "0.10.1", default-features = false, features = [
+    "alloc",
+    "getrandom",
+], optional = true }
+chacha20 = { version = "0.9.1", default-features = false, optional = true }
+base64 = { version = "0.21.5", default-features = false, features = [
+    "std",
+], optional = true }
+cfg-if = { version = "1.0.0", default-features = false, optional = true }
+keyword_extraction = { version = "1.5.0", default-features = false, features = [
+    "tf_idf",
+    "rayon",
+] }
+stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
+thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
+    "moby",
+]}
+
+actix-multipart = { version = "0.7.2", default-features = false, features = [
+    "derive",
+    "tempfile",
+]}
+itertools = {version = "0.14.0", default-features = false}
 
 [dev-dependencies]
-rusty-hook = "^0.11.2"
-criterion = "0.5.1"
-tempfile = "3.8.0"
+rusty-hook = { version = "^0.11.2", default-features = false }
+criterion = { version = "0.5.1", default-features = false }
+tempfile = { version = "3.14.0", default-features = false }
+
+[build-dependencies]
+lightningcss = { version = "1.0.0-alpha.57", default-features = false, features = [
+    "grid",
+] }
+# Disabled until bug fixing update
+# minify-js = { version = "0.6.0", default-features = false }
+# Temporary fork with fix
+minify-js = { git = "https://github.com/RuairidhWilliamson/minify-js", branch = "master", version = "0.6.0", default-features = false}
 
 [profile.dev]
 opt-level = 0
 debug = true
-split-debuginfo = '...'
+split-debuginfo = 'unpacked'
 debug-assertions = true
 overflow-checks = true
 lto = false
@@ -64,15 +127,65 @@ debug = false # This should only be commented when testing with dhat profiler
 split-debuginfo = '...'
 debug-assertions = false
 overflow-checks = false
-lto = true
+lto = 'thin'
 panic = 'abort'
 incremental = false
 codegen-units = 1
 rpath = false
-strip = "debuginfo"
+strip = "symbols"
+
+[profile.bsr1]
+inherits = "release"
+opt-level = "s"
+
+[profile.bsr2]
+inherits = "bsr1"
+opt-level = "z"
+
+[profile.lpcb1]
+inherits = "release"
+codegen-units = 16
+
+[profile.lpcb2]
+inherits = "lpcb1"
+lto = "off"
+
+[profile.lpcb3]
+inherits = "lpcb2"
+opt-level = 2
+
+[profile.bsr_and_lpcb1]
+inherits = "lpcb1"
+opt-level = "s"
+
+[profile.bsr_and_lpcb2]
+inherits = "lpcb2"
+opt-level = "s"
+
+[profile.bsr_and_lpcb3]
+inherits = "lpcb3"
+opt-level = "s"
+
+[profile.bsr_and_lpcb4]
+inherits = "lpcb1"
+opt-level = "z"
+
+[profile.bsr_and_lpcb5]
+inherits = "lpcb1"
+opt-level = "z"
+
+[profile.bsr_and_lpcb6]
+inherits = "lpcb1"
+opt-level = "z"
 
 [features]
+use-synonyms-search = ["thesaurus/static"]
 default = ["memory-cache"]
-dhat-heap = ["dep:dhat"]
-memory-cache = ["dep:mini-moka"]
-redis-cache = ["dep:redis"]
+dhat-heap = ["dep:dhat"]
+memory-cache = ["dep:moka"]
+redis-cache = ["dep:redis", "dep:base64"]
+compress-cache-results = ["dep:async-compression", "dep:cfg-if"]
+encrypt-cache-results = ["dep:chacha20poly1305", "dep:chacha20"]
+cec-cache-results = ["compress-cache-results", "encrypt-cache-results"]
+experimental-io-uring = ["actix-web/experimental-io-uring"]
+use-non-static-synonyms-search = ["thesaurus"]
```
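Since `memory-cache` is the default feature, the other cache backends declared in `[features]` have to be selected explicitly at build time; a hedged sketch of how that selection might look (the flag combinations are illustrative, not taken from the repo's docs):

```shell
# Default build: in-memory caching via moka.
cargo build --release

# Redis-backed caching instead of the in-memory default.
cargo build --release --no-default-features --features redis-cache

# Both backends plus compressed and encrypted cached results.
cargo build --release --features "redis-cache cec-cache-results"
```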
**Dockerfile**

```diff
@@ -1,4 +1,4 @@
-FROM --platform=$BUILDPLATFORM rust:1.73.0-alpine3.18 AS chef
+FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18 AS chef
 # We only pay the installation cost once,
 # it will be cached from the second build onwards
 RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev upx perl build-base
```
**README.md** (15 changes)

```diff
@@ -32,7 +32,7 @@
         <a href=""
           ><img
             alt="Maintenance"
-            src="https://img.shields.io/maintenance/yes/2023?style=flat-square"
+            src="https://img.shields.io/maintenance/yes/2024?style=flat-square"
           />
         </a>
         <a href="https://www.codefactor.io/repository/github/neon-mmd/websurfx">
@@ -115,6 +115,9 @@
 - 🚀 Easy to setup with Docker or on bare metal with various installation and deployment options.
 - ⛔ Search filtering to filter search results based on four different levels.
 - 💾 Different caching levels focusing on reliability, speed and resiliancy.
+- ⬆️ Organic Search results (with ranking algorithm builtin to rerank the search results according to user's search query.).
+- 🔒 Different compression and encryption levels focusing on speed and privacy.
+- 🧪 Experimental IO-uring feature for Linux operating systems focused on performance of the engine.
 - 🔐 Fast, private, and secure
 - 🆓 100% free and open source
 - 💨 Ad-free and clean results
@@ -141,7 +144,7 @@ redis-server --port 8082 &
 
 Once you have started the server, open your preferred web browser and navigate to <http://127.0.0.1:8080> to start using Websurfx.
 
-> **Note**
+> [!Note]
 >
 > 1. The project is no longer in the testing phase and is now ready for production use.
 > 2. There are many features still missing, like `support for image search`, `different categories`, `quick apps`, etc., but they will be added soon as part of future releases.
@@ -166,7 +169,7 @@ Websurfx comes loaded with several themes and color schemes, which you can apply
 
 # Multi-Language Support 🌍
 
-> **Note**
+> [!Note]
 > Currently, we do not support other languages, but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry.
 
 **[⬆️ Back to Top](#--)**
@@ -181,7 +184,7 @@ At present, we only support x86_64 architecture systems, but we would love to ha
 
 ## Why Websurfx?
 
-The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engines, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results, which most engines don't have a foolproof answer to. Until now. With Websurfx, I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and micro-apps or quick results (providing a calculator, currency exchanges, etc. in the search results).
+The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engines, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results, which most engines don't have a full-proof answer to. Until now. With Websurfx, I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and micro-apps or quick results (providing a calculator, currency exchanges, etc. in the search results).
 
 ## Why AGPLv3?
 
@@ -218,7 +221,7 @@ Several areas that we need a bit of help with at the moment are:
 
 # Documentation 📘
 
-> **Note**
+> [!Note]
 > We welcome any contributions to the [documentation](docs) as this will benefit everyone who uses this project.
 
 **[⬆️ Back to Top](#--)**
@@ -267,7 +270,7 @@ We would like to thank the following people for their contributions and support:
 
 <p>
   <a href="https://github.com/neon-mmd/websurfx/stargazers">
-    <img src="https://reporoster.com/stars/dark/neon-mmd/websurfx" />
+    <img src="http://reporoster.com/stars/dark/neon-mmd/websurfx"/>
   </a>
 </p>
```
**build.rs** (85 changes, new file)

```diff
@@ -0,0 +1,85 @@
+//! A build module of the application which minifies the project's css and js files on build which
+//! helps reduce the initial page by loading the files faster.
+
+#![forbid(unsafe_code, clippy::panic)]
+#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
+#![warn(clippy::cognitive_complexity, rust_2018_idioms)]
+
+// ------- Imports -------
+use lightningcss::stylesheet::{MinifyOptions, ParserOptions, PrinterOptions, StyleSheet};
+use minify_js::{minify, Session, TopLevelMode};
+use std::{
+    fs::{read_dir, read_to_string, File, OpenOptions},
+    io::{Read, Write},
+};
+
+// ------- Constants -------
+/// A constant for the path to the public/theme folder in the codebase.
+const COMMON_STATIC_SOURCE_CODE_FOLDER: &str = "./public/static/";
+/// A constant for the names of the folders located in the "/public/static/"
+/// folder in the codebase which contains the css files to be minified.
+const STYLE_FOLDERS: [&str; 2] = ["themes", "colorschemes"];
+/// A constant for the environment variable name.
+const PACKAGE_ENVIRONMENT_VARIABLE: &str = "PKG_ENV";
+/// A constant for the `prod` value of the `pkg_env` environment variable.
+const PRODUCTION_PKG_ENV_VARIABLE_VALUE: &str = "prod";
+
+/// A main function which minifies both css and js files using `lightningcss` and `minify_js` when
+/// the `PKG_ENV` environment and it is set to the value of `prod`.
+///
+/// # Error
+///
+/// This function returns the unit type when the minification process runs successfully otherwise
+/// it returns a standard error.
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    if let Ok(pkg_env_var) = std::env::var(PACKAGE_ENVIRONMENT_VARIABLE) {
+        if pkg_env_var.to_lowercase() == PRODUCTION_PKG_ENV_VARIABLE_VALUE {
+            // A for loop that loops over each file name containing in the `colorschemes` and `themes` folders
+            // and minifies it using the `lightningcss` minifier.
+            for folder_name in STYLE_FOLDERS {
+                for file in read_dir(format!("{COMMON_STATIC_SOURCE_CODE_FOLDER}{folder_name}/"))? {
+                    let file_path = file?.path();
+                    let source = read_to_string(file_path.clone())?;
+
+                    let mut stylesheet = StyleSheet::parse(&source, ParserOptions::default())
+                        .map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;
+
+                    stylesheet.minify(MinifyOptions::default())?;
+                    let minified_css = stylesheet.to_css(PrinterOptions::default())?;
+
+                    let mut old_css_file = OpenOptions::new()
+                        .write(true)
+                        .truncate(true)
+                        .open(file_path)?;
+                    old_css_file.write_all(minified_css.code.as_bytes())?;
+                    old_css_file.flush()?;
+                }
+            }
+
+            // A for loop that loops over each file name containing in the `public/static` folder and minifies
+            // it using the `minify-js` minifier.
+            for file in read_dir(COMMON_STATIC_SOURCE_CODE_FOLDER)? {
+                let file_path = file?.path();
+                if file_path.is_file() {
+                    let mut code = Vec::new();
+                    let mut js_file = File::open(file_path.clone())?;
+                    js_file.read_to_end(&mut code)?;
+
+                    drop(js_file);
+
+                    let mut out = Vec::new();
+                    minify(&Session::new(), TopLevelMode::Global, &code, &mut out)
+                        .map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;
+
+                    let mut old_js_file = OpenOptions::new()
+                        .write(true)
+                        .truncate(true)
+                        .open(file_path)?;
+                    old_js_file.write_all(&out)?;
+                    old_js_file.flush()?;
+                }
+            }
+        }
+    }
+    Ok(())
+}
```
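Because `main` above only minifies when `PKG_ENV` equals `prod` (case-insensitively), a development build skips the whole pass and setting the variable opts in. For example (assuming a standard cargo invocation):

```shell
# Regular build: PKG_ENV unset, so build.rs leaves the static assets untouched.
cargo build --release

# Production build: triggers the lightningcss/minify-js pass over public/static/.
PKG_ENV=prod cargo build --release
```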
**Builder image Dockerfile**

```diff
@@ -1,5 +1,5 @@
 # Create Builder image
-FROM --platform=$BUILDPLATFORM rust:1.73.0-alpine3.18
+FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18
 
 # Install required dependencies
 RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base
```
**Configuration documentation**

```diff
@@ -2,14 +2,14 @@
 
 ## Installed From Source
 
-If you have built `websurfx` from source then the configuration file will be located under project directory (codebase) at `websurfx/`
+If you have built `websurfx` from the source then the configuration file will be located under the project directory (codebase) at `websurfx/`
 
-> **Note**
-> If you have built websurfx with unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
+> [!Note]
+> If you have built websurfx with an unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under the project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
 
 ## Installed From Package
 
-If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` and make the changes there and rerun the websurfx server.
+If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` make the changes there and rerun the websurfx server.
 
 Some of the configuration options provided in the file are stated below. These are subdivided into the following categories:
 
@@ -42,13 +42,13 @@ Some of the configuration options provided in the file are stated below. These a
 >
 > - Level 0 - With this level no search filtering occurs.
 > - Level 1 - With this level some search filtering occurs.
-> - Level 2 - With this level the upstream search engines are restricted to send sensitive contents like NSFW search results, etc.
-> - Level 3 - With this level the regex based filter lists is used alongside level 2 to filter more search results that have slipped in or custom results that needs to be filtered using the filter lists.
-> - Level 4 - This level is similar to level 3 except in this level the regex based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
+> - Level 2 - With this level the upstream search engines are restricted to sending sensitive content like NSFW search results, etc.
+> - Level 3 - With this level the regex-based filter lists are used alongside level 2 to filter more search results that have slipped in or custom results that need to be filtered using the filter lists.
+> - Level 4 - This level is similar to level 3 except in this level the regex-based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are a parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
 
 ## Website
 
-- **colorscheme:** The colorscheme name which should be used for the website theme (the name should be in accordance to the colorscheme file name present in `public/static/colorschemes` folder).
+- **colorscheme:** The colorscheme name which should be used for the website theme (the name should be by the colorscheme file name present in the `public/static/colorschemes` folder).
 
 > By Default we provide 12 colorschemes to choose from these are:
 >
@@ -65,19 +65,27 @@ Some of the configuration options provided in the file are stated below. These a
 > 11. tokyo-night
 > 12. tomorrow-night
 
-- **theme:** The theme name which should be used for the website (again, the name should be in accordance to the theme file name present in `public/static/themes` folder).
+- **theme:** The theme name that should be used for the website (again, the name should be by the theme file name present in the `public/static/themes` folder).
 
 > By Default we provide 1 theme to choose from these are:
 >
 > 1. simple
 
+- **animation:** The animation name that should be used for the website (again, the name should be by the animation file name present in the `public/static/animations` folder).
+
+> By Default we provide 1 animation to choose from these are:
+>
+> 1. simple-frosted-glow
+
 ## Cache
 
-- **redis_url:** Redis connection url address on which the client should connect on.
+- **redis_url:** Redis connection URL address on which the client should connect.
 
 > **Note**
 > This option can be commented out if you have compiled the app without the `redis-cache` feature. For more information, See [**building**](./building.md).
 
+- **cache_expiry_time:** The maximum time the server will store the cache for, after which it flushs/removes/expires/invalidates the cached results. (value provided to this option should be in seconds and the value should be greater than or equal to 60 seconds).
+
 ## Search Engines
 
 - **upstream_search_engines:** Select from the different upstream search engines from which the results should be fetched.
```
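Putting the two install paths described above together, copying the config into the per-user location might look like this (the paths follow the docs; the first source path assumes a from-source checkout):

```shell
# From a source checkout (unstable/rolling/edge):
mkdir -p ~/.config/websurfx
cp websurfx/config.lua ~/.config/websurfx/

# From a distro package install:
cp /etc/xdg/websurfx/config.lua ~/.config/websurfx/
```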
@ -12,7 +12,7 @@ This page of the docs outlines how to get **Websurfx** up and running in a devel
|
|||
- [NixOS Dev Shell using Nix Flake](#nixos-dev-shell-using-nix-flake-)
|
||||
- [Local Development with Docker Compose](#local-development-with-docker-compose-)
|
||||
- [Project Commands](#project-commands)
|
||||
+ - [Environment Variables](#environment-variables)
|
||||
- [Environment Variables](#environment-variables)
|
||||
- [Git Strategy](#git-strategy)
|
||||
- [Flow](#git-flow)
|
||||
- [Branches](#git-branch-naming)
|
||||
|
@ -50,13 +50,14 @@ Before you start working on the project. You will need the following packages in
|
|||
- The latest version of `cargo` installed on your system which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
|
||||
- The latest version of `npm` installed on your system which is required to allow the installation of other tools necessary for the project. The installation for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
- The latest version of `redis` installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation for this can be found [here](https://redis.io/docs/getting-started/installation/).
|
||||
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` run the following command:
|
||||
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` and plugins run the following command:
|
||||
|
||||
```shell
|
||||
$ npm i -g stylelint
|
||||
$ npm i -g stylelint stylelint-config-standard postcss-lit
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command the dollar sign(**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkgexec`, or any other privileged access methods.
|
||||
|
||||
- `Cargo-watch` installed on your system which will allow you to auto-build the project when any checks occur in the source code files in the codebase (`websurfx` directory). Before you install `cargo-watch` on your system, make sure you have `cargo` installed on your system. To install `cargo-watch` run the following command:
|
||||
|
@ -68,7 +69,7 @@ cargo install cargo-watch
|
|||
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
|
||||
- Finally, The latest version of `Docker` is installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> For **rolling release Linux distributions (distros)**, the above-mentioned required packages except for `stylelint` and `cargo-watch` can also be installed via the distro-specific package manager.
|
||||
>
|
||||
> **For Example:**
|
||||
|
@ -148,7 +149,7 @@ Once you have finished running the above command, Websurfx should now be served
|
|||
|
||||
This section covers how to use and set up the Gitpod development environment for working on the project.
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> By default the project only supports the Vscode **IDE/Editor** for Gitpod.
|
||||
|
||||
#### Launching Gitpod
|
||||
|
@ -211,12 +212,6 @@ This section covers how to setup the project for development using the `NixOS de
|
|||
Before you start working on the project. You will need the following packages installed on your system:
|
||||
|
||||
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
|
||||
- Finally, The latest version of `Docker` is installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).
|
||||
|
||||
> Optionally, On `NixOS` the above-mentioned required packages except for `stylelint` and `cargo-watch` could also be installed by following the link to the installation instructions provided below:
|
||||
>
|
||||
> - `Git`: https://search.nixos.org/packages?channel=23.05&show=git&from=0&size=50&sort=relevance&type=packages&query=git
|
||||
> - `Docker`: https://search.nixos.org/packages?channel=23.05&show=docker&from=0&size=50&sort=relevance&type=packages&query=docker
|
||||
|
||||
#### Setting up Pre-commit Checks
|
||||
|
||||
|
@ -233,7 +228,11 @@ Then run the following command to setup the `NixOS dev-shell`:
|
|||
nix develop
|
||||
```
|
||||
|
||||
Once you have finished running the above commands then run the following command to setup the `pre-commit` checks:
|
||||
> You can use `nix-direnv` to simplify entering into the `nix-shell`. Its setup is beyond the scope of this guide. Read more about it here: [nix-direnv](https://github.com/nix-community/nix-direnv)
|
||||
|
||||
This will add `docker`, `cargo-watch`, and other dev environment essentials to your `nix-shell` so you don't have to install everything imperatively.
|
||||
|
||||
After finishing the commands above, run the following command to setup the `pre-commit` checks:
|
||||
|
||||
```shell
|
||||
cargo test
|
||||
|
@ -243,14 +242,16 @@ By running the above-mentioned command, it will automatically set up all the pre
|
|||
|
||||
#### Post Setup Requirements
|
||||
|
||||
After you have done setting up pre-commit checks, then you may need to fulfill a few more requirements to finish setting up the development environment with `NixOS dev-shell`. These include:
|
||||
|
||||
- `Cargo-watch` installed on your system which will allow you to auto-build the project when any checks occur in the source code files in the codebase (`websurfx` directory). Before you install `cargo-watch` on your system, make sure you have `cargo` installed on your system. To install `cargo-watch` run the following command:
|
||||
The final step is to run
|
||||
|
||||
```shell
|
||||
cargo install cargo-watch
|
||||
npm i -D stylelint-config-standard postcss-lit`
|
||||
```
|
||||
|
||||
This will add `node_modules` in the current directory.
|
||||
|
||||
Run `git commit` and, if everything is set up correctly, it should say that your branch is up to date.
|
||||
|
||||
#### Running the Project
|
||||
|
||||
If you have followed the above section, you should now be inside a `dev-shell` environment. In the same environment, run the following command to run the project:
|
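A minimal sketch of such a command, assuming the `cargo-watch` binary provided by the dev-shell (the exact flags are up to you), which auto-rebuilds and re-runs the app on every change:

```shell
cargo watch -q -x run
```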
||||
|
@ -309,13 +310,13 @@ Before you start working on the project. You will need the following packages in
|
|||
$ npm i -g stylelint
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
|
||||
- Finally, The latest version of `Docker` is installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> For **rolling release Linux distributions (distros)**, all of the above-mentioned required packages can also be installed via the distro-specific package manager.
|
||||
>
|
||||
> **For Example:**
|
||||
|
@ -354,7 +355,7 @@ If you have followed the above section then you should have a cloned repository
|
|||
$ docker compose -f dev.docker-compose.yml up
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.
|
||||
|
@ -365,13 +366,12 @@ Once you have finished running the above command, Websurfx should now be served
|
|||
|
||||
- `cargo build`: Builds the project.
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> When you build the project for the first time with the above command, it will compile every dependency in the project, which will then be cached on your system. So the next time you compile the app, it will only compile the new changes.
|
||||
|
||||
+ `cargo run`: Starts the app and serves the project on http://127.0.0.1:8080.
|
||||
- `cargo run`: Starts the app and serves the project on http://127.0.0.1:8080.
|
||||
|
||||
|
||||
> **Important**
|
||||
> [!Important]
|
||||
> You must run the build command first.
|
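For instance, a minimal first session simply chains the two commands listed above:

```shell
cargo build
cargo run
```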
||||
|
||||
#### Development
|
||||
|
@ -457,10 +457,10 @@ When you submit your pull request, include the required info, by filling out the
|
|||
- If any dependencies were added, explain why they were needed, state the associated cost, and confirm they do not introduce any security, privacy, or speed issues.
|
||||
- Optionally, provide a checklist of all the changes that were included in the pull request.
|
||||
|
||||
> **Important**
|
||||
> [!Important]
|
||||
> Make sure to fill in all the required/mandatory sections of the pull request, as filling them helps us distinguish between spam pull requests and legitimate ones.
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> The pull request template contains comments of the form `<!-- -->`, which provide a guide on what should be provided under each heading of the template. These comments are never rendered when the pull request is created or updated, and hence anything provided in such comments is never displayed.
|
||||
|
||||
## Resources for Beginners
|
||||
|
@ -497,7 +497,7 @@ For Rust, CSS, JS, HTML, Git, and Docker- you'll need an IDE (e.g. [VSCode](http
|
|||
|
||||
Linting is done using [Cargo Clippy](https://doc.rust-lang.org/clippy/) and [StyleLint](https://stylelint.io/) or [ESLint](https://eslint.org/). Also, linting is run as a git pre-commit hook.
|
||||
|
||||
> **Important**
|
||||
> [!Important]
|
||||
> All lint checks must pass before any PR can be merged.
|
||||
|
||||
Styleguides to follow:
|
||||
|
@ -508,7 +508,7 @@ Styleguides to follow:
|
|||
|
||||
## Application Structure
|
||||
|
||||
> **Important**
|
||||
> [!Important]
|
||||
> We follow Unix-style naming conventions for all the files and folders in the project, which means the names of files and folders should be in lowercase and every word should be separated with an underscore. (The exception is all files under the `themes` and `colorschemes` folders in the frontend's source code, which require that the names of files and folders be in lowercase with the words separated by a hyphen.)
|
||||
|
||||
**Files in the root of the codebase:** `./`
|
||||
|
@ -540,30 +540,42 @@ Styleguides to follow:
|
|||
./public/
|
||||
├── robots.txt # Robots file for the Website.
|
||||
├── images # Images for the Website.
|
||||
├── static # The directory containing all the UI handlers.
|
||||
│ ├── cookies.js # Handles the loading of saved cookies.
|
||||
│ ├── error_box.js # Handles the toggling functionality of the error box on the search page.
|
||||
│ ├── index.js # Functions to handle the search functionality of the search bar.
|
||||
│ ├── pagination.js # Functions to handle the navigation between the previous and next page in the search page.
|
||||
│ ├── search_area_options.js # Changes the search options under the search bar in the search page according to the safe search level set using the URL safesearch parameter.
|
||||
│ ├── settings.js # Handles the settings and saving of all the settings page options as a cookie.
|
||||
│ ├── colorschemes # A folder containing all the popular colorscheme files as CSS files.
|
||||
│ └── themes # A folder containing all the popular theme files as CSS files.
|
||||
└── templates # Folder containing all the template files for the different pages on the website.
|
||||
├── 404.html # A 404-page template.
|
||||
├── about.html # An about page template.
|
||||
├── bar.html # A template for the search bar.
|
||||
├── cookies_tab.html # A template for the cookies tab for the settings page.
|
||||
├── engines_tab.html # A template for the engines tab for the settings page.
|
||||
├── footer.html # A footer template for all pages.
|
||||
├── general_tab.html # A template for the general tab for the settings page.
|
||||
├── header.html # A header template for all pages.
|
||||
├── index.html # A home page template.
|
||||
├── navbar.html # A navbar template for the header template.
|
||||
├── search.html # A search page template.
|
||||
├── search_bar.html # A search bar template specifically for the search page.
|
||||
├── settings.html # A settings page template.
|
||||
└── user_interface_tab.html # A template for the user interface tab for the settings page.
|
||||
└── static # The directory containing all the UI handlers.
|
||||
├── cookies.js # Handles the loading of saved cookies.
|
||||
├── error_box.js # Handles the toggling functionality of the error box on the search page.
|
||||
├── index.js # Functions to handle the search functionality of the search bar.
|
||||
├── pagination.js # Functions to handle the navigation between the previous and next page in the search page.
|
||||
├── search_area_options.js # Changes the search options under the search bar in the search page according to the safe search level set using the URL safesearch parameter.
|
||||
├── settings.js # Handles the settings and saving of all the settings page options as a cookie.
|
||||
├── colorschemes # A folder containing all the popular colorscheme files as CSS files.
|
||||
└── themes # A folder containing all the popular theme files as CSS files.
|
||||
```
|
||||
|
||||
**Frontend Maud HTML Framework Source:** `./src/templates/`
|
||||
|
||||
```
|
||||
./src/templates/
|
||||
├── mod.rs # A module file for the rust project.
|
||||
├── partials # A folder containing the code for partials for the views.
|
||||
│ ├── bar.rs # Provides partial code for the search bar.
|
||||
│ ├── footer.rs # Provides partial code for the footer section.
|
||||
│ ├── header.rs # Provides partial code for the header section.
|
||||
│ ├── mod.rs # A module file for the rust project.
|
||||
│ ├── navbar.rs # Provides partial code for the navbar inside the header section.
|
||||
│ ├── search_bar.rs # Provides partial code for the search bar present in the search page.
|
||||
│ └── settings_tabs # A folder containing all the partials for the settings page tabs.
|
||||
│ ├── cookies.rs # Provides partial code for the cookies tab.
|
||||
│ ├── engines.rs # Provides partial code for the engines tab.
|
||||
│ ├── general.rs # Provides partial code for the general tab.
|
||||
│ ├── mod.rs # A module file for the rust project.
|
||||
│ └── user_interface.rs # Provides partial code for the user interface tab.
|
||||
└── views # A folder containing the code for the views.
|
||||
├── about.rs # Provides code for the about page view.
|
||||
├── index.rs # Provides code for the homepage view.
|
||||
├── mod.rs # A module file for the rust project.
|
||||
├── not_found.rs # Provides code for the 404 page view.
|
||||
├── search.rs # Provides code for the search page view.
|
||||
└── settings.rs # Provides code for the settings page view.
|
||||
```
|
||||
|
||||
**Backend Source:** `./src/`
|
||||
|
@ -600,12 +612,13 @@ Styleguides to follow:
|
|||
│ ├── aggregator.rs # Provides code to aggregate and fetch results from the upstream engines.
|
||||
│ ├── mod.rs # A module file for the rust project.
|
||||
│ └── user_agent.rs # Provides a helper function to pass random user agents in the server request code, to improve user privacy and avoid being detected as a bot.
|
||||
└── server # A folder that holds code to handle the routes for the search engine website.
|
||||
├── mod.rs # A module file for the rust project.
|
||||
├── router.rs # Provides functions to handle the different routes on the website.
|
||||
└── routes # A folder that contains code to handle the bigger route for the website.
|
||||
├── mod.rs # A module file for the rust project.
|
||||
└── search.rs # Provides the function to handle the search route.
|
||||
├── server # A folder that holds code to handle the routes for the search engine website.
|
||||
│ ├── mod.rs # A module file for the rust project.
|
||||
│ ├── router.rs # Provides functions to handle the different routes on the website.
|
||||
│ └── routes # A folder that contains code to handle the bigger route for the website.
|
||||
│ ├── mod.rs # A module file for the rust project.
|
||||
│ └── search.rs # Provides the function to handle the search route.
|
||||
└── templates # A module that provides and handles Maud HTML framework source code for the search engine website (subfolders and files are explained in the above frontend section.)
|
||||
```
|
||||
|
||||
## Development Tools
|
||||
|
|
|
@ -44,7 +44,7 @@ nix build .#websurfx
|
|||
nix run .#websurfx
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
Once you have run the above set of commands, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
|
||||
|
@ -89,7 +89,7 @@ nix build .#websurfx
|
|||
nix run .#websurfx
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
## Other Distros
|
||||
|
@ -128,6 +128,16 @@ cd websurfx
|
|||
|
||||
Once you have changed into the `websurfx` directory, follow the build options listed below:
|
||||
|
||||
> [!Note]
|
||||
> Before you start building the search engine using one of the below-listed commands, we would strongly recommend setting the `PKG_ENV` environment variable, as this applies some special optimizations to the code to reduce the file size and improve the page load speed of the website.
|
||||
> To set the `PKG_ENV` environment variable in the `bash` shell, run the following command:
|
||||
>
|
||||
> ```bash
|
||||
> export PKG_ENV="prod"
|
||||
> ```
|
||||
>
|
||||
> For how to set environment variables in other shells, follow the documentation of the specific shell you are using.
|
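> For instance, a sketch for the `fish` shell (assuming you use fish):
>
> ```fish
> set -gx PKG_ENV "prod"
> ```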
||||
|
||||
### Hybrid Cache
|
||||
|
||||
> For more information on the features and their pros and cons, see: [**Features**](./features.md)
|
||||
|
@ -285,22 +295,22 @@ Then run the following command to deploy the search engine:
|
|||
$ docker compose up -d
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
Then launch the browser of your choice and navigate to http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>.
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> The official prebuilt images only support `stable` versions of the app and do not support `rolling/edge/unstable` versions. But with support and contribution, they could be made available for these versions as well 🙂.
|
||||
|
||||
## Manual Deployment
|
||||
|
||||
This section covers how to deploy the app with Docker by manually building the image and deploying it.
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> This section is provided for those who want to further customize the docker image or for those who are extra cautious about security.
|
||||
|
||||
> **Warning**
|
||||
> [!Warning]
|
||||
> A note of caution: the project currently only supports the **x86-64** architecture, so we do not recommend deploying the project on devices with other architectures. If you still want to do so, then **do it at your own risk**.
|
||||
|
||||
### Unstable/Edge/Rolling
|
||||
|
@ -317,19 +327,19 @@ After that edit the config.lua file located under `websurfx` directory. In the c
|
|||
```lua
|
||||
-- ### General ###
|
||||
logging = true -- an option to enable or disable logs.
|
||||
debug = false -- an option to enable or disable debug mode.
|
||||
threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0).
|
||||
debug = false -- an option to enable or disable debug mode.
|
||||
threads = 10 -- the number of threads that the app will use to run (the value should be greater than 0).
|
||||
|
||||
-- ### Server ###
|
||||
port = "8080" -- port on which server should be launched
|
||||
binding_ip = "0.0.0.0" --ip address on the server should be launched.
|
||||
production_use = false -- whether to use production mode or not (in other words, this option should be enabled if the app is hosted on a server to provide a service to a large number of users, i.e. more than one)
|
||||
port = "8080" -- port on which server should be launched
|
||||
binding_ip = "127.0.0.1" --ip address on the which server should be launched.
|
||||
production_use = false -- whether to use production mode or not (in other words, this option should be enabled if the app is hosted on a server to provide a service to a large number of users, i.e. more than one)
|
||||
-- if production_use is set to true
|
||||
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
|
||||
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
|
||||
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
|
||||
rate_limiter = {
|
||||
number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
|
||||
time_limit = 3, -- The time limit in which the number of requests that should be accepted.
|
||||
number_of_requests = 20, -- The number of requests that are allowed within the provided time limit.
|
||||
time_limit = 3, -- The time limit within which the above number of requests should be accepted.
|
||||
}
|
||||
|
||||
-- ### Search ###
|
||||
|
@ -360,15 +370,18 @@ safe_search = 2
|
|||
-- tomorrow-night
|
||||
-- }}
|
||||
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
|
||||
theme = "simple" -- the theme name which should be used for the website
|
||||
theme = "simple" -- the theme name which should be used for the website
|
||||
|
||||
-- ### Caching ###
|
||||
redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
|
||||
|
||||
redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
|
||||
cache_expiry_time = 600 -- This option sets the expiry time of the search results (value in seconds; it should be greater than or equal to 60 seconds).
|
||||
-- ### Search Engines ###
|
||||
upstream_search_engines = {
|
||||
DuckDuckGo = true,
|
||||
Searx = false,
|
||||
DuckDuckGo = true,
|
||||
Searx = false,
|
||||
Brave = false,
|
||||
Startpage = false,
|
||||
LibreX = false,
|
||||
} -- select the upstream search engines from which the results should be fetched.
|
||||
```
|
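If you point `redis_url` at a locally running Redis (as the `127.0.0.1` value above does), make sure a server is actually listening on that port before starting the app; a sketch:

```shell
redis-server --port 8082
```

When deploying with Docker Compose, the `redis` service from the compose file plays this role instead.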
||||
|
||||
|
@ -378,7 +391,7 @@ After this make sure to edit the `docker-compose.yml` and `Dockerfile` files as
|
|||
$ docker compose up -d --build
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
This will take around 5-10 minutes for the first deployment; afterwards, the Docker build stages will be cached, so subsequent builds will be faster. After the above step finishes, launch your preferred browser and navigate to `http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>`.
|
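To quickly verify that the instance is reachable, you can send a request from the same device (a sketch; substitute your actual address and port):

```shell
curl -I "http://127.0.0.1:8080/"
```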
||||
|
|
|
@ -4,9 +4,12 @@
|
|||
|
||||
This page provides a list of `Websurfx` instances provided by us and our community.
|
||||
|
||||
|URL|Network|Version|Location|Behind Cloudflare?|Maintained By|TLS|IPv6|Comment|
|
||||
|URL|Network|Version|Location|Status|Maintained By|TLS|IPv6|Comment|
|
||||
|-|-|-|-|-|-|-|-|-|
|
||||
|https://alamin655-websurfx.hf.space/|www|v0.21.4|🇺🇸 US||[websurfx project](https://github.com/neon-mmd/websurfx)|✅|||
|
||||
|https://websurfx.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|
||||
|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-spacex.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|
||||
|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.instance.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|
||||
|https://alamin655-surfx.hf.space|www|stable|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-surfx.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|
||||
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
|
@ -4,10 +4,10 @@ A modern-looking, lightning-fast, privacy-respecting, secure [meta search engine
|
|||
|
||||
# Motivation
|
||||
|
||||
Most meta search engines tend to be slow, lack high level of customization and missing many features and all of them like security as they are written in unsafe languages like python, javascript, etc which tend to open a wide variety of vulnerabilities which can also sometimes pose a threat to privacy as sometimes this can be exploited and can be used to leveraged to leak out sensitive information which is never good.
|
||||
Most meta search engines tend to be slow, lack a high level of customization, and miss many features, and all of them lack security, as they are written in unsafe languages like Python, JavaScript, etc. These languages tend to open a wide variety of vulnerabilities, which can sometimes pose a threat to privacy, since they can be exploited and leveraged to leak sensitive information, which is never good.
|
||||
|
||||
# Solution
|
||||
|
||||
Websurfx is a project which seeks to provide privacy, security, speed and all the features which the user wants.
|
||||
Websurfx is a project that seeks to provide privacy, security, speed, and all the features that the user wants.
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
345
docs/theming.md
|
@ -1,17 +1,25 @@
|
|||
# Colorschemes
|
||||
# Theming
|
||||
|
||||
## Built-in
|
||||
## Colorschemes
|
||||
|
||||
By default `websurfx` comes with 9 colorschemes to choose from which can be easily chosen using the config file. To how to change colorschemes please view the [Configuration](https://github.com/neon-mmd/websurfx/wiki/configuration) section of the wiki.
|
||||
### Built-in
|
||||
|
||||
## Custom
|
||||
By default, `websurfx` comes with 12 colorschemes to choose from, which can be easily selected using the config file or via the settings page on the website.
|
||||
|
||||
Creating coloschemes is as easy as it gets it requires the user to have a theme file name with the colorscheme in which every space should be replaced with a `-` (dash) and it should end with a `.css` file extension. After creating the file you need to add the following code with the `colors` you want:
|
||||
> To learn how to change colorschemes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
### Custom
|
||||
|
||||
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme, where each word is separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
|
||||
|
||||
Creating colorschemes is as easy as it gets: it requires the user to create a colorscheme file named after the colorscheme, in which every space should be replaced with a `-` (dash), ending with a `.css` file extension. After creating the file, you need to add the following code with the `colors` you want to include:
|
||||
|
||||
```css
|
||||
:root {
|
||||
--background-color: <background color>;
|
||||
--foreground-color: <foreground color (text color on the website) >;
|
||||
--logo-color: <logo color (the color of the logo svg image on the website homepage)>;
|
||||
--color-one: <color 1>;
|
||||
--color-two: <color 2>;
|
||||
--color-three: <color 3>;
|
||||
|
@ -22,7 +30,7 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
}
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> Please refer to the theme files located under `public/static/themes` to better understand where each color is being used.
|
||||
|
||||
**Example of `catppuccin-mocha` colorscheme:**
|
||||
|
@ -31,6 +39,7 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
:root {
|
||||
--background-color: #1e1e2e;
|
||||
--foreground-color: #cdd6f4;
|
||||
--logo-color: #f5c2e7;
|
||||
--color-one: #45475a;
|
||||
--color-two: #f38ba8;
|
||||
--color-three: #a6e3a1;
|
||||
|
@ -41,21 +50,31 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
}
|
||||
```
|
||||
|
||||
# Themes
|
||||
## Themes
|
||||
|
||||
## Built-in
|
||||
### Built-in
|
||||
|
||||
By default `websurfx` comes with 1 theme to choose from which can be easily chosen using the config file. To how to change themes please view the [Configuration](https://github.com/neon-mmd/websurfx/wiki/configuration) section of the wiki.
|
||||
By default, `websurfx` comes with 1 theme to choose from, which can be easily selected using the config file or via the settings page on the website.
|
||||
|
||||
## Custom
|
||||
> To learn how to change themes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
To write custom color scheme, it requires the user to have some knowledge of `css stylesheets`.
|
||||
### Custom
|
||||
|
||||
**Here is an example of `simple theme` (which we provide by default with the app) which will give the user a better idea on how to create a custom theme using it as a template:**
|
||||
> This section expects the user to have some knowledge of `css`.
|
||||
|
||||
### General
|
||||
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme, where each word is separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
|
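For instance, a sketch of creating a hypothetical theme called `my dark theme` (the name is purely illustrative):

```shell
touch public/static/themes/my-dark-theme.css
```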
||||
|
||||
Here is an example of the `simple` theme (which we provide by default with the app), which will give you a better idea of how you can create your own custom theme for the website:
|
||||
|
||||
#### General
|
||||
|
||||
```css
|
||||
@font-face {
|
||||
font-family: Rubik;
|
||||
src: url('https://fonts.googleapis.com/css2?family=Rubik:wght@400;500;600;700;800&display=swap');
|
||||
fallback: sans-serif;
|
||||
}
|
||||
|
||||
* {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
|
@ -72,11 +91,17 @@ body {
|
|||
justify-content: space-between;
|
||||
align-items: center;
|
||||
height: 100vh;
|
||||
background: var(--color-one);
|
||||
font-family: Rubik, sans-serif;
|
||||
background-color: var(--background-color);
|
||||
}
|
||||
|
||||
/* enforce font for buttons */
|
||||
button {
|
||||
font-family: Rubik, sans-serif;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the index page
|
||||
#### Styles for the index page
|
||||
|
||||
```css
|
||||
.search-container {
|
||||
|
@ -87,44 +112,69 @@ body {
|
|||
align-items: center;
|
||||
}
|
||||
|
||||
.search-container svg {
|
||||
color: var(--logo-color);
|
||||
}
|
||||
|
||||
.search-container div {
|
||||
display: flex;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the search box and search button
|
||||
#### Styles for the search box and search button
|
||||
|
||||
```css
|
||||
.search_bar {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.search_bar input {
|
||||
padding: 1rem;
|
||||
border-radius: 6px;
|
||||
padding: 2.6rem 2.2rem;
|
||||
width: 50rem;
|
||||
height: 3rem;
|
||||
outline: none;
|
||||
border: none;
|
||||
box-shadow: rgba(0, 0, 0, 1);
|
||||
background: var(--foreground-color);
|
||||
box-shadow: rgb(0 0 0 / 1);
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
outline-offset: 3px;
|
||||
font-size: 1.6rem;
|
||||
}
|
||||
|
||||
.search_bar input:focus {
|
||||
outline: 2px solid var(--foreground-color);
|
||||
}
|
||||
|
||||
.search_bar input::placeholder {
|
||||
color: var(--foreground-color);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.search_bar button {
|
||||
padding: 1rem;
|
||||
border-radius: 0;
|
||||
padding: 2.6rem 3.2rem;
|
||||
border-radius: 6px;
|
||||
height: 3rem;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
outline: none;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
border: none;
|
||||
transition: 0.1s;
|
||||
gap: 0;
|
||||
background: var(--background-color);
|
||||
color: var(--color-three);
|
||||
background-color: var(--color-six);
|
||||
color: var(--background-color);
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
.search_bar button:active {
|
||||
outline: 2px solid var(--color-three);
|
||||
}
|
||||
|
||||
.search_bar button:active,
|
||||
.search_bar button:hover {
|
||||
filter: brightness(1.2);
|
||||
|
@ -141,13 +191,19 @@ body {
|
|||
width: 20rem;
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
padding: 1rem 2rem;
|
||||
padding: 1.2rem 2rem;
|
||||
border-radius: 0.5rem;
|
||||
outline: none;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
border: none;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.search_area .search_options select:active,
|
||||
.search_area .search_options select:hover {
|
||||
outline: 2px solid var(--color-three);
|
||||
}
|
||||
|
||||
.search_area .search_options option:hover {
|
||||
background-color: var(--color-one);
|
||||
}
|
||||
|
@ -170,9 +226,7 @@ body {
|
|||
.result_not_found img {
|
||||
width: 40rem;
|
||||
}
|
||||
```
|
||||
|
||||
```css
|
||||
/* styles for the error box */
|
||||
.error_box .error_box_toggle_button {
|
||||
background: var(--foreground-color);
|
||||
|
@ -188,9 +242,11 @@ body {
|
|||
min-height: 20rem;
|
||||
min-width: 22rem;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box.show {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item,
|
||||
.error_box .dropdown_error_box .no_errors {
|
||||
display: flex;
|
||||
|
@ -200,22 +256,25 @@ body {
|
|||
padding: 1rem;
|
||||
font-size: 1.2rem;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item {
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .no_errors {
|
||||
min-height: 18rem;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item:hover {
|
||||
box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1);
|
||||
box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
|
||||
}
|
||||
|
||||
.error_box .error_item .severity_color {
|
||||
width: 1.2rem;
|
||||
height: 1.2rem;
|
||||
}
|
||||
|
||||
.results .result_disallowed,
|
||||
.results .result_filtered,
|
||||
.results .result_engine_not_selected {
|
||||
|
@ -225,7 +284,7 @@ body {
|
|||
gap: 10rem;
|
||||
font-size: 2rem;
|
||||
color: var(--foreground-color);
|
||||
margin: 0rem 7rem;
|
||||
margin: 0 7rem;
|
||||
}
|
||||
|
||||
.results .result_disallowed .user_query,
|
||||
|
@ -251,16 +310,34 @@ body {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the footer and header
|
||||
#### Styles for the footer and header
|
||||
|
||||
```css
|
||||
header {
|
||||
background: var(--background-color);
|
||||
width: 100%;
|
||||
background: var(--background-color);
|
||||
display: flex;
|
||||
justify-content: right;
|
||||
align-items: center;
|
||||
padding: 1rem;
|
||||
justify-content: space-between;
|
||||
padding: 2rem 3rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
width: 100%;
|
||||
background: var(--background-color);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 1.7rem 1.7rem 4rem;
|
||||
gap: 1.8rem;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
header h1 a {
|
||||
text-transform: capitalize;
|
||||
text-decoration: none;
|
||||
color: var(--foreground-color);
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
header ul,
|
||||
|
@ -301,39 +378,9 @@ footer div {
|
|||
display: flex;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
background: var(--background-color);
|
||||
width: 100%;
|
||||
padding: 1rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
footer div span {
|
||||
font-size: 1.5rem;
|
||||
color: var(--4);
|
||||
}
|
||||
|
||||
footer div {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
background: var(--bg);
|
||||
width: 100%;
|
||||
padding: 1rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the search page
|
||||
#### Styles for the search page
|
||||
|
||||
```css
|
||||
.results {
|
||||
|
@ -341,6 +388,11 @@ footer {
|
|||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-around;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.result {
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.results .search_bar {
|
||||
|
@ -352,6 +404,7 @@ footer {
|
|||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
margin: 2rem 0;
|
||||
content-visibility: auto;
|
||||
}
|
||||
|
||||
.results_aggregated .result {
|
||||
|
@ -361,10 +414,10 @@ footer {
|
|||
}
|
||||
|
||||
.results_aggregated .result h1 a {
|
||||
font-size: 1.5rem;
|
||||
font-size: 1.7rem;
|
||||
font-weight: normal;
|
||||
color: var(--color-two);
|
||||
text-decoration: none;
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result h1 a:hover {
|
||||
|
@ -377,14 +430,15 @@ footer {
|
|||
|
||||
.results_aggregated .result small {
|
||||
color: var(--color-three);
|
||||
font-size: 1.1rem;
|
||||
font-size: 1.3rem;
|
||||
word-wrap: break-word;
|
||||
line-break: anywhere;
|
||||
}
|
||||
|
||||
.results_aggregated .result p {
|
||||
color: var(--foreground-color);
|
||||
font-size: 1.2rem;
|
||||
font-size: 1.4rem;
|
||||
line-height: 2.4rem;
|
||||
margin-top: 0.3rem;
|
||||
word-wrap: break-word;
|
||||
line-break: anywhere;
|
||||
|
@ -395,10 +449,13 @@ footer {
|
|||
font-size: 1.2rem;
|
||||
padding: 1rem;
|
||||
color: var(--color-five);
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
justify-content: right;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the 404 page
|
||||
#### Styles for the 404 page
|
||||
|
||||
```css
|
||||
.error_container {
|
||||
|
@ -448,11 +505,11 @@ footer {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the previous and next button on the search page
|
||||
#### Styles for the previous and next button on the search page
|
||||
|
||||
```css
|
||||
.page_navigation {
|
||||
padding: 0 0 2rem 0;
|
||||
padding: 0 0 2rem;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
@ -472,7 +529,7 @@ footer {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the about page
|
||||
#### Styles for the about page
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -517,7 +574,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the Settings Page
|
||||
#### Styles for the Settings Page
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -526,6 +583,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
display: flex;
|
||||
justify-content: space-around;
|
||||
width: 80dvw;
|
||||
margin: 5rem 0;
|
||||
}
|
||||
|
||||
.settings h1 {
|
||||
|
@ -533,9 +591,18 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
font-size: 2.5rem;
|
||||
}
|
||||
|
||||
.settings > h1 {
|
||||
margin-bottom: 4rem;
|
||||
margin-left: 2rem;
|
||||
}
|
||||
|
||||
.settings hr {
|
||||
border-color: var(--color-three);
|
||||
margin: 0.3rem 0 1rem 0;
|
||||
margin: 0.3rem 0 1rem;
|
||||
}
|
||||
|
||||
.settings > hr {
|
||||
margin-left: 2rem;
|
||||
}
|
||||
|
||||
.settings_container .sidebar {
|
||||
|
@ -548,7 +615,6 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
margin-left: -0.7rem;
|
||||
padding: 0.7rem;
|
||||
border-radius: 5px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 0.5rem;
|
||||
color: var(--foreground-color);
|
||||
text-transform: capitalize;
|
||||
|
@ -556,18 +622,30 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
|
||||
.settings_container .sidebar .btn {
|
||||
padding: 0.5rem;
|
||||
padding: 2rem;
|
||||
border-radius: 0.5rem;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn:active {
|
||||
outline: 2px solid var(--color-two);
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn:not(.active):hover {
|
||||
color: var(--color-two);
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn.active {
|
||||
background-color: var(--color-two);
|
||||
color: var(--background-color);
|
||||
}
|
||||
|
||||
.settings_container .main_container {
|
||||
width: 70%;
|
||||
border-left: 1.5px solid var(--color-three);
|
||||
padding-left: 3rem;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.settings_container .tab {
|
||||
|
@ -576,6 +654,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
|
||||
.settings_container .tab.active {
|
||||
display: flex;
|
||||
gap: 1.2rem;
|
||||
flex-direction: column;
|
||||
justify-content: space-around;
|
||||
}
|
||||
|
@ -623,7 +702,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
.settings_container .general select {
|
||||
margin: 0.7rem 0;
|
||||
width: 20rem;
|
||||
background-color: var(--background-color);
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
padding: 1rem 2rem;
|
||||
border-radius: 0.5rem;
|
||||
|
@ -641,16 +720,19 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
gap: 1rem;
|
||||
padding: 1rem 0;
|
||||
margin-bottom: 2rem;
|
||||
gap: 2rem;
|
||||
}
|
||||
|
||||
.settings_container .engines .toggle_btn {
|
||||
color: var(--foreground-color);
|
||||
font-size: 1.5rem;
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
align-items: center;
|
||||
border-radius: 100px;
|
||||
gap: 1.5rem;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.settings_container .engines hr {
|
||||
|
@ -658,11 +740,11 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
|
||||
.settings_container .cookies input {
|
||||
margin: 1rem 0rem;
|
||||
margin: 1rem 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the Toggle Button
|
||||
#### Styles for the Toggle Button
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -686,25 +768,26 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
.slider {
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: var(--background-color);
|
||||
-webkit-transition: 0.4s;
|
||||
transition: 0.4s;
|
||||
inset: 0;
|
||||
background-color: var(--foreground-color);
|
||||
transition: 0.2s;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
}
|
||||
|
||||
.slider:before {
|
||||
.slider:active {
|
||||
outline: 2px solid var(--foreground-color);
|
||||
}
|
||||
|
||||
.slider::before {
|
||||
position: absolute;
|
||||
content: '';
|
||||
height: 2.6rem;
|
||||
width: 2.6rem;
|
||||
left: 0.4rem;
|
||||
bottom: 0.4rem;
|
||||
background-color: var(--foreground-color);
|
||||
-webkit-transition: 0.4s;
|
||||
transition: 0.4s;
|
||||
background-color: var(--background-color);
|
||||
transition: 0.2s;
|
||||
}
|
||||
|
||||
input:checked + .slider {
|
||||
|
@ -715,9 +798,7 @@ input:focus + .slider {
|
|||
box-shadow: 0 0 1px var(--color-three);
|
||||
}
|
||||
|
||||
input:checked + .slider:before {
|
||||
-webkit-transform: translateX(2.6rem);
|
||||
-ms-transform: translateX(2.6rem);
|
||||
input:checked + .slider::before {
|
||||
transform: translateX(2.6rem);
|
||||
}
|
||||
|
||||
|
@ -726,9 +807,79 @@ input:checked + .slider:before {
|
|||
border-radius: 3.4rem;
|
||||
}
|
||||
|
||||
.slider.round:before {
|
||||
.slider.round::before {
|
||||
border-radius: 50%;
|
||||
}
|
||||
```
|
||||
|
||||
## Animations
|
||||
|
||||
### Built-in
|
||||
|
||||
By default, `websurfx` comes with 1 animation to choose from, which can be easily selected using the config file or via the settings page on the website.
|
||||
|
||||
> To learn how to change animations using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
### Custom
|
||||
|
||||
To write a custom animation, the user is required to have some knowledge of `themes` and the HTML of the page for which the animation is being provided.
|
||||
|
||||
The animations can be of 2 categories:
|
||||
|
||||
- Theme specific animations
|
||||
- Universal animations
|
||||
|
||||
#### Theme Specific Animations
|
||||
|
||||
These animations can only be used with a specific theme and should not be used with other themes; otherwise, they either won't look good, won't work at all, or will only work partially.
|
||||
|
||||
Here is an example of the `simple-frosted-glow` animation for the `simple` theme (which we provide by default with the app), which will give you a better idea of how to create a custom animation for a specific theme:
|
||||
|
||||
```css
|
||||
.results_aggregated .result {
|
||||
margin: 1rem;
|
||||
padding: 1rem;
|
||||
border-radius: 1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result:hover {
|
||||
box-shadow:
|
||||
inset 0 0 3rem var(--color-two),
|
||||
inset 0 0 6rem var(--color-five),
|
||||
inset 0 0 9rem var(--color-three),
|
||||
0 0 0.25rem var(--color-two),
|
||||
0 0 0.5rem var(--color-five),
|
||||
0 0 0.75rem var(--color-three);
|
||||
}
|
||||
```
|
||||
|
||||
#### Universal Animations
|
||||
|
||||
These animations are independent of the theme being used and can be used with all the themes.
|
||||
|
||||
Here is an example of the `text-tilt` animation, which will give you an idea of how to create universal animations for the search engine website.
|
||||
|
||||
```css
|
||||
.results_aggregated .result:hover {
|
||||
transform: skewX(10deg);
|
||||
}
|
||||
```
|
||||
|
||||
> [!Note]
|
||||
> 1. The above-mentioned examples of animations were covered for the search page of the search engine website. The same way of creating custom animations can also be applied to other pages.
|
||||
> 2. While naming the file for a new animation, follow these naming conventions (a sketch of both cases is given after this note):
|
||||
> 1. If the animation is theme specific, then the name of the animation file should look like this:
|
||||
> `<name of the theme which the animation is for><separated by a hyphen or dash><name of the animation with whitespaces replaced with hyphens>`
|
||||
> **For example:**
|
||||
> If an animation to make search results show a frosted glow on hover were to be created for the `simple` theme, then the name of the file would look something like this:
|
||||
> `simple-frosted-glow`
|
||||
> Where `simple` is the name of the theme the animation targets and `frosted-glow` is the name of the animation, with each word separated by a hyphen.
|
||||
> 2. If the animation is not theme specific (a universal animation), then the name of the animation file should look like this:
|
||||
> `<name of the animation with whitespaces replaced with hyphens>`
|
||||
> **For example:**
|
||||
> If an animation to make search result text tilt on hover were to be created, then the name of the file would look something like this:
|
||||
> `text-tilt`
|
||||
> Where `text-tilt` is the name of the animation, with each word separated by a hyphen. (While naming the files for these types of animations, you do not need to add a theme name in front of the file name.)
|
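Putting the conventions above together, here is a sketch of creating both kinds of animation files, using the example names from the note:

```shell
# Theme-specific: <theme name>-<animation name>
touch public/static/animations/simple-frosted-glow.css

# Universal: <animation name> only
touch public/static/animations/text-tilt.css
```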
||||
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
6
flake.lock
generated
|
@ -34,11 +34,11 @@
|
|||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1695318763,
|
||||
"narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=",
|
||||
"lastModified": 1725194671,
|
||||
"narHash": "sha256-tLGCFEFTB5TaOKkpfw3iYT9dnk4awTP/q4w+ROpMfuw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "e12483116b3b51a185a33a272bf351e357ba9a99",
|
||||
"rev": "b833ff01a0d694b910daca6e2ff4a3f26dee478c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
13
flake.nix
|
@ -32,17 +32,28 @@
|
|||
buildInputs = [
|
||||
actionlint
|
||||
cargo
|
||||
docker
|
||||
haskellPackages.hadolint
|
||||
nodejs
|
||||
nodePackages_latest.cspell
|
||||
nodePackages_latest.eslint
|
||||
eslint
|
||||
nodePackages_latest.markdownlint-cli2
|
||||
nodePackages_latest.stylelint
|
||||
redis
|
||||
rustPackages.clippy
|
||||
rust-analyzer
|
||||
cargo-watch
|
||||
rustc
|
||||
rustfmt
|
||||
yamllint
|
||||
openssl
|
||||
pkg-config
|
||||
];
|
||||
RUST_SRC_PATH = rustPlatform.rustLibSrc;
|
||||
shellHook = ''
|
||||
export PATH="$PATH:$HOME/.cargo/bin"
|
||||
export NODE_PATH="$NODE_PATH:./node_modules"
|
||||
'';
|
||||
};
|
||||
|
||||
# Build via "nix build .#websurfx", which is basically just
|
||||
|
|
Before Width: | Height: | Size: 72 KiB After Width: | Height: | Size: 73 KiB |
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 45 KiB |
Before Width: | Height: | Size: 134 KiB After Width: | Height: | Size: 80 KiB |
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 36 KiB |
1
public/images/close.svg
Normal file
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-x"><line x1="18" y1="6" x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line></svg>
|
After Width: | Height: | Size: 299 B |
1
public/images/magnifying_glass.svg
Normal file
|
@ -0,0 +1 @@
|
|||
<?xml version="1.0" encoding="utf-8"?><!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 50 50" width="24px" height="24px"><circle fill="none" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-miterlimit="10" cx="21" cy="20" r="16"/><line fill="none" stroke="#000000" stroke-width="4" stroke-miterlimit="10" x1="32.229" y1="32.229" x2="45.5" y2="45.5"/></svg>
|
After Width: | Height: | Size: 610 B |
Before Width: | Height: | Size: 7.2 KiB |
15
public/static/animations/simple-frosted-glow.css
Normal file
|
@ -0,0 +1,15 @@
|
|||
.results_aggregated .result {
|
||||
margin: 1rem;
|
||||
padding: 1rem;
|
||||
border-radius: 1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result:hover {
|
||||
box-shadow:
|
||||
inset 0 0 3rem var(--color-two),
|
||||
inset 0 0 6rem var(--color-five),
|
||||
inset 0 0 9rem var(--color-three),
|
||||
0 0 0.25rem var(--color-two),
|
||||
0 0 0.5rem var(--color-five),
|
||||
0 0 0.75rem var(--color-three);
|
||||
}
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #1e1e2e;
|
||||
--foreground-color: #cdd6f4;
|
||||
--logo-color: #f5c2e7;
|
||||
--color-one: #45475a;
|
||||
--color-two: #f38ba8;
|
||||
--color-three: #a6e3a1;
|
||||
--color-four: #f9e2af;
|
||||
--color-five: #89b4fa;
|
||||
--color-six: #f5c2e7;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #000000;
|
||||
--foreground-color: #ffffff;
|
||||
--background-color: #000;
|
||||
--foreground-color: #fff;
|
||||
--logo-color: #e0e0e0;
|
||||
--color-one: #121212;
|
||||
--color-two: #808080;
|
||||
--color-three: #999999;
|
||||
--color-four: #666666;
|
||||
--color-three: #999;
|
||||
--color-four: #666;
|
||||
--color-five: #bfbfbf;
|
||||
--color-six: #e0e0e0;
|
||||
--color-seven: #555555;
|
||||
--color-seven: #555;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #44475a;
|
||||
--foreground-color: #8be9fd;
|
||||
--color-one: #ff5555;
|
||||
--logo-color: #ffb86c;
|
||||
--color-one: #f55;
|
||||
--color-two: #50fa7b;
|
||||
--color-three: #ffb86c;
|
||||
--color-four: #bd93f9;
|
||||
--color-five: #ff79c6;
|
||||
--color-six: #94a3a5;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #1d2021;
|
||||
--foreground-color: #ebdbb2;
|
||||
--logo-color: #ebdbb2;
|
||||
--color-one: #282828;
|
||||
--color-two: #98971a;
|
||||
--color-three: #d79921;
|
||||
--color-four: #458588;
|
||||
--color-five: #b16286;
|
||||
--color-six: #689d6a;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #49483Eff;
|
||||
--foreground-color: #FFB269;
|
||||
--logo-color: #ffd866;
|
||||
--color-one: #272822ff;
|
||||
--color-two: #61AFEF;
|
||||
--color-three: #ffd866;
|
||||
--color-four: #fc9867;
|
||||
--color-five: #ab9df2;
|
||||
--color-six: #78dce8;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #122736ff;
|
||||
--foreground-color: #a2e2a9;
|
||||
--logo-color: #e2ecd6;
|
||||
--color-one: #121B2Cff;
|
||||
--color-two: #f08282;
|
||||
--color-three: #ABC5AAff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #1b2b34;
|
||||
--foreground-color: #d8dee9;
|
||||
--logo-color: #d8dee9;
|
||||
--color-one: #343d46;
|
||||
--color-two: #5FB3B3ff;
|
||||
--color-three: #69Cf;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #282c34;
|
||||
--foreground-color: #abb2bf;
|
||||
--logo-color: #c8ccd4;
|
||||
--color-one: #3b4048;
|
||||
--color-two: #a3be8c;
|
||||
--color-three: #b48ead;
|
||||
|
|
12
public/static/colorschemes/rose-pine-dawn.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #faf4ed;
|
||||
--foreground-color: #575279;
|
||||
--logo-color: #d7827e;
|
||||
--color-one: #f2e9e1;
|
||||
--color-two: #907aa9;
|
||||
--color-three: #56949f;
|
||||
--color-four: #ea9d34;
|
||||
--color-five: #d7827e;
|
||||
--color-six: #9893a5;
|
||||
--color-seven: #575279;
|
||||
}
|
12
public/static/colorschemes/rose-pine-moon.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #232136;
|
||||
--foreground-color: #e0def4;
|
||||
--logo-color: #ea9a97;
|
||||
--color-one: #393552;
|
||||
--color-two: #c4a7e7;
|
||||
--color-three: #9ccfd8;
|
||||
--color-four: #f6c177;
|
||||
--color-five: #ea9a97;
|
||||
--color-six: #6e6a86;
|
||||
--color-seven: #e0def4;
|
||||
}
|
12
public/static/colorschemes/rose-pine.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #191724;
|
||||
--foreground-color: #e0def4;
|
||||
--logo-color: #ebbcba;
|
||||
--color-one: #26233a;
|
||||
--color-two: #c4a7e7;
|
||||
--color-three: #9ccfd8;
|
||||
--color-four: #f6c177;
|
||||
--color-five: #eb6f92;
|
||||
--color-six: #6e6a86;
|
||||
--color-seven: #e0def4;
|
||||
}
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #002b36;
|
||||
--foreground-color: #c9e0e6;
|
||||
--logo-color: #EEE8D5ff;
|
||||
--color-one: #073642;
|
||||
--color-two: #2AA198ff;
|
||||
--color-three: #2AA198ff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #EEE8D5ff;
|
||||
--foreground-color: #b1ab97;
|
||||
--logo-color: #586E75;
|
||||
--color-one: #fdf6e3;
|
||||
--color-two: #DC322Fff;
|
||||
--color-three: #586E75ff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #1a1b26;
|
||||
--foreground-color: #c0caf5;
|
||||
--logo-color: #e2afff;
|
||||
--color-one: #32364a;
|
||||
--color-two: #a9b1d6;
|
||||
--color-three: #5a5bb8;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #35383Cff;
|
||||
--foreground-color: #D7DAD8ff;
|
||||
--logo-color: #D7DAD8ff;
|
||||
--color-one: #1d1f21;
|
||||
--color-two: #D77C79ff;
|
||||
--color-three: #f0c674;
|
||||
|
|
|
@ -1,3 +1,62 @@
|
|||
/**
|
||||
* This function gets the saved cookie if it is present on the user's machine. If it
|
||||
* is available, then it is parsed and converted to an object, which is then used to
|
||||
* retrieve the preferences that the user had selected previously; these are then loaded
|
||||
* and used for displaying the user-provided settings by setting them as the selected
|
||||
* options on the settings page.
|
||||
*
|
||||
* @function
|
||||
* @param {string} cookie - It takes the client settings cookie as a string.
|
||||
* @returns {void}
|
||||
*/
|
||||
function setClientSettingsOnPage(cookie) {
|
||||
let cookie_value = cookie
|
||||
.split(';')
|
||||
.map((item) => item.split('='))
|
||||
.reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})
|
||||
|
||||
// Loop through all select tags and add their values to the cookie dictionary
|
||||
document.querySelectorAll('select').forEach((select_tag) => {
|
||||
switch (select_tag.name) {
|
||||
case 'themes':
|
||||
select_tag.value = cookie_value['theme']
|
||||
break
|
||||
case 'colorschemes':
|
||||
select_tag.value = cookie_value['colorscheme']
|
||||
break
|
||||
case 'animations':
|
||||
select_tag.value = cookie_value['animation']
|
||||
break
|
||||
case 'safe_search_levels':
|
||||
select_tag.value = cookie_value['safe_search_level']
|
||||
break
|
||||
}
|
||||
})
|
||||
let engines = document.querySelectorAll('.engine')
|
||||
let engines_cookie = cookie_value['engines']
|
||||
|
||||
if (engines_cookie.length === engines.length) {
|
||||
document.querySelector('.select_all').checked = true
|
||||
engines.forEach((engine_checkbox) => {
|
||||
engine_checkbox.checked = true
|
||||
})
|
||||
} else {
|
||||
engines.forEach((engines_checkbox) => {
|
||||
engines_checkbox.checked = false
|
||||
})
|
||||
engines_cookie.forEach((engine_name) => {
|
||||
engines.forEach((engine_checkbox) => {
|
||||
if (
|
||||
engine_checkbox.parentNode.parentNode.innerText.trim() ===
|
||||
engine_name.trim()
|
||||
) {
|
||||
engine_checkbox.checked = true
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is executed when any page on the website finishes loading and
|
||||
* this function retrieves the cookie if it is present on the user's machine.
|
||||
|
@ -16,9 +75,14 @@ document.addEventListener(
|
|||
let cookie = decodeURIComponent(document.cookie)
|
||||
// Set the value of the input field to the decoded cookie value if it is not empty
|
||||
// Otherwise, display a message indicating that no cookies have been saved on the user's system
|
||||
document.querySelector('.cookies input').value = cookie.length
|
||||
? cookie
|
||||
: 'No cookies have been saved on your system'
|
||||
if (cookie.length) {
|
||||
document.querySelector('.cookies input').value = cookie
|
||||
// This function displays the user provided settings on the settings page.
|
||||
setClientSettingsOnPage(cookie)
|
||||
} else {
|
||||
document.querySelector('.cookies input').value =
|
||||
'No cookies have been saved on your system'
|
||||
}
|
||||
} catch (error) {
|
||||
// If there is an error decoding the cookie, log the error to the console
|
||||
// and display an error message in the input field
|
||||
|
@@ -1,34 +1,6 @@

/**
 * Selects the input element for the search box
 * @type {HTMLInputElement}
 */
const searchBox = document.querySelector('input')

/**
 * Redirects the user to the search results page with the query parameter
 */
function searchWeb() {
  const query = searchBox.value.trim()
  try {
    let safeSearchLevel = document.querySelector('.search_options select').value
    if (query) {
      window.location.href = `search?q=${encodeURIComponent(
        query,
      )}&safesearch=${encodeURIComponent(safeSearchLevel)}`
    }
  } catch (error) {
    if (query) {
      window.location.href = `search?q=${encodeURIComponent(query)}`
    }
  }
}

/**
 * A function that clears the search input text when the clear button is clicked.
 */
function clearSearchText() {
  document.querySelector('.search_bar > input').value = ''
}

/**
 * Listens for the 'Enter' key press event on the search box and calls the searchWeb function
 * @param {KeyboardEvent} e - The keyboard event object
 */
searchBox.addEventListener('keyup', (e) => {
  if (e.key === 'Enter') {
    searchWeb()
  }
})
@@ -1,39 +0,0 @@

/**
 * Navigates to the next page by incrementing the current page number in the URL query string.
 * @returns {void}
 */
function navigate_forward() {
  let url = new URL(window.location);
  let searchParams = url.searchParams;

  let q = searchParams.get('q');
  let page = parseInt(searchParams.get('page'));

  if (isNaN(page)) {
    page = 1;
  } else {
    page++;
  }

  window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
}

/**
 * Navigates to the previous page by decrementing the current page number in the URL query string.
 * @returns {void}
 */
function navigate_backward() {
  let url = new URL(window.location);
  let searchParams = url.searchParams;

  let q = searchParams.get('q');
  let page = parseInt(searchParams.get('page'));

  if (isNaN(page)) {
    page = 0;
  } else if (page > 0) {
    page--;
  }

  window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
}
@@ -1,18 +0,0 @@

document.addEventListener(
  'DOMContentLoaded',
  () => {
    let url = new URL(window.location)
    let searchParams = url.searchParams

    let safeSearchLevel = searchParams.get('safesearch')

    if (
      safeSearchLevel >= 0 &&
      safeSearchLevel <= 2 &&
      safeSearchLevel !== null
    ) {
      document.querySelector('.search_options select').value = safeSearchLevel
    }
  },
  false,
)
@@ -50,6 +50,9 @@ function setClientSettings() {

case 'colorschemes':
  cookie_dictionary['colorscheme'] = select_tag.value
  break
case 'animations':
  cookie_dictionary['animation'] = select_tag.value || null
  break
case 'safe_search_levels':
  cookie_dictionary['safe_search_level'] = Number(select_tag.value)
  break
@@ -103,13 +106,50 @@ function getClientSettings() {

.map((item) => item.split('='))
.reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})

// Loop through all link tags and update their href values to match the user's preferences
Array.from(document.querySelectorAll('link')).forEach((item) => {
  if (item.href.includes('static/themes')) {
    item.href = `static/themes/${cookie_value['theme']}.css`
  } else if (item.href.includes('static/colorschemes')) {
    item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
let links = Array.from(document.querySelectorAll('link'))

// A check to determine whether the animation link exists under the head tag or not.
// If it does not exist then create and add a new animation link under the head tag
// and update the other link tags' href according to the settings provided by the user
// via the UI. On the other hand, if it does exist then just update all the link tags'
// href according to the settings provided by the user via the UI.
if (!links.some((item) => item.href.includes('static/animations'))) {
  if (cookie_value['animation']) {
    let animation_link = document.createElement('link')
    animation_link.href = `static/animations/${cookie_value['animation']}.css`
    animation_link.rel = 'stylesheet'
    animation_link.type = 'text/css'
    document.querySelector('head').appendChild(animation_link)
  }
})
// Loop through all link tags and update their href values to match the user's preferences
links.forEach((item) => {
  if (item.href.includes('static/themes')) {
    item.href = `static/themes/${cookie_value['theme']}.css`
  } else if (item.href.includes('static/colorschemes')) {
    item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
  }
})
} else {
  // Loop through all link tags and update their href values to match the user's preferences
  links.forEach((item) => {
    if (item.href.includes('static/themes')) {
      item.href = `static/themes/${cookie_value['theme']}.css`
    } else if (item.href.includes('static/colorschemes')) {
      item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
    } else if (
      item.href.includes('static/animations') &&
      cookie_value['animation']
    ) {
      item.href = `static/animations/${cookie_value['animation']}.css`
    }
  })
  if (!cookie_value['animation']) {
    document
      .querySelector('head')
      .removeChild(
        links.find((item) => item.href.includes('static/animations')),
      )
  }
}
}
}
@@ -1,4 +1,13 @@

/* @import url('./catppuccin-mocha.css'); */
@font-face {
  font-family: Rubik;
  font-style: normal;
  font-weight: 200 600;
  font-stretch: 0% 200%;
  font-display: swap;
  src: url('https://fonts.gstatic.com/s/rubik/v28/iJWKBXyIfDnIV7nErXyi0A.woff2')
    format('woff2');
}

* {
  padding: 0;
@@ -15,8 +24,14 @@ body {

  flex-direction: column;
  justify-content: space-between;
  align-items: center;
  height: 100vh;
  background: var(--color-one);
  min-height: 100vh;
  font-family: Rubik, sans-serif;
  background-color: var(--background-color);
}

/* enforce font for buttons */
button {
  font-family: Rubik, sans-serif;
}

/* styles for the index page */

@@ -29,6 +44,10 @@ body {

  align-items: center;
}

.search-container svg {
  color: var(--logo-color);
}

.search-container div {
  display: flex;
}
@@ -37,32 +56,66 @@ body {

.search_bar {
  display: flex;
  gap: 10px;
  align-items: center;
}

.search_bar input {
  padding: 1rem;
  border-radius: 6px;
  padding: 2.6rem 2.2rem;
  width: 50rem;
  height: 3rem;
  outline: none;
  border: none;
  box-shadow: rgba(0, 0, 0, 1);
  background: var(--foreground-color);
  box-shadow: rgb(0 0 0 / 1);
  background-color: var(--color-one);
  color: var(--foreground-color);
  outline-offset: 3px;
  font-size: 1.6rem;
}

.search_bar input::-webkit-search-results-button,
.search_bar input::-webkit-search-cancel-button {
  display: none;
}

.search_bar input:focus {
  outline: 2px solid var(--foreground-color);
}

.search_bar input::placeholder {
  color: var(--foreground-color);
  opacity: 1;
}

.search_bar button {
  padding: 1rem;
  border-radius: 0;
  padding: 2.6rem 3.2rem;
  border-radius: 6px;
  height: 3rem;
  display: flex;
  justify-content: center;
  align-items: center;
  outline: none;
  outline-offset: 3px;
  outline: 2px solid transparent;
  border: none;
  transition: 0.1s;
  gap: 0;
  background: var(--background-color);
  color: var(--color-three);
  background-color: var(--color-six);
  color: var(--background-color);
  font-weight: 600;
  letter-spacing: 0.1rem;
  position: relative;
}

.search_bar button img {
  position: absolute;
  left: 50%;
  top: 50%;
  transform: translate(-50%, -50%);
}

.search_bar button:active {
  outline: 2px solid var(--color-three);
}

.search_bar button:active,
@@ -81,13 +134,19 @@ body {

  width: 20rem;
  background-color: var(--color-one);
  color: var(--foreground-color);
  padding: 1rem 2rem;
  padding: 1.2rem 2rem;
  border-radius: 0.5rem;
  outline: none;
  outline-offset: 3px;
  outline: 2px solid transparent;
  border: none;
  text-transform: capitalize;
}

.search_area .search_options select:active,
.search_area .search_options select:hover {
  outline: 2px solid var(--color-three);
}

.search_area .search_options option:hover {
  background-color: var(--color-one);
}
@@ -126,9 +185,11 @@ body {

  min-height: 20rem;
  min-width: 22rem;
}

.error_box .dropdown_error_box.show {
  display: flex;
}

.error_box .dropdown_error_box .error_item,
.error_box .dropdown_error_box .no_errors {
  display: flex;
@@ -138,22 +199,25 @@ body {

  padding: 1rem;
  font-size: 1.2rem;
}

.error_box .dropdown_error_box .error_item {
  justify-content: space-between;
}

.error_box .dropdown_error_box .no_errors {
  min-height: 18rem;
  justify-content: center;
}

.error_box .dropdown_error_box .error_item:hover {
  box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1);
  box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
}

.error_box .error_item .severity_color {
  width: 1.2rem;
  height: 1.2rem;
}

.results .result_disallowed,
.results .result_filtered,
.results .result_engine_not_selected {
@@ -163,7 +227,7 @@ body {

  gap: 10rem;
  font-size: 2rem;
  color: var(--foreground-color);
  margin: 0rem 7rem;
  margin: 0 7rem;
}

.results .result_disallowed .user_query,
@@ -190,17 +254,24 @@ body {

/* styles for the footer and header */

header,
header {
  width: 100%;
  background: var(--background-color);
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 2rem 3rem;
}

footer {
  width: 100%;
  background: var(--background-color);
  display: flex;
  padding: 1rem;
  align-items: center;
}

header {
  justify-content: space-between;
  padding: 1.7rem 1.7rem 4rem;
  gap: 1.8rem;
  flex-direction: column;
  justify-content: center;
}

header h1 a {
@@ -208,7 +279,6 @@ header h1 a {

  text-decoration: none;
  color: var(--foreground-color);
  letter-spacing: 0.1rem;
  margin-left: 1rem;
}

header ul,
@@ -250,11 +320,6 @@ footer div {

  gap: 1rem;
}

footer {
  flex-direction: column;
  justify-content: center;
}

/* Styles for the search page */

.results {
@@ -262,6 +327,11 @@ footer {

  display: flex;
  flex-direction: column;
  justify-content: space-around;
  gap: 1rem;
}

.result {
  gap: 1rem;
}

.results .search_bar {
@@ -273,6 +343,7 @@ footer {

  flex-direction: column;
  justify-content: space-between;
  margin: 2rem 0;
  content-visibility: auto;
}

.results_aggregated .result {
@@ -282,10 +353,10 @@ footer {

}

.results_aggregated .result h1 a {
  font-size: 1.5rem;
  font-size: 1.7rem;
  font-weight: normal;
  color: var(--color-two);
  text-decoration: none;
  letter-spacing: 0.1rem;
}

.results_aggregated .result h1 a:hover {
@@ -293,19 +364,20 @@ footer {

}

.results_aggregated .result h1 a:visited {
  color: var(--background-color);
  color: var(--color-five);
}

.results_aggregated .result small {
  color: var(--color-three);
  font-size: 1.1rem;
  font-size: 1.3rem;
  word-wrap: break-word;
  line-break: anywhere;
}

.results_aggregated .result p {
  color: var(--foreground-color);
  font-size: 1.2rem;
  font-size: 1.4rem;
  line-height: 2.4rem;
  margin-top: 0.3rem;
  word-wrap: break-word;
  line-break: anywhere;
@@ -316,6 +388,9 @@ footer {

  font-size: 1.2rem;
  padding: 1rem;
  color: var(--color-five);
  display: flex;
  gap: 1rem;
  justify-content: right;
}

/* Styles for the 404 page */
@@ -367,13 +442,13 @@ footer {

}

.page_navigation {
  padding: 0 0 2rem 0;
  padding: 0 0 2rem;
  display: flex;
  justify-content: space-between;
  align-items: center;
}

.page_navigation button {
.page_navigation a {
  background: var(--background-color);
  color: var(--foreground-color);
  padding: 1rem;
@@ -382,7 +457,7 @@ footer {

  border: none;
}

.page_navigation button:active {
.page_navigation a:active {
  filter: brightness(1.2);
}
@@ -392,39 +467,122 @@ footer {

  font-size: 1.5rem;
  color: var(--foreground-color);
  padding-bottom: 10px;
  max-width: 1100px;
  margin: 14rem auto;
  display: flex;
  flex-direction: column;
  row-gap: 100px;
}

.about-container article h1 {
  color: var(--color-two);
  font-size: 2.8rem;
  font-size: 4.5rem;
}

.about-container article div {
  padding-bottom: 15px;
.about-container article .logo-container {
  display: flex;
  align-items: center;
  justify-content: center;
}

.about-container article .logo-container svg {
  width: clamp(200px, 530px, 815px);
  color: var(--logo-color);
}

.about-container article .text-block {
  box-shadow: 0 0 0 100vmax var(--foreground-color);
  background-color: var(--foreground-color);
  clip-path: inset(0 -100vmax);
  padding: 90px 0;
  display: flex;
  gap: 40px;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  text-align: center;
  color: var(--background-color);
}

.about-container article .text-block .text-block-title {
  font-size: 64px;
  font-weight: 500;
}

.hero-text-container {
  width: 860px;
}

.hero-text {
  font-size: 45px;
  font-weight: 200;
}

.about-container a {
  color: var(--color-three);
}

.about-container article h2 {
  color: var(--color-three);
  font-size: 1.8rem;
  padding-bottom: 10px;
}

.about-container p {
  color: var(--foreground-color);
  font-size: 1.6rem;
  padding-bottom: 10px;
}

.about-container h3 {
  font-size: 1.5rem;
}

.about-container {
  width: 80%;
  margin-bottom: 140px;
}

.feature-list {
  padding: 35px;
  display: flex;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  row-gap: 60px;
}

.feature-list-title {
  text-align: center;
  font-size: 64px;
  font-weight: 500;
}

.features {
  display: grid;
  grid-template-columns: repeat(3, 1fr);
  gap: 40px;
}

.feature-card {
  background-color: var(--foreground-color);
  color: var(--background-color);
  text-align: center;
  display: flex;
  padding: 30px;
  border-radius: 24px;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  gap: 15px;
}

.feature-card-header {
  display: flex;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  row-gap: 15px;
}

.feature-card-header h4 {
  font-size: 33px;
  font-weight: 500;
}

.feature-card-body p {
  font-size: 20px;
  font-weight: 200;
}

.about-footnote {
  font-size: 24px;
  text-align: center;
  color: var(--foreground-color);
}

/* Styles for the settings page */
@@ -432,6 +590,7 @@ footer {

  display: flex;
  justify-content: space-around;
  width: 80dvw;
  margin: 5rem 0;
}

.settings h1 {

@@ -439,9 +598,18 @@ footer {

  font-size: 2.5rem;
}

.settings > h1 {
  margin-bottom: 4rem;
  margin-left: 2rem;
}

.settings hr {
  border-color: var(--color-three);
  margin: 0.3rem 0 1rem 0;
  margin: 0.3rem 0 1rem;
}

.settings > hr {
  margin-left: 2rem;
}

.settings_container .sidebar {
@@ -454,7 +622,6 @@ footer {

  margin-left: -0.7rem;
  padding: 0.7rem;
  border-radius: 5px;
  font-weight: bold;
  margin-bottom: 0.5rem;
  color: var(--foreground-color);
  text-transform: capitalize;

@@ -462,18 +629,30 @@ footer {

}

.settings_container .sidebar .btn {
  padding: 0.5rem;
  padding: 2rem;
  border-radius: 0.5rem;
  outline-offset: 3px;
  outline: 2px solid transparent;
}

.settings_container .sidebar .btn:active {
  outline: 2px solid var(--color-two);
}

.settings_container .sidebar .btn:not(.active):hover {
  color: var(--color-two);
}

.settings_container .sidebar .btn.active {
  background-color: var(--color-two);
  color: var(--background-color);
}

.settings_container .main_container {
  width: 70%;
  border-left: 1.5px solid var(--color-three);
  padding-left: 3rem;
  border: none;
}

.settings_container .tab {

@@ -482,6 +661,7 @@ footer {

.settings_container .tab.active {
  display: flex;
  gap: 1.2rem;
  flex-direction: column;
  justify-content: space-around;
}
@@ -519,17 +699,26 @@ footer {

  text-transform: capitalize;
}

.settings_container .tab .description {
.settings_container .tab .description,
.settings_container .tab .admin_warning {
  font-size: 1.5rem;
  margin-bottom: 0.5rem;
}

.settings_container .tab .description {
  color: var(--foreground-color);
}

.settings_container .tab .admin_warning {
  color: var(--color-two);
}

.settings_container .user_interface select,
.settings_container .general select {
.settings_container .general select,
.settings_container .general form input {
  margin: 0.7rem 0;
  width: 20rem;
  background-color: var(--background-color);
  background-color: var(--color-one);
  color: var(--foreground-color);
  padding: 1rem 2rem;
  border-radius: 0.5rem;
@@ -538,6 +727,38 @@ footer {

  text-transform: capitalize;
}

.settings_container .general form input {
  padding: 0;
  width: 30rem;
  text-align: center;
  text-transform: none;
}

.settings_container .general form input::file-selector-button {
  content: 'Browse';
  padding: 1rem 2rem;
  font-size: 1.5rem;
  background: var(--color-three);
  color: var(--background-color);
  border-radius: 0.5rem;
  border: 2px solid transparent;
  font-weight: bold;
  transition: all 0.1s ease-out;
  cursor: pointer;
  box-shadow: 5px 5px;
  outline: none;
  translate: -1rem 0;
}

.settings_container .general form input::file-selector-button:active {
  box-shadow: none;
  translate: 5px 5px;
}

.settings_container .general .export_btn {
  margin-bottom: 1rem;
}

.settings_container .user_interface option:hover,
.settings_container .general option:hover {
  background-color: var(--color-one);
@@ -547,16 +768,19 @@ footer {

  display: flex;
  flex-direction: column;
  justify-content: center;
  gap: 1rem;
  padding: 1rem 0;
  margin-bottom: 2rem;
  gap: 2rem;
}

.settings_container .engines .toggle_btn {
  color: var(--foreground-color);
  font-size: 1.5rem;
  display: flex;
  gap: 0.5rem;
  align-items: center;
  border-radius: 100px;
  gap: 1.5rem;
  letter-spacing: 1px;
}

.settings_container .engines hr {
@@ -564,10 +788,11 @@ footer {

}

.settings_container .cookies input {
  margin: 1rem 0rem;
  margin: 1rem 0;
}

/* Styles for the toggle button */

/* The switch - the box around the slider */
.switch {
  position: relative;
@@ -587,25 +812,26 @@ footer {

.slider {
  position: absolute;
  cursor: pointer;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background-color: var(--background-color);
  -webkit-transition: 0.4s;
  transition: 0.4s;
  inset: 0;
  background-color: var(--foreground-color);
  transition: 0.2s;
  outline-offset: 3px;
  outline: 2px solid transparent;
}

.slider:before {
.slider:active {
  outline: 2px solid var(--foreground-color);
}

.slider::before {
  position: absolute;
  content: '';
  height: 2.6rem;
  width: 2.6rem;
  left: 0.4rem;
  bottom: 0.4rem;
  background-color: var(--foreground-color);
  -webkit-transition: 0.4s;
  transition: 0.4s;
  background-color: var(--background-color);
  transition: 0.2s;
}

input:checked + .slider {

@@ -616,9 +842,7 @@ input:focus + .slider {

  box-shadow: 0 0 1px var(--color-three);
}

input:checked + .slider:before {
  -webkit-transform: translateX(2.6rem);
  -ms-transform: translateX(2.6rem);
input:checked + .slider::before {
  transform: translateX(2.6rem);
}
@@ -627,6 +851,50 @@ input:checked + .slider:before {

  border-radius: 3.4rem;
}

.slider.round:before {
.slider.round::before {
  border-radius: 50%;
}

@media screen and (width <= 1136px) {
  .hero-text-container {
    width: unset;
  }

  .features {
    grid-template-columns: repeat(2, 1fr);
  }
}

@media screen and (width <= 706px) {
  .about-container article .logo-container svg {
    width: clamp(200px, 290px, 815px);
  }

  .about-container article .text-block .text-block-title {
    font-size: 33px;
  }

  .hero-text {
    font-size: 22px;
  }

  .about-container {
    width: unset;
  }

  .feature-list-title {
    font-size: 33px;
  }

  .features {
    grid-template-columns: 1fr;
  }

  .feature-list {
    padding: 35px 0;
  }

  .feature-card {
    border-radius: 0;
  }
}
@@ -1,10 +0,0 @@

{{>header this}}
<main class="error_container">
  <img src="images/robot-404.svg" alt="Image of broken robot." />
  <div class="error_content">
    <h1>Aw! snap</h1>
    <h2>404 Page Not Found!</h2>
    <p>Go to <a href="/">search page</a></p>
  </div>
</main>
{{>footer}}
@@ -1,29 +0,0 @@

{{>header this}}
<main class="about-container">
  <article>
    <div>
      <h1>Websurfx</h1>
      <hr size="4" width="100%" color="#a6e3a1">
    </div>
    <p>A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy.<br> It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads.
    </p>

    <h2>Some of the Top Features:</h2>

    <ul>
      <li><strong>Lightning fast</strong> - Results load within milliseconds for an instant search experience.</li>
      <li><strong>Secure search</strong> - All searches are performed over an encrypted connection to prevent snooping.</li>
      <li><strong>Ad-free results</strong> - All search results are ad free and clutter free for a clean search experience.</li>
      <li><strong>Privacy focused</strong> - Websurfx does not track, store or sell your search data. Your privacy is our priority.</li>
      <li><strong>Free and Open source</strong> - The entire project's code is open source and available for free on <a href="https://github.com/neon-mmd/websurfx">GitHub</a> under the GNU Affero General Public License.</li>
      <li><strong>Highly customizable</strong> - Websurfx comes with 9 built-in color themes and supports creating custom themes effortlessly.</li>
    </ul>
  </article>

  <h3>Developed by: <a href="https://github.com/neon-mmd/websurfx">Websurfx team</a></h3>
</main>
{{>footer}}
@@ -1,3 +0,0 @@

<div class="search_bar">
  <input type="search" name="search-box" value="{{this.pageQuery}}" placeholder="Type to search" />
  <button type="submit" onclick="searchWeb()">search</button>
@@ -1,12 +0,0 @@

<div class="cookies tab">
  <h1>Cookies</h1>
  <p class="description">
    These are the cookies saved on your system, and they contain the preferences
    you chose on the settings page
  </p>
  <input type="text" name="cookie_field" value="" readonly />
  <p class="description">
    The cookies stored are not used by us for any malicious intent or for
    tracking you in any way.
  </p>
</div>
@@ -1,32 +0,0 @@

<div class="engines tab">
  <h1>Engines</h1>
  <h3>select search engines</h3>
  <p class="description">
    Select the search engines from the list of engines that you want results
    from
  </p>
  <div class="engine_selection">
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="select_all" onchange="toggleAllSelection()" />
        <span class="slider round"></span>
      </label>
      Select All
    </div>
    <hr />
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="engine" />
        <span class="slider round"></span>
      </label>
      DuckDuckGo
    </div>
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="engine" />
        <span class="slider round"></span>
      </label>
      Searx
    </div>
  </div>
</div>
@@ -1,16 +0,0 @@

<footer>
  <div>
    <span>Powered By <b>Websurfx</b></span><span>-</span><span>a lightning fast, privacy respecting, secure meta
      search engine</span>
  </div>
  <div>
    <ul>
      <li><a href="https://github.com/neon-mmd/websurfx">Source Code</a></li>
      <li><a href="https://github.com/neon-mmd/websurfx/issues">Issues/Bugs</a></li>
    </ul>
  </div>
</footer>
<script src="static/settings.js"></script>
</body>

</html>
@@ -1,13 +0,0 @@

<div class="general tab active">
  <h1>General</h1>
  <h3>Select a safe search level</h3>
  <p class="description">
    Select a safe search level from the menu below to filter content based on
    the level.
  </p>
  <select name="safe_search_levels">
    <option value=0>None</option>
    <option value=1>Low</option>
    <option value=2>Moderate</option>
  </select>
</div>
@@ -1,16 +0,0 @@

<!doctype html>
<html lang="en">

<head>
  <title>Websurfx</title>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <link href="static/colorschemes/{{colorscheme}}.css" rel="stylesheet" type="text/css" />
  <link href="static/themes/{{theme}}.css" rel="stylesheet" type="text/css" />
</head>

<body onload="getClientSettings()">
  <header>
    <h1><a href="/">Websurfx</a></h1>
    {{>navbar}}
  </header>
@@ -1,8 +0,0 @@

{{>header this}}
<main class="search-container">
  <img src="../images/websurfx_logo.png" alt="Websurfx meta-search engine logo" />
  {{>bar}}
  </div>
</main>
<script src="static/index.js"></script>
{{>footer}}
@@ -1,6 +0,0 @@

<nav>
  <ul>
    <li><a href="about">about</a></li>
    <li><a href="settings">settings</a></li>
  </ul>
</nav>
@@ -1,86 +0,0 @@

{{>header this.style}}
<main class="results">
  {{>search_bar this}}
  <div class="results_aggregated">
    {{#if results}} {{#each results}}
    <div class="result">
      <h1><a href="{{{this.url}}}">{{{this.title}}}</a></h1>
      <small>{{{this.url}}}</small>
      <p>{{{this.description}}}</p>
      <div class="upstream_engines">
        {{#each engine}}
        <span>{{{this}}}</span>
        {{/each}}
      </div>
    </div>
    {{/each}} {{else}} {{#if disallowed}}
    <div class="result_disallowed">
      <div class="description">
        <p>
          Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
          has been disallowed.
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          The query - <span class="user_query">{{{this.pageQuery}}}</span> - has
          been blacklisted via server configuration and hence disallowed by the
          server. Hence no results could be displayed for your query.
        </p>
      </div>
      <img src="./images/barricade.png" alt="Image of a Barricade" />
    </div>
    {{else}} {{#if filtered}}
    <div class="result_filtered">
      <div class="description">
        <p>
          Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
          has been filtered.
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          All the search results contain results that have been configured to be
          filtered out via server configuration and hence have been
          completely filtered out.
        </p>
      </div>
      <img src="./images/filter.png" alt="Image of a paper inside a funnel" />
    </div>
    {{else}} {{#if noEnginesSelected}}
    <div class="result_engine_not_selected">
      <div class="description">
        <p>
          No results could be fetched for your search "<span class="user_query">{{{this.pageQuery}}}</span>".
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          No results could be retrieved from the upstream search engines as no
          upstream search engines were selected from the settings page.
        </p>
      </div>
      <img src="./images/no_selection.png" alt="Image of a white cross inside a red circle" />
    </div>
    {{else}}
    <div class="result_not_found">
      <p>Your search - {{{this.pageQuery}}} - did not match any documents.</p>
      <p class="suggestions">Suggestions:</p>
      <ul>
        <li>Make sure that all words are spelled correctly.</li>
        <li>Try different keywords.</li>
        <li>Try more general keywords.</li>
      </ul>
      <img src="./images/no_results.gif" alt="Man fishing gif" />
    </div>
    {{/if}} {{/if}} {{/if}} {{/if}}
  </div>
  <div class="page_navigation">
    <button type="button" onclick="navigate_backward()">
      ← previous
    </button>
    <button type="button" onclick="navigate_forward()">next →</button>
  </div>
</main>
<script src="static/index.js"></script>
<script src="static/search_area_options.js"></script>
<script src="static/pagination.js"></script>
<script src="static/error_box.js"></script>
{{>footer}}
@@ -1,36 +0,0 @@

<div class="search_area">
  {{>bar this}}
  <div class="error_box">
    {{#if engineErrorsInfo}}
    <button onclick="toggleErrorBox()" class="error_box_toggle_button">
      <img src="./images/warning.svg" alt="Warning icon for error box" />
    </button>
    <div class="dropdown_error_box">
      {{#each engineErrorsInfo}}
      <div class="error_item">
        <span class="engine_name">{{{this.engine}}}</span>
        <span class="engine_name">{{{this.error}}}</span>
        <span class="severity_color" style="background: {{{this.severity_color}}};"></span>
      </div>
      {{/each}}
    </div>
    {{else}}
    <button onclick="toggleErrorBox()" class="error_box_toggle_button">
      <img src="./images/info.svg" alt="Info icon for error box" />
    </button>
    <div class="dropdown_error_box">
      <div class="no_errors">
        Everything looks good 🙂!!
      </div>
    </div>
    {{/if}}
  </div>
</div>
<div class="search_options">
  <select name="safe_search_levels" {{#if (gte safeSearchLevel 3)}} disabled {{/if}}>
    <option value=0 {{#if (eq safeSearchLevel 0)}} selected {{/if}}>SafeSearch: None</option>
    <option value=1 {{#if (eq safeSearchLevel 1)}} selected {{/if}}>SafeSearch: Low</option>
    <option value=2 {{#if (eq safeSearchLevel 2)}} selected {{/if}}>SafeSearch: Moderate</option>
  </select>
</div>
</div>
@@ -1,22 +0,0 @@

{{>header this}}
<main class="settings">
  <h1>Settings</h1>
  <hr />
  <div class="settings_container">
    <div class="sidebar">
      <div class="btn active" onclick="setActiveTab(this)">general</div>
      <div class="btn" onclick="setActiveTab(this)">user interface</div>
      <div class="btn" onclick="setActiveTab(this)">engines</div>
      <div class="btn" onclick="setActiveTab(this)">cookies</div>
    </div>
    <div class="main_container">
      {{> general_tab}} {{> user_interface_tab}} {{> engines_tab}} {{> cookies_tab}}
      <p class="message"></p>
      <button type="submit" onclick="setClientSettings()">Save</button>
    </div>
  </div>
</main>
<script src="static/settings.js"></script>
<script src="static/cookies.js"></script>
{{>footer}}
@@ -1,28 +0,0 @@

<div class="user_interface tab">
  <h1>User Interface</h1>
  <h3>select theme</h3>
  <p class="description">
    Select the theme from the available themes to be used in the user interface
  </p>
  <select name="themes">
    <option value="simple">simple</option>
  </select>
  <h3>select color scheme</h3>
  <p class="description">
    Select the color scheme for your theme to be used in the user interface
  </p>
  <select name="colorschemes">
    <option value="catppuccin-mocha">catppuccin mocha</option>
    <option value="dark-chocolate">dark chocolate</option>
    <option value="dracula">dracula</option>
    <option value="gruvbox-dark">gruvbox dark</option>
    <option value="monokai">monokai</option>
    <option value="nord">nord</option>
    <option value="oceanic-next">oceanic next</option>
    <option value="one-dark">one dark</option>
    <option value="solarized-dark">solarized dark</option>
    <option value="solarized-light">solarized light</option>
    <option value="tokyo-night">tokyo night</option>
    <option value="tomorrow-night">tomorrow night</option>
  </select>
</div>
@@ -2,10 +2,11 @@

//!
//! This module contains the main function which handles the logging of the application to the
//! stdout and handles the command line arguments provided and launches the `websurfx` server.

#[cfg(not(feature = "dhat-heap"))]
use mimalloc::MiMalloc;
use std::net::TcpListener;
use websurfx::{cache::cacher::Cache, config::parser::Config, run};

use std::{net::TcpListener, sync::OnceLock};
use websurfx::{cache::cacher::create_cache, config::parser::Config, run};

/// A dhat heap memory profiler
#[cfg(feature = "dhat-heap")]

@@ -16,6 +17,9 @@ static ALLOC: dhat::Alloc = dhat::Alloc;

#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;

/// A static constant for holding the parsed config.
static CONFIG: OnceLock<Config> = OnceLock::new();

/// The function that launches the main server and registers all the routes of the website.
///
/// # Error

@@ -28,10 +32,10 @@ async fn main() -> std::io::Result<()> {

    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    // Initialize the parsed config file.
    let config = Config::parse(false).unwrap();
    // Initialize the parsed config globally.
    let config = CONFIG.get_or_init(|| Config::parse(false).unwrap());
    let cache = Cache::build(&config).await;
    let cache = create_cache(config).await;

    log::info!(
        "started server on port {} and IP {}",

@@ -44,7 +48,7 @@ async fn main() -> std::io::Result<()> {

        config.port,
    );

    let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?;
    let listener = TcpListener::bind((config.binding_ip.as_str(), config.port))?;

    run(listener, config, cache)?.await
}
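A quick note on the `OnceLock` change shown above: parsing the config into a `static OnceLock` lets `get_or_init` hand back a `&'static Config`, so the same parsed config can be shared with the server without cloning it. A minimal sketch of the pattern, with an illustrative `Config` struct (its `port` field here is a stand-in, not the project's real definition):

use std::sync::OnceLock;

/// Illustrative stand-in for the parsed application config.
struct Config {
    port: u16,
}

static CONFIG: OnceLock<Config> = OnceLock::new();

fn config() -> &'static Config {
    // `get_or_init` runs the closure at most once, even under concurrent
    // callers; everyone afterwards gets the same `&'static` reference.
    CONFIG.get_or_init(|| Config { port: 8080 })
}

fn main() {
    assert_eq!(config().port, 8080);
}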
src/cache/cacher.rs
@@ -2,8 +2,10 @@

//! from the upstream search engines in a json format.

use error_stack::Report;
use futures::future::join_all;
#[cfg(feature = "memory-cache")]
use mini_moka::sync::Cache as MokaCache;
use moka::future::Cache as MokaCache;

#[cfg(feature = "memory-cache")]
use std::time::Duration;
use tokio::sync::Mutex;

@@ -14,24 +16,13 @@ use super::error::CacheError;

#[cfg(feature = "redis-cache")]
use super::redis_cacher::RedisCache;

/// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
#[derive(Clone)]
pub enum Cache {
    /// Caching is disabled
    Disabled,
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    /// Encapsulates the Redis based cache
    Redis(RedisCache),
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    /// Contains the in-memory cache.
    InMemory(MokaCache<String, SearchResults>),
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    /// Contains both the in-memory cache and Redis based cache
    Hybrid(RedisCache, MokaCache<String, SearchResults>),
}
#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
use super::encryption::*;

impl Cache {
    /// A function that builds the cache from the given configuration.
/// Abstraction trait for common methods provided by a cache backend.
#[async_trait::async_trait]
pub trait Cacher: Send + Sync {
    /// A function that builds the cache from the given configuration.
    ///
    /// # Arguments
    ///
@@ -39,89 +30,10 @@ impl Cache {

    ///
    /// # Returns
    ///
    /// It returns a newly initialized variant based on the feature enabled by the user.
    pub async fn build(_config: &Config) -> Self {
        #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
        {
            log::info!("Using a hybrid cache");
            Cache::new_hybrid(
                RedisCache::new(&_config.redis_url, 5)
                    .await
                    .expect("Redis cache configured"),
            )
        }
        #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
        {
            log::info!("Listening redis server on {}", &_config.redis_url);
            Cache::new(
                RedisCache::new(&_config.redis_url, 5)
                    .await
                    .expect("Redis cache configured"),
            )
        }
        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
        {
            log::info!("Using an in-memory cache");
            Cache::new_in_memory()
        }
        #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
        {
            log::info!("Caching is disabled");
            Cache::Disabled
        }
    }

    /// A function that initializes a new connection pool struct.
    ///
    /// # Arguments
    ///
    /// * `redis_cache` - It takes the newly initialized connection pool struct as an argument.
    ///
    /// # Returns
    ///
    /// It returns a `Redis` variant with the newly initialized connection pool struct.
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    pub fn new(redis_cache: RedisCache) -> Self {
        Cache::Redis(redis_cache)
    }

    /// A function that initializes the `in memory` cache which is used to cache the results in
    /// memory with the search engine, thus improving performance by making retrieval and caching of
    /// results faster.
    ///
    /// # Returns
    ///
    /// It returns an `InMemory` variant with the newly initialized in-memory cache type.
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    pub fn new_in_memory() -> Self {
        let cache = MokaCache::builder()
            .max_capacity(1000)
            .time_to_live(Duration::from_secs(60))
            .build();
        Cache::InMemory(cache)
    }

    /// A function that initializes both the in-memory cache and the redis client connection used
    /// for managing the hybrid cache, which increases the resiliency of the search engine by
    /// allowing the cache to switch to `in memory` caching if the `redis` cache server is
    /// temporarily unavailable.
    ///
    /// # Arguments
    ///
    /// * `redis_cache` - It takes the `redis` client connection struct as an argument.
    ///
    /// # Returns
    ///
    /// It returns a tuple variant `Hybrid` storing both the in-memory cache type and the `redis`
    /// client connection struct.
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    pub fn new_hybrid(redis_cache: RedisCache) -> Self {
        let cache = MokaCache::builder()
            .max_capacity(1000)
            .time_to_live(Duration::from_secs(60))
            .build();
        Cache::Hybrid(redis_cache, cache)
    }
    /// It returns a newly initialized backend based on the feature enabled by the user.
    async fn build(config: &Config) -> Self
    where
        Self: Sized;

    /// A function which fetches the cached json results as json string.
    ///
@@ -133,31 +45,7 @@ impl Cache {

    ///
    /// Returns the `SearchResults` from the cache if the program executes normally otherwise
    /// returns a `CacheError` if the results cannot be retrieved from the cache.
    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
        match self {
            Cache::Disabled => Err(Report::new(CacheError::MissingValue)),
            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
            Cache::Redis(redis_cache) => {
                let json = redis_cache.cached_json(_url).await?;
                Ok(serde_json::from_str::<SearchResults>(&json)
                    .map_err(|_| CacheError::SerializationError)?)
            }
            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
            Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) {
                Some(res) => Ok(res),
                None => Err(Report::new(CacheError::MissingValue)),
            },
            #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
            Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await {
                Ok(res) => Ok(serde_json::from_str::<SearchResults>(&res)
                    .map_err(|_| CacheError::SerializationError)?),
                Err(_) => match in_memory.get(&_url.to_string()) {
                    Some(res) => Ok(res),
                    None => Err(Report::new(CacheError::MissingValue)),
                },
            },
        }
    }
    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>>;

    /// A function which caches the results by using the `url` as the key and
    /// the `json results` as the value and stores it in the cache
@@ -172,44 +60,469 @@ impl Cache {

    /// Returns a unit type if the program caches the given search results without a failure
    /// otherwise it returns a `CacheError` if the search results cannot be cached due to a
    /// failure.
    pub async fn cache_results(
    async fn cache_results(
        &mut self,
        _search_results: &SearchResults,
        _url: &str,
    ) -> Result<(), Report<CacheError>> {
        match self {
            Cache::Disabled => Ok(()),
            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
            Cache::Redis(redis_cache) => {
                let json = serde_json::to_string(_search_results)
                    .map_err(|_| CacheError::SerializationError)?;
                redis_cache.cache_results(&json, _url).await
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>>;

    /// A helper function which computes the hash of the url and formats and returns it as string.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes an url as string.
    fn hash_url(&self, url: &str) -> String {
        blake3::hash(url.as_bytes()).to_string()
    }
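The `hash_url` helper above keys every cache entry by the BLAKE3 digest of the URL rather than the raw URL, which gives fixed-length keys and avoids storing full query strings in the cache store. A small standalone sketch of what it produces, using the same `blake3::hash` call:

fn main() {
    // Hash the canonical search URL; the digest renders as a 64-character
    // lowercase hex string, which becomes the cache key.
    let url = "https://example.com/search?q=rust&page=1";
    let key = blake3::hash(url.as_bytes()).to_string();

    assert_eq!(key.len(), 64);
    // Hashing is deterministic, so the same URL always maps to the same key.
    assert_eq!(key, blake3::hash(url.as_bytes()).to_string());
}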
    /// A helper function that returns either encrypted or decrypted results.
    /// Feature flags (**encrypt-cache-results or cec-cache-results**) are required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    /// * `encrypt` - A boolean to choose whether to encrypt or decrypt the bytes.
    ///
    /// # Error
    /// Returns either encrypted or decrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(
        // feature = "compress-cache-results",
        feature = "encrypt-cache-results",
        feature = "cec-cache-results"
    ))]
    async fn encrypt_or_decrypt_results(
        &mut self,
        mut bytes: Vec<u8>,
        encrypt: bool,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        use chacha20poly1305::{
            aead::{Aead, AeadCore, KeyInit, OsRng},
            ChaCha20Poly1305,
        };

        let cipher = CIPHER.get_or_init(|| {
            let key = ChaCha20Poly1305::generate_key(&mut OsRng);
            ChaCha20Poly1305::new(&key)
        });

        let encryption_key = ENCRYPTION_KEY.get_or_init(
            || ChaCha20Poly1305::generate_nonce(&mut OsRng), // 96-bits; unique per message
        );

        bytes = if encrypt {
            cipher
                .encrypt(encryption_key, bytes.as_ref())
                .map_err(|_| CacheError::EncryptionError)?
        } else {
            cipher
                .decrypt(encryption_key, bytes.as_ref())
                .map_err(|_| CacheError::EncryptionError)?
        };

        Ok(bytes)
    }
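For readers unfamiliar with the AEAD API used above, here is a minimal standalone ChaCha20-Poly1305 roundtrip with the same `chacha20poly1305` calls. Unlike the method above, which caches the cipher and nonce in `OnceLock` statics (`CIPHER` and `ENCRYPTION_KEY`) so every cache entry in a process run shares them, this sketch generates both ad hoc:

use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305,
};

fn main() {
    // One key and one 96-bit nonce, generated from the OS RNG.
    let key = ChaCha20Poly1305::generate_key(&mut OsRng);
    let cipher = ChaCha20Poly1305::new(&key);
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);

    // Encrypt, then decrypt. AEAD also authenticates: tampering with the
    // ciphertext makes `decrypt` return an error instead of garbage bytes.
    let ciphertext = cipher
        .encrypt(&nonce, b"cached search results".as_ref())
        .unwrap();
    let plaintext = cipher.decrypt(&nonce, ciphertext.as_ref()).unwrap();
    assert_eq!(plaintext, b"cached search results");
}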
    /// A helper function that returns compressed results.
    /// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the compressed bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
    async fn compress_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        use tokio::io::AsyncWriteExt;
        let mut writer = async_compression::tokio::write::BrotliEncoder::new(Vec::new());
        writer
            .write_all(&bytes)
            .await
            .map_err(|_| CacheError::CompressionError)?;
        writer
            .shutdown()
            .await
            .map_err(|_| CacheError::CompressionError)?;
        bytes = writer.into_inner();
        Ok(bytes)
    }

    /// A helper function that returns compressed-encrypted results.
    /// Feature flag (**cec-cache-results**) is required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the compressed and encrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(feature = "cec-cache-results")]
    async fn compress_encrypt_compress_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        // compress first
        bytes = self.compress_results(bytes).await?;
        // encrypt
        bytes = self.encrypt_or_decrypt_results(bytes, true).await?;

        // compress again;
        bytes = self.compress_results(bytes).await?;

        Ok(bytes)
    }
    /// A helper function that returns decompressed results.
    /// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the uncompressed bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
    async fn decompress_results(&mut self, bytes: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
        cfg_if::cfg_if! {
            if #[cfg(feature = "compress-cache-results")]
            {
                decompress_util(bytes).await
            }
            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
            Cache::InMemory(cache) => {
                cache.insert(_url.to_string(), _search_results.clone());
                Ok(())
            }
            #[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
            Cache::Hybrid(redis_cache, cache) => {
                let json = serde_json::to_string(_search_results)
                    .map_err(|_| CacheError::SerializationError)?;
                match redis_cache.cache_results(&json, _url).await {
                    Ok(_) => Ok(()),
                    Err(_) => {
                        cache.insert(_url.to_string(), _search_results.clone());
                        Ok(())
                    }
                }
            else if #[cfg(feature = "cec-cache-results")]
            {
                let decompressed = decompress_util(bytes).await?;
                let decrypted = self.encrypt_or_decrypt_results(decompressed, false).await?;

                decompress_util(&decrypted).await
            }
        }
    }
    /// A helper function that compresses or encrypts search results before they're inserted into a cache store.
    ///
    /// # Arguments
    ///
    /// * `search_results` - A reference to the search results to process.
    ///
    /// # Error
    /// Returns a Vec of compressed or encrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    async fn pre_process_search_results(
        &mut self,
        search_results: &SearchResults,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        #[allow(unused_mut)] // needs to be mutable when any of the features is enabled
        let mut bytes: Vec<u8> = search_results.try_into()?;
        #[cfg(feature = "compress-cache-results")]
        {
            let compressed = self.compress_results(bytes).await?;
            bytes = compressed;
        }

        #[cfg(feature = "encrypt-cache-results")]
        {
            let encrypted = self.encrypt_or_decrypt_results(bytes, true).await?;
            bytes = encrypted;
        }

        #[cfg(feature = "cec-cache-results")]
        {
            let compressed_encrypted_compressed =
                self.compress_encrypt_compress_results(bytes).await?;
            bytes = compressed_encrypted_compressed;
        }

        Ok(bytes)
    }
    /// A helper function that decompresses or decrypts search results after they're fetched from the cache store.
    ///
    /// # Arguments
    ///
    /// * `bytes` - A Vec of bytes stored in the cache.
    ///
    /// # Error
    /// Returns the SearchResults struct on success otherwise it returns a CacheError
    /// on failure.
    #[allow(unused_mut)] // needs to be mutable when any of the features is enabled
    async fn post_process_search_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<SearchResults, Report<CacheError>> {
        #[cfg(feature = "compress-cache-results")]
        {
            let decompressed = self.decompress_results(&bytes).await?;
            bytes = decompressed
        }

        #[cfg(feature = "encrypt-cache-results")]
        {
            let decrypted = self.encrypt_or_decrypt_results(bytes, false).await?;
            bytes = decrypted
        }

        #[cfg(feature = "cec-cache-results")]
        {
            let decompressed_decrypted = self.decompress_results(&bytes).await?;
            bytes = decompressed_decrypted;
        }

        Ok(bytes.try_into()?)
    }
}
/// A helper function that returns decompressed results.
/// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
///
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
///
/// # Error
/// Returns the uncompressed bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
async fn decompress_util(input: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
    use tokio::io::AsyncWriteExt;
    let mut writer = async_compression::tokio::write::BrotliDecoder::new(Vec::new());

    writer
        .write_all(input)
        .await
        .map_err(|_| CacheError::CompressionError)?;
    writer
        .shutdown()
        .await
        .map_err(|_| CacheError::CompressionError)?;
    let bytes = writer.into_inner();
    Ok(bytes)
}
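The encoder in `compress_results` and the decoder above are symmetric, so a compress/decompress pair roundtrips cleanly. A self-contained sketch using the same `async_compression` writer API as the code above (this assumes the tokio-backed features the module already pulls in, plus `tokio` with the `macros` and `rt` features for the `#[tokio::main]` entry point):

use tokio::io::AsyncWriteExt;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let input = b"some serialized search results".repeat(100);

    // Compress: write plaintext into a Brotli encoder backed by a Vec<u8>.
    let mut encoder = async_compression::tokio::write::BrotliEncoder::new(Vec::new());
    encoder.write_all(&input).await?;
    encoder.shutdown().await?; // flushes the final Brotli block
    let compressed = encoder.into_inner();

    // Decompress: write the compressed bytes into the matching decoder.
    let mut decoder = async_compression::tokio::write::BrotliDecoder::new(Vec::new());
    decoder.write_all(&compressed).await?;
    decoder.shutdown().await?;
    assert_eq!(decoder.into_inner(), input);

    Ok(())
}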
#[cfg(feature = "redis-cache")]
#[async_trait::async_trait]
impl Cacher for RedisCache {
    async fn build(config: &Config) -> Self {
        log::info!(
            "Initialising redis cache. Listening to {}",
            &config.redis_url
        );
        RedisCache::new(&config.redis_url, 5, config.cache_expiry_time)
            .await
            .expect("Redis cache configured")
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        use base64::Engine;
        let hashed_url_string: &str = &self.hash_url(url);
        let base64_string = self.cached_json(hashed_url_string).await?;

        let bytes = base64::engine::general_purpose::STANDARD_NO_PAD
            .decode(base64_string)
            .map_err(|_| CacheError::Base64DecodingOrEncodingError)?;
        self.post_process_search_results(bytes).await
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        use base64::Engine;

        // size of search_results is expected to be equal to size of urls -> key/value pairs for cache;
        let search_results_len = search_results.len();

        let mut bytes = Vec::with_capacity(search_results_len);

        for result in search_results {
            let processed = self.pre_process_search_results(result).await?;
            bytes.push(processed);
        }

        let base64_strings = bytes
            .iter()
            .map(|bytes_vec| base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes_vec));

        let mut hashed_url_strings = Vec::with_capacity(search_results_len);

        for url in urls {
            let hash = self.hash_url(url);
            hashed_url_strings.push(hash);
        }
        self.cache_json(base64_strings, hashed_url_strings.into_iter())
            .await
    }
}

use std::{convert::TryInto, sync::Arc};

/// TryInto implementation for SearchResults from Vec<u8>
impl TryInto<SearchResults> for Vec<u8> {
    type Error = CacheError;

    fn try_into(self) -> Result<SearchResults, Self::Error> {
        bincode::deserialize_from(self.as_slice()).map_err(|_| CacheError::SerializationError)
    }
}

impl TryInto<Vec<u8>> for &SearchResults {
    type Error = CacheError;

    fn try_into(self) -> Result<Vec<u8>, Self::Error> {
        bincode::serialize(self).map_err(|_| CacheError::SerializationError)
    }
}
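These two impls make the byte-level cache layer symmetric; a round trip (sketch, not part of the diff) is simply:

    let encoded: Vec<u8> = (&results).try_into()?;   // results: SearchResults
    let decoded: SearchResults = encoded.try_into()?;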

/// Memory based cache backend.
#[cfg(feature = "memory-cache")]
pub struct InMemoryCache {
    /// The backend cache which stores data.
    cache: Arc<MokaCache<String, Vec<u8>>>,
}

#[cfg(feature = "memory-cache")]
impl Clone for InMemoryCache {
    fn clone(&self) -> Self {
        Self {
            cache: self.cache.clone(),
        }
    }
}

#[cfg(feature = "memory-cache")]
#[async_trait::async_trait]
impl Cacher for InMemoryCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising in-memory cache");

        InMemoryCache {
            cache: Arc::new(
                MokaCache::builder()
                    .time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
                    .build(),
            ),
        }
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let hashed_url_string = self.hash_url(url);
        match self.cache.get(&hashed_url_string).await {
            Some(res) => self.post_process_search_results(res).await,
            None => Err(Report::new(CacheError::MissingValue)),
        }
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let mut tasks: Vec<_> = Vec::with_capacity(urls.len());
        for (url, search_result) in urls.iter().zip(search_results.iter()) {
            let hashed_url_string = self.hash_url(url);
            let bytes = self.pre_process_search_results(search_result).await?;
            let new_self = self.clone();
            tasks.push(tokio::spawn(async move {
                new_self.cache.insert(hashed_url_string, bytes).await
            }));
        }

        join_all(tasks).await;

        Ok(())
    }
}
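Cloning `InMemoryCache` is cheap because only the `Arc` handle is duplicated, so every task spawned above inserts into the same underlying Moka cache. A minimal sketch of the builder call (not part of the diff):

    // Every entry expires a fixed time after insertion, regardless of reads.
    let cache: MokaCache<String, Vec<u8>> = MokaCache::builder()
        .time_to_live(Duration::from_secs(60))
        .build();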

/// Cache backend which utilises both memory and redis based caches.
///
/// The hybrid cache system uses both the types of cache to ensure maximum availability.
/// The set method sets the key, value pair in both the caches. Therefore in a case where redis
/// cache becomes unavailable, the backend will retrieve the value from the in-memory cache.
#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
pub struct HybridCache {
    /// The in-memory backend cache which stores data.
    memory_cache: InMemoryCache,
    /// The redis backend cache which stores data.
    redis_cache: RedisCache,
}

#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
#[async_trait::async_trait]
impl Cacher for HybridCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising hybrid cache");
        HybridCache {
            memory_cache: InMemoryCache::build(config).await,
            redis_cache: RedisCache::build(config).await,
        }
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        match self.redis_cache.cached_results(url).await {
            Ok(res) => Ok(res),
            Err(_) => self.memory_cache.cached_results(url).await,
        }
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        self.redis_cache.cache_results(search_results, urls).await?;
        self.memory_cache
            .cache_results(search_results, urls)
            .await?;

        Ok(())
    }
}

/// Dummy cache backend
pub struct DisabledCache;

#[async_trait::async_trait]
impl Cacher for DisabledCache {
    async fn build(_config: &Config) -> Self {
        log::info!("Caching is disabled");
        DisabledCache
    }

    async fn cached_results(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
        Err(Report::new(CacheError::MissingValue))
    }

    async fn cache_results(
        &mut self,
        _search_results: &[SearchResults],
        _urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        Ok(())
    }
}

/// A structure to efficiently share the cache between threads - as it is protected by a Mutex.
pub struct SharedCache {
    /// The internal cache protected from concurrent access by a mutex
    cache: Mutex<Cache>,
    cache: Mutex<Box<dyn Cacher>>,
}

impl SharedCache {

@ -220,9 +533,9 @@ impl SharedCache {
    /// * `cache` - It takes the `Cache` enum variant as an argument with the preferred cache type.
    ///
    /// Returns a newly constructed `SharedCache` struct.
    pub fn new(cache: Cache) -> Self {
    pub fn new(cache: impl Cacher + 'static) -> Self {
        Self {
            cache: Mutex::new(cache),
            cache: Mutex::new(Box::new(cache)),
        }
    }

@ -231,15 +544,15 @@ impl SharedCache {
    /// # Arguments
    ///
    /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
    ///   cached results from the cache.
    ///
    /// # Error
    ///
    /// Returns a `SearchResults` struct containing the search results from the cache if nothing
    /// goes wrong otherwise returns a `CacheError`.
    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
    pub async fn cached_results(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let mut mut_cache = self.cache.lock().await;
        mut_cache.cached_json(url).await
        mut_cache.cached_results(url).await
    }

    /// A setter function which caches the results by using the `url` as the key and

@ -248,9 +561,9 @@ impl SharedCache {
    /// # Arguments
    ///
    /// * `search_results` - It takes the `SearchResults` as an argument which are results that
    ///   need to be cached.
    /// * `url` - It takes the search url as an argument which will be used as the key for storing
    ///   results in the cache.
    ///
    /// # Error
    ///

@ -258,10 +571,27 @@ impl SharedCache {
    /// on a failure.
    pub async fn cache_results(
        &self,
        search_results: &SearchResults,
        url: &str,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let mut mut_cache = self.cache.lock().await;
        mut_cache.cache_results(search_results, url).await
        mut_cache.cache_results(search_results, urls).await
    }
}

/// A function to initialise the cache backend.
pub async fn create_cache(config: &Config) -> impl Cacher {
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    return HybridCache::build(config).await;

    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    return InMemoryCache::build(config).await;

    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    return RedisCache::build(config).await;

    #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
    return DisabledCache::build(config).await;
}
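A startup sketch (not part of the diff; the `Config::new` call is an assumption based on the config parser shown later): the concrete backend is chosen at compile time by the feature flags, and `SharedCache` then erases the type behind `Box<dyn Cacher>`.

    let config = Config::new(false)?;        // assumed constructor
    let cache = create_cache(&config).await; // Hybrid / InMemory / Redis / Disabled
    let shared = SharedCache::new(cache);    // usable from many handlers behind the Mutex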

//#[cfg(feature = "Compress-cache-results")]

src/cache/encryption.rs
@ -0,0 +1,25 @@
use chacha20poly1305::{
    consts::{B0, B1},
    ChaChaPoly1305,
};
use std::sync::OnceLock;

use chacha20::{
    cipher::{
        generic_array::GenericArray,
        typenum::{UInt, UTerm},
        StreamCipherCoreWrapper,
    },
    ChaChaCore,
};

/// The ChaCha20 core wrapped in a stream cipher for use in ChaCha20-Poly1305 authenticated encryption.
type StreamCipherCoreWrapperType =
    StreamCipherCoreWrapper<ChaChaCore<UInt<UInt<UInt<UInt<UTerm, B1>, B0>, B1>, B0>>>;
/// Our ChaCha20-Poly1305 cipher instance, lazily initialized.
pub static CIPHER: OnceLock<ChaChaPoly1305<StreamCipherCoreWrapperType>> = OnceLock::new();

/// The type alias for a 12-byte array (the type-level integer
/// `UInt<UInt<UInt<UInt<UTerm, B1>, B1>, B0>, B0>` encodes 0b1100 = 12).
type GenericArrayType = GenericArray<u8, UInt<UInt<UInt<UInt<UTerm, B1>, B1>, B0>, B0>>;
/// Our encryption key, lazily initialized.
pub static ENCRYPTION_KEY: OnceLock<GenericArrayType> = OnceLock::new();
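A minimal initialisation sketch (not part of the diff; the 32-byte `raw_key` source is an assumption): `ChaChaPoly1305<StreamCipherCoreWrapperType>` is what the crate's `ChaCha20Poly1305` alias expands to, so the cipher static can be populated once at startup.

    use chacha20poly1305::{aead::KeyInit, ChaCha20Poly1305, Key};

    // `raw_key` is a hypothetical 32-byte secret loaded from configuration.
    let key = Key::from_slice(&raw_key);
    let _ = CIPHER.get_or_init(|| ChaCha20Poly1305::new(key));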

src/cache/error.rs
@ -18,6 +18,12 @@ pub enum CacheError {
    SerializationError,
    /// Returned when the value is missing.
    MissingValue,
    /// Whenever encryption or decryption of cache results fails
    EncryptionError,
    /// Whenever compression of the cache results fails
    CompressionError,
    /// Whenever base64 encoding or decoding fails
    Base64DecodingOrEncodingError,
}

impl fmt::Display for CacheError {

@ -43,6 +49,18 @@ impl fmt::Display for CacheError {
            CacheError::SerializationError => {
                write!(f, "Unable to serialize, deserialize from the cache")
            }

            CacheError::EncryptionError => {
                write!(f, "Failed to encrypt or decrypt cache-results")
            }

            CacheError::CompressionError => {
                write!(f, "failed to compress or uncompress cache results")
            }

            CacheError::Base64DecodingOrEncodingError => {
                write!(f, "base64 encoding or decoding failed")
            }
        }
    }
}

src/cache/mod.rs
@ -1,7 +1,11 @@
//! This module provides the modules which provide the functionality to cache the aggregated
//! results fetched and aggregated from the upstream search engines in a json format.

pub mod cacher;

#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
/// encryption module contains encryption utils such as the cipher and key
pub mod encryption;
pub mod error;

#[cfg(feature = "redis-cache")]
pub mod redis_cacher;

src/cache/redis_cacher.rs
@ -1,24 +1,31 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.

use error_stack::Report;
use futures::future::try_join_all;
use md5::compute;
use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};

use super::error::CacheError;
use error_stack::Report;
use futures::stream::FuturesUnordered;
use redis::{
    aio::ConnectionManager, AsyncCommands, Client, ExistenceCheck, RedisError, SetExpiry,
    SetOptions,
};

/// A constant holding the redis pipeline size.
const REDIS_PIPELINE_SIZE: usize = 3;

/// A named struct which stores the redis Connection url address to which the client will
/// connect.
#[derive(Clone)]
pub struct RedisCache {
    /// It stores a pool of connections ready to be used.
    connection_pool: Vec<ConnectionManager>,
    connection_pool: Box<[ConnectionManager]>,
    /// It stores the size of the connection pool (in other words the number of
    /// connections that should be stored in the pool).
    pool_size: u8,
    /// It stores the index of which connection is being used at the moment.
    current_connection: u8,
    /// It stores the max TTL for keys.
    cache_ttl: u16,
    /// It stores the redis pipeline struct of size 3.
    pipeline: redis::Pipeline,
}

impl RedisCache {

@ -29,6 +36,8 @@ impl RedisCache {
    /// * `redis_connection_url` - It takes the redis Connection url address.
    /// * `pool_size` - It takes the size of the connection pool (in other words the number of
    ///   connections that should be stored in the pool).
    /// * `cache_ttl` - It takes the time to live for cached results to live in the redis
    ///   server.
    ///
    /// # Error
    ///

@ -37,48 +46,50 @@ impl RedisCache {
    pub async fn new(
        redis_connection_url: &str,
        pool_size: u8,
        cache_ttl: u16,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        let client = Client::open(redis_connection_url)?;
        let mut tasks: Vec<_> = Vec::new();
        let tasks: FuturesUnordered<_> = FuturesUnordered::new();

        for _ in 0..pool_size {
            tasks.push(client.get_tokio_connection_manager());
            let client_partially_cloned = client.clone();
            tasks.push(tokio::spawn(async move {
                client_partially_cloned.get_connection_manager().await
            }));
        }

        let mut outputs = Vec::with_capacity(tasks.len());
        for task in tasks {
            outputs.push(task.await??);
        }

        let redis_cache = RedisCache {
            connection_pool: try_join_all(tasks).await?,
            connection_pool: outputs.into_boxed_slice(),
            pool_size,
            current_connection: Default::default(),
            cache_ttl,
            pipeline: redis::Pipeline::with_capacity(REDIS_PIPELINE_SIZE),
        };

        Ok(redis_cache)
    }

    /// A helper function which computes the hash of the url and formats and returns it as string.
    /// A function which fetches the cached json as json string from the redis server.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes a url as string.
    fn hash_url(&self, url: &str) -> String {
        format!("{:?}", compute(url))
    }
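For reference (sketch, not part of the diff): `compute` is `md5::compute`, and the `{:?}` formatting of the returned `Digest` yields the 32-character lowercase hex string that becomes the cache key.

    let digest = format!("{:?}", compute("https://example.com/?q=rust"));
    assert_eq!(digest.len(), 32);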

    /// A function which fetches the cached json results as json string from the redis server.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes a url as a string.
    /// * `key` - It takes a string as key.
    ///
    /// # Error
    ///
    /// Returns the results as a String from the cache on success otherwise returns a `CacheError`
    /// Returns the json as a String from the cache on success otherwise returns a `CacheError`
    /// on a failure.
    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<CacheError>> {
    pub async fn cached_json(&mut self, key: &str) -> Result<String, Report<CacheError>> {
        self.current_connection = Default::default();
        let hashed_url_string: &str = &self.hash_url(url);

        let mut result: Result<String, RedisError> = self.connection_pool
            [self.current_connection as usize]
            .get(hashed_url_string)
            .get(key)
            .await;

        // Code to check whether the current connection being used is dropped with connection error

@ -99,7 +110,7 @@ impl RedisCache {
                ));
            }
            result = self.connection_pool[self.current_connection as usize]
                .get(hashed_url_string)
                .get(key)
                .await;
            continue;
        }

@ -110,30 +121,40 @@ impl RedisCache {
        }
    }

    /// A function which caches the results by using the hashed `url` as the key and
    /// A function which caches the json by using the key and
    /// `json results` as the value and stores it in redis server with ttl(time to live)
    /// set to 60 seconds.
    ///
    /// # Arguments
    ///
    /// * `json_results` - It takes the json results string as an argument.
    /// * `url` - It takes the url as a String.
    /// * `key` - It takes the key as a String.
    ///
    /// # Error
    ///
    /// Returns a unit type if the results are cached successfully otherwise returns a `CacheError`
    /// on a failure.
    pub async fn cache_results(
    pub async fn cache_json(
        &mut self,
        json_results: &str,
        url: &str,
        json_results: impl Iterator<Item = String>,
        keys: impl Iterator<Item = String>,
    ) -> Result<(), Report<CacheError>> {
        self.current_connection = Default::default();
        let hashed_url_string: &str = &self.hash_url(url);

        let mut result: Result<(), RedisError> = self.connection_pool
            [self.current_connection as usize]
            .set_ex(hashed_url_string, json_results, 60)
        for (key, json_result) in keys.zip(json_results) {
            self.pipeline.set_options(
                key,
                json_result,
                SetOptions::default()
                    .conditional_set(ExistenceCheck::NX)
                    .get(true)
                    .with_expiration(SetExpiry::EX(self.cache_ttl.into())),
            );
        }

        let mut result: Result<(), RedisError> = self
            .pipeline
            .query_async(&mut self.connection_pool[self.current_connection as usize])
            .await;

        // Code to check whether the current connection being used is dropped with connection error

@ -153,8 +174,11 @@ impl RedisCache {
                    CacheError::PoolExhaustionWithConnectionDropError,
                ));
            }
            result = self.connection_pool[self.current_connection as usize]
                .set_ex(hashed_url_string, json_results, 60)
            result = self
                .pipeline
                .query_async(
                    &mut self.connection_pool[self.current_connection as usize],
                )
                .await;
            continue;
        }
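For reference (sketch, not part of the diff; `connection`, `key`, `json_result`, and `cache_ttl` are stand-ins), each command queued in the pipeline above behaves like the single redis-rs call below: set only if the key does not yet exist (NX), return the previous value (GET), and expire after the configured TTL (EX).

    let opts = SetOptions::default()
        .conditional_set(ExistenceCheck::NX)
        .get(true)
        .with_expiration(SetExpiry::EX(cache_ttl.into()));
    let _: Option<String> = connection.set_options(key, json_result, opts).await?;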

@ -1,16 +1,15 @@
//! This module provides the functionality to parse the lua config and convert the config options
//! into rust readable form.

use crate::handler::paths::{file_path, FileType};
use crate::handler::{file_path, FileType};

use crate::models::engine_models::{EngineError, EngineHandler};
use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
use log::LevelFilter;
use mlua::Lua;
use reqwest::Proxy;
use std::{collections::HashMap, fs, thread::available_parallelism};

/// A named struct which stores the parsed config file options.
#[derive(Clone)]
pub struct Config {
    /// It stores the parsed port number option on which the server should launch.
    pub port: u16,

@ -22,23 +21,40 @@ pub struct Config {
    /// It stores the redis connection url address on which the redis
    /// client should connect.
    pub redis_url: String,
    #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
    /// It stores the max TTL for search results in cache.
    pub cache_expiry_time: u16,
    /// It stores the option to enable or disable production use.
    pub aggregator: AggregatorConfig,
    /// It stores the option to enable or disable logs.
    pub logging: bool,
    /// It stores the option to enable or disable debug mode.
    pub debug: bool,
    /// It toggles whether to use adaptive HTTP windows
    pub adaptive_window: bool,
    /// It stores all the engine names that were enabled by the user.
    pub upstream_search_engines: Vec<EngineHandler>,
    pub upstream_search_engines: HashMap<String, bool>,
    /// It stores the time (secs) which controls the server request timeout.
    pub request_timeout: u8,
    /// It stores the number of threads which the app will use to run.
    pub threads: u8,
    /// Set the keep-alive time for client connections to the HTTP server
    pub client_connection_keep_alive: u8,
    /// It stores configuration options for the ratelimiting middleware.
    pub rate_limiter: RateLimiter,
    /// It stores the level of safe search to be used for restricting content in the
    /// search results.
    pub safe_search: u8,
    /// It stores the TCP connection keepalive duration in seconds.
    pub tcp_connection_keep_alive: u8,
    /// It stores the pool idle connection timeout in seconds.
    pub pool_idle_connection_timeout: u8,
    /// Url of the proxy to use for outgoing requests.
    pub proxy: Option<Proxy>,
    /// It stores the number of https connections to keep in the pool.
    pub number_of_https_connections: u8,
    /// It stores the option to use the operating system's TLS certificates for https requests.
    pub operating_system_tls_certificates: bool,
}

impl Config {

@ -48,7 +64,7 @@ impl Config {
    /// # Arguments
    ///
    /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
    ///   initialized twice. Pass false if the logger has not yet been initialized.
    ///
    /// # Error
    ///

@ -62,10 +78,11 @@ impl Config {
        lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
            .exec()?;

        let parsed_threads: u8 = globals.get::<_, u8>("threads")?;
        let parsed_threads: u8 = globals.get("threads")?;

        let debug: bool = globals.get::<_, bool>("debug")?;
        let logging: bool = globals.get::<_, bool>("logging")?;
        let debug: bool = globals.get("debug")?;
        let logging: bool = globals.get("logging")?;
        let adaptive_window: bool = globals.get("adaptive_window")?;

        if !logging_initialized {
            set_logging_level(debug, logging);

@ -82,9 +99,9 @@ impl Config {
            parsed_threads
        };

        let rate_limiter = globals.get::<_, HashMap<String, u8>>("rate_limiter")?;
        let rate_limiter: HashMap<String, u8> = globals.get("rate_limiter")?;

        let parsed_safe_search: u8 = globals.get::<_, u8>("safe_search")?;
        let parsed_safe_search: u8 = globals.get::<_>("safe_search")?;
        let safe_search: u8 = match parsed_safe_search {
            0..=4 => parsed_safe_search,
            _ => {

@ -94,33 +111,61 @@ impl Config {
            }
        };

        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
        let parsed_cet = globals.get::<_>("cache_expiry_time")?;
        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
        let cache_expiry_time = match parsed_cet {
            0..=59 => {
                log::error!(
                    "Config Error: The value of `cache_expiry_time` must be greater than or equal to 60"
                );
                log::error!("Falling back to using the value `60` for the option");
                60
            }
            _ => parsed_cet,
        };

        let proxy_opt: Option<String> = globals.get::<_>("proxy")?;
        let proxy = proxy_opt.and_then(|proxy_str| {
            Proxy::all(proxy_str)
                .map_err(|_| log::error!("Invalid proxy url, defaulting to no proxy."))
                .ok()
        });

        Ok(Config {
            port: globals.get::<_, u16>("port")?,
            binding_ip: globals.get::<_, String>("binding_ip")?,
            operating_system_tls_certificates: globals
                .get::<_>("operating_system_tls_certificates")?,
            port: globals.get::<_>("port")?,
            binding_ip: globals.get::<_>("binding_ip")?,
            style: Style::new(
                globals.get::<_, String>("theme")?,
                globals.get::<_, String>("colorscheme")?,
                globals.get::<_>("theme")?,
                globals.get::<_>("colorscheme")?,
                globals.get::<_>("animation")?,
            ),
            #[cfg(feature = "redis-cache")]
            redis_url: globals.get::<_, String>("redis_url")?,
            redis_url: globals.get::<_>("redis_url")?,
            aggregator: AggregatorConfig {
                random_delay: globals.get::<_, bool>("production_use")?,
                random_delay: globals.get::<_>("production_use")?,
            },
            logging,
            debug,
            upstream_search_engines: globals
                .get::<_, HashMap<String, bool>>("upstream_search_engines")?
                .into_iter()
                .filter_map(|(key, value)| value.then_some(key))
                .map(|engine| EngineHandler::new(&engine))
                .collect::<Result<Vec<EngineHandler>, error_stack::Report<EngineError>>>()?,
            request_timeout: globals.get::<_, u8>("request_timeout")?,
            adaptive_window,
            upstream_search_engines: globals.get::<_>("upstream_search_engines")?,
            request_timeout: globals.get::<_>("request_timeout")?,
            tcp_connection_keep_alive: globals.get::<_>("tcp_connection_keep_alive")?,
            pool_idle_connection_timeout: globals.get::<_>("pool_idle_connection_timeout")?,
            number_of_https_connections: globals.get::<_>("number_of_https_connections")?,
            threads,
            client_connection_keep_alive: globals.get::<_>("client_connection_keep_alive")?,
            rate_limiter: RateLimiter {
                number_of_requests: rate_limiter["number_of_requests"],
                time_limit: rate_limiter["time_limit"],
            },
            safe_search,
            #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
            cache_expiry_time,
            proxy,
        })
    }
}

src/engines/bing.rs
@ -0,0 +1,122 @@
//! The `bing` module handles the scraping of results from the bing search engine
//! by querying the upstream bing search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use regex::Regex;
use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::common::build_cookie;
use super::search_result_parser::SearchResultParser;

/// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Bing {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Bing {
    /// Creates the Bing parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".b_results",
                ".b_algo",
                "h2 a",
                ".tpcn a.tilk",
                ".b_caption p",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Bing {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Bing uses `start results from this number` convention
        // So, for 10 results per page, page 0 starts at 1, page 1
        // starts at 11, and so on.
        let results_per_page = 10;
        let start_result = results_per_page * page + 1;

        let url: String = match page {
            0 => {
                format!("https://www.bing.com/search?q={query}")
            }
            _ => {
                format!("https://www.bing.com/search?q={query}&first={start_result}")
            }
        };

        let query_params: Vec<(&str, &str)> = vec![
            ("_EDGE_V", "1"),
            ("SRCHD=AF", "NOFORM"),
            ("_Rwho=u", "d"),
            ("bngps=s", "0"),
            ("_UR=QS=0&TQS", "0"),
            ("_UR=QS=0&TQS", "0"),
        ];

        let cookie_string = build_cookie(&query_params);

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), cookie_string),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Bing::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        // Bing is very aggressive in finding matches
        // even with the most absurd of queries. ".b_algo" is the
        // class for the list item of results
        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
            if no_result_msg
                .value()
                .attr("class")
                .map(|classes| classes.contains("b_algo"))
                .unwrap_or(false)
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        let re_span = Regex::new(r#"<span.*?>.*?(?:</span> ·|</span>)"#).unwrap();
        let re_strong = Regex::new(r#"(<strong>|</strong>)"#).unwrap();

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    &re_strong.replace_all(title.inner_html().trim(), ""),
                    url.value().attr("href").unwrap(),
                    &re_span.replace_all(desc.inner_html().trim(), ""),
                    &["bing"],
                ))
            })
    }
}
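A quick illustration (not part of the diff) of what the `<strong>` cleanup regex above does to a raw Bing title before it is stored in a `SearchResult`:

    let re_strong = Regex::new(r#"(<strong>|</strong>)"#).unwrap();
    assert_eq!(
        re_strong.replace_all("The <strong>Rust</strong> book", ""),
        "The Rust book"
    );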

@ -4,7 +4,7 @@

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::{header::HeaderMap, Client};
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

@ -42,9 +42,9 @@ impl SearchEngine for Brave {
        query: &str,
        page: u32,
        user_agent: &str,
        request_timeout: u8,
        client: &Client,
        safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let url = format!("https://search.brave.com/search?q={query}&offset={page}");

        let safe_search_level = match safe_search {

@ -54,21 +54,21 @@ impl SearchEngine for Brave {
        };

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("USER_AGENT".to_string(), user_agent.to_string()),
            ("User-Agent".to_string(), user_agent.to_string()),
            (
                "CONTENT_TYPE".to_string(),
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("REFERER".to_string(), "https://google.com/".to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "COOKIE".to_string(),
                "Cookie".to_string(),
                format!("safe_search={safe_search_level}"),
            ),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Brave::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
            &Brave::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {

src/engines/common.rs
@ -0,0 +1,23 @@
//! This module provides common functionalities for engines

/**
 * Build a query from a list of key value pairs.
 */
pub fn build_query(query_params: &[(&str, &str)]) -> String {
    let mut query_params_string = String::new();
    for (k, v) in query_params {
        query_params_string.push_str(&format!("&{k}={v}"));
    }
    query_params_string
}

/**
 * Build a cookie from a list of key value pairs.
 */
pub fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
    let mut cookie_string = String::new();
    for (k, v) in cookie_params {
        cookie_string.push_str(&format!("{k}={v}; "));
    }
    cookie_string
}
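A small usage sketch (not part of the diff) showing the exact strings these helpers produce; note that `build_query` prefixes every pair with `&` and `build_cookie` leaves a trailing `; `:

    assert_eq!(build_query(&[("limit", "20"), ("ns0", "1")]), "&limit=20&ns0=1");
    assert_eq!(build_cookie(&[("kl", "wt-wt")]), "kl=wt-wt; ");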

@ -5,6 +5,7 @@
use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

@ -28,8 +29,8 @@ impl DuckDuckGo {
        Ok(Self {
            parser: SearchResultParser::new(
                ".no-results",
                ".result",
                ".result__a",
                ".results>.result",
                ".result__title>.result__a",
                ".result__url",
                ".result__snippet",
            )?,

@ -44,39 +45,38 @@ impl SearchEngine for DuckDuckGo {
        query: &str,
        page: u32,
        user_agent: &str,
        request_timeout: u8,
        client: &Client,
        _safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that upstream server receives valid page number.
        let url: String = match page {
            1 | 0 => {
            0 => {
                format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js")
            }
            _ => {
                format!(
                    "https://duckduckgo.com/html/?q={}&s={}&dc={}&v=1&o=json&api=/d.js",
                    query,
                    (page / 2 + (page % 2)) * 30,
                    (page / 2 + (page % 2)) * 30 + 1
                    "https://duckduckgo.com/html/?q={query}&s={}&dc={}&v=1&o=json&api=/d.js",
                    page * 30,
                    page * 30 + 1
                )
            }
        };

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("USER_AGENT".to_string(), user_agent.to_string()),
            ("REFERER".to_string(), "https://google.com/".to_string()),
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "CONTENT_TYPE".to_string(),
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("COOKIE".to_string(), "kl=wt-wt".to_string()),
            ("Cookie".to_string(), "kl=wt-wt".to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {

src/engines/librex.rs
@ -0,0 +1,104 @@
//! The `librex` module contains the implementation of a search engine for LibreX using the reqwest and scraper libraries.
//! It includes a `SearchEngine` trait implementation for interacting with the search engine and retrieving search results.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::search_result_parser::SearchResultParser;

/// Represents the LibreX search engine.
pub struct LibreX {
    /// The parser used to extract search results from HTML documents.
    parser: SearchResultParser,
}

impl LibreX {
    /// Creates a new instance of LibreX with a default configuration.
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing `LibreX` if successful, otherwise an `EngineError`.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".text-result-container>p",
                ".text-result-wrapper",
                ".text-result-wrapper>a>h2",
                ".text-result-wrapper>a",
                ".text-result-wrapper>span",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for LibreX {
    /// Retrieves search results from LibreX based on the provided query, page, user agent, and client.
    ///
    /// # Arguments
    ///
    /// * `query` - The search query.
    /// * `page` - The page number for pagination.
    /// * `user_agent` - The user agent string.
    /// * `client` - The reqwest client for making HTTP requests.
    /// * `_safe_search` - A parameter for safe search (not currently used).
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing a `Vec` of `(String, SearchResult)` pairs if successful, otherwise an `EngineError`.
    /// The `Err` variant is explicit for better documentation.
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that upstream server receives valid page number.
        let url: String = format!(
            "https://search.ahwx.org/search.php?q={query}&p={}&t=10",
            page * 10
        );

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            ("Content-Type".to_string(), "application/x-www-form-urlencoded".to_string()),
            (
                "Cookie".to_string(),
                "theme=amoled; disable_special=on; disable_frontends=on; language=en; number_of_results=10; safe_search=on; save=1".to_string(),
            ),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &LibreX::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.inner_html().trim(),
                    desc.inner_html().trim(),
                    &["librex"],
                ))
            })
    }
}

@ -3,7 +3,13 @@
//! provide standard functions to be implemented for all the upstream search engine handling
//! code. Moreover, it also provides a custom error for the upstream search engine handling code.

pub mod bing;
pub mod brave;
pub mod common;
pub mod duckduckgo;
pub mod librex;
pub mod mojeek;
pub mod search_result_parser;
pub mod searx;
pub mod startpage;
pub mod wikipedia;

src/engines/mojeek.rs
@ -0,0 +1,161 @@
//! The `mojeek` module handles the scraping of results from the mojeek search engine
//! by querying the upstream mojeek search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::common::{build_cookie, build_query};
use super::search_result_parser::SearchResultParser;

/// A new Mojeek engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Mojeek {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Mojeek {
    /// Creates the Mojeek parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".result-col",
                ".results-standard li",
                "h2 > a.title",
                "a.ob",
                "p.s",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Mojeek {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Mojeek uses `start results from this number` convention
        // So, for 10 results per page, page 0 starts at 1, page 1
        // starts at 11, and so on.
        let results_per_page = 10;
        let start_result = results_per_page * page + 1;

        let results_per_page = results_per_page.to_string();
        let start_result = start_result.to_string();

        let search_engines = vec![
            "Bing",
            "Brave",
            "DuckDuckGo",
            "Ecosia",
            "Google",
            "Lilo",
            "Metager",
            "Qwant",
            "Startpage",
            "Swisscows",
            "Yandex",
            "Yep",
            "You",
        ];

        let qss = search_engines.join("%2C");

        // A branchless condition to check whether the `safe_search` parameter has the
        // value 0 or not. If it is zero then it sets the value 0 otherwise it sets
        // the value to 1 for all other values of `safe_search`.
        //
        // Moreover, the below branchless code is equivalent to the following code below:
        //
        // ```rust
        // let safe = if safe_search == 0 { 0 } else { 1 }.to_string();
        // ```
        //
        // For more information on branchless programming, see:
        //
        // * https://piped.video/watch?v=bVJ-mWWL7cE
        let safe = u8::from(safe_search != 0).to_string();

        // Mojeek detects automated requests; these are preferences that are
        // able to circumvent the countermeasure. Some of these are
        // not documented in their Search API
        let query_params: Vec<(&str, &str)> = vec![
            ("t", results_per_page.as_str()),
            ("theme", "dark"),
            ("arc", "none"),
            ("date", "1"),
            ("cdate", "1"),
            ("tlen", "100"),
            ("ref", "1"),
            ("hp", "minimal"),
            ("lb", "en"),
            ("qss", &qss),
            ("safe", &safe),
        ];

        let query_params_string = build_query(&query_params);

        let url: String = match page {
            0 => {
                format!("https://www.mojeek.com/search?q={query}{query_params_string}")
            }
            _ => {
                format!(
                    "https://www.mojeek.com/search?q={query}&s={start_result}{query_params_string}"
                )
            }
        };

        let cookie_string = build_cookie(&query_params);

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), cookie_string),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Mojeek::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
            if no_result_msg
                .inner_html()
                .contains("No pages found matching:")
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.attr("href")?.trim(),
                    desc.inner_html().trim(),
                    &["mojeek"],
                ))
            })
    }
}

@ -1,5 +1,4 @@
//! This module provides helper functionalities for parsing a html document into internal SearchResult.
use std::collections::HashMap;

use crate::models::{aggregation_models::SearchResult, engine_models::EngineError};
use error_stack::{Report, Result};

@ -47,7 +46,7 @@ impl SearchResultParser {
        &self,
        document: &Html,
        builder: impl Fn(&ElementRef<'_>, &ElementRef<'_>, &ElementRef<'_>) -> Option<SearchResult>,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let res = document
            .select(&self.results)
            .filter_map(|result| {

@ -3,6 +3,7 @@
//! number if provided.

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;
use std::collections::HashMap;

@ -40,35 +41,40 @@ impl SearchEngine for Searx {
        query: &str,
        page: u32,
        user_agent: &str,
        request_timeout: u8,
        client: &Client,
        mut safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that upstream server receives valid page number.
        if safe_search == 3 {
            safe_search = 2;
        };
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // A branchless condition to check whether the `safe_search` parameter has a
        // value greater than or equal to three or not. If it is, then it modifies the
        // `safe_search` parameter's value to 2.
        //
        // Moreover, the below branchless code is equivalent to the following code below:
        //
        // ```rust
        // safe_search = if safe_search >= 3 { 2 } else { 0 };
        // ```
        //
        // For more information on branchless programming, see:
        //
        // * https://piped.video/watch?v=bVJ-mWWL7cE
        safe_search = u8::from(safe_search >= 3) * 2;

        let url: String = match page {
            0 | 1 => {
                format!("https://searx.work/search?q={query}&pageno=1&safesearch={safe_search}")
            }
            _ => format!(
                "https://searx.work/search?q={query}&pageno={page}&safesearch={safe_search}"
            ),
        };
        let url: String = format!(
            "https://searx.be/search?q={query}&pageno={}&safesearch={safe_search}",
            page + 1
        );

        // initializing headers and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("USER_AGENT".to_string(), user_agent.to_string()),
            ("REFERER".to_string(), "https://google.com/".to_string()),
            ("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()),
            ("COOKIE".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            ("Content-Type".to_string(), "application/x-www-form-urlencoded".to_string()),
            ("Cookie".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
            &Searx::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(1) {

src/engines/startpage.rs
@ -0,0 +1,89 @@
//! The `startpage` module handles the scraping of results from the startpage search engine
//! by querying the upstream startpage search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::search_result_parser::SearchResultParser;

/// A new Startpage engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Startpage {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Startpage {
    /// Creates the Startpage parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".no-results",
                ".w-gl__result__main",
                ".w-gl__result-second-line-container>.w-gl__result-title>h3",
                ".w-gl__result-url",
                ".w-gl__description",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Startpage {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that upstream server receives valid page number.
        let url: String = format!(
            "https://startpage.com/do/dsearch?q={query}&num=10&start={}",
            page * 10,
        );

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), "preferences=connect_to_serverEEE0N1Ndate_timeEEEworldN1Ndisable_family_filterEEE0N1Ndisable_open_in_new_windowEEE0N1Nenable_post_methodEEE1N1Nenable_proxy_safety_suggestEEE1N1Nenable_stay_controlEEE0N1Ninstant_answersEEE1N1Nlang_homepageEEEs%2Fnight%2FenN1NlanguageEEEenglishN1Nlanguage_uiEEEenglishN1Nnum_of_resultsEEE10N1Nsearch_results_regionEEEallN1NsuggestionsEEE1N1Nwt_unitEEEcelsius".to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Startpage::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.inner_html().trim(),
                    desc.inner_html().trim(),
                    &["startpage"],
                ))
            })
    }
}

src/engines/wikipedia.rs
@ -0,0 +1,101 @@
//! The `wikipedia` module handles the scraping of results from wikipedia
//! with user provided query and with a page number if provided.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::common::build_query;
use super::search_result_parser::SearchResultParser;

/// A new Wikipedia engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Wikipedia {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
    /// The id of the engine, equals to 'wikipedia-' + language
    id: String,
    /// The host where wikipedia can be accessed.
    host: String,
}

impl Wikipedia {
    /// Creates the Wikipedia parser.
    pub fn new(language: &str) -> Result<Self, EngineError> {
        let host = format!("https://{}.wikipedia.org", &language);
        let id = format!("wikipedia-{}", &language);
        Ok(Self {
            parser: SearchResultParser::new(
                "p.mw-search-nonefound",
                ".mw-search-results li.mw-search-result",
                ".mw-search-result-heading a",
                ".mw-search-result-heading a",
                ".searchresult",
            )?,
            id,
            host,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Wikipedia {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), self.host.to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let offset = (page * 20).to_string();
        let query_params: Vec<(&str, &str)> = vec![
            ("limit", "20"),
            ("offset", &offset),
            ("profile", "default"),
            ("search", query),
            ("title", "Special:Search"),
            ("ns0", "1"),
        ];

        let query_params_string = build_query(&query_params);

        let url: String = format!("{}/w/index.php?{}", self.host, query_params_string);
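        // Because build_query prefixes every pair with `&`, the generated URL
        // has the shape `.../w/index.php?&limit=20&offset=0&...`; the empty
        // leading parameter is harmless and ignored by the server.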
|
||||
|
||||
let document: Html = Html::parse_document(
|
||||
&Wikipedia::fetch_html_from_upstream(self, &url, header_map, client).await?,
|
||||
);
|
||||
|
||||
if self.parser.parse_for_no_results(&document).next().is_some() {
|
||||
return Err(Report::new(EngineError::EmptyResultSet));
|
||||
}
|
||||
|
||||
// scrape all the results from the html
|
||||
self.parser
|
||||
.parse_for_results(&document, |title, url, desc| {
|
||||
let found_url = url.attr("href");
|
||||
found_url.map(|relative_url| {
|
||||
SearchResult::new(
|
||||
title.inner_html().trim(),
|
||||
&format!("{}{relative_url}", self.host),
|
||||
desc.inner_html().trim(),
|
||||
&[&self.id],
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
}
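For orientation, a minimal sketch of how the new engine would be constructed and queried outside the aggregator. The query string, page number, and user agent are illustrative assumptions, and the `websurfx::engines`/`websurfx::models` paths assume the crate exports these modules publicly, as the rest of this changeset suggests:

```rust
use reqwest::Client;
use websurfx::engines::wikipedia::Wikipedia;
use websurfx::models::engine_models::SearchEngine;

#[tokio::main]
async fn main() {
    // "en" makes the host https://en.wikipedia.org and the id "wikipedia-en".
    let engine = Wikipedia::new("en").expect("selectors should compile");
    let client = Client::new();
    // Fetch the first page (offset 0) of results for an example query.
    let results = engine
        .results("rust programming", 0, "Mozilla/5.0", &client, 0)
        .await
        .expect("upstream request should succeed");
    for (url, result) in results {
        println!("{url} -> {}", result.title);
    }
}
```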
@ -1,5 +1,115 @@
//! This module provides modules which provide the functionality to handle paths for different
//! files present on different paths and provide one appropriate path on which it is present and
//! can be used.
//! This module provides the functionality to handle theme folder present on different paths and
//! provide one appropriate path on which it is present and can be used.

pub mod paths;
use std::collections::HashMap;
use std::io::Error;
use std::path::Path;
use std::sync::OnceLock;

// ------- Constants --------
/// The constant holding the name of the theme folder.
const PUBLIC_DIRECTORY_NAME: &str = "public";
/// The constant holding the name of the common folder.
const COMMON_DIRECTORY_NAME: &str = "websurfx";
/// The constant holding the name of the config file.
const CONFIG_FILE_NAME: &str = "config.lua";
/// The constant holding the name of the AllowList text file.
const ALLOWLIST_FILE_NAME: &str = "allowlist.txt";
/// The constant holding the name of the BlockList text file.
const BLOCKLIST_FILE_NAME: &str = "blocklist.txt";

/// An enum type which provides different variants to handle paths for various files/folders.
#[derive(Hash, PartialEq, Eq, Debug)]
pub enum FileType {
    /// This variant handles all the paths associated with the config file.
    Config,
    /// This variant handles all the paths associated with the Allowlist text file.
    AllowList,
    /// This variant handles all the paths associated with the BlockList text file.
    BlockList,
    /// This variant handles all the paths associated with the public folder (Theme folder).
    Theme,
}

/// A static variable which stores the different filesystem paths for various file/folder types.
static FILE_PATHS_FOR_DIFF_FILE_TYPES: OnceLock<HashMap<FileType, Vec<String>>> = OnceLock::new();

/// A function which returns an appropriate path for the provided file type by checking if the
/// file for the given file type exists on that path.
///
/// # Error
///
/// Returns a `<File Name> folder/file not found!!` error if the given file_type folder/file is not
/// present on the path on which it is being tested.
///
/// # Example
///
/// If this function is given the file_type of the Theme variant then the theme folder is checked by the
/// following steps:
///
/// 1. `/opt/websurfx`; if it is not present here then it falls back to the next one (2)
/// 2. Under the project folder (or codebase, in other words); if it is not present
///    here then it returns an error as mentioned above.
pub fn file_path(file_type: FileType) -> Result<&'static str, Error> {
    let home = env!("HOME");

    let file_path: &Vec<String> = FILE_PATHS_FOR_DIFF_FILE_TYPES
        .get_or_init(|| {
            HashMap::from([
                (
                    FileType::Config,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            home, COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
                    ],
                ),
                (
                    FileType::Theme,
                    vec![
                        format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME),
                        format!("./{}/", PUBLIC_DIRECTORY_NAME),
                    ],
                ),
                (
                    FileType::AllowList,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            home, COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
                    ],
                ),
                (
                    FileType::BlockList,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            home, COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
                    ],
                ),
            ])
        })
        .get(&file_type)
        .unwrap();

    for path in file_path.iter() {
        if Path::new(path).exists() {
            return Ok(path);
        }
    }

    // if none of the paths above exist, return an error
    Err(Error::new(
        std::io::ErrorKind::NotFound,
        format!("{:?} file/folder not found!!", file_type),
    ))
}
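A quick usage sketch of the resolver above; the printed path depends on which candidate actually exists on the machine, and the `websurfx::handler` import path is an assumption based on the re-export in `src/lib.rs` later in this changeset:

```rust
use websurfx::handler::{file_path, FileType};

fn main() -> Result<(), std::io::Error> {
    // Resolves to the first existing candidate, e.g. ~/.config/websurfx/config.lua,
    // then /etc/xdg/websurfx/config.lua, then ./websurfx/config.lua.
    let config_path: &'static str = file_path(FileType::Config)?;
    println!("using config at {config_path}");
    Ok(())
}
```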
@ -1,119 +0,0 @@
//! This module provides the functionality to handle theme folder present on different paths and
//! provide one appropriate path on which it is present and can be used.

use std::collections::HashMap;
use std::io::Error;
use std::path::Path;
use std::sync::OnceLock;

// ------- Constants --------
/// The constant holding the name of the theme folder.
const PUBLIC_DIRECTORY_NAME: &str = "public";
/// The constant holding the name of the common folder.
const COMMON_DIRECTORY_NAME: &str = "websurfx";
/// The constant holding the name of the config file.
const CONFIG_FILE_NAME: &str = "config.lua";
/// The constant holding the name of the AllowList text file.
const ALLOWLIST_FILE_NAME: &str = "allowlist.txt";
/// The constant holding the name of the BlockList text file.
const BLOCKLIST_FILE_NAME: &str = "blocklist.txt";

/// An enum type which provides different variants to handle paths for various files/folders.
#[derive(Hash, PartialEq, Eq, Debug)]
pub enum FileType {
    /// This variant handles all the paths associated with the config file.
    Config,
    /// This variant handles all the paths associated with the Allowlist text file.
    AllowList,
    /// This variant handles all the paths associated with the BlockList text file.
    BlockList,
    /// This variant handles all the paths associated with the public folder (Theme folder).
    Theme,
}

/// A static variable which stores the different filesystem paths for various file/folder types.
static FILE_PATHS_FOR_DIFF_FILE_TYPES: OnceLock<HashMap<FileType, Vec<String>>> = OnceLock::new();

/// A function which returns an appropriate path for the provided file type by checking if the
/// file for the given file type exists on that path.
///
/// # Error
///
/// Returns a `<File Name> folder/file not found!!` error if the given file_type folder/file is not
/// present on the path on which it is being tested.
///
/// # Example
///
/// If this function is given the file_type of the Theme variant then the theme folder is checked by the
/// following steps:
///
/// 1. `/opt/websurfx`; if it is not present here then it falls back to the next one (2)
/// 2. Under the project folder (or codebase, in other words); if it is not present
///    here then it returns an error as mentioned above.
pub fn file_path(file_type: FileType) -> Result<&'static str, Error> {
    let file_path: &Vec<String> = FILE_PATHS_FOR_DIFF_FILE_TYPES
        .get_or_init(|| {
            HashMap::from([
                (
                    FileType::Config,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            std::env::var("HOME").unwrap(),
                            COMMON_DIRECTORY_NAME,
                            CONFIG_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
                    ],
                ),
                (
                    FileType::Theme,
                    vec![
                        format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME),
                        format!("./{}/", PUBLIC_DIRECTORY_NAME),
                    ],
                ),
                (
                    FileType::AllowList,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            std::env::var("HOME").unwrap(),
                            COMMON_DIRECTORY_NAME,
                            ALLOWLIST_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
                    ],
                ),
                (
                    FileType::BlockList,
                    vec![
                        format!(
                            "{}/.config/{}/{}",
                            std::env::var("HOME").unwrap(),
                            COMMON_DIRECTORY_NAME,
                            BLOCKLIST_FILE_NAME
                        ),
                        format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
                        format!("./{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
                    ],
                ),
            ])
        })
        .get(&file_type)
        .unwrap();

    for (idx, _) in file_path.iter().enumerate() {
        if Path::new(file_path[idx].as_str()).exists() {
            return Ok(std::mem::take(&mut &*file_path[idx]));
        }
    }

    // if none of the paths above exist, return an error
    Err(Error::new(
        std::io::ErrorKind::NotFound,
        format!("{:?} file/folder not found!!", file_type),
    ))
}
71 src/lib.rs
@ -12,19 +12,27 @@ pub mod handler;
pub mod models;
pub mod results;
pub mod server;
pub mod templates;

use std::net::TcpListener;
use std::{net::TcpListener, sync::OnceLock, time::Duration};

use crate::server::router;

use actix_cors::Cors;
use actix_files as fs;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{dev::Server, http::header, middleware::Logger, web, App, HttpServer};
use cache::cacher::{Cache, SharedCache};
use actix_web::{
    dev::Server,
    http::header,
    middleware::{Compress, Logger},
    web, App, HttpServer,
};
use cache::cacher::{Cacher, SharedCache};
use config::parser::Config;
use handlebars::Handlebars;
use handler::paths::{file_path, FileType};
use handler::{file_path, FileType};

/// A static constant for holding the cache struct.
static SHARED_CACHE: OnceLock<SharedCache> = OnceLock::new();

/// Runs the web server on the provided TCP listener and returns a `Server` instance.
///

@ -39,28 +47,29 @@ use handler::paths::{file_path, FileType};
/// # Example
///
/// ```rust
/// use std::net::TcpListener;
/// use websurfx::{config::parser::Config, run, cache::cacher::Cache};
/// use std::{net::TcpListener, sync::OnceLock};
/// use websurfx::{config::parser::Config, run, cache::cacher::create_cache};
///
/// let config = Config::parse(true).unwrap();
/// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
/// let cache = Cache::new_in_memory();
/// let server = run(listener,config,cache).expect("Failed to start server");
/// /// A static constant for holding the parsed config.
/// static CONFIG: OnceLock<Config> = OnceLock::new();
///
/// #[tokio::main]
/// async fn main(){
///     // Initialize the parsed config globally.
///     let config = CONFIG.get_or_init(|| Config::parse(true).unwrap());
///     let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
///     let cache = create_cache(config).await;
///     let server = run(listener,&config,cache).expect("Failed to start server");
/// }
/// ```
pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Result<Server> {
    let mut handlebars: Handlebars<'_> = Handlebars::new();

pub fn run(
    listener: TcpListener,
    config: &'static Config,
    cache: impl Cacher + 'static,
) -> std::io::Result<Server> {
    let public_folder_path: &str = file_path(FileType::Theme)?;

    handlebars
        .register_templates_directory(".html", format!("{}/templates", public_folder_path))
        .unwrap();

    let handlebars_ref: web::Data<Handlebars<'_>> = web::Data::new(handlebars);

    let cloned_config_threads_opt: u8 = config.threads;

    let cache = web::Data::new(SharedCache::new(cache));
    let cache = SHARED_CACHE.get_or_init(|| SharedCache::new(cache));

    let server = HttpServer::new(move || {
        let cors: Cors = Cors::default()

@ -74,14 +83,15 @@ pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Resu
        ]);

        App::new()
            // Compress the responses provided by the server for the client requests.
            .wrap(Compress::default())
            .wrap(Logger::default()) // added logging middleware for logging.
            .app_data(handlebars_ref.clone())
            .app_data(web::Data::new(config.clone()))
            .app_data(cache.clone())
            .app_data(web::Data::new(config))
            .app_data(web::Data::new(cache))
            .wrap(cors)
            .wrap(Governor::new(
                &GovernorConfigBuilder::default()
                    .per_second(config.rate_limiter.time_limit as u64)
                    .seconds_per_request(config.rate_limiter.time_limit as u64)
                    .burst_size(config.rate_limiter.number_of_requests as u32)
                    .finish()
                    .unwrap(),

@ -100,9 +110,14 @@ pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Resu
            .service(server::routes::search::search) // search page
            .service(router::about) // about page
            .service(router::settings) // settings page
            .service(server::routes::export_import::download) // download page
            .default_service(web::route().to(router::not_found)) // error page
    })
    .workers(cloned_config_threads_opt as usize)
    .workers(config.threads as usize)
    // Set the keep-alive timer for client connections
    .keep_alive(Duration::from_secs(
        config.client_connection_keep_alive as u64,
    ))
    // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
    .listen(listener)?
    .run();
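The reason the new `config: &'static Config` and `SHARED_CACHE` signatures work is that `OnceLock::get_or_init` hands back a reference tied to the static's lifetime. A minimal standalone sketch of that pattern (the struct and values here are illustrative, not taken from the diff):

```rust
use std::sync::OnceLock;

struct Config {
    threads: u8,
}

static CONFIG: OnceLock<Config> = OnceLock::new();

fn load_config() -> &'static Config {
    // The returned reference borrows from the static, so it is 'static and
    // can be handed to APIs (like HttpServer's closures) that outlive the caller.
    CONFIG.get_or_init(|| Config { threads: 4 })
}

fn main() {
    let config: &'static Config = load_config();
    println!("threads = {}", config.threads);
}
```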
@ -1,15 +1,19 @@
//! This module provides public models for handling, storing and serializing of search results
//! data scraped from the upstream search engines.

use super::engine_models::EngineError;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;

use super::{engine_models::EngineError, parser_models::Style};

#[cfg(any(
    feature = "use-synonyms-search",
    feature = "use-non-static-synonyms-search"
))]
use thesaurus::synonyms;
/// A named struct to store the raw scraped search results from the
/// upstream search engines before aggregating them. It derives the Clone trait which is needed
/// to write idiomatic rust using `Iterators`.
/// (href url in html in simple words).
///
/// (href url in html in simple words).
///
#[derive(Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchResult {

@ -20,7 +24,9 @@ pub struct SearchResult {
    /// The description of the search result.
    pub description: String,
    /// The names of the upstream engines from which these results were provided.
    pub engine: SmallVec<[String; 0]>,
    pub engine: Vec<String>,
    /// The tf-idf score of the result with regard to the title, url, description and the user's query
    pub relevance_score: f32,
}

impl SearchResult {

@ -30,7 +36,7 @@ impl SearchResult {
    ///
    /// * `title` - The title of the search result.
    /// * `url` - The url which is accessed when clicked on it
    /// (href url in html in simple words).
    ///   (href url in html in simple words).
    /// * `description` - The description of the search result.
    /// * `engine` - The names of the upstream engines from which these results were provided.
    pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {

@ -38,9 +44,49 @@ impl SearchResult {
            title: title.to_owned(),
            url: url.to_owned(),
            description: description.to_owned(),
            relevance_score: 0.0,
            engine: engine.iter().map(|name| name.to_string()).collect(),
        }
    }

    /// Calculates and updates the relevance score of the current search result.
    ///
    /// # Arguments
    ///
    /// * `query` - the query string used to obtain the results
    pub fn calculate_relevance(&mut self, query: &str) {
        use stop_words::{get, LANGUAGE};
        // the language could later be made configurable to any of the ones supported by this crate: https://docs.rs/crate/stop-words/0.8.0
        let documents = [
            self.title.clone(),
            self.url.clone(),
            self.description.clone(),
        ];

        let stop_words = get(LANGUAGE::English);
        let punctuation = [
            ".".to_owned(),
            ",".to_owned(),
            ":".to_owned(),
            ";".to_owned(),
            "!".to_owned(),
            "?".to_owned(),
            "(".to_owned(),
            ")".to_owned(),
            "[".to_owned(),
            "]".to_owned(),
            "{".to_owned(),
            "}".to_owned(),
            "\"".to_owned(),
            "'".to_owned(),
            "<".to_owned(),
            ">".to_owned(),
        ];

        self.relevance_score = calculate_tf_idf(query, &documents, &stop_words, &punctuation);
    }
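A short usage sketch of the new scoring method, using the constructor and field shown in this hunk (the query and result values are illustrative):

```rust
use websurfx::models::aggregation_models::SearchResult;

fn main() {
    let mut result = SearchResult::new(
        "Rust Programming Language",
        "https://www.rust-lang.org/",
        "A language empowering everyone to build reliable and efficient software.",
        &["wikipedia-en"],
    );
    // Scores the title, url and description against the query and stores
    // the averaged tf-idf value in `relevance_score` (0.0 until called).
    result.calculate_relevance("rust language");
    println!("score = {}", result.relevance_score);
}
```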
    /// A function which adds the engine name provided as a string into a vector of strings.
    ///

@ -80,7 +126,7 @@ impl EngineErrorInfo {
    /// # Arguments
    ///
    /// * `error` - It takes the error type which occurred while fetching the result from a particular
    /// search engine.
    ///   search engine.
    /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
    pub fn new(error: &EngineError, engine: &str) -> Self {
        Self {

@ -108,14 +154,10 @@ impl EngineErrorInfo {
#[serde(rename_all = "camelCase")]
pub struct SearchResults {
    /// Stores the individual serializable `SearchResult` struct into a vector of
    pub results: Vec<SearchResult>,
    /// Stores the current page's search query `q` provided in the search url.
    pub page_query: String,
    /// Stores the theming options for the website.
    pub style: Style,
    pub results: Box<[SearchResult]>,
    /// Stores the information on which engines failed with their engine name
    /// and the type of error that caused it.
    pub engine_errors_info: Vec<EngineErrorInfo>,
    pub engine_errors_info: Box<[EngineErrorInfo]>,
    /// Stores the flag option which holds the check value that the following
    /// search query was disallowed when the safe search level was set to 4 and it
    /// was present in the `Blocklist` file.

@ -137,21 +179,15 @@ impl SearchResults {
    /// # Arguments
    ///
    /// * `results` - Takes an argument of individual serializable `SearchResult` struct
    /// and stores it into a vector of `SearchResult` structs.
    ///   and stores it into a vector of `SearchResult` structs.
    /// * `page_query` - Takes an argument of the current page's search query `q` provided in
    /// the search url.
    /// the search url.
    /// * `engine_errors_info` - Takes an array of structs which contains information regarding
    /// which engines failed with their names, reason and their severity color name.
    pub fn new(
        results: Vec<SearchResult>,
        page_query: &str,
        engine_errors_info: &[EngineErrorInfo],
    ) -> Self {
    ///   which engines failed with their names, reason and their severity color name.
    pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
        Self {
            results,
            page_query: page_query.to_owned(),
            style: Style::default(),
            engine_errors_info: engine_errors_info.to_owned(),
            engine_errors_info,
            disallowed: Default::default(),
            filtered: Default::default(),
            safe_search_level: Default::default(),

@ -159,32 +195,22 @@ impl SearchResults {
        }
    }

    /// A setter function to add website style to the returned search results.
    pub fn add_style(&mut self, style: &Style) {
        self.style = style.clone();
    }

    /// A setter function that sets disallowed to true.
    pub fn set_disallowed(&mut self) {
        self.disallowed = true;
    }

    /// A setter function to set the current page search query.
    pub fn set_page_query(&mut self, page: &str) {
        self.page_query = page.to_owned();
    }

    /// A setter function that sets the filtered flag.
    pub fn set_filtered(&mut self) {
        self.filtered = true;
    pub fn set_filtered(&mut self, filtered: bool) {
        self.filtered = filtered;
    }

    /// A getter function that gets the value of `engine_errors_info`.
    pub fn engine_errors_info(&mut self) -> Vec<EngineErrorInfo> {
    pub fn engine_errors_info(&mut self) -> Box<[EngineErrorInfo]> {
        std::mem::take(&mut self.engine_errors_info)
    }
    /// A getter function that gets the value of `results`.
    pub fn results(&mut self) -> Vec<SearchResult> {
    pub fn results(&mut self) -> Box<[SearchResult]> {
        self.results.clone()
    }

@ -203,3 +229,76 @@ impl SearchResults {
        self.no_engines_selected = true;
    }
}
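A sketch of the reshaped constructor and getters above; note that `engine_errors_info()` takes the boxed slice via `std::mem::take` while `results()` clones it. The values below are illustrative:

```rust
use websurfx::models::aggregation_models::{SearchResult, SearchResults};

fn main() {
    let results: Box<[SearchResult]> = vec![SearchResult::new(
        "Example Domain",
        "https://www.example.com",
        "This domain is for use in illustrative examples.",
        &["bing"],
    )]
    .into_boxed_slice();

    // Both parameters are now boxed slices; no errors occurred here.
    let mut aggregated = SearchResults::new(results, Box::new([]));

    assert_eq!(aggregated.results().len(), 1); // clones the slice
    assert!(aggregated.engine_errors_info().is_empty()); // takes the slice
}
```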
/// Helper function to calculate the tf-idf for the search query.
/// <br> The approach is as described [`here`](https://en.wikipedia.org/wiki/Tf%E2%80%93idf).
/// <br> Find a sample article about TF-IDF [`here`](https://medium.com/analytics-vidhya/tf-idf-term-frequency-technique-easiest-explanation-for-text-classification-in-nlp-with-code-8ca3912e58c3)
/// ### Arguments
/// * `query` - a user's search query
/// * `documents` - a list of text used for comparison (url, title, description)
/// * `stop_words` - A list of language specific stop words.
/// * `punctuation` - list of punctuation symbols.
/// ### Returns
/// * `score` - The average tf-idf score of the word tokens (and synonyms) in the query
fn calculate_tf_idf(
    query: &str,
    documents: &[String],
    stop_words: &[String],
    punctuation: &[String],
) -> f32 {
    use keyword_extraction::{
        tf_idf::{TfIdf, TfIdfParams},
        tokenizer::Tokenizer,
    };

    let params = TfIdfParams::UnprocessedDocuments(documents, stop_words, Some(punctuation));
    let tf_idf = TfIdf::new(params);
    let tokener = Tokenizer::new(query, stop_words, Some(punctuation));
    let query_tokens = tokener.split_into_words();

    #[cfg(any(
        feature = "use-synonyms-search",
        feature = "use-non-static-synonyms-search"
    ))]
    let mut extra_tokens = vec![];

    let total_score: f32 = query_tokens
        .iter()
        .map(|token| {
            #[cfg(any(
                feature = "use-synonyms-search",
                feature = "use-non-static-synonyms-search"
            ))]
            {
                // find some synonyms and add them to the search (from wordnet or moby if the feature is enabled)
                extra_tokens.extend(synonyms(token))
            }

            tf_idf.get_score(token)
        })
        .sum();

    #[cfg(not(any(
        feature = "use-synonyms-search",
        feature = "use-non-static-synonyms-search"
    )))]
    let result = total_score / (query_tokens.len() as f32);

    #[cfg(any(
        feature = "use-synonyms-search",
        feature = "use-non-static-synonyms-search"
    ))]
    let extra_total_score: f32 = extra_tokens
        .iter()
        .map(|token| tf_idf.get_score(token))
        .sum();

    #[cfg(any(
        feature = "use-synonyms-search",
        feature = "use-non-static-synonyms-search"
    ))]
    let result =
        (extra_total_score + total_score) / ((query_tokens.len() + extra_tokens.len()) as f32);

    f32::from(!result.is_nan()) * result
}
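In equation form, with the synonym features enabled the function returns the mean tf-idf weight over the set of query tokens T and their synonyms S, with the final line mapping a NaN mean (e.g. an empty token set) to zero:

```latex
\text{score}(q) =
\begin{cases}
\dfrac{\sum_{t \in T} \operatorname{tfidf}(t) \;+\; \sum_{s \in S} \operatorname{tfidf}(s)}{|T| + |S|}
  & \text{if the mean is defined} \\[1ex]
0 & \text{if the division yields } \mathrm{NaN}
\end{cases}
```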
@ -3,7 +3,8 @@

use super::aggregation_models::SearchResult;
use error_stack::{Report, Result, ResultExt};
use std::{collections::HashMap, fmt, time::Duration};
use reqwest::Client;
use std::fmt;

/// A custom error type used to handle engine-associated errors.
#[derive(Debug)]

@ -71,12 +72,11 @@ pub trait SearchEngine: Sync + Send {
        &self,
        url: &str,
        header_map: reqwest::header::HeaderMap,
        request_timeout: u8,
        client: &Client,
    ) -> Result<String, EngineError> {
        // fetch the html from upstream search engine
        Ok(reqwest::Client::new()
        Ok(client
            .get(url)
            .timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
            .headers(header_map) // add spoofed headers to emulate human behavior
            .send()
            .await

@ -86,6 +86,42 @@ pub trait SearchEngine: Sync + Send {
            .change_context(EngineError::RequestError)?)
    }

    /// This helper function fetches/requests the json search results from the upstream search engine as a vector of bytes.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes the url of the upstream search engine with the user requested search
    ///   query appended in the search parameters.
    /// * `header_map` - It takes the http request headers to be sent to the upstream engine in
    ///   order to prevent being detected as a bot. It takes the header as a HeaderMap type.
    /// * `request_timeout` - It takes the request timeout value as seconds which is used to limit
    ///   the amount of time for each request to remain connected until the results can be provided
    ///   by the upstream engine.
    ///
    /// # Error
    ///
    /// It returns the json data as a vector of bytes if the upstream engine provides the data as expected,
    /// otherwise it returns a custom `EngineError`.
    async fn fetch_json_as_bytes_from_upstream(
        &self,
        url: &str,
        header_map: reqwest::header::HeaderMap,
        client: &Client,
    ) -> Result<Vec<u8>, EngineError> {
        // fetch the json response from upstream search engine

        Ok(client
            .get(url)
            .headers(header_map) // add spoofed headers to emulate human behavior
            .send()
            .await
            .change_context(EngineError::RequestError)?
            .bytes()
            .await
            .change_context(EngineError::RequestError)?
            .to_vec())
    }
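A hypothetical JSON-based engine would pair this new helper with a deserializer. The `ApiResponse`/`ApiResult` shapes below are invented purely for illustration; only the "bytes in, typed struct out" step is the point:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct ApiResponse {
    results: Vec<ApiResult>,
}

#[allow(dead_code)]
#[derive(Deserialize)]
struct ApiResult {
    title: String,
    url: String,
    description: String,
}

/// Decodes the raw bytes returned by `fetch_json_as_bytes_from_upstream`.
fn decode(bytes: &[u8]) -> Result<ApiResponse, serde_json::Error> {
    serde_json::from_slice(bytes)
}

fn main() {
    let body = br#"{"results":[{"title":"t","url":"u","description":"d"}]}"#;
    let parsed = decode(body).expect("valid JSON");
    assert_eq!(parsed.results.len(), 1);
}
```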
    /// This function scrapes results from the upstream engine and puts all the scraped results like
    /// title, visiting_url (href in html), engine (which engine it was fetched from) and description
    /// in a RawSearchResult and then adds that to a HashMap whose keys are url and values are RawSearchResult

@ -109,9 +145,9 @@ pub trait SearchEngine: Sync + Send {
        query: &str,
        page: u32,
        user_agent: &str,
        request_timeout: u8,
        client: &Client,
        safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError>;
    ) -> Result<Vec<(String, SearchResult)>, EngineError>;
}

/// A named struct which stores the engine struct with the name of the associated engine.

@ -154,6 +190,26 @@ impl EngineHandler {
                let engine = crate::engines::brave::Brave::new()?;
                ("brave", Box::new(engine))
            }
            "startpage" => {
                let engine = crate::engines::startpage::Startpage::new()?;
                ("startpage", Box::new(engine))
            }
            "librex" => {
                let engine = crate::engines::librex::LibreX::new()?;
                ("librex", Box::new(engine))
            }
            "mojeek" => {
                let engine = crate::engines::mojeek::Mojeek::new()?;
                ("mojeek", Box::new(engine))
            }
            "bing" => {
                let engine = crate::engines::bing::Bing::new()?;
                ("bing", Box::new(engine))
            }
            "wikipedia" => {
                let engine = crate::engines::wikipedia::Wikipedia::new("en")?;
                ("wikipedia", Box::new(engine))
            }
            _ => {
                return Err(Report::from(EngineError::NoSuchEngineFound(
                    engine_name.to_string(),
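Assuming this match lives inside a constructor along the lines of `EngineHandler::new(&str) -> Result<Self, EngineError>` (the hunk context suggests so, but the full signature is not shown here), dispatch by name would look like:

```rust
use websurfx::models::engine_models::EngineHandler;

fn main() {
    // Registered names resolve to a boxed engine instance.
    assert!(EngineHandler::new("wikipedia").is_ok());
    // Unknown names hit the fallback arm above and yield
    // EngineError::NoSuchEngineFound carrying the offending name.
    assert!(EngineHandler::new("altavista").is_err());
}
```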
@ -1,8 +1,6 @@
//! This module provides public models for handling, storing and serializing parsed config file
//! options from config.lua by grouping them together.

use serde::{Deserialize, Serialize};

/// A named struct which stores, deserializes, serializes and groups the parsed config file options
/// of theme and colorscheme names into the Style struct which derives the `Clone`, `Serialize`
/// and `Deserialize` traits where the `Clone` trait is derived for allowing the struct to be

@ -12,13 +10,16 @@ use serde::{Deserialize, Serialize};
/// order to allow deserializing the json back to a struct in the aggregate function in
/// aggregator.rs and create a new struct out of it and then serialize it back to json and pass
/// it to the template files.
#[derive(Serialize, Deserialize, Clone, Default)]
#[derive(Default, Clone)]
pub struct Style {
    /// It stores the parsed theme option used to set a theme for the website.
    pub theme: String,
    /// It stores the parsed colorscheme option used to set a colorscheme for the
    /// theme being used.
    pub colorscheme: String,
    /// It stores the parsed animation option used to set an animation for the
    /// theme being used.
    pub animation: Option<String>,
}

impl Style {

@ -28,14 +29,17 @@ impl Style {
    ///
    /// * `theme` - It takes the parsed theme option used to set a theme for the website.
    /// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
    /// for the theme being used.
    pub fn new(theme: String, colorscheme: String) -> Self {
        Style { theme, colorscheme }
    ///   for the theme being used.
    pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
        Style {
            theme,
            colorscheme,
            animation,
        }
    }
}
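Usage of the extended constructor, with the new third parameter; the theme, colorscheme and animation names below are illustrative placeholders, since valid values are whatever CSS files ship under the public folder:

```rust
use websurfx::models::parser_models::Style;

fn main() {
    // `animation` is optional; pass None to disable animations entirely.
    let style = Style::new(
        "simple".to_owned(),
        "catppuccin-mocha".to_owned(),
        Some("simple-frosted-glow".to_owned()),
    );
    println!("{} / {}", style.theme, style.colorscheme);
}
```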
/// Configuration options for the aggregator.
#[derive(Clone)]
pub struct AggregatorConfig {
    /// It stores the option to enable or disable random delays between
    /// requests.

@ -43,7 +47,6 @@ pub struct AggregatorConfig {
}

/// Configuration options for the rate limiter middleware.
#[derive(Clone)]
pub struct RateLimiter {
    /// The number of requests that are allowed within a provided time limit.
    pub number_of_requests: u8,
@ -1,13 +1,17 @@
//! This module provides the models to parse cookies and search parameters from the search
//! engine website.
use serde::Deserialize;
use std::borrow::Cow;

use serde::{Deserialize, Serialize};

use super::parser_models::Style;

/// A named struct which deserializes all the user provided search parameters and stores them.
#[derive(Deserialize)]
pub struct SearchParams {
    /// It stores the search parameter option `q` (or query in simple words)
    /// of the search url.
    pub q: Option<String>,
    pub q: Option<Cow<'static, str>>,
    /// It stores the search parameter `page` (or pageno in simple words)
    /// of the search url.
    pub page: Option<u32>,

@ -18,14 +22,37 @@ pub struct SearchParams {

/// A named struct which is used to deserialize the cookies fetched from the client side.
#[allow(dead_code)]
#[derive(Deserialize)]
#[derive(Deserialize, Serialize)]
pub struct Cookie<'a> {
    #[serde(borrow)]
    /// It stores the theme name used in the website.
    pub theme: &'a str,
    pub theme: Cow<'a, str>,
    #[serde(borrow)]
    /// It stores the colorscheme name used for the website theme.
    pub colorscheme: &'a str,
    pub colorscheme: Cow<'a, str>,
    #[serde(borrow)]
    /// It stores the user selected upstream search engines selected from the UI.
    pub engines: Vec<&'a str>,
    pub engines: Cow<'a, [Cow<'a, str>]>,
    /// It stores the user selected safe search level from the UI.
    pub safe_search_level: u8,
    #[serde(borrow)]
    /// It stores the animation name used for the website theme.
    pub animation: Option<Cow<'a, str>>,
}

impl<'a> Cookie<'a> {
    /// server_models::Cookie constructor function
    pub fn build(style: &'a Style, mut engines: Vec<Cow<'a, str>>, safe_search_level: u8) -> Self {
        engines.sort();
        Self {
            theme: Cow::Borrowed(&style.theme),
            colorscheme: Cow::Borrowed(&style.colorscheme),
            engines: Cow::Owned(engines),
            safe_search_level,
            animation: style
                .animation
                .as_ref()
                .map(|str| Cow::Borrowed(str.as_str())),
        }
    }
}
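A sketch of building the new `Cow`-based cookie from a `Style` (the concrete style and engine names are illustrative); note that `build` sorts the engine list and borrows the style fields rather than cloning them:

```rust
use std::borrow::Cow;
use websurfx::models::parser_models::Style;
use websurfx::models::server_models::Cookie;

fn main() {
    let style = Style::new("simple".to_owned(), "catppuccin-mocha".to_owned(), None);
    let engines: Vec<Cow<'_, str>> = vec![Cow::Borrowed("wikipedia"), Cow::Borrowed("bing")];
    let cookie = Cookie::build(&style, engines, 0);
    // The theme is borrowed straight out of `style`, no allocation.
    assert_eq!(cookie.theme, "simple");
}
```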
@ -2,24 +2,32 @@
//! search engines and then removes duplicate results.

use super::user_agent::random_user_agent;
use crate::handler::paths::{file_path, FileType};
use crate::config::parser::Config;
use crate::handler::{file_path, FileType};
use crate::models::{
    aggregation_models::{EngineErrorInfo, SearchResult, SearchResults},
    engine_models::{EngineError, EngineHandler},
};

use error_stack::Report;
use rand::Rng;
use futures::stream::FuturesUnordered;
use regex::Regex;
use std::{
    collections::HashMap,
    io::{BufReader, Read},
use reqwest::{Client, ClientBuilder};
use std::sync::Arc;
use tokio::{
    fs::File,
    io::{AsyncBufReadExt, BufReader},
    task::JoinHandle,
    time::Duration,
};
use std::{fs::File, io::BufRead};
use tokio::task::JoinHandle;

/// A constant for holding the prebuilt Client globally in the app.
static CLIENT: std::sync::OnceLock<Client> = std::sync::OnceLock::new();

/// Aliases for long type annotations
type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<EngineError>>>>;

type FutureVec =
    FuturesUnordered<JoinHandle<Result<Vec<(String, SearchResult)>, Report<EngineError>>>>;

/// The function aggregates the scraped results from the user-selected upstream search engines.
/// These engines can be chosen either from the user interface (UI) or from the configuration file.

@ -32,7 +40,7 @@ type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<Eng
///
/// Additionally, the function eliminates duplicate results. If two results are identified as coming from
/// multiple engines, their names are combined to indicate that the results were fetched from these upstream
/// engines. After this, all the data in the `HashMap` is removed and placed into a struct that contains all
/// engines. After this, all the data in the `Vec` is removed and placed into a struct that contains all
/// the aggregated results in a vector. Furthermore, the query used is also added to the struct. This step is
/// necessary to ensure that the search bar in the search remains populated even when searched from the query URL.
///

@ -52,7 +60,7 @@ type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<Eng
/// * `debug` - Accepts a boolean value to enable or disable debug mode option.
/// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the
/// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
/// user through the UI or the config file.
///   user through the UI or the config file.
///
/// # Error
///

@ -62,33 +70,54 @@ type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<Eng
pub async fn aggregate(
    query: &str,
    page: u32,
    random_delay: bool,
    debug: bool,
    config: &Config,
    upstream_search_engines: &[EngineHandler],
    request_timeout: u8,
    safe_search: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
    let user_agent: &str = random_user_agent();
    let client = CLIENT.get_or_init(|| {
        let mut cb = ClientBuilder::new()
            .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
            .pool_idle_timeout(Duration::from_secs(
                config.pool_idle_connection_timeout as u64,
            ))
            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
            .pool_max_idle_per_host(config.number_of_https_connections as usize)
            .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
            .use_rustls_tls()
            .tls_built_in_root_certs(config.operating_system_tls_certificates)
            .https_only(true)
            .gzip(true)
            .brotli(true)
            .http2_adaptive_window(config.adaptive_window);

    // Add a random delay before making the request.
    if random_delay || !debug {
        let mut rng = rand::thread_rng();
        let delay_secs = rng.gen_range(1..10);
        tokio::time::sleep(Duration::from_secs(delay_secs)).await;
    }
        if config.proxy.is_some() {
            cb = cb.proxy(config.proxy.clone().unwrap());
        }

        cb.build().unwrap()
    });

    let user_agent: &str = random_user_agent();

    let mut names: Vec<&str> = Vec::with_capacity(0);

    // create tasks for upstream result fetching
    let mut tasks: FutureVec = FutureVec::new();
    let tasks: FutureVec = FutureVec::new();

    let query: Arc<String> = Arc::new(query.to_string());
    for engine_handler in upstream_search_engines {
        let (name, search_engine) = engine_handler.to_owned().into_name_engine();
        let (name, search_engine) = engine_handler.clone().into_name_engine();
        names.push(name);
        let query: String = query.to_owned();
        let query_partially_cloned = query.clone();
        tasks.push(tokio::spawn(async move {
            search_engine
                .results(&query, page, user_agent, request_timeout, safe_search)
                .results(
                    &query_partially_cloned,
                    page,
                    user_agent,
                    client,
                    safe_search,
                )
                .await
            }));
    }
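The spawn-per-engine loop above collects `JoinHandle`s into a `FuturesUnordered`, which yields whichever engine finishes first instead of awaiting in submission order. A minimal standalone sketch of the same idea (engine names and payloads are made up):

```rust
use futures::stream::{FuturesUnordered, StreamExt};
use tokio::task::JoinHandle;

#[tokio::main]
async fn main() {
    // push() takes &self, so the collection itself needs no `mut` binding here.
    let tasks: FuturesUnordered<JoinHandle<(&'static str, usize)>> = FuturesUnordered::new();

    for (name, count) in [("wikipedia", 20), ("bing", 10)] {
        tasks.push(tokio::spawn(async move {
            // Stand-in for `search_engine.results(...)`.
            (name, count)
        }));
    }

    // Results arrive in completion order, not submission order.
    let mut tasks = tasks;
    while let Some(joined) = tasks.next().await {
        let (name, count) = joined.expect("task not to panic");
        println!("{name}: {count} results");
    }
}
```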
@ -103,7 +132,7 @@ pub async fn aggregate(
    }

    // aggregate search results, removing duplicates and handling errors the upstream engines returned
    let mut result_map: HashMap<String, SearchResult> = HashMap::new();
    let mut result_map: Vec<(String, SearchResult)> = Vec::new();
    let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new();

    let mut handle_error = |error: &Report<EngineError>, engine_name: &'static str| {

@ -120,124 +149,152 @@ pub async fn aggregate(

        if result_map.is_empty() {
            match response {
                Ok(results) => {
                    result_map = results.clone();
                }
                Err(error) => {
                    handle_error(&error, engine);
                }
            }
                Ok(results) => result_map = results,
                Err(error) => handle_error(&error, engine),
            };
            continue;
        }

        match response {
            Ok(result) => {
                result.into_iter().for_each(|(key, value)| {
                    result_map
                        .entry(key)
                        .and_modify(|result| {
                            result.add_engines(engine);
                        })
                        .or_insert_with(|| -> SearchResult { value });
                    match result_map.iter().find(|(key_s, _)| key_s == &key) {
                        Some(value) => value.1.to_owned().add_engines(engine),
                        None => result_map.push((key, value)),
                    };
                });
            }
            Err(error) => {
                handle_error(&error, engine);
            }
        }
            Err(error) => handle_error(&error, engine),
        };
    }

    if safe_search >= 3 {
        let mut blacklist_map: HashMap<String, SearchResult> = HashMap::new();
        let mut blacklist_map: Vec<(String, SearchResult)> = Vec::new();
        filter_with_lists(
            &mut result_map,
            &mut blacklist_map,
            file_path(FileType::BlockList)?,
        )?;
        )
        .await?;

        filter_with_lists(
            &mut blacklist_map,
            &mut result_map,
            file_path(FileType::AllowList)?,
        )?;
        )
        .await?;

        drop(blacklist_map);
    }

    let results: Vec<SearchResult> = result_map.into_values().collect();
    let mut results: Box<[SearchResult]> = result_map
        .into_iter()
        .map(|(_, mut value)| {
            if !value.url.contains("temu.com") {
                value.calculate_relevance(query.as_str())
            }
            value
        })
        .collect();
    sort_search_results(&mut results);

    Ok(SearchResults::new(results, query, &engine_errors_info))
    Ok(SearchResults::new(
        results,
        engine_errors_info.into_boxed_slice(),
    ))
}

/// Filters a map of search results using a list of regex patterns.
///
/// # Arguments
///
/// * `map_to_be_filtered` - A mutable reference to a `HashMap` of search results to filter, where the filtered results will be removed from.
/// * `resultant_map` - A mutable reference to a `HashMap` to hold the filtered results.
/// * `map_to_be_filtered` - A mutable reference to a `Vec` of search results to filter, where the filtered results will be removed from.
/// * `resultant_map` - A mutable reference to a `Vec` to hold the filtered results.
/// * `file_path` - A `&str` representing the path to a file containing regex patterns to use for filtering.
///
/// # Errors
///
/// Returns an error if the file at `file_path` cannot be opened or read, or if a regex pattern is invalid.
pub fn filter_with_lists(
    map_to_be_filtered: &mut HashMap<String, SearchResult>,
    resultant_map: &mut HashMap<String, SearchResult>,
pub async fn filter_with_lists(
    map_to_be_filtered: &mut Vec<(String, SearchResult)>,
    resultant_map: &mut Vec<(String, SearchResult)>,
    file_path: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut reader = BufReader::new(File::open(file_path)?);
    let reader = BufReader::new(File::open(file_path).await?);
    let mut lines = reader.lines();

    for line in reader.by_ref().lines() {
        let re = Regex::new(line?.trim())?;
    while let Some(line) = lines.next_line().await? {
        let re = Regex::new(line.trim())?;

        let mut length = map_to_be_filtered.len();
        let mut idx: usize = Default::default();
        // Iterate over each search result in the map and check if it matches the regex pattern
        for (url, search_result) in map_to_be_filtered.clone().into_iter() {
            if re.is_match(&url.to_lowercase())
                || re.is_match(&search_result.title.to_lowercase())
                || re.is_match(&search_result.description.to_lowercase())
        while idx < length {
            let ele = &map_to_be_filtered[idx];
            let ele_inner = &ele.1;
            match re.is_match(&ele.0.to_lowercase())
                || re.is_match(&ele_inner.title.to_lowercase())
                || re.is_match(&ele_inner.description.to_lowercase())
            {
                // If the search result matches the regex pattern, move it from the original map to the resultant map
                resultant_map.insert(
                    url.to_owned(),
                    map_to_be_filtered.remove(&url.to_owned()).unwrap(),
                );
            }
                true => {
                    // If the search result matches the regex pattern, move it from the original map to the resultant map
                    resultant_map.push(map_to_be_filtered.swap_remove(idx));
                    length -= 1;
                }
                false => idx += 1,
            };
        }
    }

    Ok(())
}
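The index bookkeeping in the rewritten loop matters: `swap_remove` moves the last element into the freed slot, so on a match the code re-checks the same `idx` and shrinks `length`; on a miss it advances. A compact illustration of that invariant, with plain integers standing in for search results:

```rust
fn move_matches(items: &mut Vec<i32>, moved: &mut Vec<i32>, is_match: impl Fn(i32) -> bool) {
    let mut length = items.len();
    let mut idx = 0;
    while idx < length {
        if is_match(items[idx]) {
            // swap_remove is O(1): the last element takes slot `idx`,
            // so `idx` must be re-examined and `length` shrunk.
            moved.push(items.swap_remove(idx));
            length -= 1;
        } else {
            idx += 1;
        }
    }
}

fn main() {
    let mut items = vec![1, 2, 3, 4, 5, 6];
    let mut even = Vec::new();
    move_matches(&mut items, &mut even, |n| n % 2 == 0);
    assert_eq!(even, vec![2, 6, 4]); // order follows the swaps, not the input
    assert_eq!(items, vec![1, 5, 3]);
}
```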
/// Sorts SearchResults by relevance score.
/// <br> sort_unstable is used as it's faster; stability is not an issue on our side.
/// For reasons why, check out [`this`](https://rust-lang.github.io/rfcs/1884-unstable-sort.html)
/// # Arguments
/// * `results` - A mutable slice or Vec of SearchResults
///
fn sort_search_results(results: &mut [SearchResult]) {
    results.sort_unstable_by(|a, b| {
        use std::cmp::Ordering;

        b.relevance_score
            .partial_cmp(&a.relevance_score)
            .unwrap_or(Ordering::Less)
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use smallvec::smallvec;
    use std::collections::HashMap;
    use std::io::Write;
    use tempfile::NamedTempFile;

    #[test]
    fn test_filter_with_lists() -> Result<(), Box<dyn std::error::Error>> {
    #[tokio::test]
    async fn test_filter_with_lists() -> Result<(), Box<dyn std::error::Error>> {
        // Create a map of search results to filter
        let mut map_to_be_filtered = HashMap::new();
        map_to_be_filtered.insert(
        let mut map_to_be_filtered = Vec::new();
        map_to_be_filtered.push((
            "https://www.example.com".to_owned(),
            SearchResult {
                title: "Example Domain".to_owned(),
                url: "https://www.example.com".to_owned(),
                description: "This domain is for use in illustrative examples in documents."
                    .to_owned(),
                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
                relevance_score: 0.0,
                engine: vec!["Google".to_owned(), "Bing".to_owned()],
            },
        );
        map_to_be_filtered.insert(
        ));
        map_to_be_filtered.push((
            "https://www.rust-lang.org/".to_owned(),
            SearchResult {
                title: "Rust Programming Language".to_owned(),
                url: "https://www.rust-lang.org/".to_owned(),
                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
                engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
            },
        );
                engine: vec!["Google".to_owned(), "DuckDuckGo".to_owned()],
                relevance_score: 0.0,
            },
        ));

        // Create a temporary file with regex patterns

@ -246,70 +303,82 @@ mod tests {
        writeln!(file, "rust")?;
        file.flush()?;

        let mut resultant_map = HashMap::new();
        let mut resultant_map = Vec::new();
        filter_with_lists(
            &mut map_to_be_filtered,
            &mut resultant_map,
            file.path().to_str().unwrap(),
        )?;
        )
        .await?;

        assert_eq!(resultant_map.len(), 2);
        assert!(resultant_map.contains_key("https://www.example.com"));
        assert!(resultant_map.contains_key("https://www.rust-lang.org/"));
        assert!(resultant_map
            .iter()
            .any(|(key, _)| key == "https://www.example.com"));
        assert!(resultant_map
            .iter()
            .any(|(key, _)| key == "https://www.rust-lang.org/"));
        assert_eq!(map_to_be_filtered.len(), 0);

        Ok(())
    }

    #[test]
    fn test_filter_with_lists_wildcard() -> Result<(), Box<dyn std::error::Error>> {
        let mut map_to_be_filtered = HashMap::new();
        map_to_be_filtered.insert(
    #[tokio::test]
    async fn test_filter_with_lists_wildcard() -> Result<(), Box<dyn std::error::Error>> {
        let mut map_to_be_filtered = Vec::new();
        map_to_be_filtered.push((
            "https://www.example.com".to_owned(),
            SearchResult {
                title: "Example Domain".to_owned(),
                url: "https://www.example.com".to_owned(),
                description: "This domain is for use in illustrative examples in documents."
                    .to_owned(),
                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
                engine: vec!["Google".to_owned(), "Bing".to_owned()],
                relevance_score: 0.0,
            },
        );
        map_to_be_filtered.insert(
        ));
        map_to_be_filtered.push((
            "https://www.rust-lang.org/".to_owned(),
            SearchResult {
                title: "Rust Programming Language".to_owned(),
                url: "https://www.rust-lang.org/".to_owned(),
                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
                engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
                engine: vec!["Google".to_owned(), "DuckDuckGo".to_owned()],
                relevance_score: 0.0,
            },
        );
        ));

        // Create a temporary file with a regex pattern containing a wildcard
        let mut file = NamedTempFile::new()?;
        writeln!(file, "ex.*le")?;
        file.flush()?;

        let mut resultant_map = HashMap::new();
        let mut resultant_map = Vec::new();

        filter_with_lists(
            &mut map_to_be_filtered,
            &mut resultant_map,
            file.path().to_str().unwrap(),
        )?;
        )
        .await?;

        assert_eq!(resultant_map.len(), 1);
        assert!(resultant_map.contains_key("https://www.example.com"));
        assert!(resultant_map
            .iter()
            .any(|(key, _)| key == "https://www.example.com"));
        assert_eq!(map_to_be_filtered.len(), 1);
        assert!(map_to_be_filtered.contains_key("https://www.rust-lang.org/"));
        assert!(map_to_be_filtered
            .iter()
            .any(|(key, _)| key == "https://www.rust-lang.org/"));

        Ok(())
    }

    #[test]
    fn test_filter_with_lists_file_not_found() {
        let mut map_to_be_filtered = HashMap::new();
    #[tokio::test]
    async fn test_filter_with_lists_file_not_found() {
        let mut map_to_be_filtered = Vec::new();

        let mut resultant_map = HashMap::new();
        let mut resultant_map = Vec::new();

        // Call the `filter_with_lists` function with a non-existent file path
        let result = filter_with_lists(

@ -318,24 +387,25 @@ mod tests {
            "non-existent-file.txt",
        );

        assert!(result.is_err());
        assert!(result.await.is_err());
    }

    #[test]
    fn test_filter_with_lists_invalid_regex() {
        let mut map_to_be_filtered = HashMap::new();
        map_to_be_filtered.insert(
    #[tokio::test]
    async fn test_filter_with_lists_invalid_regex() {
        let mut map_to_be_filtered = Vec::new();
        map_to_be_filtered.push((
            "https://www.example.com".to_owned(),
            SearchResult {
                title: "Example Domain".to_owned(),
                url: "https://www.example.com".to_owned(),
                description: "This domain is for use in illustrative examples in documents."
                    .to_owned(),
                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
                engine: vec!["Google".to_owned(), "Bing".to_owned()],
                relevance_score: 0.0,
            },
        );
        ));

        let mut resultant_map = HashMap::new();
        let mut resultant_map = Vec::new();

        // Create a temporary file with an invalid regex pattern
        let mut file = NamedTempFile::new().unwrap();

@ -348,6 +418,6 @@ mod tests {
            file.path().to_str().unwrap(),
        );

        assert!(result.is_err());
        assert!(result.await.is_err());
    }
}
@ -4,61 +4,79 @@

use crate::{
    config::parser::Config,
    handler::paths::{file_path, FileType},
    handler::{file_path, FileType},
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use std::fs::read_to_string;
use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
use tokio::fs::read_to_string;

/// Handles the route of the index page or main page of the `websurfx` meta search engine website.
#[get("/")]
pub async fn index(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
    config: web::Data<&'static Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("index", &config.style).unwrap();
    Ok(HttpResponse::Ok().body(page_content))
    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
        crate::templates::views::index::index(
            &config.style.colorscheme,
            &config.style.theme,
            &config.style.animation,
        )
        .0,
    ))
}

/// Handles the route of any other accessed route/page which is not provided by the
/// website, essentially the 404 error page.
pub async fn not_found(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
    config: web::Data<&'static Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("404", &config.style)?;

    Ok(HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(page_content))
    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
        crate::templates::views::not_found::not_found(
            &config.style.colorscheme,
            &config.style.theme,
            &config.style.animation,
        )
        .0,
    ))
}

/// Handles the route of the robots.txt page of the `websurfx` meta search engine website.
#[get("/robots.txt")]
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String =
        read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
        read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?)).await?;
    Ok(HttpResponse::Ok()
        .content_type("text/plain; charset=ascii")
        .content_type(ContentType::plaintext())
        .body(page_content))
}

/// Handles the route of the about page of the `websurfx` meta search engine website.
#[get("/about")]
pub async fn about(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
    config: web::Data<&'static Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("about", &config.style)?;
    Ok(HttpResponse::Ok().body(page_content))
    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
        crate::templates::views::about::about(
            &config.style.colorscheme,
            &config.style.theme,
            &config.style.animation,
        )
        .0,
    ))
}

/// Handles the route of the settings page of the `websurfx` meta search engine website.
#[get("/settings")]
pub async fn settings(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
    config: web::Data<&'static Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("settings", &config.style)?;
    Ok(HttpResponse::Ok().body(page_content))
    Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
        crate::templates::views::settings::settings(
            config.safe_search,
            &config.style.colorscheme,
            &config.style.theme,
            &config.style.animation,
            &config.upstream_search_engines,
        )?
        .0,
    ))
}
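A quick smoke test of the rewritten handlers against a locally running instance; the bind address is an assumption taken from the doc example earlier in this changeset, not something the routes themselves define:

```rust
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Served with Content-Type: text/plain by the `robots_data` handler above.
    let body = reqwest::get("http://127.0.0.1:8080/robots.txt")
        .await?
        .text()
        .await?;
    println!("{body}");
    Ok(())
}
```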
194 src/server/routes/export_import.rs (Normal file)
@ -0,0 +1,194 @@
|
//! This module handles the settings and download routes of the search engine website.

use crate::{
    handler::{file_path, FileType},
    models::{self, server_models},
    Config,
};
use actix_multipart::form::{tempfile::TempFile, MultipartForm};
use actix_web::{
    cookie::{
        time::{Duration, OffsetDateTime},
        Cookie,
    },
    get, post, web, HttpRequest, HttpResponse,
};
use std::borrow::Cow;
use std::io::Read;

use tokio::fs::read_dir;

/// A helper function that builds the list of all available colorscheme/theme/animation
/// names present in the colorschemes, animations and themes folders respectively.
///
/// # Arguments
///
/// * `style_type` - It takes the style type (`themes`, `colorschemes` or `animations`) as an
///   argument.
///
/// # Error
///
/// Returns the list of colorscheme/theme/animation names as a boxed slice of strings on
/// success, otherwise returns a standard error message.
async fn style_option_list<'a>(
    style_type: &'a str,
) -> Result<Box<[Cow<'a, str>]>, Box<dyn std::error::Error>> {
    let mut style_options = Vec::new();
    let mut dir = read_dir(format!(
        "{}static/{}/",
        file_path(FileType::Theme)?,
        style_type,
    ))
    .await?;
    while let Some(file) = dir.next_entry().await? {
        let style_name = file.file_name().to_str().unwrap().replace(".css", "");
        style_options.push(Cow::Owned(style_name));
    }

    // The empty string is also a valid choice for animations (no animation selected).
    if style_type == "animations" {
        style_options.push(Cow::default())
    }

    Ok(style_options.into_boxed_slice())
}
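A usage sketch for `style_option_list`, mirroring how `sanitize` consumes it below; the colorscheme name is made up:

use std::borrow::Cow;

async fn demo() -> Result<(), Box<dyn std::error::Error>> {
    // Collect every available colorscheme name, then test membership the same
    // way sanitize() does before falling back to the config default.
    let colorschemes = style_option_list("colorschemes").await?;
    if !colorschemes.contains(&Cow::Borrowed("catppuccin-mocha")) {
        eprintln!("unknown colorscheme, using the server default");
    }
    Ok(())
}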

/// A helper function which sanitizes the user-provided JSON data from the input file.
///
/// # Arguments
///
/// * `config` - It takes the config struct as an argument.
/// * `setting_value` - It takes the cookie struct as an argument.
///
/// # Error
///
/// Returns a standard error message on failure, otherwise it returns the unit type.
async fn sanitize(
    config: web::Data<&'static Config>,
    setting_value: &mut models::server_models::Cookie<'_>,
) -> Result<(), Box<dyn std::error::Error>> {
    // Check whether the theme, colorscheme and animation options are valid by matching them
    // against the available option lists. If an option provided by the user via the JSON file
    // is invalid, then replace the user-provided value with the default one used by the server
    // via the config file.
    if !style_option_list("themes")
        .await?
        .contains(&setting_value.theme)
    {
        setting_value.theme = Cow::Borrowed(&config.style.theme)
    } else if !style_option_list("colorschemes")
        .await?
        .contains(&setting_value.colorscheme)
    {
        setting_value.colorscheme = Cow::Borrowed(&config.style.colorscheme)
    } else if !style_option_list("animations")
        .await?
        .contains(setting_value.animation.as_ref().unwrap())
    {
        setting_value.animation = config
            .style
            .animation
            .as_ref()
            .map(|str| Cow::Borrowed(str.as_str()));
    }

    // Filters out any engines in the list that are invalid by matching each engine against the
    // available engine list.
    let engines: Vec<_> = setting_value
        .engines
        .iter()
        .cloned()
        .filter_map(|engine| {
            config
                .upstream_search_engines
                .keys()
                .cloned()
                .any(|other_engine| *engine == other_engine)
                .then_some(engine.clone())
        })
        .collect();
    setting_value.engines = Cow::Owned(engines);

    // Only levels 0 and 1 pass the range check; anything else falls back to
    // the default level (0).
    setting_value.safe_search_level = match setting_value.safe_search_level {
        0..2 => setting_value.safe_search_level,
        _ => u8::default(),
    };

    Ok(())
}
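A worked example of what `sanitize` does to an uploaded settings file. The field names follow the `Cookie` model as it is used above; the exact serde names and the style/engine values are assumptions:

{
  "theme": "simple",
  "colorscheme": "catppuccin-mocha",
  "animation": "simple-frosted-glow",
  "engines": ["duckduckgo", "brave", "notarealengine"],
  "safe_search_level": 9
}

After `sanitize` runs, "notarealengine" is dropped because it matches no key in `config.upstream_search_engines`, and `safe_search_level` 9 falls outside the `0..2` range, so it is reset to `u8::default()` (0). Valid style names are kept as-is.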

/// A multipart struct which stores the user-provided input file data in memory.
#[derive(MultipartForm)]
struct File {
    /// It stores the input file data in memory.
    file: TempFile,
}
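The handler below checks the file's name and size itself; assuming actix-multipart's form configuration API, the app could additionally cap upload sizes globally. A minimal sketch (the limit is an arbitrary example, and registering it via `App::app_data` is assumed):

use actix_multipart::form::MultipartFormConfig;

// Caps how much multipart data the MultipartForm extractor will accept in
// total; a settings JSON is tiny, so a few kilobytes is plenty.
fn multipart_limits() -> MultipartFormConfig {
    MultipartFormConfig::default().total_limit(16 * 1024)
}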

/// Handles the route of the post settings page.
#[post("/settings")]
pub async fn set_settings(
    config: web::Data<&'static Config>,
    MultipartForm(mut form): MultipartForm<File>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    if let Some(file_name) = form.file.file_name {
        let file_name_parts = file_name.split(".");
        // Only accept a file named like `<name>.json`: exactly two parts
        // around a single dot, a `json` extension, and a non-empty body.
        if let 2 = file_name_parts.clone().count() {
            if let Some("json") = file_name_parts.last() {
                if let 0 = form.file.size {
                    return Ok(HttpResponse::BadRequest().finish());
                } else {
                    let mut data = String::new();
                    form.file.file.read_to_string(&mut data).unwrap();

                    let mut unsanitized_json_data: models::server_models::Cookie<'_> =
                        serde_json::from_str(&data)?;

                    sanitize(config, &mut unsanitized_json_data).await?;

                    let sanitized_json_data: String =
                        serde_json::json!(unsanitized_json_data).to_string();

                    return Ok(HttpResponse::Ok()
                        .cookie(
                            Cookie::build("appCookie", sanitized_json_data)
                                .expires(
                                    OffsetDateTime::now_utc().saturating_add(Duration::weeks(52)),
                                )
                                .finish(),
                        )
                        .finish());
                }
            }
        }
    }
    Ok(HttpResponse::Ok().finish())
}
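A hypothetical client call against this route (blocking reqwest with the `blocking` and `multipart` features; the URL and file name are assumptions). The multipart field must be named "file" to match the `File` struct above:

fn upload_settings() -> Result<(), Box<dyn std::error::Error>> {
    let form = reqwest::blocking::multipart::Form::new().file("file", "settings.json")?;
    let resp = reqwest::blocking::Client::new()
        .post("http://127.0.0.1:8080/settings")
        .multipart(form)
        .send()?;
    // On success the response carries a Set-Cookie header for `appCookie`
    // valid for 52 weeks; a zero-byte upload gets 400 Bad Request, and a
    // non-.json file name is silently ignored (plain 200, no cookie).
    println!("{}", resp.status());
    Ok(())
}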

/// Handles the route of the download page.
#[get("/download")]
pub async fn download(
    config: web::Data<&'static Config>,
    req: HttpRequest,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let cookie = req.cookie("appCookie");

    // Get search settings using the user's cookie or from the server's config.
    let preferences: server_models::Cookie<'_> = cookie
        .as_ref()
        .and_then(|cookie_value| serde_json::from_str(cookie_value.value()).ok())
        .unwrap_or_else(|| {
            server_models::Cookie::build(
                &config.style,
                config
                    .upstream_search_engines
                    .iter()
                    .filter_map(|(engine, enabled)| {
                        enabled.then_some(Cow::Borrowed(engine.as_str()))
                    })
                    .collect(),
                u8::default(),
            )
        });

    Ok(HttpResponse::Ok().json(preferences))
}
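A hypothetical client call for the download route (blocking reqwest; the URL is an assumption):

fn download_settings() -> Result<(), Box<dyn std::error::Error>> {
    // With no appCookie sent, the handler falls back to Cookie::build with
    // the server's own style and enabled engines, and returns them as JSON.
    let json = reqwest::blocking::Client::new()
        .get("http://127.0.0.1:8080/download")
        .send()?
        .text()?;
    // The body is the preferences JSON that POST /settings would accept back.
    println!("{json}");
    Ok(())
}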

src/server/routes/mod.rs

@@ -1,3 +1,4 @@
 //! This module provides modules to handle various routes in the search engine website.

+pub mod export_import;
 pub mod search;