Compare commits
389 commits
Commit SHA1s:

dae9f31b19, fa57233e5d, 765dcbda5c, 14d8bf9fde, 04cac02f3d, cca1115b0f, 729c550688, dc34d51877,
b911eaec54, 07370f7238, 5280806b7e, a3d5ca0112, 82e7669310, 5ea1372f91, 94a364daad, 05a8ee5281,
ca6b271bc9, ab126b9d70, 2e64fd5cbc, ce5c794ce2, ac4adab00b, 0715c2226b, fe796c676e, ba78d8e2c8,
cac82d1986, ef0ae2f0aa, 4fa9a7491e, e08c0754f8, ee4bc00576, d75e7d07ec, 718e172b6d, 56bfcbba0e,
d52da3aeb4, 42c30aaaba, 959d0c52b1, c796ae8bb7, 913ca1b075, 5d59a2c7be, ecc6875a21, d75693ce4e,
e7efca4a4e, 2a4dd07752, 193b4e36db, e582960402, 1883093dc7, 9984ba0d12, 98e817c0a8, 28231d99b8,
4afc07f1d5, f2d2068bcf, b1bcf41061, 39af9096ef, 4bb6c5e90b, 494ff27b49, 948d20d8fb, 4315221385,
3a1ff0f307, b22d60f166, acee5d892d, 9a5f1c5f44, ebee1f4a6c, 4847a6eed2, 951060dc45, 2693cd18d2,
838d1b6958, c527897a4d, 2141b88c35, b0c99f25e5, d5c4206afe, 4950106871, 0b48f671cb, 16717bc27d,
f51d2e6881, ad5b754741, d5524d7eae, 3c2533f69a, 8225d34a9c, d2954862ea, f55abf934d, 052d9fd167,
bf7e73f9ff, 2f4e4038b1, 5d06cce220, 33363a83ef, 6200c5d53c, bbc49cbf42, 408858a91e, 4993da4d89,
2e50fa4edb, 9058b68f78, 9f90caf262, 1386cd6739, fe959efd0e, 5b6afb0b67, 93fd8f8565, 9efcf0b079,
c077e39ae7, b1df4f1154, bb50e8bb25, c584a7d601, ce4912b9c5, 236e8871c6, f78c6d7660, 9c6729c931,
8779c03afa, 2149e32c9e, 4a990c537b, d110f72c18, 991f3f59de, 8d9b660eb1, 9c51128af2, c494edcaff,
6aab9c85b9, 1d133ed2a8, d020895a51, b7a3a8d855, 11d23fd1b2, 41ab8a2a76, 2df6499fb2, 280c7e2b5e,
8790f5f719, 0f19ade40c, ae5b3370bc, 3d76b1bd86, 6c3d9ecd50, 57267827f5, 1909cc36a8, ca425f9ef5,
a92550e050, 669e365913, b2cbc5eaa5, 851ea314a7, fbf73634ee, 779908cb11, 78858b0e04, 660f85620d,
51214dc23a, 29b76be459, 6b9469e4b3, f5cf5f9151, c762f9cf8e, 705ba81026, 0f717cc976, b2971c1829,
99f0cf1113, c25cd9c3fe, 388aaf4bfd, 31c9c676ee, ca96a76958, 326131aac4, efa8efc6c7, 80e950de3b,
a47e28587c, d912bff94e, 6e9250c03a, 7d762b3726, 33846cee34, dde117e7e6, 86b0d3d6c9, 36e2ac93be,
3a97a6f621, 92a141c1c5, c73cb838e2, ebd69ff427, af3385d1c2, 649e0db5cb, 62459c68e8, db1115f19e,
50aa52c485, 5020f36c90, 5b4864424a, 4f4cb220cc, 286bcf1bd3, bfeb81270c, ddb10f6584, 61393ba7c4,
f2907641d2, 7d42c84aaf, 72da32383e, 2bdddaf928, 0dd25aacb6, 41f3fe7485, 7f84c6346d, d073aa247a,
9f23a1c70b, d8943709c7, 93afb6b8c9, 83c3981697, 40138572be, 7b392b369d, 26aa345f06, b7a23f1826,
34468202f9, 9f5213cf42, 43357493d5, 1d9718798f, 8312d21f9f, 0943b8b8c4, 3b127d26a1, 96ed04c298,
e8f9ad2479, fb46d2c6f2, a66362bce1, 12843414f8, 493696f4e2, b95c3c8afe, c698f4e0ef, 67c3e39d4e,
6d497fcf81, e45122288d, 5e4ed070d2, 21591d2a0d, d61ab883d9, 4f27a5c9de, 94a92ae30c, bb06797dec,
22d0ff789e, 64ca6a30c1, 11c4b8c21d, 3bb7614256, 582f8aee5b, b00f76627b, c59596511f, 0bc96b167c,
19081b72c0, 1a2a833597, 7206e7d6a1, 9a4e450766, c0d2d1ac65, 76419a7353, 0da1b9e1db, 1c5a317c4d,
2b41fb9735, 86b1f62393, b718fb131c, 2c985b8db5, cface54414, 918d142d24, 44f51487c2, c6ae86dbb4,
15b0505b96, 397a805ffd, c02006c297, 1143846ca6, 11166b4876, 62682911fc, 4cd1810527, b68e06c883,
89032e63bd, e8a64f5874, e1e426c517, fb2b6608fe, f11d35f057, 3ca8f63c1e, 3565dcea39, 9a53329dc6,
6b71e71df8, 1cd336c7dc, 026f7794d1, 15c71cbfba, 5a8d61f231, 3c6632246e, e704c26ed3, 90f010359d,
35dc276fd2, 5e2669b6de, 7e1a80dc7e, 26f73d5611, a142aa75cd, 95c6beeb47, 33507e4c09, 5fa1febf5f,
5367dd39df, 69eb815d25, 9bb1544bd7, abc59b2858, d28cbb96a1, 07bbea8f9b, fc830c4683, a2360d1f65,
f62b821422, ae9fa5b388, e1a837f6b0, 0139fc568b, a46a2231c3, fe74f2eef7, b42adaa5a3, 05bf05b0dd,
89542072c8, dc5fa842c0, 9b1d89404e, 03d649b97b, eee2f110b8, c39d9ff0b0, 686e26ad6d, 5c60d733cd,
141ae26066, 47905f1e22, ca1c72c3dc, c1a5b7086a, 38ba4bd6cb, 64c4d2c23a, 0ec89146c8, beb5e6012a,
0facfdbd56, 67487a51ee, 692ac48708, ad514398fa, 3742893c19, 89ee79cd0f, 3aee141d0e, 3189de6fb9,
bbc226829e, 4dd44aec3f, 66669a0cd8, faf9995962, 68a2b955d8, 42f0dc1bc7, 2e0def777c, 4dd455e185,
9d070141ff, 68f701265f, 24fda29358, 9282e30efd, 41c57bd070, bca2ba17b9, 3f367d0b5e, da03037ca4,
b123fbbdab, cfe57a430b, 344e641161, 82802ba552, 5301d09894, 6963d00074, 0c2dbb3a9f, 41799006e3,
d4be2957b8, bfd4dbec22, 9dcd79611e, d67ee863e1, 19eab639f8, 08de9c7358, 97067bf35f, 07f4b25ebc,
f1a12d0b87, e6c956f29d, 7f8404e341, a6383ee113, 466ed1bbb1, c80ae2fd14, 27bc52c008, fc3b416970,
405d00612f, 092a38fc40, ad297a0a96, a912ac0724, 40b7e6d2ea, dac2c0c488, 8156f7ea79, f56002dca6,
8ed4c9e206, 32abacb4c3, 57c73d38c8, 75a77d25f0, 769d870803, 3ad5444e27, 5e63f37d70, 79b0aec66b,
01cb86dedd, 074975bc20, 57410378a4, 5d26a2ec6d, 7ae554e62d, 4f0d00bde0, 6bc434cebd, a9f54b4027,
0fbb523082, f9ac87bdba, b3fab88b6f, f1375abff9, c324c74411, 6362d0f733, 89e5895519, 9960bc3315,
6cf4a63db7, 4f92262a05, 2803471821, 4155a23e66, 6ec79144a0
**`.cspell.json`** (deleted, 20 lines)

```diff
@@ -1,20 +0,0 @@
-{
-  "ignorePaths": [
-    "**/node_modules/**",
-    "**/vscode-extension/**",
-    "**/.git/**",
-    "**/.pnpm-lock.json",
-    ".vscode",
-    "megalinter",
-    "package-lock.json",
-    "report"
-  ],
-  "language": "en",
-  "noConfigSearch": true,
-  "words": [
-    "megalinter",
-    "oxsecurity",
-    "websurfx"
-  ],
-  "version": "0.2"
-}
```
**`.github/workflows/contributors.yml`** (deleted, 48 lines)

```diff
@@ -1,48 +0,0 @@
----
-name: Contributors List
-
-on:
-  workflow_dispatch:
-
-  schedule:
-    - cron: "0 1 * * *"
-
-jobs:
-  contributors:
-    permissions:
-      contents: write
-      pull-requests: write
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.repository.default_branch }}
-
-      - name: Update contributors list
-        uses: wow-actions/contributors-list@b9e91f91a51a55460fdcae64daad0cb8122cdd53 # v1.1.0
-        with:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          svgPath: images/contributors_list.svg
-          round: true
-          includeBots: false
-          noCommit: true
-
-      - name: Commit & PR
-        uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # v4.2.4
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          add-paths: .github/assets/CONTRIBUTORS.svg
-          commit-message: 'chore: update contributors-list'
-          committer: GitHub <noreply@github.com>
-          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
-          signoff: false
-          branch: workflow/update-contributors-list
-          base: main
-          delete-branch: true
-          title: 'chore: update contributors-list'
-          body: |
-            Automated update to `images/contributors_list.svg`
```
**`.github/workflows/docker.yml`** (new file, 79 lines)

```diff
@@ -0,0 +1,79 @@
+name: Release stable image
+
+on:
+  push:
+    branches:
+      - "release/stable/**"
+  pull_request:
+    branches:
+      - "release/stable/**"
+    types: [opened, synchronize]
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  release_image:
+    strategy:
+      fail-fast: false
+      matrix:
+        cache:
+          - memory
+          - redis
+          - hybrid
+          - no-cache
+
+    name: Release ${{ matrix.cache }} image
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      # Install buildx
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+      # Set buildx cache
+      - name: Cache register
+        uses: actions/cache@v4
+        with:
+          path: /tmp/.buildx-cache
+          key: buildx-cache
+      # Login to ghcr.io
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: neonmmd
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      # Extract branch info
+      - name: Set info
+        run: |
+          echo "VERSION=$(echo ${GITHUB_REF} | awk -F/ '{print $6}')" >> $GITHUB_ENV
+      # Print info for debug
+      - name: Print Info
+        run: |
+          echo $VERSION
+      # Create buildx multiarch
+      - name: Create buildx multiarch
+        run: docker buildx create --use --name=buildx-multi-arch --driver=docker-container --driver-opt=network=host
+      # Modify cache variable in the dockerfile.
+      - name: Modify Cache variable
+        run: |
+          sed -i "s/ARG CACHE=[a-z]*/ARG CACHE=${{ matrix.cache }}/g" Dockerfile
+      # Publish image
+      - name: Publish image
+        run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neonmmd/websurfx:$VERSION-${{ matrix.cache }} -t neon-mmd/websurfx:${{matrix.cache}} -f Dockerfile .
+      - name: Publish latest
+        if: ${{ matrix.cache }} == 'hybrid'
+        run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neon-mmd/websurfx:latest -f Dockerfile .
+      # Upload it to release
+      - name: Test if release already exists
+        id: release-exists
+        continue-on-error: true
+        run: gh release view $BINARY_NAME-$VERSION
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Create new draft release
+        if: steps.release-exists.outcome == 'failure' && steps.release-exists.conclusion == 'success'
+        run: gh release create -t $VERSION -d $VERSION
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
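Before each build, the `Modify Cache variable` step rewrites the `ARG CACHE` default in the Dockerfile so the chosen backend is baked into the image. A rough local equivalent of a single matrix leg, using `redis` and the `1.21.0` version from `Cargo.toml` as example values rather than the workflow's exact environment:

```sh
# Sketch of one matrix leg run locally (assumes Docker with buildx installed).
sed -i "s/ARG CACHE=[a-z]*/ARG CACHE=redis/g" Dockerfile
docker buildx create --use --name=buildx-multi-arch \
  --driver=docker-container --driver-opt=network=host
docker buildx build --builder=buildx-multi-arch \
  --platform=linux/amd64,linux/arm64 \
  --build-arg CACHE=redis \
  --push -t neonmmd/websurfx:1.21.0-redis -f Dockerfile .
```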
**`.github/workflows/issue-lock-unlock.yml`** (2 lines changed)

```diff
@@ -11,6 +11,6 @@ jobs:
   action:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/label-actions@v3
+      - uses: dessant/label-actions@v4
        with:
          process-only: issues
```
**`.github/workflows/labels.yml`** (2 lines changed)

```diff
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/setup-node@v2
+      - uses: actions/setup-node@v4
        with:
          node-version: '14'
      - uses: EddieHubCommunity/gh-action-open-source-labels@main
```
**`.github/workflows/mega-linter.yml`** (deleted, 89 lines)

```diff
@@ -1,89 +0,0 @@
----
-# MegaLinter GitHub Action configuration file
-# More info at https://megalinter.io
-name: MegaLinter
-
-on:
-  # Trigger mega-linter at every push. Action will also be visible from Pull Requests to rolling
-  push: # Comment this line to trigger action only on pull-requests (not recommended if you don't pay for GH Actions)
-  pull_request:
-    branches: [rolling]
-
-env: # Comment env block if you do not want to apply fixes
-  # Apply linter fixes configuration
-  APPLY_FIXES: all # When active, APPLY_FIXES must also be defined as environment variable (in github/workflows/mega-linter.yml or other CI tool)
-  APPLY_FIXES_EVENT: pull_request # Decide which event triggers application of fixes in a commit or a PR (pull_request, push, all)
-  APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request)
-
-concurrency:
-  group: ${{ github.ref }}-${{ github.workflow }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    name: MegaLinter
-    runs-on: ubuntu-latest
-    permissions:
-      # Give the default GITHUB_TOKEN write permission to commit and push, comment issues & post new PR
-      # Remove the ones you do not need
-      contents: write
-      issues: write
-      pull-requests: write
-    steps:
-      # Git Checkout
-      - name: Checkout Code
-        uses: actions/checkout@v3
-        with:
-          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
-
-      # MegaLinter
-      - name: MegaLinter
-        id: ml
-        # You can override MegaLinter flavor used to have faster performances
-        # More info at https://megalinter.io/flavors/
-        uses: oxsecurity/megalinter/flavors/cupcake@v7.1.0
-        env:
-          # All available variables are described in documentation
-          # https://megalinter.io/configuration/
-          VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} to validate only diff with main branch
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY
-
-      # Upload MegaLinter artifacts
-      - name: Archive production artifacts
-        if: ${{ success() }} || ${{ failure() }}
-        uses: actions/upload-artifact@v3
-        with:
-          name: MegaLinter reports
-          path: |
-            megalinter-reports
-            mega-linter.log
-
-      # Create pull request if applicable (for now works only on PR from same repository, not from forks)
-      - name: Create Pull Request with applied fixes
-        id: cpr
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        uses: peter-evans/create-pull-request@v5
-        with:
-          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
-          commit-message: "[MegaLinter] Apply linters automatic fixes"
-          title: "[MegaLinter] Apply linters automatic fixes"
-          labels: bot
-      - name: Create PR output
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        run: |
-          echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
-          echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
-
-      # Push new commit if applicable (for now works only on PR from same repository, not from forks)
-      - name: Prepare commit
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        run: sudo chown -Rc $UID .git/
-      - name: Commit and push applied linter fixes
-        if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
-        uses: stefanzweifel/git-auto-commit-action@v4
-        with:
-          branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
-          commit_message: "[MegaLinter] Apply linters fixes"
-          commit_user_name: megalinter-bot
-          commit_user_email: nicolas.vuillamy@ox.security
```
**`.github/workflows/pr_labeler.yml`** (2 lines changed)

```diff
@@ -9,7 +9,7 @@ jobs:
       pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@v4
+      - uses: actions/labeler@v5
        with:
          sync-labels: true
          dot: true
```
**`.github/workflows/release.yml`** (new file, 72 lines)

```diff
@@ -0,0 +1,72 @@
+name: Bump release version
+on:
+  pull_request:
+    branches: [rolling]
+    types:
+      - closed
+
+permissions:
+  contents: write
+  pull-requests: write
+  repository-projects: write
+
+concurrency: production
+
+jobs:
+  build:
+    name: bump tag version and release
+    if: github.event.pull_request.merged == true
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.sha }}
+          fetch-depth: 0
+      - name: Bump version and push tag
+        id: version-bump
+        uses: hennejg/github-tag-action@v4.4.0
+        with:
+          github_token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+          release_branches: rolling
+      - name: create branch
+        uses: peterjgrainger/action-create-branch@v3.0.0
+        env:
+          GITHUB_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+        with:
+          branch: update-from-${{ github.sha }}
+      - name: update cargo.toml
+        run: |
+          appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
+          sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
+      - run: rustup toolchain install stable --profile minimal
+      - run: rustup update stable && rustup default stable
+      - name: regenerate cargo.lock
+        run: cargo generate-lockfile
+      - name: auto commit
+        uses: stefanzweifel/git-auto-commit-action@v5
+        with:
+          commit_message: "[skip ci] updating app version to ${{ steps.version-bump.outputs.new_tag }}"
+          branch: update-from-${{ github.sha }}
+      # create PR using GitHub CLI
+      - name: create PR with update info
+        id: create-pr
+        run: gh pr create --base rolling --head update-from-${{ github.sha }} --title 'Merge new update into rolling' --body 'Created by Github action'
+        env:
+          GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+      # merge PR using GitHub CLI
+      - name: merge PR with update info
+        id: merge-pr
+        run: gh pr merge --admin --merge --subject 'Merge update info' --delete-branch
+        env:
+          GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+      - name: Create Release
+        uses: softprops/action-gh-release@v2
+        with:
+          token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
+          generate_release_notes: true
+          name: ${{ steps.version-bump.outputs.new_tag }}
+          tag_name: ${{ steps.version-bump.outputs.new_tag }}
+          prerelease: false
+        env:
+          GITHUB_REPOSITORY: ${{ github.repository }}
```
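The `update cargo.toml` step relies on `grep -oE` to strip the tag prefix before patching the manifest; a quick sketch of what it does, with a hypothetical tag value in place of the action output:

```sh
# "v1.21.0" is a hypothetical tag; the regex keeps only the bare semver.
appversion=$(echo "v1.21.0" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
echo "$appversion"  # prints: 1.21.0
sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
```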
**`.github/workflows/rust.yml`** (4 lines changed)

```diff
@@ -25,7 +25,7 @@ jobs:
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: rustup toolchain install stable --profile minimal
      - uses: Swatinem/rust-cache@v2
        with:
@@ -39,7 +39,7 @@ jobs:
          cache-on-failure: ''
          cache-all-crates: ''
          save-if: ''
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
      - name: Build
        run: cargo build --verbose
```
**`.github/workflows/rust_format.yml`** (4 lines changed)

```diff
@@ -17,7 +17,7 @@ jobs:
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
      - name: Install minimal stable with clippy and rustfmt
        uses: actions-rs/toolchain@v1
        with:
@@ -33,7 +33,7 @@ jobs:
        uses: actions-rs/cargo@v1
        with:
          command: clippy
-          args: --all-targets --all
+          args: --all-targets --all-features --all
      - name: Run cargo check
        uses: actions-rs/cargo@v1
        with:
```
**`.github/workflows/stale.yml`** (2 lines changed)

```diff
@@ -19,7 +19,7 @@ jobs:
      pull-requests: write

    steps:
-      - uses: actions/stale@v5
+      - uses: actions/stale@v9
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Stale issue message'
```
**`.mega-linter.yml`** (deleted, 22 lines)

```diff
@@ -1,22 +0,0 @@
----
-# Configuration file for MegaLinter
-# See all available variables at https://megalinter.io/configuration/ and in linters documentation
-
-APPLY_FIXES: all # all, none, or list of linter keys
-# ENABLE: # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default
-ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default
-  - RUST_CLIPPY
-  - JAVASCRIPT_ES
-  - CSS_STYLELINT
-  - MARKDOWN_MARKDOWNLINT
-  - YAML_YAMLLINT
-  - HTML_DJLINT
-  - ACTION_ACTIONLINT
-  - DOCKERFILE_HADOLINT
-  - SPELL_CSPELL
-# DISABLE:
-# - COPYPASTE # Uncomment to disable checks of excessive copy-pastes
-# - SPELL # Uncomment to disable checks of spelling mistakes
-SHOW_ELAPSED_TIME: true
-FILEIO_REPORTER: false
-# DISABLE_ERRORS: true # Uncomment if you want MegaLinter to detect errors but not block CI to pass
```
**`.mergify.yml`** (new file, 25 lines)

```diff
@@ -0,0 +1,25 @@
+queue_rules:
+  - name: default
+    queue_conditions:
+      - "#approved-reviews-by>=2"
+      - check-success=build (stable)
+      - check-success=CodeFactor
+      - check-success=Rust project
+    merge_conditions: []
+    merge_method: squash
+
+pull_request_rules:
+  - name: automatic update of pull requests where more 5 commits behind
+    conditions:
+      - "#commits-behind>5"
+    actions:
+      update:
+  - name: delete head branch after merge
+    conditions:
+      - merged
+    actions:
+      delete_head_branch: {}
+  - name: Automatic merge on approval
+    conditions: []
+    actions:
+      queue:
```
**Stylelint configuration file**

```diff
@@ -1,10 +1,13 @@
 {
   "extends": "stylelint-config-standard",
   "rules": {
     "alpha-value-notation": "number",
-    "selector-class-pattern": null
+    "selector-class-pattern": null,
+    "no-descending-specificity": null
   },
+  "fix": true,
+  "cache": true,
   "overrides": [
     {
       "files": ["*.js"],
       "customSyntax": "postcss-lit"
```
**`CONTRIBUTING.md`**

```diff
@@ -6,11 +6,11 @@
 
 ## Readme
 
-Found a typo, or the Readme is not very clear as it should be?. Consider Submitting a Pull request to the [Readme](https://github.com/neon-mmd/websurfx/blob/master/README.md) to add to or improve the Readme. This will help future users to better understand the project more clearly.
+Did you find a typo, or the Readme is not as clear as it should be? Consider Submitting a Pull request to the [Readme](https://github.com/neon-mmd/websurfx/blob/master/README.md) to add to or improve the Readme. This will help future users to better understand the project more clearly.
 
-## Help Improve Github Actions
+## Help Improve GitHub Actions
 
-Know how to fix or improve a github action?. Consider Submitting a Pull request to help make automation and testing better.
+Know how to fix or improve a GitHub action? Consider Submitting a Pull request to help make automation and testing better.
 
 ## Source Code
 
@@ -51,4 +51,4 @@
 The _rolling branch_ is where we intend all contributions should go.
 
-We appreciate any contributions whether be of any size or topic and suggestions to help improve the Websurfx project. Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project than feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel.
+We appreciate any contributions whether of any size or topic and suggestions to help improve the Websurfx project. Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project then feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel.
```
**`Cargo.lock`** (generated, 3,282 lines changed; diff collapsed)
**`Cargo.toml`** (192 lines changed)

```diff
@@ -1,49 +1,117 @@
 [package]
 name = "websurfx"
-version = "1.0.1"
+version = "1.21.0"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
 license = "AGPL-3.0"
 
 [[bin]]
 name = "websurfx"
 test = true
 bench = false
 path = "src/bin/websurfx.rs"
 
 [dependencies]
-reqwest = {version="0.11.20",features=["json"]}
-tokio = {version="1.32.0",features=["rt-multi-thread","macros"]}
-serde = {version="1.0.188",features=["derive"]}
-handlebars = { version = "4.4.0", features = ["dir_source"] }
-scraper = {version="0.17.1"}
-actix-web = {version="4.4.0", features = ["cookies"]}
-actix-files = {version="0.6.2"}
-actix-cors = {version="0.6.4"}
-serde_json = {version="1.0.105"}
-fake-useragent = {version="0.1.3"}
-env_logger = {version="0.10.0"}
-log = {version="0.4.20"}
-mlua = {version="0.8.10", features=["luajit", "vendored"]}
-redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true}
-md5 = {version="0.7.0"}
-rand={version="0.8.5"}
-once_cell = {version="1.18.0"}
-error-stack = {version="0.4.0"}
-async-trait = {version="0.1.73"}
-regex = {version="1.9.4", features=["perf"]}
-smallvec = {version="1.11.0", features=["union", "serde"]}
-futures = {version="0.3.28"}
-dhat = {version="0.3.2", optional = true}
-mimalloc = { version = "0.1.38", default-features = false }
-async-once-cell = {version="0.5.3"}
-actix-governor = {version="0.4.1"}
-mini-moka = { version="0.10", optional = true}
+reqwest = { version = "0.12.5", default-features = false, features = [
+    "rustls-tls",
+    "brotli",
+    "gzip",
+    "http2",
+    "socks",
+] }
+tokio = { version = "1.43.0", features = [
+    "rt-multi-thread",
+    "macros",
+    "fs",
+    "io-util",
+], default-features = false }
+serde = { version = "1.0.215", default-features = false, features = ["derive"] }
+serde_json = { version = "1.0.134", default-features = false }
+bincode = {version="1.3.3", default-features=false}
+maud = { version = "0.26.0", default-features = false, features = [
+    "actix-web",
+] }
+scraper = { version = "0.21.0", default-features = false }
+actix-web = { version = "4.9.0", features = [
+    "cookies",
+    "macros",
+    "compress-brotli",
+], default-features = false }
+actix-files = { version = "0.6.5", default-features = false }
+actix-cors = { version = "0.7.0", default-features = false }
+fake-useragent = { version = "0.1.3", default-features = false }
+env_logger = { version = "0.11.6", default-features = false }
+log = { version = "0.4.21", default-features = false }
+mlua = { version = "0.10.2", features = [
+    "luajit",
+    "vendored",
+], default-features = false }
+redis = { version = "0.28.1", features = [
+    "tokio-comp",
+    "connection-manager",
+    "tcp_nodelay"
+], default-features = false, optional = true }
+blake3 = { version = "1.5.4", default-features = false }
+error-stack = { version = "0.4.0", default-features = false, features = [
+    "std",
+] }
+async-trait = { version = "0.1.80", default-features = false }
+regex = { version = "1.11.1", features = ["perf"], default-features = false }
+futures = { version = "0.3.31", default-features = false, features = ["alloc"] }
+dhat = { version = "0.3.2", optional = true, default-features = false }
+mimalloc = { version = "0.1.43", default-features = false }
+async-once-cell = { version = "0.5.4", default-features = false }
+actix-governor = { version = "0.8.0", default-features = false }
+moka = { version = "0.12.8", optional = true, default-features = false, features = [
+    "future",
+] }
+async-compression = { version = "0.4.12", default-features = false, features = [
+    "brotli",
+    "tokio",
+], optional = true }
+chacha20poly1305 = { version = "0.10.1", default-features = false, features = [
+    "alloc",
+    "getrandom",
+], optional = true }
+chacha20 = { version = "0.9.1", default-features = false, optional = true }
+base64 = { version = "0.21.5", default-features = false, features = [
+    "std",
+], optional = true }
+cfg-if = { version = "1.0.0", default-features = false, optional = true }
+keyword_extraction = { version = "1.5.0", default-features = false, features = [
+    "tf_idf",
+    "rayon",
+] }
+stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
+thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
+    "moby",
+]}
+
+actix-multipart = { version = "0.7.2", default-features = false, features = [
+    "derive",
+    "tempfile",
+]}
+itertools = {version = "0.14.0", default-features = false}
 
 [dev-dependencies]
-rusty-hook = "^0.11.2"
-criterion = "0.5.1"
-tempfile = "3.8.0"
+rusty-hook = { version = "^0.11.2", default-features = false }
+criterion = { version = "0.5.1", default-features = false }
+tempfile = { version = "3.14.0", default-features = false }
 
 [build-dependencies]
+lightningcss = { version = "1.0.0-alpha.57", default-features = false, features = [
+    "grid",
+] }
+# Disabled until bug fixing update
+# minify-js = { version = "0.6.0", default-features = false }
+# Temporary fork with fix
+minify-js = { git = "https://github.com/RuairidhWilliamson/minify-js", branch = "master", version = "0.6.0", default-features = false}
 
 [profile.dev]
 opt-level = 0
 debug = true
-split-debuginfo = '...'
+split-debuginfo = 'unpacked'
 debug-assertions = true
 overflow-checks = true
 lto = false
@@ -59,15 +127,65 @@ debug = false # This should only be commented when testing with dhat profiler
 split-debuginfo = '...'
 debug-assertions = false
 overflow-checks = false
-lto = true
+lto = 'thin'
 panic = 'abort'
 incremental = false
 codegen-units = 1
 rpath = false
-strip = "debuginfo"
+strip = "symbols"
+
+[profile.bsr1]
+inherits = "release"
+opt-level = "s"
+
+[profile.bsr2]
+inherits = "bsr1"
+opt-level = "z"
+
+[profile.lpcb1]
+inherits = "release"
+codegen-units = 16
+
+[profile.lpcb2]
+inherits = "lpcb1"
+lto = "off"
+
+[profile.lpcb3]
+inherits = "lpcb2"
+opt-level = 2
+
+[profile.bsr_and_lpcb1]
+inherits = "lpcb1"
+opt-level = "s"
+
+[profile.bsr_and_lpcb2]
+inherits = "lpcb2"
+opt-level = "s"
+
+[profile.bsr_and_lpcb3]
+inherits = "lpcb3"
+opt-level = "s"
+
+[profile.bsr_and_lpcb4]
+inherits = "lpcb1"
+opt-level = "z"
+
+[profile.bsr_and_lpcb5]
+inherits = "lpcb1"
+opt-level = "z"
+
+[profile.bsr_and_lpcb6]
+inherits = "lpcb1"
+opt-level = "z"
 
 [features]
+use-synonyms-search = ["thesaurus/static"]
 default = ["memory-cache"]
-dhat-heap = ["dep:dhat"]
-memory-cache = ["dep:mini-moka"]
-redis-cache = ["dep:redis"]
+dhat-heap = ["dep:dhat"]
+memory-cache = ["dep:moka"]
+redis-cache = ["dep:redis", "dep:base64"]
+compress-cache-results = ["dep:async-compression", "dep:cfg-if"]
+encrypt-cache-results = ["dep:chacha20poly1305", "dep:chacha20"]
+cec-cache-results = ["compress-cache-results", "encrypt-cache-results"]
+experimental-io-uring = ["actix-web/experimental-io-uring"]
+use-non-static-synonyms-search = ["thesaurus"]
```
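The `[features]` table above gates the cache backends, so the backend is chosen at compile time. Example invocations implied by those feature definitions (a sketch, not an exhaustive list):

```sh
cargo build --release                                               # default: in-memory cache (moka)
cargo build --release --no-default-features --features redis-cache  # redis only
cargo build --release --features redis-cache                        # hybrid: memory + redis
cargo build --release --no-default-features                         # no cache
cargo build --profile bsr1                                          # one of the size-tuned custom profiles
```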
**`Dockerfile`** (53 lines changed)

```diff
@@ -1,40 +1,43 @@
-FROM rust:latest AS chef
+FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18 AS chef
 # We only pay the installation cost once,
 # it will be cached from the second build onwards
+RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev upx perl build-base
 RUN cargo install cargo-chef --locked
 
 WORKDIR /app
 
 FROM chef AS planner
-COPY . .
+COPY ./Cargo.toml ./Cargo.lock ./
 RUN cargo chef prepare --recipe-path recipe.json
 
-FROM chef AS builder
+FROM --platform=$BUILDPLATFORM chef AS builder
 COPY --from=planner /app/recipe.json recipe.json
-# Build dependencies - this is the caching Docker layer!
-# Uncomment the line below if you want to use the `hybrid` caching feature.
-# RUN cargo chef cook --release --features redis-cache --recipe-path recipe.json
-# Comment the line below if you don't want to use the `In-Memory` caching feature.
-RUN cargo chef cook --release --recipe-path recipe.json
-# Uncomment the line below if you want to use the `no cache` feature.
-# RUN cargo chef cook --release --no-default-features --recipe-path recipe.json
-# Uncomment the line below if you want to use the `redis` caching feature.
-# RUN cargo chef cook --release --no-default-features --features redis-cache --recipe-path recipe.json
+# Specify the cache type to use (memory, redis, hybrid, no-cache)
+ARG CACHE=memory
+ENV CACHE=${CACHE}
+# Cook the dependencies
+RUN export ARCH=$(uname -m) && \
+    if [ "$CACHE" = "memory" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --recipe-path recipe.json ; \
+    else if [ "$CACHE" = "redis" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --no-default-features --features redis-cache --recipe-path recipe.json ; \
+    else if [ "$CACHE" = "hybrid" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --features redis-cache --recipe-path recipe.json ; \
+    else if [ "$CACHE" = "no-cache" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --no-default-features --recipe-path recipe.json ; fi ; fi ; fi ; fi
+# Copy the source code and public folder
+COPY ./src ./src
+COPY ./public ./public
+# Build the application
+RUN export ARCH=$(uname -m) && \
+    if [ "$CACHE" = "memory" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl ; \
+    else if [ "$CACHE" = "redis" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --no-default-features --features redis-cache ; \
+    else if [ "$CACHE" = "hybrid" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --features redis-cache ; \
+    else if [ "$CACHE" = "no-cache" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --no-default-features ; fi ; fi ; fi ; fi
+# Optimise binary size with UPX
+RUN export ARCH=$(uname -m) \
+    && upx --lzma --best /app/target/$ARCH-unknown-linux-musl/release/websurfx \
+    && cp /app/target/$ARCH-unknown-linux-musl/release/websurfx /usr/local/bin/websurfx
 
-# Build application
-COPY . .
-# Uncomment the line below if you want to use the `hybrid` caching feature.
-# RUN cargo install --path . --features redis-cache
-# Comment the line below if you don't want to use the `In-Memory` caching feature.
-RUN cargo install --path .
-# Uncomment the line below if you want to use the `no cache` feature.
-# RUN cargo install --path . --no-default-features
-# Uncomment the line below if you want to use the `redis` caching feature.
-# RUN cargo install --path . --no-default-features --features redis-cache
 
 # We do not need the Rust toolchain to run the binary!
-FROM gcr.io/distroless/cc-debian12
+FROM --platform=$BUILDPLATFORM scratch
 COPY --from=builder /app/public/ /opt/websurfx/public/
 VOLUME ["/etc/xdg/websurfx/"]
-COPY --from=builder /usr/local/cargo/bin/* /usr/local/bin/
+COPY --from=builder /usr/local/bin/websurfx /usr/local/bin/websurfx
 CMD ["websurfx"]
```
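Since the `CACHE` build argument selects which branch of the `cargo chef cook` and `cargo build` case ladders runs, the backend can be picked per image without editing the file. A minimal sketch, assuming the tag name `websurfx:hybrid` and the default port 8080:

```sh
docker build --build-arg CACHE=hybrid -t websurfx:hybrid .
# Mount a config directory into the declared volume and expose the port.
docker run -v "$(pwd)/websurfx/:/etc/xdg/websurfx/" -p 8080:8080 websurfx:hybrid
```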
**`README.md`** (62 lines changed)

```diff
@@ -4,10 +4,10 @@
 <p align="center">
   <b align="center"><a href="README.md">Readme</a></b> |
   <b><a href="https://discord.gg/SWnda7Mw5u">Discord</a></b> |
-  <b><a href="../../tree/HEAD/docs/instances.md">Instances</a></b> |
+  <b><a href="docs/instances.md">Instances</a></b> |
   <b><a href="https://discord.gg/VKCAememnr">User Showcase</a></b> |
   <b><a href="https://github.com/neon-mmd/websurfx">GitHub</a></b> |
-  <b><a href="../../tree/HEAD/docs/">Documentation</a></b>
+  <b><a href="docs">Documentation</a></b>
   <br /><br />
   <a
     href="https://github.com/awesome-selfhosted/awesome-selfhosted#search-engines"
@@ -32,7 +32,13 @@
   <a href=""
     ><img
       alt="Maintenance"
-      src="https://img.shields.io/maintenance/yes/2023?style=flat-square"
+      src="https://img.shields.io/maintenance/yes/2024?style=flat-square"
     />
   </a>
+  <a href="https://www.codefactor.io/repository/github/neon-mmd/websurfx">
+    <img
+      alt="CodeFactor"
+      src="https://www.codefactor.io/repository/github/neon-mmd/websurfx/badge"
+    />
+  </a>
   <a href="https://gitpod.io/#https://github.com/neon-mmd/websurfx">
@@ -61,7 +67,7 @@
 - **Getting Started**
   - [🔭 Preview](#preview-)
   - [🚀 Features](#features-)
-  - [🔗 Instances](instances-)
+  - [🔗 Instances](#instances-)
   - [🛠️ Installation and Testing](#installation-and-testing-%EF%B8%8F)
   - [🔧 Configuration](#configuration-)
 - **Feature Overview**
@@ -69,11 +75,11 @@
   - [🌍 Multi-Language Support](#multi-language-support-)
 - **Community**
   - [📊 System Requirements](#system-requirements-)
-  - [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-)
+  - [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-%EF%B8%8F)
   - [📣 More Contributors Wanted](#more-contributors-wanted-)
   - [💖 Supporting Websurfx](#supporting-websurfx-)
   - [📘 Documentation](#documentation-)
-  - [🛣️ Roadmap](#roadmap-)
+  - [🛣️ Roadmap](#roadmap-%EF%B8%8F)
   - [🙋 Contributing](#contributing-)
   - [📜 License](#license-)
   - [🤝 Credits](#credits-)
@@ -99,13 +105,19 @@
 
 # Instances 🔗
 
-> For a full list of publicly available community driven `websurfx` instances to test or for daily use. see [**Instances**](./docs/instances.md)
+> For a full list of publicly available community driven `websurfx` instances to test or for daily use. see [**Instances**](docs/instances.md)
 
 **[⬆️ Back to Top](#--)**
 
 # Features 🚀
 
-- 🎨 Make Websurfx uniquely yours with twelve color schemes provided by default. It also supports creation of custom themes and color schemes in a quick and easy way, so unleash your creativity!
+- 🎨 Make Websurfx uniquely yours with the twelve color schemes provided by default. It also supports the creation of custom themes and color schemes in a quick and easy way, so unleash your creativity!
+- 🚀 Easy to setup with Docker or on bare metal with various installation and deployment options.
+- ⛔ Search filtering to filter search results based on four different levels.
+- 💾 Different caching levels focusing on reliability, speed and resiliancy.
+- ⬆️ Organic Search results (with ranking algorithm builtin to rerank the search results according to user's search query.).
+- 🔒 Different compression and encryption levels focusing on speed and privacy.
+- 🧪 Experimental IO-uring feature for Linux operating systems focused on performance of the engine.
 - 🔐 Fast, private, and secure
 - 🆓 100% free and open source
 - 💨 Ad-free and clean results
@@ -115,7 +127,7 @@
 
 # Installation and Testing 🛠️
 
-> For full setup instructions, see: [**Installation**](./docs/installation.md)
+> For full setup instructions, see: [**Installation**](docs/installation.md)
 
 Before you can start building `websurfx`, you will need to have `Cargo` installed on your system. You can find the installation instructions [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
 
@@ -132,16 +144,16 @@ redis-server --port 8082 &
 
 Once you have started the server, open your preferred web browser and navigate to <http://127.0.0.1:8080> to start using Websurfx.
 
-> **Note**
+> [!Note]
 >
 > 1. The project is no longer in the testing phase and is now ready for production use.
-> 2. There are many features still missing like `support for image search`, `different categories`, `quick apps`, etc but they will be added soon as part of future releases.
+> 2. There are many features still missing, like `support for image search`, `different categories`, `quick apps`, etc., but they will be added soon as part of future releases.
 
 **[⬆️ Back to Top](#--)**
 
 # Configuration 🔧
 
-> For full configuration instructions, see: [**Configuration**](./docs/configuration.md)
+> For full configuration instructions, see: [**Configuration**](docs/configuration.md)
 
 Websurfx is configured through the config.lua file, located at `websurfx/config.lua`.
 
@@ -149,7 +161,7 @@
 
 # Theming 🎨
 
-> For full theming and customization instructions, see: [**Theming**](./docs/theming.md)
+> For full theming and customization instructions, see: [**Theming**](docs/theming.md)
 
 Websurfx comes loaded with several themes and color schemes, which you can apply and edit through the config file. It also supports custom themes and color schemes using CSS, allowing you to make it truly yours.
 
@@ -157,8 +169,8 @@
 
 # Multi-Language Support 🌍
 
-> **Note**
-> Currently, we do not support other languages but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry.
+> [!Note]
+> Currently, we do not support other languages, but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry.
 
 **[⬆️ Back to Top](#--)**
 
@@ -172,7 +184,7 @@
 
 ## Why Websurfx?
 
-The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engine, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results which most engines don't have a fool-proof answer to. Until now. With Websurfx I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and Micro-apps or Quick Results (providing a calculator, currency exchanges, etc in the search results).
+The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engines, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results, which most engines don't have a full-proof answer to. Until now. With Websurfx, I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and micro-apps or quick results (providing a calculator, currency exchanges, etc. in the search results).
 
 ## Why AGPLv3?
 
@@ -180,7 +192,7 @@
 
 ## Why Rust?
 
-Websurfx is based on Rust due to its memory safety features, which prevents vulnerabilities and makes the codebase more secure. Rust is also faster than C++, contributing to Websurfx's speed and responsiveness. Finally, the Rust ownership and borrowing system enables secure concurrency and thread safety in the program.
+Websurfx is based on Rust due to its memory safety features, which prevent vulnerabilities and make the codebase more secure. Rust is also faster than C++, contributing to Websurfx's speed and responsiveness. Finally, the Rust ownership and borrowing system enables secure concurrency and thread safety in the program.
 
 **[⬆️ Back to Top](#--)**
 
@@ -192,16 +204,16 @@
 
 # Supporting Websurfx 💖
 
-> For full details and other ways you can help out, see: [**Contributing**]()
+> For full details and other ways you can help out, see: [**Contributing**](CONTRIBUTING.md)
 
 If you use Websurfx and would like to contribute to its development, we're glad to have you on board! Contributions of any size or type are always welcome, and we will always acknowledge your efforts.
 
 Several areas that we need a bit of help with at the moment are:
 
 - **Better and more color schemes**: Help fix color schemes and add other famous color schemes.
-- **Improve evasion code for bot detection** - Help improve code related to evading IP blocking and emulating human behaviors located in everyone's engine file.
-- **Logo** - Help create a logo for the project and website.
-- **Docker Support** - Help write a Docker Compose file for the project.
+- **Improve evasion code for bot detection**: Help improve code related to evading IP blocking and emulating human behaviors located in everyone's engine file.
+- **Logo**: Help create a logo for the project and website.
+- **Docker Support**: Help write a Docker Compose file for the project.
 - Submit a PR to add a new feature, fix a bug, update the docs, add a theme, widget, or anything else.
 - Star Websurfx on GitHub.
 
@@ -209,8 +221,8 @@ Several areas that we need a bit of help with at the moment are:
 
 # Documentation 📘
 
-> **Note**
-> We welcome any contributions to the [documentation](../../tree/HEAD/docs/) as this will benefit everyone who uses this project.
+> [!Note]
+> We welcome any contributions to the [documentation](docs) as this will benefit everyone who uses this project.
 
 **[⬆️ Back to Top](#--)**
 
@@ -230,7 +242,7 @@
 
 ## Developer
 
-If you are a developer, have a look at the [CONTRIBUTING.org](CONTRIBUTING.md) document for more information.
+If you are a developer, have a look at the [CONTRIBUTING.md](CONTRIBUTING.md) document for more information.
 
 **[⬆️ Back to Top](#--)**
 
@@ -258,7 +270,7 @@ We would like to thank the following people for their contributions and support:
 
 <p>
   <a href="https://github.com/neon-mmd/websurfx/stargazers">
-    <img src="https://reporoster.com/stars/dark/neon-mmd/websurfx" />
+    <img src="http://reporoster.com/stars/dark/neon-mmd/websurfx"/>
   </a>
 </p>
```
85
build.rs
Normal file
|
@ -0,0 +1,85 @@
//! A build module of the application which minifies the project's css and js files on build, which
//! helps reduce the initial page load time by serving smaller files.

#![forbid(unsafe_code, clippy::panic)]
#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
#![warn(clippy::cognitive_complexity, rust_2018_idioms)]

// ------- Imports -------
use lightningcss::stylesheet::{MinifyOptions, ParserOptions, PrinterOptions, StyleSheet};
use minify_js::{minify, Session, TopLevelMode};
use std::{
    fs::{read_dir, read_to_string, File, OpenOptions},
    io::{Read, Write},
};

// ------- Constants -------
/// A constant for the path to the public/theme folder in the codebase.
const COMMON_STATIC_SOURCE_CODE_FOLDER: &str = "./public/static/";
/// A constant for the names of the folders located in the "/public/static/"
/// folder in the codebase which contain the css files to be minified.
const STYLE_FOLDERS: [&str; 2] = ["themes", "colorschemes"];
/// A constant for the environment variable name.
const PACKAGE_ENVIRONMENT_VARIABLE: &str = "PKG_ENV";
/// A constant for the `prod` value of the `pkg_env` environment variable.
const PRODUCTION_PKG_ENV_VARIABLE_VALUE: &str = "prod";

/// A main function which minifies both css and js files using `lightningcss` and `minify_js` when
/// the `PKG_ENV` environment variable is set to the value of `prod`.
///
/// # Error
///
/// This function returns the unit type when the minification process runs successfully, otherwise
/// it returns a standard error.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    if let Ok(pkg_env_var) = std::env::var(PACKAGE_ENVIRONMENT_VARIABLE) {
        if pkg_env_var.to_lowercase() == PRODUCTION_PKG_ENV_VARIABLE_VALUE {
            // A for loop that loops over each file contained in the `colorschemes` and `themes` folders
            // and minifies it using the `lightningcss` minifier.
            for folder_name in STYLE_FOLDERS {
                for file in read_dir(format!("{COMMON_STATIC_SOURCE_CODE_FOLDER}{folder_name}/"))? {
                    let file_path = file?.path();
                    let source = read_to_string(file_path.clone())?;

                    let mut stylesheet = StyleSheet::parse(&source, ParserOptions::default())
                        .map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;

                    stylesheet.minify(MinifyOptions::default())?;
                    let minified_css = stylesheet.to_css(PrinterOptions::default())?;

                    // Truncate the original file and overwrite it with the minified css.
                    let mut old_css_file = OpenOptions::new()
                        .write(true)
                        .truncate(true)
                        .open(file_path)?;
                    old_css_file.write_all(minified_css.code.as_bytes())?;
                    old_css_file.flush()?;
                }
            }

            // A for loop that loops over each file contained in the `public/static` folder and minifies
            // it using the `minify-js` minifier.
            for file in read_dir(COMMON_STATIC_SOURCE_CODE_FOLDER)? {
                let file_path = file?.path();
                if file_path.is_file() {
                    let mut code = Vec::new();
                    let mut js_file = File::open(file_path.clone())?;
                    js_file.read_to_end(&mut code)?;

                    drop(js_file);

                    let mut out = Vec::new();
                    minify(&Session::new(), TopLevelMode::Global, &code, &mut out)
                        .map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;

                    // Truncate the original file and overwrite it with the minified js.
                    let mut old_js_file = OpenOptions::new()
                        .write(true)
                        .truncate(true)
                        .open(file_path)?;
                    old_js_file.write_all(&out)?;
                    old_js_file.flush()?;
                }
            }
        }
    }
    Ok(())
}
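
> A quick way to exercise this build script: the minification only runs when the `PKG_ENV` environment variable is set to `prod` (see the note added to the installation docs below). Assuming a `bash`-like shell, a production build would therefore look like:
>
> ```shell
> PKG_ENV=prod cargo build -r
> ```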
15 dev.Dockerfile Normal file
@@ -0,0 +1,15 @@
# Create Builder image
FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18

# Install required dependencies
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base

RUN cargo install cargo-watch --locked

# Create project directory
RUN mkdir -p /project
WORKDIR /project

ENV RUSTFLAGS="-C target-feature=-crt-static"

ENTRYPOINT ["cargo"]
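
> This image is normally built and run through `dev.docker-compose.yml` (shown next), which mounts the project into `/project` and passes arguments to the `cargo` entrypoint. If you want to build it by hand, a sketch, assuming the `websurfx:dev` tag that the compose file expects, would be:
>
> ```shell
> docker build -f dev.Dockerfile -t websurfx:dev .
> ```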
26 dev.docker-compose.yml Normal file
@@ -0,0 +1,26 @@
---
version: "3.9"
services:
  redis:
    container_name: redis
    image: redis:6.2.5-alpine
    tty: true
    hostname: surfx-redis
  websurx:
    container_name: websurx-dev
    image: websurfx:dev
    working_dir: /project
    tty: true
    build:
      context: .
      dockerfile: dev.Dockerfile
    ports:
      - 8080:8080
    volumes:
      - type: bind
        source: .
        target: /project
    command:
      - watch
      - -x
      - run
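
> With the two files above in place, the whole dev stack (a Redis container plus the app rebuilt on change via `cargo watch`) can be brought up with:
>
> ```shell
> docker compose -f dev.docker-compose.yml up
> ```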

@@ -15,6 +15,4 @@ services:
      - ./websurfx/:/etc/xdg/websurfx/
    # Uncomment the following lines if you are using the `hybrid` or `redis` caching feature.
    # redis:
+   #   image: redis:latest
-   #   ports:
-   #     - 6379:6379
-   #   image: redis:latest

@@ -15,5 +15,6 @@

# Developers

+- [Developing](./developing.md)
- [**Contribute**](https://github.com/neon-mmd/websurfx/blob/master/CONTRIBUTING.md)
- [**Coding style**](https://rust-lang.github.io/api-guidelines/naming.html)

@@ -2,14 +2,14 @@

## Installed From Source

-If you have built `websurfx` from source then the configuration file will be located under project directory (codebase) at `websurfx/`
+If you have built `websurfx` from the source then the configuration file will be located under the project directory (codebase) at `websurfx/`

-> **Note**
-> If you have built websurfx with unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
+> [!Note]
+> If you have built websurfx with an unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under the project directory (codebase) to `~/.config/websurfx/`, make the changes there, and rerun the websurfx server. _This is only available from the unstable/rolling/edge version_.

## Installed From Package

-If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` and make the changes there and rerun the websurfx server.
+If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/`, make the changes there, and rerun the websurfx server.
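
> For example, assuming the packaged config is `/etc/xdg/websurfx/config.lua`, copying it into place before editing would look like:
>
> ```shell
> mkdir -p ~/.config/websurfx
> cp /etc/xdg/websurfx/config.lua ~/.config/websurfx/
> ```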

Some of the configuration options provided in the file are stated below. These are subdivided into the following categories:

@@ -42,13 +42,13 @@ Some of the configuration options provided in the file are stated below. These a
>
> - Level 0 - With this level no search filtering occurs.
> - Level 1 - With this level some search filtering occurs.
-> - Level 2 - With this level the upstream search engines are restricted to send sensitive contents like NSFW search results, etc.
-> - Level 3 - With this level the regex based filter lists is used alongside level 2 to filter more search results that have slipped in or custom results that needs to be filtered using the filter lists.
-> - Level 4 - This level is similar to level 3 except in this level the regex based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
+> - Level 2 - With this level the upstream search engines are restricted to sending sensitive content like NSFW search results, etc.
+> - Level 3 - With this level the regex-based filter lists are used alongside level 2 to filter more search results that have slipped in or custom results that need to be filtered using the filter lists.
+> - Level 4 - This level is similar to level 3, except at this level the regex-based filter lists are used to disallow users from searching for sensitive or disallowed content. This level could be useful if you are a parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
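
> For example, assuming the option key in `config.lua` is `safe_search`, setting `safe_search = 3` would enable the regex-based filter lists on top of the level 2 filtering.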

## Website

-- **colorscheme:** The colorscheme name which should be used for the website theme (the name should be in accordance to the colorscheme file name present in `public/static/colorschemes` folder).
+- **colorscheme:** The colorscheme name which should be used for the website theme (the name should match a colorscheme file name present in the `public/static/colorschemes` folder).

> By default we provide 12 colorschemes to choose from; these are:
>

@@ -65,19 +65,27 @@ Some of the configuration options provided in the file are stated below. These a
> 11. tokyo-night
> 12. tomorrow-night

-- **theme:** The theme name which should be used for the website (again, the name should be in accordance to the theme file name present in `public/static/themes` folder).
+- **theme:** The theme name that should be used for the website (again, the name should match a theme file name present in the `public/static/themes` folder).

> By default we provide 1 theme to choose from; this is:
>
> 1. simple

+- **animation:** The animation name that should be used for the website (again, the name should match an animation file name present in the `public/static/animations` folder).
+
+> By default we provide 1 animation to choose from; this is:
+>
+> 1. simple-frosted-glow

## Cache

-- **redis_url:** Redis connection url address on which the client should connect on.
+- **redis_url:** Redis connection URL address on which the client should connect.
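
> As a concrete example, assuming a local redis server started with `redis-server --port 8082 &` (as used elsewhere in these docs), the corresponding value would be `redis://127.0.0.1:8082`.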

> **Note**
> This option can be commented out if you have compiled the app without the `redis-cache` feature. For more information, see [**building**](./building.md).

- **cache_expiry_time:** The maximum time the server will store the cache for, after which it flushes/removes/expires/invalidates the cached results. (The value provided to this option should be in seconds, and it should be greater than or equal to 60 seconds.)

## Search Engines

- **upstream_search_engines:** Select from the different upstream search engines from which the results should be fetched.
643 docs/developing.md Normal file
@@ -0,0 +1,643 @@
# Developing

This page of the docs outlines how to get **Websurfx** up and running in a development environment, and covers the common workflow, the different ways to work on the project, a high-level overview of how the project works, the project structure, and the best practices to follow when working on the project.

<details>
  <summary><b>Table of Contents</b></summary>
  <p>

- [Setting up the Development Environment](#setting-up-the-development-environment)
  - [Local Development](#local-development-)
  - [Gitpod](#gitpod-)
  - [NixOS Dev Shell using Nix Flake](#nixos-dev-shell-using-nix-flake-)
  - [Local Development with Docker Compose](#local-development-with-docker-compose-)
  - [Project Commands](#project-commands)
  - [Environment Variables](#environment-variables)
- [Git Strategy](#git-strategy)
  - [Flow](#git-flow)
  - [Branches](#git-branch-naming)
  - [Commit emojis](#commit-emojis)
  - [PR Guidelines](#pr-guidelines)
- [Resources for Beginners](#resources-for-beginners)
- [App Info](#app-info)
  - [Code Style Guide](#style-guide)
- [Application Structure](#application-structure)
- [Development Tools](#development-tools)
- [Misc / Notes](#notes)

  </p>
</details>

## Setting up the Development Environment

By default, we provide four different ways to work on the project. These are as follows:

- [Local Development](#local-development-)
- [Gitpod](#gitpod-)
- [NixOS Dev Shell using Nix Flake](#nixos-dev-shell-using-nix-flake-)
- [Local Development with Docker Compose](#local-development-with-docker-compose-)

The different methods are explained in depth below.

### Local Development

This section covers how to set up the project for development on your local machine (bare metal).

#### Prerequisites

Before you start working on the project, you will need the following packages installed on your system:

- The latest version of `cargo` installed on your system, which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
- The latest version of `npm` installed on your system, which is required to allow the installation of other tools necessary for the project. The installation instructions for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
- The latest version of `redis` installed on your system, which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://redis.io/docs/getting-started/installation/).
- The latest version of `stylelint` installed on your system, which will be used by the pre-commit checks to lint the code before a commit can be made, to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed. To install `stylelint` and its plugins, run the following command:

  ```shell
  $ npm i -g stylelint stylelint-config-standard postcss-lit
  ```

  > [!Note]
  > In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access methods.

- `Cargo-watch` installed on your system, which will allow you to auto-build the project when any changes occur in the source code files in the codebase (`websurfx` directory). Before you install `cargo-watch` on your system, make sure you have `cargo` installed. To install `cargo-watch`, run the following command:

  ```shell
  cargo install cargo-watch
  ```

- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
- Finally, the latest version of `Docker` installed on your system, which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).

> [!Note]
> For **rolling release Linux distributions (distros)**, the above-mentioned required packages, except for `stylelint` and `cargo-watch`, can also be installed via the distro-specific package manager.
>
> **For Example:**
>
> On `arch linux` the following packages can be installed by following the links to the installation instructions provided below:
>
> - `Cargo`: https://wiki.archlinux.org/title/rust
> - `Npm`: https://wiki.archlinux.org/title/Node.js
> - `Redis`: https://wiki.archlinux.org/title/redis
> - `Git`: https://wiki.archlinux.org/title/git
> - `Docker`: https://wiki.archlinux.org/title/docker
>
> But we do not recommend this method for **stable release Linux distros**, as they tend to not provide very up-to-date versions of the required packages.

#### Setting up Pre-commit Checks

Before you set up the `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following commands:

```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```

Once you have finished running the above commands, run the following command to set up the `pre-commit` checks:

```shell
cargo test
```

Running the above-mentioned command will automatically set up all the pre-commit checks in the project.

#### Running the Project

If you have followed the above section then you should have a cloned repository folder present on your system. In the same directory, run the following command to run the project:

```shell
cargo watch -q -x "run" -w "."
```

This will compile the app by default with the **In-Memory caching** feature. To compile, run, and test the app with other features, follow the build options listed below:

##### Hybrid Cache

To build and run the app with the `Hybrid caching` feature, run the following command:

```shell
cargo watch -q -x "run --features redis-cache" -w .
```

##### No Cache

To build and run the search engine with the `No caching` feature, run the following command:

```shell
cargo watch -q -x "run --no-default-features" -w .
```

##### Redis Cache

To build the search engine with the `Redis caching` feature, run the following command:

```shell
cargo watch -q -x "run --no-default-features --features redis-cache" -w .
```

> Optionally, if you have built and run the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above) then you will need to start the redis server alongside the app, which can be done by running the following command:
>
> ```shell
> redis-server --port 8082 &
> ```

Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.

> For more info on all the project commands, see: [**Project Commands**](#project-commands-)

### Gitpod

This section covers how to use and set up the Gitpod development environment for working on the project.

> [!Note]
> By default the project only supports the VSCode **IDE/Editor** for Gitpod.

#### Launching Gitpod

> For a full guide on how to fork the project. See: [**Forking**](#)

To launch Gitpod and start working on the project from your fork of Websurfx, just navigate to the following link:

```text
https://gitpod.io/#https://github.com/<your_github_username>/websurfx
```

> For a full guide on how to use it and how to use it in different ways, see [**Learn Gitpod**](https://piped.kavin.rocks/playlist?list=PL3TSF5whlprXVp-7Br2oKwQgU4bji1S7H)

#### Default Plugins

The project by default provides a set of pre-installed plugins for Gitpod, which is done to improve productivity and efficiency while working on the project, and also to make working on the project more fun and engaging. These can be customized from within the `Gitpod` instance.

The list of all the pre-installed plugins is given below:

**Productivity**

- [CodeLLDB](https://open-vsx.org/extension/vadimcn/vscode-lldb): Provides a native debugger for the Rust programming language.
- [GitHub Actions](https://open-vsx.org/extension/cschleiden/vscode-github-actions): Provides an easy way to work with GitHub Actions.
- [rust-analyzer](https://open-vsx.org/extension/rust-lang/rust-analyzer): Provides a language server for the Rust programming language.
- [better-toml](https://open-vsx.org/extension/bungcip/better-toml): Provides support for TOML files.
- [crates](https://open-vsx.org/extension/serayuzgur/crates): Makes managing Rust dependencies easier.
- [Error Lens](https://open-vsx.org/extension/usernamehw/errorlens): Provides better highlighting of errors.
- [markdownlint](https://open-vsx.org/extension/DavidAnson/vscode-markdownlint): Provides a linter for linting markdown documents.
- [Prettier](https://open-vsx.org/extension/esbenp/prettier-vscode): Provides a code formatter.
- [Stylelint](https://open-vsx.org/extension/stylelint/vscode-stylelint): Provides a linter for CSS files.
- [ESLint](https://open-vsx.org/extension/dbaeumer/vscode-eslint): Provides a linter for JS files.
- [Syntax Highlighter](https://open-vsx.org/extension/evgeniypeshkov/syntax-highlighter): Provides better syntax highlighting for code.
- [Docker](https://open-vsx.org/extension/ms-azuretools/vscode-docker): Makes handling Docker files easier.
- [indent-rainbow](https://open-vsx.org/extension/oderwat/indent-rainbow): Highlights code indents for better visualization.
- [Auto Rename Tag](https://open-vsx.org/extension/formulahendry/auto-rename-tag): Provides a way to easily and quickly rename HTML tags.
- [Rust Test Explorer](https://open-vsx.org/extension/Swellaby/vscode-rust-test-adapter): View and run cargo tests easily from a convenient sidebar.
- [Search crates-io](https://open-vsx.org/extension/belfz/search-crates-io): Provides crate suggestions in the `Cargo.toml` file.
- [Test Adapter Converter](https://open-vsx.org/extension/hbenl/test-adapter-converter): A VSCode-native way to view and run tests.
- [Test Explorer UI](https://open-vsx.org/extension/hbenl/vscode-test-explorer): Provides a way to run any test from a convenient sidebar.
- [GitLens](https://open-vsx.org/extension/eamodio/gitlens): Provides a better and more efficient way to manage common git workflows.

> Optionally, if you prefer a more keyboard-centric workflow then we would recommend using the following extension:
>
> - [VSCode Neovim](https://open-vsx.org/extension/asvetliakov/vscode-neovim): Provides complete vim emulation for VSCode.

**Theming**

- [Catppuccin for VSCode](https://open-vsx.org/extension/Catppuccin/catppuccin-vsc): Provides the Catppuccin theme for VSCode.
- [Material Icon Theme](https://open-vsx.org/extension/PKief/material-icon-theme): Provides material design icons for files depending on the file extension.

> If you have more ideas and ways to improve Gitpod for development purposes then feel free to do so by contributing a PR to this project [**here**](https://github.com/neon-mmd/websurfx/pulls).

### NixOS Dev Shell using Nix Flake

This section covers how to set up the project for development using the `NixOS dev-shell`.

#### Pre Setup Requirements

Before you start working on the project, you will need the following packages installed on your system:

- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).

#### Setting up Pre-commit Checks

Before you set up the `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following commands:

```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```

Then run the following command to set up the `NixOS dev-shell`:

```shell
nix develop
```

> You can use `nix-direnv` to simplify entering the `nix-shell`. Its setup is beyond the scope of this guide. Read more about it here: [nix-direnv](https://github.com/nix-community/nix-direnv)

This will add `docker`, `cargo-watch`, and other dev environment essentials to your `nix-shell` so you don't have to install everything imperatively.

After finishing the commands above, run the following command to set up the `pre-commit` checks:

```shell
cargo test
```

Running the above-mentioned command will automatically set up all the pre-commit checks in the project.

#### Post Setup Requirements

The final step is to run

```shell
npm i -D stylelint-config-standard postcss-lit
```

This will add `node_modules` to the current directory.

Run `git commit` and, if everything is set up correctly, it should say that your branch is up to date.

#### Running the Project

If you have followed the above section then you should now be inside a `dev-shell` environment. In the same environment, run the following command to run the project:

```shell
cargo watch -q -x "run" -w "."
```

This will compile the app by default with the **In-Memory caching** feature. To compile, run, and test the app with other features, follow the build options listed below:

##### Hybrid Cache

To build and run the app with the `Hybrid caching` feature, run the following command:

```shell
cargo watch -q -x "run --features redis-cache" -w .
```

##### No Cache

To build and run the search engine with the `No caching` feature, run the following command:

```shell
cargo watch -q -x "run --no-default-features" -w .
```

##### Redis Cache

To build the search engine with the `Redis caching` feature, run the following command:

```shell
cargo watch -q -x "run --no-default-features --features redis-cache" -w .
```

> Optionally, if you have built and run the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above) then you will need to start the redis server alongside the app, which can be done by running the following command:
>
> ```shell
> redis-server --port 8082 &
> ```

Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.

### Local Development with Docker Compose

This section covers how to set up the project for development on your local machine (bare metal) using `docker compose`.

#### Prerequisites

Before you start working on the project, you will need the following packages installed on your system:

- The latest version of `cargo` installed on your system, which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
- The latest version of `npm` installed on your system, which is required to allow the installation of other tools necessary for the project. The installation instructions for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
- The latest version of `stylelint` installed on your system, which will be used by the pre-commit checks to lint the code before a commit can be made, to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed. To install `stylelint`, run the following command:

  ```shell
  $ npm i -g stylelint
  ```

  > [!Note]
  > In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access methods.

- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
- Finally, the latest version of `Docker` installed on your system, which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).

> [!Note]
> For **rolling release Linux distributions (distros)**, all the above-mentioned required packages can also be installed via the distro-specific package manager.
>
> **For Example:**
>
> On `arch linux` the following packages can be installed by following the links to the installation instructions provided below:
>
> - `Cargo`: https://wiki.archlinux.org/title/rust
> - `Npm`: https://wiki.archlinux.org/title/Node.js
> - `Git`: https://wiki.archlinux.org/title/git
> - `Docker`: https://wiki.archlinux.org/title/docker
>
> But we do not recommend this method for **stable release Linux distros**, as they tend to not provide very up-to-date versions of the required packages.

#### Setting up Pre-commit Checks

Before you set up the `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following commands:

```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```

Once you have finished running the above commands, run the following command to set up the `pre-commit` checks:

```shell
cargo test
```

Running the above-mentioned command will automatically set up all the pre-commit checks in the project.

#### Running the Project

If you have followed the above section then you should have a cloned repository folder present on your system. In the same directory, edit the `dev.docker-compose.yml` file as required before running the following command to run the project:

```shell
$ docker compose -f dev.docker-compose.yml up
```

> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access methods.

Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.

### Project Commands

#### Basics

- `cargo build`: Builds the project.

  > [!Note]
  > When you build the project for the first time with the above command, it will compile every dependency in the project, which will then be cached on your system. So the next time you compile the app, it will only compile the new changes.

- `cargo run`: Starts the app and serves the project on http://127.0.0.1:8080.

  > [!Important]
  > You must run the build command first.

#### Development

- `cargo watch -q -x "run" -w .`: Starts the development server with hot reloading.
- `cargo fmt -- --check`: Checks the code for proper formatting.
- `cargo clippy`: Lints code to ensure it follows a consistent, neat style.
- `cargo test`: Runs unit tests, integration tests, and doc tests.

### Environment Variables

All environment variables are optional. Currently, there are not many environment variables used, as most of the user preferences are stored under the `websurfx` folder (located under the codebase (`websurfx` directory)) in the `config.lua` file.

The list of all the available environment variables is given below:

- `PKG_ENV`: Sets the package environment for the app. When it is set to `prod`, the build script minifies the project's CSS and JS files (see `build.rs`) to improve page load speed. This environment variable accepts two values, `dev` or `prod`, as strings.
- `RUST_BACKTRACE`: Rust-specific environment variable useful for getting more elaborate error messages with an error stack to better diagnose the issue. This environment variable accepts three values: `0` (off), `1` (on), and `full` (for the full error stack to be printed out).
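
For instance, assuming a `bash`-like shell, both variables can be set for a single run like so:

```shell
PKG_ENV=prod RUST_BACKTRACE=1 cargo run
```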

## Git Strategy

### Git Flow

Like most Git repos, we are following the [GitHub Flow](https://guides.github.com/introduction/flow) standard; a minimal command sketch follows the list below.

1. Create a branch (or fork if you don't have write access)
2. Code some awesome stuff 🧑‍💻
3. Add, commit, and push your changes to your branch/fork
4. Head over to GitHub and create a Pull Request
5. Fill in the required sections in the template, and hit submit
6. Follow up with any reviews on your code
7. Merge 🎉
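
A minimal sketch of steps 1–3, assuming a hypothetical branch name that follows the naming convention below and a fork cloned as `origin`:

```shell
git checkout -b FEATURE/420_Awesome-feature
git add .
git commit -m "✨ Add an awesome feature"
git push origin FEATURE/420_Awesome-feature
```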

### Git Branch Naming

The format of your branch name should be something similar to: `[TYPE]/[TICKET]_[TITLE]`.
For example, `FEATURE/420_Awesome-feature` or `FIX/690_login-server-error`.

### Commit Emojis

Using a single emoji at the start of each commit message, issue title, and pull request title to indicate the type of task makes the commit ledger, issues, and pull requests easier to understand, and it looks cool.

- 🎨 `:art:` - Improve the structure/format of the code.
- ⚡️ `:zap:` - Improve performance.
- 🔥 `:fire:` - Remove code or files.
- 🐛 `:bug:` - Fix a bug.
- 🚑️ `:ambulance:` - Critical hotfix.
- ✨ `:sparkles:` - Introduce new features.
- 📝 `:memo:` - Add or update documentation.
- 🚀 `:rocket:` - Deploy stuff.
- 💄 `:lipstick:` - Add or update the UI and style files.
- 🎉 `:tada:` - Begin a project.
- ✅ `:white_check_mark:` - Add, update, or pass tests.
- 🔒️ `:lock:` - Fix security issues.
- 🔖 `:bookmark:` - Make a release or version tag.
- 🚨 `:rotating_light:` - Fix compiler/linter warnings.
- 🚧 `:construction:` - Work in progress.
- ⬆️ `:arrow_up:` - Upgrade dependencies.
- 👷 `:construction_worker:` - Add or update the CI build system.
- ♻️ `:recycle:` - Refactor code.
- 🩹 `:adhesive_bandage:` - Simple fix for a non-critical issue.
- 🔧 `:wrench:` - Add or update configuration files.
- 🍱 `:bento:` - Add or update assets.
- 🗃️ `:card_file_box:` - Perform database schema-related changes.
- ✏️ `:pencil2:` - Fix typos.
- 🌐 `:globe_with_meridians:` - Internationalization and translations.

For a full list of options, see [gitmoji.dev](https://gitmoji.dev/)
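
For example, a documentation-only commit following this convention might look like:

```shell
git commit -m "📝 Update the developing docs"
```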

### PR Guidelines

Once you've made your changes and pushed them to your fork or branch, you're ready to open a pull request!

For a pull request to be merged, it must:

- Pass the build, lint, and test checks (run by GH Actions)
- Be free of merge conflicts

When you submit your pull request, include the required info by filling out the pull request template, including:

- A brief description of your changes.
- The issue or ticket number (if applicable).
- For UI-related updates, a screenshot.
- If any dependencies were added, an explanation of why they were needed, a statement of the associated cost, and confirmation that they do not introduce any security, privacy, or speed issues.
- Optionally, a checklist of all the changes that were included in the pull request.

> [!Important]
> Make sure to fill all the required/mandatory sections of the pull request, as filling them helps us distinguish between spam pull requests and legitimate pull requests.

> [!Note]
> The pull request template contains comments in the form `<!-- -->` which are used to provide a guide on what should be provided under each heading of the template. These comments are never rendered when the pull request is created or updated, and hence anything provided in such comments is never displayed.

## Resources for Beginners

New to Web Development? Or new to GitHub? Glad to see you're here!! :slightly_smiling_face: Websurfx is a pretty simple app, so it should make a good candidate for your first PR. The following articles (which have been divided into parts for convenience) should point you in the right direction for getting up to speed with the technologies used in this project:

**Development**

- [Basics of Rust](https://piped.kavin.rocks/playlist?list=PLai5B987bZ9CoVR-QEIN9foz4QCJ0H2Y8)
- [Introduction and deep dive into async/await in rust](https://piped.kavin.rocks/watch?v=ThjvMReOXYM)
- [Getting Started to Actix Guide](https://actix.rs/docs/getting-started)
- [Basics of Lua](https://learn.coregames.com/courses/intro-to-lua/)
- [Complete course on CSS](https://piped.kavin.rocks/watch?v=1Rs2ND1ryYc)
- [Complete course on JS](https://piped.kavin.rocks/playlist?list=PL_c9BZzLwBRLVh9OdCBYFEql6esA6aRsi)
- [Responsive web design](https://piped.kavin.rocks/watch?v=srvUrASNj0s)
- [Complete beginners guide to Docker](https://docker-curriculum.com/)
- [Docker Classroom - Interactive Tutorials](https://training.play-with-docker.com/)
- [Docker Compose Tutorial](https://docs.docker.com/compose/gettingstarted/)
- [ES6 Tutorial](https://piped.kavin.rocks/watch?v=nZ1DMMsyVyI)
- [Cargo Guide Book](https://doc.rust-lang.org/cargo/index.html)

**GitHub**

- [Complete Guide to Open Source - How to Contribute](https://piped.kavin.rocks/watch?v=yzeVMecydCE)
- [Forking a Project](https://piped.kavin.rocks/watch?v=FnxFwyzm4Z4)
- [A Tutorial on Git](https://piped.kavin.rocks/playlist?list=PL4lTrYcDuAfxAgSefXftJXbhw0qvjfOFo)
- [Git cheat sheet](http://git-cheatsheet.com/)

For Rust, CSS, JS, HTML, Git, and Docker you'll need an IDE (e.g. [VSCode](https://code.visualstudio.com/) or [Neovim](https://neovim.io/)) and a terminal (Windows users may find [WSL](https://docs.microsoft.com/en-us/windows/wsl/) more convenient).

## App Info

### Style Guides

Linting is done using [Cargo Clippy](https://doc.rust-lang.org/clippy/) and [StyleLint](https://stylelint.io/) or [ESLint](https://eslint.org/). Also, linting is run as a git pre-commit hook.

> [!Important]
> All lint checks must pass before any PR can be merged.

Style guides to follow:

- [Rust API Guidelines](https://rust-lang.github.io/api-guidelines/naming.html)
- [Airbnb JS Guidelines](https://github.com/airbnb/javascript)
- [Google's HTML and CSS Guidelines](https://google.github.io/styleguide/htmlcssguide.html)

## Application Structure

> [!Important]
> We follow Unix-style naming conventions for all the files and folders in the project: names should be lowercase, with words separated by underscores. The exception is everything under the `themes` and `colorschemes` folders in the frontend's source code, where names should be lowercase with words separated by a hyphen.
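> For example, a backend module is named `search_result_parser.rs` (lowercase with underscores), while a colorscheme file is named `tokyo-night` (lowercase with a hyphen).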

**Files in the root of the codebase:** `./`

```
./
├── .dockerignore          # Docker ignore file to keep unneeded files out of the Docker image.
├── .gitignore             # Git ignore file to keep unneeded files from being tracked by Git.
├── Cargo.lock             # Auto-generated list of current packages and version numbers.
├── Cargo.toml             # Project meta-data and dependencies.
├── Dockerfile             # The blueprint for building the Docker container.
├── LICENSE                # License for use.
├── README.md              # Readme, basic info for getting started.
├── dev.Dockerfile         # The blueprint for building the Docker container for development purposes.
├── dev.docker-compose.yml # A Docker run command for development environments.
├── docker-compose.yml     # A Docker run command.
├── flake.lock             # NixOS auto-generated flake configuration.
├── flake.nix              # Nix flake package configuration.
├── docs                   # Markdown documentation
├── public                 # Project front-end source code
├── src                    # Project back-end source code
├── tests                  # Project integration tests for the back-end source code.
└── websurfx               # Project folder containing config files for the app.
```

**Frontend Source:** `./public/`

```
./public/
├── robots.txt                 # Robots file for the Website.
├── images                     # Images for the Website.
└── static                     # The directory containing all the UI handlers.
    ├── cookies.js             # Handles the loading of saved cookies.
    ├── error_box.js           # Handles the toggling functionality of the error box on the search page.
    ├── index.js               # Functions to handle the search functionality of the search bar.
    ├── pagination.js          # Functions to handle the navigation between the previous and next pages on the search page.
    ├── search_area_options.js # Changes the search options under the search bar on the search page according to the safe search level set using the URL safesearch parameter.
    ├── settings.js            # Handles the settings and the saving of all the settings page options as a cookie.
    ├── colorschemes           # A folder containing all the popular colorscheme files as CSS files.
    └── themes                 # A folder containing all the popular theme files as CSS files.
```

**Frontend Maud HTML Framework Source:** `./src/templates/`

```
./src/templates/
├── mod.rs                    # A module file for the rust project.
├── partials                  # A folder containing the code for partials for the views.
│   ├── bar.rs                # Provides partial code for the search bar.
│   ├── footer.rs             # Provides partial code for the footer section.
│   ├── header.rs             # Provides partial code for the header section.
│   ├── mod.rs                # A module file for the rust project.
│   ├── navbar.rs             # Provides partial code for the navbar inside the header section.
│   ├── search_bar.rs         # Provides partial code for the search bar present on the search page.
│   └── settings_tabs         # A folder containing all the partials for the settings page tabs.
│       ├── cookies.rs        # Provides partial code for the cookies tab.
│       ├── engines.rs        # Provides partial code for the engines tab.
│       ├── general.rs        # Provides partial code for the general tab.
│       ├── mod.rs            # A module file for the rust project.
│       └── user_interface.rs # Provides partial code for the user interface tab.
└── views                     # A folder containing the code for the views.
    ├── about.rs              # Provides code for the about page view.
    ├── index.rs              # Provides code for the homepage view.
    ├── mod.rs                # A module file for the rust project.
    ├── not_found.rs          # Provides code for the 404 page view.
    ├── search.rs             # Provides code for the search page view.
    └── settings.rs           # Provides code for the settings page view.
```

**Backend Source:** `./src/`

```
./src/
├── lib.rs                      # A library file for the rust project.
├── bin                         # A folder containing the source code that produces the binary file when compiled.
│   └── websurfx.rs             # A file that is compiled into a binary file.
├── cache                       # A folder that contains code to handle the caching functionality of the search engine.
│   ├── cacher.rs               # Handles the different caching features.
│   ├── error.rs                # Provides custom error messages for the different types of caches and their related errors.
│   ├── mod.rs                  # A module file for the rust project.
│   └── redis_cacher.rs         # Provides a custom asynchronous pool implementation with auto background reconnection functionality.
├── config                      # A folder that holds the code to help parse the lua config file used in the app.
│   ├── mod.rs                  # A module file for the rust project.
│   └── parser.rs               # Provides the code to parse the config file.
├── engines                     # A folder that holds code to handle fetching data from different upstream engines.
│   ├── brave.rs                # Provides code to fetch results from the brave search engine and remove unnecessary or waste results.
│   ├── duckduckgo.rs           # Provides code to fetch results from the duckduckgo search engine and remove unnecessary or waste results.
│   ├── mod.rs                  # A module file for the rust project.
│   ├── search_result_parser.rs # Provides a helper function to ease the process of defining different result selection selectors.
│   └── searx.rs                # Provides code to fetch results from the searx engine and remove unnecessary or waste results.
├── handler                     # A folder that provides helper code to provide a proper path to the public (theme) folder, config file, blocklist file, and allowlist file based on where they are located.
│   ├── mod.rs                  # A module file for the rust project.
│   └── paths.rs                # Provides helper code to handle different paths.
├── models                      # A folder that provides different models for the different modules in the backend code.
│   ├── aggregation_models.rs   # Provides different models (enums, structs) for handling and standardizing different parts of the "results" module code.
│   ├── engine_models.rs        # Provides different models (enums, structs) for handling and standardizing different parts of the "engines" module code.
│   ├── mod.rs                  # A module file for the rust project.
│   ├── parser_models.rs        # Provides different models (enums, structs) for handling and standardizing different parts of the "config" module code.
│   └── server_models.rs        # Provides different models (enums, structs) for handling and standardizing different parts of the "server" module code.
├── results                     # A folder that provides code to handle the fetching and aggregating of results from the upstream search engines.
│   ├── aggregator.rs           # Provides code to aggregate and fetch results from the upstream engines.
│   ├── mod.rs                  # A module file for the rust project.
│   └── user_agent.rs           # Provides a helper function to pass random user agents in the server request code, to improve user privacy and avoid being detected as a bot.
├── server                      # A folder that holds code to handle the routes for the search engine website.
│   ├── mod.rs                  # A module file for the rust project.
│   ├── router.rs               # Provides functions to handle the different routes on the website.
│   └── routes                  # A folder that contains code to handle the bigger routes for the website.
│       ├── mod.rs              # A module file for the rust project.
│       └── search.rs           # Provides the function to handle the search route.
└── templates                   # A module that provides and handles Maud HTML framework source code for the search engine website (subfolders and files are explained in the frontend section above).
```

## Development Tools

### Performance - Lighthouse

The easiest method of checking performance is to use Chromium's built-in auditing tool, Lighthouse. To run the test, open Developer Tools (usually F12) --> Lighthouse and click on the 'Generate Report' button at the bottom.
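
If you prefer the command line, the Lighthouse npm package can audit a locally served instance as well. A sketch, assuming the dev server is running on the default address:

```shell
npm i -g lighthouse
lighthouse http://127.0.0.1:8080 --view
```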

## Notes

### Known Warnings

When running the build command, a warning like the one below appears. This is not an error and does not affect the security or performance of the application. It will be addressed soon in a future update.

```shell
warning: the following packages contain code that will be rejected by a future version of Rust: html5ever v0.23.0
note: to see what the problems were, use the option `--future-incompat-report`, or run `cargo report future-incompatibilities --id 2`
```

This warning just means that code in the `html5ever` crate will be deprecated and rejected by future versions of the Rust compiler. For now, these dependencies can still be used, as this has not happened yet.

[⬅️ Go back to Home](./README.md)

@@ -8,11 +8,11 @@ The different caching features provided are as follows:
- In memory cache
- Hybrid cache

-## Explaination
+## Explanation

### No Cache

-This feature can drastically reduce binary size but with the cost that subsequent search requests and previous & next page search results are not cached which can make navigating between pages slower. As well as page refreshes of the same page also becomes slower as each refresh has to fetch the results from the upstream search engines.
+This feature can drastically reduce binary size, but at the cost that subsequent search requests and previous & next page search results are not cached, which can make navigating between pages slower. Page refreshes of the same page also become slower, as each refresh has to fetch the results from the upstream search engines.

### Redis Cache

@@ -20,11 +20,11 @@ This feature allows the search engine to cache the results on the redis server.

### In Memory Cache

-This feature is the default feature provided by the project. This feature allows the search engine to cache the results in the memory which can help increase the speed of the fetched cache results and it also has an advantage that it is extremely reliable as all the results are stored in memory within the search engine. Though the disadvantage of this solution are that caching of results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system and as such is not ideal for very low memory devices and is highly unscalable.
+This feature is the default feature provided by the project. It allows the search engine to cache the results in memory, which can help increase the speed of fetching cached results, and it also has the advantage of being extremely reliable, as all the results are stored in memory within the search engine. The disadvantages of this solution are that caching results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system, and as such it is not ideal for very low memory devices and is highly unscalable.

### Hybrid Cache

-This feature provides the advantages of both `In Memory` caching and `Redis` caching and it is an ideal solution if you need a very resiliant and reliable solution for the `Websurfx` which can provide both speed and reliability. Like for example if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting `Websurfx` instance which will be used by hundreds or thousands of users over the world.
+This feature provides the advantages of both `In Memory` caching and `Redis` caching, and it is an ideal solution if you need a very resilient and reliable setup for `Websurfx` that can provide both speed and reliability. For example, if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting a `Websurfx` instance which will be used by hundreds or thousands of users all over the world.

## Tabular Summary

@@ -34,7 +34,7 @@ This feature provides the advantages of both `In Memory` caching and `Redis` cac
| **Speed** | Fast | Caching is slow, but retrieval of cache data is fast | Slow | Fastest |
| **Reliability** | ✅ | ✅ | ✅ | ❌ |
| **Scalability** | ✅ | ❌ | - | ✅ |
-| **Resiliancy** | ✅ | ✅ | ✅ | ❌ |
+| **Resiliency** | ✅ | ✅ | ✅ | ❌ |
| **Production/Large Scale/Instance use** | ✅ | Not Recommended | Not Recommended | Not Recommended |
| **Low Memory Support** | ❌ | ❌ | ✅ | ❌ |
| **Binary Size** | Big | Bigger than `No Cache` | Small | Bigger than `No Cache` |

@@ -18,11 +18,11 @@ websurfx

Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.

-If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
+If you want to change the port or the IP or any other configuration setting, check out the [configuration docs](./configuration.md).

### Stable

-For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version) with the only difference being that the package to be installed for stable version is called `websurfx-git` instead of `websurfx-edge-git`.
+For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version), with the only difference being that the package to be installed for the stable version is called `websurfx-git` instead of `websurfx-edge-git`.

## NixOS

@@ -32,7 +32,7 @@ The Websurfx project provides 2 versions/flavours for the flake `stable` and `ro

### Rolling/Edge/Unstable

-To get started, First clone the repository, edit the config file which is located in the `websurfx` directory and then build and run the websurfx server by running the following commands:
+To get started, first clone the repository, edit the config file which is located in the `websurfx` directory, and then build and run the websurfx server by running the following commands:

```shell
git clone https://github.com/neon-mmd/websurfx.git

@@ -44,12 +44,12 @@ nix build .#websurfx
nix run .#websurfx
```

-> **Note**
-> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods.
+> [!Note]
+> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access methods.

-Once you have run the above set of commands, then open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
+Once you have run the above set of commands, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.

-If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
+If you want to change the port or the IP or any other configuration setting, check out the [configuration docs](./configuration.md).

> Optionally, you may include it in your own flake by adding this repo to its inputs and adding it to `environment.systemPackages` as follows:
>

@@ -76,7 +76,7 @@ If you want to change the port or the ip or any other configuration setting chec

### Stable

-For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge version`) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the building step as follows:
+For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge version`), with the addition of one command which has to be performed after cloning and changing the directory into the repository, which makes the building step as follows:

```shell
git clone https://github.com/neon-mmd/websurfx.git

@@ -89,8 +89,8 @@ nix build .#websurfx
nix run .#websurfx
```

-> **Note**
-> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods.
+> [!Note]
+> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access methods.

## Other Distros
@ -102,7 +102,7 @@ Before you can start building `websurfx`, you will need to have `Cargo` installe
|
|||
|
||||
## Stable
|
||||
|
||||
To get started with Websurfx, clone the repository, edit the config file which is located in the `websurfx` directory and install redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then build and run the websurfx server by running the following commands:
|
||||
To get started with Websurfx, clone the repository, edit the config file which is located in the `websurfx` directory, and install redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then build and run the websurfx server by running the following commands:
|
||||
|
||||
```shell
|
||||
git clone https://github.com/neon-mmd/websurfx.git
|
||||
|
@ -115,7 +115,7 @@ redis-server --port 8082 &
|
|||
|
||||
Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
|
||||
|
||||
If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
|
||||
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
|
||||
|
||||
## Rolling/Edge/Unstable
|
||||
|
||||
|
@ -128,6 +128,16 @@ cd websurfx
|
|||
|
||||
Once you have changed the directory to the `websurfx` directory then follow the build options listed below:
|
||||
|
||||
> [!Note]
|
||||
> Before you start building the search engine using one of the below listed command. We would strongly recommend setting the `PKG_ENV` enviroment variable as this applies some special optimization to code to reduce the file and improve the page load speed of the website.
|
||||
> To set the `PKG_ENV` environment variable in the `bash` shell, run the following command:
|
||||
>
|
||||
> ```bash
|
||||
> export PKG_ENV="prod"
|
||||
> ```
|
||||
>
|
||||
> For how to set environment variables in other shells, follow the instructions in the documentation of the specific shell you are using.
|
||||
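> As an illustration, a sketch for the `fish` shell (using fish's standard `set -x` builtin to export a variable; the syntax for other shells will differ):
>
> ```fish
> # export PKG_ENV for the current fish session
> set -x PKG_ENV prod
> ```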
|
||||
### Hybrid Cache
|
||||
|
||||
> For more information on the features and their pros and cons, see: [**Features**](./features.md)
|
||||
|
@ -138,7 +148,7 @@ To build the search engine with the `Hybrid caching` feature. Run the following
|
|||
cargo build -r --features redis-cache
|
||||
```
|
||||
|
||||
### Memory Cache (Default Feature)
|
||||
### Memory Cache (Default Features)
|
||||
|
||||
> For more information on the features and their pros and cons, see: [**Features**](./features.md)
|
||||
|
||||
|
@ -168,7 +178,7 @@ To build the search engine with the `hybrid caching` feature. Run the following
|
|||
cargo build -r --no-default-features --features redis-cache
|
||||
```
|
||||
|
||||
> Optionally, If you have build the app with the `Redis cache`or `Hybrid cache` feature (as mentioned above) then before launching the search engine run the following command:
|
||||
> Optionally, if you have built the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above), then before launching the search engine run the following command:
|
||||
>
|
||||
> ```shell
|
||||
> redis-server --port 8082 &
|
||||
|
@ -180,9 +190,9 @@ Once you have finished building the `search engine`. then run the following comm
|
|||
./target/release/websurfx
|
||||
```
|
||||
|
||||
Once you have started the server, then launch your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
|
||||
Once you have started the server, launch your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
|
||||
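If you would rather verify from the command line first, a quick sanity check (assuming `curl` is installed) might look like this:

```shell
# should print an HTTP 200 status line if the server is up
curl -I http://127.0.0.1:8080/
```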
|
||||
If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
|
||||
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
|
||||
|
||||
# Docker Deployment
|
||||
|
||||
|
@ -190,7 +200,7 @@ Before you start, you will need [Docker](https://docs.docker.com/get-docker/) in
|
|||
|
||||
## Prebuild
|
||||
|
||||
The Websurfx project provides several prebuild images based on the different features provided by the search engine. To get started using the prebuild image, you will first need to create a `docker-compose.yml` file with the following content:
|
||||
The Websurfx project provides several prebuilt images based on the different features provided by the search engine. To get started using a prebuilt image, you will first need to create a `docker-compose.yml` file with the following content:
|
||||
|
||||
```yaml
|
||||
---
|
||||
|
@ -217,8 +227,6 @@ services:
|
|||
# Uncomment the following lines if you are using the `hybrid/latest` or `redis` image.
|
||||
# redis:
|
||||
# image: redis:latest
|
||||
# ports:
|
||||
# - 6379:6379
|
||||
```
|
||||
|
||||
Then make sure to edit the `docker-compose.yml` file as required. After that, create a directory `websurfx` in the directory where you have placed the `docker-compose.yml` file, and then in the new directory create two new empty files named `allowlist.txt` and `blocklist.txt`. Finally, create a new config file `config.lua` with the default configuration, which looks something like this:
|
||||
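As a minimal sketch, the steps above could be performed from the directory containing the `docker-compose.yml` file like this:

```shell
# create the config directory and the two empty filter lists
mkdir websurfx
touch websurfx/allowlist.txt websurfx/blocklist.txt
# then create websurfx/config.lua with the default configuration shown below
```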
|
@ -237,8 +245,8 @@ production_use = false -- whether to use production mode or not (in other words
|
|||
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
|
||||
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
|
||||
rate_limiter = {
|
||||
number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
|
||||
time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
|
||||
number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
|
||||
time_limit = 3, -- The time limit within which the above number of requests should be accepted.
|
||||
}
|
||||
|
||||
-- ### Search ###
|
||||
|
@ -268,8 +276,8 @@ safe_search = 2
|
|||
-- tokyo-night
|
||||
-- tomorrow-night
|
||||
-- }}
|
||||
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
|
||||
theme = "simple" -- the theme name which should be used for the website
|
||||
colorscheme = "catppuccin-mocha" -- the colorscheme name that should be used for the website theme
|
||||
theme = "simple" -- the theme name that should be used for the website
|
||||
|
||||
-- ### Caching ###
|
||||
redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
|
||||
|
@ -287,48 +295,51 @@ Then run the following command to deploy the search engine:
|
|||
$ docker compose up -d
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods.
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) indicates that the command should be run in privileged mode using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
Then launch the browser of your choice and navigate to http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>.
|
||||
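You can also confirm from the command line that the containers came up (standard Docker Compose subcommands; the app service name is assumed to be `websurfx` here, so adjust it to match your `docker-compose.yml`):

```shell
# list the services and their state
docker compose ps
# follow the logs of the app service (service name assumed)
docker compose logs -f websurfx
```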
|
||||
> **Note**
|
||||
> The official prebuild images only support `stable` versions of the app and will not support `rolling/edge/unstable` versions. But with support and contribution it could be made available for these versions as well 🙂.
|
||||
> [!Note]
|
||||
> The official prebuilt images only support `stable` versions of the app and will not support `rolling/edge/unstable` versions. But with support and contribution, they could be made available for these versions as well 🙂.
|
||||
|
||||
## Manual Deployment
|
||||
|
||||
This section covers how to deploy the app with Docker by manually building the image and deploying it.
|
||||
|
||||
> **Note**
|
||||
> This section is provided for those who want to futher customize the docker image or for those who are extra cautious about security.
|
||||
> [!Note]
|
||||
> This section is provided for those who want to further customize the docker image or for those who are extra cautious about security.
|
||||
|
||||
> [!Warning]
|
||||
> A note of caution: the project currently only supports the **x86-64** architecture, and as such we do not recommend deploying the project on devices with other architectures. If you still want to do so, then **do it at your own risk**.
|
||||
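To check your machine's architecture before deploying, you can use the standard `uname` utility:

```shell
# prints the machine hardware name; x86_64 indicates a supported architecture
uname -m
```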
|
||||
### Unstable/Edge/Rolling
|
||||
|
||||
First clone the the repository by running the following command:
|
||||
First, clone the repository by running the following command:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/neon-mmd/websurfx.git
|
||||
cd websurfx
|
||||
```
|
||||
|
||||
After that edit the config.lua file located under `websurfx` directory. In the config file you will specifically need to change to values which is `binding_ip_addr` and `redis_connection_url` which should make the config look something like this:
|
||||
After that, edit the `config.lua` file located under the `websurfx` directory. In the config file, you will specifically need to change two values, namely `binding_ip_addr` and `redis_connection_url`, which should make the config look something like this:
|
||||
|
||||
```lua
|
||||
-- ### General ###
|
||||
logging = true -- an option to enable or disable logs.
|
||||
debug = false -- an option to enable or disable debug mode.
|
||||
threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0).
|
||||
debug = false -- an option to enable or disable debug mode.
|
||||
threads = 10 -- the number of threads that the app will use to run (the value should be greater than 0).
|
||||
|
||||
-- ### Server ###
|
||||
port = "8080" -- port on which server should be launched
|
||||
binding_ip = "0.0.0.0" --ip address on the which server should be launched.
|
||||
production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one))
|
||||
port = "8080" -- port on which server should be launched
|
||||
binding_ip = "127.0.0.1" --ip address on the which server should be launched.
|
||||
production_use = false -- whether to use production mode or not (in other words, enable this option when hosting the app on a server to provide the service to a large number of users (more than one))
|
||||
-- if production_use is set to true
|
||||
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
|
||||
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
|
||||
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
|
||||
rate_limiter = {
|
||||
number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
|
||||
time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
|
||||
number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
|
||||
time_limit = 3, -- The time limit within which the above number of requests should be accepted.
|
||||
}
|
||||
|
||||
-- ### Search ###
|
||||
|
@ -359,15 +370,18 @@ safe_search = 2
|
|||
-- tomorrow-night
|
||||
-- }}
|
||||
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
|
||||
theme = "simple" -- the theme name which should be used for the website
|
||||
theme = "simple" -- the theme name which should be used for the website
|
||||
|
||||
-- ### Caching ###
|
||||
redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
|
||||
|
||||
redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
|
||||
cache_expiry_time = 600 -- This option takes the expiry time of the search results (value in seconds and the value should be greater than or equal to 60 seconds).
|
||||
-- ### Search Engines ###
|
||||
upstream_search_engines = {
|
||||
DuckDuckGo = true,
|
||||
Searx = false,
|
||||
DuckDuckGo = true,
|
||||
Searx = false,
|
||||
Brave = false,
|
||||
Startpage = false,
|
||||
LibreX = false,
|
||||
} -- select the upstream search engines from which the results should be fetched.
|
||||
```
|
||||
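To verify that the Redis instance referenced by `redis_url` above is reachable, a quick check (assuming `redis-cli` is installed) could be:

```shell
# should reply with PONG if the server from the config is reachable
redis-cli -u redis://127.0.0.1:8082 ping
```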
|
||||
|
@ -377,14 +391,14 @@ After this make sure to edit the `docker-compose.yml` and `Dockerfile` files as
|
|||
$ docker compose up -d --build
|
||||
```
|
||||
|
||||
> **Note**
|
||||
> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods.
|
||||
> [!Note]
|
||||
> In the above command, the dollar sign (**$**) indicates that the command should be run in privileged mode using utilities such as `sudo`, `doas`, `pkexec`, or any other privileged access method.
|
||||
|
||||
This will take around 5-10 mins for first deployment, afterwards the docker build stages will be cached so it will be faster to be build from next time onwards. After the above step finishes launch your preferred browser and then navigate to `http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>`.
|
||||
This will take around 5-10 minutes for the first deployment; afterwards, the Docker build stages will be cached, so builds will be faster from the next time onwards. After the above step finishes, launch your preferred browser and then navigate to `http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>`.
|
||||
|
||||
### Stable
|
||||
|
||||
For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the cloning step as follows:
|
||||
For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with the addition of one command, which has to be performed after cloning and changing directory into the repository, making the cloning step as follows:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/neon-mmd/websurfx.git
|
||||
|
|
|
@ -4,9 +4,12 @@
|
|||
|
||||
This page provides a list of `Websurfx` instances provided by us and our community.
|
||||
|
||||
|URL|Network|Version|Location|Behind Cloudflare?|Maintained By|TLS|IPv6|Comment|
|
||||
|URL|Network|Version|Location|Status|Maintained By|TLS|IPv6|Comment|
|
||||
|-|-|-|-|-|-|-|-|-|
|
||||
|https://alamin655-websurfx.hf.space/|www|v0.21.4|🇺🇸 US||[websurfx project](https://github.com/neon-mmd/websurfx)|✅|||
|
||||
|https://websurfx.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|
||||
|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-spacex.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|
||||
|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.instance.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|
||||
|https://alamin655-surfx.hf.space|www|stable|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-surfx.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|
||||
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
|
@ -4,10 +4,10 @@ A modern-looking, lightning-fast, privacy-respecting, secure [meta search engine
|
|||
|
||||
# Motivation
|
||||
|
||||
Most meta search engines tend to be slow, lack high level of customization and missing many features and all of them like security as they are written in unsafe languages like python, javascript, etc which tend to open a wide variety of vulnerabilities which can also sometimes pose a threat to privacy as sometimes this can be exploited and can be used to leveraged to leak out sensitive information which is never good.
|
||||
Most meta search engines tend to be slow, lack a high level of customization, and miss many features, and all of them lack security as they are written in unsafe languages like Python, JavaScript, etc., which tend to open a wide variety of vulnerabilities that can sometimes be exploited to leak sensitive information, which is never good.
|
||||
|
||||
# Solution
|
||||
|
||||
Websurfx is a project which seeks to provide privacy, security, speed and all the features which the user wants.
|
||||
Websurfx is a project that seeks to provide privacy, security, speed, and all the features that the user wants.
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
345
docs/theming.md
|
@ -1,17 +1,25 @@
|
|||
# Colorschemes
|
||||
# Theming
|
||||
|
||||
## Built-in
|
||||
## Colorschemes
|
||||
|
||||
By default `websurfx` comes with 9 colorschemes to choose from which can be easily chosen using the config file. To how to change colorschemes please view the [Configuration](https://github.com/neon-mmd/websurfx/wiki/configuration) section of the wiki.
|
||||
### Built-in
|
||||
|
||||
## Custom
|
||||
By default `websurfx` comes with 12 colorschemes, which can be easily chosen using the config file or via the settings page on the website.
|
||||
|
||||
Creating coloschemes is as easy as it gets it requires the user to have a theme file name with the colorscheme in which every space should be replaced with a `-` (dash) and it should end with a `.css` file extension. After creating the file you need to add the following code with the `colors` you want:
|
||||
> For how to change colorschemes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
### Custom
|
||||
|
||||
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme, each word separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
|
||||
|
||||
Creating colorschemes is as easy as it gets: it requires the user to create a colorscheme file named after the colorscheme, in which every space should be replaced with a `-` (dash), and it should end with a `.css` file extension. After creating the file, you need to add the following code with the `colors` you want to include:
|
||||
|
||||
```css
|
||||
:root {
|
||||
--background-color: <background color>;
|
||||
--foreground-color: <foreground color (text color on the website) >;
|
||||
--logo-color: <logo color (the color of the logo svg image on the website homepage)>;
|
||||
--color-one: <color 1>;
|
||||
--color-two: <color 2>;
|
||||
--color-three: <color 3>;
|
||||
|
@ -22,7 +30,7 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
}
|
||||
```
|
||||
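For instance, the colorscheme file itself, for a hypothetical colorscheme named `mocha dream`, would be created like this before adding the code above (this is the same folder where the built-in colorschemes live):

```shell
# every space in the colorscheme name replaced with a dash
touch public/static/colorschemes/mocha-dream.css
```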
|
||||
> **Note**
|
||||
> [!Note]
|
||||
> Please refer to the theme file located under `public/static/themes` to better understand where each color is being used.
|
||||
|
||||
**Example of `catppuccin-mocha` colorscheme:**
|
||||
|
@ -31,6 +39,7 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
:root {
|
||||
--background-color: #1e1e2e;
|
||||
--foreground-color: #cdd6f4;
|
||||
--logo-color: #f5c2e7;
|
||||
--color-one: #45475a;
|
||||
--color-two: #f38ba8;
|
||||
--color-three: #a6e3a1;
|
||||
|
@ -41,21 +50,31 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme
|
|||
}
|
||||
```
|
||||
|
||||
# Themes
|
||||
## Themes
|
||||
|
||||
## Built-in
|
||||
### Built-in
|
||||
|
||||
By default `websurfx` comes with 1 theme to choose from which can be easily chosen using the config file. To how to change themes please view the [Configuration](https://github.com/neon-mmd/websurfx/wiki/configuration) section of the wiki.
|
||||
By default `websurfx` comes with 1 theme, which can be easily chosen using the config file or via the settings page on the website.
|
||||
|
||||
## Custom
|
||||
> For how to change themes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
To write custom color scheme, it requires the user to have some knowledge of `css stylesheets`.
|
||||
### Custom
|
||||
|
||||
**Here is an example of `simple theme` (which we provide by default with the app) which will give the user a better idea on how to create a custom theme using it as a template:**
|
||||
> This section expects the user to have some knowledge of `css`.
|
||||
|
||||
### General
|
||||
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme, each word separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
|
||||
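For example, a file for a hypothetical theme named `my nord theme` would be created like this:

```shell
# each word of the theme name separated by a hyphen, with a .css extension
touch public/static/themes/my-nord-theme.css
```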
|
||||
Here is an example of the `simple` theme (which we provide by default with the app), which will give you a better idea of how you can create your own custom theme for the website:
|
||||
|
||||
#### General
|
||||
|
||||
```css
|
||||
@font-face {
|
||||
font-family: Rubik;
|
||||
src: url('https://fonts.googleapis.com/css2?family=Rubik:wght@400;500;600;700;800&display=swap');
|
||||
fallback: sans-serif;
|
||||
}
|
||||
|
||||
* {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
|
@ -72,11 +91,17 @@ body {
|
|||
justify-content: space-between;
|
||||
align-items: center;
|
||||
height: 100vh;
|
||||
background: var(--color-one);
|
||||
font-family: Rubik, sans-serif;
|
||||
background-color: var(--background-color);
|
||||
}
|
||||
|
||||
/* enforce font for buttons */
|
||||
button {
|
||||
font-family: Rubik, sans-serif;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the index page
|
||||
#### Styles for the index page
|
||||
|
||||
```css
|
||||
.search-container {
|
||||
|
@ -87,44 +112,69 @@ body {
|
|||
align-items: center;
|
||||
}
|
||||
|
||||
.search-container svg {
|
||||
color: var(--logo-color);
|
||||
}
|
||||
|
||||
.search-container div {
|
||||
display: flex;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the search box and search button
|
||||
#### Styles for the search box and search button
|
||||
|
||||
```css
|
||||
.search_bar {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.search_bar input {
|
||||
padding: 1rem;
|
||||
border-radius: 6px;
|
||||
padding: 2.6rem 2.2rem;
|
||||
width: 50rem;
|
||||
height: 3rem;
|
||||
outline: none;
|
||||
border: none;
|
||||
box-shadow: rgba(0, 0, 0, 1);
|
||||
background: var(--foreground-color);
|
||||
box-shadow: rgb(0 0 0 / 1);
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
outline-offset: 3px;
|
||||
font-size: 1.6rem;
|
||||
}
|
||||
|
||||
.search_bar input:focus {
|
||||
outline: 2px solid var(--foreground-color);
|
||||
}
|
||||
|
||||
.search_bar input::placeholder {
|
||||
color: var(--foreground-color);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.search_bar button {
|
||||
padding: 1rem;
|
||||
border-radius: 0;
|
||||
padding: 2.6rem 3.2rem;
|
||||
border-radius: 6px;
|
||||
height: 3rem;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
outline: none;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
border: none;
|
||||
transition: 0.1s;
|
||||
gap: 0;
|
||||
background: var(--background-color);
|
||||
color: var(--color-three);
|
||||
background-color: var(--color-six);
|
||||
color: var(--background-color);
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
.search_bar button:active {
|
||||
outline: 2px solid var(--color-three);
|
||||
}
|
||||
|
||||
.search_bar button:active,
|
||||
.search_bar button:hover {
|
||||
filter: brightness(1.2);
|
||||
|
@ -141,13 +191,19 @@ body {
|
|||
width: 20rem;
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
padding: 1rem 2rem;
|
||||
padding: 1.2rem 2rem;
|
||||
border-radius: 0.5rem;
|
||||
outline: none;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
border: none;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.search_area .search_options select:active,
|
||||
.search_area .search_options select:hover {
|
||||
outline: 2px solid var(--color-three);
|
||||
}
|
||||
|
||||
.search_area .search_options option:hover {
|
||||
background-color: var(--color-one);
|
||||
}
|
||||
|
@ -170,9 +226,7 @@ body {
|
|||
.result_not_found img {
|
||||
width: 40rem;
|
||||
}
|
||||
```
|
||||
|
||||
```css
|
||||
/* styles for the error box */
|
||||
.error_box .error_box_toggle_button {
|
||||
background: var(--foreground-color);
|
||||
|
@ -188,9 +242,11 @@ body {
|
|||
min-height: 20rem;
|
||||
min-width: 22rem;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box.show {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item,
|
||||
.error_box .dropdown_error_box .no_errors {
|
||||
display: flex;
|
||||
|
@ -200,22 +256,25 @@ body {
|
|||
padding: 1rem;
|
||||
font-size: 1.2rem;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item {
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .no_errors {
|
||||
min-height: 18rem;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.error_box .dropdown_error_box .error_item:hover {
|
||||
box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1);
|
||||
box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
|
||||
}
|
||||
|
||||
.error_box .error_item .severity_color {
|
||||
width: 1.2rem;
|
||||
height: 1.2rem;
|
||||
}
|
||||
|
||||
.results .result_disallowed,
|
||||
.results .result_filtered,
|
||||
.results .result_engine_not_selected {
|
||||
|
@ -225,7 +284,7 @@ body {
|
|||
gap: 10rem;
|
||||
font-size: 2rem;
|
||||
color: var(--foreground-color);
|
||||
margin: 0rem 7rem;
|
||||
margin: 0 7rem;
|
||||
}
|
||||
|
||||
.results .result_disallowed .user_query,
|
||||
|
@ -251,16 +310,34 @@ body {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the footer and header
|
||||
#### Styles for the footer and header
|
||||
|
||||
```css
|
||||
header {
|
||||
background: var(--background-color);
|
||||
width: 100%;
|
||||
background: var(--background-color);
|
||||
display: flex;
|
||||
justify-content: right;
|
||||
align-items: center;
|
||||
padding: 1rem;
|
||||
justify-content: space-between;
|
||||
padding: 2rem 3rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
width: 100%;
|
||||
background: var(--background-color);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 1.7rem 1.7rem 4rem;
|
||||
gap: 1.8rem;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
header h1 a {
|
||||
text-transform: capitalize;
|
||||
text-decoration: none;
|
||||
color: var(--foreground-color);
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
header ul,
|
||||
|
@ -301,39 +378,9 @@ footer div {
|
|||
display: flex;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
background: var(--background-color);
|
||||
width: 100%;
|
||||
padding: 1rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
footer div span {
|
||||
font-size: 1.5rem;
|
||||
color: var(--4);
|
||||
}
|
||||
|
||||
footer div {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
background: var(--bg);
|
||||
width: 100%;
|
||||
padding: 1rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the search page
|
||||
#### Styles for the search page
|
||||
|
||||
```css
|
||||
.results {
|
||||
|
@ -341,6 +388,11 @@ footer {
|
|||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-around;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.result {
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
.results .search_bar {
|
||||
|
@ -352,6 +404,7 @@ footer {
|
|||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
margin: 2rem 0;
|
||||
content-visibility: auto;
|
||||
}
|
||||
|
||||
.results_aggregated .result {
|
||||
|
@ -361,10 +414,10 @@ footer {
|
|||
}
|
||||
|
||||
.results_aggregated .result h1 a {
|
||||
font-size: 1.5rem;
|
||||
font-size: 1.7rem;
|
||||
font-weight: normal;
|
||||
color: var(--color-two);
|
||||
text-decoration: none;
|
||||
letter-spacing: 0.1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result h1 a:hover {
|
||||
|
@ -377,14 +430,15 @@ footer {
|
|||
|
||||
.results_aggregated .result small {
|
||||
color: var(--color-three);
|
||||
font-size: 1.1rem;
|
||||
font-size: 1.3rem;
|
||||
word-wrap: break-word;
|
||||
line-break: anywhere;
|
||||
}
|
||||
|
||||
.results_aggregated .result p {
|
||||
color: var(--foreground-color);
|
||||
font-size: 1.2rem;
|
||||
font-size: 1.4rem;
|
||||
line-height: 2.4rem;
|
||||
margin-top: 0.3rem;
|
||||
word-wrap: break-word;
|
||||
line-break: anywhere;
|
||||
|
@ -395,10 +449,13 @@ footer {
|
|||
font-size: 1.2rem;
|
||||
padding: 1rem;
|
||||
color: var(--color-five);
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
justify-content: right;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the 404 page
|
||||
#### Styles for the 404 page
|
||||
|
||||
```css
|
||||
.error_container {
|
||||
|
@ -448,11 +505,11 @@ footer {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the previous and next button on the search page
|
||||
#### Styles for the previous and next button on the search page
|
||||
|
||||
```css
|
||||
.page_navigation {
|
||||
padding: 0 0 2rem 0;
|
||||
padding: 0 0 2rem;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
@ -472,7 +529,7 @@ footer {
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the about page
|
||||
#### Styles for the about page
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -517,7 +574,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
```
|
||||
|
||||
### Styles for the Settings Page
|
||||
#### Styles for the Settings Page
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -526,6 +583,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
display: flex;
|
||||
justify-content: space-around;
|
||||
width: 80dvw;
|
||||
margin: 5rem 0;
|
||||
}
|
||||
|
||||
.settings h1 {
|
||||
|
@ -533,9 +591,18 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
font-size: 2.5rem;
|
||||
}
|
||||
|
||||
.settings > h1 {
|
||||
margin-bottom: 4rem;
|
||||
margin-left: 2rem;
|
||||
}
|
||||
|
||||
.settings hr {
|
||||
border-color: var(--color-three);
|
||||
margin: 0.3rem 0 1rem 0;
|
||||
margin: 0.3rem 0 1rem;
|
||||
}
|
||||
|
||||
.settings > hr {
|
||||
margin-left: 2rem;
|
||||
}
|
||||
|
||||
.settings_container .sidebar {
|
||||
|
@ -548,7 +615,6 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
margin-left: -0.7rem;
|
||||
padding: 0.7rem;
|
||||
border-radius: 5px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 0.5rem;
|
||||
color: var(--foreground-color);
|
||||
text-transform: capitalize;
|
||||
|
@ -556,18 +622,30 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
|
||||
.settings_container .sidebar .btn {
|
||||
padding: 0.5rem;
|
||||
padding: 2rem;
|
||||
border-radius: 0.5rem;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn:active {
|
||||
outline: 2px solid var(--color-two);
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn:not(.active):hover {
|
||||
color: var(--color-two);
|
||||
}
|
||||
|
||||
.settings_container .sidebar .btn.active {
|
||||
background-color: var(--color-two);
|
||||
color: var(--background-color);
|
||||
}
|
||||
|
||||
.settings_container .main_container {
|
||||
width: 70%;
|
||||
border-left: 1.5px solid var(--color-three);
|
||||
padding-left: 3rem;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.settings_container .tab {
|
||||
|
@ -576,6 +654,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
|
||||
.settings_container .tab.active {
|
||||
display: flex;
|
||||
gap: 1.2rem;
|
||||
flex-direction: column;
|
||||
justify-content: space-around;
|
||||
}
|
||||
|
@ -623,7 +702,7 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
.settings_container .general select {
|
||||
margin: 0.7rem 0;
|
||||
width: 20rem;
|
||||
background-color: var(--background-color);
|
||||
background-color: var(--color-one);
|
||||
color: var(--foreground-color);
|
||||
padding: 1rem 2rem;
|
||||
border-radius: 0.5rem;
|
||||
|
@ -641,16 +720,19 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
gap: 1rem;
|
||||
padding: 1rem 0;
|
||||
margin-bottom: 2rem;
|
||||
gap: 2rem;
|
||||
}
|
||||
|
||||
.settings_container .engines .toggle_btn {
|
||||
color: var(--foreground-color);
|
||||
font-size: 1.5rem;
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
align-items: center;
|
||||
border-radius: 100px;
|
||||
gap: 1.5rem;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.settings_container .engines hr {
|
||||
|
@ -658,11 +740,11 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
}
|
||||
|
||||
.settings_container .cookies input {
|
||||
margin: 1rem 0rem;
|
||||
margin: 1rem 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Styles for the Toggle Button
|
||||
#### Styles for the Toggle Button
|
||||
|
||||
This part is only available right now in the **rolling/edge/unstable** version
|
||||
|
||||
|
@ -686,25 +768,26 @@ This part is only available right now in the **rolling/edge/unstable** version
|
|||
.slider {
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: var(--background-color);
|
||||
-webkit-transition: 0.4s;
|
||||
transition: 0.4s;
|
||||
inset: 0;
|
||||
background-color: var(--foreground-color);
|
||||
transition: 0.2s;
|
||||
outline-offset: 3px;
|
||||
outline: 2px solid transparent;
|
||||
}
|
||||
|
||||
.slider:before {
|
||||
.slider:active {
|
||||
outline: 2px solid var(--foreground-color);
|
||||
}
|
||||
|
||||
.slider::before {
|
||||
position: absolute;
|
||||
content: '';
|
||||
height: 2.6rem;
|
||||
width: 2.6rem;
|
||||
left: 0.4rem;
|
||||
bottom: 0.4rem;
|
||||
background-color: var(--foreground-color);
|
||||
-webkit-transition: 0.4s;
|
||||
transition: 0.4s;
|
||||
background-color: var(--background-color);
|
||||
transition: 0.2s;
|
||||
}
|
||||
|
||||
input:checked + .slider {
|
||||
|
@ -715,9 +798,7 @@ input:focus + .slider {
|
|||
box-shadow: 0 0 1px var(--color-three);
|
||||
}
|
||||
|
||||
input:checked + .slider:before {
|
||||
-webkit-transform: translateX(2.6rem);
|
||||
-ms-transform: translateX(2.6rem);
|
||||
input:checked + .slider::before {
|
||||
transform: translateX(2.6rem);
|
||||
}
|
||||
|
||||
|
@ -726,9 +807,79 @@ input:checked + .slider:before {
|
|||
border-radius: 3.4rem;
|
||||
}
|
||||
|
||||
.slider.round:before {
|
||||
.slider.round::before {
|
||||
border-radius: 50%;
|
||||
}
|
||||
```
|
||||
|
||||
## Animations
|
||||
|
||||
### Built-in
|
||||
|
||||
By default `websurfx` comes with 1 animation, which can be easily chosen using the config file or via the settings page on the website.
|
||||
|
||||
> For how to change animations using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
|
||||
|
||||
### Custom
|
||||
|
||||
To write a custom animation, the user is expected to have some knowledge of `themes` and the HTML of the page for which the animation is being provided.
|
||||
|
||||
The animations can be of 2 categories:
|
||||
|
||||
- Theme specific animations
|
||||
- Universal animations
|
||||
|
||||
#### Theme Specific Animations
|
||||
|
||||
These animations can only be used with a specific theme and should not be used with other themes; otherwise, they either won't look good, won't work at all, or will only work partially.
|
||||
|
||||
Here is an example of the `simple-frosted-glow` animation for the `simple` theme (which we provide by default with the app), which will give you a better idea of how to create a custom animation for a specific theme:
|
||||
|
||||
```css
|
||||
.results_aggregated .result {
|
||||
margin: 1rem;
|
||||
padding: 1rem;
|
||||
border-radius: 1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result:hover {
|
||||
box-shadow:
|
||||
inset 0 0 3rem var(--color-two),
|
||||
inset 0 0 6rem var(--color-five),
|
||||
inset 0 0 9rem var(--color-three),
|
||||
0 0 0.25rem var(--color-two),
|
||||
0 0 0.5rem var(--color-five),
|
||||
0 0 0.75rem var(--color-three);
|
||||
}
|
||||
```
|
||||
|
||||
#### Universal Animations
|
||||
|
||||
These animations are independent of the theme being used and can be used with all the themes.
|
||||
|
||||
Here is an example of the `text-tilt` animation, which will give you an idea of how to create universal animations for the search engine website.
|
||||
|
||||
```css
|
||||
.results_aggregated .result:hover {
|
||||
transform: skewX(10deg);
|
||||
}
|
||||
```
|
||||
|
||||
> [!Note]
|
||||
> 1. The above-mentioned examples of animations were covered for the search page of the search engine website, but the same approach to creating custom animations can also be used for other pages.
|
||||
> 2. When naming the file for a new animation, follow these naming conventions:
|
||||
> 1. If the animation is theme specific, then the name of the animation file should look like this:
|
||||
> `<name of the theme which the animation is for><separated by a hyphen or dash><name of the animation with whitespace replaced with hyphens>`
|
||||
> **For example:**
|
||||
> If an animation to give search results a frosted glow on hover were to be created for the `simple` theme, then the name of the file would look something like this:
|
||||
> `simple-frosted-glow`
|
||||
> Where `simple` is the name of the theme the animation targets and `frosted-glow` is the name of the animation, with each word separated by a hyphen.
|
||||
> 2. If the animation is not theme specific (a universal animation), then the name of the animation file should look like this:
|
||||
> `<name of the animation with whitespace replaced with hyphens>`
|
||||
> **For example:**
|
||||
> If an animation to make search result text tilt on hover were to be created, then the name of the file would look something like this:
|
||||
> `text-tilt`
|
||||
> Where `text-tilt` is the name of the animation, with each word separated by a hyphen. (When naming files for these types of animations, you do not need to add a theme name in front of the file name.)
|
||||
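Putting the two conventions together, the files from the hypothetical examples above would sit under `public/static/animations` like this:

```shell
ls public/static/animations
# simple-frosted-glow.css   <- theme-specific: <theme name>-<animation name>
# text-tilt.css             <- universal: <animation name>
```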
|
||||
|
||||
[⬅️ Go back to Home](./README.md)
|
||||
|
|
6
flake.lock
generated
|
@ -34,11 +34,11 @@
|
|||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1695318763,
|
||||
"narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=",
|
||||
"lastModified": 1725194671,
|
||||
"narHash": "sha256-tLGCFEFTB5TaOKkpfw3iYT9dnk4awTP/q4w+ROpMfuw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "e12483116b3b51a185a33a272bf351e357ba9a99",
|
||||
"rev": "b833ff01a0d694b910daca6e2ff4a3f26dee478c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
13
flake.nix
|
@ -32,17 +32,28 @@
|
|||
buildInputs = [
|
||||
actionlint
|
||||
cargo
|
||||
docker
|
||||
haskellPackages.hadolint
|
||||
nodejs
|
||||
nodePackages_latest.cspell
|
||||
nodePackages_latest.eslint
|
||||
eslint
|
||||
nodePackages_latest.markdownlint-cli2
|
||||
nodePackages_latest.stylelint
|
||||
redis
|
||||
rustPackages.clippy
|
||||
rust-analyzer
|
||||
cargo-watch
|
||||
rustc
|
||||
rustfmt
|
||||
yamllint
|
||||
openssl
|
||||
pkg-config
|
||||
];
|
||||
RUST_SRC_PATH = rustPlatform.rustLibSrc;
|
||||
shellHook = ''
|
||||
export PATH="$PATH:$HOME/.cargo/bin"
|
||||
export NODE_PATH="$NODE_PATH:./node_modules"
|
||||
'';
|
||||
};
|
||||
|
||||
# Build via "nix build .#websurfx", which is basically just
|
||||
|
|
Before Width: | Height: | Size: 86 KiB After Width: | Height: | Size: 73 KiB |
BIN
images/intro.png
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 9.8 KiB |
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 45 KiB |
Before Width: | Height: | Size: 158 KiB After Width: | Height: | Size: 80 KiB |
Before Width: | Height: | Size: 5.4 KiB After Width: | Height: | Size: 5.1 KiB |
Before Width: | Height: | Size: 8.1 KiB After Width: | Height: | Size: 36 KiB |
Before Width: | Height: | Size: 892 KiB After Width: | Height: | Size: 876 KiB |
1
public/images/close.svg
Normal file
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-x"><line x1="18" y1="6" x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line></svg>
|
After Width: | Height: | Size: 299 B |
Before Width: | Height: | Size: 102 KiB After Width: | Height: | Size: 100 KiB |
|
@ -1 +1 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?><svg width="24px" height="24px" stroke-width="1.5" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg" color="#000000" data-darkreader-inline-color="" style="--darkreader-inline-color: #e8e6e3;"><path d="M12 11.5v5M12 7.51l.01-.011M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" stroke="#000000" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" data-darkreader-inline-stroke="" style="--darkreader-inline-stroke: #000000;"></path></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" stroke-width="1.5" color="#000" viewBox="0 0 24 24" style="--darkreader-inline-color:#e8e6e3"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M12 11.5v5M12 7.51l.01-.011M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" style="--darkreader-inline-stroke:#000000"/></svg>
|
Before Width: | Height: | Size: 532 B After Width: | Height: | Size: 409 B |
1
public/images/magnifying_glass.svg
Normal file
|
@ -0,0 +1 @@
|
|||
<?xml version="1.0" encoding="utf-8"?><!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 50 50" width="24px" height="24px"><circle fill="none" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-miterlimit="10" cx="21" cy="20" r="16"/><line fill="none" stroke="#000000" stroke-width="4" stroke-miterlimit="10" x1="32.229" y1="32.229" x2="45.5" y2="45.5"/></svg>
|
After Width: | Height: | Size: 610 B |
Before Width: | Height: | Size: 92 KiB After Width: | Height: | Size: 71 KiB |
|
@ -1 +1 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?><svg width="24px" height="24px" stroke-width="1.5" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg" color="#000000" data-darkreader-inline-color="" style="--darkreader-inline-color: #e8e6e3;"><path d="M20.043 21H3.957c-1.538 0-2.5-1.664-1.734-2.997l8.043-13.988c.77-1.337 2.699-1.337 3.468 0l8.043 13.988C22.543 19.336 21.58 21 20.043 21zM12 9v4" stroke="#000000" stroke-width="1.5" stroke-linecap="round" data-darkreader-inline-stroke="" style="--darkreader-inline-stroke: #000000;"></path><path d="M12 17.01l.01-.011" stroke="#000000" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" data-darkreader-inline-stroke="" style="--darkreader-inline-stroke: #000000;"></path></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" stroke-width="1.5" color="#000" viewBox="0 0 24 24" style="--darkreader-inline-color:#e8e6e3"><path stroke="#000" stroke-linecap="round" stroke-width="1.5" d="M20.043 21H3.957c-1.538 0-2.5-1.664-1.734-2.997l8.043-13.988c.77-1.337 2.699-1.337 3.468 0l8.043 13.988C22.543 19.336 21.58 21 20.043 21zM12 9v4" style="--darkreader-inline-stroke:#000000"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M12 17.01l.01-.011" style="--darkreader-inline-stroke:#000000"/></svg>
|
Before Width: | Height: | Size: 750 B After Width: | Height: | Size: 583 B |
Before Width: | Height: | Size: 8.1 KiB |
15
public/static/animations/simple-frosted-glow.css
Normal file
|
@ -0,0 +1,15 @@
|
|||
.results_aggregated .result {
|
||||
margin: 1rem;
|
||||
padding: 1rem;
|
||||
border-radius: 1rem;
|
||||
}
|
||||
|
||||
.results_aggregated .result:hover {
|
||||
box-shadow:
|
||||
inset 0 0 3rem var(--color-two),
|
||||
inset 0 0 6rem var(--color-five),
|
||||
inset 0 0 9rem var(--color-three),
|
||||
0 0 0.25rem var(--color-two),
|
||||
0 0 0.5rem var(--color-five),
|
||||
0 0 0.75rem var(--color-three);
|
||||
}
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #1e1e2e;
|
||||
--foreground-color: #cdd6f4;
|
||||
--logo-color: #f5c2e7;
|
||||
--color-one: #45475a;
|
||||
--color-two: #f38ba8;
|
||||
--color-three: #a6e3a1;
|
||||
--color-four: #f9e2af;
|
||||
--color-five: #89b4fa;
|
||||
--color-six: #f5c2e7;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #000000;
|
||||
--foreground-color: #ffffff;
|
||||
--background-color: #000;
|
||||
--foreground-color: #fff;
|
||||
--logo-color: #e0e0e0;
|
||||
--color-one: #121212;
|
||||
--color-two: #808080;
|
||||
--color-three: #999999;
|
||||
--color-four: #666666;
|
||||
--color-three: #999;
|
||||
--color-four: #666;
|
||||
--color-five: #bfbfbf;
|
||||
--color-six: #e0e0e0;
|
||||
--color-seven: #555555;
|
||||
--color-seven: #555;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #44475a;
|
||||
--foreground-color: #8be9fd;
|
||||
--color-one: #ff5555;
|
||||
--logo-color: #ffb86c;
|
||||
--color-one: #f55;
|
||||
--color-two: #50fa7b;
|
||||
--color-three: #ffb86c;
|
||||
--color-four: #bd93f9;
|
||||
--color-five: #ff79c6;
|
||||
--color-six: #94a3a5;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #1d2021;
|
||||
--foreground-color: #ebdbb2;
|
||||
--logo-color: #ebdbb2;
|
||||
--color-one: #282828;
|
||||
--color-two: #98971a;
|
||||
--color-three: #d79921;
|
||||
--color-four: #458588;
|
||||
--color-five: #b16286;
|
||||
--color-six: #689d6a;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
:root {
|
||||
--background-color: #49483Eff;
|
||||
--foreground-color: #FFB269;
|
||||
--logo-color: #ffd866;
|
||||
--color-one: #272822ff;
|
||||
--color-two: #61AFEF;
|
||||
--color-three: #ffd866;
|
||||
--color-four: #fc9867;
|
||||
--color-five: #ab9df2;
|
||||
--color-six: #78dce8;
|
||||
--color-seven: #ffffff;
|
||||
--color-seven: #fff;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #122736ff;
|
||||
--foreground-color: #a2e2a9;
|
||||
--logo-color: #e2ecd6;
|
||||
--color-one: #121B2Cff;
|
||||
--color-two: #f08282;
|
||||
--color-three: #ABC5AAff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #1b2b34;
|
||||
--foreground-color: #d8dee9;
|
||||
--logo-color: #d8dee9;
|
||||
--color-one: #343d46;
|
||||
--color-two: #5FB3B3ff;
|
||||
--color-three: #69Cf;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #282c34;
|
||||
--foreground-color: #abb2bf;
|
||||
--logo-color: #c8ccd4;
|
||||
--color-one: #3b4048;
|
||||
--color-two: #a3be8c;
|
||||
--color-three: #b48ead;
|
||||
|
|
12
public/static/colorschemes/rose-pine-dawn.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #faf4ed;
|
||||
--foreground-color: #575279;
|
||||
--logo-color: #d7827e;
|
||||
--color-one: #f2e9e1;
|
||||
--color-two: #907aa9;
|
||||
--color-three: #56949f;
|
||||
--color-four: #ea9d34;
|
||||
--color-five: #d7827e;
|
||||
--color-six: #9893a5;
|
||||
--color-seven: #575279;
|
||||
}
|
12
public/static/colorschemes/rose-pine-moon.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #232136;
|
||||
--foreground-color: #e0def4;
|
||||
--logo-color: #ea9a97;
|
||||
--color-one: #393552;
|
||||
--color-two: #c4a7e7;
|
||||
--color-three: #9ccfd8;
|
||||
--color-four: #f6c177;
|
||||
--color-five: #ea9a97;
|
||||
--color-six: #6e6a86;
|
||||
--color-seven: #e0def4;
|
||||
}
|
12
public/static/colorschemes/rose-pine.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
:root {
|
||||
--background-color: #191724;
|
||||
--foreground-color: #e0def4;
|
||||
--logo-color: #ebbcba;
|
||||
--color-one: #26233a;
|
||||
--color-two: #c4a7e7;
|
||||
--color-three: #9ccfd8;
|
||||
--color-four: #f6c177;
|
||||
--color-five: #eb6f92;
|
||||
--color-six: #6e6a86;
|
||||
--color-seven: #e0def4;
|
||||
}
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #002b36;
|
||||
--foreground-color: #c9e0e6;
|
||||
--logo-color: #EEE8D5ff;
|
||||
--color-one: #073642;
|
||||
--color-two: #2AA198ff;
|
||||
--color-three: #2AA198ff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #EEE8D5ff;
|
||||
--foreground-color: #b1ab97;
|
||||
--logo-color: #586E75;
|
||||
--color-one: #fdf6e3;
|
||||
--color-two: #DC322Fff;
|
||||
--color-three: #586E75ff;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #1a1b26;
|
||||
--foreground-color: #c0caf5;
|
||||
--logo-color: #e2afff;
|
||||
--color-one: #32364a;
|
||||
--color-two: #a9b1d6;
|
||||
--color-three: #5a5bb8;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
:root {
|
||||
--background-color: #35383Cff;
|
||||
--foreground-color: #D7DAD8ff;
|
||||
--logo-color: #D7DAD8ff;
|
||||
--color-one: #1d1f21;
|
||||
--color-two: #D77C79ff;
|
||||
--color-three: #f0c674;
|
||||
|
|
|
@ -1,3 +1,62 @@
|
|||
/**
|
||||
* This function gets the saved cookie if it is present on the user's machine. If it
|
||||
* is available then it is parsed and converted to an object which is then used to
|
||||
* retrieve the preferences that the user had selected previously and is then loaded
|
||||
* and used for displaying the user provided settings by setting them as the selected
|
||||
* options in the settings page.
|
||||
*
|
||||
* @function
|
||||
* @param {string} cookie - It takes the client settings cookie as a string.
|
||||
* @returns {void}
|
||||
*/
|
||||
function setClientSettingsOnPage(cookie) {
|
||||
let cookie_value = cookie
|
||||
.split(';')
|
||||
.map((item) => item.split('='))
|
||||
.reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})
|
||||
|
||||
// Loop through all select tags and add their values to the cookie dictionary
|
||||
document.querySelectorAll('select').forEach((select_tag) => {
|
||||
switch (select_tag.name) {
|
||||
case 'themes':
|
||||
select_tag.value = cookie_value['theme']
|
||||
break
|
||||
case 'colorschemes':
|
||||
select_tag.value = cookie_value['colorscheme']
|
||||
break
|
||||
case 'animations':
|
||||
select_tag.value = cookie_value['animation']
|
||||
break
|
||||
case 'safe_search_levels':
|
||||
select_tag.value = cookie_value['safe_search_level']
|
||||
break
|
||||
}
|
||||
})
|
||||
let engines = document.querySelectorAll('.engine')
|
||||
let engines_cookie = cookie_value['engines']
|
||||
|
||||
if (engines_cookie.length === engines.length) {
|
||||
document.querySelector('.select_all').checked = true
|
||||
engines.forEach((engine_checkbox) => {
|
||||
engine_checkbox.checked = true
|
||||
})
|
||||
} else {
|
||||
engines.forEach((engines_checkbox) => {
|
||||
engines_checkbox.checked = false
|
||||
})
|
||||
engines_cookie.forEach((engine_name) => {
|
||||
engines.forEach((engine_checkbox) => {
|
||||
if (
|
||||
engine_checkbox.parentNode.parentNode.innerText.trim() ===
|
||||
engine_name.trim()
|
||||
) {
|
||||
engine_checkbox.checked = true
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is executed when any page on the website finishes loading and
|
||||
* this function retrieves the cookies if it is present on the user's machine.
|
||||
|
@ -16,9 +75,14 @@ document.addEventListener(
|
|||
let cookie = decodeURIComponent(document.cookie)
|
||||
// Set the value of the input field to the decoded cookie value if it is not empty
|
||||
// Otherwise, display a message indicating that no cookies have been saved on the user's system
|
||||
document.querySelector('.cookies input').value = cookie.length
|
||||
? cookie
|
||||
: 'No cookies have been saved on your system'
|
||||
if (cookie.length) {
|
||||
document.querySelector('.cookies input').value = cookie
|
||||
// This function displays the user provided settings on the settings page.
|
||||
setClientSettingsOnPage(cookie)
|
||||
} else {
|
||||
document.querySelector('.cookies input').value =
|
||||
'No cookies have been saved on your system'
|
||||
}
|
||||
} catch (error) {
|
||||
// If there is an error decoding the cookie, log the error to the console
|
||||
// and display an error message in the input field
|
||||
|
|
|
@ -1,34 +1,6 @@
|
|||
/**
|
||||
* Selects the input element for the search box
|
||||
* @type {HTMLInputElement}
|
||||
*/
|
||||
const searchBox = document.querySelector('input')
|
||||
|
||||
/**
|
||||
* Redirects the user to the search results page with the query parameter
|
||||
*/
|
||||
function searchWeb() {
|
||||
const query = searchBox.value.trim()
|
||||
try {
|
||||
let safeSearchLevel = document.querySelector('.search_options select').value
|
||||
if (query) {
|
||||
window.location.href = `search?q=${encodeURIComponent(
|
||||
query,
|
||||
)}&safesearch=${encodeURIComponent(safeSearchLevel)}`
|
||||
}
|
||||
} catch (error) {
|
||||
if (query) {
|
||||
window.location.href = `search?q=${encodeURIComponent(query)}`
|
||||
}
|
||||
}
|
||||
* A function that clears the search input text when the clear button is clicked.
|
||||
*/
|
||||
function clearSearchText() {
|
||||
document.querySelector('.search_bar > input').value = ''
|
||||
}
|
||||
|
||||
/**
|
||||
* Listens for the 'Enter' key press event on the search box and calls the searchWeb function
|
||||
* @param {KeyboardEvent} e - The keyboard event object
|
||||
*/
|
||||
searchBox.addEventListener('keyup', (e) => {
|
||||
if (e.key === 'Enter') {
|
||||
searchWeb()
|
||||
}
|
||||
})
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
/**
|
||||
* Navigates to the next page by incrementing the current page number in the URL query string.
|
||||
* @returns {void}
|
||||
*/
|
||||
function navigate_forward() {
|
||||
let url = new URL(window.location);
|
||||
let searchParams = url.searchParams;
|
||||
|
||||
let q = searchParams.get('q');
|
||||
let page = parseInt(searchParams.get('page'));
|
||||
|
||||
if (isNaN(page)) {
|
||||
page = 1;
|
||||
} else {
|
||||
page++;
|
||||
}
|
||||
|
||||
window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Navigates to the previous page by decrementing the current page number in the URL query string.
|
||||
* @returns {void}
|
||||
*/
|
||||
function navigate_backward() {
|
||||
let url = new URL(window.location);
|
||||
let searchParams = url.searchParams;
|
||||
|
||||
let q = searchParams.get('q');
|
||||
let page = parseInt(searchParams.get('page'));
|
||||
|
||||
if (isNaN(page)) {
|
||||
page = 0;
|
||||
} else if (page > 0) {
|
||||
page--;
|
||||
}
|
||||
|
||||
window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
|
||||
}
|
|
@ -1,18 +0,0 @@
|
|||
document.addEventListener(
|
||||
'DOMContentLoaded',
|
||||
() => {
|
||||
let url = new URL(window.location)
|
||||
let searchParams = url.searchParams
|
||||
|
||||
let safeSearchLevel = searchParams.get('safesearch')
|
||||
|
||||
if (
|
||||
safeSearchLevel >= 0 &&
|
||||
safeSearchLevel <= 2 &&
|
||||
safeSearchLevel !== null
|
||||
) {
|
||||
document.querySelector('.search_options select').value = safeSearchLevel
|
||||
}
|
||||
},
|
||||
false,
|
||||
)
|
|
@ -50,6 +50,9 @@ function setClientSettings() {
|
|||
case 'colorschemes':
|
||||
cookie_dictionary['colorscheme'] = select_tag.value
|
||||
break
|
||||
case 'animations':
|
||||
cookie_dictionary['animation'] = select_tag.value || null
|
||||
break
|
||||
case 'safe_search_levels':
|
||||
cookie_dictionary['safe_search_level'] = Number(select_tag.value)
|
||||
break
|
||||
|
@@ -103,13 +106,50 @@ function getClientSettings() {
      .map((item) => item.split('='))
      .reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})

    // Loop through all link tags and update their href values to match the user's preferences
    Array.from(document.querySelectorAll('link')).forEach((item) => {
      if (item.href.includes('static/themes')) {
        item.href = `static/themes/${cookie_value['theme']}.css`
      } else if (item.href.includes('static/colorschemes')) {
        item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
    let links = Array.from(document.querySelectorAll('link'))

    // A check to determine whether the animation link exists under the head tag or not.
    // If it does not exist then create and add a new animation link under the head tag
    // and update the other link tags' href according to the settings provided by the user
    // via the UI. On the other hand, if it does exist then just update all the link tags'
    // href according to the settings provided by the user via the UI.
    if (!links.some((item) => item.href.includes('static/animations'))) {
      if (cookie_value['animation']) {
        let animation_link = document.createElement('link')
        animation_link.href = `static/animations/${cookie_value['animation']}.css`
        animation_link.rel = 'stylesheet'
        animation_link.type = 'text/css'
        document.querySelector('head').appendChild(animation_link)
      }
    })
      // Loop through all link tags and update their href values to match the user's preferences
      links.forEach((item) => {
        if (item.href.includes('static/themes')) {
          item.href = `static/themes/${cookie_value['theme']}.css`
        } else if (item.href.includes('static/colorschemes')) {
          item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
        }
      })
    } else {
      // Loop through all link tags and update their href values to match the user's preferences
      links.forEach((item) => {
        if (item.href.includes('static/themes')) {
          item.href = `static/themes/${cookie_value['theme']}.css`
        } else if (item.href.includes('static/colorschemes')) {
          item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
        } else if (
          item.href.includes('static/animations') &&
          cookie_value['animation']
        ) {
          item.href = `static/animations/${cookie_value['animation']}.css`
        }
      })
      if (!cookie_value['animation']) {
        document
          .querySelector('head')
          .removeChild(
            links.find((item) => item.href.includes('static/animations')),
          )
      }
    }
  }
}

@@ -1,4 +1,13 @@
/* @import url('./catppuccin-mocha.css'); */
@font-face {
  font-family: Rubik;
  font-style: normal;
  font-weight: 200 600;
  font-stretch: 0% 200%;
  font-display: swap;
  src: url('https://fonts.gstatic.com/s/rubik/v28/iJWKBXyIfDnIV7nErXyi0A.woff2')
    format('woff2');
}

* {
  padding: 0;
@@ -15,8 +24,14 @@ body {
  flex-direction: column;
  justify-content: space-between;
  align-items: center;
  height: 100vh;
  background: var(--color-one);
  min-height: 100vh;
  font-family: Rubik, sans-serif;
  background-color: var(--background-color);
}

/* enforce font for buttons */
button {
  font-family: Rubik, sans-serif;
}

/* styles for the index page */

@@ -29,6 +44,10 @@ body {
  align-items: center;
}

.search-container svg {
  color: var(--logo-color);
}

.search-container div {
  display: flex;
}

@@ -37,32 +56,66 @@ body {

.search_bar {
  display: flex;
  gap: 10px;
  align-items: center;
}

.search_bar input {
  padding: 1rem;
  border-radius: 6px;
  padding: 2.6rem 2.2rem;
  width: 50rem;
  height: 3rem;
  outline: none;
  border: none;
  box-shadow: rgba(0, 0, 0, 1);
  background: var(--foreground-color);
  box-shadow: rgb(0 0 0 / 1);
  background-color: var(--color-one);
  color: var(--foreground-color);
  outline-offset: 3px;
  font-size: 1.6rem;
}

.search_bar input::-webkit-search-results-button,
.search_bar input::-webkit-search-cancel-button {
  display: none;
}

.search_bar input:focus {
  outline: 2px solid var(--foreground-color);
}

.search_bar input::placeholder {
  color: var(--foreground-color);
  opacity: 1;
}

.search_bar button {
  padding: 1rem;
  border-radius: 0;
  padding: 2.6rem 3.2rem;
  border-radius: 6px;
  height: 3rem;
  display: flex;
  justify-content: center;
  align-items: center;
  outline: none;
  outline-offset: 3px;
  outline: 2px solid transparent;
  border: none;
  transition: 0.1s;
  gap: 0;
  background: var(--background-color);
  color: var(--color-three);
  background-color: var(--color-six);
  color: var(--background-color);
  font-weight: 600;
  letter-spacing: 0.1rem;
  position: relative;
}

.search_bar button img {
  position: absolute;
  left: 50%;
  top: 50%;
  transform: translate(-50%, -50%);
}

.search_bar button:active {
  outline: 2px solid var(--color-three);
}

.search_bar button:active,

@@ -81,13 +134,19 @@ body {
  width: 20rem;
  background-color: var(--color-one);
  color: var(--foreground-color);
  padding: 1rem 2rem;
  padding: 1.2rem 2rem;
  border-radius: 0.5rem;
  outline: none;
  outline-offset: 3px;
  outline: 2px solid transparent;
  border: none;
  text-transform: capitalize;
}

.search_area .search_options select:active,
.search_area .search_options select:hover {
  outline: 2px solid var(--color-three);
}

.search_area .search_options option:hover {
  background-color: var(--color-one);
}

@@ -126,9 +185,11 @@ body {
  min-height: 20rem;
  min-width: 22rem;
}

.error_box .dropdown_error_box.show {
  display: flex;
}

.error_box .dropdown_error_box .error_item,
.error_box .dropdown_error_box .no_errors {
  display: flex;
@@ -138,22 +199,25 @@ body {
  padding: 1rem;
  font-size: 1.2rem;
}

.error_box .dropdown_error_box .error_item {
  justify-content: space-between;
}

.error_box .dropdown_error_box .no_errors {
  min-height: 18rem;
  justify-content: center;
}

.error_box .dropdown_error_box .error_item:hover {
  box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1);
  box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
}

.error_box .error_item .severity_color {
  width: 1.2rem;
  height: 1.2rem;
}

.results .result_disallowed,
.results .result_filtered,
.results .result_engine_not_selected {
@@ -163,7 +227,7 @@ body {
  gap: 10rem;
  font-size: 2rem;
  color: var(--foreground-color);
  margin: 0rem 7rem;
  margin: 0 7rem;
}

.results .result_disallowed .user_query,
@@ -190,17 +254,24 @@ body {

/* styles for the footer and header */

header,
header {
  width: 100%;
  background: var(--background-color);
  display: flex;
  align-items: center;
  justify-content: space-between;
  padding: 2rem 3rem;
}

footer {
  width: 100%;
  background: var(--background-color);
  display: flex;
  padding: 1rem;
  align-items: center;
}

header {
  justify-content: space-between;
  padding: 1.7rem 1.7rem 4rem;
  gap: 1.8rem;
  flex-direction: column;
  justify-content: center;
}

header h1 a {
@@ -208,7 +279,6 @@ header h1 a {
  text-decoration: none;
  color: var(--foreground-color);
  letter-spacing: 0.1rem;
  margin-left: 1rem;
}

header ul,
@@ -250,11 +320,6 @@ footer div {
  gap: 1rem;
}

footer {
  flex-direction: column;
  justify-content: center;
}

/* Styles for the search page */

.results {
@@ -262,6 +327,11 @@ footer {
  display: flex;
  flex-direction: column;
  justify-content: space-around;
  gap: 1rem;
}

.result {
  gap: 1rem;
}

.results .search_bar {
@@ -273,6 +343,7 @@ footer {
  flex-direction: column;
  justify-content: space-between;
  margin: 2rem 0;
  content-visibility: auto;
}

.results_aggregated .result {
@@ -282,10 +353,10 @@ footer {
}

.results_aggregated .result h1 a {
  font-size: 1.5rem;
  font-size: 1.7rem;
  font-weight: normal;
  color: var(--color-two);
  text-decoration: none;
  letter-spacing: 0.1rem;
}

.results_aggregated .result h1 a:hover {
@@ -293,19 +364,20 @@ footer {
}

.results_aggregated .result h1 a:visited {
  color: var(--background-color);
  color: var(--color-five);
}

.results_aggregated .result small {
  color: var(--color-three);
  font-size: 1.1rem;
  font-size: 1.3rem;
  word-wrap: break-word;
  line-break: anywhere;
}

.results_aggregated .result p {
  color: var(--foreground-color);
  font-size: 1.2rem;
  font-size: 1.4rem;
  line-height: 2.4rem;
  margin-top: 0.3rem;
  word-wrap: break-word;
  line-break: anywhere;
@@ -316,6 +388,9 @@ footer {
  font-size: 1.2rem;
  padding: 1rem;
  color: var(--color-five);
  display: flex;
  gap: 1rem;
  justify-content: right;
}

/* Styles for the 404 page */

@@ -367,13 +442,13 @@ footer {
}

.page_navigation {
  padding: 0 0 2rem 0;
  padding: 0 0 2rem;
  display: flex;
  justify-content: space-between;
  align-items: center;
}

.page_navigation button {
.page_navigation a {
  background: var(--background-color);
  color: var(--foreground-color);
  padding: 1rem;
@@ -382,7 +457,7 @@ footer {
  border: none;
}

.page_navigation button:active {
.page_navigation a:active {
  filter: brightness(1.2);
}

@@ -392,39 +467,122 @@ footer {
  font-size: 1.5rem;
  color: var(--foreground-color);
  padding-bottom: 10px;
  max-width: 1100px;
  margin: 14rem auto;
  display: flex;
  flex-direction: column;
  row-gap: 100px;
}

.about-container article h1 {
  color: var(--color-two);
  font-size: 2.8rem;
  font-size: 4.5rem;
}

.about-container article div {
  padding-bottom: 15px;
.about-container article .logo-container {
  display: flex;
  align-items: center;
  justify-content: center;
}

.about-container article .logo-container svg {
  width: clamp(200px, 530px, 815px);
  color: var(--logo-color);
}

.about-container article .text-block {
  box-shadow: 0 0 0 100vmax var(--foreground-color);
  background-color: var(--foreground-color);
  clip-path: inset(0 -100vmax);
  padding: 90px 0;
  display: flex;
  gap: 40px;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  text-align: center;
  color: var(--background-color);
}

.about-container article .text-block .text-block-title {
  font-size: 64px;
  font-weight: 500;
}

.hero-text-container {
  width: 860px;
}

.hero-text {
  font-size: 45px;
  font-weight: 200;
}

.about-container a {
  color: var(--color-three);
}

.about-container article h2 {
  color: var(--color-three);
  font-size: 1.8rem;
  padding-bottom: 10px;
}

.about-container p {
  color: var(--foreground-color);
  font-size: 1.6rem;
  padding-bottom: 10px;
}

.about-container h3 {
  font-size: 1.5rem;
}

.about-container {
  width: 80%;
  margin-bottom: 140px;
}

.feature-list {
  padding: 35px;
  display: flex;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  row-gap: 60px;
}

.feature-list-title {
  text-align: center;
  font-size: 64px;
  font-weight: 500;
}

.features {
  display: grid;
  grid-template-columns: repeat(3, 1fr);
  gap: 40px;
}

.feature-card {
  background-color: var(--foreground-color);
  color: var(--background-color);
  text-align: center;
  display: flex;
  padding: 30px;
  border-radius: 24px;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  gap: 15px;
}

.feature-card-header {
  display: flex;
  align-items: center;
  justify-content: center;
  flex-direction: column;
  row-gap: 15px;
}

.feature-card-header h4 {
  font-size: 33px;
  font-weight: 500;
}

.feature-card-body p {
  font-size: 20px;
  font-weight: 200;
}

.about-footnote {
  font-size: 24px;
  text-align: center;
  color: var(--foreground-color);
}

/* Styles for the settings page */

@@ -432,6 +590,7 @@ footer {
  display: flex;
  justify-content: space-around;
  width: 80dvw;
  margin: 5rem 0;
}

.settings h1 {
@@ -439,9 +598,18 @@ footer {
  font-size: 2.5rem;
}

.settings > h1 {
  margin-bottom: 4rem;
  margin-left: 2rem;
}

.settings hr {
  border-color: var(--color-three);
  margin: 0.3rem 0 1rem 0;
  margin: 0.3rem 0 1rem;
}

.settings > hr {
  margin-left: 2rem;
}

.settings_container .sidebar {
@@ -454,7 +622,6 @@ footer {
  margin-left: -0.7rem;
  padding: 0.7rem;
  border-radius: 5px;
  font-weight: bold;
  margin-bottom: 0.5rem;
  color: var(--foreground-color);
  text-transform: capitalize;
@@ -462,18 +629,30 @@ footer {
}

.settings_container .sidebar .btn {
  padding: 0.5rem;
  padding: 2rem;
  border-radius: 0.5rem;
  outline-offset: 3px;
  outline: 2px solid transparent;
}

.settings_container .sidebar .btn:active {
  outline: 2px solid var(--color-two);
}

.settings_container .sidebar .btn:not(.active):hover {
  color: var(--color-two);
}

.settings_container .sidebar .btn.active {
  background-color: var(--color-two);
  color: var(--background-color);
}

.settings_container .main_container {
  width: 70%;
  border-left: 1.5px solid var(--color-three);
  padding-left: 3rem;
  border: none;
}

.settings_container .tab {
@@ -482,6 +661,7 @@ footer {

.settings_container .tab.active {
  display: flex;
  gap: 1.2rem;
  flex-direction: column;
  justify-content: space-around;
}

@@ -519,17 +699,26 @@ footer {
  text-transform: capitalize;
}

.settings_container .tab .description {
.settings_container .tab .description,
.settings_container .tab .admin_warning {
  font-size: 1.5rem;
  margin-bottom: 0.5rem;
}

.settings_container .tab .description {
  color: var(--foreground-color);
}

.settings_container .tab .admin_warning {
  color: var(--color-two);
}

.settings_container .user_interface select,
.settings_container .general select {
.settings_container .general select,
.settings_container .general form input {
  margin: 0.7rem 0;
  width: 20rem;
  background-color: var(--background-color);
  background-color: var(--color-one);
  color: var(--foreground-color);
  padding: 1rem 2rem;
  border-radius: 0.5rem;
@@ -538,6 +727,38 @@ footer {
  text-transform: capitalize;
}

.settings_container .general form input {
  padding: 0;
  width: 30rem;
  text-align: center;
  text-transform: none;
}

.settings_container .general form input::file-selector-button {
  content: 'Browse';
  padding: 1rem 2rem;
  font-size: 1.5rem;
  background: var(--color-three);
  color: var(--background-color);
  border-radius: 0.5rem;
  border: 2px solid transparent;
  font-weight: bold;
  transition: all 0.1s ease-out;
  cursor: pointer;
  box-shadow: 5px 5px;
  outline: none;
  translate: -1rem 0;
}

.settings_container .general form input::file-selector-button:active {
  box-shadow: none;
  translate: 5px 5px;
}

.settings_container .general .export_btn {
  margin-bottom: 1rem;
}

.settings_container .user_interface option:hover,
.settings_container .general option:hover {
  background-color: var(--color-one);
@@ -547,16 +768,19 @@ footer {
  display: flex;
  flex-direction: column;
  justify-content: center;
  gap: 1rem;
  padding: 1rem 0;
  margin-bottom: 2rem;
  gap: 2rem;
}

.settings_container .engines .toggle_btn {
  color: var(--foreground-color);
  font-size: 1.5rem;
  display: flex;
  gap: 0.5rem;
  align-items: center;
  border-radius: 100px;
  gap: 1.5rem;
  letter-spacing: 1px;
}

.settings_container .engines hr {
@@ -564,10 +788,11 @@ footer {
}

.settings_container .cookies input {
  margin: 1rem 0rem;
  margin: 1rem 0;
}

/* Styles for the toggle button */

/* The switch - the box around the slider */
.switch {
  position: relative;
@@ -587,25 +812,26 @@ footer {
.slider {
  position: absolute;
  cursor: pointer;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background-color: var(--background-color);
  -webkit-transition: 0.4s;
  transition: 0.4s;
  inset: 0;
  background-color: var(--foreground-color);
  transition: 0.2s;
  outline-offset: 3px;
  outline: 2px solid transparent;
}

.slider:before {
.slider:active {
  outline: 2px solid var(--foreground-color);
}

.slider::before {
  position: absolute;
  content: '';
  height: 2.6rem;
  width: 2.6rem;
  left: 0.4rem;
  bottom: 0.4rem;
  background-color: var(--foreground-color);
  -webkit-transition: 0.4s;
  transition: 0.4s;
  background-color: var(--background-color);
  transition: 0.2s;
}

input:checked + .slider {
@@ -616,9 +842,7 @@ input:focus + .slider {
  box-shadow: 0 0 1px var(--color-three);
}

input:checked + .slider:before {
  -webkit-transform: translateX(2.6rem);
  -ms-transform: translateX(2.6rem);
input:checked + .slider::before {
  transform: translateX(2.6rem);
}

@@ -627,6 +851,50 @@ input:checked + .slider:before {
  border-radius: 3.4rem;
}

.slider.round:before {
.slider.round::before {
  border-radius: 50%;
}

@media screen and (width <= 1136px) {
  .hero-text-container {
    width: unset;
  }

  .features {
    grid-template-columns: repeat(2, 1fr);
  }
}

@media screen and (width <= 706px) {
  .about-container article .logo-container svg {
    width: clamp(200px, 290px, 815px);
  }

  .about-container article .text-block .text-block-title {
    font-size: 33px;
  }

  .hero-text {
    font-size: 22px;
  }

  .about-container {
    width: unset;
  }

  .feature-list-title {
    font-size: 33px;
  }

  .features {
    grid-template-columns: 1fr;
  }

  .feature-list {
    padding: 35px 0;
  }

  .feature-card {
    border-radius: 0;
  }
}

@@ -1,10 +0,0 @@
{{>header this}}
<main class="error_container">
  <img src="images/robot-404.svg" alt="Image of broken robot." />
  <div class="error_content">
    <h1>Aw! snap</h1>
    <h2>404 Page Not Found!</h2>
    <p>Go to <a href="/">search page</a></p>
  </div>
</main>
{{>footer}}

@@ -1,29 +0,0 @@
{{>header this}}
<main class="about-container">
  <article>
    <div>
      <h1>Websurfx</h1>
      <hr size="4" width="100%" color="#a6e3a1">
    </div>
    <p>A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy.<br> It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads.
    </p>

    <h2>Some of the Top Features:</h2>

    <ul><strong>Lightning fast</strong> - Results load within milliseconds for an instant search experience.</ul>

    <ul><strong>Secure search</strong> - All searches are performed over an encrypted connection to prevent snooping.</ul>

    <ul><strong>Ad-free results</strong> - All search results are ad free and clutter free for a clean search experience.</ul>

    <ul><strong>Privacy focused</strong> - Websurfx does not track, store or sell your search data. Your privacy is our priority.</ul>

    <ul><strong>Free and Open source</strong> - The entire project's code is open source and available for free on <a href="https://github.com/neon-mmd/websurfx">GitHub</a> under the GNU Affero General Public License.</ul>

    <ul><strong>Highly customizable</strong> - Websurfx comes with 9 built-in color themes and supports creating custom themes effortlessly.</ul>
  </article>

  <h3>Developed by: <a href="https://github.com/neon-mmd/websurfx">Websurfx team</a></h3>
</main>
{{>footer}}

@@ -1,3 +0,0 @@
<div class="search_bar">
  <input type="search" name="search-box" value="{{this.pageQuery}}" placeholder="Type to search" />
  <button type="submit" onclick="searchWeb()">search</button>

@@ -1,12 +0,0 @@
<div class="cookies tab">
  <h1>Cookies</h1>
  <p class="description">
    These are the cookies saved on your system; they contain the preferences
    you chose on the settings page
  </p>
  <input type="text" name="cookie_field" value="" readonly />
  <p class="description">
    The cookies stored are not used by us for any malicious intent or for
    tracking you in any way.
  </p>
</div>

@@ -1,32 +0,0 @@
<div class="engines tab">
  <h1>Engines</h1>
  <h3>select search engines</h3>
  <p class="description">
    Select the search engines from the list of engines that you want results
    from
  </p>
  <div class="engine_selection">
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="select_all" onchange="toggleAllSelection()" />
        <span class="slider round"></span>
      </label>
      Select All
    </div>
    <hr />
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="engine" />
        <span class="slider round"></span>
      </label>
      DuckDuckGo
    </div>
    <div class="toggle_btn">
      <label class="switch">
        <input type="checkbox" class="engine" />
        <span class="slider round"></span>
      </label>
      Searx
    </div>
  </div>
</div>

@@ -1,16 +0,0 @@
<footer>
  <div>
    <span>Powered By <b>Websurfx</b></span><span>-</span><span>a lightning-fast, privacy-respecting, secure meta
      search engine</span>
  </div>
  <div>
    <ul>
      <li><a href="https://github.com/neon-mmd/websurfx">Source Code</a></li>
      <li><a href="https://github.com/neon-mmd/websurfx/issues">Issues/Bugs</a></li>
    </ul>
  </div>
</footer>
<script src="static/settings.js"></script>
</body>

</html>

@@ -1,13 +0,0 @@
<div class="general tab active">
  <h1>General</h1>
  <h3>Select a safe search level</h3>
  <p class="description">
    Select a safe search level from the menu below to filter content based on
    the level.
  </p>
  <select name="safe_search_levels">
    <option value=0>None</option>
    <option value=1>Low</option>
    <option value=2>Moderate</option>
  </select>
</div>

@@ -1,16 +0,0 @@
<!doctype html>
<html lang="en">

<head>
  <title>Websurfx</title>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <link href="static/colorschemes/{{colorscheme}}.css" rel="stylesheet" type="text/css" />
  <link href="static/themes/{{theme}}.css" rel="stylesheet" type="text/css" />
</head>

<body onload="getClientSettings()">
  <header>
    <h1><a href="/">Websurfx</a></h1>
    {{>navbar}}
  </header>

@@ -1,8 +0,0 @@
{{>header this}}
<main class="search-container">
  <img src="../images/websurfx_logo.png" alt="Websurfx meta-search engine logo" />
  {{>bar}}
  </div>
</main>
<script src="static/index.js"></script>
{{>footer}}

@@ -1,6 +0,0 @@
<nav>
  <ul>
    <li><a href="about">about</a></li>
    <li><a href="settings">settings</a></li>
  </ul>
</nav>

@@ -1,86 +0,0 @@
{{>header this.style}}
<main class="results">
  {{>search_bar this}}
  <div class="results_aggregated">
    {{#if results}} {{#each results}}
    <div class="result">
      <h1><a href="{{{this.url}}}">{{{this.title}}}</a></h1>
      <small>{{{this.url}}}</small>
      <p>{{{this.description}}}</p>
      <div class="upstream_engines">
        {{#each engine}}
        <span>{{{this}}}</span>
        {{/each}}
      </div>
    </div>
    {{/each}} {{else}} {{#if disallowed}}
    <div class="result_disallowed">
      <div class="description">
        <p>
          Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
          has been disallowed.
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          The query - <span class="user_query">{{{this.pageQuery}}}</span> - has
          been blacklisted via the server configuration and hence disallowed by
          the server, so no results could be displayed for your query.
        </p>
      </div>
      <img src="./images/barricade.png" alt="Image of a Barricade" />
    </div>
    {{else}} {{#if filtered}}
    <div class="result_filtered">
      <div class="description">
        <p>
          Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
          has been filtered.
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          All the results for this search have been configured to be filtered
          out via the server configuration and have therefore been completely
          filtered out.
        </p>
      </div>
      <img src="./images/filter.png" alt="Image of a paper inside a funnel" />
    </div>
    {{else}} {{#if noEnginesSelected}}
    <div class="result_engine_not_selected">
      <div class="description">
        <p>
          No results could be fetched for your search "<span class="user_query">{{{this.pageQuery}}}</span>".
        </p>
        <p class="description_paragraph">Dear user,</p>
        <p class="description_paragraph">
          No results could be retrieved from the upstream search engines as no
          upstream search engines were selected from the settings page.
        </p>
      </div>
      <img src="./images/no_selection.png" alt="Image of a white cross inside a red circle" />
    </div>
    {{else}}
    <div class="result_not_found">
      <p>Your search - {{{this.pageQuery}}} - did not match any documents.</p>
      <p class="suggestions">Suggestions:</p>
      <ul>
        <li>Make sure that all words are spelled correctly.</li>
        <li>Try different keywords.</li>
        <li>Try more general keywords.</li>
      </ul>
      <img src="./images/no_results.gif" alt="Man fishing gif" />
    </div>
    {{/if}} {{/if}} {{/if}} {{/if}}
  </div>
  <div class="page_navigation">
    <button type="button" onclick="navigate_backward()">
      ← previous
    </button>
    <button type="button" onclick="navigate_forward()">next →</button>
  </div>
</main>
<script src="static/index.js"></script>
<script src="static/search_area_options.js"></script>
<script src="static/pagination.js"></script>
<script src="static/error_box.js"></script>
{{>footer}}

@@ -1,36 +0,0 @@
<div class="search_area">
  {{>bar this}}
  <div class="error_box">
    {{#if engineErrorsInfo}}
    <button onclick="toggleErrorBox()" class="error_box_toggle_button">
      <img src="./images/warning.svg" alt="Warning icon for error box" />
    </button>
    <div class="dropdown_error_box">
      {{#each engineErrorsInfo}}
      <div class="error_item">
        <span class="engine_name">{{{this.engine}}}</span>
        <span class="engine_name">{{{this.error}}}</span>
        <span class="severity_color" style="background: {{{this.severity_color}}};"></span>
      </div>
      {{/each}}
    </div>
    {{else}}
    <button onclick="toggleErrorBox()" class="error_box_toggle_button">
      <img src="./images/info.svg" alt="Info icon for error box" />
    </button>
    <div class="dropdown_error_box">
      <div class="no_errors">
        Everything looks good 🙂!!
      </div>
    </div>
    {{/if}}
  </div>
  </div>
  <div class="search_options">
    <select name="safe_search_levels" {{#if (gte safeSearchLevel 3)}} disabled {{/if}}>
      <option value=0 {{#if (eq safeSearchLevel 0)}} selected {{/if}}>SafeSearch: None</option>
      <option value=1 {{#if (eq safeSearchLevel 1)}} selected {{/if}}>SafeSearch: Low</option>
      <option value=2 {{#if (eq safeSearchLevel 2)}} selected {{/if}}>SafeSearch: Moderate</option>
    </select>
  </div>
</div>

@@ -1,22 +0,0 @@
{{>header this}}
<main class="settings">
  <h1>Settings</h1>
  <hr />
  <div class="settings_container">
    <div class="sidebar">
      <div class="btn active" onclick="setActiveTab(this)">general</div>
      <div class="btn" onclick="setActiveTab(this)">user interface</div>
      <div class="btn" onclick="setActiveTab(this)">engines</div>
      <div class="btn" onclick="setActiveTab(this)">cookies</div>
    </div>
    <div class="main_container">
      {{> general_tab}} {{> user_interface_tab}} {{> engines_tab}} {{>
      cookies_tab}}
      <p class="message"></p>
      <button type="submit" onclick="setClientSettings()">Save</button>
    </div>
  </div>
</main>
<script src="static/settings.js"></script>
<script src="static/cookies.js"></script>
{{>footer}}

@@ -1,28 +0,0 @@
<div class="user_interface tab">
  <h1>User Interface</h1>
  <h3>select theme</h3>
  <p class="description">
    Select the theme from the available themes to be used in the user interface
  </p>
  <select name="themes">
    <option value="simple">simple</option>
  </select>
  <h3>select color scheme</h3>
  <p class="description">
    Select the color scheme for your theme to be used in the user interface
  </p>
  <select name="colorschemes">
    <option value="catppuccin-mocha">catppuccin mocha</option>
    <option value="dark-chocolate">dark chocolate</option>
    <option value="dracula">dracula</option>
    <option value="gruvbox-dark">gruvbox dark</option>
    <option value="monokai">monokai</option>
    <option value="nord">nord</option>
    <option value="oceanic-next">oceanic next</option>
    <option value="one-dark">one dark</option>
    <option value="solarized-dark">solarized dark</option>
    <option value="solarized-light">solarized light</option>
    <option value="tokyo-night">tokyo night</option>
    <option value="tomorrow-night">tomorrow night</option>
  </select>
</div>

@@ -2,10 +2,11 @@
//!
//! This module contains the main function which handles the logging of the application to the
//! stdout and handles the command line arguments provided and launches the `websurfx` server.

#[cfg(not(feature = "dhat-heap"))]
use mimalloc::MiMalloc;
use std::net::TcpListener;
use websurfx::{cache::cacher::Cache, config::parser::Config, run};

use std::{net::TcpListener, sync::OnceLock};
use websurfx::{cache::cacher::create_cache, config::parser::Config, run};

/// A dhat heap memory profiler
#[cfg(feature = "dhat-heap")]
@@ -16,6 +17,9 @@ static ALLOC: dhat::Alloc = dhat::Alloc;
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;

/// A static constant for holding the parsed config.
static CONFIG: OnceLock<Config> = OnceLock::new();

/// The function that launches the main server and registers all the routes of the website.
///
/// # Error
@@ -28,10 +32,10 @@ async fn main() -> std::io::Result<()> {
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    // Initialize the parsed config file.
    let config = Config::parse(false).unwrap();
    // Initialize the parsed config globally.
    let config = CONFIG.get_or_init(|| Config::parse(false).unwrap());

    let cache = Cache::build(&config).await;
    let cache = create_cache(config).await;

    log::info!(
        "started server on port {} and IP {}",
@@ -44,7 +48,7 @@ async fn main() -> std::io::Result<()> {
        config.port,
    );

    let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?;
    let listener = TcpListener::bind((config.binding_ip.as_str(), config.port))?;

    run(listener, config, cache)?.await
}

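A note on the `OnceLock` change above: parsing the config into a static means it is initialised exactly once and then handed out as a `&'static Config` borrow everywhere, which is what lets `TcpListener::bind` take `config.binding_ip.as_str()` instead of cloning the string. A minimal, self-contained sketch of the pattern; the `Config` struct and its fields here are stand-ins for illustration, not the real websurfx type:

```rust
use std::sync::OnceLock;

/// Stand-in for the real websurfx `Config` (assumption: the actual struct
/// carries many more fields such as `binding_ip`).
struct Config {
    port: u16,
}

impl Config {
    fn parse() -> Self {
        // In websurfx this would read the config file; hard-coded here.
        Config { port: 8080 }
    }
}

/// Initialised exactly once, on first access.
static CONFIG: OnceLock<Config> = OnceLock::new();

fn config() -> &'static Config {
    CONFIG.get_or_init(Config::parse)
}

fn main() {
    // Both calls observe the same instance; `parse` runs only once.
    assert_eq!(config().port, 8080);
    assert!(std::ptr::eq(config(), config()));
}
```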
src/cache/cacher.rs (658 lines changed, vendored)
@@ -2,8 +2,10 @@
//! from the upstream search engines in a json format.

use error_stack::Report;
use futures::future::join_all;
#[cfg(feature = "memory-cache")]
use mini_moka::sync::Cache as MokaCache;
use moka::future::Cache as MokaCache;

#[cfg(feature = "memory-cache")]
use std::time::Duration;
use tokio::sync::Mutex;
@@ -14,24 +16,13 @@ use super::error::CacheError;
#[cfg(feature = "redis-cache")]
use super::redis_cacher::RedisCache;

/// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
#[derive(Clone)]
pub enum Cache {
    /// Caching is disabled
    Disabled,
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    /// Encapsulates the Redis based cache
    Redis(RedisCache),
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    /// Contains the in-memory cache.
    InMemory(MokaCache<String, SearchResults>),
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    /// Contains both the in-memory cache and Redis based cache
    Hybrid(RedisCache, MokaCache<String, SearchResults>),
}
#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
use super::encryption::*;

impl Cache {
    /// A function that builds the cache from the given configuration.
/// Abstraction trait for common methods provided by a cache backend.
#[async_trait::async_trait]
pub trait Cacher: Send + Sync {
    /// A function that builds the cache from the given configuration.
    ///
    /// # Arguments
    ///
@@ -39,89 +30,10 @@ impl Cache {
    ///
    /// # Returns
    ///
    /// It returns a newly initialized variant based on the feature enabled by the user.
    pub async fn build(_config: &Config) -> Self {
        #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
        {
            log::info!("Using a hybrid cache");
            Cache::new_hybrid(
                RedisCache::new(&_config.redis_url, 5)
                    .await
                    .expect("Redis cache configured"),
            )
        }
        #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
        {
            log::info!("Listening redis server on {}", &_config.redis_url);
            Cache::new(
                RedisCache::new(&_config.redis_url, 5)
                    .await
                    .expect("Redis cache configured"),
            )
        }
        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
        {
            log::info!("Using an in-memory cache");
            Cache::new_in_memory()
        }
        #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
        {
            log::info!("Caching is disabled");
            Cache::Disabled
        }
    }

    /// A function that initializes a new connection pool struct.
    ///
    /// # Arguments
    ///
    /// * `redis_cache` - It takes the newly initialized connection pool struct as an argument.
    ///
    /// # Returns
    ///
    /// It returns a `Redis` variant with the newly initialized connection pool struct.
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    pub fn new(redis_cache: RedisCache) -> Self {
        Cache::Redis(redis_cache)
    }

    /// A function that initializes the `in memory` cache which is used to cache the results in
    /// memory with the search engine thus improving performance by making retrieval and caching of
    /// results faster.
    ///
    /// # Returns
    ///
    /// It returns an `InMemory` variant with the newly initialized in memory cache type.
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    pub fn new_in_memory() -> Self {
        let cache = MokaCache::builder()
            .max_capacity(1000)
            .time_to_live(Duration::from_secs(60))
            .build();
        Cache::InMemory(cache)
    }

    /// A function that initializes both the in-memory cache and the redis client connection for
    /// managing the hybrid cache, which increases the resiliency of the search engine by allowing
    /// the cache to switch to `in memory` caching if the `redis` cache server is temporarily
    /// unavailable.
    ///
    /// # Arguments
    ///
    /// * `redis_cache` - It takes the `redis` client connection struct as an argument.
    ///
    /// # Returns
    ///
    /// It returns a tuple variant `Hybrid` storing both the in-memory cache type and the `redis`
    /// client connection struct.
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    pub fn new_hybrid(redis_cache: RedisCache) -> Self {
        let cache = MokaCache::builder()
            .max_capacity(1000)
            .time_to_live(Duration::from_secs(60))
            .build();
        Cache::Hybrid(redis_cache, cache)
    }
    /// It returns a newly initialized backend based on the feature enabled by the user.
    async fn build(config: &Config) -> Self
    where
        Self: Sized;

    /// A function which fetches the cached json results as json string.
    ///
@@ -133,31 +45,7 @@ impl Cache {
    ///
    /// Returns the `SearchResults` from the cache if the program executes normally otherwise
    /// returns a `CacheError` if the results cannot be retrieved from the cache.
    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
        match self {
            Cache::Disabled => Err(Report::new(CacheError::MissingValue)),
            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
            Cache::Redis(redis_cache) => {
                let json = redis_cache.cached_json(_url).await?;
                Ok(serde_json::from_str::<SearchResults>(&json)
                    .map_err(|_| CacheError::SerializationError)?)
            }
            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
            Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) {
                Some(res) => Ok(res),
                None => Err(Report::new(CacheError::MissingValue)),
            },
            #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
            Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await {
                Ok(res) => Ok(serde_json::from_str::<SearchResults>(&res)
                    .map_err(|_| CacheError::SerializationError)?),
                Err(_) => match in_memory.get(&_url.to_string()) {
                    Some(res) => Ok(res),
                    None => Err(Report::new(CacheError::MissingValue)),
                },
            },
        }
    }
    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>>;

    /// A function which caches the results by using the `url` as the key and
    /// `json results` as the value and stores it in the cache
@@ -172,44 +60,469 @@ impl Cache {
    /// Returns a unit type if the program caches the given search results without a failure
    /// otherwise it returns a `CacheError` if the search results cannot be cached due to a
    /// failure.
    pub async fn cache_results(
    async fn cache_results(
        &mut self,
        _search_results: &SearchResults,
        _url: &str,
    ) -> Result<(), Report<CacheError>> {
        match self {
            Cache::Disabled => Ok(()),
            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
            Cache::Redis(redis_cache) => {
                let json = serde_json::to_string(_search_results)
                    .map_err(|_| CacheError::SerializationError)?;
                redis_cache.cache_results(&json, _url).await
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>>;

    /// A helper function which computes the hash of the url and formats and returns it as string.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes an url as string.
    fn hash_url(&self, url: &str) -> String {
        blake3::hash(url.as_bytes()).to_string()
    }

    /// A helper function that returns either encrypted or decrypted results.
    /// Feature flags (**encrypt-cache-results or cec-cache-results**) are required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    /// * `encrypt` - A boolean to choose whether to encrypt or decrypt the bytes
    ///
    /// # Error
    /// Returns either encrypted or decrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(
        // feature = "compress-cache-results",
        feature = "encrypt-cache-results",
        feature = "cec-cache-results"
    ))]
    async fn encrypt_or_decrypt_results(
        &mut self,
        mut bytes: Vec<u8>,
        encrypt: bool,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        use chacha20poly1305::{
            aead::{Aead, AeadCore, KeyInit, OsRng},
            ChaCha20Poly1305,
        };

        let cipher = CIPHER.get_or_init(|| {
            let key = ChaCha20Poly1305::generate_key(&mut OsRng);
            ChaCha20Poly1305::new(&key)
        });

        let encryption_key = ENCRYPTION_KEY.get_or_init(
            || ChaCha20Poly1305::generate_nonce(&mut OsRng), // 96-bits; unique per message
        );

        bytes = if encrypt {
            cipher
                .encrypt(encryption_key, bytes.as_ref())
                .map_err(|_| CacheError::EncryptionError)?
        } else {
            cipher
                .decrypt(encryption_key, bytes.as_ref())
                .map_err(|_| CacheError::EncryptionError)?
        };

        Ok(bytes)
    }

    /// A helper function that returns compressed results.
    /// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the compressed bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
    async fn compress_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        use tokio::io::AsyncWriteExt;
        let mut writer = async_compression::tokio::write::BrotliEncoder::new(Vec::new());
        writer
            .write_all(&bytes)
            .await
            .map_err(|_| CacheError::CompressionError)?;
        writer
            .shutdown()
            .await
            .map_err(|_| CacheError::CompressionError)?;
        bytes = writer.into_inner();
        Ok(bytes)
    }

    /// A helper function that returns compressed-encrypted results.
    /// Feature flag (**cec-cache-results**) is required for this to work.
    ///
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the compressed and encrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(feature = "cec-cache-results")]
    async fn compress_encrypt_compress_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        // compress first
        bytes = self.compress_results(bytes).await?;
        // encrypt
        bytes = self.encrypt_or_decrypt_results(bytes, true).await?;

        // compress again
        bytes = self.compress_results(bytes).await?;

        Ok(bytes)
    }

    /// A helper function that returns decompressed results.
    /// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
    /// If the bytes were compressed, encrypted and compressed again (**cec-cache-results**), they
    /// are decompressed, decrypted and decompressed again.
    /// # Arguments
    ///
    /// * `bytes` - It takes a slice of bytes as an argument.
    ///
    /// # Error
    /// Returns the uncompressed bytes on success otherwise it returns a CacheError
    /// on failure.
    #[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
    async fn decompress_results(&mut self, bytes: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
        cfg_if::cfg_if! {
            if #[cfg(feature = "compress-cache-results")]
            {
                decompress_util(bytes).await
            }
            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
            Cache::InMemory(cache) => {
                cache.insert(_url.to_string(), _search_results.clone());
                Ok(())
            }
            #[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
            Cache::Hybrid(redis_cache, cache) => {
                let json = serde_json::to_string(_search_results)
                    .map_err(|_| CacheError::SerializationError)?;
                match redis_cache.cache_results(&json, _url).await {
                    Ok(_) => Ok(()),
                    Err(_) => {
                        cache.insert(_url.to_string(), _search_results.clone());
                        Ok(())
                    }
                }
            else if #[cfg(feature = "cec-cache-results")]
            {
                let decompressed = decompress_util(bytes).await?;
                let decrypted = self.encrypt_or_decrypt_results(decompressed, false).await?;

                decompress_util(&decrypted).await
            }
        }
    }

    /// A helper function that compresses or encrypts search results before they're inserted into a cache store
    /// # Arguments
    ///
    /// * `search_results` - A reference to the `SearchResults` to process.
    ///
    /// # Error
    /// Returns a Vec of compressed or encrypted bytes on success otherwise it returns a CacheError
    /// on failure.
    async fn pre_process_search_results(
        &mut self,
        search_results: &SearchResults,
    ) -> Result<Vec<u8>, Report<CacheError>> {
        #[allow(unused_mut)] // needs to be mutable when any of the features is enabled
        let mut bytes: Vec<u8> = search_results.try_into()?;
        #[cfg(feature = "compress-cache-results")]
        {
            let compressed = self.compress_results(bytes).await?;
            bytes = compressed;
        }

        #[cfg(feature = "encrypt-cache-results")]
        {
            let encrypted = self.encrypt_or_decrypt_results(bytes, true).await?;
            bytes = encrypted;
        }

        #[cfg(feature = "cec-cache-results")]
        {
            let compressed_encrypted_compressed =
                self.compress_encrypt_compress_results(bytes).await?;
            bytes = compressed_encrypted_compressed;
        }

        Ok(bytes)
    }

    /// A helper function that decompresses or decrypts search results after they're fetched from the cache-store
    ///
    /// # Arguments
    ///
    /// * `bytes` - A Vec of bytes stored in the cache.
    ///
    /// # Error
    /// Returns the SearchResults struct on success otherwise it returns a CacheError
    /// on failure.
    #[allow(unused_mut)] // needs to be mutable when any of the features is enabled
    async fn post_process_search_results(
        &mut self,
        mut bytes: Vec<u8>,
    ) -> Result<SearchResults, Report<CacheError>> {
        #[cfg(feature = "compress-cache-results")]
        {
            let decompressed = self.decompress_results(&bytes).await?;
            bytes = decompressed
        }

        #[cfg(feature = "encrypt-cache-results")]
        {
            let decrypted = self.encrypt_or_decrypt_results(bytes, false).await?;
            bytes = decrypted
        }

        #[cfg(feature = "cec-cache-results")]
        {
            let decompressed_decrypted = self.decompress_results(&bytes).await?;
            bytes = decompressed_decrypted;
        }

        Ok(bytes.try_into()?)
    }
}

/// A helper function that returns decompressed results.
/// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
///
/// # Error
/// Returns the uncompressed bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
async fn decompress_util(input: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
    use tokio::io::AsyncWriteExt;
    let mut writer = async_compression::tokio::write::BrotliDecoder::new(Vec::new());

    writer
        .write_all(input)
        .await
        .map_err(|_| CacheError::CompressionError)?;
    writer
        .shutdown()
        .await
        .map_err(|_| CacheError::CompressionError)?;
    let bytes = writer.into_inner();
    Ok(bytes)
}

#[cfg(feature = "redis-cache")]
#[async_trait::async_trait]
impl Cacher for RedisCache {
    async fn build(config: &Config) -> Self {
        log::info!(
            "Initialising redis cache. Listening to {}",
            &config.redis_url
        );
        RedisCache::new(&config.redis_url, 5, config.cache_expiry_time)
            .await
            .expect("Redis cache configured")
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        use base64::Engine;
        let hashed_url_string: &str = &self.hash_url(url);
        let base64_string = self.cached_json(hashed_url_string).await?;

        let bytes = base64::engine::general_purpose::STANDARD_NO_PAD
            .decode(base64_string)
            .map_err(|_| CacheError::Base64DecodingOrEncodingError)?;
        self.post_process_search_results(bytes).await
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        use base64::Engine;

        // size of search_results is expected to be equal to size of urls -> key/value pairs for cache;
        let search_results_len = search_results.len();

        let mut bytes = Vec::with_capacity(search_results_len);

        for result in search_results {
            let processed = self.pre_process_search_results(result).await?;
            bytes.push(processed);
        }

        let base64_strings = bytes
            .iter()
            .map(|bytes_vec| base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes_vec));

        let mut hashed_url_strings = Vec::with_capacity(search_results_len);

        for url in urls {
            let hash = self.hash_url(url);
            hashed_url_strings.push(hash);
        }
        self.cache_json(base64_strings, hashed_url_strings.into_iter())
            .await
    }
}
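The Redis backend above reduces to a symmetric key/value pipeline: BLAKE3-hash the URL into a fixed-size key, serialise the results with bincode (compressing or encrypting in between when those features are enabled), and base64-encode the bytes so they can be stored as a string. A minimal round-trip sketch of that pipeline using the same crates the diff imports (blake3, bincode, base64, serde); the `SearchResults` struct here is a toy stand-in for the real type:

```rust
use base64::Engine;
use serde::{Deserialize, Serialize};

/// Toy stand-in for websurfx's `SearchResults` (assumption: the real type is
/// larger but equally serde-serialisable).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SearchResults {
    query: String,
    titles: Vec<String>,
}

fn main() {
    let results = SearchResults {
        query: "rust meta search".into(),
        titles: vec!["websurfx".into()],
    };

    // Key: fixed-size BLAKE3 hex digest of the search URL.
    let key = blake3::hash(b"search?q=rust+meta+search").to_string();

    // Value: bincode bytes, base64-encoded for safe storage as a string.
    let bytes = bincode::serialize(&results).expect("serialise");
    let value = base64::engine::general_purpose::STANDARD_NO_PAD.encode(&bytes);

    // Read path: decode and deserialise back into the struct.
    let decoded = base64::engine::general_purpose::STANDARD_NO_PAD
        .decode(&value)
        .expect("decode");
    let roundtrip: SearchResults = bincode::deserialize(&decoded).expect("deserialise");

    assert_eq!(roundtrip, results);
    println!("stored {} base64 chars under key {key}", value.len());
}
```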
||||
/// TryInto implementation for SearchResults from Vec<u8>
|
||||
use std::{convert::TryInto, sync::Arc};
|
||||
|
||||
impl TryInto<SearchResults> for Vec<u8> {
|
||||
type Error = CacheError;
|
||||
|
||||
fn try_into(self) -> Result<SearchResults, Self::Error> {
|
||||
bincode::deserialize_from(self.as_slice()).map_err(|_| CacheError::SerializationError)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryInto<Vec<u8>> for &SearchResults {
|
||||
type Error = CacheError;
|
||||
|
||||
fn try_into(self) -> Result<Vec<u8>, Self::Error> {
|
||||
bincode::serialize(self).map_err(|_| CacheError::SerializationError)
|
||||
}
|
||||
}
/// Memory based cache backend.
#[cfg(feature = "memory-cache")]
pub struct InMemoryCache {
    /// The backend cache which stores data.
    cache: Arc<MokaCache<String, Vec<u8>>>,
}

#[cfg(feature = "memory-cache")]
impl Clone for InMemoryCache {
    fn clone(&self) -> Self {
        Self {
            cache: self.cache.clone(),
        }
    }
}

#[cfg(feature = "memory-cache")]
#[async_trait::async_trait]
impl Cacher for InMemoryCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising in-memory cache");

        InMemoryCache {
            cache: Arc::new(
                MokaCache::builder()
                    .time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
                    .build(),
            ),
        }
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let hashed_url_string = self.hash_url(url);
        match self.cache.get(&hashed_url_string).await {
            Some(res) => self.post_process_search_results(res).await,
            None => Err(Report::new(CacheError::MissingValue)),
        }
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let mut tasks: Vec<_> = Vec::with_capacity(urls.len());
        for (url, search_result) in urls.iter().zip(search_results.iter()) {
            let hashed_url_string = self.hash_url(url);
            let bytes = self.pre_process_search_results(search_result).await?;
            let new_self = self.clone();
            tasks.push(tokio::spawn(async move {
                new_self.cache.insert(hashed_url_string, bytes).await
            }));
        }

        join_all(tasks).await;

        Ok(())
    }
}
/// Cache backend which utilises both memory and redis based caches.
///
/// The hybrid cache system uses both types of cache to ensure maximum availability.
/// The set method sets the key, value pair in both the caches. Therefore, in a case where the
/// redis cache becomes unavailable, the backend will retrieve the value from the in-memory cache.
#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
pub struct HybridCache {
    /// The in-memory backend cache which stores data.
    memory_cache: InMemoryCache,
    /// The redis backend cache which stores data.
    redis_cache: RedisCache,
}

#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
#[async_trait::async_trait]
impl Cacher for HybridCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising hybrid cache");
        HybridCache {
            memory_cache: InMemoryCache::build(config).await,
            redis_cache: RedisCache::build(config).await,
        }
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        match self.redis_cache.cached_results(url).await {
            Ok(res) => Ok(res),
            Err(_) => self.memory_cache.cached_results(url).await,
        }
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        self.redis_cache.cache_results(search_results, urls).await?;
        self.memory_cache
            .cache_results(search_results, urls)
            .await?;

        Ok(())
    }
}
/// Dummy cache backend
pub struct DisabledCache;

#[async_trait::async_trait]
impl Cacher for DisabledCache {
    async fn build(_config: &Config) -> Self {
        log::info!("Caching is disabled");
        DisabledCache
    }

    async fn cached_results(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
        Err(Report::new(CacheError::MissingValue))
    }

    async fn cache_results(
        &mut self,
        _search_results: &[SearchResults],
        _urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        Ok(())
    }
}
/// A structure to efficiently share the cache between threads - as it is protected by a Mutex.
pub struct SharedCache {
    /// The internal cache protected from concurrent access by a mutex
-    cache: Mutex<Cache>,
+    cache: Mutex<Box<dyn Cacher>>,
}

impl SharedCache {

@@ -220,9 +533,9 @@ impl SharedCache {
    /// * `cache` - It takes the `Cache` enum variant as an argument with the preferred cache type.
    ///
    /// Returns a newly constructed `SharedCache` struct.
-    pub fn new(cache: Cache) -> Self {
+    pub fn new(cache: impl Cacher + 'static) -> Self {
        Self {
-            cache: Mutex::new(cache),
+            cache: Mutex::new(Box::new(cache)),
        }
    }

@@ -231,15 +544,15 @@ impl SharedCache {
    /// # Arguments
    ///
    /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
    ///   cached results from the cache.
    ///
    /// # Error
    ///
    /// Returns a `SearchResults` struct containing the search results from the cache if nothing
    /// goes wrong otherwise returns a `CacheError`.
-    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
+    pub async fn cached_results(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let mut mut_cache = self.cache.lock().await;
-        mut_cache.cached_json(url).await
+        mut_cache.cached_results(url).await
    }

    /// A setter function which caches the results by using the `url` as the key and

@@ -248,9 +561,9 @@ impl SharedCache {
    /// # Arguments
    ///
    /// * `search_results` - It takes the `SearchResults` as an argument which are results that
    ///   need to be cached.
    /// * `url` - It takes the search url as an argument which will be used as the key for storing
    ///   results in the cache.
    ///
    /// # Error
    ///

@@ -258,10 +571,27 @@ impl SharedCache {
    /// on a failure.
    pub async fn cache_results(
        &self,
-        search_results: &SearchResults,
-        url: &str,
+        search_results: &[SearchResults],
+        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let mut mut_cache = self.cache.lock().await;
-        mut_cache.cache_results(search_results, url).await
+        mut_cache.cache_results(search_results, urls).await
    }
}
/// A function to initialise the cache backend.
pub async fn create_cache(config: &Config) -> impl Cacher {
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    return HybridCache::build(config).await;

    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    return InMemoryCache::build(config).await;

    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    return RedisCache::build(config).await;

    #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
    return DisabledCache::build(config).await;
}

//#[cfg(feature = "Compress-cache-results")]
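Taken together, `create_cache` and `SharedCache::new` are all the wiring a caller needs: the cargo features pick the backend, and the boxed trait object hides which one was chosen. A minimal sketch of that wiring (not part of the diff; it assumes a parsed `Config` is already in hand):

```rust
// A minimal wiring sketch (not part of the diff), assuming `config` has already
// been parsed. `create_cache` picks the backend from the enabled cargo features,
// and `SharedCache` boxes it behind a Mutex so request handlers can share it.
async fn setup_cache(config: &Config) -> SharedCache {
    let cache = create_cache(config).await;
    SharedCache::new(cache)
}
```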
src/cache/encryption.rs (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
use chacha20poly1305::{
    consts::{B0, B1},
    ChaChaPoly1305,
};
use std::sync::OnceLock;

use chacha20::{
    cipher::{
        generic_array::GenericArray,
        typenum::{UInt, UTerm},
        StreamCipherCoreWrapper,
    },
    ChaChaCore,
};

/// The ChaCha20 core wrapped in a stream cipher for use in ChaCha20-Poly1305 authenticated encryption.
type StreamCipherCoreWrapperType =
    StreamCipherCoreWrapper<ChaChaCore<UInt<UInt<UInt<UInt<UTerm, B1>, B0>, B1>, B0>>>;
/// Our ChaCha20-Poly1305 cipher instance, lazily initialized.
pub static CIPHER: OnceLock<ChaChaPoly1305<StreamCipherCoreWrapperType>> = OnceLock::new();

/// The type alias for our encryption key material, a 12-byte array (the type-level
/// integer `UInt<UInt<UInt<UInt<UTerm, B1>, B1>, B0>, B0>` encodes 0b1100 = 12).
type GenericArrayType = GenericArray<u8, UInt<UInt<UInt<UInt<UTerm, B1>, B1>, B0>, B0>>;
/// Our encryption key, lazily initialized.
pub static ENCRYPTION_KEY: OnceLock<GenericArrayType> = OnceLock::new();
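Nothing in this file shows how the two statics get populated. The following is an initialization sketch (not part of the diff), assuming the `chacha20poly1305` crate's `KeyInit`/`AeadCore` APIs; the helper name `init_cipher` is hypothetical. Note that `ChaCha20Poly1305` from the crate is the same concrete type as `ChaChaPoly1305<StreamCipherCoreWrapperType>` above, and a 12-byte `GenericArray` is exactly the size of an AEAD nonce:

```rust
use chacha20poly1305::{
    aead::{AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305,
};

/// Hypothetical one-time initializer; OnceLock guarantees each static is set once.
fn init_cipher() {
    // Build the cipher from a freshly generated 32-byte key.
    let key = ChaCha20Poly1305::generate_key(&mut OsRng);
    CIPHER.get_or_init(|| ChaCha20Poly1305::new(&key));
    // The 12-byte static matches the ChaCha20-Poly1305 nonce size.
    ENCRYPTION_KEY.get_or_init(|| ChaCha20Poly1305::generate_nonce(&mut OsRng));
}
```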
src/cache/error.rs (vendored, 18 lines changed)
@@ -18,6 +18,12 @@ pub enum CacheError {
    SerializationError,
    /// Returned when the value is missing.
    MissingValue,
+    /// Whenever encryption or decryption of the cache results fails.
+    EncryptionError,
+    /// Whenever compression of the cache results fails.
+    CompressionError,
+    /// Whenever base64 encoding or decoding fails.
+    Base64DecodingOrEncodingError,
}

impl fmt::Display for CacheError {

@@ -43,6 +49,18 @@ impl fmt::Display for CacheError {
            CacheError::SerializationError => {
                write!(f, "Unable to serialize, deserialize from the cache")
            }
+            CacheError::EncryptionError => {
+                write!(f, "Failed to encrypt or decrypt cache-results")
+            }
+            CacheError::CompressionError => {
+                write!(f, "failed to compress or uncompress cache results")
+            }
+            CacheError::Base64DecodingOrEncodingError => {
+                write!(f, "base64 encoding or decoding failed")
+            }
        }
    }
}
src/cache/mod.rs (vendored, 6 lines changed)
@@ -1,7 +1,11 @@
//! This module provides the modules which provide the functionality to cache the aggregated
//! results fetched and aggregated from the upstream search engines in a json format.

pub mod cacher;

+#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
+/// The encryption module contains encryption utils such as the cipher and key.
+pub mod encryption;
pub mod error;

#[cfg(feature = "redis-cache")]
pub mod redis_cacher;
src/cache/redis_cacher.rs (vendored, 98 lines changed)
@@ -1,24 +1,31 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.

-use error_stack::Report;
-use futures::future::try_join_all;
-use md5::compute;
-use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};

use super::error::CacheError;
+use error_stack::Report;
+use futures::stream::FuturesUnordered;
+use redis::{
+    aio::ConnectionManager, AsyncCommands, Client, ExistenceCheck, RedisError, SetExpiry,
+    SetOptions,
+};

+/// A constant holding the redis pipeline size.
+const REDIS_PIPELINE_SIZE: usize = 3;

/// A named struct which stores the redis Connection url address to which the client
/// will connect.
#[derive(Clone)]
pub struct RedisCache {
    /// It stores a pool of connections ready to be used.
-    connection_pool: Vec<ConnectionManager>,
+    connection_pool: Box<[ConnectionManager]>,
    /// It stores the size of the connection pool (in other words the number of
    /// connections that should be stored in the pool).
    pool_size: u8,
    /// It stores the index of which connection is being used at the moment.
    current_connection: u8,
    /// It stores the max TTL for keys.
    cache_ttl: u16,
+    /// It stores the redis pipeline struct of size 3.
+    pipeline: redis::Pipeline,
}

impl RedisCache {
|
|||
/// * `redis_connection_url` - It takes the redis Connection url address.
|
||||
/// * `pool_size` - It takes the size of the connection pool (in other words the number of
|
||||
/// connections that should be stored in the pool).
|
||||
/// * `cache_ttl` - It takes the the time to live for cached results to live in the redis
|
||||
/// server.
|
||||
///
|
||||
/// # Error
|
||||
///
|
||||
|
@@ -37,48 +46,50 @@ impl RedisCache {
    pub async fn new(
        redis_connection_url: &str,
        pool_size: u8,
        cache_ttl: u16,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        let client = Client::open(redis_connection_url)?;
-        let mut tasks: Vec<_> = Vec::new();
+        let tasks: FuturesUnordered<_> = FuturesUnordered::new();

        for _ in 0..pool_size {
-            tasks.push(client.get_tokio_connection_manager());
+            let client_partially_cloned = client.clone();
+            tasks.push(tokio::spawn(async move {
+                client_partially_cloned.get_connection_manager().await
+            }));
        }

+        let mut outputs = Vec::with_capacity(tasks.len());
+        for task in tasks {
+            outputs.push(task.await??);
+        }

        let redis_cache = RedisCache {
-            connection_pool: try_join_all(tasks).await?,
+            connection_pool: outputs.into_boxed_slice(),
            pool_size,
            current_connection: Default::default(),
            cache_ttl,
+            pipeline: redis::Pipeline::with_capacity(REDIS_PIPELINE_SIZE),
        };

        Ok(redis_cache)
    }
-    /// A helper function which computes the hash of the url and formats and returns it as string.
+    /// A function which fetches the cached json as a json string from the redis server.
    ///
    /// # Arguments
    ///
-    /// * `url` - It takes an url as string.
-    fn hash_url(&self, url: &str) -> String {
-        format!("{:?}", compute(url))
-    }
-
-    /// A function which fetches the cached json results as json string from the redis server.
-    ///
-    /// # Arguments
-    ///
-    /// * `url` - It takes an url as a string.
+    /// * `key` - It takes a string as the key.
    ///
    /// # Error
    ///
-    /// Returns the results as a String from the cache on success otherwise returns a `CacheError`
+    /// Returns the json as a String from the cache on success otherwise returns a `CacheError`
    /// on a failure.
-    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<CacheError>> {
+    pub async fn cached_json(&mut self, key: &str) -> Result<String, Report<CacheError>> {
        self.current_connection = Default::default();
-        let hashed_url_string: &str = &self.hash_url(url);

        let mut result: Result<String, RedisError> = self.connection_pool
            [self.current_connection as usize]
-            .get(hashed_url_string)
+            .get(key)
            .await;

        // Code to check whether the current connection being used is dropped with connection error

@@ -99,7 +110,7 @@ impl RedisCache {
                ));
            }
            result = self.connection_pool[self.current_connection as usize]
-                .get(hashed_url_string)
+                .get(key)
                .await;
            continue;
        }
@@ -110,30 +121,40 @@ impl RedisCache {
        }
    }

-    /// A function which caches the results by using the hashed `url` as the key and
+    /// A function which caches the json by using the key and
    /// `json results` as the value and stores it in the redis server with the ttl (time
    /// to live) set to the configured `cache_ttl`.
    ///
    /// # Arguments
    ///
    /// * `json_results` - It takes the json results string as an argument.
-    /// * `url` - It takes the url as a String.
+    /// * `key` - It takes the key as a String.
    ///
    /// # Error
    ///
    /// Returns a unit type if the results are cached successfully otherwise returns a `CacheError`
    /// on a failure.
-    pub async fn cache_results(
+    pub async fn cache_json(
        &mut self,
-        json_results: &str,
-        url: &str,
+        json_results: impl Iterator<Item = String>,
+        keys: impl Iterator<Item = String>,
    ) -> Result<(), Report<CacheError>> {
        self.current_connection = Default::default();
-        let hashed_url_string: &str = &self.hash_url(url);

-        let mut result: Result<(), RedisError> = self.connection_pool
-            [self.current_connection as usize]
-            .set_ex(hashed_url_string, json_results, 60)
+        for (key, json_result) in keys.zip(json_results) {
+            self.pipeline.set_options(
+                key,
+                json_result,
+                SetOptions::default()
+                    .conditional_set(ExistenceCheck::NX)
+                    .get(true)
+                    .with_expiration(SetExpiry::EX(self.cache_ttl.into())),
+            );
+        }

+        let mut result: Result<(), RedisError> = self
+            .pipeline
+            .query_async(&mut self.connection_pool[self.current_connection as usize])
            .await;

        // Code to check whether the current connection being used is dropped with connection error

@@ -153,8 +174,11 @@ impl RedisCache {
                    CacheError::PoolExhaustionWithConnectionDropError,
                ));
            }
-            result = self.connection_pool[self.current_connection as usize]
-                .set_ex(hashed_url_string, json_results, 60)
+            result = self
+                .pipeline
+                .query_async(
+                    &mut self.connection_pool[self.current_connection as usize],
+                )
                .await;
            continue;
        }
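Each pipelined `set_options` call above amounts to a single Redis `SET key value NX GET EX <ttl>` command: write the key only if it does not already exist, return the previous value, and attach an expiry. A standalone sketch of the same option set (not part of the diff), using the `redis` crate API exactly as the code above does:

```rust
use redis::{ExistenceCheck, SetExpiry, SetOptions};

/// Builds the option set used by the pipeline: `SET .. NX GET EX <ttl>`.
fn cache_set_options(ttl: u16) -> SetOptions {
    SetOptions::default()
        // NX: only set the key if it does not already exist.
        .conditional_set(ExistenceCheck::NX)
        // GET: return the previous value stored at the key, if any.
        .get(true)
        // EX: expire the key after `ttl` seconds.
        .with_expiration(SetExpiry::EX(ttl.into()))
}
```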
@@ -1,15 +1,15 @@
//! This module provides the functionality to parse the lua config and convert the config options
//! into rust readable form.

-use crate::handler::paths::{file_path, FileType};
+use crate::handler::{file_path, FileType};

use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
use log::LevelFilter;
use mlua::Lua;
+use reqwest::Proxy;
use std::{collections::HashMap, fs, thread::available_parallelism};

/// A named struct which stores the parsed config file options.
#[derive(Clone)]
pub struct Config {
    /// It stores the parsed port number option on which the server should launch.
    pub port: u16,

@@ -21,23 +21,40 @@ pub struct Config {
    /// It stores the redis connection url address on which the redis
    /// client should connect.
    pub redis_url: String,
+    #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
+    /// It stores the max TTL for search results in the cache.
+    pub cache_expiry_time: u16,
    /// It stores the option to enable or disable production use.
    pub aggregator: AggregatorConfig,
    /// It stores the option to enable or disable logs.
    pub logging: bool,
    /// It stores the option to enable or disable debug mode.
    pub debug: bool,
+    /// It toggles whether to use adaptive HTTP windows.
+    pub adaptive_window: bool,
    /// It stores all the engine names that were enabled by the user.
-    pub upstream_search_engines: Vec<crate::models::engine_models::EngineHandler>,
+    pub upstream_search_engines: HashMap<String, bool>,
    /// It stores the time (secs) which controls the server request timeout.
    pub request_timeout: u8,
    /// It stores the number of threads which the app will use to run.
    pub threads: u8,
+    /// Set the keep-alive time for client connections to the HTTP server.
+    pub client_connection_keep_alive: u8,
    /// It stores configuration options for the ratelimiting middleware.
    pub rate_limiter: RateLimiter,
    /// It stores the level of safe search to be used for restricting content in the
    /// search results.
    pub safe_search: u8,
+    /// It stores the TCP connection keepalive duration in seconds.
+    pub tcp_connection_keep_alive: u8,
+    /// It stores the pool idle connection timeout in seconds.
+    pub pool_idle_connection_timeout: u8,
+    /// Url of the proxy to use for outgoing requests.
+    pub proxy: Option<Proxy>,
+    /// It stores the number of https connections to keep in the pool.
+    pub number_of_https_connections: u8,
+    /// It stores the operating system's TLS certificates for https requests.
+    pub operating_system_tls_certificates: bool,
}

impl Config {

@@ -47,7 +64,7 @@ impl Config {
    /// # Arguments
    ///
    /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
    ///   initialized twice. Pass false if the logger has not yet been initialized.
    ///
    /// # Error
    ///

@@ -61,10 +78,11 @@ impl Config {
        lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
            .exec()?;

-        let parsed_threads: u8 = globals.get::<_, u8>("threads")?;
+        let parsed_threads: u8 = globals.get("threads")?;

-        let debug: bool = globals.get::<_, bool>("debug")?;
-        let logging: bool = globals.get::<_, bool>("logging")?;
+        let debug: bool = globals.get("debug")?;
+        let logging: bool = globals.get("logging")?;
+        let adaptive_window: bool = globals.get("adaptive_window")?;

        if !logging_initialized {
            set_logging_level(debug, logging);

@@ -81,9 +99,9 @@ impl Config {
            parsed_threads
        };

-        let rate_limiter = globals.get::<_, HashMap<String, u8>>("rate_limiter")?;
+        let rate_limiter: HashMap<String, u8> = globals.get("rate_limiter")?;

-        let parsed_safe_search: u8 = globals.get::<_, u8>("safe_search")?;
+        let parsed_safe_search: u8 = globals.get::<_>("safe_search")?;
        let safe_search: u8 = match parsed_safe_search {
            0..=4 => parsed_safe_search,
            _ => {

@@ -93,33 +111,61 @@ impl Config {
            }
        };

+        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
+        let parsed_cet = globals.get::<_>("cache_expiry_time")?;
+        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
+        let cache_expiry_time = match parsed_cet {
+            0..=59 => {
+                log::error!(
+                    "Config Error: The value of `cache_expiry_time` must be at least 60"
+                );
+                log::error!("Falling back to using the value `60` for the option");
+                60
+            }
+            _ => parsed_cet,
+        };

+        let proxy_opt: Option<String> = globals.get::<_>("proxy")?;
+        let proxy = proxy_opt.and_then(|proxy_str| match Proxy::all(proxy_str) {
+            // Keep the proxy when the url parses cleanly; otherwise log and
+            // fall back to no proxy.
+            Ok(proxy) => Some(proxy),
+            Err(_) => {
+                log::error!("Invalid proxy url, defaulting to no proxy.");
+                None
+            }
+        });

        Ok(Config {
-            port: globals.get::<_, u16>("port")?,
-            binding_ip: globals.get::<_, String>("binding_ip")?,
+            operating_system_tls_certificates: globals
+                .get::<_>("operating_system_tls_certificates")?,
+            port: globals.get::<_>("port")?,
+            binding_ip: globals.get::<_>("binding_ip")?,
            style: Style::new(
-                globals.get::<_, String>("theme")?,
-                globals.get::<_, String>("colorscheme")?,
+                globals.get::<_>("theme")?,
+                globals.get::<_>("colorscheme")?,
+                globals.get::<_>("animation")?,
            ),
            #[cfg(feature = "redis-cache")]
-            redis_url: globals.get::<_, String>("redis_url")?,
+            redis_url: globals.get::<_>("redis_url")?,
            aggregator: AggregatorConfig {
-                random_delay: globals.get::<_, bool>("production_use")?,
+                random_delay: globals.get::<_>("production_use")?,
            },
            logging,
            debug,
-            upstream_search_engines: globals
-                .get::<_, HashMap<String, bool>>("upstream_search_engines")?
-                .into_iter()
-                .filter_map(|(key, value)| value.then_some(key))
-                .filter_map(|engine| crate::models::engine_models::EngineHandler::new(&engine))
-                .collect(),
-            request_timeout: globals.get::<_, u8>("request_timeout")?,
+            adaptive_window,
+            upstream_search_engines: globals.get::<_>("upstream_search_engines")?,
+            request_timeout: globals.get::<_>("request_timeout")?,
+            tcp_connection_keep_alive: globals.get::<_>("tcp_connection_keep_alive")?,
+            pool_idle_connection_timeout: globals.get::<_>("pool_idle_connection_timeout")?,
+            number_of_https_connections: globals.get::<_>("number_of_https_connections")?,
            threads,
+            client_connection_keep_alive: globals.get::<_>("client_connection_keep_alive")?,
            rate_limiter: RateLimiter {
                number_of_requests: rate_limiter["number_of_requests"],
                time_limit: rate_limiter["time_limit"],
            },
            safe_search,
+            #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
+            cache_expiry_time,
+            proxy,
        })
    }
}
src/engines/bing.rs (new file, 122 lines)
@@ -0,0 +1,122 @@
//! The `bing` module handles the scraping of results from the bing search engine
//! by querying the upstream bing search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use regex::Regex;
use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::common::build_cookie;
use super::search_result_parser::SearchResultParser;

/// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Bing {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Bing {
    /// Creates the Bing parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".b_results",
                ".b_algo",
                "h2 a",
                ".tpcn a.tilk",
                ".b_caption p",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Bing {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Bing uses the `start results from this number` convention,
        // so for 10 results per page, page 0 starts at 1, page 1
        // starts at 11, and so on.
        let results_per_page = 10;
        let start_result = results_per_page * page + 1;

        let url: String = match page {
            0 => {
                format!("https://www.bing.com/search?q={query}")
            }
            _ => {
                format!("https://www.bing.com/search?q={query}&first={start_result}")
            }
        };

        let query_params: Vec<(&str, &str)> = vec![
            ("_EDGE_V", "1"),
            ("SRCHD=AF", "NOFORM"),
            ("_Rwho=u", "d"),
            ("bngps=s", "0"),
            ("_UR=QS=0&TQS", "0"),
            ("_UR=QS=0&TQS", "0"),
        ];

        let cookie_string = build_cookie(&query_params);

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), cookie_string),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Bing::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        // Bing is very aggressive in finding matches
        // even with the most absurd of queries. ".b_algo" is the
        // class for the list item of results.
        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
            if no_result_msg
                .value()
                .attr("class")
                .map(|classes| classes.contains("b_algo"))
                .unwrap_or(false)
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        let re_span = Regex::new(r#"<span.*?>.*?(?:</span> ·|</span>)"#).unwrap();
        let re_strong = Regex::new(r#"(<strong>|</strong>)"#).unwrap();

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    &re_strong.replace_all(title.inner_html().trim(), ""),
                    url.value().attr("href").unwrap(),
                    &re_span.replace_all(desc.inner_html().trim(), ""),
                    &["bing"],
                ))
            })
    }
}
src/engines/brave.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
//! The `brave` module handles the scraping of results from the brave search engine
//! by querying the upstream brave search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use reqwest::{header::HeaderMap, Client};
use scraper::Html;

use crate::models::aggregation_models::SearchResult;
use error_stack::{Report, Result, ResultExt};

use crate::models::engine_models::{EngineError, SearchEngine};

use super::search_result_parser::SearchResultParser;

/// Scrapes the results from the Brave search engine.
pub struct Brave {
    /// Utilises generic logic for parsing search results.
    parser: SearchResultParser,
}

impl Brave {
    /// Creates the Brave parser.
    pub fn new() -> Result<Brave, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                "#results h4",
                "#results [data-pos]",
                "a > .url",
                "a",
                ".snippet-description",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Brave {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let url = format!("https://search.brave.com/search?q={query}&offset={page}");

        let safe_search_level = match safe_search {
            0 => "off",
            1 => "moderate",
            _ => "strict",
        };

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Cookie".to_string(),
                format!("safe_search={safe_search_level}"),
            ),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Brave::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
            if no_result_msg
                .inner_html()
                .contains("Not many great matches came back for your search")
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        self.parser
            .parse_for_results(&document, |title, url, desc| {
                url.value().attr("href").map(|url| {
                    SearchResult::new(
                        title.text().collect::<Vec<_>>().join("").trim(),
                        url.trim(),
                        desc.inner_html().trim(),
                        &["brave"],
                    )
                })
            })
    }
}
src/engines/common.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
//! This module provides common functionalities for engines.

/**
 * Build a query from a list of key value pairs.
 */
pub fn build_query(query_params: &[(&str, &str)]) -> String {
    let mut query_params_string = String::new();
    for (k, v) in query_params {
        query_params_string.push_str(&format!("&{k}={v}"));
    }
    query_params_string
}

/**
 * Build a cookie from a list of key value pairs.
 */
pub fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
    let mut cookie_string = String::new();
    for (k, v) in cookie_params {
        cookie_string.push_str(&format!("{k}={v}; "));
    }
    cookie_string
}
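Both helpers simply concatenate pairs, so the output is only as well-formed as the input. A short sketch (not part of the diff) of exactly what they produce:

```rust
// A small usage sketch (not part of the diff) showing the exact strings produced.
fn demo() {
    let params = [("safe", "1"), ("lb", "en")];
    // Each pair is prefixed with '&', ready to append to a url's query string.
    let query = build_query(&params);
    assert_eq!(query, "&safe=1&lb=en");
    // Each pair is terminated with "; ", as a Cookie header expects.
    let cookie = build_cookie(&params);
    assert_eq!(cookie, "safe=1; lb=en; ");
}
```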
@@ -5,7 +5,8 @@
use std::collections::HashMap;

use reqwest::header::HeaderMap;
-use scraper::{Html, Selector};
+use reqwest::Client;
+use scraper::Html;

use crate::models::aggregation_models::SearchResult;

@@ -13,9 +14,29 @@ use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

+use super::search_result_parser::SearchResultParser;
+
/// A new DuckDuckGo engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
-pub struct DuckDuckGo;
+pub struct DuckDuckGo {
+    /// The parser, used to interpret the search result.
+    parser: SearchResultParser,
+}
+
+impl DuckDuckGo {
+    /// Creates the DuckDuckGo parser.
+    pub fn new() -> Result<Self, EngineError> {
+        Ok(Self {
+            parser: SearchResultParser::new(
+                ".no-results",
+                ".results>.result",
+                ".result__title>.result__a",
+                ".result__url",
+                ".result__snippet",
+            )?,
+        })
+    }
+}

#[async_trait::async_trait]
impl SearchEngine for DuckDuckGo {

@@ -24,93 +45,53 @@ impl SearchEngine for DuckDuckGo {
        query: &str,
        page: u32,
        user_agent: &str,
-        request_timeout: u8,
+        client: &Client,
        _safe_search: u8,
-    ) -> Result<HashMap<String, SearchResult>, EngineError> {
+    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that the upstream server receives a valid page number.
        let url: String = match page {
-            1 | 0 => {
+            0 => {
                format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js")
            }
            _ => {
                format!(
-                    "https://duckduckgo.com/html/?q={}&s={}&dc={}&v=1&o=json&api=/d.js",
-                    query,
-                    (page / 2 + (page % 2)) * 30,
-                    (page / 2 + (page % 2)) * 30 + 1
+                    "https://duckduckgo.com/html/?q={query}&s={}&dc={}&v=1&o=json&api=/d.js",
+                    page * 30,
+                    page * 30 + 1
                )
            }
        };

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
-            ("USER_AGENT".to_string(), user_agent.to_string()),
-            ("REFERER".to_string(), "https://google.com/".to_string()),
+            ("User-Agent".to_string(), user_agent.to_string()),
+            ("Referer".to_string(), "https://google.com/".to_string()),
            (
-                "CONTENT_TYPE".to_string(),
+                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
-            ("COOKIE".to_string(), "kl=wt-wt".to_string()),
+            ("Cookie".to_string(), "kl=wt-wt".to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
-            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
+            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

-        let no_result: Selector = Selector::parse(".no-results")
-            .map_err(|_| Report::new(EngineError::UnexpectedError))
-            .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".no-results"))?;
-
-        if document.select(&no_result).next().is_some() {
+        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

-        let results: Selector = Selector::parse(".result")
-            .map_err(|_| Report::new(EngineError::UnexpectedError))
-            .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?;
-        let result_title: Selector = Selector::parse(".result__a")
-            .map_err(|_| Report::new(EngineError::UnexpectedError))
-            .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__a"))?;
-        let result_url: Selector = Selector::parse(".result__url")
-            .map_err(|_| Report::new(EngineError::UnexpectedError))
-            .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__url"))?;
-        let result_desc: Selector = Selector::parse(".result__snippet")
-            .map_err(|_| Report::new(EngineError::UnexpectedError))
-            .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__snippet"))?;
-
        // scrape all the results from the html
-        Ok(document
-            .select(&results)
-            .map(|result| {
-                SearchResult::new(
-                    result
-                        .select(&result_title)
-                        .next()
-                        .unwrap()
-                        .inner_html()
-                        .trim(),
-                    format!(
-                        "https://{}",
-                        result
-                            .select(&result_url)
-                            .next()
-                            .unwrap()
-                            .inner_html()
-                            .trim()
-                    )
-                    .as_str(),
-                    result
-                        .select(&result_desc)
-                        .next()
-                        .unwrap()
-                        .inner_html()
-                        .trim(),
+        self.parser
+            .parse_for_results(&document, |title, url, desc| {
+                Some(SearchResult::new(
+                    title.inner_html().trim(),
+                    &format!("https://{}", url.inner_html().trim()),
+                    desc.inner_html().trim(),
                    &["duckduckgo"],
-                )
+                ))
            })
-            .map(|search_result| (search_result.url.clone(), search_result))
-            .collect())
    }
}
src/engines/librex.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
//! The `librex` module contains the implementation of a search engine for LibreX using the reqwest and scraper libraries.
//! It includes a `SearchEngine` trait implementation for interacting with the search engine and retrieving search results.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::search_result_parser::SearchResultParser;

/// Represents the LibreX search engine.
pub struct LibreX {
    /// The parser used to extract search results from HTML documents.
    parser: SearchResultParser,
}

impl LibreX {
    /// Creates a new instance of LibreX with a default configuration.
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing `LibreX` if successful, otherwise an `EngineError`.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".text-result-container>p",
                ".text-result-wrapper",
                ".text-result-wrapper>a>h2",
                ".text-result-wrapper>a",
                ".text-result-wrapper>span",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for LibreX {
    /// Retrieves search results from LibreX based on the provided query, page, user agent, and client.
    ///
    /// # Arguments
    ///
    /// * `query` - The search query.
    /// * `page` - The page number for pagination.
    /// * `user_agent` - The user agent string.
    /// * `client` - The reqwest client for making HTTP requests.
    /// * `_safe_search` - A parameter for safe search (not currently used).
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing a `Vec` of (url, search result) pairs if successful, otherwise an `EngineError`.
    /// The `Err` variant is explicit for better documentation.
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Page number can be missing or empty string and so appropriate handling is required
        // so that the upstream server receives a valid page number.
        let url: String = format!(
            "https://search.ahwx.org/search.php?q={query}&p={}&t=10",
            page * 10
        );

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            ("Content-Type".to_string(), "application/x-www-form-urlencoded".to_string()),
            (
                "Cookie".to_string(),
                "theme=amoled; disable_special=on; disable_frontends=on; language=en; number_of_results=10; safe_search=on; save=1".to_string(),
            ),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &LibreX::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.inner_html().trim(),
                    desc.inner_html().trim(),
                    &["librex"],
                ))
            })
    }
}
@@ -3,5 +3,13 @@
//! provide standard functions to be implemented for all the upstream search engine handling
//! code. Moreover, it also provides a custom error for the upstream search engine handling code.

+pub mod bing;
+pub mod brave;
+pub mod common;
pub mod duckduckgo;
+pub mod librex;
+pub mod mojeek;
+pub mod search_result_parser;
pub mod searx;
+pub mod startpage;
+pub mod wikipedia;
src/engines/mojeek.rs (new file, 161 lines)
@@ -0,0 +1,161 @@
//! The `mojeek` module handles the scraping of results from the mojeek search engine
//! by querying the upstream mojeek search engine with user provided query and with a page
//! number if provided.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;

use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::common::{build_cookie, build_query};
use super::search_result_parser::SearchResultParser;

/// A new Mojeek engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Mojeek {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Mojeek {
    /// Creates the Mojeek parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".result-col",
                ".results-standard li",
                "h2 > a.title",
                "a.ob",
                "p.s",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Mojeek {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // Mojeek uses the `start results from this number` convention,
        // so for 10 results per page, page 0 starts at 1, page 1
        // starts at 11, and so on.
        let results_per_page = 10;
        let start_result = results_per_page * page + 1;

        let results_per_page = results_per_page.to_string();
        let start_result = start_result.to_string();

        let search_engines = vec![
            "Bing",
            "Brave",
            "DuckDuckGo",
            "Ecosia",
            "Google",
            "Lilo",
            "Metager",
            "Qwant",
            "Startpage",
            "Swisscows",
            "Yandex",
            "Yep",
            "You",
        ];

        let qss = search_engines.join("%2C");

        // A branchless condition to check whether the `safe_search` parameter has the
        // value 0 or not. If it is zero then it sets the value 0 otherwise it sets
        // the value to 1 for all other values of `safe_search`.
        //
        // Moreover, the below branchless code is equivalent to the following code:
        //
        // ```rust
        // let safe = if safe_search == 0 { 0 } else { 1 }.to_string();
        // ```
        //
        // For more information on branchless programming, see:
        //
        // * https://piped.video/watch?v=bVJ-mWWL7cE
        let safe = u8::from(safe_search != 0).to_string();

        // Mojeek detects automated requests; these are preferences that are
        // able to circumvent the countermeasure. Some of these are
        // not documented in their Search API.
        let query_params: Vec<(&str, &str)> = vec![
            ("t", results_per_page.as_str()),
            ("theme", "dark"),
            ("arc", "none"),
            ("date", "1"),
            ("cdate", "1"),
            ("tlen", "100"),
            ("ref", "1"),
            ("hp", "minimal"),
            ("lb", "en"),
            ("qss", &qss),
            ("safe", &safe),
        ];

        let query_params_string = build_query(&query_params);

        let url: String = match page {
            0 => {
                format!("https://www.mojeek.com/search?q={query}{query_params_string}")
            }
            _ => {
                format!(
                    "https://www.mojeek.com/search?q={query}&s={start_result}{query_params_string}"
                )
            }
        };

        let cookie_string = build_cookie(&query_params);

        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), cookie_string),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Mojeek::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
            if no_result_msg
                .inner_html()
                .contains("No pages found matching:")
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.attr("href")?.trim(),
                    desc.inner_html().trim(),
                    &["mojeek"],
                ))
            })
    }
}
src/engines/search_result_parser.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
//! This module provides helper functionalities for parsing a html document into the internal SearchResult.

use crate::models::{aggregation_models::SearchResult, engine_models::EngineError};
use error_stack::{Report, Result};
use scraper::{html::Select, ElementRef, Html, Selector};

/// A html search result parser, based on predefined CSS selectors.
pub struct SearchResultParser {
    /// selector to locate the element which is displayed, if there was nothing found.
    no_result: Selector,
    /// selector to locate the element which contains one item from the search result.
    results: Selector,
    /// selector to locate the title relative to the search result item.
    result_title: Selector,
    /// selector to locate the url relative to the search result item.
    result_url: Selector,
    /// selector to locate the description relative to the search result item.
    result_desc: Selector,
}

impl SearchResultParser {
    /// Creates a new parser, if all the selectors are valid, otherwise it returns an EngineError
    pub fn new(
        no_result_selector: &str,
        results_selector: &str,
        result_title_selector: &str,
        result_url_selector: &str,
        result_desc_selector: &str,
    ) -> Result<SearchResultParser, EngineError> {
        Ok(SearchResultParser {
            no_result: new_selector(no_result_selector)?,
            results: new_selector(results_selector)?,
            result_title: new_selector(result_title_selector)?,
            result_url: new_selector(result_url_selector)?,
            result_desc: new_selector(result_desc_selector)?,
        })
    }

    /// Parses the html and returns the elements representing the 'no result found' response.
    pub fn parse_for_no_results<'a>(&'a self, document: &'a Html) -> Select<'a, 'a> {
        document.select(&self.no_result)
    }

    /// Parses the html, and converts the results to SearchResult with the help of the builder function
    pub fn parse_for_results(
        &self,
        document: &Html,
        builder: impl Fn(&ElementRef<'_>, &ElementRef<'_>, &ElementRef<'_>) -> Option<SearchResult>,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let res = document
            .select(&self.results)
            .filter_map(|result| {
                let title = result.select(&self.result_title).next();
                let url = result.select(&self.result_url).next();
                let desc = result.select(&self.result_desc).next();
                match (title, url, desc) {
                    (Some(ref t), Some(ref u), Some(ref d)) => builder(t, u, d),
                    _ => None,
                }
            })
            .map(|search_result| (search_result.url.clone(), search_result))
            .collect();
        Ok(res)
    }
}

/// Creates a Selector struct, if the given parameter is a valid css expression, otherwise converts it into an EngineError.
fn new_selector(selector: &str) -> Result<Selector, EngineError> {
    Selector::parse(selector).map_err(|err| {
        Report::new(EngineError::UnexpectedError).attach_printable(format!(
            "invalid CSS selector: {}, err: {:?}",
            selector, err
        ))
    })
}
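A condensed sketch (not part of the diff) of how the engines above drive this parser; the selectors here are hypothetical placeholders, and `SearchResult::new`'s signature is assumed from the call sites shown in the engine files:

```rust
// A condensed usage sketch (not part of the diff); the pattern mirrors the
// engine implementations above, with placeholder selectors.
fn parse_example(document: &Html) -> Result<Vec<(String, SearchResult)>, EngineError> {
    let parser = SearchResultParser::new(
        ".no-results", // shown when the engine found nothing
        ".result",     // one element per search hit
        ".title",      // title, relative to the hit
        "a",           // url, relative to the hit
        ".snippet",    // description, relative to the hit
    )?;
    // The builder closure decides how the three matched elements become a SearchResult.
    parser.parse_for_results(document, |title, url, desc| {
        url.value().attr("href").map(|href| {
            SearchResult::new(
                title.inner_html().trim(),
                href,
                desc.inner_html().trim(),
                &["example"],
            )
        })
    })
}
```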
@ -3,16 +3,36 @@
|
|||
//! number if provided.
|
||||
|
||||
use reqwest::header::HeaderMap;
|
||||
use scraper::{Html, Selector};
|
||||
use reqwest::Client;
|
||||
use scraper::Html;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::search_result_parser::SearchResultParser;
|
||||
use crate::models::aggregation_models::SearchResult;
|
||||
use crate::models::engine_models::{EngineError, SearchEngine};
|
||||
use error_stack::{Report, Result, ResultExt};
|
||||
|
||||
/// A new Searx engine type defined in-order to implement the `SearchEngine` trait which allows to
|
||||
/// reduce code duplication as well as allows to create vector of different search engines easily.
|
||||
pub struct Searx;
|
||||
pub struct Searx {
|
||||
/// The parser, used to interpret the search result.
|
||||
parser: SearchResultParser,
|
||||
}
|
||||
|
||||
impl Searx {
|
||||
/// creates a Searx parser
|
||||
pub fn new() -> Result<Searx, EngineError> {
|
||||
Ok(Self {
|
||||
parser: SearchResultParser::new(
|
||||
"#urls>.dialog-error>p",
|
||||
".result",
|
||||
"h3>a",
|
||||
"h3>a",
|
||||
".content",
|
||||
)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl SearchEngine for Searx {
|
||||
|
@ -21,44 +41,43 @@ impl SearchEngine for Searx {
|
|||
query: &str,
|
||||
page: u32,
|
||||
user_agent: &str,
|
||||
request_timeout: u8,
|
||||
client: &Client,
|
||||
mut safe_search: u8,
|
||||
) -> Result<HashMap<String, SearchResult>, EngineError> {
|
||||
// Page number can be missing or empty string and so appropriate handling is required
|
||||
// so that upstream server recieves valid page number.
|
||||
if safe_search == 3 {
|
||||
safe_search = 2;
|
||||
};
|
||||
) -> Result<Vec<(String, SearchResult)>, EngineError> {
|
||||
// A branchless condition to check whether the `safe_search` parameter has the
|
||||
// value greater than equal to three or not. If it is, then it modifies the
|
||||
// `safesearch` parameters value to 2.
|
||||
//
|
||||
// Moreover, the below branchless code is equivalent to the following code below:
|
||||
//
|
||||
// ```rust
|
||||
// safe_search = u8::from(safe_search == 3) * 2;
|
||||
// ```
|
||||
//
|
||||
// For more information on branchless programming. See:
|
||||
//
|
||||
// * https://piped.video/watch?v=bVJ-mWWL7cE
|
||||
safe_search = u8::from(safe_search >= 3) * 2;
|
||||
|
||||
let url: String = match page {
|
||||
0 | 1 => {
|
||||
format!("https://searx.work/search?q={query}&pageno=1&safesearch={safe_search}")
|
||||
}
|
||||
_ => format!(
|
||||
"https://searx.work/search?q={query}&pageno={page}&safesearch={safe_search}"
|
||||
),
|
||||
};
|
||||
let url: String = format!(
|
||||
"https://searx.be/search?q={query}&pageno={}&safesearch={safe_search}",
|
||||
page + 1
|
||||
);
|
||||
|
||||
// initializing headers and adding appropriate headers.
|
||||
let header_map = HeaderMap::try_from(&HashMap::from([
|
||||
("USER_AGENT".to_string(), user_agent.to_string()),
|
||||
("REFERER".to_string(), "https://google.com/".to_string()),
|
||||
("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()),
|
||||
("COOKIE".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
|
||||
("User-Agent".to_string(), user_agent.to_string()),
|
||||
("Referer".to_string(), "https://google.com/".to_string()),
|
||||
("Content-Type".to_string(), "application/x-www-form-urlencoded".to_string()),
|
||||
("Cookie".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
|
||||
]))
|
||||
.change_context(EngineError::UnexpectedError)?;
|
||||
|
||||
let document: Html = Html::parse_document(
|
||||
&Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
|
||||
&Searx::fetch_html_from_upstream(self, &url, header_map, client).await?,
|
||||
);
|
||||
|
||||
let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
|
||||
.map_err(|_| Report::new(EngineError::UnexpectedError))
|
||||
.attach_printable_lazy(|| {
|
||||
format!("invalid CSS selector: {}", "#urls>.dialog-error>p")
|
||||
})?;
|
||||
|
||||
if let Some(no_result_msg) = document.select(&no_result).nth(1) {
|
||||
if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(1) {
|
||||
if no_result_msg.inner_html()
|
||||
== "we didn't find any results. Please use another query or search in more categories"
|
||||
{
|
||||
|
@ -66,48 +85,17 @@ impl SearchEngine for Searx {
|
|||
}
|
||||
}
|
||||
|
||||
let results: Selector = Selector::parse(".result")
|
||||
.map_err(|_| Report::new(EngineError::UnexpectedError))
|
||||
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?;
|
||||
let result_title: Selector = Selector::parse("h3>a")
|
||||
.map_err(|_| Report::new(EngineError::UnexpectedError))
|
||||
.attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?;
|
||||
let result_url: Selector = Selector::parse("h3>a")
|
||||
.map_err(|_| Report::new(EngineError::UnexpectedError))
|
||||
.attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?;
|
||||
|
||||
let result_desc: Selector = Selector::parse(".content")
|
||||
.map_err(|_| Report::new(EngineError::UnexpectedError))
|
||||
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".content"))?;
|
||||
|
||||
// scrape all the results from the html
|
||||
Ok(document
|
||||
.select(&results)
|
||||
.map(|result| {
|
||||
SearchResult::new(
|
||||
result
|
||||
.select(&result_title)
|
||||
.next()
|
||||
.unwrap()
|
||||
.inner_html()
|
||||
.trim(),
|
||||
result
|
||||
.select(&result_url)
|
||||
.next()
|
||||
.unwrap()
|
||||
.value()
|
||||
.attr("href")
|
||||
.unwrap(),
|
||||
result
|
||||
.select(&result_desc)
|
||||
.next()
|
||||
.unwrap()
|
||||
.inner_html()
|
||||
.trim(),
|
||||
&["searx"],
|
||||
)
|
||||
self.parser
|
||||
.parse_for_results(&document, |title, url, desc| {
|
||||
url.value().attr("href").map(|url| {
|
||||
SearchResult::new(
|
||||
title.inner_html().trim(),
|
||||
url,
|
||||
desc.inner_html().trim(),
|
||||
&["searx"],
|
||||
)
|
||||
})
|
||||
})
|
||||
.map(|search_result| (search_result.url.clone(), search_result))
|
||||
.collect())
|
||||
}
|
||||
}
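Both engines now funnel their scraping through the shared `SearchResultParser` (`src/engines/search_result_parser.rs`), which this part of the compare view does not show. The sketch below is a minimal reconstruction of the shape implied by the call sites above; the field names and the `new_selector` helper are assumptions, not the actual contents of that file.

// A minimal sketch of the shared parser, inferred from how `new`,
// `parse_for_no_results`, and `parse_for_results` are called above.
// Field and helper names here are assumptions.
use error_stack::{Report, Result, ResultExt};
use scraper::{html::Select, ElementRef, Html, Selector};

use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::EngineError;

/// Bundles the five CSS selectors every scraper-based engine needs.
pub struct SearchResultParser {
    no_result: Selector,
    results: Selector,
    result_title: Selector,
    result_url: Selector,
    result_desc: Selector,
}

impl SearchResultParser {
    /// Compiles the selectors once, so each engine constructor can `?` on failure.
    pub fn new(
        no_result_selector: &str,
        results_selector: &str,
        result_title_selector: &str,
        result_url_selector: &str,
        result_desc_selector: &str,
    ) -> Result<SearchResultParser, EngineError> {
        Ok(SearchResultParser {
            no_result: new_selector(no_result_selector)?,
            results: new_selector(results_selector)?,
            result_title: new_selector(result_title_selector)?,
            result_url: new_selector(result_url_selector)?,
            result_desc: new_selector(result_desc_selector)?,
        })
    }

    /// Returns the elements matching the "no results" selector.
    pub fn parse_for_no_results<'a>(&'a self, document: &'a Html) -> Select<'a, 'a> {
        document.select(&self.no_result)
    }

    /// Runs `builder` over every (title, url, description) triple found in the
    /// result blocks, skipping blocks where any of the three is missing.
    pub fn parse_for_results(
        &self,
        document: &Html,
        builder: impl Fn(&ElementRef<'_>, &ElementRef<'_>, &ElementRef<'_>) -> Option<SearchResult>,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let results = document
            .select(&self.results)
            .filter_map(|result| {
                let title = result.select(&self.result_title).next();
                let url = result.select(&self.result_url).next();
                let desc = result.select(&self.result_desc).next();
                match (title, url, desc) {
                    (Some(title), Some(url), Some(desc)) => builder(&title, &url, &desc),
                    _ => None,
                }
            })
            .map(|search_result| (search_result.url.clone(), search_result))
            .collect();
        Ok(results)
    }
}

/// Compiles a single CSS selector, attaching the selector text on failure.
fn new_selector(selector: &str) -> Result<Selector, EngineError> {
    Selector::parse(selector)
        .map_err(|_| Report::new(EngineError::UnexpectedError))
        .attach_printable_lazy(|| format!("invalid CSS selector: {}", selector))
}

Compiling all five selectors once in each engine's constructor moves the "invalid CSS selector" failure out of the per-request path, which is what lets the `results` implementations collapse into a single `parse_for_results` call.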
src/engines/startpage.rs (new normal file, 89 lines)
@@ -0,0 +1,89 @@
//! The `startpage` module handles the scraping of results from the startpage search engine
//! by querying the upstream startpage search engine with a user-provided query and, when
//! given, a page number.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::{EngineError, SearchEngine};

use error_stack::{Report, Result, ResultExt};

use super::search_result_parser::SearchResultParser;

/// A new Startpage engine type defined in order to implement the `SearchEngine` trait, which
/// reduces code duplication and makes it easy to build a vector of different search engines.
pub struct Startpage {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
}

impl Startpage {
    /// Creates the Startpage parser.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                ".no-results",
                ".w-gl__result__main",
                ".w-gl__result-second-line-container>.w-gl__result-title>h3",
                ".w-gl__result-url",
                ".w-gl__description",
            )?,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Startpage {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        // The upstream startpage search engine expects a result offset rather than a page
        // number, so page `n` is mapped to `start = n * 10` (10 results per page).
        let url: String = format!(
            "https://startpage.com/do/dsearch?q={query}&num=10&start={}",
            page * 10,
        );

        // initializing HeaderMap and adding appropriate headers.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Cookie".to_string(), "preferences=connect_to_serverEEE0N1Ndate_timeEEEworldN1Ndisable_family_filterEEE0N1Ndisable_open_in_new_windowEEE0N1Nenable_post_methodEEE1N1Nenable_proxy_safety_suggestEEE1N1Nenable_stay_controlEEE0N1Ninstant_answersEEE1N1Nlang_homepageEEEs%2Fnight%2FenN1NlanguageEEEenglishN1Nlanguage_uiEEEenglishN1Nnum_of_resultsEEE10N1Nsearch_results_regionEEEallN1NsuggestionsEEE1N1Nwt_unitEEEcelsius".to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Startpage::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                Some(SearchResult::new(
                    title.inner_html().trim(),
                    url.inner_html().trim(),
                    desc.inner_html().trim(),
                    &["startpage"],
                ))
            })
    }
}
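Nothing in the change set exercises the new engine end to end, so here is a hypothetical, network-dependent smoke test (not part of the diff) showing how `Startpage` plugs into the `SearchEngine` trait; the test module, the tokio test harness, and the example query are all assumptions.

// Hypothetical smoke test, assuming the crate uses tokio and the module
// paths shown in the imports above. Requires network access.
#[cfg(test)]
mod tests {
    use reqwest::Client;

    use crate::engines::startpage::Startpage;
    use crate::models::engine_models::SearchEngine;

    #[tokio::test]
    async fn startpage_returns_results() {
        let engine = Startpage::new().expect("the hard-coded CSS selectors should compile");
        let client = Client::new();
        let user_agent =
            "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0";

        // Page 0 maps to `start=0`; the last argument is the ignored safe-search level.
        let results = engine
            .results("rust programming language", 0, user_agent, &client, 0)
            .await
            .expect("the upstream request should succeed");

        // Each entry maps the result URL to its `SearchResult`.
        assert!(!results.is_empty());
        for (url, _result) in results.iter().take(3) {
            println!("{url}");
        }
    }
}

A network-free variant would instead feed a canned results page to `Html::parse_document` and assert on what `parse_for_results` extracts.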