Compare commits

..

No commits in common. "rolling" and "v0.8.0" have entirely different histories.

157 changed files with 2463 additions and 12286 deletions

View file

@ -1,40 +0,0 @@
name: 🐛 Bug
description: Report an issue to help improve the project.
title: "🐛 <description>"
labels: ["🛠️ goal: fix","🚦 status: awaiting triage"]
body:
- type: textarea
id: description
attributes:
label: Description
description: A brief description of the question or issue, also include what you tried and what didn't work
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please add screenshots if applicable
validations:
required: false
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: false
options:
- "Yes"
- "No"
validations:
required: false
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this bug?
validations:
required: false
- type: markdown
attributes:
value: |
You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u)

36
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View file

@ -0,0 +1,36 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG] <your bug report title>"
labels: bug
assignees: ''
---
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF Websurfx -->
**Version of Websurfx, commit number if you are using on master branch**
<!-- If you are running on master branch using git execute this command
in order to fetch the latest commit ID:
```
git log -1
```
-->
**How did you install Websurfx?**
<!-- Did you install Websurfx following the README ? -->
**What happened?**
<!-- A clear and concise description of what the bug is. -->
**Steps To Reproduce**
<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Screenshots**
<!-- If applicable, provide screenshots to help explain your problem better. -->
**Additional context**
<!-- Add any other context about the problem here. -->

View file

@ -1,5 +1 @@
blank_issues_enabled: false
contact_links:
- name: Question?
url: https://discord.gg/SWnda7Mw5u
about: Feel free to ask your question by joining our Discord server.
blank_issues_enabled: true

View file

@ -1,40 +0,0 @@
name: 📝 Documentation issue
description: Found an issue in the documentation? You can use this one!
title: "📝 <description>"
labels: ["📄 aspect: text","🚦 status: awaiting triage"]
body:
- type: textarea
id: description
attributes:
label: Description
description: A brief description of the question or issue, also include what you tried and what didn't work
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please add screenshots if applicable
validations:
required: false
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: false
options:
- "Yes"
- "No"
validations:
required: false
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this issue?
validations:
required: false
- type: markdown
attributes:
value: |
You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u)

View file

@ -0,0 +1,27 @@
---
name: Engine request
about: 'Suggest a new engine to be add '
title: "[ENGINE] <your engine request title>"
labels: engine
assignees: ''
---
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF Websurfx -->
**Working URL of the engine**
<!-- Please check if the engine is responding correctly before submitting it. -->
**Why do you want to add this engine?**
<!-- What's special about this engine? Is it open source or libre? -->
**Features of this engine**
<!-- Features of this engine: Doesn't track its users, fast, easy to integrate, ... -->
**Applicable category of this engine**
<!-- Where should this new engine fit in Websurfx? Current categories in Websurfx:
general, files, images, it, map, music, news, science, social media and videos.
You can add multiple categories at the same time. -->
**Additional context**
<!-- Add any other context about this engine here. -->

View file

@ -1,72 +0,0 @@
name: ✨ Engine
description: Have a new engine to suggest for Websurfx? Please suggest!
title: '✨ <your engine request title>'
labels: ['⭐ goal: addition', '🚦 status: awaiting triage']
body:
- type: textarea
id: workingUrl
attributes:
label: Working URL of the engine
description: Please check if the engine is responding correctly before submitting it.
validations:
required: true
- type: textarea
id: reason
attributes:
label: Why do you want to add this engine?
description: What's special about this engine? Is it open source or libre?
validations:
required: true
- type: textarea
id: features
attributes:
label: Features of this engine
description: Features of this engine: Doesn't track its users, fast, easy to integrate, or anything else that we can know about.
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please add screenshots if applicable
validations:
required: false
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: true
options:
- 'General'
- 'Files'
- 'Images'
- 'IT'
- 'Map'
- 'Music'
- 'News'
- 'Science'
- 'Social Media'
- 'Videos'
validations:
required: true
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: false
options:
- 'Yes'
- 'No'
validations:
required: false
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this idea?
validations:
required: false
- type: markdown
attributes:
value: |
You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u)

View file

@ -1,40 +0,0 @@
name: 💡 General Feature Request
description: Have a new idea/feature for Websurfx? Please suggest!
title: "✨ <description>"
labels: ["⭐ goal: addition", "🚦 status: awaiting triage"]
body:
- type: textarea
id: description
attributes:
label: Description
description: A brief description of the enhancement you propose, also include what you tried and what worked.
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please add screenshots if applicable
validations:
required: false
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: false
options:
- "Yes"
- "No"
validations:
required: false
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this idea?
validations:
required: false
- type: markdown
attributes:
value: |
You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u)

View file

@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE] <your feature request title>"
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View file

@ -1,36 +0,0 @@
name: 🧱 Other
description: Use this for any other issues. Please do NOT create blank issues
title: "🧱 <Add your title here>"
labels: ["🚦 status: awaiting triage"]
body:
- type: markdown
attributes:
value: "# Other issue"
- type: textarea
id: issuedescription
attributes:
label: What would you like to share?
description: Provide a clear and concise explanation of your issue.
validations:
required: true
- type: dropdown
id: assignee
attributes:
label: Do you want to work on this issue?
multiple: false
options:
- "Yes"
- "No"
validations:
required: false
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this issue?
validations:
required: false
- type: markdown
attributes:
value: |
You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u)

View file

@ -1,14 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "monthly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "monthly"

View file

@ -1,11 +0,0 @@
"🚦 status: awaiting triage":
issues:
comment: >
To reduce notifications, issues are locked until they are https://github.com/neon-mmd/websurfx/labels/%F0%9F%8F%81%20status%3A%20ready%20for%20dev and to be assigned. You can learn more in our contributing guide https://github.com/neon-mmd/websurfx/blob/rolling/CONTRIBUTING.md
lock: true
"🏁 status: ready for dev":
issues:
comment: >
The issue has been unlocked and is now ready for dev. If you would like to work on this issue, you can comment to have it assigned to you. You can learn more in our contributing guide https://github.com/neon-mmd/websurfx/blob/rolling/CONTRIBUTING.md
unlock: true

27
.github/labeler.yml vendored
View file

@ -1,27 +0,0 @@
'💻 aspect: code':
- src/*
- Cargo.toml
- Cargo.lock
- Dockerfile
- docker-compose.yml
- websurfx/*
'🤖 aspect: dx':
- '**/*.json'
- .dockerignore
- .gitignore
- .gitpod.Dockerfile
- .gitpod.yml
- .rusty-hook.toml
- PULL_REQUEST_TEMPLATE.md
- SECURITY.md
- .github/*
- .mega-linter.yml
- tests/*
'📄 aspect: text':
- any: ['**/*.md', '!PULL_REQUEST_TEMPLATE.md', '!SECURITY.md']
- LICENSE
'🕹️ aspect: interface':
- public/*

View file

@ -1,79 +0,0 @@
name: Release stable image
on:
push:
branches:
- "release/stable/**"
pull_request:
branches:
- "release/stable/**"
types: [opened, synchronize]
env:
CARGO_TERM_COLOR: always
jobs:
release_image:
strategy:
fail-fast: false
matrix:
cache:
- memory
- redis
- hybrid
- no-cache
name: Release ${{ matrix.cache }} image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Install buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
# Set buildx cache
- name: Cache register
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: buildx-cache
# Login to ghcr.io
- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: neonmmd
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Extract branch info
- name: Set info
run: |
echo "VERSION=$(echo ${GITHUB_REF} | awk -F/ '{print $6}')" >> $GITHUB_ENV
# Print info for debug
- name: Print Info
run: |
echo $VERSION
# Create buildx multiarch
- name: Create buildx multiarch
run: docker buildx create --use --name=buildx-multi-arch --driver=docker-container --driver-opt=network=host
# Modify cache variable in the dockerfile.
- name: Modify Cache variable
run: |
sed -i "s/ARG CACHE=[a-z]*/ARG CACHE=${{ matrix.cache }}/g" Dockerfile
# Publish image
- name: Publish image
run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neonmmd/websurfx:$VERSION-${{ matrix.cache }} -t neon-mmd/websurfx:${{matrix.cache}} -f Dockerfile .
- name: Publish latest
if: ${{ matrix.cache }} == 'hybrid'
run: docker buildx build --builder=buildx-multi-arch --platform=linux/amd64,linux/arm64 --build-arg CACHE=${{ matrix.cache }} --push -t neon-mmd/websurfx:latest -f Dockerfile .
# Upload it to release
- name: Test if release already exists
id: release-exists
continue-on-error: true
run: gh release view $BINARY_NAME-$VERSION
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create new draft release
if: steps.release-exists.outcome == 'failure' && steps.release-exists.conclusion == 'success'
run: gh release create -t $VERSION -d $VERSION
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -1,19 +0,0 @@
---
name: Welcome first time contributors
on:
pull_request_target:
types:
- opened
jobs:
welcome:
name: Welcome
runs-on: ubuntu-latest
steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Congrats on making your first Pull Request and thanks for taking the time to improve Websurfx! ❤️!
Say hello by joining the conversation in our [Discord](https://discord.gg/SWnda7Mw5u)

View file

@ -1,16 +0,0 @@
name: "lock/unlock issue"
on:
issues:
types: labeled
permissions:
issues: write
jobs:
action:
runs-on: ubuntu-latest
steps:
- uses: dessant/label-actions@v4
with:
process-only: issues

View file

@ -1,10 +1,9 @@
---
name: Import open source standard labels
on:
push:
branches:
- rolling
- master
jobs:
labels:
@ -12,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/setup-node@v4
- uses: actions/setup-node@v2
with:
node-version: '14'
- uses: EddieHubCommunity/gh-action-open-source-labels@main
@ -20,4 +19,4 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
owner-name: ${{ github.repository_owner }}
repository-name: ${{ github.event.repository.name }}
force: false
force: true # optional to clear existing labels, default to true

View file

@ -1,15 +0,0 @@
name: "Pull Request Auto Labeler"
on:
- pull_request_target
jobs:
triage:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5
with:
sync-labels: true
dot: true

View file

@ -1,72 +0,0 @@
name: Bump release version
on:
pull_request:
branches: [rolling]
types:
- closed
permissions:
contents: write
pull-requests: write
repository-projects: write
concurrency: production
jobs:
build:
name: bump tag version and release
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.sha }}
fetch-depth: 0
- name: Bump version and push tag
id: version-bump
uses: hennejg/github-tag-action@v4.4.0
with:
github_token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
release_branches: rolling
- name: create branch
uses: peterjgrainger/action-create-branch@v3.0.0
env:
GITHUB_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
with:
branch: update-from-${{ github.sha }}
- name: update cargo.toml
run: |
appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
- run: rustup toolchain install stable --profile minimal
- run: rustup update stable && rustup default stable
- name: regenerate cargo.lock
run: cargo generate-lockfile
- name: auto commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "[skip ci] updating app version to ${{ steps.version-bump.outputs.new_tag }}"
branch: update-from-${{ github.sha }}
# create PR using GitHub CLI
- name: create PR with update info
id: create-pr
run: gh pr create --base rolling --head update-from-${{ github.sha }} --title 'Merge new update into rolling' --body 'Created by Github action'
env:
GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
# merge PR using GitHub CLI
- name: merge PR with update info
id: merge-pr
run: gh pr merge --admin --merge --subject 'Merge update info' --delete-branch
env:
GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
- name: Create Release
uses: softprops/action-gh-release@v2
with:
token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
generate_release_notes: true
name: ${{ steps.version-bump.outputs.new_tag }}
tag_name: ${{ steps.version-bump.outputs.new_tag }}
prerelease: false
env:
GITHUB_REPOSITORY: ${{ github.repository }}

View file

@ -1,13 +1,12 @@
---
name: Rust
on:
push:
branches:
- '**'
- "**"
pull_request:
branches:
- 'rolling'
- "rolling"
env:
CARGO_TERM_COLOR: always
@ -21,27 +20,23 @@ jobs:
- stable
steps:
- name: Install LuaJIT and Lua
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev
- uses: actions/checkout@v4
- run: rustup toolchain install stable --profile minimal
- uses: Swatinem/rust-cache@v2
with:
prefix-key: ''
shared-key: ''
key: ''
env-vars: ''
workspaces: ''
cache-directories: ''
cache-targets: ''
cache-on-failure: ''
cache-all-crates: ''
save-if: ''
- uses: actions/checkout@v4
- run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal
- uses: Swatinem/rust-cache@v2
with:
prefix-key: ""
shared-key: ""
key: ""
env-vars: ""
workspaces: ""
cache-directories: ""
cache-targets: ""
cache-on-failure: ""
cache-all-crates: ""
save-if: ""
- uses: actions/checkout@v3
- run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose

View file

@ -1,4 +1,3 @@
---
name: Rust format and clippy checks
on:
push:
@ -13,11 +12,7 @@ jobs:
name: Rust project
runs-on: ubuntu-latest
steps:
- name: Install LuaJIT and Lua
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Install minimal stable with clippy and rustfmt
uses: actions-rs/toolchain@v1
with:
@ -33,7 +28,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all-targets --all-features --all
args: --all-features --all-targets --all
- name: Run cargo check
uses: actions-rs/cargo@v1
with:

View file

@ -1,4 +1,3 @@
---
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
@ -19,7 +18,7 @@ jobs:
pull-requests: write
steps:
- uses: actions/stale@v9
- uses: actions/stale@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'Stale issue message'

9
.gitignore vendored
View file

@ -1,8 +1,3 @@
.vscode
/target
dhat-heap.json
dump.rdb
megalinter-reports/
package-lock.json
package.json
result
dump.rdb

3
.gitpod.Dockerfile vendored
View file

@ -1,3 +0,0 @@
FROM gitpod/workspace-rust
RUN sudo install-packages redis-server nodejs npm liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev

View file

@ -1,50 +0,0 @@
---
image:
file: .gitpod.Dockerfile
# Commands that will run on workspace start
tasks:
- name: Start Redis Server
command: redis-server --port 8082
- name: Run The App
init: cargo build
command: PKG_ENV=dev ./target/debug/websurfx
- name: Tests
command: cargo test
- name: Clippy Checks
command: cargo clippy
# vscode IDE setup
vscode:
extensions:
- vadimcn.vscode-lldb
- cschleiden.vscode-github-actions
- rust-lang.rust-analyzer
- bungcip.better-toml
- serayuzgur.crates
- usernamehw.errorlens
- DavidAnson.vscode-markdownlint
- esbenp.prettier-vscode
- stylelint.vscode-stylelint
- dbaeumer.vscode-eslint
- evgeniypeshkov.syntax-highlighter
- ms-azuretools.vscode-docker
- Catppuccin.catppuccin-vsc
- PKief.material-icon-theme
- oderwat.indent-rainbow
- formulahendry.auto-rename-tag
- swellaby.vscode-rust-test-adapter
- belfz.search-crates-io
- hbenl.test-adapter-converter
- hbenl.vscode-test-explorer
- eamodio.gitlens
github:
prebuilds:
master: true
branches: true
pullRequests: true
pullRequestsFromForks: true
addCheck: true
addComment: false
addBadge: true

View file

@ -1,25 +0,0 @@
queue_rules:
- name: default
queue_conditions:
- "#approved-reviews-by>=2"
- check-success=build (stable)
- check-success=CodeFactor
- check-success=Rust project
merge_conditions: []
merge_method: squash
pull_request_rules:
- name: automatic update of pull requests where more 5 commits behind
conditions:
- "#commits-behind>5"
actions:
update:
- name: delete head branch after merge
conditions:
- merged
actions:
delete_head_branch: {}
- name: Automatic merge on approval
conditions: []
actions:
queue:

View file

@ -1,5 +0,0 @@
[hooks]
pre-commit = "cargo test && cargo fmt -- --check && cargo clippy && stylelint ./public/static/themes/*.css ./public/static/colorschemes/*.css ./public/static/*.js"
[logging]
verbose = true

View file

@ -1,16 +0,0 @@
{
"extends": "stylelint-config-standard",
"rules": {
"alpha-value-notation": "number",
"selector-class-pattern": null,
"no-descending-specificity": null
},
"fix": true,
"cache": true,
"overrides": [
{
"files": ["*.js"],
"customSyntax": "postcss-lit"
}
]
}

View file

@ -1,54 +1,34 @@
# What You Can Contribute To?
# Things to Consider Before Contributing
## Documentation/Wiki
## Knowledge Required
Found a typo, or something that isn't as clear as it could be? Maybe I've missed something off altogether, or you hit a roadblock that took you a while to figure out. Edit the [docs](./docs/) to add to or improve the documentation. This will help future users get Websurfx up and running more easily.
## Readme
Did you find a typo, or the Readme is not as clear as it should be? Consider Submitting a Pull request to the [Readme](https://github.com/neon-mmd/websurfx/blob/master/README.md) to add to or improve the Readme. This will help future users to better understand the project more clearly.
## Help Improve GitHub Actions
Know how to fix or improve a GitHub action? Consider Submitting a Pull request to help make automation and testing better.
## Source Code
You should know at least one of the things below to start contributing:
- Rust basics
- Actix-web crate basics
- Tokio crate and async/await
- Reqwest crate basics
- Serde and serde_json crate basics
- Scraper crate basics
- Frontend (handlebars, css and js).
- Fake useragent crate basics
- pyo3/hlua/rlua crates basics
## Report a Bug/Issue
If you've found a bug, then please consider raising it as an issue [here](https://github.com/neon-mmd/websurfx/issues). This will help me know if there's something that needs fixing. Try and include as much detail as possible, such as your environment, steps to reproduce, any console output and maybe an example screenshot or recording if necessary.
## Spread the word
Websurfx is still a relatively young project, and as such not many people know of it. It would be great to see more users, and so it would be awesome if you could consider sharing with your friends or on social platforms.
- Rust basics
- Actix-web crate basics
- Tokio crate and async/await
- Reqwest crate basics
- Serde and serde~json~ crate basics
- fake~useragent~ crate basics
- pyo3/hlua/rlua crates basics
## Guidelines
- Please be patient.
- Treat everyone with respect -- \"give respect and take respect.\"
- Document your code properly with Rust coding conventions in mind.
- Provide a brief description of the changes you made in the pull request.
- Provide an appropriate header for the pull request.
- Please be patient.
## Join the discussion
- Treat everyone with respect -- \"give respect and take respect.\"
We have a [Discord](https://discord.gg/SWnda7Mw5u) channel, feel free to join and share your ideas and ask questions about the project, we would be glad to hear you out.
- Document your code properly with Rust coding conventions in mind.
# Where To Contribute?
- Provide a brief description of the changes you made in the pull
request.
The _rolling branch_ is where we intend all contributions should go.
- Provide an appropriate header for the pull request.
**NOTE:** The rolling branch is where all contributions should go.
In other words, it is the working branch for this project.
We appreciate any contributions whether of any size or topic and suggestions to help improve the Websurfx project. Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project then feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel.
We appreciate any contributions and suggestions to help improve the
Websurfx project. Please keep in mind the above requirements and
guidelines before submitting a pull request and also if you have any
doubts/concerns/questions about the project, its source code or anything
related to the project than feel free to ask by opening an
\[issue\](<https://github.com/neon-mmd/websurfx/issues>).

3751
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,191 +1,24 @@
[package]
name = "websurfx"
version = "1.21.0"
version = "0.8.0"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"
license = "AGPL-3.0"
[[bin]]
name = "websurfx"
test = true
bench = false
path = "src/bin/websurfx.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
reqwest = { version = "0.12.5", default-features = false, features = [
"rustls-tls",
"brotli",
"gzip",
"http2",
"socks",
] }
tokio = { version = "1.43.0", features = [
"rt-multi-thread",
"macros",
"fs",
"io-util",
], default-features = false }
serde = { version = "1.0.215", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.134", default-features = false }
bincode = {version="1.3.3", default-features=false}
maud = { version = "0.26.0", default-features = false, features = [
"actix-web",
] }
scraper = { version = "0.21.0", default-features = false }
actix-web = { version = "4.9.0", features = [
"cookies",
"macros",
"compress-brotli",
], default-features = false }
actix-files = { version = "0.6.5", default-features = false }
actix-cors = { version = "0.7.0", default-features = false }
fake-useragent = { version = "0.1.3", default-features = false }
env_logger = { version = "0.11.6", default-features = false }
log = { version = "0.4.21", default-features = false }
mlua = { version = "0.10.2", features = [
"luajit",
"vendored",
], default-features = false }
redis = { version = "0.28.1", features = [
"tokio-comp",
"connection-manager",
"tcp_nodelay"
], default-features = false, optional = true }
blake3 = { version = "1.5.4", default-features = false }
error-stack = { version = "0.4.0", default-features = false, features = [
"std",
] }
async-trait = { version = "0.1.80", default-features = false }
regex = { version = "1.11.1", features = ["perf"], default-features = false }
futures = { version = "0.3.31", default-features = false, features = ["alloc"] }
dhat = { version = "0.3.2", optional = true, default-features = false }
mimalloc = { version = "0.1.43", default-features = false }
async-once-cell = { version = "0.5.4", default-features = false }
actix-governor = { version = "0.8.0", default-features = false }
moka = { version = "0.12.8", optional = true, default-features = false, features = [
"future",
] }
async-compression = { version = "0.4.12", default-features = false, features = [
"brotli",
"tokio",
], optional = true }
chacha20poly1305 = { version = "0.10.1", default-features = false, features = [
"alloc",
"getrandom",
], optional = true }
chacha20 = { version = "0.9.1", default-features = false, optional = true }
base64 = { version = "0.21.5", default-features = false, features = [
"std",
], optional = true }
cfg-if = { version = "1.0.0", default-features = false, optional = true }
keyword_extraction = { version = "1.5.0", default-features = false, features = [
"tf_idf",
"rayon",
] }
stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
"moby",
]}
actix-multipart = { version = "0.7.2", default-features = false, features = [
"derive",
"tempfile",
]}
itertools = {version = "0.14.0", default-features = false}
[dev-dependencies]
rusty-hook = { version = "^0.11.2", default-features = false }
criterion = { version = "0.5.1", default-features = false }
tempfile = { version = "3.14.0", default-features = false }
[build-dependencies]
lightningcss = { version = "1.0.0-alpha.57", default-features = false, features = [
"grid",
] }
# Disabled until bug fixing update
# minify-js = { version = "0.6.0", default-features = false }
# Temporary fork with fix
minify-js = { git = "https://github.com/RuairidhWilliamson/minify-js", branch = "master", version = "0.6.0", default-features = false}
[profile.dev]
opt-level = 0
debug = true
split-debuginfo = 'unpacked'
debug-assertions = true
overflow-checks = true
lto = false
panic = 'unwind'
incremental = true
codegen-units = 256
rpath = false
[profile.release]
opt-level = 3
debug = false # This should only be commented when testing with dhat profiler
# debug = 1 # This should only be uncommented when testing with dhat profiler
split-debuginfo = '...'
debug-assertions = false
overflow-checks = false
lto = 'thin'
panic = 'abort'
incremental = false
codegen-units = 1
rpath = false
strip = "symbols"
[profile.bsr1]
inherits = "release"
opt-level = "s"
[profile.bsr2]
inherits = "bsr1"
opt-level = "z"
[profile.lpcb1]
inherits = "release"
codegen-units = 16
[profile.lpcb2]
inherits = "lpcb1"
lto = "off"
[profile.lpcb3]
inherits = "lpcb2"
opt-level = 2
[profile.bsr_and_lpcb1]
inherits = "lpcb1"
opt-level = "s"
[profile.bsr_and_lpcb2]
inherits = "lpcb2"
opt-level = "s"
[profile.bsr_and_lpcb3]
inherits = "lpcb3"
opt-level = "s"
[profile.bsr_and_lpcb4]
inherits = "lpcb1"
opt-level = "z"
[profile.bsr_and_lpcb5]
inherits = "lpcb1"
opt-level = "z"
[profile.bsr_and_lpcb6]
inherits = "lpcb1"
opt-level = "z"
[features]
use-synonyms-search = ["thesaurus/static"]
default = ["memory-cache"]
dhat-heap = ["dep:dhat"]
memory-cache = ["dep:moka"]
redis-cache = ["dep:redis", "dep:base64"]
compress-cache-results = ["dep:async-compression", "dep:cfg-if"]
encrypt-cache-results = ["dep:chacha20poly1305", "dep:chacha20"]
cec-cache-results = ["compress-cache-results", "encrypt-cache-results"]
experimental-io-uring = ["actix-web/experimental-io-uring"]
use-non-static-synonyms-search = ["thesaurus"]
reqwest = {version="*",features=["json"]}
tokio = {version="*",features=["full"]}
serde = {version="*",features=["derive"]}
handlebars = { version = "4.3.6", features = ["dir_source"] }
scraper = {version="*"}
actix-web = {version="4.3.1"}
actix-files = {version="0.6.2"}
serde_json = {version="*"}
fake-useragent = {version="*"}
env_logger = {version="0.10.0"}
log = {version="0.4.17"}
rlua = {version="*"}
redis = {version="*"}
md5 = {version="*"}
rand={version="*"}
once_cell = {version="*"}

View file

@ -1,43 +1,26 @@
FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18 AS chef
FROM rust:latest AS chef
# We only pay the installation cost once,
# it will be cached from the second build onwards
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev upx perl build-base
RUN cargo install cargo-chef --locked
RUN cargo install cargo-chef
WORKDIR /app
WORKDIR app
FROM chef AS planner
COPY ./Cargo.toml ./Cargo.lock ./
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM --platform=$BUILDPLATFORM chef AS builder
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
# Specify the cache type to use (memory, redis, hybrid, no-cache)
ARG CACHE=memory
ENV CACHE=${CACHE}
# Cook the dependencies
RUN export ARCH=$(uname -m) && \
if [ "$CACHE" = "memory" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --recipe-path recipe.json ; \
else if [ "$CACHE" = "redis" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --no-default-features --features redis-cache --recipe-path recipe.json ; \
else if [ "$CACHE" = "hybrid" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --features redis-cache --recipe-path recipe.json ; \
else if [ "$CACHE" = "no-cache" ] ; then cargo chef cook --release --target=$ARCH-unknown-linux-musl --no-default-features --recipe-path recipe.json ; fi ; fi ; fi ; fi
# Copy the source code and public folder
COPY ./src ./src
COPY ./public ./public
# Build the application
RUN export ARCH=$(uname -m) && \
if [ "$CACHE" = "memory" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl ; \
else if [ "$CACHE" = "redis" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --no-default-features --features redis-cache ; \
else if [ "$CACHE" = "hybrid" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --features redis-cache ; \
else if [ "$CACHE" = "no-cache" ] ; then cargo build --release --target=$ARCH-unknown-linux-musl --no-default-features ; fi ; fi ; fi ; fi
# Optimise binary size with UPX
RUN export ARCH=$(uname -m) \
&& upx --lzma --best /app/target/$ARCH-unknown-linux-musl/release/websurfx \
&& cp /app/target/$ARCH-unknown-linux-musl/release/websurfx /usr/local/bin/websurfx
# Build dependencies - this is the caching Docker layer!
RUN cargo chef cook --release --recipe-path recipe.json
# Build application
COPY . .
RUN cargo install --path .
FROM --platform=$BUILDPLATFORM scratch
COPY --from=builder /app/public/ /opt/websurfx/public/
VOLUME ["/etc/xdg/websurfx/"]
COPY --from=builder /usr/local/bin/websurfx /usr/local/bin/websurfx
# We do not need the Rust toolchain to run the binary!
FROM gcr.io/distroless/cc-debian11
COPY --from=builder ./public/ ./public/
COPY --from=builder ./websurfx/ ./websurfx/
COPY --from=builder /usr/local/cargo/bin/* /usr/local/bin/
CMD ["websurfx"]

View file

@ -16,7 +16,7 @@
## Author's checklist
<!-- additional notes for reviewers -->
<!-- additional notes for reviewiers -->
## Related issues

247
README.md
View file

@ -1,21 +1,27 @@
<h1 align="center">
<img src="./images/websurfx_logo.png" alt="websurfx logo" align="center" />
</h1>
<h1 align="center">Websurfx</h1>
<p align="center">
<b align="center"><a href="README.md">Readme</a></b> |
<b align="center"><a href="README.org">Readme</a></b> |
<b><a href="https://discord.gg/SWnda7Mw5u">Discord</a></b> |
<b><a href="docs/instances.md">Instances</a></b> |
<b><a href="https://discord.gg/VKCAememnr">User Showcase</a></b> |
<b><a href="https://github.com/neon-mmd/websurfx">GitHub</a></b> |
<b><a href="docs">Documentation</a></b>
<b><a href="https://github.com/neon-mmd/websurfx/wiki">Documentation</a></b>
<br /><br />
<a
href="https://github.com/awesome-selfhosted/awesome-selfhosted#search-engines"
>
<img
src="https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg"
alt="Awesome Self-Hosted"
/>
<a href="./LICENSE">
<img
alt="GitHub"
src="https://img.shields.io/github/license/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="https://github.com/neon-mmd/websurfx/stargazers">
<img
alt="GitHub Repo stars"
src="https://img.shields.io/github/stars/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="https://github.com/neon-mmd/websurfx/forks">
<img
alt="GitHub forks"
src="https://img.shields.io/github/forks/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="#">
<img
@ -23,28 +29,40 @@
src="https://img.shields.io/github/languages/code-size/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="https://github.com/neon-mmd/websurfx/issues">
<img
alt="GitHub issues"
src="https://img.shields.io/github/issues/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="https://github.com/neon-mmd/websurfx/pulls">
<img
alt="GitHub pull requests"
src="https://img.shields.io/github/issues-pr/neon-mmd/websurfx?style=flat-square"
/>
</a>
<a href="https://github.com/neon-mmd/websurfx/actions">
<img
alt="GitHub Workflow Status"
src="https://img.shields.io/github/actions/workflow/status/neon-mmd/websurfx/rust.yml?style=flat-square"
/>
</a>
<a href="">
<img
alt="GitHub release (latest by date including pre-releases)"
src="https://img.shields.io/github/v/release/neon-mmd/websurfx?include_prereleases"
/>
</a>
<a href=""
><img
alt="Maintenance"
src="https://img.shields.io/maintenance/yes/2024?style=flat-square"
src="https://img.shields.io/maintenance/yes/2023?style=flat-square"
/>
</a>
<a href="https://www.codefactor.io/repository/github/neon-mmd/websurfx">
<a href="">
<img
alt="CodeFactor"
src="https://www.codefactor.io/repository/github/neon-mmd/websurfx/badge"
/>
</a>
<a href="https://gitpod.io/#https://github.com/neon-mmd/websurfx">
<img
alt="Gitpod"
src="https://img.shields.io/badge/Gitpod-Ready--to--Code-blue?logo=gitpod"
alt="GitHub contributors"
src="https://img.shields.io/github/contributors-anon/neon-mmd/websurfx?style=flat-square"
/>
</a>
<br />
@ -55,7 +73,7 @@
>meta search engine</a
>
(pronounced as websurface or web-surface /wɛbˈːrfəs/.) written in Rust. It
provides a quick and secure search experience while completely respecting user
provides a fast and secure search experience while respecting user
privacy.</i
>
</p>
@ -65,31 +83,30 @@
<p>
- **Getting Started**
- [🔭 Preview](#preview-)
- [🚀 Features](#features-)
- [🔗 Instances](#instances-)
- [🛠️ Installation and Testing](#installation-and-testing-%EF%B8%8F)
- [🔧 Configuration](#configuration-)
- [🔭 Preview](#preview-)
- [🌈 Features](#features-)
- [🛠️ Installation and Testing](#installation-and-testing-)
- [🔧 Configuration](#configuration-)
- **Feature Overview**
- [🎨 Theming](#theming-)
- [🌍 Multi-Language Support](#multi-language-support-)
- [🎨 Theming](#theming-)
- [🌍 Multi-Language Support](#multi-language-support-)
- **Community**
- [📊 System Requirements](#system-requirements-)
- [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-%EF%B8%8F)
- [📣 More Contributors Wanted](#more-contributors-wanted-)
- [💖 Supporting Websurfx](#supporting-websurfx-)
- [📘 Documentation](#documentation-)
- [🛣️ Roadmap](#roadmap-%EF%B8%8F)
- [🙋 Contributing](#contributing-)
- [📜 License](#license-)
- [🤝 Credits](#credits-)
- [📊 System Requirements](#system-requirements-)
- [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-)
- [📣 More Contributers Wanted](#more-contributers-wanted-)
- [💖 Supporting Websurfx](#supporting-websurfx-)
- [📘 Documentation](#documentation-)
- [🛣️ Roadmap](#roadmap-)
- [🙋 Contributing](#contributing-)
- [📜 License](#license-)
- [🤝 Credits](#credits-)
</p>
</details>
# Preview 🔭
# Preview 🔭
## Home Page
## Main Page
<img align="center" src="./images/main_page.png" />
@ -101,140 +118,122 @@
<img align="center" src="./images/404_error_page.png" />
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Instances 🔗
# Features 🌈
> For a full list of publicly available community driven `websurfx` instances to test or for daily use. see [**Instances**](docs/instances.md)
**[⬆️ Back to Top](#--)**
# Features 🚀
- 🎨 Make Websurfx uniquely yours with the twelve color schemes provided by default. It also supports the creation of custom themes and color schemes in a quick and easy way, so unleash your creativity!
- 🚀 Easy to setup with Docker or on bare metal with various installation and deployment options.
- ⛔ Search filtering to filter search results based on four different levels.
- 💾 Different caching levels focusing on reliability, speed and resiliency.
- ⬆️ Organic Search results (with ranking algorithm builtin to rerank the search results according to user's search query.).
- 🔒 Different compression and encryption levels focusing on speed and privacy.
- 🧪 Experimental IO-uring feature for Linux operating systems focused on performance of the engine.
- 🔐 Fast, private, and secure
- 🎨 High level customizability with 9 colorchemes provided by default with a simple theme, also supporting creation of your custom themes and colorschemes very quickly and easily
- 🔐 Fast, private and secure
- 🆓 100% free and open source
- 💨 Ad-free and clean results
- 🌟 and lots more...
- 🧹 Ad free and clean results
- 🌈 and lots more...
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Installation and Testing 🛠️
# Installation and Testing 🛠️
> For full setup instructions, see: [**Installation**](docs/installation.md)
> For full setup instructions, see: [**Installation**](https://github.com/neon-mmd/websurfx/wiki/installation)
Before you can start building `websurfx`, you will need to have `Cargo` installed on your system. You can find the installation instructions [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
To get started with Websurfx, clone the repository, edit the config file which is located in the `websurfx`{.verbatim} directory and install redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then run the websurfx server and redis server using the following commands:
To get started with Websurfx, clone the repository, edit the config file, which is located in the `websurfx/` directory, and install the Redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then run the websurfx server and redis server using the following commands:
```shell
``` shell
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
git checkout stable
cargo build -r
redis-server --port 8082 &
./target/release/websurfx
cargo build
redis-server -p 8082 &
./target/debug/websurfx
```
Once you have started the server, open your preferred web browser and navigate to <http://127.0.0.1:8080> to start using Websurfx.
> [!Note]
>
> 1. The project is no longer in the testing phase and is now ready for production use.
> 2. There are many features still missing, like `support for image search`, `different categories`, `quick apps`, etc., but they will be added soon as part of future releases.
> **Warning**
> Please be aware that the project is still in the testing phase and is not ready for production use.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Configuration 🔧
# Configuration 🔧
> For full configuration instructions, see: [**Configuration**](docs/configuration.md)
> For full configuration instructions, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
Websurfx is configured through the config.lua file, located at `websurfx/config.lua`.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Theming 🎨
# Theming 🎨
> For full theming and customization instructions, see: [**Theming**](docs/theming.md)
> For full theming and customization instructions, see: [**Theming**](https://github.com/neon-mmd/websurfx/wiki/theming)
Websurfx comes loaded with several themes and color schemes, which you can apply and edit through the config file. It also supports custom themes and color schemes using CSS, allowing you to make it truly yours.
Websurfx comes with several themes and colorschemes by default which you can apply and edit through the config file. Support for custom themes and colorschemes using css and develop your own unique-looking website.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Multi-Language Support 🌍
> [!Note]
> Currently, we do not support other languages, but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry.
> **Note**
> Currently, we do not support other languages but in future we would start accepting contributions regarding language support because we believe that language should not be a barrier for entry.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# System Requirements 📊
At present, we only support x86_64 architecture systems, but we would love to have contributions that extend to other architectures as well.
At present, we only support x86_64 architecture systems but will love to have contributions to extend to other architectures as well.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# FAQ (Frequently Asked Questions) 🗨️
## Why Websurfx?
The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engines, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results, which most engines don't have a foolproof answer to. Until now. With Websurfx, I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and micro-apps or quick results (providing a calculator, currency exchanges, etc. in the search results).
The main goal of the Websurfx project is to provide a fast, secure, and privacy-focused [meta search engine](https://en.wikipedia.org/wiki/Metasearch_engine). While there are many meta search engines available, they do not always guarantee the security of their search engine, which is essential for ensuring privacy. For example, memory vulnerabilities can leak private or sensitive information, which is never good. Websurfx is written in Rust, which guarantees memory safety and eliminates such problems. Many meta search engines also lack key features such as advanced image search, which is required by many graphic designers, content creators, and others. Websurfx aims to provide these features and others, such as proper NSFW blocking, to improve the user experience.
## Why AGPLv3?
Websurfx is distributed under the **AGPLv3** license to keep the source code open and transparent. This helps keep malware, telemetry, and other dangers out of the project. **AGPLv3** is a strong copyleft license that ensures the software's source code, including any modifications or improvements made to the code, remains open and available to everyone.
Websurfx is released under the **AGPLv3** license to ensure that the source code remains open and transparent. This helps to prevent the inclusion of spyware, telemetry, or other malicious code in the project. **AGPLv3** is a strong copyleft license that ensures the source code of the software remains open and available to everyone, including any modifications or improvements made to the code.
## Why Rust?
Websurfx is based on Rust due to its memory safety features, which prevent vulnerabilities and make the codebase more secure. Rust is also faster than C++, contributing to Websurfx's speed and responsiveness. Finally, the Rust ownership and borrowing system enables secure concurrency and thread safety in the program.
Rust was chosen as the programming language for Websurfx due to its memory safety features, which can help prevent vulnerabilities and make the codebase more secure. Rust is also faster than C++, which helps to make Websurfx fast and responsive. In addition, Rust\'s ownership and borrowing system allows for safe concurrency and thread safety in the codebase.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# More Contributors Wanted 📣
# More Contributers Wanted 📣
We are looking for more willing contributors to help grow this project. For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines and rules for making contributions.
We are looking for more willing contributors to help grow this project.For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.org](CONTRIBUTING.org) file for guidelines and rules for making contributions.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Supporting Websurfx 💖
> For full details and other ways you can help out, see: [**Contributing**](CONTRIBUTING.md)
> For full details and other ways you can help out, see: [**Contributing**]()
If you use Websurfx and would like to contribute to its development, we're glad to have you on board! Contributions of any size or type are always welcome, and we will always acknowledge your efforts.
If you're using Websurfx and would like to help support its development, then that would be awesome! Contributions of any type, any size, are always very much appreciated, and we will appropriately credit you for your effort.
Several areas that we need a bit of help with at the moment are:
- **Better and more color schemes**: Help fix color schemes and add other famous color schemes.
- **Improve evasion code for bot detection**: Help improve code related to evading IP blocking and emulating human behaviors located in each engine's file.
- **Logo**: Help create a logo for the project and website.
- **Docker Support**: Help write a Docker Compose file for the project.
- Submit a PR to add a new feature, fix a bug, update the docs, add a theme, widget, or anything else.
- **Better and more colorchemes** - Help fix colorchemes and add other famous colorchemes.
- **Improve evasion code for bot detection** - Help improve code related to evade ip blocking and emulate human behaviours located in everyone engine file.
- **Logo** - Help create a logo for the project and website.
- **Docker Support** - Help write a docker compose file for the project.
- Submit a PR to add a new feature, fix a bug, update the docs, add a theme, widget or something else.
- Star Websurfx on GitHub.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Documentation 📘
> [!Note]
> We welcome any contributions to the [documentation](docs) as this will benefit everyone who uses this project.
> **Note**
> We are willing to have any contribution regarding [documentation](https://github.com/neon-mmd/websurfx/wiki) as this helps everyone using this project.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Roadmap 🛣️
> Coming soon! 🙂.
> Coming soon!! 🙂.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Contributing 🙋
# Contributing 🙋
Contributions are welcome from anyone. It doesn't matter who you are; you can still contribute to the project in your own way.
Contributions are welcome from anyone. It doesn\'t matter who you are; you can still contribute to the project in your way.
## Not a developer but still want to contribute?
@ -242,22 +241,20 @@ Check out this [video](https://youtu.be/FccdqCucVSI) by Mr. Nick on how to contr
## Developer
If you are a developer, have a look at the [CONTRIBUTING.md](CONTRIBUTING.md) document for more information.
If you are a developer, have a look at the [CONTRIBUTING.org](CONTRIBUTING.org) document for more information.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# License 📜
# License 📜
Websurfx is licensed under the [AGPLv3](LICENSE) license.
Websurfx is available under the [AGPLv3](LICENSE) license.
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
# Credits 🤝
# Credits 🤝
We would like to thank the following people for their contributions and support:
**Contributors**
<p>
<br />
<a href="https://github.com/neon-mmd/websurfx/graphs/contributors">
@ -266,15 +263,7 @@ We would like to thank the following people for their contributions and support:
<br />
</p>
**Stargazers**
<p>
<a href="https://github.com/neon-mmd/websurfx/stargazers">
<img src="http://reporoster.com/stars/dark/neon-mmd/websurfx"/>
</a>
</p>
**[⬆️ Back to Top](#--)**
**[⬆️ Back to Top](#websurfx)**
---

View file

@ -1,85 +0,0 @@
//! A build module of the application which minifies the project's css and js files on build which
//! helps reduce the initial page by loading the files faster.
#![forbid(unsafe_code, clippy::panic)]
#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
#![warn(clippy::cognitive_complexity, rust_2018_idioms)]
// ------- Imports -------
use lightningcss::stylesheet::{MinifyOptions, ParserOptions, PrinterOptions, StyleSheet};
use minify_js::{minify, Session, TopLevelMode};
use std::{
fs::{read_dir, read_to_string, File, OpenOptions},
io::{Read, Write},
};
// ------- Constants -------
/// The path to the `public/static` folder in the codebase which holds the
/// css/js assets minified by this build script (note the trailing slash:
/// folder and file names are appended directly to this string).
const COMMON_STATIC_SOURCE_CODE_FOLDER: &str = "./public/static/";
/// The names of the folders located in the `public/static/` folder in the
/// codebase which contain the css files to be minified.
const STYLE_FOLDERS: [&str; 2] = ["themes", "colorschemes"];
/// The name of the environment variable that selects the packaging environment.
const PACKAGE_ENVIRONMENT_VARIABLE: &str = "PKG_ENV";
/// The value of the `PKG_ENV` environment variable that enables minification
/// (the comparison in `main` is case-insensitive).
const PRODUCTION_PKG_ENV_VARIABLE_VALUE: &str = "prod";
/// A main function which minifies both css and js files using `lightningcss` and `minify_js` when
/// the `PKG_ENV` environment and it is set to the value of `prod`.
///
/// # Error
///
/// This function returns the unit type when the minification process runs successfully otherwise
/// it returns a standard error.
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Minification only runs when `PKG_ENV` is set to `prod`, so development
// builds keep the original (readable) css/js files untouched.
if let Ok(pkg_env_var) = std::env::var(PACKAGE_ENVIRONMENT_VARIABLE) {
if pkg_env_var.to_lowercase() == PRODUCTION_PKG_ENV_VARIABLE_VALUE {
// Loop over each file in the `themes` and `colorschemes` folders and
// minify it in place using the `lightningcss` minifier.
for folder_name in STYLE_FOLDERS {
for file in read_dir(format!("{COMMON_STATIC_SOURCE_CODE_FOLDER}{folder_name}/"))? {
let file_path = file?.path();
let source = read_to_string(file_path.clone())?;
// On a parse failure, surface the offending file name alongside the
// parser error so the build failure is actionable.
let mut stylesheet = StyleSheet::parse(&source, ParserOptions::default())
.map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;
stylesheet.minify(MinifyOptions::default())?;
let minified_css = stylesheet.to_css(PrinterOptions::default())?;
// Overwrite the original file in place (`truncate` discards the old contents).
let mut old_css_file = OpenOptions::new()
.write(true)
.truncate(true)
.open(file_path)?;
old_css_file.write_all(minified_css.code.as_bytes())?;
old_css_file.flush()?;
}
}
// Loop over each regular file directly under `public/static` and minify it
// in place using the `minify-js` minifier.
// NOTE(review): only `is_file()` is checked, not the file extension — this
// assumes every regular file directly under `public/static/` is JavaScript;
// a non-JS file placed there would be fed to the JS minifier. TODO confirm.
for file in read_dir(COMMON_STATIC_SOURCE_CODE_FOLDER)? {
let file_path = file?.path();
if file_path.is_file() {
let mut code = Vec::new();
let mut js_file = File::open(file_path.clone())?;
js_file.read_to_end(&mut code)?;
// Close the read handle before reopening the same path for writing.
drop(js_file);
let mut out = Vec::new();
minify(&Session::new(), TopLevelMode::Global, &code, &mut out)
.map_err(|err| format!("{err}\n{:?}", file_path.file_name().unwrap()))?;
// Overwrite the original file in place with the minified output.
let mut old_js_file = OpenOptions::new()
.write(true)
.truncate(true)
.open(file_path)?;
old_js_file.write_all(&out)?;
old_js_file.flush()?;
}
}
}
}
Ok(())
}

View file

@ -1,15 +0,0 @@
# Create the builder image for the development environment.
# `--platform=$BUILDPLATFORM` pins the image to the invoking build host's
# platform (relevant for buildx multi-platform builds).
FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18
# Install the native build dependencies needed to compile the project on Alpine.
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base
# `cargo-watch` re-runs cargo commands when source files change; the dev
# docker-compose invokes it as `cargo watch -x run` for live reload.
RUN cargo install cargo-watch --locked
# Create project directory
RUN mkdir -p /project
WORKDIR /project
# NOTE(review): `-crt-static` turns off static linking of the C runtime on
# musl — presumably so native dependencies link dynamically; confirm before changing.
ENV RUSTFLAGS="-C target-feature=-crt-static"
# The container is invoked as `cargo <args>` (e.g. `cargo watch -x run`).
ENTRYPOINT ["cargo"]

View file

@ -1,26 +0,0 @@
---
version: "3.9"
services:
# Redis instance used by the app's redis-backed caching feature during development.
redis:
container_name: redis
image: redis:6.2.5-alpine
tty: true
hostname: surfx-redis
# The websurfx development container.
# NOTE(review): the service key is spelled "websurx" — presumably a typo, but
# renaming it would change the compose interface, so it is left as-is.
websurx:
container_name: websurx-dev
image: websurfx:dev
working_dir: /project
tty: true
build:
context: .
dockerfile: dev.Dockerfile
ports:
- 8080:8080
# Bind-mount the repository into the container so edits made on the host
# are picked up immediately.
volumes:
- type: bind
source: .
target: /project
# Combined with the image's `cargo` entrypoint this runs `cargo watch -x run`,
# rebuilding and restarting the server on every source change.
command:
- watch
- -x
- run

View file

@ -1,4 +1,3 @@
---
version: "3.9"
services:
app:
@ -6,13 +5,11 @@ services:
build: .
ports:
- 8080:8080
# Uncomment the following lines if you are using the `hybrid` or `redis` caching feature.
# depends_on:
# - redis
# links:
# - redis
volumes:
- ./websurfx/:/etc/xdg/websurfx/
# Uncomment the following lines if you are using the `hybrid` or `redis` caching feature.
# redis:
# image: redis:latest
depends_on:
- redis
links:
- redis
redis:
image: redis:latest
ports:
- 6379:6379

View file

@ -1,20 +0,0 @@
<h1 align="center"><img src="../images/websurfx_docs_image.png" alt="Websurfx Docs" align="center"></h1>
# General
- [Introduction](./introduction.md)
- [**FAQ**](./faq.md)
# Users
- [Instances](./instances.md)
- [Installation](./installation.md)
- [Features](./features.md)
- [Configuration](./configuration.md)
- [Theming](./theming.md)
# Developers
- [Developing](./developing.md)
- [**Contribute**](https://github.com/neon-mmd/websurfx/blob/master/CONTRIBUTING.md)
- [**Coding style**](https://rust-lang.github.io/api-guidelines/naming.html)

View file

@ -1,93 +0,0 @@
# Configuration
## Installed From Source
If you have built `websurfx` from the source then the configuration file will be located under the project directory (codebase) at `websurfx/`
> [!Note]
> If you have built websurfx with an unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under the project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
## Installed From Package
If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` make the changes there and rerun the websurfx server.
Some of the configuration options provided in the file are stated below. These are subdivided into the following categories:
- General
- Server
- Search
- Website
- Cache
- Search Engines
# General
- **logging:** An option to enable or disable logs.
- **debug:** An option to enable or disable debug mode.
- **threads:** The amount of threads that the app will use to run (the value should be greater than 0).
## Server
- **port:** Port number on which server should be launched.
- **binding_ip_addr:** IP address on which the server should be launched.
- **production_use:** Whether to use production mode or not (in other words, this option should be enabled when hosting the server as a service for a large number of users). If production_use is set to true, a random delay is added before sending each request to the upstream search engines; this prevents DDoSing the upstream search engines with a large number of simultaneous requests.
- **request_timeout:** Timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
- **rate_limiter:** The configuration option to configure rate limiting on the search engine website.
## Search
- **safe_search:** This option is used to configure the search filtering based on different safe search levels. (value a number between 0 to 4)
> This option provides 4 levels of search filtering:
>
> - Level 0 - With this level no search filtering occurs.
> - Level 1 - With this level some search filtering occurs.
> - Level 2 - With this level the upstream search engines are restricted to sending sensitive content like NSFW search results, etc.
> - Level 3 - With this level the regex-based filter lists are used alongside level 2 to filter more search results that have slipped in or custom results that need to be filtered using the filter lists.
> - Level 4 - This level is similar to level 3 except in this level the regex-based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are a parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
## Website
- **colorscheme:** The colorscheme name which should be used for the website theme (the name should be by the colorscheme file name present in the `public/static/colorschemes` folder).
> By Default we provide 12 colorschemes to choose from these are:
>
> 1. catppuccin-mocha
> 2. dark-chocolate
> 3. dracula
> 4. gruvbox-dark
> 5. monokai
> 6. nord
> 7. oceanic-next
> 8. one-dark
> 9. solarized-dark
> 10. solarized-light
> 11. tokyo-night
> 12. tomorrow-night
- **theme:** The theme name that should be used for the website (again, the name should be by the theme file name present in the `public/static/themes` folder).
> By Default we provide 1 theme to choose from these are:
>
> 1. simple
- **animation:** The animation name that should be used for the website (again, the name should be by the animation file name present in the `public/static/animations` folder).
> By Default we provide 1 animation to choose from these are:
>
> 1. simple-frosted-glow
## Cache
- **redis_url:** Redis connection URL address on which the client should connect.
> **Note**
> This option can be commented out if you have compiled the app without the `redis-cache` feature. For more information, See [**building**](./building.md).
- **cache_expiry_time:** The maximum time the server will store the cache for, after which it flushes/removes/expires/invalidates the cached results (the value should be in seconds and greater than or equal to 60 seconds).
## Search Engines
- **upstream_search_engines:** Select from the different upstream search engines from which the results should be fetched.
[⬅️ Go back to Home](./README.md)

View file

@ -1,643 +0,0 @@
# Developing
This page of the docs outlines how to get **Websurfx** up and running in a development environment, and outlines the common workflow, different ways to work on the project, a high-level overview of how the project works, project structure, and the best practices that should be followed when working on the project.
<details>
<summary><b>Table of Contents</b></summary>
<p>
- [Setting up the Development Environment](#setting-up-the-development-environment)
- [Local Development](#local-development-)
- [Gitpod](#gitpod-)
- [NixOS Dev Shell using Nix Flake](#nixos-dev-shell-using-nix-flake-)
- [Local Development with Docker Compose](#local-development-with-docker-compose-)
- [Project Commands](#project-commands)
- [Environment Variables](#environment-variables)
- [Git Strategy](#git-strategy)
- [Flow](#git-flow)
- [Branches](#git-branch-naming)
- [Commit emojis](#commit-emojis)
- [PR Guidelines](#pr-guidelines)
- [Resources for Beginners](#resources-for-beginners)
- [App Info](#app-info)
- [Code Style Guide](#style-guide)
- [Application Structure](#application-structure)
- [Development Tools](#development-tools)
- [Misc / Notes](#notes)
</p>
</details>
## Setting up the Development Environment
By default, we provide four different ways to work on the project. These are as follows:
- [Local Development](#local-development-)
- [Gitpod](#gitpod-)
- [NixOS Dev Shell using Nix Flake](#nixos-dev-shell-using-nix-flake-)
- [Local Development with Docker Compose](#local-development-with-docker-compose-)
The different methods are explained in depth below.
### Local Development
This section covers how to set up the project for development on your local machine (bare metal).
#### Prerequisites
Before you start working on the project. You will need the following packages installed on your system:
- The latest version of `cargo` installed on your system which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
- The latest version of `npm` installed on your system which is required to allow the installation of other tools necessary for the project. The installation for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
- The latest version of `redis` installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation for this can be found [here](https://redis.io/docs/getting-started/installation/).
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` and plugins run the following command:
```shell
$ npm i -g stylelint
$ npm i -g stylelint stylelint-config-standard postcss-lit
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
- `Cargo-watch` installed on your system which will allow you to auto-build the project when any checks occur in the source code files in the codebase (`websurfx` directory). Before you install `cargo-watch` on your system, make sure you have `cargo` installed on your system. To install `cargo-watch` run the following command:
```shell
cargo install cargo-watch
```
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
- Finally, The latest version of `Docker` is installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).
> [!Note]
> For **rolling release Linux distributions (distros)**, the above-mentioned required packages except for `stylelint` and `cargo-watch` can also be installed via the distro-specific package manager.
>
> **For Example:**
>
> On `arch linux` the following packages can be installed by following the link to the installation instructions provided below:
>
> - `Cargo`: https://wiki.archlinux.org/title/rust
> - `Npm`: https://wiki.archlinux.org/title/Node.js
> - `Redis`: https://wiki.archlinux.org/title/redis
> - `Git`: https://wiki.archlinux.org/title/git
> - `Docker`: https://wiki.archlinux.org/title/docker
>
> But we do not recommend this method for **stable release Linux distros** as they tend to not provide very up-to-date versions of the required packages.
#### Setting up Pre-commit Checks
Before you set `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following command:
```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```
Once you have finished running the above commands then run the following command to set the `pre-commit` checks:
```shell
cargo test
```
By running the above-mentioned command, it will automatically set up all the pre-commit checks in the project.
#### Running the Project
If you have followed the above section then you should have a cloned repository folder present on your system. In the same directory run the following command to run the project:
```shell
cargo watch -q -x "run" -w "."
```
This will compile the app by default with the **In-Memory caching** feature. To compile, run, and test the app with other features follow the build options listed below:
##### Hybrid Cache
To build and run the app with the `Hybrid caching` feature. Run the following command:
```shell
cargo watch -q -x "run --features redis-cache" -w .
```
##### No Cache
To build and run the search engine with the `No caching` feature. Run the following command:
```shell
cargo watch -q -x "run --no-default-features" -w .
```
##### Redis Cache
To build the search engine with the `Redis caching` feature. Run the following command:
```shell
cargo watch -q -x "run --no-default-features --features redis-cache" -w .
```
> Optionally, if you have built and run the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above) then you will need to start the redis server alongside the app, which can be done by running the following command:
>
> ```shell
> redis-server --port 8082 &
> ```
Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.
> For more info on all the project commands. See: [**Project Commands**](#project-commands-)
### Gitpod
This section covers how to use and set up the Gitpod development environment for working on the project.
> [!Note]
> By default the project only supports the Vscode **IDE/Editor** for Gitpod.
#### Launching Gitpod
> For a full guide on how to fork the project. See: [**Forking**](#)
To launch gitpod and start working on the project from your fork of the Websurfx, Just navigate to the following link:
```text
https://gitpod.io/#https://github.com/<your_github_username>/websurfx
```
> For a full guide on how to use it and how to use it in different ways. See [**Learn Gitpod**](https://piped.kavin.rocks/playlist?list=PL3TSF5whlprXVp-7Br2oKwQgU4bji1S7H)
#### Default Plugins
The project by default provides a set of pre-installed plugins for Gitpod, which is done to improve productivity and efficiency while working on the project, as well as to make working on the project more fun and engaging. These plugins can be customized from within the `Gitpod` instance.
The list of all the pre-installed plugins is provided below:
**Productivity**
- [CodeLLDB](https://open-vsx.org/extension/vadimcn/vscode-lldb): Provides a native debugger for the Rust programming language.
- [GitHub Actions](https://open-vsx.org/extension/cschleiden/vscode-github-actions): Provides an easy way to work with GitHub Actions.
- [rust-analyzer](https://open-vsx.org/extension/rust-lang/rust-analyzer): Provides a language server for the Rust programming language.
- [better-toml](https://open-vsx.org/extension/bungcip/better-toml): Provides support for toml files.
- [crates](https://open-vsx.org/extension/serayuzgur/crates): Makes managing rust dependencies easier.
- [Error Lens](https://open-vsx.org/extension/usernamehw/errorlens): Provides better highlighting of errors.
- [markdownlint](https://open-vsx.org/extension/DavidAnson/vscode-markdownlint): Provides a linter for linting markdown documents.
- [Prettier](https://open-vsx.org/extension/esbenp/prettier-vscode): Provides a code formatter.
- [Stylelint](https://open-vsx.org/extension/stylelint/vscode-stylelint): Provides a linter for CSS files.
- [ESLint](https://open-vsx.org/extension/dbaeumer/vscode-eslint): Provides a linter for JS files.
- [Syntax Highlighter](https://open-vsx.org/extension/evgeniypeshkov/syntax-highlighter): A better syntax highlighting for code.
- [Docker](https://open-vsx.org/extension/ms-azuretools/vscode-docker): Makes handling docker files easier.
- [indent-rainbow](https://open-vsx.org/extension/oderwat/indent-rainbow): Highlights code indentation for better visualization.
- [Auto Rename Tag](https://open-vsx.org/extension/formulahendry/auto-rename-tag): Provides a way to easily and quickly rename html tags.
- [Rust Test Explorer](https://open-vsx.org/extension/Swellaby/vscode-rust-test-adapter): View and run cargo tests easily from a convenient sidebar.
- [Search crates-io](https://open-vsx.org/extension/belfz/search-crates-io): Provides crates suggestions in the `cargo.toml` file.
- [Test Adapter Converter](https://open-vsx.org/extension/hbenl/test-adapter-converter): A vscode native way to view and run tests.
- [Test Explorer UI](https://open-vsx.org/extension/hbenl/vscode-test-explorer): Provides a way to run any test from a convenient sidebar.
- [GitLens](https://open-vsx.org/extension/eamodio/gitlens): Provides a better and more efficient way to manage common git workflows.
> Optionally, if you prefer a more keyboard-centric workflow then we would recommend using the following extension:
>
> - [VSCode Neovim](https://open-vsx.org/extension/asvetliakov/vscode-neovim): Provides complete vim emulation for vscode.
**Theming**
- [Catppuccin for VSCode](https://open-vsx.org/extension/Catppuccin/catppuccin-vsc): Provides the Catppuccin theme for vscode.
- [Material Icon Theme](https://open-vsx.org/extension/PKief/material-icon-theme): Provides material design icons for files depending on the file extension.
> If you have more ideas and ways to improve Gitpod for development purposes then feel free to do so by contributing a PR to this project [**here**](https://github.com/neon-mmd/websurfx/pulls).
### NixOS Dev Shell using Nix Flake
This section covers how to setup the project for development using the `NixOS dev-shell`.
#### Pre Setup Requirements
Before you start working on the project. You will need the following packages installed on your system:
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
#### Setting up Pre-commit Checks
Before you setup `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following command:
```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```
Then run the following command to setup the `NixOS dev-shell`:
```shell
nix develop
```
> You can use `nix-direnv` to simplify entering into the `nix-shell`. Its setup is beyond the scope of this guide. Read more about it here: [nix-direnv](https://github.com/nix-community/nix-direnv)
This will add `docker`, `cargo-watch`, and other dev environment essentials to your `nix-shell` so you don't have to install everything imperatively.
After finishing the commands above, run the following command to setup the `pre-commit` checks:
```shell
cargo test
```
By running the above-mentioned command, it will automatically set up all the pre-commit checks in the project.
#### Post Setup Requirements
The final step is to run
```shell
npm i -D stylelint-config-standard postcss-lit
```
This will add `node_modules` in the current directory.
Run `git commit` and if everything is set up correctly, it should say that your branch is up to date.
#### Running the Project
If you have followed the above section then you should now be inside a `dev-shell` environment. In the same environment run the following command to run the project:
```shell
cargo watch -q -x "run" -w "."
```
This will compile the app by default with the **In-Memory caching** feature. To compile, run, and test the app with other features follow the build options listed below:
##### Hybrid Cache
To build and run the app with the `Hybrid caching` feature. Run the following command:
```shell
cargo watch -q -x "run --features redis-cache" -w .
```
##### No Cache
To build and run the search engine with the `No caching` feature. Run the following command:
```shell
cargo watch -q -x "run --no-default-features" -w .
```
##### Redis Cache
To build the search engine with the `Redis caching` feature. Run the following command:
```shell
cargo watch -q -x "run --no-default-features --features redis-cache" -w .
```
> Optionally, if you have built and run the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above) then you will need to start the redis server alongside the app, which can be done by running the following command:
>
> ```shell
> redis-server --port 8082 &
> ```
Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.
### Local Development with Docker Compose
This section covers how to set up the project for development on your local machine (bare metal) using `docker compose`.
#### Prerequisites
Before you start working on the project. You will need the following packages installed on your system:
- The latest version of `cargo` installed on your system which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
- The latest version of `npm` installed on your system which is required to allow the installation of other tools necessary for the project. The installation for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` run the following command:
```shell
$ npm i -g stylelint
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
- `Git` installed on your system. The installation instructions for this can be found [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
- Finally, The latest version of `Docker` is installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation instructions for this can be found [here](https://docs.docker.com/engine/install/).
> [!Note]
> For **rolling release Linux distributions (distros)**, all the above-mentioned required packages can also be installed via the distro-specific package manager.
>
> **For Example:**
>
> On `arch linux` the following packages can be installed by following the link to the installation instructions provided below:
>
> - `Cargo`: https://wiki.archlinux.org/title/rust
> - `Npm`: https://wiki.archlinux.org/title/Node.js
> - `Git`: https://wiki.archlinux.org/title/git
> - `Docker`: https://wiki.archlinux.org/title/docker
>
> But we do not recommend this method for **stable release Linux distros** as they tend to not provide very up-to-date versions of the required packages.
#### Setting up Pre-commit Checks
Before you setup `pre-commit` checks, you will first need to clone **your fork of the project** and navigate into the cloned repository by running the following command:
```shell
git clone https://github.com/<your_github_username>/websurfx.git
cd websurfx
```
Once you have finished running the above commands then run the following command to setup the `pre-commit` checks:
```shell
cargo test
```
By running the above-mentioned command, it will automatically set up all the pre-commit checks in the project.
#### Running the Project
If you have followed the above section then you should have a cloned repository folder present on your system. In the same directory, edit the `dev.docker-compose.yml` file as required before running the following command to run the project:
```shell
$ docker compose -f dev.docker-compose.yml up
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
Once you have finished running the above command, Websurfx should now be served on the address http://127.0.0.1:8080. Hot reload is enabled, so making changes to any of the files will trigger the project to be rebuilt.
### Project Commands
#### Basics
- `cargo build`: Builds the project.
> [!Note]
> When you build the project first time with the above command it will require the app to compile every dependency in the project which will then be cached on your system. So when you compile the app next time it will only compile for the new changes.
- `cargo run`: Starts the app and serves the project on http://127.0.0.1:8080.
> [!Important]
> You must run the build command first.
#### Development
- `cargo watch -q -x "run" -w .`: Starts the development server with hot reloading.
- `cargo fmt -- --check`: Checks the code for proper formatting.
- `cargo clippy`: Lints code to ensure it follows a consistent, neat style.
- `cargo test`: Runs unit tests, integration tests and doc tests.
### Environment Variables
All environment variables are optional. Currently, there are not many environment variables used, as most of the user preferences are stored under the `websurfx` folder (located under the codebase (`websurfx` directory)) in the `config.lua` file.
All the available environment variables are listed below:
- `PKG_ENV`: Sets the logging level for the app to **Trace** which can be useful for better debugging of the app. This environment variable accepts two values `dev` or `prod` as strings.
- `RUST_BACKTRACE`: Rust-specific environment variable useful for getting more elaborate error messages with an error stack to better diagnose the issue. This environment variable accepts three values `0` (off), `1` (on), and `full` (for long error stack to being printed out).
## Git Strategy
### Git Flow
Like most Git repos, we are following the [Github Flow](https://guides.github.com/introduction/flow) standard.
1. Create a branch (or fork if you don't have write access)
2. Code some awesome stuff 🧑‍💻
3. Add, commit, and push your changes to your branch/ fork
4. Head over to GitHub and create a Pull Request
5. Fill in the required sections in the template, and hit submit
6. Follow up with any reviews on your code
7. Merge 🎉
### Git Branch Naming
The format of your branch name should be something similar to: `[TYPE]/[TICKET]_[TITLE]`
For example, `FEATURE/420_Awesome-feature` or `FIX/690_login-server-error`
### Commit Emojis
Using a single emoji at the start of each commit message, issue title, and pull request title, to indicate the type of task, makes the commit ledger, issue, and pull request easier to understand — and it looks cool.
- 🎨 `:art:` - Improve the structure/format of the code.
- ⚡️ `:zap:` - Improve performance.
- 🔥 `:fire:` - Remove code or files.
- 🐛 `:bug:` - Fix a bug.
- 🚑️ `:ambulance:` - Critical hotfix
- ✨ `:sparkles:` - Introduce new features.
- 📝 `:memo:` - Add or update documentation.
- 🚀 `:rocket:` - Deploy stuff.
- 💄 `:lipstick:` - Add or update the UI and style files.
- 🎉 `:tada:` - Begin a project.
- ✅ `:white_check_mark:` - Add, update, or pass tests.
- 🔒️ `:lock:` - Fix security issues.
- 🔖 `:bookmark:` - Make a Release or Version tag.
- 🚨 `:rotating_light:` - Fix compiler/linter warnings.
- 🚧 `:construction:` - Work in progress.
- ⬆️ `:arrow_up:` - Upgrade dependencies.
- 👷 `:construction_worker:` - Add or update the CI build system.
- ♻️ `:recycle:` - Refactor code.
- 🩹 `:adhesive_bandage:` - Simple fix for a non-critical issue.
- 🔧 `:wrench:` - Add or update configuration files.
- 🍱 `:bento:` - Add or update assets.
- 🗃️ `:card_file_box:` - Perform database schema-related changes.
- ✏️ `:pencil2:` - Fix typos.
- 🌐 `:globe_with_meridians:` - Internationalization and translations.
For a full list of options, see [gitmoji.dev](https://gitmoji.dev/)
### PR Guidelines
Once you've made your changes, and pushed them to your fork or branch, you're ready to open a pull request!
For a pull request to be merged, it must:
- The build, lint, and tests (run by GH actions) must pass
- There must not be any merge conflicts
When you submit your pull request, include the required info, by filling out the pull request template. Including:
- A brief description of your changes.
- The issue or ticket number (if applicable).
- For UI-related updates include a screenshot.
- If any dependencies were added, explain why they were needed, state the associated cost, and confirm that they do not introduce any security, privacy, or speed issues
- Optionally, provide a checklist of all the changes that were included in the pull request.
> [!Important]
> Make sure to fill all the required/mandatory sections of the pull request as filling them helps us distinguish between spam pull requests and legitimate pull requests.
> [!Note]
> The pull request template contains comments in the following form `<!-- -->` which are used to provide a guide on what should be provided under each heading of the template. These comments are never rendered when the pull request is either created or updated and hence anything provided in such comments is never displayed.
## Resources for Beginners
New to Web Development? Or New to GitHub? Glad to see you're here!! :slightly_smiling_face: Websurfx is a pretty simple app, so it should make a good candidate for your first PR. The following articles (which have been divided into parts for convenience) should point you in the right direction for getting up to speed with the technologies used in this project:
**Development**
- [Basics of Rust](https://piped.kavin.rocks/playlist?list=PLai5B987bZ9CoVR-QEIN9foz4QCJ0H2Y8)
- [Introduction and deep dive into async/await in rust](https://piped.kavin.rocks/watch?v=ThjvMReOXYM)
- [Getting Started to Actix Guide](https://actix.rs/docs/getting-started)
- [Basics of Lua](https://learn.coregames.com/courses/intro-to-lua/)
- [Complete course on CSS](https://piped.kavin.rocks/watch?v=1Rs2ND1ryYc)
- [Complete course on JS](https://piped.kavin.rocks/playlist?list=PL_c9BZzLwBRLVh9OdCBYFEql6esA6aRsi)
- [Responsive web design](https://piped.kavin.rocks/watch?v=srvUrASNj0s)
- [Complete beginners guide to Docker](https://docker-curriculum.com/)
- [Docker Classroom - Interactive Tutorials](https://training.play-with-docker.com/)
- [Docker Compose Tutorial](https://docs.docker.com/compose/gettingstarted/)
- [ES6 Tutorial](https://piped.kavin.rocks/watch?v=nZ1DMMsyVyI)
- [Cargo Guide Book](https://doc.rust-lang.org/cargo/index.html)
**GitHub**
- [Complete Guide to Open Source - How to Contribute](https://piped.kavin.rocks/watch?v=yzeVMecydCE)
- [Forking a Project](https://piped.kavin.rocks/watch?v=FnxFwyzm4Z4)
- [A Tutorial on Git](https://piped.kavin.rocks/playlist?list=PL4lTrYcDuAfxAgSefXftJXbhw0qvjfOFo)
- [Git cheat sheet](http://git-cheatsheet.com/)
For Rust, CSS, JS, HTML, Git, and Docker- you'll need an IDE (e.g. [VSCode](https://code.visualstudio.com/) or [Neovim](https://neovim.io/) and a terminal (Windows users may find [WSL](https://docs.microsoft.com/en-us/windows/wsl/) more convenient).
## App Info
### Style Guides
Linting is done using [Cargo Clippy](https://doc.rust-lang.org/clippy/) and [StyleLint](https://stylelint.io/) or [ESLint](https://eslint.org/). Also, linting is run as a git pre-commit hook.
> [!Important]
> All lint checks must pass before any PR can be merged.
Styleguides to follow:
- [Rust API Guidelines](https://rust-lang.github.io/api-guidelines/naming.html)
- [Airbnb JS Guidelines](https://github.com/airbnb/javascript)
- [Google's Html and CSS Guidelines](https://google.github.io/styleguide/htmlcssguide.html)
## Application Structure
> [!Important]
> We follow the Unix style naming conventions for all the files and folders in the project (except for all files under the `themes` and `colorschemes` folder in the frontend's source code which requires that the names of the files and folders should be in lowercase and the words be separated with a hyphen.) which includes the name of the files and folders should be in lowercase and every word should be separated with an underscore.
**Files in the root of the codebase:** `./`
```
./
├── .dockerignore # Docker ignore file to ignore stuff being included in the file docker image.
├── .gitignore # Git ignore file to ignore stuff from being
├── Cargo.lock # Auto-generated list of current packages and version numbers.
├── Cargo.toml # Project meta-data and dependencies.
├── Dockerfile # The blueprint for building the Docker container.
├── LICENSE # License for use.
├── README.md # Readme, basic info for getting started.
├── dev.Dockerfile # The blueprint for building the Docker container for development purposes.
├── dev.docker-compose.yml # A Docker run command for development environments.
├── docker-compose.yml # A Docker run command.
├── flake.lock # NixOS auto-generated flake configuration.
├── flake.nix # Nix flake package configuration.
├── docs # Markdown documentation
├── public # Project front-end source code
├── src # Project back-end source code
├── tests # Project integration tests for the back-end source code.
└── websurfx # Project folder containing config files for the app.
```
**Frontend Source:** `./public/`
```
./public/
├── robots.txt # Robots file for the Website.
├── images # Images for the Website.
└── static # The directory containing all the UI handlers.
├── cookies.js # Handles the loading of saved cookies.
├── error_box.js # Handles the toggling functionality of the error box on the search page.
├── index.js # Functions to handle the search functionality of the search bar.
├── pagination.js # Functions to handle the navigation between the previous and next page in the search page.
├── search_area_options.js # Changes the search options under the search bar in the search page according to the safe search level set using the URL safesearch parameter.
├── settings.js # Handles the settings and saving of all the settings page options as a cookie.
├── colorschemes # A folder containing all the popular colorscheme files as CSS files.
└── themes # A folder containing all the popular theme files as CSS files.
```
**Frontend Maud HTML Framework Source:** `./src/templates/`
```
./src/templates/
├── mod.rs # A module file for the rust project.
├── partials # A folder containing the code for partials for the views.
│ ├── bar.rs # Provides partial code for the search bar.
│ ├── footer.rs # Provides partial code for the footer section.
│ ├── header.rs # Provides partial code for the header section.
│ ├── mod.rs # A module file for the rust project.
│ ├── navbar.rs # Provides partial code for the navbar inside the header section.
│ ├── search_bar.rs # Provides partial code for the search bar present in the search page.
│ └── settings_tabs # A folder containing all the partials for the settings page tabs.
│ ├── cookies.rs # Provides partial code for the cookies tab.
│ ├── engines.rs # Provides partial code for the engines tab.
│ ├── general.rs # Provides partial code for the general tab.
│ ├── mod.rs # A module file for the rust project.
│ └── user_interface.rs # Provides partial code for the user interface tab.
└── views # A folder containing the code for the views.
├── about.rs # Provides code for the about page view.
├── index.rs # Provides code for the homepage view.
├── mod.rs # A module file for the rust project.
├── not_found.rs # Provides code for the 404 page view.
├── search.rs # Provides code for the search page view.
└── settings.rs # Provides code for the settings page view.
```
**Backend Source:** `./src/`
```
./src/
├── lib.rs # A library file for the rust project.
├── bin # A folder containing the source code that would produce the binary file when compiled.
│ └── websurfx.rs # A file that would be compiled into a binary file.
├── cache # A folder that contains code to handle the caching functionality of the search engine.
│ ├── cacher.rs # Handles the different caching features.
│ ├── error.rs # Provides custom error messages for different types of caches and their related errors.
│ ├── mod.rs # A module file for the rust project.
│ └── redis_cacher.rs # Provides custom asynchronous pool implementation with auto background reconnection functionality.
├── config # A folder that holds the code to help parse the lua config file that would be used in the app.
│ ├── mod.rs # A module file for the rust project.
│ └── parser.rs # Provides the code to parse the config file.
├── engines # A folder that holds code to handle fetching data from different upstream engines.
│ ├── brave.rs # Provides code to fetch and remove unnecessary or waste results from the fetched results from the brave search engine.
│ ├── duckduckgo.rs # Provides code to fetch and remove unnecessary or waste results from the fetched results from the duckduckgo search engine.
│ ├── mod.rs # A module file for the rust project.
│ ├── search_result_parser.rs # Provides helper function to help ease the process of defining different result selection selectors.
│ └── searx.rs # Provides code to fetch and remove unnecessary or waste results from the fetched results from the searx engine.
├── handler # A folder that provides helper code to provide a proper path to the public (theme) folder, config file, blocklist file, and allowlist file based on where they are located.
│ ├── mod.rs # A module file for the rust project.
│ └── paths.rs # Provides helper code to handle different paths.
├── models # A folder that provides different models for the different modules in the backend code.
│ ├── aggregation_models.rs # Provides different models (enums, structs) for handling and standardizing different parts in the "results" module code.
│ ├── engine_models.rs # Provides different models (enums, structs) for handling and standardizing different parts in the "engines" module code.
│ ├── mod.rs # A module file for the rust project.
│ ├── parser_models.rs # Provides different models (enums, structs) for handling and standardizing different parts in the "config" module code.
│ └── server_models.rs # Provides different models (enums, structs) for handling and standardizing different parts in the "server" module code.
├── results # A folder that provides code to handle the fetching and aggregating of results from the upstream search engines.
│ ├── aggregator.rs # Provides code to aggregate and fetch results from the upstream engines.
│ ├── mod.rs # A module file for the rust project.
│ └── user_agent.rs # Provides a helper function to allow random user agents to pass in the server request code to improve user privacy and avoid being detected as a bot.
├── server # A folder that holds code to handle the routes for the search engine website.
│ ├── mod.rs # A module file for the rust project.
│ ├── router.rs # Provides functions to handle the different routes on the website.
│ └── routes # A folder that contains code to handle the bigger route for the website.
│ ├── mod.rs # A module file for the rust project.
│ └── search.rs # Provides the function to handle the search route.
└── templates # A module that provides and handles Maud HTML framework source code for the search engine website (subfolders and files are explained in the above frontend section.)
```
## Development Tools
### Performance - Lighthouse
The easiest method of checking performance is to use Chromium's built-in auditing tool, Lighthouse. To run the test, open Developer Tools (usually F12) --> Lighthouse and click on the 'Generate Report' button at the bottom.
## Notes
### Known warnings
When running the build command, a warning appears. This is not an error and does not affect the security or performance of the application. They will be addressed soon in a future update.
```shell
warning: the following packages contain code that will be rejected by a future version of Rust: html5ever v0.23.0
note: to see what the problems were, use the option `--future-incompat-report`, or run `cargo report future-incompatibilities --id 2`
```
This warning just means that any dependencies or code using the `html5ever` crate would be deprecated and rejected in future versions of the Rust language. So right now these dependencies can still be used, as this has not happened yet.
[⬅️ Go back to Home](./README.md)

View file

@ -1,15 +0,0 @@
# General Questions
## Why Websurfx?
The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused [meta-search engine](https://en.wikipedia.org/wiki/Metasearch_engine). While there are numerous meta-search engines available, not all of them guarantee the security of their search engine, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is never a good thing. Also, there is the added problem of spam, ads, and inorganic results, for which most engines still have no foolproof answer, but Websurfx finally puts a full stop to this problem. In addition, Rust is used to write Websurfx, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, which is required by many graphic designers, content providers, and others. Websurfx attempts to improve the user experience by providing these and other features, such as proper NSFW blocking and Micro-apps or Quick results (like providing a calculator, currency exchanges, etc. in the search results).
## Why AGPLv3?
Websurfx is released under the AGPLv3 license to ensure that the source code remains open and transparent. This helps to prevent the inclusion of spyware, telemetry, or other malicious code in the project. AGPLv3 is a strong copyleft license that ensures the source code of the software remains open and available to everyone, including any modifications or improvements made to the code.
## Why Rust?
Rust was chosen as the programming language for Websurfx due to its memory safety features, which can help prevent vulnerabilities and make the codebase more secure. Rust is also faster than C++, which helps to make Websurfx fast and responsive. In addition, Rust's ownership and borrowing system allows for safe concurrency and thread safety in the codebase.
[⬅️ Go back to Home](./README.md)

View file

@ -1,42 +0,0 @@
# Features
The project provides 4 caching options as conditionally compiled features. This helps reduce the size of the compiled app by only including the code that is necessary for a particular caching option.
The different caching features provided are as follows:
- No cache
- Redis cache
- In memory cache
- Hybrid cache
## Explanation
### No Cache
This feature can drastically reduce binary size but with the cost that subsequent search requests and previous & next page search results are not cached which can make navigating between pages slower. As well as Page refreshes of the same page also become slower as each refresh has to fetch the results from the upstream search engines.
### Redis Cache
This feature allows the search engine to cache the results on the redis server. This feature can be useful for having a dedicated cache server for multiple devices hosted with the `Websurfx` server which can use the one dedicated cache server for hosting their cache on it. But a disadvantage of this solution is that if the `Redis` server is located far away (for example provided by a vps as service) and if it is unavailable or down for some reason then the `Websurfx` server would not be able to function properly or will crash on startup.
### In Memory Cache
This feature is the default feature provided by the project. This feature allows the search engine to cache the results in the memory which can help increase the speed of the fetched cache results and it also has the advantage that it is extremely reliable as all the results are stored in memory within the search engine. Though the disadvantage of this solution is that caching of results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system and as such is not ideal for very low memory devices and is highly unscalable.
### Hybrid Cache
This feature provides the advantages of both `In Memory` caching and `Redis` caching and it is an ideal solution if you need a very resilient and reliable solution for the `Websurfx` which can provide both speed and reliability. Like for example if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting a `Websurfx` instance which will be used by hundreds or thousands of users all over the world.
## Tabular Summary
| **Attributes** | **Hybrid** | **In-Memory** | **No Cache** | **Redis** |
|-----------------------------------------|------------|------------------------------------------------------|-----------------|------------------------|
| **Speed** | Fast | Caching is slow, but retrieval of cache data is fast | Slow | Fastest |
| **Reliability** | ✅ | ✅ | ✅ | ❌ |
| **Scalability** | ✅ | ❌ | - | ✅ |
| **Resiliency** | ✅ | ✅ | ✅ | ❌ |
| **Production/Large Scale/Instance use** | ✅ | Not Recommended | Not Recommended | Not Recommended |
| **Low Memory Support** | ❌ | ❌ | ✅ | ❌ |
| **Binary Size** | Big | Bigger than `No Cache` | small | Bigger than `No Cache` |
[⬅️ Go back to Home](./README.md)

View file

@ -1,409 +0,0 @@
# Install From Package
## Arch Linux
### Rolling/Edge/Unstable
You can install `Websurfx` through the [Aur](https://aur.archlinux.org/packages/websurfx-git), By running the following command (using [paru](https://github.com/Morganamilo/paru)):
```shell
paru -S websurfx-edge-git
```
After installing it you can run the websurfx server by running the following commands:
```shell
websurfx
```
Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
### Stable
For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version) with the only difference being that the package to be installed for the stable version is called `websurfx-git` instead of `websurfx-edge-git`.
## NixOS
A `flake.nix` has been provided to allow installing `websurfx` easily. It utilizes [naersk](https://github.com/nix-community/naersk) to automatically generate a derivation based on `Cargo.toml` and `Cargo.lock`.
The Websurfx project provides 2 versions/flavours for the flake `stable` and `rolling/unstable/edge`. The steps for each are covered below in different sections.
### Rolling/Edge/Unstable
To get started, First, clone the repository, edit the config file which is located in the `websurfx` directory, and then build and run the websurfx server by running the following commands:
```shell
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
cp -rf ./websurfx/ ~/.config/
$ mkdir /opt/websurfx/
$ cp -rf ./public/ /opt/websurfx/
nix build .#websurfx
nix run .#websurfx
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
Once you have run the above set of commands, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
> Optionally, you may include it in your own flake by adding this repo to its inputs and adding it to `environment.systemPackages` as follows:
>
> ```nix
> {
> description = "My awesome configuration";
>
> inputs = {
> websurfx.url = "github:neon-mmd/websurfx";
> };
>
> outputs = { nixpkgs, ... }@inputs: {
> nixosConfigurations = {
> hostname = nixpkgs.lib.nixosSystem {
> system = "x86_64-linux";
> modules = [{
> environment.systemPackages = [inputs.websurfx.packages.x86_64-linux.websurfx];
> }];
> };
> };
> };
> }
> ```
### Stable
For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge version`) with an addition of one command which has to be performed after cloning and changing the directory into the repository which makes the building step as follows:
```shell
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
git checkout stable
cp -rf ./websurfx/ ~/.config/
$ mkdir /opt/websurfx/
$ cp -rf ./public/ /opt/websurfx/
nix build .#websurfx
nix run .#websurfx
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
## Other Distros
The package is currently not available on other Linux distros. With contribution and support it can be made available on other distros as well 🙂.
# Install From Source
Before you can start building `websurfx`, you will need to have `Cargo` installed on your system. You can find the installation instructions [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
## Stable
To get started with Websurfx, clone the repository, edit the config file which is located in the `websurfx` directory, and install redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then build and run the websurfx server by running the following commands:
```shell
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
git checkout stable
cargo build -r
redis-server --port 8082 &
./target/release/websurfx
```
Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
## Rolling/Edge/Unstable
If you want to use the rolling/edge branch, run the following commands instead:
```shell
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
```
Once you have changed the directory to the `websurfx` directory then follow the build options listed below:
> [!Note]
> Before you start building the search engine using one of the below listed commands, we would strongly recommend setting the `PKG_ENV` environment variable, as this applies some special optimizations to the code to reduce the file size and improve the page load speed of the website.
> To set the `PKG_ENV` environment variable in the `bash` shell, run the following command:
>
> ```bash
> export PKG_ENV="prod"
> ```
>
> For how to set the environment variables in other shells. You can follow the instructions on how to do so by visiting the documentation of the specific shell you are using.
### Hybrid Cache
> For more information on the features and their pros and cons. see: [**Features**](./features.md)
To build the search engine with the `Hybrid caching` feature. Run the following build command:
```shell
cargo build -r --features redis-cache
```
### Memory Cache (Default Features)
> For more information on the features and their pros and cons. see: [**Features**](./features.md)
To build the search engine with the `In-Memory caching` feature. Run the following build command:
```shell
cargo build -r
```
### No Cache
> For more information on the features and their pros and cons. see: [**Features**](./features.md)
To build the search engine with the `No caching` feature. Run the following build command:
```shell
cargo build -r --no-default-features
```
### Redis Cache
> For more information on the features and their pros and cons. see: [**Features**](./features.md)
To build the search engine with the `Redis caching` feature, run the following build command:
```shell
cargo build -r --no-default-features --features redis-cache
```
> Optionally, If you have built the app with the `Redis cache`or `Hybrid cache` feature (as mentioned above) then before launching the search engine run the following command:
>
> ```shell
> redis-server --port 8082 &
> ```
Once you have finished building the `search engine`. then run the following command to start the search engine:
```shell
./target/release/websurfx
```
Once you have started the server, launch your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
If you want to change the port or the IP or any other configuration setting check out the [configuration docs](./configuration.md).
# Docker Deployment
Before you start, you will need [Docker](https://docs.docker.com/get-docker/) installed on your system first.
## Prebuild
The Websurfx project provides several prebuilt images based on the different features provided by the search engine. To get started using the prebuild image, you will first need to create a `docker-compose.yml` file with the following content:
```yaml
---
version: '3.9'
services:
app:
# Comment the line below if you don't want to use the `hybrid/latest` image.
image: neonmmd/websurfx:latest
# Uncomment the line below if you want to use the `no cache` image.
# image: neonmmd/websurfx:nocache
# Uncomment the line below if you want to use the `memory` image.
# image: neonmmd/websurfx:memory
# Uncomment the line below if you want to use the `redis` image.
# image: neonmmd/websurfx:redis
ports:
- 8080:8080
# Uncomment the following lines if you are using the `hybrid/latest` or `redis` image.
# depends_on:
# - redis
# links:
# - redis
volumes:
- ./websurfx/:/etc/xdg/websurfx/
# Uncomment the following lines if you are using the `hybrid/latest` or `redis` image.
# redis:
# image: redis:latest
```
Then make sure to edit the `docker-compose.yml` file as required. After that create a directory `websurfx` in the directory you have placed the `docker-compose.yml` file, and then in the new directory create two new empty files named `allowlist.txt` and `blocklist.txt`. Finally, create a new config file `config.lua` with the default configuration, which looks something like this:
```lua
-- ### General ###
logging = true -- an option to enable or disable logs.
debug = false -- an option to enable or disable debug mode.
threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0).
-- ### Server ###
port = "8080" -- port on which server should be launched
binding_ip = "0.0.0.0" -- the IP address on which the server should be launched.
production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one))
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
rate_limiter = {
number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
time_limit = 3, -- The time limit in which the number of requests that should be accepted.
}
-- ### Search ###
-- Filter results based on different levels. The levels provided are:
-- {{
-- 0 - None
-- 1 - Low
-- 2 - Moderate
-- 3 - High
-- 4 - Aggressive
-- }}
safe_search = 2
-- ### Website ###
-- The different colorschemes provided are:
-- {{
-- catppuccin-mocha
-- dark-chocolate
-- dracula
-- gruvbox-dark
-- monokai
-- nord
-- oceanic-next
-- one-dark
-- solarized-dark
-- solarized-light
-- tokyo-night
-- tomorrow-night
-- }}
colorscheme = "catppuccin-mocha" -- the colorscheme name that should be used for the website theme
theme = "simple" -- the theme name that should be used for the website
-- ### Caching ###
redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
-- ### Search Engines ###
upstream_search_engines = {
DuckDuckGo = true,
Searx = false,
} -- select the upstream search engines from which the results should be fetched.
```
Then run the following command to deploy the search engine:
```shell
$ docker compose up -d
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
Then launch the browser of your choice and navigate to http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>.
> [!Note]
> The official prebuild images only support `stable` versions of the app and will not support `rolling/edge/unstable` versions. But with support and contribution, it could be made available for these versions as well 🙂.
## Manual Deployment
This section covers how to deploy the app with docker manually by manually building the image and deploying it.
> [!Note]
> This section is provided for those who want to further customize the docker image or for those who are extra cautious about security.
> [!Warning]
> A note of caution the project currently only supports **x86-64** architecture and as such we do not recommend deploying the project on devices with other architectures. Though if you still want to do it then **do it at your own risk**.
### Unstable/Edge/Rolling
First, clone the repository by running the following command:
```bash
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
```
After that edit the config.lua file located under `websurfx` directory. In the config file, you will specifically need to change to values which are `binding_ip_addr` and `redis_connection_url` which should make the config look something like this:
```lua
-- ### General ###
logging = true -- an option to enable or disable logs.
debug = false -- an option to enable or disable debug mode.
threads = 10 -- the amount of threads that the app will use to run (the value should be greater than 0).
-- ### Server ###
port = "8080" -- port on which server should be launched
binding_ip = "127.0.0.1" -- the IP address on which the server should be launched.
production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one))
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
rate_limiter = {
	number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
	time_limit = 3, -- The time window within which the specified number of requests is accepted.
}
-- ### Search ###
-- Filter results based on different levels. The levels provided are:
-- {{
-- 0 - None
-- 1 - Low
-- 2 - Moderate
-- 3 - High
-- 4 - Aggressive
-- }}
safe_search = 2
-- ### Website ###
-- The different colorschemes provided are:
-- {{
-- catppuccin-mocha
-- dark-chocolate
-- dracula
-- gruvbox-dark
-- monokai
-- nord
-- oceanic-next
-- one-dark
-- solarized-dark
-- solarized-light
-- tokyo-night
-- tomorrow-night
-- }}
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
theme = "simple" -- the theme name which should be used for the website
-- ### Caching ###
redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
cache_expiry_time = 600 -- This option takes the expiry time of the search results (value in seconds and the value should be greater than or equal to 60 seconds).
-- ### Search Engines ###
upstream_search_engines = {
DuckDuckGo = true,
Searx = false,
Brave = false,
Startpage = false,
LibreX = false,
} -- select the upstream search engines from which the results should be fetched.
```
After this make sure to edit the `docker-compose.yml` and `Dockerfile` files as required and run the following command to deploy the app:
```bash
$ docker compose up -d --build
```
> [!Note]
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec`, or any other privileged access methods.
This will take around 5-10 mins for the first deployment, afterwards, the docker build stages will be cached so it will be faster to build from next time onwards. After the above step finishes launch your preferred browser and then navigate to `http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>`.
### Stable
For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with an addition of one command which has to be performed after cloning and changing the directory into the repository which makes the cloning step as follows:
```bash
git clone https://github.com/neon-mmd/websurfx.git
cd websurfx
git checkout stable
```
[⬅️ Go back to Home](./README.md)

View file

@ -1,15 +0,0 @@
# Instances
> To contribute your server instance, check out the contributing guide [here](https://github.com/neon-mmd/websurfx/blob/HEAD/CONTRIBUTING.md).
This page provides a list of `Websurfx` instances provided by us and our community.
|URL|Network|Version|Location|Status|Maintained By|TLS|IPv6|Comment|
|-|-|-|-|-|-|-|-|-|
|https://websurfx.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-spacex.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.instance.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|https://alamin655-surfx.hf.space|www|stable|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-surfx.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
[⬅️ Go back to Home](./README.md)

View file

@ -1,13 +0,0 @@
# Introduction
A modern-looking, lightning-fast, privacy-respecting, secure [meta search engine](https://en.wikipedia.org/wiki/Metasearch_engine) (pronounced as websurface or web-surface /wɛbˈsɜːrfəs/.) written in Rust. It provides a fast and secure search experience while respecting user privacy.
# Motivation
Most meta search engines tend to be slow, lack a high level of customization, and miss many features, and all of them lack security as they are written in unsafe languages like Python, JavaScript, etc., which tend to open a wide variety of vulnerabilities, which can also sometimes pose a threat to privacy as sometimes this can be exploited and can be used to leak out sensitive information, which is never good.
# Solution
Websurfx is a project that seeks to provide privacy, security, speed, and all the features that the user wants.
[⬅️ Go back to Home](./README.md)

View file

@ -1,885 +0,0 @@
# Theming
## Colorschemes
### Built-in
By default `websurfx` comes with 12 colorschemes to choose from which can be easily chosen using the config file or via the settings page on the website.
> For how to change colorschemes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
### Custom
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme containing each word separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
Creating colorschemes is as easy as it gets: it requires the user to create a colorscheme file named after the colorscheme being provided, in which every space should be replaced with a `-` (dash), and it should end with a `.css` file extension. After creating the file, you need to add the following code with the `colors` you want to include:
```css
:root {
--background-color: <background color>;
--foreground-color: <foreground color (text color on the website) >;
--logo-color: <logo color
(the color of the logo svg image on the website homepage) >;
--color-one: <color 1>;
--color-two: <color 2>;
--color-three: <color 3>;
--color-four: <color 4>;
--color-five: <color 5>;
--color-six: <color 6>;
--color-seven: <color 7>;
}
```
> [!Note]
> Please refer to the theme file located under `public/static/themes` to better understand where each color is being used.
**Example of `catppuccin-mocha` colorscheme:**
```css
:root {
--background-color: #1e1e2e;
--foreground-color: #cdd6f4;
--logo-color: #f5c2e7;
--color-one: #45475a;
--color-two: #f38ba8;
--color-three: #a6e3a1;
--color-four: #f9e2af;
--color-five: #89b4fa;
--color-six: #f5c2e7;
--color-seven: #ffffff;
}
```
## Themes
### Built-in
By default `websurfx` comes with 1 theme to choose from which can be easily chosen using the config file or via the settings page on the website.
> For how to change themes using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
### Custom
> This section expects the user to have some knowledge of `css`.
To write a custom theme for the website, you will first need to create a new file under the `public/static/themes` folder with the name of the theme containing each word separated with a hyphen (**-**). After that, edit the newly created file as required with new CSS code.
Here is an example of `simple theme` (which we provide by default with the app) which will give you a better idea on how you can create your own custom theme for the website:
#### General
```css
@font-face {
font-family: Rubik;
src: url('https://fonts.googleapis.com/css2?family=Rubik:wght@400;500;600;700;800&display=swap');
fallback: sans-serif;
}
* {
padding: 0;
margin: 0;
box-sizing: border-box;
}
html {
font-size: 62.5%;
}
body {
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
height: 100vh;
font-family: Rubik, sans-serif;
background-color: var(--background-color);
}
/* enforce font for buttons */
button {
font-family: Rubik, sans-serif;
}
```
#### Styles for the index page
```css
.search-container {
display: flex;
flex-direction: column;
gap: 5rem;
justify-content: center;
align-items: center;
}
.search-container svg {
color: var(--logo-color);
}
.search-container div {
display: flex;
}
```
#### Styles for the search box and search button
```css
.search_bar {
display: flex;
gap: 10px;
align-items: center;
}
.search_bar input {
border-radius: 6px;
padding: 2.6rem 2.2rem;
width: 50rem;
height: 3rem;
outline: none;
border: none;
box-shadow: rgb(0 0 0 / 1);
background-color: var(--color-one);
color: var(--foreground-color);
outline-offset: 3px;
font-size: 1.6rem;
}
.search_bar input:focus {
outline: 2px solid var(--foreground-color);
}
.search_bar input::placeholder {
color: var(--foreground-color);
opacity: 1;
}
.search_bar button {
padding: 2.6rem 3.2rem;
border-radius: 6px;
height: 3rem;
display: flex;
justify-content: center;
align-items: center;
outline-offset: 3px;
outline: 2px solid transparent;
border: none;
transition: 0.1s;
gap: 0;
background-color: var(--color-six);
color: var(--background-color);
font-weight: 600;
letter-spacing: 0.1rem;
}
.search_bar button:active {
outline: 2px solid var(--color-three);
}
.search_bar button:active,
.search_bar button:hover {
filter: brightness(1.2);
}
.search_area .search_options {
display: flex;
justify-content: space-between;
align-items: center;
}
.search_area .search_options select {
margin: 0.7rem 0;
width: 20rem;
background-color: var(--color-one);
color: var(--foreground-color);
padding: 1.2rem 2rem;
border-radius: 0.5rem;
outline-offset: 3px;
outline: 2px solid transparent;
border: none;
text-transform: capitalize;
}
.search_area .search_options select:active,
.search_area .search_options select:hover {
outline: 2px solid var(--color-three);
}
.search_area .search_options option:hover {
background-color: var(--color-one);
}
.result_not_found {
display: flex;
flex-direction: column;
font-size: 1.5rem;
color: var(--foreground-color);
}
.result_not_found p {
margin: 1rem 0;
}
.result_not_found ul {
margin: 1rem 0;
}
.result_not_found img {
width: 40rem;
}
/* styles for the error box */
.error_box .error_box_toggle_button {
background: var(--foreground-color);
}
.error_box .dropdown_error_box {
position: absolute;
display: none;
flex-direction: column;
background: var(--background-color);
border-radius: 0;
margin-left: 2rem;
min-height: 20rem;
min-width: 22rem;
}
.error_box .dropdown_error_box.show {
display: flex;
}
.error_box .dropdown_error_box .error_item,
.error_box .dropdown_error_box .no_errors {
display: flex;
align-items: center;
color: var(--foreground-color);
letter-spacing: 0.1rem;
padding: 1rem;
font-size: 1.2rem;
}
.error_box .dropdown_error_box .error_item {
justify-content: space-between;
}
.error_box .dropdown_error_box .no_errors {
min-height: 18rem;
justify-content: center;
}
.error_box .dropdown_error_box .error_item:hover {
box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
}
.error_box .error_item .severity_color {
width: 1.2rem;
height: 1.2rem;
}
.results .result_disallowed,
.results .result_filtered,
.results .result_engine_not_selected {
display: flex;
justify-content: center;
align-items: center;
gap: 10rem;
font-size: 2rem;
color: var(--foreground-color);
margin: 0 7rem;
}
.results .result_disallowed .user_query,
.results .result_filtered .user_query,
.results .result_engine_not_selected .user_query {
color: var(--background-color);
font-weight: 300;
}
.results .result_disallowed img,
.results .result_filtered img,
.results .result_engine_not_selected img {
width: 30rem;
}
.results .result_disallowed div,
.results .result_filtered div,
.results .result_engine_not_selected div {
display: flex;
flex-direction: column;
gap: 1rem;
line-break: strict;
}
```
#### Styles for the footer and header
```css
header {
width: 100%;
background: var(--background-color);
display: flex;
align-items: center;
justify-content: space-between;
padding: 2rem 3rem;
}
footer {
width: 100%;
background: var(--background-color);
display: flex;
align-items: center;
padding: 1.7rem 1.7rem 4rem;
gap: 1.8rem;
flex-direction: column;
justify-content: center;
}
header h1 a {
text-transform: capitalize;
text-decoration: none;
color: var(--foreground-color);
letter-spacing: 0.1rem;
}
header ul,
footer ul {
list-style: none;
display: flex;
justify-content: space-around;
align-items: center;
font-size: 1.5rem;
gap: 2rem;
}
header ul li a,
footer ul li a,
header ul li a:visited,
footer ul li a:visited {
text-decoration: none;
color: var(--color-two);
text-transform: capitalize;
letter-spacing: 0.1rem;
}
header ul li a {
font-weight: 600;
}
header ul li a:hover,
footer ul li a:hover {
color: var(--color-five);
}
footer div span {
font-size: 1.5rem;
color: var(--color-four);
}
footer div {
display: flex;
gap: 1rem;
}
```
#### Styles for the search page
```css
.results {
width: 90%;
display: flex;
flex-direction: column;
justify-content: space-around;
gap: 1rem;
}
.result {
gap: 1rem;
}
.results .search_bar {
margin: 1rem 0;
}
.results_aggregated {
display: flex;
flex-direction: column;
justify-content: space-between;
margin: 2rem 0;
content-visibility: auto;
}
.results_aggregated .result {
display: flex;
flex-direction: column;
margin-top: 1rem;
}
.results_aggregated .result h1 a {
font-size: 1.7rem;
font-weight: normal;
color: var(--color-two);
text-decoration: none;
}
.results_aggregated .result h1 a:hover {
color: var(--color-five);
}
.results_aggregated .result h1 a:visited {
color: var(--background-color);
}
.results_aggregated .result small {
color: var(--color-three);
font-size: 1.3rem;
word-wrap: break-word;
line-break: anywhere;
}
.results_aggregated .result p {
color: var(--foreground-color);
font-size: 1.4rem;
line-height: 2.4rem;
margin-top: 0.3rem;
word-wrap: break-word;
line-break: anywhere;
}
.results_aggregated .result .upstream_engines {
text-align: right;
font-size: 1.2rem;
padding: 1rem;
color: var(--color-five);
display: flex;
gap: 1rem;
justify-content: right;
}
```
#### Styles for the 404 page
```css
.error_container {
display: flex;
justify-content: center;
align-items: center;
width: 100%;
gap: 5rem;
}
.error_container img {
width: 30%;
}
.error_content {
display: flex;
flex-direction: column;
justify-content: center;
gap: 1rem;
}
.error_content h1,
.error_content h2 {
letter-spacing: 0.1rem;
}
.error_content h1 {
font-size: 3rem;
}
.error_content h2 {
font-size: 2rem;
}
.error_content p {
font-size: 1.2rem;
}
.error_content p a,
.error_content p a:visited {
color: var(--color-two);
text-decoration: none;
}
.error_content p a:hover {
color: var(--color-five);
}
```
#### Styles for the previous and next button on the search page
```css
.page_navigation {
padding: 0 0 2rem;
display: flex;
justify-content: space-between;
align-items: center;
}
.page_navigation button {
background: var(--background-color);
color: var(--foreground-color);
padding: 1rem;
border-radius: 0.5rem;
outline: none;
border: none;
}
.page_navigation button:active {
filter: brightness(1.2);
}
```
#### Styles for the about page
This part is only available right now in the **rolling/edge/unstable** version
```css
.about-container article {
font-size: 1.5rem;
color: var(--foreground-color);
padding-bottom: 10px;
}
.about-container article h1 {
color: var(--color-two);
font-size: 2.8rem;
}
.about-container article div {
padding-bottom: 15px;
}
.about-container a {
color: var(--color-three);
}
.about-container article h2 {
color: var(--color-three);
font-size: 1.8rem;
padding-bottom: 10px;
}
.about-container p {
color: var(--foreground-color);
font-size: 1.6rem;
padding-bottom: 10px;
}
.about-container h3 {
font-size: 1.5rem;
}
.about-container {
width: 80%;
}
```
#### Styles for the Settings Page
This part is only available right now in the **rolling/edge/unstable** version
```css
.settings_container {
display: flex;
justify-content: space-around;
width: 80dvw;
margin: 5rem 0;
}
.settings h1 {
color: var(--color-two);
font-size: 2.5rem;
}
.settings > h1 {
margin-bottom: 4rem;
margin-left: 2rem;
}
.settings hr {
border-color: var(--color-three);
margin: 0.3rem 0 1rem;
}
.settings > hr {
margin-left: 2rem;
}
.settings_container .sidebar {
width: 30%;
cursor: pointer;
font-size: 2rem;
display: flex;
flex-direction: column;
margin-right: 0.5rem;
margin-left: -0.7rem;
padding: 0.7rem;
border-radius: 5px;
margin-bottom: 0.5rem;
color: var(--foreground-color);
text-transform: capitalize;
gap: 1.5rem;
}
.settings_container .sidebar .btn {
padding: 2rem;
border-radius: 0.5rem;
outline-offset: 3px;
outline: 2px solid transparent;
}
.settings_container .sidebar .btn:active {
outline: 2px solid var(--color-two);
}
.settings_container .sidebar .btn:not(.active):hover {
color: var(--color-two);
}
.settings_container .sidebar .btn.active {
background-color: var(--color-two);
color: var(--background-color);
}
.settings_container .main_container {
width: 70%;
border-left: 1.5px solid var(--color-three);
padding-left: 3rem;
border: none;
}
.settings_container .tab {
display: none;
}
.settings_container .tab.active {
display: flex;
gap: 1.2rem;
flex-direction: column;
justify-content: space-around;
}
.settings_container button {
margin-top: 1rem;
padding: 1rem 2rem;
font-size: 1.5rem;
background: var(--color-three);
color: var(--background-color);
border-radius: 0.5rem;
border: 2px solid transparent;
font-weight: bold;
transition: all 0.1s ease-out;
cursor: pointer;
box-shadow: 5px 5px;
outline: none;
}
.settings_container button:active {
box-shadow: none;
translate: 5px 5px;
}
.settings_container .main_container .message {
font-size: 1.5rem;
color: var(--foreground-color);
}
.settings_container .tab h3 {
font-size: 2rem;
font-weight: bold;
color: var(--color-four);
margin-top: 1.5rem;
text-transform: capitalize;
}
.settings_container .tab .description {
font-size: 1.5rem;
margin-bottom: 0.5rem;
color: var(--foreground-color);
}
.settings_container .user_interface select,
.settings_container .general select {
margin: 0.7rem 0;
width: 20rem;
background-color: var(--color-one);
color: var(--foreground-color);
padding: 1rem 2rem;
border-radius: 0.5rem;
outline: none;
border: none;
text-transform: capitalize;
}
.settings_container .user_interface option:hover,
.settings_container .general option:hover {
background-color: var(--color-one);
}
.settings_container .engines .engine_selection {
display: flex;
flex-direction: column;
justify-content: center;
padding: 1rem 0;
margin-bottom: 2rem;
gap: 2rem;
}
.settings_container .engines .toggle_btn {
color: var(--foreground-color);
font-size: 1.5rem;
display: flex;
align-items: center;
border-radius: 100px;
gap: 1.5rem;
letter-spacing: 1px;
}
.settings_container .engines hr {
margin: 0;
}
.settings_container .cookies input {
margin: 1rem 0;
}
```
#### Styles for the Toggle Button
This part is only available right now in the **rolling/edge/unstable** version
```css
/* The switch - the box around the slider */
.switch {
position: relative;
display: inline-block;
width: 6rem;
height: 3.4rem;
}
/* Hide default HTML checkbox */
.switch input {
opacity: 0;
width: 0;
height: 0;
}
/* The slider */
.slider {
position: absolute;
cursor: pointer;
inset: 0;
background-color: var(--foreground-color);
transition: 0.2s;
outline-offset: 3px;
outline: 2px solid transparent;
}
.slider:active {
outline: 2px solid var(--foreground-color);
}
.slider::before {
position: absolute;
content: '';
height: 2.6rem;
width: 2.6rem;
left: 0.4rem;
bottom: 0.4rem;
background-color: var(--background-color);
transition: 0.2s;
}
input:checked + .slider {
background-color: var(--color-three);
}
input:focus + .slider {
box-shadow: 0 0 1px var(--color-three);
}
input:checked + .slider::before {
transform: translateX(2.6rem);
}
/* Rounded sliders */
.slider.round {
border-radius: 3.4rem;
}
.slider.round::before {
border-radius: 50%;
}
```
## Animations
### Built-in
By default `websurfx` comes with 1 animation to choose from which can be easily chosen using the config file or via the settings page on the website.
> To learn how to change animations using the config file, see: [**Configuration**](https://github.com/neon-mmd/websurfx/wiki/configuration)
### Custom
To write a custom animation, the user needs some knowledge of `themes` and the `HTML of the page for which the animation is being provided`.
The animations can be of 2 categories:
- Theme specific animations
- Universal animations
#### Theme Specific Animations
These animations can only be used with a specific theme and should not be used with other themes otherwise it either won't look good or won't work at all or would work partially.
Here is an example of `simple-frosted-glow` animation for the `simple theme` (which we provide by default with the app) which will give you a better idea on how to create a custom animation for a specific theme:
```css
.results_aggregated .result {
margin: 1rem;
padding: 1rem;
border-radius: 1rem;
}
.results_aggregated .result:hover {
box-shadow:
inset 0 0 3rem var(--color-two),
inset 0 0 6rem var(--color-five),
inset 0 0 9rem var(--color-three),
0 0 0.25rem var(--color-two),
0 0 0.5rem var(--color-five),
0 0 0.75rem var(--color-three);
}
```
#### Universal Animations
These animations are independent of the theme being used and can be used with all the themes.
Here is an example of `text-tilt` animation which will give you an idea on how to create universal animations for the search engine website.
```css
.results_aggregated .result:hover {
transform: skewX(10deg);
}
```
> [!Note]
> 1. The above-mentioned examples of animations were covered for the search page of the search engine website. The same approach to creating custom animations can also be applied to the other pages.
> 2. When naming the file for a new animation, follow these naming conventions:
> 1. If the animation is theme specific then the name of the animation file should look like this:
> `<name of the theme this animation is for><separated by a hyphen or dash><name of the animation with whitespaces replaced with hyphens>`
> **For example:**
> If the animation to make search results frosty glow on hover was to be created for the `simple` theme then the name of the file would look something like this:
> `simple-frosted-glow`
> Where `simple` is the name of the theme the animation targets and `frosted-glow` is the name of the animation where each word has been separated by a hyphen.
> 2. If the animation is not theme specific (a universal animation) then the name of the animation file should look like this:
> `<name of the animation with whitespaces replaced with hyphens>`
> **For example:**
> If the animation to make search results text tilt on hover was to be created then the name of the file would look something like this:
> `text-tilt`
> Where `text-tilt` is the name of the animation where each word has been separated by a hyphen. (While naming the files for these types of animations, you do not need to prefix the file name with a theme name.)
[⬅️ Go back to Home](./README.md)

94
flake.lock generated
View file

@ -1,94 +0,0 @@
{
"nodes": {
"naersk": {
"inputs": {
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1694081375,
"narHash": "sha256-vzJXOUnmkMCm3xw8yfPP5m8kypQ3BhAIRe4RRCWpzy8=",
"owner": "nix-community",
"repo": "naersk",
"rev": "3f976d822b7b37fc6fb8e6f157c2dd05e7e94e89",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "master",
"repo": "naersk",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1695318763,
"narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=",
"path": "/nix/store/p7iz0r8gs6ppkhj83zjmwyd21k8b7v3y-source",
"rev": "e12483116b3b51a185a33a272bf351e357ba9a99",
"type": "path"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1725194671,
"narHash": "sha256-tLGCFEFTB5TaOKkpfw3iYT9dnk4awTP/q4w+ROpMfuw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b833ff01a0d694b910daca6e2ff4a3f26dee478c",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"naersk": "naersk",
"nixpkgs": "nixpkgs_2",
"utils": "utils"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -1,63 +0,0 @@
{
# Websurfx NixOS flake
inputs = {
naersk.url = "github:nix-community/naersk/master";
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
utils.url = "github:numtide/flake-utils";
};
outputs = {
naersk,
nixpkgs,
self,
utils,
}:
# We do this for all systems - namely x86_64-linux, aarch64-linux,
# x86_64-darwin and aarch64-darwin
utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
naersk-lib = pkgs.callPackage naersk {};
in rec {
# Build via "nix build .#default"
packages.default = naersk-lib.buildPackage {
# The build dependencies
buildInputs = with pkgs; [pkg-config openssl];
src = ./.;
};
# Enter devshell with all the tools via "nix develop"
# or "nix-shell"
devShells.default = with pkgs;
mkShell {
buildInputs = [
actionlint
cargo
docker
haskellPackages.hadolint
nodejs
nodePackages_latest.cspell
eslint
nodePackages_latest.markdownlint-cli2
nodePackages_latest.stylelint
redis
rustPackages.clippy
rust-analyzer
cargo-watch
rustc
rustfmt
yamllint
openssl
pkg-config
];
RUST_SRC_PATH = rustPlatform.rustLibSrc;
shellHook = ''
export PATH="$PATH:$HOME/.cargo/bin"
export NODE_PATH="$NODE_PATH:./node_modules"
'';
};
# Build via "nix build .#websurfx", which is basically just
# calls the build function
packages.websurfx = packages.default;
});
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 73 KiB

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.8 KiB

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 80 KiB

After

Width:  |  Height:  |  Size: 158 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-x"><line x1="18" y1="6" x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line></svg>

Before

Width:  |  Height:  |  Size: 299 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 100 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" stroke-width="1.5" color="#000" viewBox="0 0 24 24" style="--darkreader-inline-color:#e8e6e3"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M12 11.5v5M12 7.51l.01-.011M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" style="--darkreader-inline-stroke:#000000"/></svg>

Before

Width:  |  Height:  |  Size: 409 B

View file

@ -1 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 50 50" width="24px" height="24px"><circle fill="none" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-miterlimit="10" cx="21" cy="20" r="16"/><line fill="none" stroke="#000000" stroke-width="4" stroke-miterlimit="10" x1="32.229" y1="32.229" x2="45.5" y2="45.5"/></svg>

Before

Width:  |  Height:  |  Size: 610 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 233 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 71 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" stroke-width="1.5" color="#000" viewBox="0 0 24 24" style="--darkreader-inline-color:#e8e6e3"><path stroke="#000" stroke-linecap="round" stroke-width="1.5" d="M20.043 21H3.957c-1.538 0-2.5-1.664-1.734-2.997l8.043-13.988c.77-1.337 2.699-1.337 3.468 0l8.043 13.988C22.543 19.336 21.58 21 20.043 21zM12 9v4" style="--darkreader-inline-stroke:#000000"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M12 17.01l.01-.011" style="--darkreader-inline-stroke:#000000"/></svg>

Before

Width:  |  Height:  |  Size: 583 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.1 KiB

View file

@ -1,15 +0,0 @@
.results_aggregated .result {
margin: 1rem;
padding: 1rem;
border-radius: 1rem;
}
.results_aggregated .result:hover {
box-shadow:
inset 0 0 3rem var(--color-two),
inset 0 0 6rem var(--color-five),
inset 0 0 9rem var(--color-three),
0 0 0.25rem var(--color-two),
0 0 0.5rem var(--color-five),
0 0 0.75rem var(--color-three);
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #1e1e2e;
--foreground-color: #cdd6f4;
--logo-color: #f5c2e7;
--color-one: #45475a;
--color-two: #f38ba8;
--color-three: #a6e3a1;
--color-four: #f9e2af;
--color-five: #89b4fa;
--color-six: #f5c2e7;
--color-seven: #fff;
--bg: #1e1e2e;
--fg: #cdd6f4;
--1: #45475a;
--2: #f38ba8;
--3: #a6e3a1;
--4: #f9e2af;
--5: #89b4fa;
--6: #f5c2e7;
--7: #ffffff;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #000;
--foreground-color: #fff;
--logo-color: #e0e0e0;
--color-one: #121212;
--color-two: #808080;
--color-three: #999;
--color-four: #666;
--color-five: #bfbfbf;
--color-six: #e0e0e0;
--color-seven: #555;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #44475a;
--foreground-color: #8be9fd;
--logo-color: #ffb86c;
--color-one: #f55;
--color-two: #50fa7b;
--color-three: #ffb86c;
--color-four: #bd93f9;
--color-five: #ff79c6;
--color-six: #94a3a5;
--color-seven: #fff;
--bg: #44475a;
--fg: #8be9fd;
--1: #ff5555;
--2: #50fa7b;
--3: #ffb86c;
--4: #bd93f9;
--5: #ff79c6;
--6: #94a3a5;
--7: #ffffff;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #1d2021;
--foreground-color: #ebdbb2;
--logo-color: #ebdbb2;
--color-one: #282828;
--color-two: #98971a;
--color-three: #d79921;
--color-four: #458588;
--color-five: #b16286;
--color-six: #689d6a;
--color-seven: #fff;
--bg: #282828;
--fg: #ebdbb2;
--1: #cc241d;
--2: #98971a;
--3: #d79921;
--4: #458588;
--5: #b16286;
--6: #689d6a;
--7: #ffffff;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #49483Eff;
--foreground-color: #FFB269;
--logo-color: #ffd866;
--color-one: #272822ff;
--color-two: #61AFEF;
--color-three: #ffd866;
--color-four: #fc9867;
--color-five: #ab9df2;
--color-six: #78dce8;
--color-seven: #fff;
--bg: #403e41;
--fg: #fcfcfa;
--1: #ff6188;
--2: #a9dc76;
--3: #ffd866;
--4: #fc9867;
--5: #ab9df2;
--6: #78dce8;
--7: #ffffff;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #122736ff;
--foreground-color: #a2e2a9;
--logo-color: #e2ecd6;
--color-one: #121B2Cff;
--color-two: #f08282;
--color-three: #ABC5AAff;
--color-four: #e6d2d2;
--color-five: #81a1c1;
--color-six: #e2ecd6;
--color-seven: #fff;
--bg: #2e3440;
--fg: #d8dee9;
--1: #3b4252;
--2: #bf616a;
--3: #a3be8c;
--4: #ebcb8b;
--5: #81a1c1;
--6: #b48ead;
--7: #ffffff;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #1b2b34;
--foreground-color: #d8dee9;
--logo-color: #d8dee9;
--color-one: #343d46;
--color-two: #5FB3B3ff;
--color-three: #69Cf;
--color-four: #99c794;
--color-five: #69c;
--color-six: #c594c5;
--color-seven: #D8DEE9ff;
--bg: #1b2b34;
--fg: #d8dee9;
--1: #343d46;
--2: #ec5f67;
--3: #99c794;
--4: #fac863;
--5: #6699cc;
--6: #c594c5;
--7: #ffffff;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #282c34;
--foreground-color: #abb2bf;
--logo-color: #c8ccd4;
--color-one: #3b4048;
--color-two: #a3be8c;
--color-three: #b48ead;
--color-four: #c8ccd4;
--color-five: #e06c75;
--color-six: #61afef;
--color-seven: #be5046;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #faf4ed;
--foreground-color: #575279;
--logo-color: #d7827e;
--color-one: #f2e9e1;
--color-two: #907aa9;
--color-three: #56949f;
--color-four: #ea9d34;
--color-five: #d7827e;
--color-six: #9893a5;
--color-seven: #575279;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #232136;
--foreground-color: #e0def4;
--logo-color: #ea9a97;
--color-one: #393552;
--color-two: #c4a7e7;
--color-three: #9ccfd8;
--color-four: #f6c177;
--color-five: #ea9a97;
--color-six: #6e6a86;
--color-seven: #e0def4;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #191724;
--foreground-color: #e0def4;
--logo-color: #ebbcba;
--color-one: #26233a;
--color-two: #c4a7e7;
--color-three: #9ccfd8;
--color-four: #f6c177;
--color-five: #eb6f92;
--color-six: #6e6a86;
--color-seven: #e0def4;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #002b36;
--foreground-color: #c9e0e6;
--logo-color: #EEE8D5ff;
--color-one: #073642;
--color-two: #2AA198ff;
--color-three: #2AA198ff;
--color-four: #EEE8D5ff;
--color-five: #268bd2;
--color-six: #d33682;
--color-seven: #fff;
--bg: #002b36;
--fg: #839496;
--1: #073642;
--2: #dc322f;
--3: #859900;
--4: #b58900;
--5: #268bd2;
--6: #d33682;
--7: #ffffff;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #EEE8D5ff;
--foreground-color: #b1ab97;
--logo-color: #586E75;
--color-one: #fdf6e3;
--color-two: #DC322Fff;
--color-three: #586E75ff;
--color-four: #b58900;
--color-five: #268bd2;
--color-six: #d33682;
--color-seven: #fff;
--bg: #fdf6e3;
--fg: #657b83;
--1: #073642;
--2: #dc322f;
--3: #859900;
--4: #b58900;
--5: #268bd2;
--6: #d33682;
--7: #ffffff;
}

View file

@ -1,12 +0,0 @@
:root {
--background-color: #1a1b26;
--foreground-color: #c0caf5;
--logo-color: #e2afff;
--color-one: #32364a;
--color-two: #a9b1d6;
--color-three: #5a5bb8;
--color-four: #6b7089;
--color-five: #e2afff;
--color-six: #a9a1e1;
--color-seven: #988bc7;
}

View file

@ -1,12 +1,11 @@
:root {
--background-color: #35383Cff;
--foreground-color: #D7DAD8ff;
--logo-color: #D7DAD8ff;
--color-one: #1d1f21;
--color-two: #D77C79ff;
--color-three: #f0c674;
--color-four: #92B2CAff;
--color-five: #C0A7C7ff;
--color-six: #9AC9C4ff;
--color-seven: #fff;
--bg: #1d1f21;
--fg: #c5c8c6;
--1: #cc6666;
--2: #b5bd68;
--3: #f0c674;
--4: #81a2be;
--5: #b294bb;
--6: #8abeb7;
--7: #ffffff;
}

View file

@ -1,94 +0,0 @@
/**
 * Parses the client settings cookie and restores the user's previously saved
 * preferences in the settings page UI: the select dropdowns (theme,
 * colorscheme, animation, safe search level) and the upstream engine
 * checkboxes, including the "select all" box when every engine was saved.
 *
 * @function
 * @param {string} cookie - It takes the client settings cookie as a string.
 * @returns {void}
 */
function setClientSettingsOnPage(cookie) {
    // NOTE(review): this reduce parses each `name=value` pair's value as JSON
    // and keeps the last truthy parse result — it appears to assume the
    // settings cookie is the only (or last) cookie present; confirm against
    // how the settings-save code writes the cookie.
    let cookie_value = cookie
        .split(';')
        .map((item) => item.split('='))
        .reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})
    // Restore each dropdown's selection from the matching cookie field.
    document.querySelectorAll('select').forEach((select_tag) => {
        switch (select_tag.name) {
            case 'themes':
                select_tag.value = cookie_value['theme']
                break
            case 'colorschemes':
                select_tag.value = cookie_value['colorscheme']
                break
            case 'animations':
                select_tag.value = cookie_value['animation']
                break
            case 'safe_search_levels':
                select_tag.value = cookie_value['safe_search_level']
                break
        }
    })
    let engines = document.querySelectorAll('.engine')
    let engines_cookie = cookie_value['engines']
    // If every engine was saved, tick "select all" and every engine checkbox.
    if (engines_cookie.length === engines.length) {
        document.querySelector('.select_all').checked = true
        engines.forEach((engine_checkbox) => {
            engine_checkbox.checked = true
        })
    } else {
        // Otherwise clear all checkboxes first, then re-check only those
        // engines whose display names (checkbox's grandparent text) match a
        // saved engine name from the cookie.
        engines.forEach((engines_checkbox) => {
            engines_checkbox.checked = false
        })
        engines_cookie.forEach((engine_name) => {
            engines.forEach((engine_checkbox) => {
                if (
                    engine_checkbox.parentNode.parentNode.innerText.trim() ===
                    engine_name.trim()
                ) {
                    engine_checkbox.checked = true
                }
            })
        })
    }
}
/**
 * On page load, reads the cookies saved on the user's machine (if any),
 * displays them in the cookies tab input, and applies the stored client
 * settings to the settings page. Shows an informative message when no
 * cookie exists and an error message when decoding fails.
 *
 * @function
 * @listens DOMContentLoaded
 * @returns {void}
 */
document.addEventListener(
    'DOMContentLoaded',
    () => {
        const cookie_field = document.querySelector('.cookies input')
        try {
            // Decode the raw cookie string before displaying/parsing it.
            const decoded_cookie = decodeURIComponent(document.cookie)
            if (!decoded_cookie.length) {
                // Nothing saved yet — tell the user instead of showing a blank field.
                cookie_field.value = 'No cookies have been saved on your system'
            } else {
                cookie_field.value = decoded_cookie
                // Reflect the saved preferences in the settings page widgets.
                setClientSettingsOnPage(decoded_cookie)
            }
        } catch (error) {
            // Decoding failed — log the details and surface a short error message.
            console.error('Error decoding cookie:', error)
            cookie_field.value = 'Error decoding cookie'
        }
    },
    false,
)

View file

@ -1,7 +0,0 @@
/**
 * Toggles the visibility of the dropdown error box on the search page by
 * flipping its `show` class.
 */
function toggleErrorBox() {
    const error_box = document.querySelector('.dropdown_error_box')
    error_box.classList.toggle('show')
}

View file

@ -1,6 +1,10 @@
/**
* A function that clears the search input text when the clear button is clicked.
*/
function clearSearchText() {
document.querySelector('.search_bar > input').value = ''
let search_box = document.querySelector('input')
function search_web() {
window.location = `search?q=${search_box.value}`
}
search_box.addEventListener('keyup', (e) => {
if (e.keyCode === 13) {
search_web()
}
})

View file

@ -0,0 +1,26 @@
/**
 * Navigates to the next page of search results by incrementing the `page`
 * query parameter. A missing parameter is treated as page 1, so the first
 * forward navigation goes to page 2.
 */
function navigate_forward() {
    const url = new URL(window.location)
    const query = url.searchParams.get('q')
    const current_page = url.searchParams.get('page')
    // `+current_page` mirrors the implicit string-to-number coercion of `++`.
    const next_page = current_page === null ? 2 : +current_page + 1
    window.location = `${url.origin}${url.pathname}?q=${query}&page=${next_page}`
}
/**
 * Navigates to the previous page of search results by decrementing the
 * `page` query parameter. Does nothing when the parameter is absent or the
 * current page is not greater than 1.
 */
function navigate_backward() {
    const url = new URL(window.location)
    const query = url.searchParams.get('q')
    let current_page = url.searchParams.get('page')
    // `!(x > 1)` (rather than `x <= 1`) preserves the no-op on non-numeric values.
    if (current_page === null || !(current_page > 1)) {
        return
    }
    window.location = `${url.origin}${url.pathname}?q=${query}&page=${--current_page}`
}

View file

@ -1,155 +0,0 @@
/**
 * Syncs every upstream search engine checkbox on the engines tab of the
 * settings page with the current state of the "select all" checkbox.
 */
function toggleAllSelection() {
    // Read the master checkbox once; apply its state to every engine box.
    const select_all_state = document.querySelector('.select_all').checked
    for (const engine_checkbox of document.querySelectorAll('.engine')) {
        engine_checkbox.checked = select_all_state
    }
}
/**
 * Shows only the settings section that corresponds to the clicked sidebar
 * button by moving the `active` class to that button and its matching tab.
 *
 * @param {HTMLElement} current_tab - The sidebar button that was clicked.
 */
function setActiveTab(current_tab) {
    // Deactivate every tab pane and every sidebar button first.
    for (const element of document.querySelectorAll('.tab, .btn')) {
        element.classList.remove('active')
    }
    // The tab pane's class name is derived from the button's label:
    // lower-cased, with the first space replaced by an underscore.
    const tab_class = current_tab.innerText.toLowerCase().replace(' ', '_')
    current_tab.classList.add('active')
    document.querySelector(`.${tab_class}`).classList.add('active')
}
/**
 * Collects the user's selected preferences from the settings page (theme,
 * colorscheme, animation, safe search level, and the enabled upstream
 * engines) and saves them in an `appCookie` cookie on the user's machine
 * that expires one year from now. A confirmation message is shown for
 * 10 seconds after saving.
 */
function setClientSettings() {
    // Object holding the preferences that will be serialized into the cookie.
    let cookie_dictionary = {}
    // Read each dropdown's current selection into the dictionary.
    document.querySelectorAll('select').forEach((select_tag) => {
        switch (select_tag.name) {
            case 'themes':
                cookie_dictionary['theme'] = select_tag.value
                break
            case 'colorschemes':
                cookie_dictionary['colorscheme'] = select_tag.value
                break
            case 'animations':
                // An empty selection is stored as null (no animation).
                cookie_dictionary['animation'] = select_tag.value || null
                break
            case 'safe_search_levels':
                cookie_dictionary['safe_search_level'] = Number(select_tag.value)
                break
        }
    })
    // Collect the display names of all checked upstream engine checkboxes
    // (the name is the trimmed text of the checkbox's grandparent element).
    let engines = []
    document.querySelectorAll('.engine').forEach((engine_checkbox) => {
        if (engine_checkbox.checked) {
            engines.push(engine_checkbox.parentNode.parentNode.innerText.trim())
        }
    })
    cookie_dictionary['engines'] = engines
    // Set the expiration date for the cookie to 1 year from the current date.
    let expiration_date = new Date()
    expiration_date.setFullYear(expiration_date.getFullYear() + 1)
    // Save the cookie to the user's machine.
    document.cookie = `appCookie=${JSON.stringify(
        cookie_dictionary,
    )}; expires=${expiration_date.toUTCString()}`
    // Display a success message to the user (typo fixed: "successfully").
    document.querySelector('.message').innerText =
        '✅ The settings have been saved successfully!!'
    // Clear the success message after 10 seconds.
    setTimeout(() => {
        document.querySelector('.message').innerText = ''
    }, 10000)
}
/**
 * Reads the saved `appCookie` (if present) and applies the stored
 * preferences to the website by pointing the theme / colorscheme /
 * animation `<link>` tags at the chosen stylesheets. When no cookie exists
 * the function does nothing and the default server-side settings are used.
 */
function getClientSettings() {
    // Get the appCookie from the user's machine.
    let cookie = decodeURIComponent(document.cookie)
    // If the cookie is empty there is nothing to apply.
    if (cookie.length) {
        // NOTE(review): parses each `name=value` pair's value as JSON, keeping
        // the last truthy parse result — assumes the settings cookie dominates
        // document.cookie; confirm against how setClientSettings writes it.
        let cookie_value = cookie
            .split(';')
            .map((item) => item.split('='))
            .reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {})
        let links = Array.from(document.querySelectorAll('link'))
        // If no animation stylesheet link exists under <head> yet, create one
        // (when an animation is selected) and retarget the theme/colorscheme
        // links. Otherwise retarget all three kinds of links in place.
        if (!links.some((item) => item.href.includes('static/animations'))) {
            if (cookie_value['animation']) {
                let animation_link = document.createElement('link')
                animation_link.href = `static/animations/${cookie_value['animation']}.css`
                animation_link.rel = 'stylesheet'
                animation_link.type = 'text/css'
                document.querySelector('head').appendChild(animation_link)
            }
            // Update theme and colorscheme links to the saved preferences.
            links.forEach((item) => {
                if (item.href.includes('static/themes')) {
                    item.href = `static/themes/${cookie_value['theme']}.css`
                } else if (item.href.includes('static/colorschemes')) {
                    item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
                }
            })
        } else {
            // Update all link tags to match the user's saved preferences.
            links.forEach((item) => {
                if (item.href.includes('static/themes')) {
                    item.href = `static/themes/${cookie_value['theme']}.css`
                } else if (item.href.includes('static/colorschemes')) {
                    item.href = `static/colorschemes/${cookie_value['colorscheme']}.css`
                } else if (
                    item.href.includes('static/animations') &&
                    cookie_value['animation']
                ) {
                    // BUGFIX: this previously pointed at `static/colorschemes/`,
                    // loading the wrong stylesheet for the animation.
                    item.href = `static/animations/${cookie_value['animation']}.css`
                }
            })
            if (!cookie_value['animation']) {
                // BUGFIX: removeChild was called with an Array (the result of
                // Array.filter), which throws; remove the actual <link> node.
                let animation_link = links.find((item) =>
                    item.href.includes('static/animations'),
                )
                if (animation_link) {
                    document.querySelector('head').removeChild(animation_link)
                }
            }
        }
    }
}

View file

@ -1,13 +1,4 @@
/* @import url('./catppuccin-mocha.css'); */
@font-face {
font-family: Rubik;
font-style: normal;
font-weight: 200 600;
font-stretch: 0% 200%;
font-display: swap;
src: url('https://fonts.gstatic.com/s/rubik/v28/iJWKBXyIfDnIV7nErXyi0A.woff2')
format('woff2');
}
* {
padding: 0;
@ -24,14 +15,8 @@ body {
flex-direction: column;
justify-content: space-between;
align-items: center;
min-height: 100vh;
font-family: Rubik, sans-serif;
background-color: var(--background-color);
}
/* enforce font for buttons */
button {
font-family: Rubik, sans-serif;
height: 100vh;
background: var(--1);
}
/* styles for the index page */
@ -44,10 +29,6 @@ button {
align-items: center;
}
.search-container svg {
color: var(--logo-color);
}
.search-container div {
display: flex;
}
@ -56,66 +37,32 @@ button {
.search_bar {
display: flex;
gap: 10px;
align-items: center;
}
.search_bar input {
border-radius: 6px;
padding: 2.6rem 2.2rem;
padding: 1rem;
width: 50rem;
height: 3rem;
outline: none;
border: none;
box-shadow: rgb(0 0 0 / 1);
background-color: var(--color-one);
color: var(--foreground-color);
outline-offset: 3px;
font-size: 1.6rem;
}
.search_bar input::-webkit-search-results-button,
.search_bar input::-webkit-search-cancel-button{
display: none;
}
.search_bar input:focus {
outline: 2px solid var(--foreground-color);
}
.search_bar input::placeholder {
color: var(--foreground-color);
opacity: 1;
box-shadow: rgba(0, 0, 0, 1);
background: var(--fg);
}
.search_bar button {
padding: 2.6rem 3.2rem;
border-radius: 6px;
padding: 1rem;
border-radius: 0;
height: 3rem;
display: flex;
justify-content: center;
align-items: center;
outline-offset: 3px;
outline: 2px solid transparent;
outline: none;
border: none;
transition: 0.1s;
gap: 0;
background-color: var(--color-six);
color: var(--background-color);
background: var(--bg);
color: var(--3);
font-weight: 600;
letter-spacing: 0.1rem;
position: relative;
}
.search_bar button img {
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
}
.search_bar button:active {
outline: 2px solid var(--color-three);
}
.search_bar button:active,
@ -123,162 +70,15 @@ button {
filter: brightness(1.2);
}
.search_area .search_options {
display: flex;
justify-content: space-between;
align-items: center;
}
.search_area .search_options select {
margin: 0.7rem 0;
width: 20rem;
background-color: var(--color-one);
color: var(--foreground-color);
padding: 1.2rem 2rem;
border-radius: 0.5rem;
outline-offset: 3px;
outline: 2px solid transparent;
border: none;
text-transform: capitalize;
}
.search_area .search_options select:active,
.search_area .search_options select:hover {
outline: 2px solid var(--color-three);
}
.search_area .search_options option:hover {
background-color: var(--color-one);
}
.result_not_found {
display: flex;
flex-direction: column;
font-size: 1.5rem;
color: var(--foreground-color);
}
.result_not_found p {
margin: 1rem 0;
}
.result_not_found ul {
margin: 1rem 0;
}
.result_not_found img {
width: 40rem;
}
/* styles for the error box */
.error_box .error_box_toggle_button {
background: var(--foreground-color);
}
.error_box .dropdown_error_box {
position: absolute;
display: none;
flex-direction: column;
background: var(--background-color);
border-radius: 0;
margin-left: 2rem;
min-height: 20rem;
min-width: 22rem;
}
.error_box .dropdown_error_box.show {
display: flex;
}
.error_box .dropdown_error_box .error_item,
.error_box .dropdown_error_box .no_errors {
display: flex;
align-items: center;
color: var(--foreground-color);
letter-spacing: 0.1rem;
padding: 1rem;
font-size: 1.2rem;
}
.error_box .dropdown_error_box .error_item {
justify-content: space-between;
}
.error_box .dropdown_error_box .no_errors {
min-height: 18rem;
justify-content: center;
}
.error_box .dropdown_error_box .error_item:hover {
box-shadow: inset 0 0 100px 100px rgb(255 255 255 / 0.1);
}
.error_box .error_item .severity_color {
width: 1.2rem;
height: 1.2rem;
}
.results .result_disallowed,
.results .result_filtered,
.results .result_engine_not_selected {
display: flex;
justify-content: center;
align-items: center;
gap: 10rem;
font-size: 2rem;
color: var(--foreground-color);
margin: 0 7rem;
}
.results .result_disallowed .user_query,
.results .result_filtered .user_query,
.results .result_engine_not_selected .user_query {
color: var(--background-color);
font-weight: 300;
}
.results .result_disallowed img,
.results .result_filtered img,
.results .result_engine_not_selected img {
width: 30rem;
}
.results .result_disallowed div,
.results .result_filtered div,
.results .result_engine_not_selected div {
display: flex;
flex-direction: column;
gap: 1rem;
line-break: strict;
}
/* styles for the footer and header */
header {
background: var(--bg);
width: 100%;
background: var(--background-color);
display: flex;
justify-content: right;
align-items: center;
justify-content: space-between;
padding: 2rem 3rem;
}
footer {
width: 100%;
background: var(--background-color);
display: flex;
align-items: center;
padding: 1.7rem 1.7rem 4rem;
gap: 1.8rem;
flex-direction: column;
justify-content: center;
}
header h1 a {
text-transform: capitalize;
text-decoration: none;
color: var(--foreground-color);
letter-spacing: 0.1rem;
padding: 1rem;
}
header ul,
@ -296,7 +96,7 @@ footer ul li a,
header ul li a:visited,
footer ul li a:visited {
text-decoration: none;
color: var(--color-two);
color: var(--2);
text-transform: capitalize;
letter-spacing: 0.1rem;
}
@ -307,12 +107,12 @@ header ul li a {
header ul li a:hover,
footer ul li a:hover {
color: var(--color-five);
color: var(--5);
}
footer div span {
font-size: 1.5rem;
color: var(--color-four);
color: var(--4);
}
footer div {
@ -320,6 +120,16 @@ footer div {
gap: 1rem;
}
footer {
background: var(--bg);
width: 100%;
padding: 1rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
/* Styles for the search page */
.results {
@ -327,11 +137,6 @@ footer div {
display: flex;
flex-direction: column;
justify-content: space-around;
gap: 1rem;
}
.result {
gap: 1rem;
}
.results .search_bar {
@ -343,7 +148,6 @@ footer div {
flex-direction: column;
justify-content: space-between;
margin: 2rem 0;
content-visibility: auto;
}
.results_aggregated .result {
@ -353,31 +157,30 @@ footer div {
}
.results_aggregated .result h1 a {
font-size: 1.7rem;
font-weight: normal;
color: var(--color-two);
font-size: 1.5rem;
color: var(--2);
text-decoration: none;
letter-spacing: 0.1rem;
}
.results_aggregated .result h1 a:hover {
color: var(--color-five);
color: var(--5);
}
.results_aggregated .result h1 a:visited {
color: var(--color-five);
color: var(--bg);
}
.results_aggregated .result small {
color: var(--color-three);
font-size: 1.3rem;
color: var(--3);
font-size: 1.1rem;
word-wrap: break-word;
line-break: anywhere;
}
.results_aggregated .result p {
color: var(--foreground-color);
font-size: 1.4rem;
line-height: 2.4rem;
color: var(--fg);
font-size: 1.2rem;
margin-top: 0.3rem;
word-wrap: break-word;
line-break: anywhere;
@ -387,10 +190,7 @@ footer div {
text-align: right;
font-size: 1.2rem;
padding: 1rem;
color: var(--color-five);
display: flex;
gap: 1rem;
justify-content: right;
color: var(--5);
}
/* Styles for the 404 page */
@ -433,468 +233,67 @@ footer div {
.error_content p a,
.error_content p a:visited {
color: var(--color-two);
color: var(--2);
text-decoration: none;
}
.error_content p a:hover {
color: var(--color-five);
color: var(--5);
}
.page_navigation {
padding: 0 0 2rem;
padding: 0 0 2rem 0;
display: flex;
justify-content: space-between;
align-items: center;
}
.page_navigation a {
background: var(--background-color);
color: var(--foreground-color);
.page_navigation button {
background: var(--bg);
color: var(--fg);
padding: 1rem;
border-radius: 0.5rem;
outline: none;
border: none;
}
.page_navigation a:active {
.page_navigation button:active {
filter: brightness(1.2);
}
/* Styles for the about page */
.about-container article {
font-size: 1.5rem;
color: var(--foreground-color);
padding-bottom: 10px;
max-width: 1100px;
margin: 14rem auto;
display: flex;
flex-direction: column;
row-gap: 100px;
}
.about-container article{
font-size: 1.5rem;
color:var(--fg);
padding-bottom: 10px;
}
.about-container article h1 {
color: var(--color-two);
font-size: 4.5rem;
}
.about-container article h1{
color: var(--2);
font-size: 2.8rem;
}
.about-container article .logo-container {
display: flex;
align-items: center;
justify-content: center;
}
.about-container article div{
padding-bottom: 15px;
}
.about-container article .logo-container svg {
width: clamp(200px, 530px, 815px);
color: var(--logo-color);
}
.about-container article .text-block {
box-shadow: 0 0 0 100vmax var(--foreground-color);
background-color: var(--foreground-color);
clip-path: inset(0 -100vmax);
padding: 90px 0;
display: flex;
gap: 40px;
align-items: center;
justify-content: center;
flex-direction: column;
text-align: center;
color: var(--background-color);
}
.about-container article .text-block .text-block-title {
font-size: 64px;
font-weight: 500;
}
.hero-text-container {
width: 860px;
}
.hero-text {
font-size: 45px;
font-weight: 200;
}
.about-container a {
color: var(--color-three);
}
.about-container {
width: 80%;
margin-bottom: 140px;
}
.feature-list {
padding: 35px;
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
row-gap: 60px;
}
.feature-list-title {
text-align: center;
font-size: 64px;
font-weight: 500;
}
.features {
display: grid;
grid-template-columns: repeat(3, 1fr);
gap: 40px;
}
.feature-card {
background-color: var(--foreground-color);
color: var(--background-color);
text-align: center;
display: flex;
padding: 30px;
border-radius: 24px;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 15px;
}
.feature-card-header {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
row-gap: 15px;
}
.feature-card-header h4 {
font-size: 33px;
font-weight: 500;
}
.feature-card-body p {
font-size: 20px;
font-weight: 200;
}
.about-footnote {
font-size: 24px;
text-align: center;
color: var(--foreground-color);
}
/* Styles for the settings page */
.settings_container {
display: flex;
justify-content: space-around;
width: 80dvw;
margin: 5rem 0;
}
.settings h1 {
color: var(--color-two);
font-size: 2.5rem;
}
.settings > h1 {
margin-bottom: 4rem;
margin-left: 2rem;
}
.settings hr {
border-color: var(--color-three);
margin: 0.3rem 0 1rem;
}
.settings > hr {
margin-left: 2rem;
}
.settings_container .sidebar {
width: 30%;
cursor: pointer;
font-size: 2rem;
display: flex;
flex-direction: column;
margin-right: 0.5rem;
margin-left: -0.7rem;
padding: 0.7rem;
border-radius: 5px;
margin-bottom: 0.5rem;
color: var(--foreground-color);
text-transform: capitalize;
gap: 1.5rem;
}
.settings_container .sidebar .btn {
padding: 2rem;
border-radius: 0.5rem;
outline-offset: 3px;
outline: 2px solid transparent;
}
.settings_container .sidebar .btn:active {
outline: 2px solid var(--color-two);
}
.settings_container .sidebar .btn:not(.active):hover {
color: var(--color-two);
}
.settings_container .sidebar .btn.active {
background-color: var(--color-two);
color: var(--background-color);
}
.settings_container .main_container {
width: 70%;
border-left: 1.5px solid var(--color-three);
padding-left: 3rem;
border: none;
}
.settings_container .tab {
display: none;
}
.settings_container .tab.active {
display: flex;
gap: 1.2rem;
flex-direction: column;
justify-content: space-around;
}
.settings_container button {
margin-top: 1rem;
padding: 1rem 2rem;
font-size: 1.5rem;
background: var(--color-three);
color: var(--background-color);
border-radius: 0.5rem;
border: 2px solid transparent;
font-weight: bold;
transition: all 0.1s ease-out;
cursor: pointer;
box-shadow: 5px 5px;
outline: none;
}
.settings_container button:active {
box-shadow: none;
translate: 5px 5px;
}
.settings_container .main_container .message {
font-size: 1.5rem;
color: var(--foreground-color);
}
.settings_container .tab h3 {
font-size: 2rem;
font-weight: bold;
color: var(--color-four);
margin-top: 1.5rem;
text-transform: capitalize;
}
.settings_container .tab .description,
.settings_container .tab .admin_warning {
font-size: 1.5rem;
margin-bottom: 0.5rem;
}
.settings_container .tab .description {
color: var(--foreground-color);
}
.settings_container .tab .admin_warning {
color: var(--color-two);
}
.settings_container .user_interface select,
.settings_container .general select,
.settings_container .general form input {
margin: 0.7rem 0;
width: 20rem;
background-color: var(--color-one);
color: var(--foreground-color);
padding: 1rem 2rem;
border-radius: 0.5rem;
outline: none;
border: none;
text-transform: capitalize;
}
.settings_container .general form input {
padding: 0;
width: 30rem;
text-align: center;
text-transform: none;
}
.settings_container .general form input::file-selector-button {
content: 'Browse';
padding: 1rem 2rem;
font-size: 1.5rem;
background: var(--color-three);
color: var(--background-color);
border-radius: 0.5rem;
border: 2px solid transparent;
font-weight: bold;
transition: all 0.1s ease-out;
cursor: pointer;
box-shadow: 5px 5px;
outline: none;
translate: -1rem 0;
}
.settings_container .general form input::file-selector-button:active {
box-shadow: none;
translate: 5px 5px;
}
.settings_container .general .export_btn {
margin-bottom: 1rem;
}
.settings_container .user_interface option:hover,
.settings_container .general option:hover {
background-color: var(--color-one);
}
.settings_container .engines .engine_selection {
display: flex;
flex-direction: column;
justify-content: center;
padding: 1rem 0;
margin-bottom: 2rem;
gap: 2rem;
}
.settings_container .engines .toggle_btn {
color: var(--foreground-color);
font-size: 1.5rem;
display: flex;
align-items: center;
border-radius: 100px;
gap: 1.5rem;
letter-spacing: 1px;
}
.settings_container .engines hr {
margin: 0;
}
.settings_container .cookies input {
margin: 1rem 0;
}
/* Styles for the toggle button */
/* The switch - the box around the slider */
.switch {
position: relative;
display: inline-block;
width: 6rem;
height: 3.4rem;
}
/* Hide default HTML checkbox */
.switch input {
opacity: 0;
width: 0;
height: 0;
}
/* The slider */
.slider {
position: absolute;
cursor: pointer;
inset: 0;
background-color: var(--foreground-color);
transition: 0.2s;
outline-offset: 3px;
outline: 2px solid transparent;
}
.slider:active {
outline: 2px solid var(--foreground-color);
}
.slider::before {
position: absolute;
content: '';
height: 2.6rem;
width: 2.6rem;
left: 0.4rem;
bottom: 0.4rem;
background-color: var(--background-color);
transition: 0.2s;
}
input:checked + .slider {
background-color: var(--color-three);
}
input:focus + .slider {
box-shadow: 0 0 1px var(--color-three);
}
input:checked + .slider::before {
transform: translateX(2.6rem);
}
/* Rounded sliders */
.slider.round {
border-radius: 3.4rem;
}
.slider.round::before {
border-radius: 50%;
}
@media screen and (width <=1136px) {
.hero-text-container {
width: unset;
.about-container a{
color:var(--3);
}
.features {
grid-template-columns: repeat(2, 1fr);
}
}
@media screen and (width <=706px) {
.about-container article .logo-container svg {
width: clamp(200px, 290px, 815px);
.about-container article h2{
color: var(--3);
font-size: 1.8rem;
padding-bottom: 10px;
}
.about-container article .text-block .text-block-title {
font-size: 33px;
.about-container p{
color:var(--fg);
font-size: 1.6rem;
padding-bottom: 10px;
}
.hero-text {
font-size: 22px;
.about-container h3{
font-size: 1.5rem;
}
.about-container {
width: unset;
}
.feature-list-title {
font-size: 33px;
}
.features {
grid-template-columns: 1fr;
}
.feature-list {
padding: 35px 0;
}
.feature-card {
border-radius: 0;
}
}

10
public/templates/404.html Normal file
View file

@ -0,0 +1,10 @@
{{>header this}}
<main class="error_container">
<img src="images/robot-404.svg" alt="Image of broken robot." />
<div class="error_content">
<h1>Aw! snap</h1>
<h2>404 Page Not Found!</h2>
<p>Go to <a href="/">search page</a></p>
</div>
</main>
{{>footer}}

View file

@ -0,0 +1,29 @@
{{>header this}}
<main class="about-container">
<article >
<div>
<h1 >Websurfx</h1>
<hr size="4" width="100%" color="#a6e3a1">
</div>
<p>A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy.<br> It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads.
</p>
<h2>Some of the Top Features:</h2>
<ul><strong>Lightning fast </strong>- Results load within milliseconds for an instant search experience.</ul>
<ul><strong>Secure search</strong> - All searches are performed over an encrypted connection to prevent snooping.</ul>
<ul><strong>Ad free results</strong> - All search results are ad free and clutter free for a clean search experience.</ul>
<ul><strong>Privacy focused</strong> - Websurfx does not track, store or sell your search data. Your privacy is our priority.</ul>
<ul><strong>Free and Open source</strong> - The entire project's code is open source and available for free on <a href="https://github.com/neon-mmd/websurfx">GitHub</a> under the GNU Affero General Public License.</ul>
<ul><strong>Highly customizable</strong> - Websurfx comes with 9 built-in color themes and supports creating custom themes effortlessly.</ul>
</article>
<h3>Developed by: <a href="https://github.com/neon-mmd/websurfx">Websurfx team</a></h3>
</main>
{{>footer}}

View file

@ -0,0 +1,15 @@
<footer>
<div>
<span>Powered By <b>Websurfx</b></span><span>-</span><span>a lightning fast, privacy-respecting, secure meta
search engine</span>
</div>
<div>
<ul>
<li><a href="#">Source Code</a></li>
<li><a href="#">Issues/Bugs</a></li>
</ul>
</div>
</footer>
</body>
</html>

View file

@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Websurfx</title>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link href="static/colorschemes/{{colorscheme}}.css" rel="stylesheet" type="text/css" />
<link href="static/themes/{{theme}}.css" rel="stylesheet" type="text/css" />
</head>
<body>
<header>{{>navbar}}</header>

View file

@ -0,0 +1,7 @@
{{>header this}}
<main class="search-container">
<img src="../images/websurfx_logo.png" alt="Websurfx meta-search engine logo" />
{{>search_bar}}
</main>
<script src="static/index.js"></script>
{{>footer}}

View file

@ -0,0 +1,6 @@
<nav>
<ul>
<li><a href="about">about</a></li>
<li><a href="settings">settings</a></li>
</ul>
</nav>

View file

@ -0,0 +1,25 @@
{{>header this.style}}
<main class="results">
{{>search_bar}}
<div class="results_aggregated">
{{#each results}}
<div class="result">
<h1><a href="{{this.visitingUrl}}">{{{this.title}}}</a></h1>
<small>{{this.url}}</small>
<p>{{{this.description}}}</p>
<div class="upstream_engines">
{{#each engine}}
<span>{{this}}</span>
{{/each}}
</div>
</div>
{{/each}}
</div>
<div class="page_navigation">
<button type="button" onclick="navigate_backward()">&#8592; previous</button>
<button type="button" onclick="navigate_forward()">next &#8594;</button>
</div>
</main>
<script src="static/index.js"></script>
<script src="static/pagination.js"></script>
{{>footer}}

View file

@ -0,0 +1,9 @@
<div class="search_bar">
<input
type="search"
name="search-box"
value="{{this.pageQuery}}"
placeholder="Type to search"
/>
<button type="submit" onclick="search_web()">search</button>
</div>

View file

@ -0,0 +1,5 @@
{{>header this}}
<main class="settings">
<h1>Page is under construction</h1>
</main>
{{>footer}}

View file

@ -2,23 +2,11 @@
//!
//! This module contains the main function which handles the logging of the application to the
//! stdout and handles the command line arguments provided and launches the `websurfx` server.
#[cfg(not(feature = "dhat-heap"))]
use mimalloc::MiMalloc;
use std::{net::TcpListener, sync::OnceLock};
use websurfx::{cache::cacher::create_cache, config::parser::Config, run};
use std::net::TcpListener;
/// A dhat heap memory profiler
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
#[cfg(not(feature = "dhat-heap"))]
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;
/// A static constant for holding the parsed config.
static CONFIG: OnceLock<Config> = OnceLock::new();
use env_logger::Env;
use websurfx::{config_parser::parser::Config, run};
/// The function that launches the main server and registers all the routes of the website.
///
@ -28,27 +16,15 @@ static CONFIG: OnceLock<Config> = OnceLock::new();
/// available for being used for other applications.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
// A dhat heap profiler initialization.
#[cfg(feature = "dhat-heap")]
let _profiler = dhat::Profiler::new_heap();
// Initialize the parsed config file.
let config = Config::parse().unwrap();
// Initialize the parsed config globally.
let config = CONFIG.get_or_init(|| Config::parse(false).unwrap());
// Initializing logging middleware with level set to default or info.
env_logger::Builder::from_env(Env::default().default_filter_or("info")).init();
let cache = create_cache(config).await;
log::info!("started server on port {}", config.port);
log::info!(
"started server on port {} and IP {}",
config.port,
config.binding_ip
);
log::info!(
"Open http://{}:{}/ in your browser",
config.binding_ip,
config.port,
);
let listener = TcpListener::bind((config.binding_ip_addr.clone(), config.port))?;
let listener = TcpListener::bind((config.binding_ip.as_str(), config.port))?;
run(listener, config, cache)?.await
run(listener, config)?.await
}

609
src/cache/cacher.rs vendored
View file

@ -1,597 +1,74 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.
use error_stack::Report;
use futures::future::join_all;
#[cfg(feature = "memory-cache")]
use moka::future::Cache as MokaCache;
use md5::compute;
use redis::{Client, Commands, Connection};
#[cfg(feature = "memory-cache")]
use std::time::Duration;
use tokio::sync::Mutex;
/// A named struct which stores the redis Connection url address to which the client will
/// connect to.
///
/// # Fields
///
/// * `redis_connection_url` - It stores the redis Connection url address.
pub struct RedisCache {
connection: Connection,
}
use crate::{config::parser::Config, models::aggregation_models::SearchResults};
use super::error::CacheError;
#[cfg(feature = "redis-cache")]
use super::redis_cacher::RedisCache;
#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
use super::encryption::*;
/// Abstraction trait for common methods provided by a cache backend.
#[async_trait::async_trait]
pub trait Cacher: Send + Sync {
// A function that builds the cache from the given configuration.
impl RedisCache {
/// Constructs a new `SearchResult` with the given arguments needed for the struct.
///
/// # Arguments
///
/// * `config` - It takes the config struct as an argument.
///
/// # Returns
///
/// It returns a newly initialized backend based on the feature enabled by the user.
async fn build(config: &Config) -> Self
where
Self: Sized;
/// A function which fetches the cached json results as json string.
///
/// # Arguments
///
/// * `url` - It takes an url as a string.
///
/// # Error
///
/// Returns the `SearchResults` from the cache if the program executes normally otherwise
/// returns a `CacheError` if the results cannot be retrieved from the cache.
async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>>;
/// A function which caches the results by using the `url` as the key and
/// `json results` as the value and stores it in the cache
///
/// # Arguments
///
/// * `json_results` - It takes the json results string as an argument.
/// * `url` - It takes the url as a String.
///
/// # Error
///
/// Returns a unit type if the program caches the given search results without a failure
/// otherwise it returns a `CacheError` if the search results cannot be cached due to a
/// failure.
async fn cache_results(
&mut self,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>>;
/// * `redis_connection_url` - It stores the redis Connection url address.
pub fn new(redis_connection_url: String) -> Result<Self, Box<dyn std::error::Error>> {
let client = Client::open(redis_connection_url)?;
let connection = client.get_connection()?;
let redis_cache = RedisCache { connection };
Ok(redis_cache)
}
/// A helper function which computes the hash of the url and formats and returns it as string.
///
/// # Arguments
///
/// * `url` - It takes an url as string.
fn hash_url(&self, url: &str) -> String {
blake3::hash(url.as_bytes()).to_string()
fn compute_url_hash(url: &str) -> String {
format!("{:?}", compute(url))
}
/// A helper function that returns either encrypted or decrypted results.
/// Feature flags (**encrypt-cache-results or cec-cache-results**) are required for this to work.
/// A function which fetches the cached json results as json string from the redis server.
///
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
/// * `encrypt` - A boolean to choose whether to encrypt or decrypt the bytes
///
/// # Error
/// Returns either encrypted or decrypted bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(
// feature = "compress-cache-results",
feature = "encrypt-cache-results",
feature = "cec-cache-results"
))]
async fn encrypt_or_decrypt_results(
&mut self,
mut bytes: Vec<u8>,
encrypt: bool,
) -> Result<Vec<u8>, Report<CacheError>> {
use chacha20poly1305::{
aead::{Aead, AeadCore, KeyInit, OsRng},
ChaCha20Poly1305,
};
let cipher = CIPHER.get_or_init(|| {
let key = ChaCha20Poly1305::generate_key(&mut OsRng);
ChaCha20Poly1305::new(&key)
});
let encryption_key = ENCRYPTION_KEY.get_or_init(
|| ChaCha20Poly1305::generate_nonce(&mut OsRng), // 96-bits; unique per message
);
bytes = if encrypt {
cipher
.encrypt(encryption_key, bytes.as_ref())
.map_err(|_| CacheError::EncryptionError)?
} else {
cipher
.decrypt(encryption_key, bytes.as_ref())
.map_err(|_| CacheError::EncryptionError)?
};
Ok(bytes)
/// * `url` - It takes an url as a string.
pub fn cached_results_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
let hashed_url_string = Self::compute_url_hash(url);
Ok(self.connection.get(hashed_url_string)?)
}
/// A helper function that returns compressed results.
/// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
/// A function which caches the results by using the hashed `url` as the key and
/// `json results` as the value and stores it in redis server with ttl(time to live)
/// set to 60 seconds.
///
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
///
/// # Error
/// Returns the compressed bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
async fn compress_results(
/// * `json_results` - It takes the json results string as an argument.
/// * `url` - It takes the url as a String.
pub fn cache_results(
&mut self,
mut bytes: Vec<u8>,
) -> Result<Vec<u8>, Report<CacheError>> {
use tokio::io::AsyncWriteExt;
let mut writer = async_compression::tokio::write::BrotliEncoder::new(Vec::new());
writer
.write_all(&bytes)
.await
.map_err(|_| CacheError::CompressionError)?;
writer
.shutdown()
.await
.map_err(|_| CacheError::CompressionError)?;
bytes = writer.into_inner();
Ok(bytes)
}
json_results: String,
url: &str,
) -> Result<(), Box<dyn std::error::Error>> {
let hashed_url_string = Self::compute_url_hash(url);
/// A helper function that returns compressed-encrypted results.
/// Feature flag (**cec-cache-results**) is required for this to work.
///
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
// put results_json into cache
self.connection.set(&hashed_url_string, json_results)?;
///
/// # Error
/// Returns the compressed and encrypted bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(feature = "cec-cache-results")]
async fn compress_encrypt_compress_results(
&mut self,
mut bytes: Vec<u8>,
) -> Result<Vec<u8>, Report<CacheError>> {
// compress first
bytes = self.compress_results(bytes).await?;
// encrypt
bytes = self.encrypt_or_decrypt_results(bytes, true).await?;
// compress again;
bytes = self.compress_results(bytes).await?;
Ok(bytes)
}
/// A helper function that returns compressed results.
/// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
/// If bytes where
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
///
/// # Error
/// Returns the uncompressed bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
async fn decompress_results(&mut self, bytes: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
cfg_if::cfg_if! {
if #[cfg(feature = "compress-cache-results")]
{
decompress_util(bytes).await
}
else if #[cfg(feature = "cec-cache-results")]
{
let decompressed = decompress_util(bytes)?;
let decrypted = self.encrypt_or_decrypt_results(decompressed, false)?;
decompress_util(&decrypted).await
}
}
}
/// A helper function that compresses or encrypts search results before they're inserted into a cache store
/// # Arguments
///
/// * `search_results` - A reference to the search_Results to process.
///
///
/// # Error
/// Returns a Vec of compressed or encrypted bytes on success otherwise it returns a CacheError
/// on failure.
async fn pre_process_search_results(
&mut self,
search_results: &SearchResults,
) -> Result<Vec<u8>, Report<CacheError>> {
#[allow(unused_mut)] // needs to be mutable when any of the features is enabled
let mut bytes: Vec<u8> = search_results.try_into()?;
#[cfg(feature = "compress-cache-results")]
{
let compressed = self.compress_results(bytes).await?;
bytes = compressed;
}
#[cfg(feature = "encrypt-cache-results")]
{
let encrypted = self.encrypt_or_decrypt_results(bytes, true).await?;
bytes = encrypted;
}
#[cfg(feature = "cec-cache-results")]
{
let compressed_encrypted_compressed =
self.compress_encrypt_compress_results(bytes).await?;
bytes = compressed_encrypted_compressed;
}
Ok(bytes)
}
/// A helper function that decompresses or decrypts search results after they're fetched from the cache-store
/// # Arguments
///
/// * `bytes` - A Vec of bytes stores in the cache.
///
///
/// # Error
/// Returns the SearchResults struct on success otherwise it returns a CacheError
/// on failure.
#[allow(unused_mut)] // needs to be mutable when any of the features is enabled
async fn post_process_search_results(
&mut self,
mut bytes: Vec<u8>,
) -> Result<SearchResults, Report<CacheError>> {
#[cfg(feature = "compress-cache-results")]
{
let decompressed = self.decompress_results(&bytes).await?;
bytes = decompressed
}
#[cfg(feature = "encrypt-cache-results")]
{
let decrypted = self.encrypt_or_decrypt_results(bytes, false).await?;
bytes = decrypted
}
#[cfg(feature = "cec-cache-results")]
{
let decompressed_decrypted = self.decompress_results(&bytes).await?;
bytes = decompressed_decrypted;
}
Ok(bytes.try_into()?)
}
}
/// A helper function that returns compressed results.
/// Feature flags (**compress-cache-results or cec-cache-results**) are required for this to work.
/// If bytes where
/// # Arguments
///
/// * `bytes` - It takes a slice of bytes as an argument.
///
/// # Error
/// Returns the uncompressed bytes on success otherwise it returns a CacheError
/// on failure.
#[cfg(any(feature = "compress-cache-results", feature = "cec-cache-results"))]
async fn decompress_util(input: &[u8]) -> Result<Vec<u8>, Report<CacheError>> {
use tokio::io::AsyncWriteExt;
let mut writer = async_compression::tokio::write::BrotliDecoder::new(Vec::new());
writer
.write_all(input)
.await
.map_err(|_| CacheError::CompressionError)?;
writer
.shutdown()
.await
.map_err(|_| CacheError::CompressionError)?;
let bytes = writer.into_inner();
Ok(bytes)
}
#[cfg(feature = "redis-cache")]
#[async_trait::async_trait]
impl Cacher for RedisCache {
    async fn build(config: &Config) -> Self {
        log::info!(
            "Initialising redis cache. Listening to {}",
            &config.redis_url
        );
        RedisCache::new(&config.redis_url, 5, config.cache_expiry_time)
            .await
            .expect("Redis cache configured")
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        use base64::Engine;

        // The cache key is the hashed url; the stored value is a base64 string.
        let hashed_url: &str = &self.hash_url(url);
        let encoded = self.cached_json(hashed_url).await?;
        let raw_bytes = base64::engine::general_purpose::STANDARD_NO_PAD
            .decode(encoded)
            .map_err(|_| CacheError::Base64DecodingOrEncodingError)?;
        self.post_process_search_results(raw_bytes).await
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        use base64::Engine;

        // `search_results` and `urls` are expected to have equal lengths: they
        // form the value/key pairs written to the cache.
        let pair_count = search_results.len();

        // Serialize (and, depending on features, compress/encrypt) each result.
        let mut serialized = Vec::with_capacity(pair_count);
        for result in search_results {
            serialized.push(self.pre_process_search_results(result).await?);
        }

        // Lazily base64-encode each serialized payload as it is consumed.
        let encoded_values = serialized
            .iter()
            .map(|value| base64::engine::general_purpose::STANDARD_NO_PAD.encode(value));

        // Hash every url to produce the corresponding cache keys.
        let mut hashed_keys = Vec::with_capacity(pair_count);
        for url in urls {
            hashed_keys.push(self.hash_url(url));
        }

        self.cache_json(encoded_values, hashed_keys.into_iter())
            .await
    }
}
use std::{convert::TryInto, sync::Arc};

/// Fallible conversion from the raw cached byte representation into `SearchResults`.
///
/// Implemented as `TryFrom` (rather than a manual `TryInto`) so the standard
/// library's blanket impl provides `Vec<u8>: TryInto<SearchResults>` for free,
/// keeping existing `bytes.try_into()?` call sites working.
impl TryFrom<Vec<u8>> for SearchResults {
    type Error = CacheError;

    fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
        // Any bincode failure is collapsed into a single serialization error.
        bincode::deserialize_from(bytes.as_slice()).map_err(|_| CacheError::SerializationError)
    }
}
impl TryInto<Vec<u8>> for &SearchResults {
type Error = CacheError;
fn try_into(self) -> Result<Vec<u8>, Self::Error> {
bincode::serialize(self).map_err(|_| CacheError::SerializationError)
}
}
/// Memory based cache backend.
///
/// Entries are keyed by the hashed search url and hold the serialized search
/// results as raw bytes (see `cached_results`/`cache_results` below).
#[cfg(feature = "memory-cache")]
pub struct InMemoryCache {
    /// The backend cache which stores data. Wrapped in an `Arc` so clones of
    /// this struct (and spawned insert tasks) share one underlying cache.
    cache: Arc<MokaCache<String, Vec<u8>>>,
}
#[cfg(feature = "memory-cache")]
impl Clone for InMemoryCache {
    /// Cloning is cheap: only the `Arc` handle to the shared cache is
    /// duplicated, never the cached data itself.
    fn clone(&self) -> Self {
        Self {
            cache: Arc::clone(&self.cache),
        }
    }
}
#[cfg(feature = "memory-cache")]
#[async_trait::async_trait]
impl Cacher for InMemoryCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising in-memory cache");

        InMemoryCache {
            cache: Arc::new(
                MokaCache::builder()
                    // TTL applies uniformly to every entry inserted below.
                    .time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
                    .build(),
            ),
        }
    }

    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let hashed_url_string = self.hash_url(url);
        match self.cache.get(&hashed_url_string).await {
            Some(res) => self.post_process_search_results(res).await,
            None => Err(Report::new(CacheError::MissingValue)),
        }
    }

    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        // Insert every key/value pair concurrently. Expiry needs no per-key
        // handling here: the moka cache enforces the TTL configured in `build`.
        // (The previous version ended with leftover redis code referencing a
        // non-existent `self.connection` and an out-of-scope variable, which
        // could not compile.)
        let mut tasks: Vec<_> = Vec::with_capacity(urls.len());
        for (url, search_result) in urls.iter().zip(search_results.iter()) {
            let hashed_url_string = self.hash_url(url);
            let bytes = self.pre_process_search_results(search_result).await?;
            let new_self = self.clone();
            tasks.push(tokio::spawn(async move {
                new_self.cache.insert(hashed_url_string, bytes).await
            }));
        }
        join_all(tasks).await;

        Ok(())
    }
}
/// Cache backend which utilises both memory and redis based caches.
///
/// The hybrid cache system uses both the types of cache to ensure maximum availability.
/// The set method sets the key, value pair in both the caches. Therefore in a case where redis
/// cache becomes unavailable, the backend will retrieve the value from in-memory cache.
#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
pub struct HybridCache {
    /// The in-memory backend cache which stores data.
    memory_cache: InMemoryCache,
    /// The redis backend cache which stores data.
    redis_cache: RedisCache,
}
#[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
#[async_trait::async_trait]
impl Cacher for HybridCache {
    async fn build(config: &Config) -> Self {
        log::info!("Initialising hybrid cache");
        HybridCache {
            memory_cache: InMemoryCache::build(config).await,
            redis_cache: RedisCache::build(config).await,
        }
    }

    /// Tries redis first and falls back to the in-memory cache on any redis
    /// failure (miss, dropped connection, ...).
    async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        match self.redis_cache.cached_results(url).await {
            Ok(res) => Ok(res),
            Err(_) => self.memory_cache.cached_results(url).await,
        }
    }

    /// Stores the results in both caches.
    ///
    /// The in-memory cache is written first so that a redis outage cannot
    /// prevent the fallback copy from being stored — previously a redis error
    /// short-circuited before the memory cache was touched, defeating the
    /// availability goal documented on the struct. Errors from either backend
    /// are still propagated (redis errors take precedence).
    async fn cache_results(
        &mut self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let memory_outcome = self.memory_cache.cache_results(search_results, urls).await;
        self.redis_cache.cache_results(search_results, urls).await?;
        memory_outcome
    }
}
/// Dummy cache backend, used when caching is turned off.
///
/// Lookups always report a miss and writes are silently discarded, so the rest
/// of the application can rely on the `Cacher` interface unconditionally.
pub struct DisabledCache;

#[async_trait::async_trait]
impl Cacher for DisabledCache {
    async fn build(_config: &Config) -> Self {
        log::info!("Caching is disabled");
        Self
    }

    async fn cached_results(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
        // Always a miss, so callers fetch fresh results.
        Err(Report::new(CacheError::MissingValue))
    }

    async fn cache_results(
        &mut self,
        _search_results: &[SearchResults],
        _urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        // Nothing is stored; succeed so callers do not treat this as a failure.
        Ok(())
    }
}
/// A structure to efficiently share the cache between threads - as it is protected by a Mutex.
///
/// The boxed `dyn Cacher` lets any backend (redis, in-memory, hybrid or
/// disabled) be stored behind one concrete type.
pub struct SharedCache {
    /// The internal cache protected from concurrent access by a mutex
    cache: Mutex<Box<dyn Cacher>>,
}
impl SharedCache {
    /// Creates a new `SharedCache` from any concrete `Cacher` implementation.
    ///
    /// # Arguments
    ///
    /// * `cache` - It takes the `Cache` enum variant as an argument with the prefered cache type.
    ///
    /// Returns a newly constructed `SharedCache` struct.
    pub fn new(cache: impl Cacher + 'static) -> Self {
        Self {
            cache: Mutex::new(Box::new(cache)),
        }
    }

    /// Retrieves the cached `SearchResults` for the given search url.
    ///
    /// # Arguments
    ///
    /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
    /// cached results from the cache.
    ///
    /// # Error
    ///
    /// Returns the cached `SearchResults` on a hit, otherwise returns a `CacheError`.
    pub async fn cached_results(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
        let mut guard = self.cache.lock().await;
        guard.cached_results(url).await
    }

    /// Caches the given `SearchResults`, keyed by their corresponding urls.
    ///
    /// # Arguments
    ///
    /// * `search_results` - It takes the `SearchResults` as an argument which are results that
    /// needs to be cached.
    /// * `urls` - It takes the search urls as an argument which will be used as the keys for
    /// storing results in the cache.
    ///
    /// # Error
    ///
    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
    /// on a failure.
    pub async fn cache_results(
        &self,
        search_results: &[SearchResults],
        urls: &[String],
    ) -> Result<(), Report<CacheError>> {
        let mut guard = self.cache.lock().await;
        guard.cache_results(search_results, urls).await
    }
}
/// A function to initialise the cache backend.
///
/// The concrete backend is selected at compile time from the enabled cargo
/// features: both `redis-cache` and `memory-cache` pick the hybrid backend,
/// exactly one of them picks that backend alone, and neither disables caching.
pub async fn create_cache(config: &Config) -> impl Cacher {
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    return HybridCache::build(config).await;
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    return InMemoryCache::build(config).await;
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    return RedisCache::build(config).await;
    // Fallback when no cache feature is enabled at all.
    #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
    return DisabledCache::build(config).await;
}
//#[cfg(feature = "Compress-cache-results")]

View file

@ -1,25 +0,0 @@
use chacha20poly1305::{
consts::{B0, B1},
ChaChaPoly1305,
};
use std::sync::OnceLock;
use chacha20::{
cipher::{
generic_array::GenericArray,
typenum::{UInt, UTerm},
StreamCipherCoreWrapper,
},
ChaChaCore,
};
/// The ChaCha20 core wrapped in a stream cipher for use in ChaCha20-Poly1305 authenticated encryption.
/// (The typenum parameter spells `U10`, i.e. the 10 double-rounds of ChaCha20.)
type StreamCipherCoreWrapperType =
    StreamCipherCoreWrapper<ChaChaCore<UInt<UInt<UInt<UInt<UTerm, B1>, B0>, B1>, B0>>>;
/// Our ChaCha20-Poly1305 cipher instance, lazily initialized.
pub static CIPHER: OnceLock<ChaChaPoly1305<StreamCipherCoreWrapperType>> = OnceLock::new();
/// The type alias for our encryption key.
/// NOTE(review): the typenum here spells `U12` — a 12-byte (nonce-sized) array,
/// not the 32 bytes previously documented; confirm the intended key/nonce size.
type GenericArrayType = GenericArray<u8, UInt<UInt<UInt<UInt<UTerm, B1>, B1>, B0>, B0>>;
/// Our encryption key, lazily initialized.
pub static ENCRYPTION_KEY: OnceLock<GenericArrayType> = OnceLock::new();

68
src/cache/error.rs vendored
View file

@ -1,68 +0,0 @@
//! This module provides the error enum to handle different errors associated while requesting data from
//! the redis server using an async connection pool.
use std::fmt;
#[cfg(feature = "redis-cache")]
use redis::RedisError;
/// A custom error type used for handling redis async pool associated errors.
#[derive(Debug)]
pub enum CacheError {
    /// This variant handles all errors related to `RedisError`,
    #[cfg(feature = "redis-cache")]
    RedisError(RedisError),
    /// This variant handles the errors which occurs when all the connections
    /// in the connection pool return a connection dropped redis error.
    PoolExhaustionWithConnectionDropError,
    /// Whenever serialization or deserialization fails during communication with the cache.
    SerializationError,
    /// Returned when the value is missing.
    MissingValue,
    /// Whenever encryption or decryption of cache results fails.
    EncryptionError,
    /// Whenever compression or decompression of the cache results fails.
    CompressionError,
    /// Whenever base64 encoding or decoding fails.
    Base64DecodingOrEncodingError,
}
impl fmt::Display for CacheError {
    /// Formats a human-readable description of the error.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            #[cfg(feature = "redis-cache")]
            CacheError::RedisError(redis_error) => {
                // Prefer the detailed server-provided message; when absent,
                // fall back to the error's own Display instead of printing an
                // empty string (an empty error message hides the failure).
                if let Some(detail) = redis_error.detail() {
                    write!(f, "{}", detail)
                } else {
                    write!(f, "{}", redis_error)
                }
            }
            CacheError::PoolExhaustionWithConnectionDropError => {
                write!(
                    f,
                    "Error all connections from the pool dropped with connection error"
                )
            }
            CacheError::MissingValue => {
                write!(f, "The value is missing from the cache")
            }
            CacheError::SerializationError => {
                write!(f, "Unable to serialize, deserialize from the cache")
            }
            CacheError::EncryptionError => {
                write!(f, "Failed to encrypt or decrypt cache-results")
            }
            CacheError::CompressionError => {
                write!(f, "failed to compress or uncompress cache results")
            }
            CacheError::Base64DecodingOrEncodingError => {
                write!(f, "base64 encoding or decoding failed")
            }
        }
    }
}
impl error_stack::Context for CacheError {}

10
src/cache/mod.rs vendored
View file

@ -1,11 +1 @@
//! This module provides the modules which provide the functionality to cache the aggregated
//! results fetched and aggregated from the upstream search engines in a json format.
/// Houses the `Cacher` trait and the concrete cache backend implementations.
pub mod cacher;
#[cfg(any(feature = "encrypt-cache-results", feature = "cec-cache-results"))]
/// encryption module contains encryption utils such as the cipher and key
pub mod encryption;
/// Error types shared by all cache backends.
pub mod error;
#[cfg(feature = "redis-cache")]
/// Redis-backed cache implementation (only compiled with the `redis-cache` feature).
pub mod redis_cacher;

View file

@ -1,191 +0,0 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.
use super::error::CacheError;
use error_stack::Report;
use futures::stream::FuturesUnordered;
use redis::{
aio::ConnectionManager, AsyncCommands, Client, ExistenceCheck, RedisError, SetExpiry,
SetOptions,
};
/// A constant holding the redis pipeline size.
const REDIS_PIPELINE_SIZE: usize = 3;
/// A named struct which stores the redis Connection url address to which the client will
/// connect to.
pub struct RedisCache {
    /// It stores a pool of connections ready to be used.
    connection_pool: Box<[ConnectionManager]>,
    /// It stores the size of the connection pool (in other words the number of
    /// connections that should be stored in the pool).
    pool_size: u8,
    /// It stores the index of which connection is being used at the moment.
    current_connection: u8,
    /// It stores the max TTL for keys.
    cache_ttl: u16,
    /// It stores the redis pipeline struct, pre-sized to `REDIS_PIPELINE_SIZE`.
    pipeline: redis::Pipeline,
}
impl RedisCache {
    /// Constructs a new `RedisCache` backed by a pool of connection managers.
    /// (The previous doc, copy-pasted from `cached_json`, wrongly described
    /// this constructor as fetching cached json.)
    ///
    /// # Arguments
    ///
    /// * `redis_connection_url` - It takes the redis Connection url address.
    /// * `pool_size` - It takes the size of the connection pool (in other words the number of
    /// connections that should be stored in the pool).
    /// * `cache_ttl` - It takes the time to live for cached results to live in the redis
    /// server.
    ///
    /// # Error
    ///
    /// Returns a newly constructed `RedisCache` struct on success otherwise returns a standard
    /// error type.
    pub async fn new(
        redis_connection_url: &str,
        pool_size: u8,
        cache_ttl: u16,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        let client = Client::open(redis_connection_url)?;
        let tasks: FuturesUnordered<_> = FuturesUnordered::new();

        // Open all pool connections concurrently rather than one after another.
        for _ in 0..pool_size {
            let client_partially_cloned = client.clone();
            tasks.push(tokio::spawn(async move {
                client_partially_cloned.get_connection_manager().await
            }));
        }

        let mut outputs = Vec::with_capacity(tasks.len());
        for task in tasks {
            outputs.push(task.await??);
        }

        let redis_cache = RedisCache {
            connection_pool: outputs.into_boxed_slice(),
            pool_size,
            current_connection: Default::default(),
            cache_ttl,
            pipeline: redis::Pipeline::with_capacity(REDIS_PIPELINE_SIZE),
        };
        Ok(redis_cache)
    }

    /// A function which fetches the cached json as json string from the redis server.
    ///
    /// # Arguments
    ///
    /// * `key` - It takes a string as key.
    ///
    /// # Error
    ///
    /// Returns the json as a String from the cache on success otherwise returns a `CacheError`
    /// on a failure.
    pub async fn cached_json(&mut self, key: &str) -> Result<String, Report<CacheError>> {
        self.current_connection = Default::default();

        let mut result: Result<String, RedisError> = self.connection_pool
            [self.current_connection as usize]
            .get(key)
            .await;

        // Code to check whether the current connection being used is dropped with connection error
        // or not. if it drops with the connection error then the current connection is replaced
        // with a new connection from the pool which is then used to run the redis command then
        // that connection is also checked whether it is dropped or not if it is not then the
        // result is passed as a `Result` or else the same process repeats again and if all of the
        // connections in the pool result in connection drop error then a custom pool error is
        // returned.
        loop {
            match result {
                Err(error) => match error.is_connection_dropped() {
                    true => {
                        self.current_connection += 1;
                        if self.current_connection == self.pool_size {
                            return Err(Report::new(
                                CacheError::PoolExhaustionWithConnectionDropError,
                            ));
                        }
                        result = self.connection_pool[self.current_connection as usize]
                            .get(key)
                            .await;
                        continue;
                    }
                    false => return Err(Report::new(CacheError::RedisError(error))),
                },
                Ok(res) => return Ok(res),
            }
        }
    }

    /// A function which caches the json by using the key and
    /// `json results` as the value and stores it in redis server with ttl(time to live)
    /// set to `cache_ttl`.
    ///
    /// # Arguments
    ///
    /// * `json_results` - It takes the json results string as an argument.
    /// * `keys` - It takes the keys as Strings.
    ///
    /// # Error
    ///
    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
    /// on a failure.
    pub async fn cache_json(
        &mut self,
        json_results: impl Iterator<Item = String>,
        keys: impl Iterator<Item = String>,
    ) -> Result<(), Report<CacheError>> {
        self.current_connection = Default::default();

        // BUGFIX: the pipeline is a reused struct field and `query_async` does
        // not empty it, so without clearing here every call would re-send all
        // of the SET commands queued by previous calls.
        self.pipeline.clear();

        for (key, json_result) in keys.zip(json_results) {
            self.pipeline.set_options(
                key,
                json_result,
                SetOptions::default()
                    .conditional_set(ExistenceCheck::NX)
                    .get(true)
                    .with_expiration(SetExpiry::EX(self.cache_ttl.into())),
            );
        }

        let mut result: Result<(), RedisError> = self
            .pipeline
            .query_async(&mut self.connection_pool[self.current_connection as usize])
            .await;

        // Code to check whether the current connection being used is dropped with connection error
        // or not. if it drops with the connection error then the current connection is replaced
        // with a new connection from the pool which is then used to run the redis command then
        // that connection is also checked whether it is dropped or not if it is not then the
        // result is passed as a `Result` or else the same process repeats again and if all of the
        // connections in the pool result in connection drop error then a custom pool error is
        // returned.
        loop {
            match result {
                Err(error) => match error.is_connection_dropped() {
                    true => {
                        self.current_connection += 1;
                        if self.current_connection == self.pool_size {
                            return Err(Report::new(
                                CacheError::PoolExhaustionWithConnectionDropError,
                            ));
                        }
                        result = self
                            .pipeline
                            .query_async(
                                &mut self.connection_pool[self.current_connection as usize],
                            )
                            .await;
                        continue;
                    }
                    false => return Err(Report::new(CacheError::RedisError(error))),
                },
                Ok(_) => return Ok(()),
            }
        }
    }
}

Some files were not shown because too many files have changed in this diff Show more