Compare commits
63 commits
bd461fa820
2870624d44
30c599ab92
c34465bab5
1f6471556e
0cd0864fa1
9f7dcfc21e
c8e055a718
84564f44a3
a1c9524f74
2257b91b40
ece6bfc2ce
d49344f9d8
f944ebed81
07812d2c85
2e900be698
91dc271d74
fa896f6bb9
b77a691b7d
91a17ece5c
e784e8d239
80e5347178
4a61a4b857
12de82e7b4
192ecea2a4
ed85da51df
3433c5c3d5
6b3636013c
60c90d7549
5e77821f78
fcab855fda
1f2fec198f
776298511b
de65eec3a9
18e9d5c148
fd243fa5ab
36a63e8878
00ce9660ef
03df2ac128
efab99fda2
ce53925f36
7dae430759
a6df350843
7f8f1cf65f
4be6936026
9b7825bc59
8c176d3840
13e917d97b
49ecbb56f7
cf16bf65f7
18ae584836
a389772d96
0f09633899
972424829c
25c32a6b95
fb5a88c22c
567270e177
5af6e059b7
630df3083f
b7e215f9c2
c375b48ebf
25f0a3f814
2bd8d7ba01
110 changed files with 15631 additions and 6619 deletions
@@ -10,34 +10,52 @@ CARGO_REGISTRIES_CRATES_IO_PROTOCOL ?= sparse
RUSTFLAGS ?= -D warnings -W unreachable-pub -W rust-2021-compatibility -C debuginfo=0
RUSTUP_MAX_RETRIES ?= 10
RUST_BACKTRACE ?= short
NIGHTLY_EXISTS=`((cargo +nightly 2> /dev/null 1> /dev/null) && echo 0)|| echo 1)`
GIT=env GIT_CONFIG_GLOBAL="" GIT_CONFIG_SYSTEM="" GIT_CONFIG_NOSYSTEM=1 git

.PHONY: all
all: rustfmt clippy cargo-derivefmt-melib cargo-derivefmt-meli cargo-derivefmt-tools
@printf "All completed.\n"
all: cargo-msrv rustfmt clippy cargo-derivefmt-melib cargo-derivefmt-meli cargo-derivefmt-tools
@printf "All checks completed.\n"

# Check both melib and meli in the same Make target, because if melib does not
# satisfy MSRV then meli won't either, since it depends on melib.
.PHONY: cargo-msrv
cargo-msrv:
@printf "cargo-msrv\n"
cargo msrv --output-format json --log-level trace --log-target stdout --path meli verify -- cargo check --all-targets
cargo msrv --output-format json --log-level trace --log-target stdout --path melib verify -- cargo check --all-targets

.PHONY: rustfmt
rustfmt:
@printf "rustfmt\n"
cargo +nightly fmt --check --all || cargo fmt --check --all
@((if [ "${NIGHTLY_EXISTS}" -eq 0 ]; then printf "running rustfmt with nightly toolchain\n"; else printf "running rustfmt with active toolchain\n"; fi))
@((if [ "${NIGHTLY_EXISTS}" -eq 0 ]; then cargo +nightly fmt --check --all; else cargo fmt --check --all; fi))

.PHONY: clippy
clippy:
@printf "clippy\n"
cargo clippy --no-deps --all-features --all --tests --examples --benches --bins

.PHONY: cargo-derivefmt-melib
cargo-derivefmt-melib:
@printf "cargo-derivefmt-melib\n"
@printf "Checking that derives are sorted alphabetically...\n"
cargo derivefmt --manifest-path ./melib/Cargo.toml
git checkout meli/src/conf/overrides.rs
git add --update ./melib/ && git diff --quiet && git diff --cached --quiet
@$(GIT) checkout --quiet meli/src/conf/overrides.rs
@($(GIT) diff --quiet ./melib && $(GIT) diff --cached --quiet ./melib && printf "All ./melib derives are sorted alphabetically.\n") || (printf "Some derives in the ./melib crate are not sorted alphabetically, see diff:\n"; $(GIT) diff HEAD; exit 1)

.PHONY: cargo-derivefmt-meli
cargo-derivefmt-meli:
@printf "cargo-derivefmt-meli\n"
@printf "Checking that derives are sorted alphabetically...\n"
cargo derivefmt --manifest-path ./meli/Cargo.toml
git checkout meli/src/conf/overrides.rs
git add --update ./meli/ && git diff --quiet && git diff --cached --quiet
@$(GIT) checkout --quiet meli/src/conf/overrides.rs
@($(GIT) diff --quiet ./meli && $(GIT) diff --cached --quiet ./meli && printf "All ./meli derives are sorted alphabetically.\n") || (printf "Some derives in the ./meli crate are not sorted alphabetically, see diff:\n"; $(GIT) diff HEAD; exit 1)

.PHONY: cargo-derivefmt-tools
cargo-derivefmt-tools:
@printf "cargo-derivefmt-tools\n"
@printf "Checking that derives are sorted alphabetically...\n"
cargo derivefmt --manifest-path ./tools/Cargo.toml
git checkout meli/src/conf/overrides.rs
git add --update ./tools/ && git diff --quiet && git diff --cached --quiet
@$(GIT) checkout --quiet meli/src/conf/overrides.rs
@($(GIT) diff --quiet ./tools && $(GIT) diff --cached --quiet ./tools && printf "All ./tools derives are sorted alphabetically.\n") || (printf "Some derives in the ./tools crate are not sorted alphabetically, see diff:\n"; $(GIT) diff HEAD; exit 1)
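The targets above can also be run locally before pushing. A minimal sketch, assuming this is the `.gitea/Makefile.lint` invoked by the lints workflow further down and that `cargo-msrv` and `cargo-derivefmt` are already installed:

```sh
# Run every lint target the CI lint Makefile defines
# (cargo-msrv, rustfmt, clippy, cargo-derivefmt-*).
make -f .gitea/Makefile.lint all

# Or run a single check, e.g. only the MSRV verification:
make -f .gitea/Makefile.lint cargo-msrv
```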
@@ -12,20 +12,16 @@ RUSTUP_MAX_RETRIES ?= 10
RUST_BACKTRACE ?= short

.PHONY: all
all: cargo-msrv cargo-sort check-debian-changelog
@printf "All completed.\n"
all: cargo-sort check-debian-changelog
@printf "All checks completed.\n"

.PHONY: cargo-msrv
cargo-msrv:
@printf "cargo-msrv\n"
cargo-msrv --output-format json --log-level trace --log-target stdout --path meli verify -- cargo check --all-targets
cargo-msrv --output-format json --log-level trace --log-target stdout --path melib verify -- cargo check --all-targets
.PHONY: cargo-sort
cargo-sort:
@printf "cargo-sort\n"
cargo-sort --check --check-format --grouped --order package,bin,lib,dependencies,features,build-dependencies,dev-dependencies,workspace fuzz
cargo-sort --check --check-format --grouped --order package,bin,lib,dependencies,features,build-dependencies,dev-dependencies,workspace tools
cargo-sort --check --check-format --grouped --order package,bin,lib,dependencies,features,build-dependencies,dev-dependencies,workspace --workspace

.PHONY: check-debian-changelog
check-debian-changelog:
@printf "Check debian/changelog is up-to-date.\n"
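The `cargo-sort` and `cargo-msrv` binaries used here are installed at pinned versions by the workflows below. A hedged sketch of installing the same versions locally (version numbers copied from those workflows, not authoritative):

```sh
# Install the manifest lint tools at the versions the CI workflows pin.
cargo install --version 1.0.9 cargo-sort
cargo install --version 0.15.1 cargo-msrv
```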
109  .gitea/check_dco.sh  Executable file

@@ -0,0 +1,109 @@
#!/usr/bin/env sh
# SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later

# Lint with shellcheck -s sh -S style check_dco.sh

# Notes:
# ======
#
# - We need to make sure git commands do not read from any existing configs to
# prevent surprises like default trailers being added.
# - we need to pass `--always` to `git-format-patch` to check even empty
# commits despite them not being something we would merge. This tripped me up
# when debugging this workflow because I tested it with empty commits. My
# fault.

export GIT_CONFIG_GLOBAL=""
export GIT_CONFIG_SYSTEM=""
export GIT_CONFIG_NOSYSTEM=1

ensure_env_var() {
set | grep -q "^${1}=" || (printf "Environment variable %s missing from process environment, exiting.\n" "${1}"; exit "${2}")
}

ensure_env_var "GITHUB_BASE_REF" 1 || exit $?
ensure_env_var "GITHUB_HEAD_REF" 2 || exit $?

# contains_correct_signoff() {
# author=$(git log --author="$1" --pretty="%an <%ae>" -1)
# git format-patch --always --stdout "${1}^..${1}" | git interpret-trailers --parse | grep -q "^Signed-off-by: ${author}"
# }
contains_signoff() {
GIT_CONFIG_GLOBAL="" git format-patch --always -1 --stdout "${1}" | git interpret-trailers --parse | grep -q "^Signed-off-by: "
}

get_commit_sha() {
if OUT=$(git rev-parse "${1}"); then
printf "%s" "${OUT}"
return
fi
printf "Could not git-rev-parse %s, falling back to HEAD...\n" "${1}" 1>&2
git rev-parse HEAD
}

echo "Debug workflow info:"
echo "Base ref GITHUB_BASE_REF=${GITHUB_BASE_REF}"
echo "Head ref GITHUB_HEAD_REF=${GITHUB_HEAD_REF}"
BASE_REF=$(get_commit_sha "${GITHUB_BASE_REF}")
HEAD_REF=$(get_commit_sha "${GITHUB_HEAD_REF}")
echo "Processed base ref BASE_REF=${BASE_REF}"
echo "Processed head ref HEAD_REF=${HEAD_REF}"

RANGE="${BASE_REF}..${HEAD_REF}"
echo "Range to examine is RANGE=${RANGE}"

if ! SHA_LIST=$(git rev-list "${RANGE}"); then
printf "Could not get commit range %s with git rev-list, bailing out...\n" "${RANGE}"
exit 0
fi

echo "SHA list to examine is SHA_LIST="
echo "---------------------------------------------------------------------"
echo "${SHA_LIST}"
echo "---------------------------------------------------------------------"
echo ""
echo "Starting checks..."

output=$(printf "%s" "${SHA_LIST}" | while read -r commit_sha; do
contains_signoff_result=""

contains_signoff "${commit_sha}"; contains_signoff_result="$?"
if [ "${contains_signoff_result}" -ne 0 ]; then
printf "Commit does not contain Signed-off-by git trailer: %s\n\n" "${commit_sha}"
echo "patch was:"
echo "---------------------------------------------------------------------"
GIT_CONFIG_GLOBAL="" git format-patch --always -1 --stdout "${commit_sha}"
echo "---------------------------------------------------------------------"
echo "trailers were:"
echo "---------------------------------------------------------------------"
GIT_CONFIG_GLOBAL="" git format-patch --always -1 --stdout "${commit_sha}" | git interpret-trailers --parse
echo "---------------------------------------------------------------------"
echo "commit was:"
echo "---------------------------------------------------------------------"
git log --no-decorate --pretty=oneline --abbrev-commit -n 1 "${commit_sha}"
echo "---------------------------------------------------------------------"
fi
done)

if [ "${output}" = "" ]; then
exit 0
fi

echo "One or more of your commits in this Pull Request lack the Developer Certificate of Origin "
echo "which is more commonly known as DCO or the \"Signed-off-by: \" trailer line in the "
echo "git commit message."
echo "For information, documentation, help, check: https://wiki.linuxfoundation.org/dco"

echo "The reported errors were:"
printf "%s\n" "${output}" 1>&2

echo ""
echo "Solution:"
echo ""
echo "- end all your commits with a 'Signed-off-by: User <user@localhost>' line, "
echo " with your own display name and email address."
echo "- Make sure the signoff is separated by the commit message body with an empty line."
echo "- Make sure the signoff is the last line in your commit message."
echo "- Lastly, make sure the signoff matches your git commit author name and email identity."

exit 1
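A minimal sketch of running this script locally, mirroring how the `check_dco.yaml` workflow below invokes it; the branch names here are placeholders:

```sh
# The script expects GITHUB_BASE_REF and GITHUB_HEAD_REF in its environment
# and checks every commit in that range for a Signed-off-by trailer.
GITHUB_BASE_REF="origin/master" GITHUB_HEAD_REF="origin/my-feature-branch" \
    sh ./.gitea/check_dco.sh
```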
@@ -28,12 +28,18 @@ jobs:
strategy:
fail-fast: false
matrix:
build: [linux-amd64, ]
build: [linux-amd64, linux-arm64]
include:
- build: linux-amd64
arch: amd64
os: ubuntu-latest
rust: stable
target: x86_64-unknown-linux-gnu
- build: linux-arm64
arch: arm64
os: ubuntu-latest-arm64
rust: stable
target: aarch64-unknown-linux-gnu
steps:
- uses: actions/checkout@v3
- id: os-deps

@@ -41,14 +47,21 @@ jobs:
run: |
apt-get update
apt-get install -y libdbus-1-dev pkg-config mandoc libssl-dev make
#- id: cache-rustup
# name: Cache Rust toolchain
# uses: https://github.com/actions/cache@v3
# with:
# path: ~/.rustup
# key: toolchain-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-rustup.outputs.cache-hit != 'true' }}
- name: Cache rustup
id: cache-rustup
uses: actions/cache@v4
with:
path: |
~/.rustup/
~/.cargo/env
~/.cargo/config.toml
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: build-workflow-${{ matrix.build }}-rustup
- id: rustup-setup
if: steps.cache-rustup.outputs.cache-hit != 'true'
name: Install rustup and toolchains
shell: bash
run: |

@@ -56,36 +69,43 @@ jobs:
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
rustup toolchain install --profile minimal ${{ matrix.rust }} --target ${{ matrix.target }}
fi
- name: Configure cargo data directory
# After this point, all cargo registry and crate data is stored in
# $GITHUB_WORKSPACE/.cargo_home. This allows us to cache only the files
# that are needed during the build process. Additionally, this works
# around a bug in the 'cache' action that causes directories outside of
# the workspace dir to be saved/restored incorrectly.
run: echo "CARGO_HOME=$(pwd)/.cargo_home" >> $GITHUB_ENV
#- id: cache-cargo
# name: Cache cargo configuration and installations
# uses: https://github.com/actions/cache@v3
# with:
# path: ${{ env.CARGO_HOME }}
# key: cargo-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
- name: Source .cargo/env
shell: bash
run: |
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
- name: Setup Rust target
if: steps.cache-rustup.outputs.cache-hit != 'true'
run: |
mkdir -p "${{ env.CARGO_HOME }}"
cat << EOF > "${{ env.CARGO_HOME }}"/config.toml
[build]
target = "${{ matrix.target }}"
EOF
- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
name: Add test dependencies
- name: Add test dependencies
if: steps.cache-rustup.outputs.cache-hit != 'true'
run: |
cargo install --quiet --version 0.9.54 --target "${{ matrix.target }}" cargo-nextest
- name: Restore build artifacts cache in target dir
id: cache-deps
uses: actions/cache/restore@v4
with:
path: target/
key: workflow-${{ matrix.build }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: cargo-check
run: |
make -f ./.gitea/Makefile.build cargo-check
- if: steps.cache-deps.outputs.cache-hit != 'true'
name: Save build artifacts in target dir
id: save-cache-deps
uses: actions/cache/save@v4
with:
path: target/
key: workflow-${{ matrix.build }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: cargo-test-compiles
if: success() || failure()
run: |

@@ -94,7 +114,7 @@ jobs:
run: |
make -f ./.gitea/Makefile.build cargo-test
- name: rustdoc build
if: success() || failure() # always run even if other steps fail, except when cancelled <https://stackoverflow.com/questions/58858429/how-to-run-a-github-actions-step-even-if-the-previous-step-fails-while-still-f>
if: success() || failure()
run: |
make -f ./.gitea/Makefile.build rustdoc-build
- name: rustdoc tests
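The "Configure cargo data directory" step above redirects CARGO_HOME into the workspace so that only workspace paths need to be cached. A rough local equivalent, for illustration only:

```sh
# Keep cargo registry and crate data inside the checkout, as the workflow does.
export CARGO_HOME="$(pwd)/.cargo_home"
cargo fetch    # populates .cargo_home/ inside the workspace instead of ~/.cargo
```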
@@ -22,13 +22,20 @@ jobs:
strategy:
fail-fast: false
matrix:
build: [linux-amd64, ]
build: [linux-amd64, linux-arm64]
include:
- build: linux-amd64
arch: amd64
os: ubuntu-latest
rust: stable
artifact_name: 'meli-linux-amd64'
target: x86_64-unknown-linux-gnu
- build: linux-arm64
arch: arm64
os: ubuntu-latest-arm64
rust: stable
artifact_name: 'meli-linux-arm64'
target: aarch64-unknown-linux-gnu
steps:
- uses: actions/checkout@v3
- id: os-deps

@@ -36,13 +43,6 @@ jobs:
run: |
apt-get update
apt-get install -y libdbus-1-dev pkg-config mandoc libssl-dev
#- id: cache-rustup
# name: Cache Rust toolchain
# uses: https://github.com/actions/cache@v3
# with:
# path: ~/.rustup
# key: toolchain-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-rustup.outputs.cache-hit != 'true' }}
- id: rustup-setup
name: Install rustup and toolchains
shell: bash

@@ -51,22 +51,9 @@ jobs:
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
rustup toolchain install --profile minimal ${{ matrix.rust }} --target ${{ matrix.target }}
fi
- name: Configure cargo data directory
# After this point, all cargo registry and crate data is stored in
# $GITHUB_WORKSPACE/.cargo_home. This allows us to cache only the files
# that are needed during the build process. Additionally, this works
# around a bug in the 'cache' action that causes directories outside of
# the workspace dir to be saved/restored incorrectly.
run: echo "CARGO_HOME=$(pwd)/.cargo_home" >> $GITHUB_ENV
#- id: cache-cargo
# name: Cache cargo configuration and installations
# uses: https://github.com/actions/cache@v3
# with:
# path: ${{ env.CARGO_HOME }}
# key: cargo-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
- name: Setup Rust target
run: |
mkdir -p "${{ env.CARGO_HOME }}"

@@ -76,15 +63,17 @@ jobs:
EOF
- name: Build binary
run: |
VERSION=$(grep -m1 version meli/Cargo.toml | head -n1 | cut -d'"' -f 2 | head -n1)
echo "VERSION=${VERSION}" >> $GITHUB_ENV
make
mkdir artifacts
mv target/*/release/* target/ || true
mv target/release/* target/ || true
mv target/meli artifacts/
mv target/meli artifacts/meli-${VERSION}-${{ matrix.target }}
- name: Upload Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact_name }}
path: artifacts/meli
name: ${{ matrix.artifact_name }}-${{ env.VERSION }}
path: artifacts/meli-${{ env.VERSION }}-${{ matrix.target }}
if-no-files-found: error
retention-days: 30

@@ -16,13 +16,13 @@ on:
- v*

jobs:
build:
build-debian:
name: Create debian package
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
build: [linux-amd64, ]
build: [linux-amd64, linux-arm64]
include:
- build: linux-amd64
arch: amd64

@@ -30,6 +30,12 @@ jobs:
rust: stable
artifact_name: 'linux-amd64'
target: x86_64-unknown-linux-gnu
- build: linux-arm64
arch: arm64
os: ubuntu-latest-arm64
rust: stable
artifact_name: 'linux-arm64'
target: aarch64-unknown-linux-gnu
steps:
- uses: actions/checkout@v3
- id: os-deps
26  .gitea/workflows/check_dco.yaml  Normal file

@@ -0,0 +1,26 @@
# SPDX-License-Identifier: EUPL-1.2
name: Verify DCO

on:
pull_request:

jobs:
test:
name: Verify DCO signoff on commit messages
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
build: [linux-amd64, ]
include:
- build: linux-amd64
os: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- id: check-dco
shell: sh
name: Check that commit messages end with a Signed-off-by git trailer
run: |
env GITHUB_BASE_REF="origin/${{env.GITHUB_BASE_REF}}" GITHUB_HEAD_REF="origin/${{env.GITHUB_HEAD_REF}}" sh ./.gitea/check_dco.sh
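For contributors, the trailer this workflow checks for is added by git itself; a short example (the commit message is illustrative):

```sh
# -s appends "Signed-off-by: Your Name <you@example.com>" from your git identity.
git commit -s -m "melib: describe your change here"

# Forgot the trailer on the last commit? Amend it in place:
git commit --amend -s --no-edit
```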
@@ -22,7 +22,7 @@ on:
- 'Cargo.lock'

jobs:
test:
lints:
name: Run lints
runs-on: ${{ matrix.os }}
strategy:

@@ -41,14 +41,25 @@ jobs:
run: |
apt-get update
apt-get install -y libdbus-1-dev pkg-config mandoc libssl-dev
#- id: cache-rustup
# name: Cache Rust toolchain
# uses: https://github.com/actions/cache@v3
# with:
# path: ~/.rustup
# key: toolchain-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-rustup.outputs.cache-hit != 'true' }}
- name: Find meli MSRV from meli/Cargo.toml.
run: |
echo MELI_MSRV=$(grep -m1 rust-version meli/Cargo.toml | head -n1 | cut -d'"' -f 2 | head -n1) >> $GITHUB_ENV
printf "Rust MSRV is %s\n" $(grep -m1 rust-version meli/Cargo.toml | head -n1 | cut -d'"' -f 2 | head -n1)
- name: Cache rustup
id: cache-rustup
uses: actions/cache@v4
with:
path: |
~/.rustup/
~/.cargo/env
~/.cargo/config.toml
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: lints-workflow-${{ matrix.build }}-rustup
- id: rustup-setup
if: steps.cache-rustup.outputs.cache-hit != 'true'
name: Install Rustup and toolchains
shell: bash
run: |

@@ -56,53 +67,74 @@ jobs:
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused --location --silent --show-error --fail "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
rustup toolchain install --profile minimal --component clippy,rustfmt --target ${{ matrix.target }} -- "${{ matrix.rust }}"
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
rustup toolchain install --profile minimal --component "rustfmt" --target "${{ matrix.target }}" -- "${{ env.MELI_MSRV }}"
rustup component add rustfmt --toolchain ${{ env.MELI_MSRV }}-${{ matrix.target }}
rustup toolchain install --profile minimal --component clippy,rustfmt --target "${{ matrix.target }}" -- "${{ matrix.rust }}"
rustup component add rustfmt --toolchain ${{ matrix.rust }}-${{ matrix.target }}
rustup default ${{ matrix.rust }}
fi
- name: Configure cargo data directory
# After this point, all cargo registry and crate data is stored in
# $GITHUB_WORKSPACE/.cargo_home. This allows us to cache only the files
# that are needed during the build process. Additionally, this works
# around a bug in the 'cache' action that causes directories outside of
# the workspace dir to be saved/restored incorrectly.
run: echo "CARGO_HOME=$(pwd)/.cargo_home" >> $GITHUB_ENV
#- id: cache-cargo
# name: Cache cargo configuration and installations
# uses: https://github.com/actions/cache@v3
# with:
# path: ${{ env.CARGO_HOME }}
# key: cargo-${{ matrix.os }}-${{ matrix.rust }}
#- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
- name: Source .cargo/env
shell: bash
run: |
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
- name: Setup Rust target
if: steps.cache-rustup.outputs.cache-hit != 'true'
run: |
mkdir -p "${{ env.CARGO_HOME }}"
cat << EOF > "${{ env.CARGO_HOME }}"/config.toml
[build]
target = "${{ matrix.target }}"
EOF
- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
name: Add lint dependencies
- name: Add lint dependencies
if: steps.cache-rustup.outputs.cache-hit != 'true'
shell: bash
run: |
cargo install --quiet --version 1.0.9 --target "${{ matrix.target }}" cargo-sort
cargo install --version 0.15.1 --target "${{ matrix.target }}" cargo-msrv
# "This package is currently implemented using rust-analyzer internals, so cannot be published on crates.io."
RUSTFLAGS="" cargo install --locked --target "${{ matrix.target }}" --git https://github.com/dcchut/cargo-derivefmt --rev 95da8eee343de4adb25850893873b979258aed7f --bin cargo-derivefmt
- name: rustfmt
if: success() || failure()
run: |
make -f .gitea/Makefile.lint rustfmt
- name: Restore build artifacts cache in target dir
id: cache-deps
uses: actions/cache/restore@v4
with:
path: target/
key: workflow-${{ matrix.build }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: clippy
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f .gitea/Makefile.lint clippy
- if: steps.cache-deps.outputs.cache-hit != 'true'
name: Save build artifacts in target dir
id: save-cache-deps
uses: actions/cache/save@v4
with:
path: target/
key: workflow-${{ matrix.build }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: cargo-msrv verify melib MSRV
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f ./.gitea/Makefile.lint cargo-msrv
- name: rustfmt
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f .gitea/Makefile.lint rustfmt
- name: cargo-derivefmt melib
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f .gitea/Makefile.lint cargo-derivefmt-melib
- name: cargo-derivefmt meli
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f .gitea/Makefile.lint cargo-derivefmt-meli
- name: cargo-derivefmt tools
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f .gitea/Makefile.lint cargo-derivefmt-tools
@@ -42,8 +42,19 @@ jobs:
run: |
apt-get update
apt-get install -y mandoc
- name: Find meli MSRV from meli/Cargo.toml.
run: echo MELI_MSRV=$(grep -m1 rust-version meli/Cargo.toml | head -n1 | cut -d'"' -f 2 | head -n1) >> $GITHUB_ENV
- name: Cache rustup
id: cache-rustup
uses: actions/cache@v4
with:
path: |
~/.rustup/
~/.cargo/env
~/.cargo/config.toml
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: manifest_lints-workflow-${{ matrix.build }}-rustup
- id: rustup-setup
name: Install Rustup and toolchains
shell: bash

@@ -53,30 +64,29 @@ jobs:
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
rustup toolchain install --profile minimal --component "rustfmt" --target "${{ matrix.target }}" -- "${{ env.MELI_MSRV }}"
rustup component add rustfmt --toolchain ${{ env.MELI_MSRV }}-${{ matrix.target }}
rustup toolchain install --profile minimal --component "rustfmt" --target "${{ matrix.target }}" -- "${{ matrix.rust }}"
rustup component add rustfmt --toolchain ${{ matrix.rust }}-${{ matrix.target }}
rustup default ${{ matrix.rust }}
fi
- name: Source .cargo/env
shell: bash
run: |
source "${HOME}/.cargo/env"
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
echo "CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}" >> $GITHUB_ENV
- name: Setup Rust target
if: steps.cache-rustup.outputs.cache-hit != 'true'
run: |
mkdir -p "${{ env.CARGO_HOME }}"
cat << EOF > "${{ env.CARGO_HOME }}"/config.toml
[build]
target = "${{ matrix.target }}"
EOF
- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} && matrix.target
name: Add manifest lint dependencies
- name: Add manifest lint dependencies
if: steps.cache-rustup.outputs.cache-hit != 'true'
run: |
source "${HOME}/.cargo/env"
cargo install --quiet --version 1.0.9 --target "${{ matrix.target }}" cargo-sort
cargo install --quiet --version 0.15.1 --target "${{ matrix.target }}" cargo-msrv
- name: cargo-msrv verify melib MSRV
if: success() || failure()
run: |
source "${HOME}/.cargo/env"
make -f ./.gitea/Makefile.manifest-lint cargo-msrv
- name: cargo-sort
if: success() || failure()
run: |
11  BUILD.md

@@ -20,7 +20,13 @@ You can build and run `meli` with one command: `cargo run --release`.

## Build features

Some functionality is held behind "feature gates", or compile-time flags. The following list explains each feature's purpose:
Some functionality is held behind "feature gates", or compile-time flags.

Cargo features for `meli` are documented in its [`README.md`](./meli/README.md) file.

Cargo features for `melib` are documented in its [`README.md`](./melib/README.md) file.

The following list explains each feature's purpose:

- `gpgme` enables GPG support via `libgpgme` (on by default)
- `dbus-notifications` enables showing notifications using `dbus` (on by default)

@@ -30,9 +36,6 @@ Some functionality is held behind "feature gates", or compile-time flags. The fo
- `cli-docs` includes the manpage documentation compiled by either `mandoc` or `man` binary to plain text in `meli`'s command line. Embedded documentation can be viewed with the subcommand `meli man [PAGE]` (on by default).
- `static` and `*-static` bundle C libraries in dependencies so that you don't need them installed in your system (on by default).

Though not a feature, the presence of the environment variable `UNICODE_REGENERATE_TABLES` in compile-time of the `melib` crate will force the regeneration of unicode tables.
Otherwise the tables are included with the source code, and there's no real reason to regenerate them unless you intend to modify the code or update to a new Unicode version.

## Build Debian package (*deb*)

Building with Debian's packaged cargo might require the installation of these two packages: `librust-openssl-sys-dev librust-libdbus-sys-dev`
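A hedged example of toggling the feature gates listed in the BUILD.md text above; the feature names come from that list, while the exact default set lives in `meli/Cargo.toml`:

```sh
# Build meli with only a chosen subset of features instead of the defaults.
cargo build --release --no-default-features --features gpgme,dbus-notifications,cli-docs
```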
111  CHANGELOG.md

@@ -21,6 +21,115 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

<!-- ### Miscellaneous Tasks -->

## [v0.8.10] - 2024-12-06

Highlights:

- added `pipe-attachment` command
- added sample scripts for using `meli` as a `mailto` scheme handler in `contrib/`
- fixed GPG encryption with libgpgme

Contributors in alphabetical order:

- Manos Pitsidianakis
- Matthias Geiger

### Added

- [**`5e77821f`**](https://git.meli-email.org/meli/meli/commit/5e77821f781b9f80f62df0a74f33f899a7cd8d92) `mail/view: add pipe-attachment command` in PR [`#540` "mail/view: add pipe-attachment command"](https://git.meli-email.org/meli/meli/pulls/540)
- [**`fa896f6b`**](https://git.meli-email.org/meli/meli/commit/fa896f6bb9dccd83952e2db57f78abdf0b514ffe) `contrib: add mailto: scheme handler scripts`
- [**`00ce9660`**](https://git.meli-email.org/meli/meli/commit/00ce9660ef783289fb35d0c7b23bd164f8f6efda) `melib/backends: add as_any/as_any_mut methods to BackendMailbox`
- [**`fd243fa5`**](https://git.meli-email.org/meli/meli/commit/fd243fa5abfbee704f480caef0831269a65f2762) `maildir: add mailbox creation tests`
- [**`de65eec3`**](https://git.meli-email.org/meli/meli/commit/de65eec3a9b42e4a82542dc4744f8222bbd5e23b) `meli/accounts: add mailbox_by_path() tests` in PR [`#535` "Rework maildir mailbox path logic, add tests"](https://git.meli-email.org/meli/meli/pulls/535)
- [**`6b363601`**](https://git.meli-email.org/meli/meli/commit/6b3636013c1e827491f776b0086d28c072cbd518) `melib/gpgme: impl Display for gpgme::Key`

### Bug Fixes

- [**`60c90d75`**](https://git.meli-email.org/meli/meli/commit/60c90d75497be47c3c7d5e6b06fcfc49577f9eaf) `melib/attachments: ensure MIME boundary prefixed with CRLF`
- [**`3433c5c3`**](https://git.meli-email.org/meli/meli/commit/3433c5c3d5924bfd4d980abce6e9dc0fbac8df87) `compose/pgp: rewrite key selection logic` in PR [`#541` "More gpgme/PGP fixes again"](https://git.meli-email.org/meli/meli/pulls/541)
- [**`12de82e7`**](https://git.meli-email.org/meli/meli/commit/12de82e7b40b24ebdb53a67e044b3cb9b2150134) `melib/conf: fix mutt_alias_file not being validated` in PR [`#550` "Remove sealed_test dependency"](https://git.meli-email.org/meli/meli/pulls/550)
- [**`c8e055a7`**](https://git.meli-email.org/meli/meli/commit/c8e055a718703004505ed690f7a3e36fa18d8ada) `Fix version migrations being triggered backwards` in PR [`#557` "Fix version migrations being triggered backwards"](https://git.meli-email.org/meli/meli/pulls/557)
- [**`efab99fd`**](https://git.meli-email.org/meli/meli/commit/efab99fda24e8b6b22804c3029d757579a3008a6) `terminal: check for NO_COLOR env var without unicode validation`
- [**`36a63e88`**](https://git.meli-email.org/meli/meli/commit/36a63e8878d9bfdba7994fb193294faa1462fafc) `melib/maildir: rewrite create_mailbox()`
- [**`fcab855f`**](https://git.meli-email.org/meli/meli/commit/fcab855fdad4af20f52f9f318551c1377fe5fbb6) `view: ensure envelope headers are always populated` in PR [`#538` "view: ensure envelope headers are always populated"](https://git.meli-email.org/meli/meli/pulls/538)
- [**`84564f44`**](https://git.meli-email.org/meli/meli/commit/84564f44a3c5823e11f1d6097c772667f1c62df2) `mailcap: don't drop File before opening it` in PR [`#552` "mailcap: don't drop File before opening it"](https://git.meli-email.org/meli/meli/pulls/552)

### Changes

- [**`ed85da51`**](https://git.meli-email.org/meli/meli/commit/ed85da51dff4a1de5622bae234381bbbf6603271) `Remove sealed_test dependency`

### Refactoring

- [**`03df2ac1`**](https://git.meli-email.org/meli/meli/commit/03df2ac12894ef11dfb51af1a634ef87a36daa9b) `meli/utilities: add print utilities for tests`
- [**`18e9d5c1`**](https://git.meli-email.org/meli/meli/commit/18e9d5c148f2126647a8e17713af37d456cce9f8) `conf.rs: impl From<melib::AccountSettings> for AccountConf`
- [**`1f2fec19`**](https://git.meli-email.org/meli/meli/commit/1f2fec198fb5127fcfc7bd3e1004b496b3be7de8) `Fix 1.83.0 lints` in PR [`#536` "CI: Add action to check for DCO signoffs in PRs"](https://git.meli-email.org/meli/meli/pulls/536)
- [**`192ecea2`**](https://git.meli-email.org/meli/meli/commit/192ecea2a43363fe2ffc0decefe22a703d8c53ca) `compose/gpg.rs: Fix msrv regression`

### Documentation

- [**`4a61a4b8`**](https://git.meli-email.org/meli/meli/commit/4a61a4b8577c1e33cdcd46cb74a18ddafcf037fb) `melib: include README.md as preamble of crate rustdocs`
- [**`80e53471`**](https://git.meli-email.org/meli/meli/commit/80e53471786a6986b71d7c8922472cfa5bf5f571) `BUILD.md: move melib specific stuff to melib/README.md`
- [**`91a17ece`**](https://git.meli-email.org/meli/meli/commit/91a17ece5c7f9651e57929487021fa2ed553d2c6) `melib/README.md: mention sqlite3-static feature`
- [**`b77a691b`**](https://git.meli-email.org/meli/meli/commit/b77a691b7d6f6373620847f492791fe0c694fa2a) `meli/README.md: Add cargo features section` in PR [`#549` "Document cargo features in READMEs"](https://git.meli-email.org/meli/meli/pulls/549)
- [**`91dc271d`**](https://git.meli-email.org/meli/meli/commit/91dc271d74e946790aa8a0818cc8c0db9f8fc0bb) `contrib: add a README.md file`
- [**`2e900be6`**](https://git.meli-email.org/meli/meli/commit/2e900be69898c9d734e24ed8e49635d2b1c7a97e) `contrib/README.md: add section about oauth2.py`
- [**`07812d2c`**](https://git.meli-email.org/meli/meli/commit/07812d2c8581b2292c3755ce5e76d0a521376f08) `contrib/README.md: elaborate a bit about mailto` in PR [`#545` "Add external mailto: handler support via scripts in contrib"](https://git.meli-email.org/meli/meli/pulls/545)
- [**`e784e8d2`**](https://git.meli-email.org/meli/meli/commit/e784e8d239f948c279200d1b28b2fd0326dfa96f) `scripts: add markdown_doc_lints.py`

### Miscellaneous Tasks

### Continuous Integration

- [**`77629851`**](https://git.meli-email.org/meli/meli/commit/776298511bd9c9a868ceecf6eaee93f9df4821fa) `CI: Add action to check for DCO signoffs in PRs`
- [**`f944ebed`**](https://git.meli-email.org/meli/meli/commit/f944ebed813aaa36c11506a783f951f031e50c25) `CI: Add error msg when cargo-derivefmt check fails`
- [**`d49344f9`**](https://git.meli-email.org/meli/meli/commit/d49344f9d855fb2d0fa1d74ee7bc74ca1ad974fe) `CI: Move MSRV checks from manifest to lints` in PR [`#553` "ci-workflow-fixes"](https://git.meli-email.org/meli/meli/pulls/553)
- [**`ece6bfc2`**](https://git.meli-email.org/meli/meli/commit/ece6bfc2ce4daf8a6d7de70bed4a1e7dfbec3ff6) `CI: non-zero exit if cargo-derivefmt-* targets fail`
- [**`2257b91b`**](https://git.meli-email.org/meli/meli/commit/2257b91b403003a91a9658e139b784d93d7ffe70) `CI: add actions/cache steps` in PR [`#554` "CI: add actions/cache steps"](https://git.meli-email.org/meli/meli/pulls/554)
- [**`a1c9524f`**](https://git.meli-email.org/meli/meli/commit/a1c9524f7405321f5e8d5ab2490719a327a0789b) `CI: fix check_dco.sh not working with other repos` in PR [`#555` "CI: fix check_dco.sh not working with other repos"](https://git.meli-email.org/meli/meli/pulls/555)

## [v0.8.9](https://git.meli-email.org/meli/meli/releases/tag/v0.8.9) - 2024-11-27

This is mostly a fixups release.

### Added

- [**`cf16bf65`**](https://git.meli-email.org/meli/meli/commit/cf16bf65f7d031084c73f070ee40efbfd40720e6) `meli/sqlite3: add tests for reindexing`
- [**`a389772d`**](https://git.meli-email.org/meli/meli/commit/a389772d96d845a1a009e54f1157460d640c1104) `accounts: suggest tips on mailbox_by_path error`

### Bug Fixes

- [**`25f0a3f8`**](https://git.meli-email.org/meli/meli/commit/25f0a3f814ff40b8e218fca7ba099a84399a2a1b) `conf/terminal: fix serde of ProgressSpinnerSequence`
- [**`c375b48e`**](https://git.meli-email.org/meli/meli/commit/c375b48ebf25065c495e0740f85a74db9dd6facd) `terminal: fix Synchronized Output response parsed as input` in PR [`#523` "terminal: fix Synchronized Output response parsed as input"](https://git.meli-email.org/meli/meli/pulls/523)
- [**`b7e215f9`**](https://git.meli-email.org/meli/meli/commit/b7e215f9c238f8364e2a1f0d10ac668d0cfe91ad) `melib/utils: fix test_fd_locks() on platforms without OFD support` in PR [`#524` "melib/utils: fix test_fd_locks() on platforms without OFD support"](https://git.meli-email.org/meli/meli/pulls/524)
- [**`25c32a6b`**](https://git.meli-email.org/meli/meli/commit/25c32a6b95dce00f6715115796e27bff0fcee413) `meli/docs/meli.conf.examples.5: fix .Dt macro arguments`
- [**`18ae5848`**](https://git.meli-email.org/meli/meli/commit/18ae58483694119985d9ce7b8f384798114a8d1e) `meli: fix reindex of previously indexed account with sqlite3 backend`
- [**`13e917d9`**](https://git.meli-email.org/meli/meli/commit/13e917d97b2c8ff8da403dc415eb1dffa8491a9b) `Fix some compilation errors with cfg feature attrs` in PR [`#531` "accounts: suggest tips on mailbox_by_path error"](https://git.meli-email.org/meli/meli/pulls/531)
- [**`8c176d38`**](https://git.meli-email.org/meli/meli/commit/8c176d38408a822d9b127f282f9c43fb1bada8d7) `contacts/editor: fix crash on saving contact` in PR [`#532` "contacts/editor: fix crash on saving contact"](https://git.meli-email.org/meli/meli/pulls/532)
- [**`fb5a88c2`**](https://git.meli-email.org/meli/meli/commit/fb5a88c22c7c7107f1a124d721d493be324ea25e) `melib/collection: ensure mailbox exists when inserting new envelopes` in PR [`#529` "Small account stuff fixes"](https://git.meli-email.org/meli/meli/pulls/529)

### Changes

- [**`7f8f1cf6`**](https://git.meli-email.org/meli/meli/commit/7f8f1cf65f644090ea450ecf9423585cc89b4a65) `melib/gpgme bindings renewal` in PR [`#533` "melib/gpgme bindings renewal"](https://git.meli-email.org/meli/meli/pulls/533)
- [**`9b7825bc`**](https://git.meli-email.org/meli/meli/commit/9b7825bc59fb9c86dda4f86c1116517ae3e88514) `Update futures-util dep, remove stderrlog dep`
- [**`4be69360`**](https://git.meli-email.org/meli/meli/commit/4be6936026bdf87563b3e6832d01fd9b112a414e) `Remove obsolete "encoding" dependency` in PR [`#530` "Remove/update obsolete dependencies"](https://git.meli-email.org/meli/meli/pulls/530)

### Refactoring

- [**`5af6e059`**](https://git.meli-email.org/meli/meli/commit/5af6e059b78ca67594ce773d935169c26ce31a70) `meli/accounts: use Arc<str> for account name`
- [**`567270e1`**](https://git.meli-email.org/meli/meli/commit/567270e177253cfbf8cee2df9e9a8f981ca9ab97) `melib: use Vec instead of SmallVec for search results`
- [**`2bd8d7ba`**](https://git.meli-email.org/meli/meli/commit/2bd8d7ba01df4eaf01488c2f01fd95905916c0b9) `conf/tests.rs: Rename test functions to follow path convention`

### Documentation

- [**`97242482`**](https://git.meli-email.org/meli/meli/commit/972424829c29d9cfb6d45d589e17fb30a9ff52c6) `meli/docs: add meli.conf.examples to CLI and tests`
- [**`0f096338`**](https://git.meli-email.org/meli/meli/commit/0f0963389913736b8a8a73b3928abeb1d59a5898) `README.md: Update ways to install, add gitlab mirror link` in PR [`#528` "Integrate `meli.conf.examples.5` into CLI and build, also update README with installation instructions"](https://git.meli-email.org/meli/meli/pulls/528)

### Continuous Integration

- [**`630df308`**](https://git.meli-email.org/meli/meli/commit/630df3083f794a6551b1006ac57b9ce20b92a329) `CI: Add arm64 runners in job matrices` in PR [`#527` "CI: Add arm64 runners in job matrices"](https://git.meli-email.org/meli/meli/pulls/527)
- [**`49ecbb56`**](https://git.meli-email.org/meli/meli/commit/49ecbb56f7a5a6c5d9b9659215348132e1c71ac4) `CI: .gitea/Makefile.lint: check if nightly exists`

## [v0.8.8](https://git.meli-email.org/meli/meli/releases/tag/v0.8.8) - 2024-11-19

*WARNING*: This release contains a breaking change in the configuration file: a

@@ -1291,3 +1400,5 @@ Notable changes:
[v0.8.6]: https://git.meli-email.org/meli/meli/releases/tag/v0.8.6
[v0.8.7]: https://git.meli-email.org/meli/meli/releases/tag/v0.8.7
[v0.8.8]: https://git.meli-email.org/meli/meli/releases/tag/v0.8.8
[v0.8.9]: https://git.meli-email.org/meli/meli/releases/tag/v0.8.9
[v0.8.10]: https://git.meli-email.org/meli/meli/releases/tag/v0.8.10
193  Cargo.lock  generated

@@ -228,17 +228,6 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"

[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi 0.1.19",
"libc",
"winapi",
]

[[package]]
name = "autocfg"
version = "1.3.0"

@@ -360,9 +349,7 @@ checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
dependencies = [
"android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-targets 0.52.6",
]

@@ -578,70 +565,6 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"

[[package]]
name = "encoding"
version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec"
dependencies = [
"encoding-index-japanese",
"encoding-index-korean",
"encoding-index-simpchinese",
"encoding-index-singlebyte",
"encoding-index-tradchinese",
]

[[package]]
name = "encoding-index-japanese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91"
dependencies = [
"encoding_index_tests",
]

[[package]]
name = "encoding-index-korean"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81"
dependencies = [
"encoding_index_tests",
]

[[package]]
name = "encoding-index-simpchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7"
dependencies = [
"encoding_index_tests",
]

[[package]]
name = "encoding-index-singlebyte"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a"
dependencies = [
"encoding_index_tests",
]

[[package]]
name = "encoding-index-tradchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18"
dependencies = [
"encoding_index_tests",
]

[[package]]
name = "encoding_index_tests"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569"

[[package]]
name = "encoding_rs"
version = "0.8.34"

@@ -782,12 +705,6 @@ dependencies = [
"percent-encoding",
]

[[package]]
name = "fs_extra"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"

[[package]]
name = "fsevent-sys"
version = "4.1.0"

@@ -814,9 +731,9 @@ dependencies = [

[[package]]
name = "futures-channel"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
dependencies = [
"futures-core",
"futures-sink",

@@ -824,9 +741,9 @@ dependencies = [

[[package]]
name = "futures-core"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"

[[package]]
name = "futures-executor"

@@ -841,9 +758,9 @@ dependencies = [

[[package]]
name = "futures-io"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"

[[package]]
name = "futures-lite"

@@ -875,9 +792,9 @@ dependencies = [

[[package]]
name = "futures-macro"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",

@@ -886,21 +803,21 @@ dependencies = [

[[package]]
name = "futures-sink"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"

[[package]]
name = "futures-task"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"

[[package]]
name = "futures-util"
version = "0.3.30"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
"futures-channel",
"futures-core",

@@ -953,15 +870,6 @@ dependencies = [
"unicode-segmentation",
]

[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]

[[package]]
name = "hermit-abi"
version = "0.4.0"

@@ -1264,8 +1172,9 @@ dependencies = [

[[package]]
name = "meli"
version = "0.8.8"
version = "0.8.10"
dependencies = [
"aho-corasick",
"assert_cmd",
"async-task",
"bitflags 2.6.0",

@@ -1284,7 +1193,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"sealed_test",
"rusty-fork",
"serde",
"serde_derive",
"serde_json",

@@ -1301,7 +1210,7 @@ dependencies = [

[[package]]
name = "melib"
version = "0.8.8"
version = "0.8.10"
dependencies = [
"async-fn-stream",
"async-io",

@@ -1310,7 +1219,6 @@ dependencies = [
"cfg-if",
"chrono",
"data-encoding",
"encoding",
"encoding_rs",
"flate2",
"futures",

@@ -1327,7 +1235,7 @@ dependencies = [
"polling 3.7.3",
"regex",
"rusqlite",
"sealed_test",
"rusty-fork",
"serde",
"serde_derive",
"serde_json",

@@ -1335,7 +1243,6 @@ dependencies = [
"smallvec",
"smol",
"socket2",
"stderrlog",
"tempfile",
"toml",
"unicode-segmentation",

@@ -1664,7 +1571,7 @@ checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511"
dependencies = [
"cfg-if",
"concurrent-queue",
"hermit-abi 0.4.0",
"hermit-abi",
"pin-project-lite",
"rustix",
"tracing",

@@ -1862,10 +1769,10 @@ dependencies = [
]

[[package]]
name = "rusty-forkfork"
version = "0.4.0"
name = "rusty-fork"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ce85af4dfa2fb0c0143121ab5e424c71ea693867357c9159b8777b59984c218"
checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
dependencies = [
"fnv",
"quick-error",

@@ -1897,28 +1804,6 @@ dependencies = [
"windows-sys 0.52.0",
]

[[package]]
name = "sealed_test"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a1867f8f005bd7fb73c367e2e45dd628417906a2ca27597fe59cbf04279a222"
dependencies = [
"fs_extra",
"rusty-forkfork",
"sealed_test_derive",
"tempfile",
]

[[package]]
name = "sealed_test_derive"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77253fb2d4451418d07025826028bcb96ee42d3e58859689a70ce62908009db6"
dependencies = [
"quote",
"syn 2.0.76",
]

[[package]]
name = "security-framework"
version = "2.11.1"

@@ -2080,19 +1965,6 @@ dependencies = [
"windows-sys 0.52.0",
]

[[package]]
name = "stderrlog"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69a26bbf6de627d389164afa9783739b56746c6c72c4ed16539f4ff54170327b"
dependencies = [
"atty",
"chrono",
"log",
"termcolor",
"thread_local",
]

[[package]]
name = "structopt"
version = "0.3.26"

@@ -2163,15 +2035,6 @@ dependencies = [
"windows-sys 0.59.0",
]

[[package]]
name = "termcolor"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
dependencies = [
"winapi-util",
]

[[package]]
name = "termion"
version = "1.5.6"

@@ -2219,16 +2082,6 @@ dependencies = [
"syn 2.0.76",
]

[[package]]
name = "thread_local"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
dependencies = [
"cfg-if",
"once_cell",
]

[[package]]
name = "time"
version = "0.3.36"
34  Makefile

@@ -25,7 +25,7 @@ CARGO_ARGS ?=
RUSTFLAGS ?= -D warnings -W unreachable-pub -W rust-2021-compatibility
CARGO_SORT_BIN = cargo-sort
CARGO_HACK_BIN = cargo-hack
PRINTF = /usr/bin/printf
PRINTF := `command -v printf`

# Options
PREFIX ?= /usr/local

@@ -55,7 +55,8 @@ YELLOW ?= `[ -z $${NO_COLOR+x} ] && ([ -z $${TERM} ] && echo "" || tput setaf 3)

.PHONY: meli
meli: check-deps
${CARGO_BIN} build ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --release --bin meli
@echo ${CARGO_BIN} build ${CARGO_ARGS} ${CARGO_COLOR}--target-dir=\""${CARGO_TARGET_DIR}"\" ${FEATURES} --release --bin meli
@${CARGO_BIN} build ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --release --bin meli

.PHONY: help
help:

@@ -75,7 +76,8 @@ help:
@echo " - ${BOLD}deb-dist${ANSI_RESET} (builds debian package in the parent directory)"
@echo " - ${BOLD}distclean${ANSI_RESET} (cleans distribution build artifacts)"
@echo " - ${BOLD}build-rustdoc${ANSI_RESET} (builds rustdoc documentation for all packages in \$$CARGO_TARGET_DIR)"
@echo "\nENVIRONMENT variables of interest:"
@echo ""
@echo "ENVIRONMENT variables of interest:"
@$(PRINTF) "* MELI_FEATURES "
@[ -z $${MELI_FEATURES+x} ] && echo "unset" || echo "= ${UNDERLINE}"$${MELI_FEATURES}${ANSI_RESET}
@$(PRINTF) "* PREFIX "

@@ -89,20 +91,26 @@ help:
@$(PRINTF) "* NO_MAN "
@[ $${NO_MAN+x} ] && echo "set" || echo "unset"
@$(PRINTF) "* NO_COLOR "
@[ $${NO_COLOR+x} ] && echo "set" || echo "unset"
@([ $${NO_COLOR+x} ] && [ "$${NO_COLOR}" != "" ] && echo "set") || echo "unset"
@echo "* CARGO_BIN = ${UNDERLINE}${CARGO_BIN}${ANSI_RESET}"
@$(PRINTF) "* CARGO_ARGS "
@[ -z $${CARGO_ARGS+x} ] && echo "unset" || echo "= ${UNDERLINE}"$${CARGO_ARGS}${ANSI_RESET}
@([ -z "${CARGO_ARGS}" ] && echo "unset") || echo = ${UNDERLINE}${CARGO_ARGS}${ANSI_RESET}
@$(PRINTF) "* RUSTFLAGS = "
@([ -z "${RUSTFLAGS}" ] && echo "unset") || echo = ${UNDERLINE}${RUSTFLAGS}${ANSI_RESET}
@$(PRINTF) "* AUTHOR (for deb-dist) "
@[ -z $${AUTHOR+x} ] && echo "unset" || echo "= ${UNDERLINE}"$${AUTHOR}${ANSI_RESET}
@echo "* MIN_RUSTC = ${UNDERLINE}${MIN_RUSTC}${ANSI_RESET}"
@echo "* VERSION = ${UNDERLINE}${VERSION}${ANSI_RESET}"
@echo "* GIT_COMMIT = ${UNDERLINE}${GIT_COMMIT}${ANSI_RESET}"
@#@echo "* CARGO_COLOR = ${CARGO_COLOR}"
@echo "* CARGO_TARGET_DIR = ${CARGO_TARGET_DIR}"
@echo ""
@echo "Built-in/binary utilities"
@echo "* PRINTF = ${UNDERLINE}${PRINTF}${ANSI_RESET}"

.PHONY: check
check: check-tagrefs
RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} check ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --tests --examples --benches --bins
@echo RUSTFLAGS=\"'${RUSTFLAGS}'\" ${CARGO_BIN} check ${CARGO_ARGS} ${CARGO_COLOR}--target-dir=\""${CARGO_TARGET_DIR}"\" ${FEATURES} --all --tests --examples --benches --bins
@RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} check ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --tests --examples --benches --bins

.PHONY: fmt
fmt:

@@ -111,15 +119,18 @@ fmt:

.PHONY: lint
lint:
RUSTFLAGS='${RUSTFLAGS}' $(CARGO_BIN) clippy --no-deps ${FEATURES} --all --tests --examples --benches --bins
@echo RUSTFLAGS=\"'${RUSTFLAGS}'\" $(CARGO_BIN) clippy --no-deps ${FEATURES} --all --tests --examples --benches --bins
@RUSTFLAGS='${RUSTFLAGS}' $(CARGO_BIN) clippy --no-deps ${FEATURES} --all --tests --examples --benches --bins

.PHONY: test
test: test-docs
RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --tests --examples --benches --bins
@echo RUSTFLAGS=\"'${RUSTFLAGS}'\" ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir=\""${CARGO_TARGET_DIR}"\" ${FEATURES} --all --tests --examples --benches --bins
@RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --tests --examples --benches --bins
|
||||
|
||||
.PHONY: test-docs
|
||||
test-docs:
|
||||
RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --doc
|
||||
@echo RUSTFLAGS=\"'${RUSTFLAGS}'\" ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir=\""${CARGO_TARGET_DIR}"\" ${FEATURES} --all --doc
|
||||
@RUSTFLAGS='${RUSTFLAGS}' ${CARGO_BIN} test ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" ${FEATURES} --all --doc
|
||||
|
||||
.PHONY: test-feature-permutations
|
||||
test-feature-permutations:
|
||||
|
@ -204,7 +215,8 @@ deb-dist:
|
|||
|
||||
.PHONY: build-rustdoc
|
||||
build-rustdoc:
|
||||
RUSTDOCFLAGS="--crate-version ${VERSION}_${GIT_COMMIT}_${DATE}" ${CARGO_BIN} doc ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" --all-features --no-deps --workspace --document-private-items --open
|
||||
@echo RUSTDOCFLAGS=\""--crate-version ${VERSION}_${GIT_COMMIT}_${DATE}"\" ${CARGO_BIN} doc ${CARGO_ARGS} ${CARGO_COLOR}--target-dir=\""${CARGO_TARGET_DIR}"\" --all-features --no-deps --workspace --document-private-items --open
|
||||
@RUSTDOCFLAGS="--crate-version ${VERSION}_${GIT_COMMIT}_${DATE}" ${CARGO_BIN} doc ${CARGO_ARGS} ${CARGO_COLOR}--target-dir="${CARGO_TARGET_DIR}" --all-features --no-deps --workspace --document-private-items --open
|
||||
|
||||
.PHONY: check-tagrefs
|
||||
check-tagrefs:
|
||||
|
|
101 README.md
|
@ -2,14 +2,16 @@
|
|||
|
||||
**BSD/Linux/macos terminal email client with support for multiple accounts and Maildir / mbox / notmuch / IMAP / JMAP / NNTP (Usenet).**
|
||||
|
||||
Try an [old online interactive web demo](https://meli-email.org/wasm2.html "online interactive web demo") powered by WebAssembly!
|
||||
Try an [old, outdated but online and interactive web demo](https://meli-email.org/wasm2.html "online interactive web demo") powered by WebAssembly!
|
||||
|
||||
* `#meli` on OFTC IRC | [mailing lists](https://lists.meli-email.org/)
|
||||
* Repository:
|
||||
- Main <https://git.meli-email.org/meli/meli> Report bugs and/or feature requests in [meli's issue tracker](https://git.meli-email.org/meli/meli/issues "meli gitea issue tracker")
|
||||
- Official mirror <https://codeberg.org/meli/meli>
|
||||
- Official mirror <https://github.com/meli/meli>
|
||||
- Official mirror <https://ayllu-forge.org/meli/meli>
|
||||
* `#meli` on OFTC IRC
|
||||
* [Mailing lists](https://lists.meli-email.org/)
|
||||
* Main repository <https://git.meli-email.org/meli/meli> Report bugs and/or feature requests in [meli's issue tracker](https://git.meli-email.org/meli/meli/issues "meli gitea issue tracker")<details><summary>Official git mirrors</summary>
|
||||
- <https://codeberg.org/meli/meli>
|
||||
- <https://github.com/meli/meli>
|
||||
- <https://ayllu-forge.org/meli/meli>
|
||||
- <https://gitlab.com/meli-project/meli>
|
||||
</details>
|
||||
|
||||
**Table of contents**:
|
||||
|
||||
|
@ -24,23 +26,55 @@ Try an [old online interactive web demo](https://meli-email.org/wasm2.html "onli
|
|||
|
||||
## Install
|
||||
|
||||
- `cargo install meli` or `cargo install --git https://git.meli-email.org/meli/meli.git meli`
|
||||
- [pkgsrc](https://pkgsrc.se/mail/meli)
|
||||
- [openbsd ports](https://openports.pl/path/mail/meli)
|
||||
- [Pre-built debian package, static binaries](https://github.com/meli/meli/releases/ "github releases for meli")
|
||||
- [Nix](https://search.nixos.org/packages?show=meli&query=meli&from=0&size=30&sort=relevance&channel=unstable#disabled "nixos package search results for 'meli'")
|
||||
- [MacPorts](https://ports.macports.org/port/meli/)
|
||||
<a href="https://repology.org/project/meli/versions">
|
||||
<img src="https://repology.org/badge/vertical-allrepos/meli.svg" alt="Packaging status table by repology.org" align="right">
|
||||
</a>
|
||||
|
||||
- `cargo install meli` or `cargo install --git https://git.meli-email.org/meli/meli.git meli` [crates.io link](https://crates.io/crates/meli)
|
||||
- Official Debian packages <https://packages.debian.org/trixie/meli>
|
||||
- AUR (archlinux) <https://aur.archlinux.org/packages/meli>
|
||||
- NetBSD with pkgsrc <https://pkgsrc.se/mail/meli>
|
||||
- OpenBSD ports <https://openports.pl/path/mail/meli>
|
||||
- macOS with MacPorts <https://ports.macports.org/port/meli/>
|
||||
- Nix with Nixpkgs <https://search.nixos.org/packages?query=meli>
|
||||
- [Pre-built debian package, static binaries](https://github.com/meli/meli/releases/ "github releases for meli") for <code>amd64</code>, <code>arm64</code> architectures
|
||||
|
||||
## Build
|
||||
|
||||
Run `cargo build --release --bin meli` or `make`.
|
||||
Run `make` or `cargo build --release --bin meli`.
|
||||
|
||||
For detailed building instructions, see [`BUILD.md`](./BUILD.md)
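As a rough illustration, a from-source build and first run might look like the sketch below; it assumes a stable Rust toolchain is already installed and is only a starting point, with `BUILD.md` remaining the authoritative reference:

```sh
# Sketch: clone, build and run meli from source.
# Assumes rustc/cargo are already installed; see BUILD.md for the full steps.
git clone https://git.meli-email.org/meli/meli.git
cd meli
cargo build --release --bin meli
./target/release/meli --help
```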
|
||||
|
||||
## Quick start
|
||||
### Cargo Compile-time Features
|
||||
|
||||
<table>
|
||||
<tr><td>
|
||||
`meli` supports opting in and out of features at compile time with cargo features.
|
||||
|
||||
The contents of the `default` feature are:
|
||||
|
||||
```toml
|
||||
default = ["sqlite3", "notmuch", "smtp", "dbus-notifications", "gpgme", "cli-docs", "jmap", "static"]
|
||||
```
|
||||
|
||||
A list of all the features and a description for each follows:
|
||||
|
||||
| Feature flag | Dependencies | Notes |
|
||||
|---------------------------------------------------------------|----------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| <a name="notmuch-feature">`notmuch`</a> | `maildir` feature | Provides the *notmuch* backend |
|
||||
| <a name="jmap-feature">`jmap`</a> | `http` feature, `url` crate with `serde` feature | Provides the *JMAP* backend |
|
||||
| <a name="smtp-feature">`smtp`</a> | `tls` feature | Integrated async *SMTP* client |
|
||||
| <a name="sqlite3-feature">`sqlite3`</a> | `rusqlite` crate with `bundled-full` feature | Used in caches |
|
||||
| <a name="sqlite3-static-feature">`sqlite3-static`</a> | `rusqlite` crate with `bundled-full` feature | Same as `sqlite3` feature but provided for consistency and in case `sqlite3` feature stops bundling libsqlite3 statically in the future. |
|
||||
| <a name="smtp-trace-feature">`smtp-trace`</a> | `smtp` feature | Connection trace logs on the `trace` logging level |
|
||||
| <a name="gpgme-feature">`gpgme`</a> | | *GPG* use by dynamically loading `libgpgme.so` |
|
||||
| <a name="tls-static-feature">`tls-static`</a> | `native-tls` crate with `vendored` feature | Links with `OpenSSL` statically where it's used |
|
||||
| <a name="http-static-feature">`http-static`</a> | `isahc` crate with `static-curl` feature | Links with `curl` statically |
|
||||
| <a name="dbus-notifications-feature">`dbus-notifications`</a> | `notify-rust` dependency | Uses DBus notifications |
|
||||
| <a name="dbus-static-feature">`dbus-static`</a> | `notify-rust` dependency and enableds its `d_vendored` feature | Includes the dbus library statically. |
|
||||
| <a name="cli-docs-feature">`cli-docs`</a> | `flate2` dependency | Includes the manpage documentation compiled by either `mandoc` or `man` binary to plain text in `meli`'s command line. Embedded documentation can be viewed with the subcommand `meli man [PAGE]` |
|
||||
| <a name="libz-static-feature">`libz-static`</a> | `libz-sys` dependency and enables its `static` feature | Allows for the transitive dependency libz (from `curl`) to be linked statically. |
|
||||
| <a name="static-feature">`static`</a> | enables `tls-static`, `http-static`, `sqlite3-static`, `dbus-static`, `libz-static` features | |
|
||||
|
||||
## Quick start
|
||||
|
||||
```sh
|
||||
# Create configuration file in ${XDG_CONFIG_HOME}/meli/config.toml:
|
||||
|
@ -51,16 +85,19 @@ $ meli edit-config
|
|||
$ meli install-man
|
||||
# Ready to go.
|
||||
$ meli
|
||||
# You can read any manual page with the CLI subcommand `man`:
|
||||
$ meli man meli.7
|
||||
# See help output for all options and subcommands.
|
||||
$ meli --help
|
||||
```
|
||||
|
||||
</td><td>
|
||||
|
||||
See a comprehensive tour of `meli` in the manual page [`meli(7)`](./meli/docs/meli.7).
|
||||
|
||||
See also the [Quickstart tutorial](https://meli-email.org/documentation.html#quick-start) online.
|
||||
|
||||
After installing `meli`, see `meli(1)`, `meli.conf(5)`, `meli(7)` and `meli-themes(5)` for documentation.
|
||||
Sample configuration and theme files can be found in the `meli/docs/samples/` subdirectory.
|
||||
Examples for configuration file settings can be found in `meli.conf.examples(5)`.
|
||||
Manual pages are also [hosted online](https://meli-email.org/documentation.html "meli documentation").
|
||||
`meli` by default looks for a configuration file in this location: `${XDG_CONFIG_HOME}/meli/config.toml`.
|
||||
|
||||
|
@ -70,26 +107,22 @@ You can run meli with arbitrary configuration files by setting the `${MELI_CONFI
|
|||
MELI_CONFIG=./test_config cargo run
|
||||
```
|
||||
|
||||
</td></tr>
|
||||
</table>
|
||||
|
||||
See [`meli(7)`](./meli/docs/meli.7) for an extensive tutorial and [`meli.conf(5)`](./meli/docs/meli.conf.5) for all configuration values.
|
||||
|
||||
| | | |
|
||||
:---:|:---:|:---:
|
||||
 |  | 
|
||||
Main view | Compact main view | Compose with embed terminal editor
|
||||
| Main view | Compact main view | Compose with embed terminal editor |
|
||||
|-----------|-------------------|------------------------------------|
|
||||
|  |  |  |
|
||||
|
||||
### Supported E-mail backends
|
||||
|
||||
| Protocol | Support |
|
||||
|:------------:|:----------------|
|
||||
| IMAP | full |
|
||||
| Maildir | full |
|
||||
| notmuch | full[^0] |
|
||||
| mbox | read-only |
|
||||
| JMAP | functional |
|
||||
| NNTP / Usenet| functional |
|
||||
| Protocol | Support |
|
||||
|---------------|------------|
|
||||
| IMAP | full |
|
||||
| Maildir | full |
|
||||
| notmuch | full[^0] |
|
||||
| mbox | read-only |
|
||||
| JMAP | functional |
|
||||
| NNTP / Usenet | functional |
|
||||
|
||||
[^0]: there's no support for searching through all email directly; you'd have to
create a mailbox with a notmuch query that returns everything and search
|
||||
|
|
|
@ -15,10 +15,10 @@
|
|||
],
|
||||
"codeRepository": "https://git.meli-email.org/meli/meli.git",
|
||||
"dateCreated": "2016-04-25",
|
||||
"dateModified": "2024-11-19",
|
||||
"dateModified": "2024-11-27",
|
||||
"datePublished": "2017-07-23",
|
||||
"description": "BSD/Linux/macos terminal email client with support for multiple accounts and Maildir / mbox / notmuch / IMAP / JMAP / NNTP (Usenet).",
|
||||
"downloadUrl": "https://git.meli-email.org/meli/meli/archive/v0.8.8.tar.gz",
|
||||
"downloadUrl": "https://git.meli-email.org/meli/meli/archive/v0.8.10.tar.gz",
|
||||
"identifier": "https://meli-email.org/",
|
||||
"isPartOf": "https://meli-email.org/",
|
||||
"keywords": [
|
||||
|
@ -49,14 +49,21 @@
|
|||
],
|
||||
"programmingLanguage": "Rust",
|
||||
"relatedLink": [
|
||||
"https://lists.meli-email.org/",
|
||||
"https://codeberg.org/meli/meli",
|
||||
"https://github.com/meli/meli",
|
||||
"https://lists.meli-email.org/"
|
||||
"https://gitlab.com/meli-project/meli",
|
||||
"https://crates.io/crates/meli",
|
||||
"https://packages.debian.org/trixie/meli",
|
||||
"https://pkgsrc.se/mail/meli",
|
||||
"https://openports.pl/path/mail/meli",
|
||||
"https://ports.macports.org/port/meli/",
|
||||
"https://search.nixos.org/packages?query=meli"
|
||||
],
|
||||
"version": "0.8.8",
|
||||
"version": "0.8.10",
|
||||
"contIntegration": "https://git.meli-email.org/meli/meli/actions",
|
||||
"developmentStatus": "active",
|
||||
"issueTracker": "https://git.meli-email.org/meli/meli/issues",
|
||||
"readme": "https://git.meli-email.org/meli/meli/raw/tag/v0.8.8/README.md",
|
||||
"buildInstructions": "https://git.meli-email.org/meli/meli/raw/tag/v0.8.8/BUILD.md"
|
||||
"readme": "https://git.meli-email.org/meli/meli/raw/tag/v0.8.10/README.md",
|
||||
"buildInstructions": "https://git.meli-email.org/meli/meli/raw/tag/v0.8.10/BUILD.md"
|
||||
}
|
||||
|
|
50 contrib/README.md (new file)
|
@ -0,0 +1,50 @@
|
|||
<!-- SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later -->
|
||||
# Useful scripts and files for use with `meli`
|
||||
|
||||
This directory includes various useful scripts and files that are contributed
|
||||
by the community and not actively maintained or supported.
|
||||
|
||||
If you believe something in this directory needs updates to work with the
|
||||
current version of `meli` or there are bugs that need fixing, please file an
|
||||
issue on our issue tracker!
|
||||
|
||||
## Connecting to a Gmail account with OAUTH2
|
||||
|
||||
The script [`./oauth2.py`](./oauth2.py) is a helper script to authenticate to a Gmail account using IMAP OAUTH2 tokens.
|
||||
|
||||
See [`meli.conf(5)`](../meli/docs/meli.conf.5) for documentation.
|
||||
|
||||
If the script does not work and you're certain it's because it needs changes to
|
||||
work with Google's servers and not a user error on your part, please file a bug
|
||||
on our issue tracker!
|
||||
|
||||
## Using `meli` for `mailto:` links
|
||||
|
||||
To use `meli` to open `mailto:` links from your browser, place the [`mailto-meli`](./mailto-meli) and [`mailto-meli-expect`](./mailto-meli-expect) scripts into `/usr/bin`
(or `.local/bin`, adjusting the path in the script accordingly).
|
||||
|
||||
Ensure all scripts are executable by your user account; if not, set the permissions accordingly:
|
||||
|
||||
```sh
|
||||
chmod u+x /path/to/mailto-meli
|
||||
```
|
||||
|
||||
and
|
||||
|
||||
```sh
|
||||
chmod u+x /path/to/mailto-meli-expect
|
||||
```
|
||||
|
||||
Then set `mailto-meli` as the program to open `mailto:` links
in your browser.
|
||||
|
||||
E.g. in Firefox this can be done under "Settings" (`about:preferences`), which you can access from the menu button or `Edit -> Settings`.
|
||||
|
||||
```text
|
||||
General -> Applications -> Content-Type: mailto.
|
||||
```
|
||||
|
||||
You can test that it works by clicking the system menu entry `File -> Email link...`.
|
||||
|
||||
_NOTE_: you need to have the [`expect`](https://en.wikipedia.org/wiki/Expect) binary installed for this to work.
`expect` is a scripting language used for interacting with interactive terminal applications like `meli`.
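As an extra sanity check outside the browser, you could also invoke the wrapper directly with a placeholder address; this is only a sketch and assumes both scripts are on your `PATH` and that `x-terminal-emulator` resolves to your terminal emulator:

```sh
# Sketch: call the handler by hand; a terminal window should open running
# meli with the ":mailto" command issued for the placeholder address.
mailto-meli "mailto:user@example.com"
```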
|
21 contrib/mailto-meli (new executable file)
|
@ -0,0 +1,21 @@
|
|||
#!/usr/bin/env sh
|
||||
#
|
||||
# mailto-meli -- wrapper to use meli as mailto handler
|
||||
# To use meli as a mailto: handler, point your browser to use this as the application for opening
# mailto: links.
|
||||
# Note: This assumes that x-terminal-emulator supports the "-e" flag for passing along arguments.
|
||||
|
||||
# Copyright: 2024 Matthias Geiger <werdahias@debian.org>
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Check if mailto-meli and expect are present
|
||||
if ! command -v mailto-meli > /dev/null 2>&1
|
||||
then echo "mailto-meli not found" && exit 1
|
||||
else
|
||||
if ! command -v expect > /dev/null 2>&1
|
||||
then echo "expect not found" && exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
exec x-terminal-emulator -e mailto-meli-expect "$@"
|
||||
|
18 contrib/mailto-meli-expect (new executable file)
|
@ -0,0 +1,18 @@
|
|||
#!/usr/bin/env -S expect -f
|
||||
# Copyright 2024 Manos Pitsidianakis
|
||||
#
|
||||
# SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
# Trap window resize signal
|
||||
trap {
|
||||
set rows [stty rows]
|
||||
set cols [stty columns]
|
||||
stty rows $rows columns $cols < $spawn_out(slave,name)
|
||||
} WINCH
|
||||
# send the input with human-like delay:
|
||||
set send_human {.001 .003 0.01 .005 .005}
|
||||
spawn meli
|
||||
send -h ":mailto "
|
||||
send -h [lindex $argv 0]
|
||||
send -h "\n"
|
||||
interact
|
156 debian/changelog (vendored)
|
@ -1,3 +1,159 @@
|
|||
meli (0.8.10-1) bookworm; urgency=low
|
||||
|
||||
Highlights:
|
||||
===========
|
||||
|
||||
- added pipe-attachment command
|
||||
- added sample scripts for using meli as a mailto scheme handler in
|
||||
contrib/
|
||||
- fixed GPG encryption with libgpgme
|
||||
|
||||
Contributors in alphabetical order:
|
||||
===================================
|
||||
|
||||
- Manos Pitsidianakis
|
||||
- Matthias Geiger
|
||||
|
||||
Added
|
||||
=====
|
||||
|
||||
- 5e77821f mail/view: add pipe-attachment command in PR #540
|
||||
"mail/view: add pipe-attachment command"
|
||||
- fa896f6b contrib: add mailto: scheme handler scripts
|
||||
- 00ce9660
|
||||
melib/backends: add as_any/as_any_mut methods to BackendMailbox
|
||||
- fd243fa5 maildir: add mailbox creation tests
|
||||
- de65eec3 meli/accounts: add mailbox_by_path() tests in PR #535
|
||||
"Rework maildir mailbox path logic, add tests"
|
||||
- 6b363601 melib/gpgme: impl Display for gpgme::Key
|
||||
|
||||
Bug Fixes
|
||||
=========
|
||||
|
||||
- 60c90d75 melib/attachments: ensure MIME boundary prefixed with CRLF
|
||||
- 3433c5c3 compose/pgp: rewrite key selection logic in PR #541 "More
|
||||
gpgme/PGP fixes again"
|
||||
- 12de82e7 melib/conf: fix mutt_alias_file not being validated in PR
|
||||
#550 "Remove sealed_test dependency"
|
||||
- c8e055a7 Fix version migrations being triggered backwards in PR #557
|
||||
"Fix version migrations being triggered backwards"
|
||||
- efab99fd
|
||||
terminal: check for NO_COLOR env var without unicode validation
|
||||
- 36a63e88 melib/maildir: rewrite create_mailbox()
|
||||
- fcab855f view: ensure envelope headers are always populated in PR
|
||||
#538 "view: ensure envelope headers are always populated"
|
||||
- 84564f44 mailcap: don't drop File before opening it in PR #552
|
||||
"mailcap: don't drop File before opening it"
|
||||
|
||||
Changes
|
||||
=======
|
||||
|
||||
- ed85da51 Remove sealed_test dependency
|
||||
|
||||
Refactoring
|
||||
===========
|
||||
|
||||
- 03df2ac1 meli/utilities: add print utilities for tests
|
||||
- 18e9d5c1 conf.rs: impl From<melib::AccountSettings> for AccountConf
|
||||
- 1f2fec19 Fix 1.83.0 lints in PR #536 "CI: Add action to check for
|
||||
DCO signoffs in PRs"
|
||||
- 192ecea2 compose/gpg.rs: Fix msrv regression
|
||||
|
||||
Documentation
|
||||
=============
|
||||
|
||||
- 4a61a4b8 melib: include README.md as preamble of crate rustdocs
|
||||
- 80e53471 BUILD.md: move melib specific stuff to melib/README.md
|
||||
- 91a17ece melib/README.md: mention sqlite3-static feature
|
||||
- b77a691b meli/README.md: Add cargo features section in PR #549
|
||||
"Document cargo features in READMEs"
|
||||
- 91dc271d contrib: add a README.md file
|
||||
- 2e900be6 contrib/README.md: add section about oauth2.py
|
||||
- 07812d2c contrib/README.md: elaborate a bit about mailto in PR #545
|
||||
"Add external mailto: handler support via scripts in contrib"
|
||||
- e784e8d2 scripts: add markdown_doc_lints.py
|
||||
|
||||
Continuous Integration
|
||||
======================
|
||||
|
||||
- 77629851 CI: Add action to check for DCO signoffs in PRs
|
||||
- f944ebed CI: Add error msg when cargo-derivefmt check fails
|
||||
- d49344f9 CI: Move MSRV checks from manifest to lints in PR #553
|
||||
"ci-workflow-fixes"
|
||||
- ece6bfc2 CI: non-zero exit if cargo-derivefmt-* targets fail
|
||||
- 2257b91b CI: add actions/cache steps in PR #554 "CI: add
|
||||
actions/cache steps"
|
||||
- a1c9524f CI: fix check_dco.sh not working with other repos in PR
|
||||
#555 "CI: fix check_dco.sh not working with other repos"
|
||||
|
||||
-- Manos Pitsidianakis <manos@pitsidianak.is> Fri, 06 Dec 2024 07:03:58 +0200
|
||||
|
||||
meli (0.8.9-1) bookworm; urgency=low
|
||||
|
||||
This is mostly a fixups release.
|
||||
|
||||
Added
|
||||
=====
|
||||
|
||||
- cf16bf65 meli/sqlite3: add tests for reindexing
|
||||
- a389772d accounts: suggest tips on mailbox_by_path error
|
||||
|
||||
Bug Fixes
|
||||
=========
|
||||
|
||||
- 25f0a3f8 conf/terminal: fix serde of ProgressSpinnerSequence
|
||||
- c375b48e terminal: fix Synchronized Output response parsed as input
|
||||
in PR #523 "terminal: fix Synchronized Output response parsed as
|
||||
input"
|
||||
- b7e215f9
|
||||
melib/utils: fix test_fd_locks() on platforms without OFD support in
|
||||
PR #524 "melib/utils: fix test_fd_locks() on platforms without OFD
|
||||
support"
|
||||
- 25c32a6b meli/docs/meli.conf.examples.5: fix .Dt macro arguments
|
||||
- 18ae5848 meli: fix reindex of previously indexed account with sqlite3
|
||||
backend
|
||||
- 13e917d9 Fix some compilation errors with cfg feature attrs in PR #531
|
||||
"accounts: suggest tips on mailbox_by_path error"
|
||||
- 8c176d38 contacts/editor: fix crash on saving contact in PR #532
|
||||
"contacts/editor: fix crash on saving contact"
|
||||
- fb5a88c2
|
||||
melib/collection: ensure mailbox exists when inserting new envelopes
|
||||
in PR #529 "Small account stuff fixes"
|
||||
|
||||
Changes
|
||||
=======
|
||||
|
||||
- 7f8f1cf6 melib/gpgme bindings renewal in PR #533 "melib/gpgme
|
||||
bindings renewal"
|
||||
- 9b7825bc Update futures-util dep, remove stderrlog dep
|
||||
- 4be69360 Remove obsolete "encoding" dependency in PR #530
|
||||
"Remove/update obsolete dependencies"
|
||||
|
||||
Refactoring
|
||||
===========
|
||||
|
||||
- 5af6e059 meli/accounts: use Arc<str> for account name
|
||||
- 567270e1 melib: use Vec instead of SmallVec for search results
|
||||
- 2bd8d7ba
|
||||
conf/tests.rs: Rename test functions to follow path convention
|
||||
|
||||
Documentation
|
||||
=============
|
||||
|
||||
- 97242482 meli/docs: add meli.conf.examples to CLI and tests
|
||||
- 0f096338 README.md: Update ways to install, add gitlab mirror link
|
||||
in PR #528 "Integrate meli.conf.examples.5 into CLI and build, also
|
||||
update README with installation instructions"
|
||||
|
||||
Continuous Integration
|
||||
======================
|
||||
|
||||
- 630df308 CI: Add arm64 runners in job matrices in PR #527 "CI: Add
|
||||
arm64 runners in job matrices"
|
||||
- 49ecbb56 CI: .gitea/Makefile.lint: check if nightly exists
|
||||
|
||||
-- Manos Pitsidianakis <manos@pitsidianak.is> Wed, 27 Nov 2024 16:16:06 +0200
|
||||
|
||||
meli (0.8.8-1) bookworm; urgency=low
|
||||
|
||||
WARNING: This release contains a breaking change in the configuration
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "meli"
|
||||
version = "0.8.8"
|
||||
version = "0.8.10"
|
||||
authors = ["Manos Pitsidianakis <manos@pitsidianak.is>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.70.0"
|
||||
|
@ -23,6 +23,7 @@ name = "meli"
|
|||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
aho-corasick = { version = "1.1.3" }
|
||||
async-task = { version = "^4.2.0" }
|
||||
bitflags = { version = "2.4", features = ["serde"] }
|
||||
crossbeam = { version = "^0.8" }
|
||||
|
@ -33,7 +34,7 @@ itoa = { version = "1.0.11", default-features = false }
|
|||
libc = { version = "0.2.125", default-features = false, features = ["extra_traits"] }
|
||||
libz-sys = { version = "1.1", features = ["static"], optional = true }
|
||||
linkify = { version = "^0.10", default-features = false }
|
||||
melib = { path = "../melib", version = "0.8.8", features = [] }
|
||||
melib = { path = "../melib", version = "0.8.10", features = [] }
|
||||
nix = { version = "0.29", default-features = false, features = ["signal", "poll", "term", "ioctl", "process"] }
|
||||
regex = { version = "1" }
|
||||
serde = { version = "1.0.71" }
|
||||
|
@ -83,7 +84,7 @@ assert_cmd = { version = "=2.0.13" }
|
|||
flate2 = { version = "1" }
|
||||
predicates = { version = "3" }
|
||||
regex = { version = "1" }
|
||||
sealed_test = { version = "1.1.0" }
|
||||
rusty-fork = { version = "0.3.0" }
|
||||
tempfile = { version = "3.3" }
|
||||
|
||||
[target.'cfg(target_os="linux")'.dependencies]
|
||||
|
|
|
@ -75,10 +75,20 @@ fn main() {
|
|||
|
||||
cl("docs/meli.1", "meli.txt.gz", false);
|
||||
cl("docs/meli.conf.5", "meli.conf.txt.gz", false);
|
||||
cl(
|
||||
"docs/meli.conf.examples.5",
|
||||
"meli.conf.examples.txt.gz",
|
||||
false,
|
||||
);
|
||||
cl("docs/meli-themes.5", "meli-themes.txt.gz", false);
|
||||
cl("docs/meli.7", "meli.7.txt.gz", false);
|
||||
cl("docs/meli.1", "meli.mdoc.gz", true);
|
||||
cl("docs/meli.conf.5", "meli.conf.mdoc.gz", true);
|
||||
cl(
|
||||
"docs/meli.conf.examples.5",
|
||||
"meli.conf.examples.mdoc.gz",
|
||||
true,
|
||||
);
|
||||
cl("docs/meli-themes.5", "meli-themes.mdoc.gz", true);
|
||||
cl("docs/meli.7", "meli.7.mdoc.gz", true);
|
||||
}
|
||||
|
|
|
@ -119,3 +119,32 @@ The HTML of the e-mail is piped into `html_filter`'s standard input.
|
|||
If your account's syncing is handled by an external tool, you can use the
|
||||
refresh shortcuts within `meli` to call this tool with
|
||||
`accounts.refresh_command`.
|
||||
|
||||
## Viewing binary attachments such as images inside your terminal
|
||||
|
||||
If you have a specific terminal tool that lets you pipe binary data to it and
it produces output suitable for the terminal, you can use the `pipe-attachment`
command to view/preview attachments without leaving `meli` or opening a GUI app.
|
||||
|
||||
This requires the output to be interactive, otherwise `meli` will run the tool
and immediately return, probably too quickly for you to notice the output in
your terminal. A general solution is to pipe the output to an interactive pager
like `less`, which requires the user to exit it interactively.
|
||||
|
||||
The [`chafa`] tool can be used for images in this example:
|
||||
|
||||
Write a wrapper script that pipes the tool's output into a pager, for example
`less`. If the output contains ANSI escape codes (e.g. colors or bold/italic
text), make sure to use `less -r` to preserve those codes.
|
||||
|
||||
```sh
|
||||
#!/bin/sh
|
||||
|
||||
/bin/chafa "$@" | less -r
|
||||
```
|
||||
|
||||
Save it somewhere as a file with executable permissions and you can use
|
||||
`pipe-attachment 1 /path/to/your/chafa/wrapper` to view the first attachment as
|
||||
an image with [`chafa`].
|
||||
|
||||
[`chafa`]: https://hpjansson.org/chafa/
|
||||
|
|
|
@ -214,14 +214,30 @@ See
|
|||
for the location of the mailcap files and
|
||||
.Xr mailcap 5
|
||||
for their syntax.
|
||||
You can save individual attachments with the
|
||||
.Command save-attachment Ar INDEX Ar path-to-file
|
||||
command.
|
||||
You can save individual attachments with the following command:
|
||||
.Command save\-attachment Ar INDEX Ar path\-to\-file
|
||||
.Ar INDEX
|
||||
is the attachment's index in the listing.
|
||||
If the path provided is a directory, the attachment is saved with its filename set to the filename in the attachment, if any.
|
||||
If the 0th index is provided, the entire message is saved.
|
||||
If the path provided is a directory, the message is saved as an eml file with its filename set to the messages message-id.
|
||||
.Bl -tag -compact -width 8n
|
||||
.It If the path provided is a directory, the attachment is saved with its filename set to the filename in the attachment, if any.
|
||||
.It If the 0th index is provided, the entire message is saved.
|
||||
.It If the path provided is a directory, the message is saved as an eml file with its filename set to the messages message\-id.
|
||||
.El
|
||||
.Pp
|
||||
You can pipe individual attachments to binaries with the following command:
|
||||
.Command pipe\-attachment Ar INDEX Ar binary Ar ARGS
|
||||
Example usage with the
|
||||
.Xr less 1
|
||||
pager:
|
||||
.D1 pipe\-attachment 0 less
|
||||
If the binary does not wait for your input before exiting, you will probably
not see its output, since you will return to the user interface
immediately.
|
||||
You can write a wrapper script that pipes your binary's output to
|
||||
.Dl less
|
||||
or
|
||||
.Dl less \-r
|
||||
if you want to preserve the ANSI escape codes in the pager's output.
|
||||
.Sh SEARCH
|
||||
Each e\-mail storage backend has a default search method assigned.
|
||||
.Em IMAP
|
||||
|
@ -582,7 +598,7 @@ open list archive with
|
|||
.Bl -tag -width 36n
|
||||
.It Cm mailto Ar MAILTO_ADDRESS
|
||||
Opens a composer tab with initial values parsed from the
|
||||
.Li mailto:
|
||||
.Li mailto :
|
||||
address.
|
||||
.It Cm add-attachment Ar PATH
|
||||
in composer, add
|
||||
|
@ -603,7 +619,7 @@ Launch command
|
|||
\&.
|
||||
The command should print file paths in stdout, separated by NUL bytes.
|
||||
Example usage with
|
||||
.Xr fzf Ns
|
||||
.Xr fzf 1 Ns
|
||||
:
|
||||
.D1 add-attachment-file-picker < fzf --print0
|
||||
.It Cm remove-attachment Ar INDEX
|
||||
|
|
|
@ -293,6 +293,11 @@ Path of
|
|||
.Xr mutt 1
|
||||
compatible alias file in the option
|
||||
They are parsed and imported read-only.
|
||||
.It Ic notmuch_address_book_query Ar String
|
||||
.Pq Em optional
|
||||
Query passed to
|
||||
.Qq Li notmuch address
|
||||
to import contacts into meli. Contacts are parsed and imported read-only.
|
||||
.It Ic mailboxes Ar mailbox
|
||||
.Pq Em optional
|
||||
Configuration for each mailbox.
|
||||
|
@ -987,7 +992,7 @@ Example:
|
|||
.Bd -literal
|
||||
[composing]
|
||||
editor_cmd = '~/.local/bin/vim +/^$'
|
||||
embed = true
|
||||
embedded_pty = true
|
||||
custom_compose_hooks = [ { name ="spellcheck", command="aspell --mode email --dont-suggest --ignore-case list" }]
|
||||
.Ed
|
||||
.\"
|
||||
|
|
|
@ -45,8 +45,8 @@ Rendered as:
|
|||
.Sm
|
||||
..
|
||||
.\".Dd November 11, 2022
|
||||
.Dd August 30, 2024
|
||||
.Dt MELI.CONF EXAMPLES 5
|
||||
.Dd November 22, 2024
|
||||
.Dt MELI.CONF.EXAMPLES 5
|
||||
.Os
|
||||
.Sh NAME
|
||||
.Nm meli.conf examples
|
||||
|
|
|
@ -46,7 +46,6 @@ use melib::{
|
|||
utils::{fnmatch::Fnmatch, futures::sleep, random, shellexpand::ShellExpandTrait},
|
||||
Contacts, SortField, SortOrder,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
#[cfg(feature = "sqlite3")]
|
||||
use crate::command::actions::AccountAction;
|
||||
|
@ -123,7 +122,7 @@ impl IsOnline {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct Account {
|
||||
pub name: String,
|
||||
pub name: Arc<str>,
|
||||
pub hash: AccountHash,
|
||||
pub is_online: IsOnline,
|
||||
pub mailbox_entries: IndexMap<MailboxHash, MailboxEntry>,
|
||||
|
@ -143,7 +142,7 @@ pub struct Account {
|
|||
|
||||
impl Drop for Account {
|
||||
fn drop(&mut self) {
|
||||
if let Ok(data_dir) = xdg::BaseDirectories::with_profile("meli", &self.name) {
|
||||
if let Ok(data_dir) = xdg::BaseDirectories::with_profile("meli", self.name.as_ref()) {
|
||||
if let Ok(data) = data_dir.place_data_file("contacts") {
|
||||
/* place result in cache directory */
|
||||
let f = match fs::File::create(data) {
|
||||
|
@ -201,6 +200,7 @@ impl Account {
|
|||
main_loop_handler: MainLoopHandler,
|
||||
event_consumer: BackendEventConsumer,
|
||||
) -> Result<Self> {
|
||||
let name: Arc<str> = name.into();
|
||||
let s = settings.clone();
|
||||
let backend = map.get(&settings.account().format)(
|
||||
settings.account(),
|
||||
|
@ -219,7 +219,7 @@ impl Account {
|
|||
event_consumer,
|
||||
)?;
|
||||
|
||||
let data_dir = xdg::BaseDirectories::with_profile("meli", &name)?;
|
||||
let data_dir = xdg::BaseDirectories::with_profile("meli", name.as_ref())?;
|
||||
let mut contacts = Contacts::with_account(settings.account());
|
||||
|
||||
if let Ok(data) = data_dir.place_data_file("contacts") {
|
||||
|
@ -700,7 +700,7 @@ impl Account {
|
|||
body: format!(
|
||||
"{}\n{} | {}",
|
||||
from,
|
||||
self.name,
|
||||
&self.name,
|
||||
self.mailbox_entries[&mailbox_hash].name()
|
||||
)
|
||||
.into(),
|
||||
|
@ -1009,10 +1009,7 @@ impl Account {
|
|||
flags: Option<Flag>,
|
||||
) -> Result<()> {
|
||||
if self.settings.account.read_only {
|
||||
return Err(Error::new(format!(
|
||||
"Account {} is read-only.",
|
||||
self.name.as_str()
|
||||
)));
|
||||
return Err(Error::new(format!("Account {} is read-only.", &self.name)));
|
||||
}
|
||||
let job = self
|
||||
.backend
|
||||
|
@ -1288,7 +1285,7 @@ impl Account {
|
|||
search_term: &str,
|
||||
_sort: (SortField, SortOrder),
|
||||
mailbox_hash: MailboxHash,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
let query = melib::search::Query::try_from(search_term)?;
|
||||
match self.settings.conf.search_backend {
|
||||
#[cfg(feature = "sqlite3")]
|
||||
|
@ -1305,7 +1302,7 @@ impl Account {
|
|||
.search(query, Some(mailbox_hash))
|
||||
} else {
|
||||
use melib::search::QueryTrait;
|
||||
let mut ret = SmallVec::new();
|
||||
let mut ret = Vec::with_capacity(512);
|
||||
let envelopes = self.collection.envelopes.read().unwrap();
|
||||
for &env_hash in self.collection.get_mailbox(mailbox_hash).iter() {
|
||||
if let Some(envelope) = envelopes.get(&env_hash) {
|
||||
|
@ -1334,7 +1331,60 @@ impl Account {
|
|||
{
|
||||
Ok(*mailbox_hash)
|
||||
} else {
|
||||
Err(Error::new("Mailbox with that path not found."))
|
||||
use aho_corasick::AhoCorasick;
|
||||
|
||||
let nodes = self
|
||||
.list_mailboxes()
|
||||
.into_iter()
|
||||
.map(|n| (n.hash, n.depth))
|
||||
.collect::<IndexMap<MailboxHash, usize>>();
|
||||
let mut entries = self
|
||||
.mailbox_entries
|
||||
.iter()
|
||||
.map(|(h, f)| (h, f.ref_mailbox.name(), f.ref_mailbox.path()))
|
||||
.collect::<Vec<_>>();
|
||||
entries.sort_by_cached_key(|(h, n, p)| {
|
||||
(
|
||||
n.len(),
|
||||
nodes.get(*h).cloned().unwrap_or(usize::MAX),
|
||||
*p,
|
||||
*n,
|
||||
)
|
||||
});
|
||||
let patterns = &[path.trim_matches('/')];
|
||||
let mut potential_matches = IndexSet::new();
|
||||
for (_, _, haystack) in &entries {
|
||||
let ac = AhoCorasick::builder()
|
||||
.ascii_case_insensitive(true)
|
||||
.build(patterns)
|
||||
.unwrap();
|
||||
if ac.find_iter(haystack).next().is_some() {
|
||||
potential_matches.insert(haystack.to_string());
|
||||
}
|
||||
}
|
||||
const MANAGE_MAILBOXES_TIP: &str = "You can inspect the list of mailbox paths of an \
|
||||
account with the manage-mailboxes command.";
|
||||
let details_msg = if potential_matches.is_empty() {
|
||||
Cow::Borrowed(MANAGE_MAILBOXES_TIP)
|
||||
} else {
|
||||
let mut potential_matches = potential_matches.into_iter().collect::<Vec<_>>();
|
||||
let matches_length = potential_matches.len();
|
||||
potential_matches.truncate(5);
|
||||
Cow::Owned(format!(
|
||||
"Some matching paths that were found: {matches:?}{others}. {tip}",
|
||||
matches = potential_matches,
|
||||
tip = MANAGE_MAILBOXES_TIP,
|
||||
others = if matches_length > 5 {
|
||||
format!(" and {} others", matches_length - 5)
|
||||
} else {
|
||||
String::with_capacity(0)
|
||||
}
|
||||
))
|
||||
};
|
||||
|
||||
Err(Error::new("Mailbox with that path not found.")
|
||||
.set_details(details_msg)
|
||||
.set_kind(ErrorKind::NotFound))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1803,7 +1853,7 @@ impl Account {
|
|||
}
|
||||
|
||||
pub fn signature_file(&self) -> Option<PathBuf> {
|
||||
xdg::BaseDirectories::with_profile("meli", &self.name)
|
||||
xdg::BaseDirectories::with_profile("meli", self.name.as_ref())
|
||||
.ok()
|
||||
.and_then(|d| {
|
||||
d.place_config_file("signature")
|
||||
|
|
|
@ -26,7 +26,7 @@ use super::*;
|
|||
use crate::command::actions::MailboxOperation;
|
||||
|
||||
impl Account {
|
||||
pub fn mailbox_operation(&mut self, op: MailboxOperation) -> Result<()> {
|
||||
pub fn mailbox_operation(&mut self, op: MailboxOperation) -> Result<JobId> {
|
||||
if self.settings.account.read_only {
|
||||
return Err(Error::new("Account is read-only."));
|
||||
}
|
||||
|
@ -42,11 +42,12 @@ impl Account {
|
|||
job,
|
||||
self.is_async(),
|
||||
);
|
||||
let job_id = handle.job_id;
|
||||
self.insert_job(
|
||||
handle.job_id,
|
||||
JobRequest::Mailbox(MailboxJobRequest::CreateMailbox { path, handle }),
|
||||
);
|
||||
Ok(())
|
||||
Ok(job_id)
|
||||
}
|
||||
MailboxOperation::Delete(path) => {
|
||||
if self.mailbox_entries.len() == 1 {
|
||||
|
@ -60,6 +61,7 @@ impl Account {
|
|||
job,
|
||||
self.is_async(),
|
||||
);
|
||||
let job_id = handle.job_id;
|
||||
self.insert_job(
|
||||
handle.job_id,
|
||||
JobRequest::Mailbox(MailboxJobRequest::DeleteMailbox {
|
||||
|
@ -67,7 +69,7 @@ impl Account {
|
|||
handle,
|
||||
}),
|
||||
);
|
||||
Ok(())
|
||||
Ok(job_id)
|
||||
}
|
||||
MailboxOperation::Subscribe(path) => {
|
||||
let mailbox_hash = self.mailbox_by_path(&path)?;
|
||||
|
@ -81,6 +83,7 @@ impl Account {
|
|||
job,
|
||||
self.is_async(),
|
||||
);
|
||||
let job_id = handle.job_id;
|
||||
self.insert_job(
|
||||
handle.job_id,
|
||||
JobRequest::Mailbox(MailboxJobRequest::SetMailboxSubscription {
|
||||
|
@ -89,7 +92,7 @@ impl Account {
|
|||
handle,
|
||||
}),
|
||||
);
|
||||
Ok(())
|
||||
Ok(job_id)
|
||||
}
|
||||
MailboxOperation::Unsubscribe(path) => {
|
||||
let mailbox_hash = self.mailbox_by_path(&path)?;
|
||||
|
@ -103,15 +106,16 @@ impl Account {
|
|||
job,
|
||||
self.is_async(),
|
||||
);
|
||||
let job_id = handle.job_id;
|
||||
self.insert_job(
|
||||
handle.job_id,
|
||||
job_id,
|
||||
JobRequest::Mailbox(MailboxJobRequest::SetMailboxSubscription {
|
||||
mailbox_hash,
|
||||
new_value: false,
|
||||
handle,
|
||||
}),
|
||||
);
|
||||
Ok(())
|
||||
Ok(job_id)
|
||||
}
|
||||
MailboxOperation::Rename(path, new_path) => {
|
||||
let mailbox_hash = self.mailbox_by_path(&path)?;
|
||||
|
@ -125,15 +129,16 @@ impl Account {
|
|||
job,
|
||||
self.is_async(),
|
||||
);
|
||||
let job_id = handle.job_id;
|
||||
self.insert_job(
|
||||
handle.job_id,
|
||||
job_id,
|
||||
JobRequest::Mailbox(MailboxJobRequest::RenameMailbox {
|
||||
handle,
|
||||
mailbox_hash,
|
||||
new_path,
|
||||
}),
|
||||
);
|
||||
Ok(())
|
||||
Ok(job_id)
|
||||
}
|
||||
MailboxOperation::SetPermissions(_) => Err(Error::new("Not implemented.")),
|
||||
}
|
||||
|
@ -185,7 +190,7 @@ impl Account {
|
|||
if !err.is_recoverable() {
|
||||
self.main_loop_handler.send(ThreadEvent::UIEvent(
|
||||
UIEvent::Notification {
|
||||
title: Some(self.name.clone().into()),
|
||||
title: Some(self.name.to_string().into()),
|
||||
source: Some(err.clone()),
|
||||
body: err.to_string().into(),
|
||||
kind: Some(NotificationType::Error(err.kind)),
|
||||
|
|
|
@ -20,13 +20,21 @@
|
|||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
use melib::{
|
||||
backends::{Mailbox, MailboxHash},
|
||||
error::Result,
|
||||
MailboxPermissions, SpecialUsageMailbox,
|
||||
};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::accounts::{FileMailboxConf, MailboxEntry, MailboxStatus};
|
||||
use melib::{
|
||||
backends::{prelude::*, Mailbox, MailboxHash},
|
||||
error::Result,
|
||||
maildir::MaildirType,
|
||||
smol, MailboxPermissions, SpecialUsageMailbox,
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::{
|
||||
accounts::{AccountConf, FileMailboxConf, MailboxEntry, MailboxStatus},
|
||||
command::actions::MailboxOperation,
|
||||
utilities::tests::{eprint_step_fn, eprintln_ok_fn},
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_mailbox_utf7() {
|
||||
|
@ -81,6 +89,14 @@ fn test_mailbox_utf7() {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
for (n, d) in [
|
||||
("~peter/mail/&U,BTFw-/&ZeVnLIqe-", "~peter/mail/台北/日本語"),
|
||||
|
@ -102,3 +118,448 @@ fn test_mailbox_utf7() {
|
|||
assert_eq!(&entry.path, d);
|
||||
}
|
||||
}
|
||||
|
||||
fn new_maildir_backend(
|
||||
temp_dir: &TempDir,
|
||||
acc_name: &str,
|
||||
event_consumer: BackendEventConsumer,
|
||||
with_root_mailbox: bool,
|
||||
) -> Result<(PathBuf, AccountConf, Box<MaildirType>)> {
|
||||
let root_mailbox = temp_dir.path().join("inbox");
|
||||
{
|
||||
std::fs::create_dir(&root_mailbox).expect("Could not create root mailbox directory.");
|
||||
if with_root_mailbox {
|
||||
for d in &["cur", "new", "tmp"] {
|
||||
std::fs::create_dir(root_mailbox.join(d))
|
||||
.expect("Could not create root mailbox directory contents.");
|
||||
}
|
||||
}
|
||||
}
|
||||
let subscribed_mailboxes = if with_root_mailbox {
|
||||
vec!["inbox".into()]
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
let mailboxes = if with_root_mailbox {
|
||||
vec![(
|
||||
"inbox".into(),
|
||||
melib::conf::MailboxConf {
|
||||
extra: indexmap::indexmap! {
|
||||
"path".into() => root_mailbox.display().to_string(),
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
)]
|
||||
.into_iter()
|
||||
.collect()
|
||||
} else {
|
||||
indexmap::indexmap! {}
|
||||
};
|
||||
let extra = if with_root_mailbox {
|
||||
indexmap::indexmap! {
|
||||
"root_mailbox".into() => root_mailbox.display().to_string(),
|
||||
}
|
||||
} else {
|
||||
indexmap::indexmap! {}
|
||||
};
|
||||
|
||||
let account_conf = melib::AccountSettings {
|
||||
name: acc_name.to_string(),
|
||||
root_mailbox: root_mailbox.display().to_string(),
|
||||
format: "maildir".to_string(),
|
||||
identity: "user@localhost".to_string(),
|
||||
extra_identities: vec![],
|
||||
read_only: false,
|
||||
display_name: None,
|
||||
order: Default::default(),
|
||||
subscribed_mailboxes,
|
||||
mailboxes,
|
||||
manual_refresh: true,
|
||||
extra,
|
||||
};
|
||||
|
||||
let maildir = MaildirType::new(&account_conf, Default::default(), event_consumer)?;
|
||||
Ok((root_mailbox, account_conf.into(), maildir))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_accounts_mailbox_by_path_error_msg() {
|
||||
const ACCOUNT_NAME: &str = "test";
|
||||
|
||||
let eprintln_ok = eprintln_ok_fn();
|
||||
let mut eprint_step_closure = eprint_step_fn();
|
||||
macro_rules! eprint_step {
|
||||
($($arg:tt)+) => {{
|
||||
eprint_step_closure(format_args!($($arg)+));
|
||||
}};
|
||||
}
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
{
|
||||
eprint_step!(
|
||||
"Create maildir backend with a root mailbox, \"inbox\" which will be a valid maildir \
|
||||
folder because it will contain cur, new, tmp subdirectories..."
|
||||
);
|
||||
let mut ctx = crate::Context::new_mock(&temp_dir);
|
||||
let backend_event_queue = Arc::new(std::sync::Mutex::new(
|
||||
std::collections::VecDeque::with_capacity(16),
|
||||
));
|
||||
|
||||
let backend_event_consumer = {
|
||||
let backend_event_queue = Arc::clone(&backend_event_queue);
|
||||
|
||||
BackendEventConsumer::new(Arc::new(move |ah, be| {
|
||||
backend_event_queue.lock().unwrap().push_back((ah, be));
|
||||
}))
|
||||
};
|
||||
|
||||
let (root_mailbox, settings, maildir) =
|
||||
new_maildir_backend(&temp_dir, ACCOUNT_NAME, backend_event_consumer, true).unwrap();
|
||||
eprintln_ok();
|
||||
let name = maildir.account_name.to_string();
|
||||
let account_hash = maildir.account_hash;
|
||||
let backend = maildir as Box<dyn MailBackend>;
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
let contacts = melib::contacts::Contacts::new(name.to_string());
|
||||
|
||||
let mut account = super::Account {
|
||||
hash: account_hash,
|
||||
name: name.into(),
|
||||
is_online: super::IsOnline::True,
|
||||
mailbox_entries: Default::default(),
|
||||
mailboxes_order: Default::default(),
|
||||
tree: Default::default(),
|
||||
contacts,
|
||||
collection: backend.collection(),
|
||||
settings,
|
||||
main_loop_handler: ctx.main_loop_handler.clone(),
|
||||
active_jobs: HashMap::default(),
|
||||
active_job_instants: std::collections::BTreeMap::default(),
|
||||
event_queue: IndexMap::default(),
|
||||
backend_capabilities: backend.capabilities(),
|
||||
backend: Arc::new(std::sync::RwLock::new(backend)),
|
||||
};
|
||||
account.init(ref_mailboxes).unwrap();
|
||||
while let Ok(thread_event) = ctx.receiver.try_recv() {
|
||||
if let crate::ThreadEvent::JobFinished(job_id) = thread_event {
|
||||
if !account.process_event(&job_id) {
|
||||
assert!(
|
||||
ctx.accounts[0].process_event(&job_id),
|
||||
"unclaimed job id: {:?}",
|
||||
job_id
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
eprint_step!("Assert that mailbox_by_path(\"inbox\") returns the root mailbox...");
|
||||
account.mailbox_by_path("inbox").unwrap();
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"box\") returns an error mentioning the root mailbox..."
|
||||
);
|
||||
assert_eq!(
|
||||
account.mailbox_by_path("box").unwrap_err().to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"Some matching paths that were found: [\"inbox\"]. You can inspect the list \
|
||||
of mailbox paths of an account with the manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
|
||||
macro_rules! wait_for_job {
|
||||
($job_id:expr) => {{
|
||||
let wait_for = $job_id;
|
||||
while let Ok(thread_event) = ctx.receiver.recv() {
|
||||
if let crate::ThreadEvent::JobFinished(job_id) = thread_event {
|
||||
if !account.process_event(&job_id) {
|
||||
assert!(
|
||||
ctx.accounts[0].process_event(&job_id),
|
||||
"unclaimed job id: {:?}",
|
||||
job_id
|
||||
);
|
||||
} else if job_id == wait_for {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}};
|
||||
}
|
||||
eprint_step!(
|
||||
"Create new mailboxes: \"Sent\", \"Trash\", \"Drafts\", \"Archive\", \"Outbox\", \
|
||||
\"Archive/Archive (old)\"..."
|
||||
);
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create("Sent".to_string()))
|
||||
.unwrap());
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create("Trash".to_string()))
|
||||
.unwrap());
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create("Drafts".to_string()))
|
||||
.unwrap());
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create("Archive".to_string()))
|
||||
.unwrap());
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create("Outbox".to_string()))
|
||||
.unwrap());
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create(
|
||||
"inbox/Archive/Archive (old)".to_string(),
|
||||
))
|
||||
.unwrap());
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"rchive\") returns an error and mentions matching \
|
||||
archives with mailboxes with the least depth in the tree hierarchy of mailboxes \
|
||||
mentioned first..."
|
||||
);
|
||||
assert_eq!(
|
||||
account.mailbox_by_path("rchive").unwrap_err().to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"Some matching paths that were found: [\"inbox/Archive\", \
|
||||
\"inbox/Archive/Archive (old)\"]. You can inspect the list of mailbox paths \
|
||||
of an account with the manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
eprint_step!("Create \"inbox/Archive/Archive{{1,2,3,4,5,6,7,8,9,10}}\" mailboxes...");
|
||||
for i in 1..=10 {
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create(format!(
|
||||
"inbox/Archive/Archive{i}"
|
||||
)))
|
||||
.unwrap());
|
||||
}
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"inbox/Archive/Archive{{n}}\") works, i.e. we have to \
|
||||
specify the root prefix \"inbox\"..."
|
||||
);
|
||||
for i in 1..=10 {
|
||||
account
|
||||
.mailbox_by_path(&format!("inbox/Archive/Archive{i}"))
|
||||
.unwrap();
|
||||
account
|
||||
.mailbox_by_path(&format!("Archive/Archive{i}"))
|
||||
.unwrap_err();
|
||||
}
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"rchive\") returns and error and truncates the matching \
|
||||
mailbox paths to 5 maximum..."
|
||||
);
|
||||
assert_eq!(
|
||||
account.mailbox_by_path("rchive").unwrap_err().to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"Some matching paths that were found: [\"inbox/Archive\", \
|
||||
\"inbox/Archive/Archive1\", \"inbox/Archive/Archive2\", \
|
||||
\"inbox/Archive/Archive3\", \"inbox/Archive/Archive4\"] and 7 others. You \
|
||||
can inspect the list of mailbox paths of an account with the \
|
||||
manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"inbox/Archive\") returns a valid result (since the \
|
||||
root mailbox is a valid maildir folder)..."
|
||||
);
|
||||
account.mailbox_by_path("inbox/Archive").unwrap();
|
||||
eprintln_ok();
|
||||
|
||||
eprint_step!("Cleanup maildir account with valid root mailbox...");
|
||||
std::fs::remove_dir_all(root_mailbox).unwrap();
|
||||
eprintln_ok();
|
||||
}
|
||||
|
||||
{
|
||||
eprint_step!(
|
||||
"Create maildir backend with a root mailbox, \"inbox\" which will NOT be a valid \
|
||||
maildir folder because it will NOT contain cur, new, tmp subdirectories..."
|
||||
);
|
||||
let mut ctx = crate::Context::new_mock(&temp_dir);
|
||||
let backend_event_queue = Arc::new(std::sync::Mutex::new(
|
||||
std::collections::VecDeque::with_capacity(16),
|
||||
));
|
||||
|
||||
let backend_event_consumer = {
|
||||
let backend_event_queue = Arc::clone(&backend_event_queue);
|
||||
|
||||
BackendEventConsumer::new(Arc::new(move |ah, be| {
|
||||
backend_event_queue.lock().unwrap().push_back((ah, be));
|
||||
}))
|
||||
};
|
||||
|
||||
let (_root_mailbox, settings, maildir) =
|
||||
new_maildir_backend(&temp_dir, ACCOUNT_NAME, backend_event_consumer, false).unwrap();
|
||||
eprintln_ok();
|
||||
let name = maildir.account_name.to_string();
|
||||
let account_hash = maildir.account_hash;
|
||||
let backend = maildir as Box<dyn MailBackend>;
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
eprint_step!("Assert that created account has no mailboxes at all...");
|
||||
assert!(
|
||||
ref_mailboxes.is_empty(),
|
||||
"ref_mailboxes were not empty: {:?}",
|
||||
ref_mailboxes
|
||||
);
|
||||
eprintln_ok();
|
||||
let contacts = melib::contacts::Contacts::new(name.to_string());
|
||||
|
||||
let mut account = super::Account {
|
||||
hash: account_hash,
|
||||
name: name.into(),
|
||||
is_online: super::IsOnline::True,
|
||||
mailbox_entries: Default::default(),
|
||||
mailboxes_order: Default::default(),
|
||||
tree: Default::default(),
|
||||
contacts,
|
||||
collection: backend.collection(),
|
||||
settings,
|
||||
main_loop_handler: ctx.main_loop_handler.clone(),
|
||||
active_jobs: HashMap::default(),
|
||||
active_job_instants: std::collections::BTreeMap::default(),
|
||||
event_queue: IndexMap::default(),
|
||||
backend_capabilities: backend.capabilities(),
|
||||
backend: Arc::new(std::sync::RwLock::new(backend)),
|
||||
};
|
||||
account.init(ref_mailboxes).unwrap();
|
||||
while let Ok(thread_event) = ctx.receiver.try_recv() {
|
||||
if let crate::ThreadEvent::JobFinished(job_id) = thread_event {
|
||||
if !account.process_event(&job_id) {
|
||||
assert!(
|
||||
ctx.accounts[0].process_event(&job_id),
|
||||
"unclaimed job id: {:?}",
|
||||
job_id
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"inbox\") does not return a valid result (there are no \
|
||||
mailboxes)..."
|
||||
);
|
||||
assert_eq!(
|
||||
account.mailbox_by_path("inbox").unwrap_err().to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"You can inspect the list of mailbox paths of an account with the \
|
||||
manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Create multiple maildir folders \"inbox/Archive{{1,2,3,4,5,6,7,8,9,10}}\"..."
|
||||
);
|
||||
macro_rules! wait_for_job {
|
||||
($job_id:expr) => {{
|
||||
let wait_for = $job_id;
|
||||
while let Ok(thread_event) = ctx.receiver.recv() {
|
||||
if let crate::ThreadEvent::JobFinished(job_id) = thread_event {
|
||||
if !account.process_event(&job_id) {
|
||||
assert!(
|
||||
ctx.accounts[0].process_event(&job_id),
|
||||
"unclaimed job id: {:?}",
|
||||
job_id
|
||||
);
|
||||
} else if job_id == wait_for {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}};
|
||||
}
|
||||
for i in 1..=10 {
|
||||
wait_for_job!(account
|
||||
.mailbox_operation(MailboxOperation::Create(format!("inbox/Archive{i}")))
|
||||
.unwrap());
|
||||
}
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"Archive{{n}}\") works, and that we don't have to \
|
||||
specify the root prefix \"inbox\"..."
|
||||
);
|
||||
for i in 1..=10 {
|
||||
account.mailbox_by_path(&format!("Archive{i}")).unwrap();
|
||||
}
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"rchive\") returns an error message with matches..."
|
||||
);
|
||||
assert_eq!(
|
||||
account.mailbox_by_path("rchive").unwrap_err().to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"Some matching paths that were found: [\"Archive1\", \"Archive2\", \
|
||||
\"Archive3\", \"Archive4\", \"Archive5\"] and 5 others. You can inspect the \
|
||||
list of mailbox paths of an account with the manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Assert that mailbox_by_path(\"inbox/Archive{{n}}\") does not return a valid result..."
|
||||
);
|
||||
assert_eq!(
|
||||
account
|
||||
.mailbox_by_path("inbox/Archive1")
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
Error {
|
||||
summary: "Mailbox with that path not found.".into(),
|
||||
details: Some(
|
||||
"You can inspect the list of mailbox paths of an account with the \
|
||||
manage-mailboxes command."
|
||||
.into()
|
||||
),
|
||||
source: None,
|
||||
inner: None,
|
||||
related_path: None,
|
||||
kind: ErrorKind::NotFound
|
||||
}
|
||||
.to_string()
|
||||
);
|
||||
eprintln_ok();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -21,7 +21,7 @@
//! User actions that need to be handled by the UI

use std::path::PathBuf;
use std::{path::PathBuf, sync::Arc};

use melib::{email::mailto::Mailto, Flag, SortField, SortOrder};
@@ -102,6 +102,7 @@ pub enum ViewAction {
    Pipe(String, Vec<String>),
    Filter(Option<String>),
    SaveAttachment(usize, String),
    PipeAttachment(usize, String, Vec<String>),
    ExportMail(String),
    AddAddressesToContacts,
}
@@ -168,7 +169,7 @@ type MailboxPath = String;
type NewMailboxPath = String;

macro_rules! impl_into_action {
    ($({$t:ty => $var:tt}),*) => {
    ($({$t:ty => $var:tt}),*$(,)?) => {
        $(
            impl From<$t> for Action {
                fn from(v: $t) -> Self {
@@ -179,11 +180,11 @@ macro_rules! impl_into_action {
    };
}
macro_rules! impl_tuple_into_action {
    ($({$a:ty,$b:ty => $var:tt}),*) => {
    ($({$a:ty,$b:ty => $var:tt}),*$(,)?) => {
        $(
            impl From<($a,$b)> for Action {
                fn from((a, b): ($a,$b)) -> Self {
                    Self::$var(a, b)
                    Self::$var(a.to_string(), b)
                }
            }
        )*
@@ -199,5 +200,7 @@ impl_into_action!(
);
impl_tuple_into_action!(
    { AccountName, MailboxOperation => Mailbox },
    { AccountName, AccountAction => AccountAction }
    { AccountName, AccountAction => AccountAction },
    { Arc<str>, MailboxOperation => Mailbox },
    { Arc<str>, AccountAction => AccountAction },
);
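A minimal usage sketch (not part of the patch) of the tuple conversions generated above, now that the trailing comma is accepted and `Arc<str>` entries exist; the account and mailbox names are made up, and `Action` and `MailboxOperation` are assumed to be in scope:

    use std::sync::Arc;

    // Hypothetical account/mailbox names for illustration only.
    let account: Arc<str> = Arc::from("personal");
    let action: Action =
        (account, MailboxOperation::Create("inbox/Archive1".into())).into();
    // The macro expansion turns this into
    // Action::Mailbox(account.to_string(), MailboxOperation::Create(..)).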
|
||||
|
|
|
@ -139,6 +139,7 @@ pub fn view(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>> {
|
|||
filter,
|
||||
pipe,
|
||||
save_attachment,
|
||||
pipe_attachment,
|
||||
export_mail,
|
||||
add_addresses_to_contacts,
|
||||
))(input)
|
||||
|
@ -895,6 +896,35 @@ pub fn save_attachment(input: &[u8]) -> IResult<&[u8], Result<Action, CommandErr
|
|||
let (input, _) = eof(input)?;
|
||||
Ok((input, Ok(View(SaveAttachment(idx, path.to_string())))))
|
||||
}
|
||||
pub fn pipe_attachment<'a>(input: &'a [u8]) -> IResult<&'a [u8], Result<Action, CommandError>> {
|
||||
let mut check = arg_init! { min_arg:2, max_arg:{u8::MAX}, pipe_attachment};
|
||||
let (input, _) = tag("pipe-attachment")(input.trim())?;
|
||||
arg_chk!(start check, input);
|
||||
let (input, _) = is_a(" ")(input)?;
|
||||
arg_chk!(inc check, input);
|
||||
let (input, idx) = map_res(quoted_argument, usize::from_str)(input)?;
|
||||
let (input, _) = is_a(" ")(input)?;
|
||||
arg_chk!(inc check, input);
|
||||
let (input, bin) = quoted_argument(input)?;
|
||||
arg_chk!(inc check, input);
|
||||
let (input, args) = alt((
|
||||
|input: &'a [u8]| -> IResult<&'a [u8], Vec<String>> {
|
||||
let (input, _) = is_a(" ")(input)?;
|
||||
let (input, args) = separated_list1(is_a(" "), quoted_argument)(input)?;
|
||||
let (input, _) = eof(input)?;
|
||||
Ok((
|
||||
input,
|
||||
args.into_iter().map(String::from).collect::<Vec<String>>(),
|
||||
))
|
||||
},
|
||||
|input: &'a [u8]| -> IResult<&'a [u8], Vec<String>> {
|
||||
let (input, _) = eof(input)?;
|
||||
Ok((input, Vec::with_capacity(0)))
|
||||
},
|
||||
))(input)?;
|
||||
arg_chk!(finish check, input);
|
||||
Ok((input, Ok(View(PipeAttachment(idx, bin.to_string(), args)))))
|
||||
}
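A small sketch (not from the patch) of the input shape this parser accepts, with made-up index, binary and argument values; it assumes the surrounding nom helpers shown above are in scope:

    // Grammar: "pipe-attachment <index> <binary> [args...]", arguments optional.
    let (_rest, result) = pipe_attachment(b"pipe-attachment 2 jq .").unwrap();
    // `result` should be Ok(View(PipeAttachment(2, "jq".to_string(), vec![".".to_string()]))).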
|
||||
pub fn export_mail(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>> {
|
||||
let mut check = arg_init! { min_arg:1, max_arg: 1, export_mail};
|
||||
let (input, _) = tag("export-mail")(input.trim())?;
|
||||
|
@ -1050,36 +1080,38 @@ pub fn manage_jobs(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>>
|
|||
Ok((input, Ok(Tab(ManageJobs))))
|
||||
}
|
||||
|
||||
#[cfg(feature = "cli-docs")]
|
||||
pub fn view_manpage(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>> {
|
||||
let mut check = arg_init! { min_arg:1, max_arg: 1, view_manpage };
|
||||
let (input, _) = tag("man")(input.trim())?;
|
||||
arg_chk!(start check, input);
|
||||
let (input, _) = is_a(" ")(input)?;
|
||||
arg_chk!(inc check, input);
|
||||
#[allow(unused_variables)]
|
||||
let (input, manpage) = map_res(not_line_ending, std::str::from_utf8)(input.trim())?;
|
||||
let (input, _) = eof(input)?;
|
||||
arg_chk!(finish check, input);
|
||||
match crate::manpages::parse_manpage(manpage) {
|
||||
Ok(m) => Ok((input, Ok(Tab(Man(m))))),
|
||||
Err(err) => Ok((
|
||||
input,
|
||||
Err(CommandError::BadValue {
|
||||
inner: err.to_string().into(),
|
||||
suggestions: Some(crate::manpages::POSSIBLE_VALUES),
|
||||
}),
|
||||
)),
|
||||
#[cfg(feature = "cli-docs")]
|
||||
{
|
||||
match crate::manpages::parse_manpage(manpage) {
|
||||
Ok(m) => Ok((input, Ok(Tab(Man(m))))),
|
||||
Err(err) => Ok((
|
||||
input,
|
||||
Err(CommandError::BadValue {
|
||||
inner: err.to_string().into(),
|
||||
suggestions: Some(crate::manpages::POSSIBLE_VALUES),
|
||||
}),
|
||||
)),
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "cli-docs"))]
|
||||
{
|
||||
Ok((
|
||||
input,
|
||||
Err(CommandError::Other {
|
||||
inner: "this meli binary has not been compiled with the cli-docs feature".into(),
|
||||
}),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "cli-docs"))]
|
||||
pub fn view_manpage(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>> {
|
||||
Ok((
|
||||
input,
|
||||
Err(CommandError::Other {
|
||||
inner: "this meli binary has not been compiled with the cli-docs feature".into(),
|
||||
}),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn quit(input: &[u8]) -> IResult<&[u8], Result<Action, CommandError>> {
|
||||
|
|
|
@@ -71,6 +71,13 @@ pub use themes::*;
pub use self::{composing::*, pgp::*, shortcuts::*, tags::*};

/// Utility macro to access an [`AccountConf`] setting field from
/// [`Context`](crate::Context) indexed by `$account_hash`
///
/// The value returned is the optionally overridden one in the
/// [`AccountConf::conf_override`] field, otherwise the global one.
///
/// See also the [`mailbox_settings`](crate::mailbox_settings) macro.
#[macro_export]
macro_rules! account_settings {
    ($context:ident[$account_hash:expr].$setting:ident.$field:ident) => {{
@@ -87,6 +94,14 @@ macro_rules! account_settings {
    }};
}

/// Utility macro to access an [`AccountConf`] setting field from
/// [`Context`](crate::Context) indexed by `$account_hash` and a mailbox.
///
/// The value returned is the optionally overridden one in the
/// [`FileMailboxConf::conf_override`] field, otherwise the
/// [`AccountConf::conf_override`] field, otherwise the global one.
///
/// See also the [`account_settings`] macro.
#[macro_export]
macro_rules! mailbox_settings {
    ($context:ident[$account_hash:expr][$mailbox_path:expr].$setting:ident.$field:ident) => {{
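A rough usage sketch for these two macros, modelled on the invocations that appear elsewhere in this patch; `context`, `self.account_hash` and `mailbox_path` are assumed to be in scope, and the `pager.filter` field in the second line is only an assumed example setting:

    // Per-account value: conf_override if set, otherwise the global setting.
    let composing_map =
        account_settings!(context[self.account_hash].shortcuts.composing).key_values();

    // Per-mailbox value (assumed field name): mailbox override, then account override, then global.
    let filter = mailbox_settings!(context[self.account_hash][mailbox_path].pager.filter);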
|
||||
|
@ -256,6 +271,14 @@ impl AccountConf {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<melib::AccountSettings> for AccountConf {
|
||||
fn from(account: melib::AccountSettings) -> Self {
|
||||
Self {
|
||||
account,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<FileAccount> for AccountConf {
|
||||
fn from(x: FileAccount) -> Self {
|
||||
let format = x.format.to_lowercase();
|
||||
|
@ -269,7 +292,7 @@ impl From<FileAccount> for AccountConf {
|
|||
.map(|(k, v)| (k.clone(), v.mailbox_conf.clone()))
|
||||
.collect();
|
||||
|
||||
let acc = melib::AccountSettings {
|
||||
let account = melib::AccountSettings {
|
||||
name: String::new(),
|
||||
root_mailbox,
|
||||
format,
|
||||
|
@ -286,13 +309,13 @@ impl From<FileAccount> for AccountConf {
|
|||
|
||||
let mailbox_confs = x.mailboxes.clone();
|
||||
Self {
|
||||
account: acc,
|
||||
send_mail: x.send_mail.clone(),
|
||||
default_mailbox: None,
|
||||
sent_mailbox: None,
|
||||
conf_override: x.conf_override.clone(),
|
||||
conf: x,
|
||||
mailbox_confs,
|
||||
..Self::from(account)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -330,7 +353,11 @@ impl FileSettings {
|
|||
path_string
|
||||
));
|
||||
#[cfg(not(test))]
|
||||
if ask.run() {
|
||||
let mut stdout = std::io::stdout();
|
||||
#[cfg(not(test))]
|
||||
let stdin = std::io::stdin();
|
||||
#[cfg(not(test))]
|
||||
if ask.run(&mut stdout, &mut stdin.lock()) {
|
||||
create_config_file(&config_path)?;
|
||||
return Err(
|
||||
Error::new("Edit the sample configuration and relaunch meli.")
|
||||
|
@ -345,7 +372,9 @@ impl FileSettings {
|
|||
);
|
||||
}
|
||||
|
||||
crate::version_migrations::version_setup(&config_path)?;
|
||||
let mut stdout = std::io::stdout();
|
||||
let stdin = std::io::stdin();
|
||||
crate::version_migrations::version_setup(&config_path, &mut stdout, &mut stdin.lock())?;
|
||||
Self::validate(config_path, false)
|
||||
}
|
||||
|
||||
|
|
|
@ -185,7 +185,7 @@ pub struct FindIter<'r, 's> {
|
|||
char_offset: usize,
|
||||
}
|
||||
|
||||
impl<'r, 's> Iterator for FindIter<'r, 's> {
|
||||
impl Iterator for FindIter<'_, '_> {
|
||||
type Item = (usize, usize);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
|
|
|
@@ -79,7 +79,7 @@ impl TerminalSettings {
        // Don't use color if
        // - Either NO_COLOR is set and user hasn't explicitly set use_colors, or
        // - User has explicitly set use_colors to false
        !((std::env::var("NO_COLOR").is_ok()
        !((std::env::var_os("NO_COLOR").is_some()
            && (self.use_color.is_false() || self.use_color.is_internal()))
            || (self.use_color.is_false() && !self.use_color.is_internal()))
    }
|
||||
|
@@ -120,19 +120,96 @@ impl DotAddressable for TerminalSettings {
    }
}

#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(untagged)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ProgressSpinnerSequence {
    Integer(usize),
    Custom {
        frames: Vec<String>,
        #[serde(default = "interval_ms_val")]
        interval_ms: u64,
    },
}
|
||||
|
||||
impl ProgressSpinnerSequence {
|
||||
pub const fn interval_ms(&self) -> u64 {
|
||||
match self {
|
||||
Self::Integer(_) => interval_ms_val(),
|
||||
Self::Custom {
|
||||
frames: _,
|
||||
interval_ms,
|
||||
} => *interval_ms,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
const fn interval_ms_val() -> u64 {
|
||||
crate::utilities::ProgressSpinner::INTERVAL_MS
|
||||
}
|
||||
|
||||
impl DotAddressable for ProgressSpinnerSequence {}
|
||||
|
||||
impl<'de> Deserialize<'de> for ProgressSpinnerSequence {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
#[serde(untagged)]
|
||||
enum Inner {
|
||||
Integer(usize),
|
||||
Frames(Vec<String>),
|
||||
Custom {
|
||||
frames: Vec<String>,
|
||||
#[serde(default = "interval_ms_val")]
|
||||
interval_ms: u64,
|
||||
},
|
||||
}
|
||||
let s = <Inner>::deserialize(deserializer)?;
|
||||
match s {
|
||||
Inner::Integer(i) => Ok(Self::Integer(i)),
|
||||
Inner::Frames(frames) => Ok(Self::Custom {
|
||||
frames,
|
||||
interval_ms: interval_ms_val(),
|
||||
}),
|
||||
Inner::Custom {
|
||||
frames,
|
||||
interval_ms,
|
||||
} => Ok(Self::Custom {
|
||||
frames,
|
||||
interval_ms,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for ProgressSpinnerSequence {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
Self::Integer(i) => serializer.serialize_i64(*i as i64),
|
||||
Self::Custom {
|
||||
frames,
|
||||
interval_ms,
|
||||
} => {
|
||||
if *interval_ms == interval_ms_val() {
|
||||
use serde::ser::SerializeSeq;
|
||||
let mut seq = serializer.serialize_seq(Some(frames.len()))?;
|
||||
for element in frames {
|
||||
seq.serialize_element(element)?;
|
||||
}
|
||||
seq.end()
|
||||
} else {
|
||||
use serde::ser::SerializeMap;
|
||||
|
||||
let mut map = serializer.serialize_map(Some(2))?;
|
||||
map.serialize_entry("frames", frames)?;
|
||||
map.serialize_entry("interval_ms", interval_ms)?;
|
||||
map.end()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
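Going by the `Inner` helper enum in the Deserialize impl above, the spinner sequence appears to accept three TOML shapes; a hedged sketch follows, where `progress_spinner_sequence` is only an illustrative key name and not necessarily the real configuration field:

    use indexmap::IndexMap;

    // 1. Index of a built-in sequence.
    let by_index: IndexMap<String, ProgressSpinnerSequence> =
        toml::from_str(r#"progress_spinner_sequence = 2"#).unwrap();
    // 2. Bare list of frames, using the default interval.
    let by_frames: IndexMap<String, ProgressSpinnerSequence> =
        toml::from_str(r#"progress_spinner_sequence = ["-", "\\", "|", "/"]"#).unwrap();
    // 3. Table with frames and an explicit interval in milliseconds.
    let custom: IndexMap<String, ProgressSpinnerSequence> = toml::from_str(
        r#"progress_spinner_sequence = { frames = ["-", "\\", "|", "/"], interval_ms = 51 }"#,
    )
    .unwrap();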
|
||||
|
|
|
@ -109,7 +109,7 @@ server_password_command = "false"
|
|||
"#;
|
||||
|
||||
#[test]
|
||||
fn test_config_parse() {
|
||||
fn test_conf_config_parse() {
|
||||
let tempdir = tempfile::tempdir().unwrap();
|
||||
let new_file = ConfigFile::new(TEST_CONFIG, &tempdir).unwrap();
|
||||
let err = FileSettings::validate(new_file.path.clone(), true).unwrap_err();
|
||||
|
@ -135,31 +135,36 @@ fn test_config_parse() {
|
|||
|
||||
/* Test sample config */
|
||||
|
||||
let example_config = FileSettings::EXAMPLE_CONFIG.replace("\n#", "\n");
|
||||
let re = regex::Regex::new(r#"root_mailbox\s*=\s*"[^"]*""#).unwrap();
|
||||
let example_config = re.replace_all(
|
||||
&example_config,
|
||||
&format!(r#"root_mailbox = "{}""#, tempdir.path().to_str().unwrap()),
|
||||
);
|
||||
// Sample config contains `crate::conf::composing::SendMail::Smtp` variant which
|
||||
// only exists if meli is built with the `smtp` feature.
|
||||
if cfg!(feature = "smtp") {
|
||||
let example_config = FileSettings::EXAMPLE_CONFIG.replace("\n#", "\n");
|
||||
let re = regex::Regex::new(r#"root_mailbox\s*=\s*"[^"]*""#).unwrap();
|
||||
let example_config = re.replace_all(
|
||||
&example_config,
|
||||
&format!(r#"root_mailbox = "{}""#, tempdir.path().to_str().unwrap()),
|
||||
);
|
||||
|
||||
let new_file = ConfigFile::new(&example_config, &tempdir).unwrap();
|
||||
let config = FileSettings::validate(new_file.path.clone(), true)
|
||||
.expect("Could not parse example config!");
|
||||
for (accname, acc) in config.accounts.iter() {
|
||||
if !acc.extra.is_empty() {
|
||||
panic!(
|
||||
"In example config, account `{}` has unrecognised configuration entries: {:?}",
|
||||
accname, acc.extra
|
||||
);
|
||||
let new_file = ConfigFile::new(&example_config, &tempdir).unwrap();
|
||||
let config = FileSettings::validate(new_file.path.clone(), true)
|
||||
.expect("Could not parse example config!");
|
||||
for (accname, acc) in config.accounts.iter() {
|
||||
if !acc.extra.is_empty() {
|
||||
panic!(
|
||||
"In example config, account `{}` has unrecognised configuration entries: {:?}",
|
||||
accname, acc.extra
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(err) = tempdir.close() {
|
||||
eprintln!("Could not cleanup tempdir: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_theme_parsing() {
|
||||
fn test_conf_theme_parsing() {
|
||||
/* MUST SUCCEED: default themes should be valid */
|
||||
let def = Themes::default();
|
||||
def.validate().unwrap();
|
||||
|
@ -272,7 +277,7 @@ color_aliases= { "Jebediah" = "$JebediahJr", "JebediahJr" = "mail.listing.tag_de
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_theme_key_values() {
|
||||
fn test_conf_theme_key_values() {
|
||||
use std::{collections::VecDeque, fs::File, io::Read, path::PathBuf};
|
||||
let mut rust_files: VecDeque<PathBuf> = VecDeque::new();
|
||||
let mut dirs_queue: VecDeque<PathBuf> = VecDeque::new();
|
||||
|
@ -309,3 +314,71 @@ fn test_theme_key_values() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conf_progress_spinner_sequence() {
|
||||
use crate::{conf::terminal::ProgressSpinnerSequence, utilities::ProgressSpinner};
|
||||
|
||||
let int_0 = ProgressSpinnerSequence::Integer(5);
|
||||
assert_eq!(
|
||||
toml::Value::try_from(&int_0).unwrap(),
|
||||
toml::Value::try_from(5).unwrap()
|
||||
);
|
||||
|
||||
let frames = ProgressSpinnerSequence::Custom {
|
||||
frames: vec![
|
||||
"⠁".to_string(),
|
||||
"⠂".to_string(),
|
||||
"⠄".to_string(),
|
||||
"⡀".to_string(),
|
||||
"⢀".to_string(),
|
||||
"⠠".to_string(),
|
||||
"⠐".to_string(),
|
||||
"⠈".to_string(),
|
||||
],
|
||||
interval_ms: ProgressSpinner::INTERVAL_MS,
|
||||
};
|
||||
assert_eq!(frames.interval_ms(), ProgressSpinner::INTERVAL_MS);
|
||||
assert_eq!(
|
||||
toml::Value::try_from(&frames).unwrap(),
|
||||
toml::Value::try_from(["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"]).unwrap()
|
||||
);
|
||||
let frames = ProgressSpinnerSequence::Custom {
|
||||
frames: vec![
|
||||
"⠁".to_string(),
|
||||
"⠂".to_string(),
|
||||
"⠄".to_string(),
|
||||
"⡀".to_string(),
|
||||
"⢀".to_string(),
|
||||
"⠠".to_string(),
|
||||
"⠐".to_string(),
|
||||
"⠈".to_string(),
|
||||
],
|
||||
interval_ms: ProgressSpinner::INTERVAL_MS + 1,
|
||||
};
|
||||
assert_eq!(
|
||||
toml::Value::try_from(&frames).unwrap(),
|
||||
toml::Value::try_from(indexmap::indexmap! {
|
||||
"frames" => toml::Value::try_from(["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"]).unwrap(),
|
||||
"interval_ms" => toml::Value::try_from(ProgressSpinner::INTERVAL_MS + 1).unwrap()
|
||||
})
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
toml::from_str::<ProgressSpinnerSequence>(
|
||||
r#"frames = ["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"]
|
||||
interval_ms = 51"#
|
||||
)
|
||||
.unwrap(),
|
||||
frames
|
||||
);
|
||||
assert_eq!(
|
||||
toml::from_str::<indexmap::IndexMap<String, ProgressSpinnerSequence>>(
|
||||
r#"sequence = { frames = ["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"], interval_ms = 51 }"#
|
||||
)
|
||||
.unwrap(),
|
||||
indexmap::indexmap! {
|
||||
"sequence".to_string() => frames,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
|
@ -209,7 +209,7 @@ impl Component for ContactManager {
|
|||
match self.form.buttons_result() {
|
||||
None => {}
|
||||
Some(FormButtonAction::Accept) => {
|
||||
let fields = std::mem::take(&mut self.form).collect().unwrap();
|
||||
let fields = std::mem::take(&mut self.form).collect();
|
||||
let fields: IndexMap<String, String> = fields
|
||||
.into_iter()
|
||||
.map(|(s, v)| {
|
||||
|
@ -237,7 +237,7 @@ impl Component for ContactManager {
|
|||
}
|
||||
Some(FormButtonAction::Other("Export")) => {
|
||||
let card = if self.has_changes {
|
||||
let fields = self.form.clone().collect().unwrap();
|
||||
let fields = self.form.clone().collect();
|
||||
let fields: IndexMap<String, String> = fields
|
||||
.into_iter()
|
||||
.map(|(s, v)| {
|
||||
|
|
|
@ -725,6 +725,12 @@ To: {}
|
|||
let attachments_no = self.draft.attachments().len();
|
||||
let theme_default = crate::conf::value(context, "theme_default");
|
||||
grid.clear_area(area, theme_default);
|
||||
let our_map: ShortcutMap =
|
||||
account_settings!(context[self.account_hash].shortcuts.composing).key_values();
|
||||
let mut shortcuts: ShortcutMaps = Default::default();
|
||||
shortcuts.insert(Shortcuts::COMPOSING, our_map);
|
||||
let toggle_shortcut = Key::Char('\n');
|
||||
let edit_shortcut = &shortcuts[Shortcuts::COMPOSING]["edit"];
|
||||
#[cfg(feature = "gpgme")]
|
||||
if self
|
||||
.gpg_state
|
||||
|
@ -736,18 +742,20 @@ To: {}
|
|||
.gpg_state
|
||||
.sign_keys
|
||||
.iter()
|
||||
.map(|k| k.fingerprint())
|
||||
.map(|k| k.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
grid.write_string(
|
||||
&format!(
|
||||
"☑ sign with {}",
|
||||
"☑ sign with [toggle: {}, edit: {}] {}",
|
||||
toggle_shortcut,
|
||||
edit_shortcut,
|
||||
if self.gpg_state.sign_keys.is_empty() {
|
||||
"default key"
|
||||
} else {
|
||||
key_list.as_str()
|
||||
}
|
||||
},
|
||||
),
|
||||
theme_default.fg,
|
||||
if self.cursor == Cursor::Sign {
|
||||
|
@ -762,7 +770,10 @@ To: {}
|
|||
);
|
||||
} else {
|
||||
grid.write_string(
|
||||
"☐ don't sign",
|
||||
&format!(
|
||||
"☐ don't sign [toggle: {}, edit: {}]",
|
||||
toggle_shortcut, edit_shortcut,
|
||||
),
|
||||
theme_default.fg,
|
||||
if self.cursor == Cursor::Sign {
|
||||
crate::conf::value(context, "highlight").bg
|
||||
|
@ -786,18 +797,28 @@ To: {}
|
|||
.gpg_state
|
||||
.encrypt_keys
|
||||
.iter()
|
||||
.map(|k| k.fingerprint())
|
||||
.map(|k| k.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
grid.write_string(
|
||||
&format!(
|
||||
"{}{}",
|
||||
"{}{}{}",
|
||||
if self.gpg_state.encrypt_keys.is_empty() {
|
||||
"☐ no keys to encrypt with!"
|
||||
"☐ no keys selected to encrypt with"
|
||||
} else {
|
||||
"☑ encrypt with "
|
||||
"☑ encrypt with"
|
||||
},
|
||||
&format!(
|
||||
" [toggle: {}, edit: {}]{}",
|
||||
toggle_shortcut,
|
||||
edit_shortcut,
|
||||
if self.gpg_state.encrypt_keys.is_empty() {
|
||||
""
|
||||
} else {
|
||||
" "
|
||||
}
|
||||
),
|
||||
if self.gpg_state.encrypt_keys.is_empty() {
|
||||
""
|
||||
} else {
|
||||
|
@ -817,7 +838,10 @@ To: {}
|
|||
);
|
||||
} else {
|
||||
grid.write_string(
|
||||
"☐ don't encrypt",
|
||||
&format!(
|
||||
"☐ don't encrypt [toggle: {}, edit: {}]",
|
||||
toggle_shortcut, edit_shortcut,
|
||||
),
|
||||
theme_default.fg,
|
||||
if self.cursor == Cursor::Encrypt {
|
||||
crate::conf::value(context, "highlight").bg
|
||||
|
@ -832,7 +856,7 @@ To: {}
|
|||
}
|
||||
if attachments_no == 0 {
|
||||
grid.write_string(
|
||||
"no attachments",
|
||||
&format!("no attachments [edit: {}]", edit_shortcut),
|
||||
theme_default.fg,
|
||||
if self.cursor == Cursor::Attachments {
|
||||
crate::conf::value(context, "highlight").bg
|
||||
|
@ -846,7 +870,7 @@ To: {}
|
|||
);
|
||||
} else {
|
||||
grid.write_string(
|
||||
&format!("{} attachments ", attachments_no),
|
||||
&format!("{} attachments [edit: {}]", attachments_no, edit_shortcut),
|
||||
theme_default.fg,
|
||||
if self.cursor == Cursor::Attachments {
|
||||
crate::conf::value(context, "highlight").bg
|
||||
|
@ -912,6 +936,36 @@ To: {}
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "gpgme")]
|
||||
fn create_key_selection_widget(
|
||||
&self,
|
||||
secret: bool,
|
||||
header: &HeaderName,
|
||||
context: &Context,
|
||||
) -> Result<gpg::KeySelectionLoading> {
|
||||
let (_, mut list) = melib::email::parser::address::rfc2822address_list(
|
||||
self.form.values()[header.as_str()].as_str().as_bytes(),
|
||||
)
|
||||
.map_err(|_err| -> Error { format!("No valid address in `{header}:`").into() })?;
|
||||
if list.is_empty() {
|
||||
return Err(format!("No valid address in `{header}:`").into());
|
||||
}
|
||||
let first = list.remove(0);
|
||||
let patterns = (
|
||||
first.get_email(),
|
||||
list.into_iter()
|
||||
.map(|addr| addr.get_email())
|
||||
.collect::<Vec<String>>(),
|
||||
);
|
||||
gpg::KeySelectionLoading::new(
|
||||
secret,
|
||||
account_settings!(context[self.account_hash].pgp.allow_remote_lookup).is_true(),
|
||||
patterns,
|
||||
*account_settings!(context[self.account_hash].pgp.allow_remote_lookup),
|
||||
context,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Component for Composer {
|
||||
|
@ -978,12 +1032,27 @@ impl Component for Composer {
|
|||
|
||||
if self.dirty {
|
||||
grid.clear_area(area.nth_row(0), crate::conf::value(context, "highlight"));
|
||||
let our_map: ShortcutMap =
|
||||
account_settings!(context[self.account_hash].shortcuts.composing).key_values();
|
||||
let mut shortcuts: ShortcutMaps = Default::default();
|
||||
shortcuts.insert(Shortcuts::COMPOSING, our_map);
|
||||
let scroll_down_shortcut = &shortcuts[Shortcuts::COMPOSING]["scroll_down"];
|
||||
let scroll_up_shortcut = &shortcuts[Shortcuts::COMPOSING]["scroll_up"];
|
||||
let field_shortcut = Key::Char('\n');
|
||||
let edit_shortcut = &shortcuts[Shortcuts::COMPOSING]["edit"];
|
||||
grid.write_string(
|
||||
if self.reply_context.is_some() {
|
||||
"COMPOSING REPLY"
|
||||
} else {
|
||||
"COMPOSING MESSAGE"
|
||||
},
|
||||
&format!(
|
||||
"COMPOSING {} [scroll down: {}, scroll up: {}, edit fields: {}, edit body: {}]",
|
||||
if self.reply_context.is_some() {
|
||||
"REPLY"
|
||||
} else {
|
||||
"MESSAGE"
|
||||
},
|
||||
scroll_down_shortcut,
|
||||
scroll_up_shortcut,
|
||||
field_shortcut,
|
||||
edit_shortcut
|
||||
),
|
||||
crate::conf::value(context, "highlight").fg,
|
||||
crate::conf::value(context, "highlight").bg,
|
||||
crate::conf::value(context, "highlight").attrs,
|
||||
|
@ -1049,11 +1118,11 @@ impl Component for Composer {
|
|||
let stopped_message: String =
|
||||
format!("Process with PID {} has stopped.", guard.child_pid);
|
||||
let stopped_message_2: String = format!(
|
||||
"-press '{}' (edit shortcut) to re-activate.",
|
||||
"- re-activate '{}' (edit shortcut)",
|
||||
shortcuts[Shortcuts::COMPOSING]["edit"]
|
||||
);
|
||||
const STOPPED_MESSAGE_3: &str =
|
||||
"-press Ctrl-C to forcefully kill it and return to editor.";
|
||||
"- press Ctrl-C to forcefully kill it and return to editor";
|
||||
let max_len = std::cmp::max(
|
||||
stopped_message.len(),
|
||||
std::cmp::max(stopped_message_2.len(), STOPPED_MESSAGE_3.len()),
|
||||
|
@ -1473,13 +1542,13 @@ impl Component for Composer {
|
|||
ViewMode::SelectKey(is_encrypt, ref mut selector),
|
||||
UIEvent::FinishedUIDialog(id, result),
|
||||
) if *id == selector.id() => {
|
||||
if let Some(Some(key)) = result.downcast_mut::<Option<melib::gpgme::Key>>() {
|
||||
if let Some(Some(keys)) = result.downcast_mut::<Option<Vec<melib::gpgme::Key>>>() {
|
||||
if *is_encrypt {
|
||||
self.gpg_state.encrypt_keys.clear();
|
||||
self.gpg_state.encrypt_keys.push(key.clone());
|
||||
self.gpg_state.encrypt_keys = std::mem::take(keys);
|
||||
} else {
|
||||
self.gpg_state.sign_keys.clear();
|
||||
self.gpg_state.sign_keys.push(key.clone());
|
||||
self.gpg_state.sign_keys = std::mem::take(keys);
|
||||
}
|
||||
}
|
||||
self.mode = ViewMode::Edit;
|
||||
|
@ -1756,39 +1825,28 @@ impl Component for Composer {
|
|||
&& shortcut!(key == shortcuts[Shortcuts::COMPOSING]["edit"]) =>
|
||||
{
|
||||
#[cfg(feature = "gpgme")]
|
||||
match melib::email::parser::address::rfc2822address_list(
|
||||
self.form.values()["From"].as_str().as_bytes(),
|
||||
)
|
||||
.map_err(|_err| -> Error { "No valid sender address in `From:`".into() })
|
||||
.and_then(|(_, list)| {
|
||||
list.first()
|
||||
.cloned()
|
||||
.ok_or_else(|| "No valid sender address in `From:`".into())
|
||||
})
|
||||
.and_then(|addr| {
|
||||
gpg::KeySelection::new(
|
||||
false,
|
||||
account_settings!(context[self.account_hash].pgp.allow_remote_lookup)
|
||||
.is_true(),
|
||||
addr.get_email(),
|
||||
*account_settings!(context[self.account_hash].pgp.allow_remote_lookup),
|
||||
context,
|
||||
)
|
||||
}) {
|
||||
Ok(widget) => {
|
||||
self.gpg_state.sign_mail = Some(ActionFlag::from(true));
|
||||
self.mode = ViewMode::SelectKey(false, widget);
|
||||
}
|
||||
Err(err) => {
|
||||
context.replies.push_back(UIEvent::Notification {
|
||||
title: Some("Could not list keys.".into()),
|
||||
source: None,
|
||||
body: format!("libgpgme error: {err}").into(),
|
||||
kind: Some(NotificationType::Error(melib::error::ErrorKind::External)),
|
||||
});
|
||||
{
|
||||
match self
|
||||
.create_key_selection_widget(false, &HeaderName::FROM, context)
|
||||
.map(Into::into)
|
||||
{
|
||||
Ok(widget) => {
|
||||
self.gpg_state.sign_mail = Some(ActionFlag::from(true));
|
||||
self.mode = ViewMode::SelectKey(false, widget);
|
||||
}
|
||||
Err(err) => {
|
||||
context.replies.push_back(UIEvent::Notification {
|
||||
title: Some("Could not list keys.".into()),
|
||||
source: None,
|
||||
body: format!("libgpgme error: {err}").into(),
|
||||
kind: Some(NotificationType::Error(
|
||||
melib::error::ErrorKind::External,
|
||||
)),
|
||||
});
|
||||
}
|
||||
}
|
||||
self.set_dirty(true);
|
||||
}
|
||||
self.set_dirty(true);
|
||||
return true;
|
||||
}
|
||||
UIEvent::Input(ref key)
|
||||
|
@ -1797,39 +1855,63 @@ impl Component for Composer {
|
|||
&& shortcut!(key == shortcuts[Shortcuts::COMPOSING]["edit"]) =>
|
||||
{
|
||||
#[cfg(feature = "gpgme")]
|
||||
match melib::email::parser::address::rfc2822address_list(
|
||||
self.form.values()["To"].as_str().as_bytes(),
|
||||
)
|
||||
.map_err(|_err| -> Error { "No valid recipient addresses in `To:`".into() })
|
||||
.and_then(|(_, list)| {
|
||||
list.first()
|
||||
.cloned()
|
||||
.ok_or_else(|| "No valid recipient addresses in `To:`".into())
|
||||
})
|
||||
.and_then(|addr| {
|
||||
gpg::KeySelection::new(
|
||||
false,
|
||||
account_settings!(context[self.account_hash].pgp.allow_remote_lookup)
|
||||
.is_true(),
|
||||
addr.get_email(),
|
||||
*account_settings!(context[self.account_hash].pgp.allow_remote_lookup),
|
||||
context,
|
||||
)
|
||||
}) {
|
||||
Ok(widget) => {
|
||||
self.gpg_state.encrypt_mail = Some(ActionFlag::from(true));
|
||||
self.mode = ViewMode::SelectKey(true, widget);
|
||||
}
|
||||
Err(err) => {
|
||||
context.replies.push_back(UIEvent::Notification {
|
||||
title: Some("Could not list keys.".into()),
|
||||
source: None,
|
||||
body: format!("libgpgme error: {err}").into(),
|
||||
kind: Some(NotificationType::Error(melib::error::ErrorKind::External)),
|
||||
{
|
||||
let mut result =
|
||||
self.create_key_selection_widget(false, &HeaderName::TO, context);
|
||||
if !self.form.values()[HeaderName::CC.as_str()]
|
||||
.as_str()
|
||||
.is_empty()
|
||||
{
|
||||
result = result.and_then(|mut to_result| {
|
||||
let cc_result =
|
||||
self.create_key_selection_widget(false, &HeaderName::CC, context)?;
|
||||
to_result.merge(cc_result);
|
||||
Ok(to_result)
|
||||
});
|
||||
}
|
||||
if !self.form.values()[HeaderName::BCC.as_str()]
|
||||
.as_str()
|
||||
.is_empty()
|
||||
{
|
||||
result = result.and_then(|mut to_result| {
|
||||
let bcc_result =
|
||||
self.create_key_selection_widget(false, &HeaderName::BCC, context)?;
|
||||
to_result.merge(bcc_result);
|
||||
Ok(to_result)
|
||||
});
|
||||
}
|
||||
if !self.form.values()[HeaderName::FROM.as_str()]
|
||||
.as_str()
|
||||
.is_empty()
|
||||
{
|
||||
result = result.and_then(|mut to_result| {
|
||||
let from_result = self.create_key_selection_widget(
|
||||
false,
|
||||
&HeaderName::FROM,
|
||||
context,
|
||||
)?;
|
||||
to_result.merge(from_result);
|
||||
Ok(to_result)
|
||||
});
|
||||
}
|
||||
match result.map(Into::into) {
|
||||
Ok(widget) => {
|
||||
self.gpg_state.encrypt_mail = Some(ActionFlag::from(true));
|
||||
self.mode = ViewMode::SelectKey(true, widget);
|
||||
}
|
||||
Err(err) => {
|
||||
context.replies.push_back(UIEvent::Notification {
|
||||
title: Some("Could not list keys.".into()),
|
||||
source: None,
|
||||
body: format!("libgpgme error: {err}").into(),
|
||||
kind: Some(NotificationType::Error(
|
||||
melib::error::ErrorKind::External,
|
||||
)),
|
||||
});
|
||||
}
|
||||
}
|
||||
self.set_dirty(true);
|
||||
}
|
||||
self.set_dirty(true);
|
||||
return true;
|
||||
}
|
||||
UIEvent::Input(ref key)
|
||||
|
@ -2658,7 +2740,7 @@ pub fn send_draft_async(
|
|||
gpg_state.encrypt_for_self.then_some(()).map_or_else(
|
||||
|| Ok(None),
|
||||
|()| {
|
||||
draft.headers().get(HeaderName::TO).map_or_else(
|
||||
draft.headers().get(HeaderName::FROM).map_or_else(
|
||||
|| Ok(None),
|
||||
|s| Some(melib::Address::try_from(s)).transpose(),
|
||||
)
|
||||
|
|
|
@ -21,15 +21,94 @@
|
|||
|
||||
use super::*;
|
||||
|
||||
type KeylistJoinHandle = JoinHandle<Result<Vec<melib::gpgme::Key>>>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum KeySelection {
|
||||
LoadingKeys {
|
||||
handle: JoinHandle<Result<Vec<melib::gpgme::Key>>>,
|
||||
progress_spinner: ProgressSpinner,
|
||||
pub struct KeySelectionLoading {
|
||||
handles: (KeylistJoinHandle, Vec<KeylistJoinHandle>),
|
||||
progress_spinner: ProgressSpinner,
|
||||
secret: bool,
|
||||
local: bool,
|
||||
patterns: (String, Vec<String>),
|
||||
allow_remote_lookup: ActionFlag,
|
||||
}
|
||||
|
||||
impl KeySelectionLoading {
|
||||
pub fn new(
|
||||
secret: bool,
|
||||
local: bool,
|
||||
pattern: String,
|
||||
patterns: (String, Vec<String>),
|
||||
allow_remote_lookup: ActionFlag,
|
||||
context: &Context,
|
||||
) -> Result<Self> {
|
||||
use melib::gpgme::{self, *};
|
||||
let mut ctx = gpgme::Context::new()?;
|
||||
if local {
|
||||
ctx.set_auto_key_locate(LocateKey::LOCAL)?;
|
||||
} else {
|
||||
ctx.set_auto_key_locate(LocateKey::WKD | LocateKey::LOCAL)?;
|
||||
}
|
||||
let (pattern, other_patterns) = patterns;
|
||||
let main_job = ctx.keylist(secret, Some(pattern.clone()))?;
|
||||
let main_handle = context.main_loop_handler.job_executor.spawn(
|
||||
"gpg::keylist".into(),
|
||||
main_job,
|
||||
IsAsync::Blocking,
|
||||
);
|
||||
let other_handles = other_patterns
|
||||
.iter()
|
||||
.map(|pattern| {
|
||||
let job = ctx.keylist(secret, Some(pattern.clone()))?;
|
||||
Ok(context.main_loop_handler.job_executor.spawn(
|
||||
"gpg::keylist".into(),
|
||||
job,
|
||||
IsAsync::Blocking,
|
||||
))
|
||||
})
|
||||
.collect::<Result<Vec<KeylistJoinHandle>>>()?;
|
||||
let mut progress_spinner = ProgressSpinner::new(8, context);
|
||||
progress_spinner.start();
|
||||
Ok(Self {
|
||||
handles: (main_handle, other_handles),
|
||||
secret,
|
||||
local,
|
||||
patterns: (pattern, other_patterns),
|
||||
allow_remote_lookup,
|
||||
progress_spinner,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn merge(&mut self, rhs: Self) {
|
||||
let Self {
|
||||
handles: (_, ref mut other_handles),
|
||||
secret: _,
|
||||
local: _,
|
||||
patterns: (_, ref mut other_patterns),
|
||||
allow_remote_lookup: _,
|
||||
progress_spinner: _,
|
||||
} = self;
|
||||
let Self {
|
||||
handles: (rhs_handle, rhs_other_handles),
|
||||
patterns: (rhs_pattern, rhs_other_patterns),
|
||||
secret: _,
|
||||
local: _,
|
||||
allow_remote_lookup: _,
|
||||
progress_spinner: _,
|
||||
} = rhs;
|
||||
other_handles.push(rhs_handle);
|
||||
other_handles.extend(rhs_other_handles);
|
||||
other_patterns.push(rhs_pattern);
|
||||
other_patterns.extend(rhs_other_patterns);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum KeySelection {
|
||||
Loading {
|
||||
inner: KeySelectionLoading,
|
||||
/// Accumulate keys from intermediate results (i.e. not the main
|
||||
/// pattern)
|
||||
keys_accumulator: Vec<melib::gpgme::Key>,
|
||||
},
|
||||
Error {
|
||||
id: ComponentId,
|
||||
|
@ -41,52 +120,31 @@ pub enum KeySelection {
|
|||
},
|
||||
}
|
||||
|
||||
impl From<KeySelectionLoading> for KeySelection {
|
||||
fn from(inner: KeySelectionLoading) -> Self {
|
||||
Self::Loading {
|
||||
inner,
|
||||
keys_accumulator: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for KeySelection {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(f, "select pgp keys")
|
||||
}
|
||||
}
|
||||
|
||||
impl KeySelection {
|
||||
pub fn new(
|
||||
secret: bool,
|
||||
local: bool,
|
||||
pattern: String,
|
||||
allow_remote_lookup: ActionFlag,
|
||||
context: &Context,
|
||||
) -> Result<Self> {
|
||||
use melib::gpgme::{self, *};
|
||||
let mut ctx = gpgme::Context::new()?;
|
||||
if local {
|
||||
ctx.set_auto_key_locate(LocateKey::LOCAL)?;
|
||||
} else {
|
||||
ctx.set_auto_key_locate(LocateKey::WKD | LocateKey::LOCAL)?;
|
||||
}
|
||||
let job = ctx.keylist(secret, Some(pattern.clone()))?;
|
||||
let handle = context.main_loop_handler.job_executor.spawn(
|
||||
"gpg::keylist".into(),
|
||||
job,
|
||||
IsAsync::Blocking,
|
||||
);
|
||||
let mut progress_spinner = ProgressSpinner::new(8, context);
|
||||
progress_spinner.start();
|
||||
Ok(Self::LoadingKeys {
|
||||
handle,
|
||||
secret,
|
||||
local,
|
||||
pattern,
|
||||
allow_remote_lookup,
|
||||
progress_spinner,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Component for KeySelection {
|
||||
fn draw(&mut self, grid: &mut CellBuffer, area: Area, context: &mut Context) {
|
||||
match self {
|
||||
Self::LoadingKeys {
|
||||
ref mut progress_spinner,
|
||||
..
|
||||
Self::Loading {
|
||||
inner:
|
||||
KeySelectionLoading {
|
||||
ref mut progress_spinner,
|
||||
..
|
||||
},
|
||||
keys_accumulator: _,
|
||||
} => progress_spinner.draw(grid, area.center_inside((2, 2)), context),
|
||||
Self::Error { ref err, .. } => {
|
||||
let theme_default = crate::conf::value(context, "theme_default");
|
||||
|
@ -106,16 +164,31 @@ impl Component for KeySelection {
|
|||
|
||||
fn process_event(&mut self, event: &mut UIEvent, context: &mut Context) -> bool {
|
||||
match self {
|
||||
Self::LoadingKeys {
|
||||
ref mut progress_spinner,
|
||||
ref mut handle,
|
||||
secret,
|
||||
local,
|
||||
ref mut pattern,
|
||||
allow_remote_lookup,
|
||||
..
|
||||
Self::Loading {
|
||||
inner:
|
||||
KeySelectionLoading {
|
||||
ref mut progress_spinner,
|
||||
handles: (ref mut main_handle, ref mut other_handles),
|
||||
secret,
|
||||
local,
|
||||
patterns: (ref mut pattern, ref mut other_patterns),
|
||||
allow_remote_lookup,
|
||||
..
|
||||
},
|
||||
ref mut keys_accumulator,
|
||||
} => match event {
|
||||
UIEvent::StatusEvent(StatusEvent::JobFinished(ref id)) if *id == handle.job_id => {
|
||||
UIEvent::StatusEvent(StatusEvent::JobFinished(ref id))
|
||||
if *id == main_handle.job_id
|
||||
|| other_handles.iter().any(|h| h.job_id == *id) =>
|
||||
{
|
||||
let mut main_handle_ref = &mut (*main_handle);
|
||||
let is_main = *id == main_handle_ref.job_id;
|
||||
let other_handle_ref_opt = other_handles.iter_mut().find(|h| h.job_id == *id);
|
||||
let handle = if is_main {
|
||||
&mut main_handle_ref
|
||||
} else {
|
||||
&mut (*other_handle_ref_opt.unwrap())
|
||||
};
|
||||
match handle.chan.try_recv() {
|
||||
Err(_) => { /* Job was canceled */ }
|
||||
Ok(None) => { /* something happened, perhaps a worker thread panicked */ }
|
||||
|
@ -123,15 +196,19 @@ impl Component for KeySelection {
|
|||
if keys.is_empty() {
|
||||
let id = progress_spinner.id();
|
||||
if allow_remote_lookup.is_true() {
|
||||
match Self::new(
|
||||
match KeySelectionLoading::new(
|
||||
*secret,
|
||||
*local,
|
||||
std::mem::take(pattern),
|
||||
(std::mem::take(pattern), std::mem::take(other_patterns)),
|
||||
*allow_remote_lookup,
|
||||
context,
|
||||
) {
|
||||
Ok(w) => {
|
||||
*self = w;
|
||||
Ok(inner) => {
|
||||
let keys_accumulator = std::mem::take(keys_accumulator);
|
||||
*self = Self::Loading {
|
||||
inner,
|
||||
keys_accumulator,
|
||||
};
|
||||
}
|
||||
Err(err) => *self = Self::Error { err, id },
|
||||
}
|
||||
|
@ -157,42 +234,74 @@ impl Component for KeySelection {
|
|||
context.replies.push_back(UIEvent::StatusEvent(
|
||||
StatusEvent::DisplayMessage(err.to_string()),
|
||||
));
|
||||
let res: Option<melib::gpgme::Key> = None;
|
||||
// Even in case of error, we should send a FinishedUIDialog
|
||||
// event so that the component parent knows we're done.
|
||||
let res: Option<Vec<melib::gpgme::Key>> = None;
|
||||
context
|
||||
.replies
|
||||
.push_back(UIEvent::FinishedUIDialog(id, Box::new(res)));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
let mut widget = Box::new(UIDialog::new(
|
||||
"select key",
|
||||
keys.iter()
|
||||
.map(|k| {
|
||||
(
|
||||
k.clone(),
|
||||
if let Some(primary_uid) = k.primary_uid() {
|
||||
format!("{} {}", k.fingerprint(), primary_uid)
|
||||
} else {
|
||||
k.fingerprint().to_string()
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<(melib::gpgme::Key, String)>>(),
|
||||
true,
|
||||
Some(Box::new(
|
||||
move |id: ComponentId, results: &[melib::gpgme::Key]| {
|
||||
Some(UIEvent::FinishedUIDialog(
|
||||
id,
|
||||
Box::new(results.first().cloned()),
|
||||
))
|
||||
},
|
||||
)),
|
||||
context,
|
||||
));
|
||||
widget.set_dirty(true);
|
||||
*self = Self::Loaded { widget, keys };
|
||||
keys_accumulator.extend(keys);
|
||||
if !is_main {
|
||||
other_handles.retain(|h| h.job_id != *id);
|
||||
return false;
|
||||
}
|
||||
if other_handles.is_empty() {
|
||||
// We are done with all Futures, so finally transition into the
|
||||
// "show the user the list of keys to select" state.
|
||||
let mut widget = Box::new(UIDialog::new(
|
||||
"select key",
|
||||
keys_accumulator
|
||||
.iter()
|
||||
.map(|k| {
|
||||
(
|
||||
k.clone(),
|
||||
if let Some(primary_uid) = k.primary_uid() {
|
||||
format!("{} {}", k.fingerprint(), primary_uid)
|
||||
} else {
|
||||
k.fingerprint().to_string()
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<(melib::gpgme::Key, String)>>(),
|
||||
false,
|
||||
Some(Box::new(
|
||||
move |id: ComponentId, results: &[melib::gpgme::Key]| {
|
||||
Some(UIEvent::FinishedUIDialog(
|
||||
id,
|
||||
Box::new(if results.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(results.to_vec())
|
||||
}),
|
||||
))
|
||||
},
|
||||
)),
|
||||
context,
|
||||
));
|
||||
widget.set_dirty(true);
|
||||
*self = Self::Loaded {
|
||||
widget,
|
||||
keys: std::mem::take(keys_accumulator),
|
||||
};
|
||||
} else {
|
||||
// Main handle has finished, replace it with some other one from
|
||||
// other_handles.
|
||||
*main_handle = other_handles.remove(0);
|
||||
}
|
||||
}
|
||||
Ok(Some(Err(err))) => {
|
||||
context.replies.push_back(UIEvent::StatusEvent(
|
||||
StatusEvent::DisplayMessage(err.to_string()),
|
||||
));
|
||||
// Even in case of error, we should send a FinishedUIDialog
|
||||
// event so that the component parent knows we're done.
|
||||
let res: Option<Vec<melib::gpgme::Key>> = None;
|
||||
context
|
||||
.replies
|
||||
.push_back(UIEvent::FinishedUIDialog(self.id(), Box::new(res)));
|
||||
*self = Self::Error {
|
||||
err,
|
||||
id: ComponentId::default(),
|
||||
|
@ -210,9 +319,13 @@ impl Component for KeySelection {
|
|||
|
||||
fn is_dirty(&self) -> bool {
|
||||
match self {
|
||||
Self::LoadingKeys {
|
||||
ref progress_spinner,
|
||||
..
|
||||
Self::Loading {
|
||||
inner:
|
||||
KeySelectionLoading {
|
||||
ref progress_spinner,
|
||||
..
|
||||
},
|
||||
keys_accumulator: _,
|
||||
} => progress_spinner.is_dirty(),
|
||||
Self::Error { .. } => true,
|
||||
Self::Loaded { ref widget, .. } => widget.is_dirty(),
|
||||
|
@ -221,9 +334,13 @@ impl Component for KeySelection {
|
|||
|
||||
fn set_dirty(&mut self, value: bool) {
|
||||
match self {
|
||||
Self::LoadingKeys {
|
||||
ref mut progress_spinner,
|
||||
..
|
||||
Self::Loading {
|
||||
inner:
|
||||
KeySelectionLoading {
|
||||
ref mut progress_spinner,
|
||||
..
|
||||
},
|
||||
keys_accumulator: _,
|
||||
} => progress_spinner.set_dirty(value),
|
||||
Self::Error { .. } => {}
|
||||
Self::Loaded { ref mut widget, .. } => widget.set_dirty(value),
|
||||
|
@ -234,16 +351,20 @@ impl Component for KeySelection {
|
|||
|
||||
fn shortcuts(&self, context: &Context) -> ShortcutMaps {
|
||||
match self {
|
||||
Self::LoadingKeys { .. } | Self::Error { .. } => ShortcutMaps::default(),
|
||||
Self::Loading { .. } | Self::Error { .. } => ShortcutMaps::default(),
|
||||
Self::Loaded { ref widget, .. } => widget.shortcuts(context),
|
||||
}
|
||||
}
|
||||
|
||||
fn id(&self) -> ComponentId {
|
||||
match self {
|
||||
Self::LoadingKeys {
|
||||
ref progress_spinner,
|
||||
..
|
||||
Self::Loading {
|
||||
inner:
|
||||
KeySelectionLoading {
|
||||
ref progress_spinner,
|
||||
..
|
||||
},
|
||||
keys_accumulator: _,
|
||||
} => progress_spinner.id(),
|
||||
Self::Error { ref id, .. } => *id,
|
||||
Self::Loaded { ref widget, .. } => widget.id(),
|
||||
|
@ -277,7 +398,7 @@ mod tests {
|
|||
use std::{borrow::Cow, ffi::CString, thread::sleep, time::Duration};
|
||||
|
||||
use melib::gpgme::{EngineInfo, LocateKey, Protocol};
|
||||
use sealed_test::prelude::*;
|
||||
use rusty_fork::rusty_fork_test;
|
||||
|
||||
use super::*;
|
||||
|
||||
|
@ -303,20 +424,24 @@ mod tests {
|
|||
);
|
||||
let mut progress_spinner = ProgressSpinner::new(8, context);
|
||||
progress_spinner.start();
|
||||
Ok(Self::LoadingKeys {
|
||||
handle,
|
||||
secret,
|
||||
local,
|
||||
pattern,
|
||||
allow_remote_lookup,
|
||||
progress_spinner,
|
||||
Ok(Self::Loading {
|
||||
inner: KeySelectionLoading {
|
||||
handles: (handle, vec![]),
|
||||
secret,
|
||||
local,
|
||||
patterns: (pattern, vec![]),
|
||||
allow_remote_lookup,
|
||||
progress_spinner,
|
||||
},
|
||||
keys_accumulator: vec![],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const PUBKEY: &[u8]=b"-----BEGIN PGP PUBLIC KEY BLOCK-----\r\nVersion: GnuPG v2.1.0-gitb3c71eb (GNU/Linux)\r\n\r\nmQGiBDo41NoRBADSfQazKGYf8nokq6zUKH/6INtV6MypSzSGmX2XErnARkIIPPYj\r\ncQRQ8zCbGV7ZU2ezVbzhFLUSJveE8PZUzzCrLp1O2NSyBTRcR5HVSXW95nJfY8eV\r\npOvZRAKul0BVLh81kYTsrfzaaCjh9VWNP26LoeN2r+PjZyktXe7gM3C4SwCgoTxK\r\nWUVi9HoT2HCLY7p7oig5hEcEALdCJal0UYomX3nJapIVLVZg3vkidr1RICYMb2vz\r\n58i17h8sxEtobD1vdIKNejulntaRAXs4n0tDYD9z7pRlwG1CLz1R9WxYzeOOqUDr\r\nfnVXdmU8L/oVWABat8v1V7QQhjMMf+41fuzVwDMMGqjVPLhu4X6wp3A8uyM3YDnQ\r\nVMN1A/4n2G5gHoOvjqxn8Ch5tBAdMGfO8gH4RjQOwzm2R1wPQss/yzUN1+tlMZGX\r\nK2dQ2FCWC/hDUSNaEQRlI15wxxBNZ2RQwlzE2A8v113DpvyzOtv0QO95gJ1teCXC\r\n7j/BN9asgHaBBc39JLO/TcpuI7Hf8PQ5VcP2F0UE3lczGhXbLLRESm9lIFJhbmRv\r\nbSBIYWNrZXIgKHRlc3Qga2V5IHdpdGggcGFzc3BocmFzZSAiYWJjIikgPGpvZUBl\r\neGFtcGxlLmNvbT6IYgQTEQIAIgUCTbdXqQIbIwYLCQgHAwIGFQgCCQoLBBYCAwEC\r\nHgECF4AACgkQr4IkT5zZ/VUcCACfQvSPi//9/gBv8SVrK6O4DiyD+jAAn3LEnfF1\r\n4j6MjwlqXTqol2VgQn1yuQENBDo41N0QBACedJb7Qhm50JSPe1V+rSZKLHT5nc3l\r\n2k1n7//wNsJkgDW2J7snIRjGtSzeNxMPh+hVzFidzAf3sbOlARQoBrMPPKpnJWtm\r\n6LEDf2lSwO36l0/bo6qDRmiFRJoHWytTJEjxVwRclVt4bXqHfNw9FKhZZbcKeAN2\r\noHgmBVSU6edHdwADBQP+OGAkEG4PcfSb8x191R+wkV/q2hA5Ay9z289Dx2rO28CO\r\n4M2fhhcjSmgr6x0DsrkfESCiG47UGJ169eu+QqJwk3HiF4crGN9rE5+VelBVFtrd\r\nMWkX2rPLGQWyw8iCZKbeH8g/ujmkaLovSmalzDcLe4v1xSLaP7Fnfzit0iIGZAGI\r\nRgQYEQIABgUCOjjU3QAKCRCvgiRPnNn9VVSaAJ9+rj1lIQnRl20i8Rom2Hwbe3re\r\n9QCfSYFnkZUw0yKF2DfCfqrDzdGAsbaIRgQYEQIABgUCOjjU3gAKCRCvgiRPnNn9\r\nVe4iAJ9FrGMlFR7s+GWf1scTeeyrthKrPQCfSpc/Yps72aFI7hPfyIa9MuerVZ4=\r\n=QRit\r\n-----END PGP PUBLIC KEY BLOCK-----\r\n";
|
||||
|
||||
#[sealed_test]
|
||||
rusty_fork_test! {
|
||||
#[test]
|
||||
fn test_gpg_verify_sig() {
|
||||
let tempdir = tempfile::tempdir().unwrap();
|
||||
{
|
||||
|
@ -445,4 +570,5 @@ mod tests {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1011,13 +1011,7 @@ pub trait ListingTrait: Component {
|
|||
fn prev_entry(&mut self, context: &mut Context);
|
||||
fn draw_list(&mut self, grid: &mut CellBuffer, area: Area, context: &mut Context);
|
||||
fn highlight_line(&mut self, grid: &mut CellBuffer, area: Area, idx: usize, context: &Context);
|
||||
fn filter(
|
||||
&mut self,
|
||||
_filter_term: String,
|
||||
_results: SmallVec<[EnvelopeHash; 512]>,
|
||||
_context: &Context,
|
||||
) {
|
||||
}
|
||||
fn filter(&mut self, _filter_term: String, _results: Vec<EnvelopeHash>, _context: &Context) {}
|
||||
fn unfocused(&self) -> bool;
|
||||
fn view_area(&self) -> Option<Area>;
|
||||
fn set_modifier_active(&mut self, _new_val: bool);
|
||||
|
|
|
@ -143,9 +143,9 @@ pub struct CompactListing {
|
|||
rows: RowsState<(ThreadHash, EnvelopeHash)>,
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
search_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
search_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
select_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
select_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
filter_term: String,
|
||||
filtered_selection: Vec<ThreadHash>,
|
||||
filtered_order: HashMap<ThreadHash, usize>,
|
||||
|
@ -764,12 +764,7 @@ impl ListingTrait for CompactListing {
|
|||
context.dirty_areas.push_back(area);
|
||||
}
|
||||
|
||||
fn filter(
|
||||
&mut self,
|
||||
filter_term: String,
|
||||
results: SmallVec<[EnvelopeHash; 512]>,
|
||||
context: &Context,
|
||||
) {
|
||||
fn filter(&mut self, filter_term: String, results: Vec<EnvelopeHash>, context: &Context) {
|
||||
if filter_term.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
@ -1374,7 +1369,7 @@ impl CompactListing {
|
|||
fn select(
|
||||
&mut self,
|
||||
search_term: &str,
|
||||
results: Result<SmallVec<[EnvelopeHash; 512]>>,
|
||||
results: Result<Vec<EnvelopeHash>>,
|
||||
context: &mut Context,
|
||||
) {
|
||||
let account = &context.accounts[&self.cursor_pos.0];
|
||||
|
|
|
@ -118,7 +118,7 @@ pub struct ConversationsListing {
|
|||
error: std::result::Result<(), String>,
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
search_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
search_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
filter_term: String,
|
||||
filtered_selection: Vec<ThreadHash>,
|
||||
filtered_order: HashMap<ThreadHash, usize>,
|
||||
|
@ -521,12 +521,7 @@ impl ListingTrait for ConversationsListing {
|
|||
context.dirty_areas.push_back(area);
|
||||
}
|
||||
|
||||
fn filter(
|
||||
&mut self,
|
||||
filter_term: String,
|
||||
results: SmallVec<[EnvelopeHash; 512]>,
|
||||
context: &Context,
|
||||
) {
|
||||
fn filter(&mut self, filter_term: String, results: Vec<EnvelopeHash>, context: &Context) {
|
||||
if filter_term.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -140,9 +140,9 @@ pub struct PlainListing {
|
|||
data_columns: DataColumns<5>,
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
search_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
search_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
select_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
select_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
filter_term: String,
|
||||
filtered_selection: Vec<EnvelopeHash>,
|
||||
filtered_order: HashMap<EnvelopeHash, usize>,
|
||||
|
@ -502,12 +502,7 @@ impl ListingTrait for PlainListing {
|
|||
context.dirty_areas.push_back(area);
|
||||
}
|
||||
|
||||
fn filter(
|
||||
&mut self,
|
||||
filter_term: String,
|
||||
results: SmallVec<[EnvelopeHash; 512]>,
|
||||
context: &Context,
|
||||
) {
|
||||
fn filter(&mut self, filter_term: String, results: Vec<EnvelopeHash>, context: &Context) {
|
||||
if filter_term.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
@ -1286,7 +1281,7 @@ impl PlainListing {
|
|||
fn select(
|
||||
&mut self,
|
||||
search_term: &str,
|
||||
results: Result<SmallVec<[EnvelopeHash; 512]>>,
|
||||
results: Result<Vec<EnvelopeHash>>,
|
||||
context: &mut Context,
|
||||
) {
|
||||
let account = &context.accounts[&self.cursor_pos.0];
|
||||
|
|
|
@ -139,9 +139,9 @@ pub struct ThreadListing {
|
|||
color_cache: ColorCache,
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
search_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
search_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
select_job: Option<(String, JoinHandle<Result<SmallVec<[EnvelopeHash; 512]>>>)>,
|
||||
select_job: Option<(String, JoinHandle<Result<Vec<EnvelopeHash>>>)>,
|
||||
filter_term: String,
|
||||
filtered_selection: Vec<ThreadHash>,
|
||||
filtered_order: HashMap<ThreadHash, usize>,
|
||||
|
@ -671,12 +671,7 @@ impl ListingTrait for ThreadListing {
|
|||
}
|
||||
}
|
||||
|
||||
fn filter(
|
||||
&mut self,
|
||||
filter_term: String,
|
||||
results: SmallVec<[EnvelopeHash; 512]>,
|
||||
context: &Context,
|
||||
) {
|
||||
fn filter(&mut self, filter_term: String, results: Vec<EnvelopeHash>, context: &Context) {
|
||||
if filter_term.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
@ -1224,7 +1219,7 @@ impl ThreadListing {
|
|||
fn select(
|
||||
&mut self,
|
||||
search_term: &str,
|
||||
results: Result<SmallVec<[EnvelopeHash; 512]>>,
|
||||
results: Result<Vec<EnvelopeHash>>,
|
||||
context: &mut Context,
|
||||
) {
|
||||
let account = &context.accounts[&self.cursor_pos.0];
|
||||
|
|
|
@ -1521,6 +1521,68 @@ impl Component for EnvelopeView {
|
|||
}
|
||||
return true;
|
||||
}
|
||||
UIEvent::Action(View(ViewAction::PipeAttachment(a_i, ref bin, ref args))) => {
|
||||
use std::borrow::Cow;
|
||||
|
||||
let bytes =
|
||||
if let Some(u) = self.open_attachment(a_i, context) {
|
||||
Cow::Owned(u.decode(Default::default()))
|
||||
} else if a_i == 0 {
|
||||
Cow::Borrowed(&self.mail.bytes)
|
||||
} else {
|
||||
context.replies.push_back(UIEvent::StatusEvent(
|
||||
StatusEvent::DisplayMessage(format!("Attachment `{}` not found.", a_i)),
|
||||
));
|
||||
return true;
|
||||
};
|
||||
// Kill input thread so that spawned command can be sole receiver of stdin
|
||||
{
|
||||
context.input_kill();
|
||||
}
|
||||
let pipe_command = format!("{} {}", bin, args.as_slice().join(" "));
|
||||
log::trace!("Executing: {}", &pipe_command);
|
||||
match Command::new(bin)
|
||||
.args(args)
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::inherit())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.map_err(Error::from)
|
||||
.and_then(|mut child| {
|
||||
let Some(mut stdin) = child.stdin.take() else {
|
||||
let _ = child.wait();
|
||||
return Err(Error::new(format!(
|
||||
"Could not open standard input of {bin}"
|
||||
))
|
||||
.set_kind(ErrorKind::External));
|
||||
};
|
||||
stdin.write_all(&bytes).chain_err_summary(|| {
|
||||
format!("Could not write to standard input of {bin}")
|
||||
})?;
|
||||
|
||||
Ok(child)
|
||||
}) {
|
||||
Ok(mut child) => {
|
||||
let _ = child.wait();
|
||||
}
|
||||
Err(err) => {
|
||||
context.replies.push_back(UIEvent::Notification {
|
||||
title: Some(
|
||||
format!("Failed to execute {}: {}", pipe_command, err).into(),
|
||||
),
|
||||
source: None,
|
||||
body: err.to_string().into(),
|
||||
kind: Some(NotificationType::Error(melib::error::ErrorKind::External)),
|
||||
});
|
||||
context.replies.push_back(UIEvent::Fork(ForkType::Finished));
|
||||
context.restore_input();
|
||||
self.set_dirty(true);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
context.replies.push_back(UIEvent::Fork(ForkType::Finished));
|
||||
return true;
|
||||
}
|
||||
UIEvent::Input(ref key)
|
||||
if shortcut!(key == shortcuts[Shortcuts::ENVELOPE_VIEW]["open_attachment"])
|
||||
&& !self.cmd_buf.is_empty() =>
|
||||
|
|
|
@ -96,17 +96,12 @@ impl MailViewState {
|
|||
return;
|
||||
};
|
||||
let account = &mut context.accounts[&coordinates.0];
|
||||
if account
|
||||
// Ensure all envelope headers are populated, because the email backend might
|
||||
// not have populated them all.
|
||||
_ = account
|
||||
.collection
|
||||
.get_env(coordinates.2)
|
||||
.other_headers()
|
||||
.is_empty()
|
||||
{
|
||||
let _ = account
|
||||
.collection
|
||||
.get_env_mut(coordinates.2)
|
||||
.populate_headers(&bytes);
|
||||
}
|
||||
.get_env_mut(coordinates.2)
|
||||
.populate_headers(&bytes);
|
||||
let env = Box::new(account.collection.get_env(coordinates.2).clone());
|
||||
let env_view = Box::new(EnvelopeView::new(
|
||||
Mail {
|
||||
|
|
|
@ -167,14 +167,14 @@ impl MailcapEntry {
|
|||
.map(|arg| match *arg {
|
||||
"%s" => {
|
||||
needs_stdin = false;
|
||||
let _f = File::create_temp_file(
|
||||
let file = File::create_temp_file(
|
||||
&a.decode(Default::default()),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
true,
|
||||
false,
|
||||
)?;
|
||||
let p = _f.path().display().to_string();
|
||||
let p = file.path().display().to_string();
|
||||
Ok(p)
|
||||
}
|
||||
"%t" => Ok(a.content_type().to_string()),
|
||||
|
|
|
@ -38,6 +38,8 @@ pub const POSSIBLE_VALUES: &[&str] = &[
|
|||
"conf",
|
||||
"meli.conf",
|
||||
"meli.conf.5",
|
||||
"meli.conf.examples",
|
||||
"meli.conf.examples.5",
|
||||
"themes",
|
||||
"meli-themes",
|
||||
"meli-themes.5",
|
||||
|
@ -51,6 +53,7 @@ pub fn parse_manpage(src: &str) -> Result<ManPages> {
|
|||
"meli.7" | "guide" => Ok(ManPages::Guide),
|
||||
"meli.conf" | "meli.conf.5" | "conf" | "config" | "configuration" => Ok(ManPages::Conf),
|
||||
"meli-themes" | "meli-themes.5" | "themes" | "theming" | "theme" => Ok(ManPages::Themes),
|
||||
"meli.conf.examples" | "meli.conf.examples.5" => Ok(ManPages::ConfExamples),
|
||||
_ => Err(Error::new(format!("Invalid documentation page: {src}",))),
|
||||
}
|
||||
}
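A small usage sketch of the extended `parse_manpage` mapping above; the new `meli.conf.examples` aliases now resolve to `ManPages::ConfExamples`. This assumes the test sits next to `parse_manpage` in the same module, and `matches!` avoids assuming extra derives on `ManPages`:

```rust
#[test]
fn test_parse_manpage_conf_examples() {
    // Both the short and the section-suffixed aliases map to ConfExamples.
    assert!(matches!(
        parse_manpage("meli.conf.examples"),
        Ok(ManPages::ConfExamples)
    ));
    assert!(matches!(
        parse_manpage("meli.conf.examples.5"),
        Ok(ManPages::ConfExamples)
    ));
    // Unknown names still produce an error.
    assert!(parse_manpage("not-a-page").is_err());
}
```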
|
||||
|
@ -66,6 +69,8 @@ pub enum ManPages {
|
|||
Themes = 2,
|
||||
/// meli(7)
|
||||
Guide = 3,
|
||||
/// meli.conf.examples(5)
|
||||
ConfExamples = 4,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ManPages {
|
||||
|
@ -76,6 +81,7 @@ impl std::fmt::Display for ManPages {
|
|||
match self {
|
||||
Self::Main => "meli.1",
|
||||
Self::Conf => "meli.conf.5",
|
||||
Self::ConfExamples => "meli.conf.examples.5",
|
||||
Self::Themes => "meli-themes.5",
|
||||
Self::Guide => "meli.7",
|
||||
}
|
||||
|
@ -84,17 +90,19 @@ impl std::fmt::Display for ManPages {
|
|||
}
|
||||
|
||||
impl ManPages {
|
||||
const MANPAGES: [&'static [u8]; 4] = [
|
||||
const MANPAGES: [&'static [u8]; 5] = [
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.txt.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.conf.txt.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli-themes.txt.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.7.txt.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.conf.examples.txt.gz")),
|
||||
];
|
||||
const MANPAGES_MDOC: [&'static [u8]; 4] = [
|
||||
const MANPAGES_MDOC: [&'static [u8]; 5] = [
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.mdoc.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.conf.mdoc.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli-themes.mdoc.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.7.mdoc.gz")),
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/meli.conf.examples.mdoc.gz")),
|
||||
];
|
||||
|
||||
pub fn install(destination: Option<PathBuf>) -> Result<PathBuf> {
|
||||
|
@ -135,6 +143,7 @@ impl ManPages {
|
|||
for (p, dir) in [
|
||||
(Self::Main, "man1"),
|
||||
(Self::Conf, "man5"),
|
||||
(Self::ConfExamples, "man5"),
|
||||
(Self::Themes, "man5"),
|
||||
(Self::Guide, "man7"),
|
||||
] {
|
||||
|
|
|
@ -37,7 +37,9 @@ use melib::{
|
|||
utils::sqlite3::{rusqlite::params, DatabaseDescription},
|
||||
Error, Result, ResultIntoError, SortField, SortOrder,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
const DB: DatabaseDescription = DatabaseDescription {
|
||||
name: "index.db",
|
||||
|
@ -150,10 +152,10 @@ impl AccountCache {
|
|||
pub async fn insert(
|
||||
envelope: Envelope,
|
||||
backend: Arc<RwLock<Box<dyn MailBackend>>>,
|
||||
acc_name: String,
|
||||
acc_name: Arc<str>,
|
||||
) -> Result<()> {
|
||||
let db_desc = DatabaseDescription {
|
||||
identifier: Some(acc_name.clone().into()),
|
||||
identifier: Some(acc_name.to_string().into()),
|
||||
..DB.clone()
|
||||
};
|
||||
|
||||
|
@ -183,7 +185,7 @@ impl AccountCache {
|
|||
conn.transaction_with_behavior(melib::rusqlite::TransactionBehavior::Immediate)?;
|
||||
if let Err(err) = tx.execute(
|
||||
"INSERT OR IGNORE INTO accounts (name) VALUES (?1)",
|
||||
params![acc_name,],
|
||||
params![acc_name],
|
||||
) {
|
||||
log::error!("Failed to insert envelope {}: {err}", envelope.message_id());
|
||||
return Err(Error::new(format!(
|
||||
|
@ -243,9 +245,9 @@ impl AccountCache {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn remove(acc_name: String, env_hash: EnvelopeHash) -> Result<()> {
|
||||
pub async fn remove(acc_name: Arc<str>, env_hash: EnvelopeHash) -> Result<()> {
|
||||
let db_desc = DatabaseDescription {
|
||||
identifier: Some(acc_name.clone().into()),
|
||||
identifier: Some(acc_name.to_string().into()),
|
||||
..DB.clone()
|
||||
};
|
||||
let db_path = db_desc.db_path()?;
|
||||
|
@ -277,7 +279,7 @@ impl AccountCache {
|
|||
}
|
||||
|
||||
pub async fn index(
|
||||
acc_name: Arc<String>,
|
||||
acc_name: Arc<str>,
|
||||
collection: melib::Collection,
|
||||
backend_mutex: Arc<RwLock<Box<dyn MailBackend>>>,
|
||||
) -> Result<()> {
|
||||
|
@ -302,8 +304,8 @@ impl AccountCache {
|
|||
let tx = conn
|
||||
.transaction_with_behavior(melib::rusqlite::TransactionBehavior::Immediate)?;
|
||||
tx.execute(
|
||||
"INSERT OR REPLACE INTO accounts (name) VALUES (?1)",
|
||||
params![acc_name.as_str(),],
|
||||
"INSERT OR IGNORE INTO accounts (name) VALUES (?1)",
|
||||
params![acc_name.as_ref()],
|
||||
)
|
||||
.chain_err_summary(|| "Failed to update index:")?;
|
||||
let account_id = {
|
||||
|
@ -311,7 +313,7 @@ impl AccountCache {
|
|||
.prepare("SELECT id FROM accounts WHERE name = ?")
|
||||
.unwrap();
|
||||
let x = stmt
|
||||
.query_map(params![acc_name.as_str()], |row| row.get(0))
|
||||
.query_map(params![acc_name.as_ref()], |row| row.get(0))
|
||||
.unwrap()
|
||||
.next()
|
||||
.unwrap()
|
||||
|
@ -398,12 +400,12 @@ impl AccountCache {
|
|||
}
|
||||
|
||||
pub async fn search(
|
||||
acc_name: String,
|
||||
acc_name: Arc<str>,
|
||||
query: Query,
|
||||
(sort_field, sort_order): (SortField, SortOrder),
|
||||
) -> Result<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> Result<Vec<EnvelopeHash>> {
|
||||
let db_desc = DatabaseDescription {
|
||||
identifier: Some(acc_name.clone().into()),
|
||||
identifier: Some(acc_name.to_string().into()),
|
||||
..DB.clone()
|
||||
};
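With the signature change above, callers hand the account name over as an `Arc<str>` and receive a plain `Vec<EnvelopeHash>`. A hedged sketch of a call site, assuming an async context and the imports already used in this file ("personal" and the helper name are illustrative):

```rust
// Illustrative helper around the updated search API.
async fn search_from_torvalds(acc_name: Arc<str>) -> Result<Vec<EnvelopeHash>> {
    AccountCache::search(
        acc_name,
        Query::From("torvalds@linux-foundation.org".into()),
        (SortField::Date, SortOrder::Desc),
    )
    .await
}
```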
|
||||
|
||||
|
@ -443,13 +445,13 @@ impl AccountCache {
|
|||
.query_map([], |row| row.get::<_, EnvelopeHash>(0))
|
||||
.map_err(Error::from)?
|
||||
.map(|item| item.map_err(Error::from))
|
||||
.collect::<Result<SmallVec<[EnvelopeHash; 512]>>>();
|
||||
.collect::<Result<Vec<EnvelopeHash>>>();
|
||||
x
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn db_path(acc_name: &str) -> Result<Option<PathBuf>> {
|
||||
pub fn db_path(acc_name: &Arc<str>) -> Result<Option<PathBuf>> {
|
||||
let db_desc = DatabaseDescription {
|
||||
identifier: Some(acc_name.to_string().into()),
|
||||
..DB.clone()
|
||||
|
@ -568,29 +570,3 @@ pub fn query_to_sql(q: &Query) -> String {
|
|||
rec(q, &mut ret);
|
||||
ret
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_query_to_sql() {
|
||||
use melib::{search::query, utils::parsec::Parser};
|
||||
assert_eq!(
|
||||
"(subject LIKE \"%test%\" ) AND (body_text LIKE \"%i%\" ) ",
|
||||
&query_to_sql(&query().parse_complete("subject:test and i").unwrap().1)
|
||||
);
|
||||
assert_eq!(
|
||||
"(subject LIKE \"%github%\" ) OR ((_from LIKE \"%epilys%\" ) AND ((subject LIKE \
|
||||
\"%lib%\" ) OR (subject LIKE \"%meli%\" ) ) ) ",
|
||||
&query_to_sql(
|
||||
&query()
|
||||
.parse_complete(
|
||||
"subject:github or (from:epilys and (subject:lib or subject:meli))"
|
||||
)
|
||||
.unwrap()
|
||||
.1
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
430
meli/src/sqlite3/tests.rs
Normal file
|
@ -0,0 +1,430 @@
|
|||
//
|
||||
// meli
|
||||
//
|
||||
// Copyright 2024 Emmanouil Pitsidianakis <manos@pitsidianak.is>
|
||||
//
|
||||
// This file is part of meli.
|
||||
//
|
||||
// meli is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// meli is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with meli. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
use std::{
|
||||
collections::{HashSet, VecDeque},
|
||||
io::Read,
|
||||
path::PathBuf,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use melib::{backends::prelude::*, maildir::MaildirType};
|
||||
use rusty_fork::rusty_fork_test;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use super::*;
|
||||
use crate::utilities::tests::{eprint_step_fn, eprintln_ok_fn};
|
||||
|
||||
#[test]
|
||||
fn test_sqlite3_query_to_sql() {
|
||||
use melib::{search::query, utils::parsec::Parser};
|
||||
assert_eq!(
|
||||
"(subject LIKE \"%test%\" ) AND (body_text LIKE \"%i%\" ) ",
|
||||
&query_to_sql(&query().parse_complete("subject:test and i").unwrap().1)
|
||||
);
|
||||
assert_eq!(
|
||||
"(subject LIKE \"%github%\" ) OR ((_from LIKE \"%epilys%\" ) AND ((subject LIKE \"%lib%\" \
|
||||
) OR (subject LIKE \"%meli%\" ) ) ) ",
|
||||
&query_to_sql(
|
||||
&query()
|
||||
.parse_complete("subject:github or (from:epilys and (subject:lib or subject:meli))")
|
||||
.unwrap()
|
||||
.1
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
fn new_maildir_backend(
|
||||
temp_dir: &TempDir,
|
||||
acc_name: &str,
|
||||
event_consumer: BackendEventConsumer,
|
||||
) -> Result<Box<MaildirType>> {
|
||||
let root_mailbox = temp_dir.path().join("inbox");
|
||||
{
|
||||
std::fs::create_dir(&root_mailbox).expect("Could not create root mailbox directory.");
|
||||
for d in &["cur", "new", "tmp"] {
|
||||
std::fs::create_dir(root_mailbox.join(d))
|
||||
.expect("Could not create root mailbox directory contents.");
|
||||
}
|
||||
}
|
||||
|
||||
let account_conf = melib::AccountSettings {
|
||||
name: acc_name.to_string(),
|
||||
root_mailbox: root_mailbox.display().to_string(),
|
||||
format: "maildir".to_string(),
|
||||
identity: "user@localhost".to_string(),
|
||||
extra_identities: vec![],
|
||||
read_only: true,
|
||||
display_name: None,
|
||||
order: Default::default(),
|
||||
subscribed_mailboxes: vec!["inbox".into()],
|
||||
mailboxes: vec![(
|
||||
"inbox".into(),
|
||||
melib::conf::MailboxConf {
|
||||
extra: indexmap::indexmap! {
|
||||
"path".into() => root_mailbox.display().to_string(),
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
)]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
manual_refresh: true,
|
||||
extra: indexmap::indexmap! {
|
||||
"root_mailbox".into() => root_mailbox.display().to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
MaildirType::new(&account_conf, Default::default(), event_consumer)
|
||||
}
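For reference, the helper above only needs a temporary directory and an event consumer; a trimmed sketch of how it is driven (a no-op consumer suffices when only the backend handle is needed, unlike the queue-backed one in the test below):

```rust
// Trimmed, illustrative wrapper around the helper defined above.
fn make_test_backend(temp_dir: &TempDir) -> Box<MaildirType> {
    let consumer = BackendEventConsumer::new(Arc::new(|_account_hash, _event| {}));
    new_maildir_backend(temp_dir, "test", consumer)
        .expect("Could not create Maildir backend instance")
}
```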
|
||||
|
||||
rusty_fork_test! {
|
||||
#[test]
|
||||
fn test_sqlite3_reindex() {
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
/// Account name to use throughout the test.
|
||||
const ACCOUNT_NAME: &str = "test";
|
||||
const DB_FILE_SHOULD_EXIST_ERR_MSG: &str = "A db file should now exist, after indexing.";
|
||||
|
||||
let eprintln_ok = eprintln_ok_fn();
|
||||
let mut eprint_step_closure = eprint_step_fn();
|
||||
macro_rules! eprint_step {
|
||||
($($arg:tt)+) => {{
|
||||
eprint_step_closure(format_args!($($arg)+));
|
||||
}};
|
||||
}
|
||||
|
||||
/// Helper functions
|
||||
mod helpers {
|
||||
use super::*;
|
||||
|
||||
/// Helper to convert a GZipped bytes string to text.
|
||||
pub(super) fn gz_to_string(bytes: &'static [u8]) -> String {
|
||||
use flate2::bufread::GzDecoder;
|
||||
|
||||
let mut gz = GzDecoder::new(bytes);
|
||||
let mut s = String::new();
|
||||
gz.read_to_string(&mut s).unwrap();
|
||||
s
|
||||
}
|
||||
|
||||
/// List file entries of `${XDG_DATA_HOME}` dir to see if there's an
|
||||
/// sqlite database there or not.
|
||||
pub(super) fn list_xdg_data_home_dir_entries() -> Vec<PathBuf> {
|
||||
use std::{
|
||||
fs::{self, DirEntry},
|
||||
io,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
fn visit_dirs(dir: &Path, cb: &mut dyn FnMut(&DirEntry)) -> io::Result<()> {
|
||||
if dir.is_dir() {
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
visit_dirs(&path, cb)?;
|
||||
} else {
|
||||
cb(&entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
let xdg_data_home =
|
||||
std::env::var_os("XDG_DATA_HOME").expect("env var should be present");
|
||||
let mut entries = Vec::with_capacity(1);
|
||||
let mut collect_file_entries = |direntry: &DirEntry| {
|
||||
entries.push(direntry.path());
|
||||
};
|
||||
|
||||
visit_dirs(Path::new(&xdg_data_home), &mut collect_file_entries).unwrap();
|
||||
|
||||
entries.sort();
|
||||
entries
|
||||
}
|
||||
|
||||
/// Helper function to perform a reindex operation, which is async, by
|
||||
/// blocking on it to completion.
|
||||
pub(super) fn perform_reindex(
|
||||
acc_name: Arc<str>,
|
||||
collection: melib::Collection,
|
||||
backend_mutex: Arc<RwLock<Box<dyn MailBackend>>>,
|
||||
) {
|
||||
let reindex_fut = AccountCache::index(acc_name, collection, Arc::clone(&backend_mutex));
|
||||
smol::block_on(reindex_fut).unwrap();
|
||||
}
|
||||
|
||||
/// Helper function to perform a search operation, which is async, by
|
||||
/// blocking on it to completion.
|
||||
pub(super) fn perform_search(acc_name: &Arc<str>, query: Query) -> Vec<EnvelopeHash> {
|
||||
let search_fut = AccountCache::search(
|
||||
Arc::clone(acc_name),
|
||||
query,
|
||||
(SortField::Date, SortOrder::Desc),
|
||||
);
|
||||
smol::block_on(search_fut).unwrap()
|
||||
}
|
||||
}
|
||||
use helpers::*;
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
eprint_step!("Sanitize environment...");
|
||||
for var in [
|
||||
"PAGER",
|
||||
"MANPATH",
|
||||
"EDITOR",
|
||||
"MELI_CONFIG",
|
||||
"HOME",
|
||||
"XDG_CACHE_HOME",
|
||||
"XDG_STATE_HOME",
|
||||
"XDG_CONFIG_DIRS",
|
||||
"XDG_CONFIG_HOME",
|
||||
"XDG_DATA_DIRS",
|
||||
"XDG_DATA_HOME",
|
||||
] {
|
||||
std::env::remove_var(var);
|
||||
}
|
||||
std::env::set_var("HOME", temp_dir.path());
|
||||
std::env::set_var("XDG_CONFIG_HOME", temp_dir.path().join(".config"));
|
||||
std::env::set_var(
|
||||
"XDG_DATA_HOME",
|
||||
temp_dir.path().join(".local").join(".share"),
|
||||
);
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!("Create maildir backend we will use for the sqlite3 index database...");
|
||||
let backend_event_queue = Arc::new(Mutex::new(VecDeque::with_capacity(16)));
|
||||
|
||||
let backend_event_consumer = {
|
||||
let backend_event_queue = Arc::clone(&backend_event_queue);
|
||||
|
||||
BackendEventConsumer::new(Arc::new(move |ah, be| {
|
||||
backend_event_queue.lock().unwrap().push_back((ah, be));
|
||||
}))
|
||||
};
|
||||
|
||||
let mut maildir = new_maildir_backend(&temp_dir, ACCOUNT_NAME, backend_event_consumer)
|
||||
.expect("Could not create Maildir backend instance");
|
||||
let acc_name = Arc::<str>::from(maildir.account_name.to_string());
|
||||
let collection = melib::Collection::default();
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Confirm the root mailbox was created by fetching all mailboxes and inspecting their \
|
||||
contents..."
|
||||
);
|
||||
let mailboxes_fut = maildir
|
||||
.mailboxes()
|
||||
.expect("Could not create mailboxes future");
|
||||
let mut mailboxes: HashMap<MailboxHash, Mailbox> = smol::block_on(mailboxes_fut).unwrap();
|
||||
assert_eq!(
|
||||
mailboxes.len(),
|
||||
1,
|
||||
"Only one mailbox was expected but got: {:?}",
|
||||
mailboxes
|
||||
);
|
||||
let root_mailbox_hash: MailboxHash = *mailboxes.keys().next().unwrap();
|
||||
let mailbox = mailboxes.remove(&root_mailbox_hash).unwrap();
|
||||
assert!(mailbox.is_subscribed());
|
||||
assert_eq!(mailbox.hash(), root_mailbox_hash);
|
||||
assert_eq!(mailbox.name(), "inbox");
|
||||
assert_eq!(mailbox.path(), "inbox");
|
||||
assert_eq!(mailbox.children(), &[]);
|
||||
assert_eq!(mailbox.special_usage(), SpecialUsageMailbox::Normal);
|
||||
assert_eq!(mailbox.count().unwrap(), (0, 0));
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!("Insert actual email into our backend...");
|
||||
|
||||
macro_rules! batch_entry {
|
||||
($path:literal) => {
|
||||
(
|
||||
gz_to_string(include_bytes!($path).as_slice()).into_bytes(),
|
||||
root_mailbox_hash,
|
||||
None,
|
||||
)
|
||||
};
|
||||
}
|
||||
let mail_batch: Vec<(Vec<u8>, MailboxHash, Option<Flag>)> = vec![
|
||||
batch_entry!("../../../melib/tests/data/PATCH-Put-sha1dc-on-a-diet_op.mbox.gz"),
|
||||
batch_entry!("../../../melib/tests/data/PATCH-Put-sha1dc-on-a-diet.mbox.gz"),
|
||||
batch_entry!("../../../melib/tests/data/git-am-breakage-with-MIME-decoding_op.mbox.gz"),
|
||||
batch_entry!("../../../melib/tests/data/git-am-breakage-with-MIME-decoding.mbox.gz"),
|
||||
];
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Perform a save operation for {} emails...",
|
||||
mail_batch.len()
|
||||
);
|
||||
|
||||
let save_batch_fut = maildir
|
||||
.save_batch(mail_batch)
|
||||
.expect("Could not create save mail batch future");
|
||||
smol::block_on(async move { save_batch_fut.try_collect::<Vec<Result<()>>>().await })
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<()>>>()
|
||||
.unwrap();
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Perform a manual refresh operation, since we have not spawned any watcher threads with \
|
||||
`MailBackend::watch` (we do not need to)..."
|
||||
);
|
||||
let refresh_fut = maildir
|
||||
.refresh(root_mailbox_hash)
|
||||
.expect("Could not create refresh future");
|
||||
smol::block_on(refresh_fut).unwrap();
|
||||
assert_eq!(backend_event_queue.lock().unwrap().len(), 4);
|
||||
for (ah, ev) in backend_event_queue.lock().unwrap().drain(0..) {
|
||||
assert_eq!(ah, maildir.account_hash);
|
||||
match ev {
|
||||
BackendEvent::Refresh(RefreshEvent {
|
||||
account_hash,
|
||||
mailbox_hash,
|
||||
kind: RefreshEventKind::Create(env),
|
||||
}) => {
|
||||
assert_eq!(account_hash, maildir.account_hash);
|
||||
assert_eq!(mailbox_hash, root_mailbox_hash);
|
||||
collection.insert(*env, mailbox_hash);
|
||||
}
|
||||
other => {
|
||||
panic!(
|
||||
"Got unexpected BackendEvent from maildir backend: {:?}",
|
||||
other
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!("Backend setup over, we're now finally ready to test sqlite3 indexing...");
|
||||
let backend_mutex = Arc::new(RwLock::new(maildir as Box<dyn MailBackend>));
|
||||
|
||||
assert_eq!(
|
||||
list_xdg_data_home_dir_entries(),
|
||||
Vec::<PathBuf>::new(),
|
||||
"expected no sqlite3 database in XDG_DATA_HOME dir or any other entries"
|
||||
);
|
||||
assert_eq!(
|
||||
AccountCache::db_path(&acc_name).unwrap(),
|
||||
None,
|
||||
"There should be no database file because we have not performed an indexing yet."
|
||||
);
|
||||
perform_reindex(
|
||||
Arc::clone(&acc_name),
|
||||
collection.clone(),
|
||||
Arc::clone(&backend_mutex),
|
||||
);
|
||||
let db_path = AccountCache::db_path(&acc_name)
|
||||
.expect(DB_FILE_SHOULD_EXIST_ERR_MSG)
|
||||
.expect(DB_FILE_SHOULD_EXIST_ERR_MSG);
|
||||
|
||||
assert_eq!(
|
||||
db_path,
|
||||
temp_dir
|
||||
.path()
|
||||
.join(".local")
|
||||
.join(".share")
|
||||
.join("meli")
|
||||
.join("test_index.db")
|
||||
);
|
||||
|
||||
assert_eq!(list_xdg_data_home_dir_entries(), vec![db_path.clone()],);
|
||||
eprintln_ok();
|
||||
eprint_step!("Ensure re-indexing for a second time does not trigger any errors...");
|
||||
perform_reindex(
|
||||
Arc::clone(&acc_name),
|
||||
collection.clone(),
|
||||
Arc::clone(&backend_mutex),
|
||||
);
|
||||
let db_path_2 = AccountCache::db_path(&acc_name)
|
||||
.expect(DB_FILE_SHOULD_EXIST_ERR_MSG)
|
||||
.expect(DB_FILE_SHOULD_EXIST_ERR_MSG);
|
||||
assert_eq!(db_path, db_path_2);
|
||||
assert_eq!(list_xdg_data_home_dir_entries(), vec![db_path],);
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!("Search for all envelopes, as a smoke test...");
|
||||
let search_results = perform_search(&acc_name, Query::Body(String::new()));
|
||||
assert_eq!(
|
||||
search_results.len(),
|
||||
collection.len(),
|
||||
"Expected search results to return all envelopes, but the results size do not match the \
|
||||
envelopes we have in total. Search results were: {:?}",
|
||||
search_results
|
||||
);
|
||||
assert_eq!(
|
||||
search_results
|
||||
.clone()
|
||||
.into_iter()
|
||||
.collect::<HashSet<EnvelopeHash>>(),
|
||||
*collection.get_mailbox(root_mailbox_hash)
|
||||
);
|
||||
eprintln_ok();
|
||||
eprint_step!(
|
||||
"Search for torvalds as a submitter, since he sent all those patches we inserted into the \
|
||||
backend. So this should return all envelopes as well..."
|
||||
);
|
||||
let torvalds_search_results = perform_search(
|
||||
&acc_name,
|
||||
Query::From("torvalds@linux-foundation.org".into()),
|
||||
);
|
||||
assert_eq!(
|
||||
search_results
|
||||
.into_iter()
|
||||
.collect::<HashSet<EnvelopeHash>>(),
|
||||
torvalds_search_results
|
||||
.into_iter()
|
||||
.collect::<HashSet<EnvelopeHash>>(),
|
||||
);
|
||||
|
||||
eprintln_ok();
|
||||
eprint_step!("Search for only specific recipients, which should not return all envelopes...");
|
||||
let search_results = perform_search(&acc_name, Query::To("marc.stevens@cwi.nl".into()));
|
||||
assert_eq!(
|
||||
search_results.len(),
|
||||
2,
|
||||
"Expected search results to return 2 envelopes but the results were: {:?}",
|
||||
search_results
|
||||
);
|
||||
assert_eq!(
|
||||
search_results
|
||||
.into_iter()
|
||||
.collect::<HashSet<EnvelopeHash>>(),
|
||||
collection
|
||||
.get_mailbox(root_mailbox_hash)
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|env_hash| collection.get_env(env_hash).clone())
|
||||
.filter(|env| env.other_headers()[HeaderName::TO].contains("marc.stevens@cwi.nl"))
|
||||
.map(|env| env.hash())
|
||||
.collect::<HashSet<EnvelopeHash>>()
|
||||
);
|
||||
eprintln_ok();
|
||||
}
|
||||
}
|
|
@ -811,10 +811,8 @@ impl State {
|
|||
return;
|
||||
}
|
||||
let account = &self.context.accounts[account_index];
|
||||
let (acc_name, backend_mutex): (Arc<String>, Arc<_>) = (
|
||||
Arc::new(account.name().to_string()),
|
||||
account.backend.clone(),
|
||||
);
|
||||
let (acc_name, backend_mutex): (Arc<str>, Arc<_>) =
|
||||
(Arc::clone(&account.name), account.backend.clone());
|
||||
let job = crate::sqlite3::AccountCache::index(
|
||||
acc_name,
|
||||
account.collection.clone(),
|
||||
|
|
|
@ -36,10 +36,7 @@ pub mod embedded;
|
|||
mod tests;
|
||||
pub mod text_editing;
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
io::{BufRead, Write},
|
||||
};
|
||||
use std::borrow::Cow;
|
||||
|
||||
pub use braille::BraillePixelIter;
|
||||
pub use screen::{Area, Screen, ScreenGeneration, StateStdout, Tty, Virtual};
|
||||
|
@ -119,13 +116,10 @@ pub struct Hyperlink<
|
|||
}
|
||||
|
||||
impl<
|
||||
'a,
|
||||
'b,
|
||||
'i,
|
||||
T: std::fmt::Display + ?Sized,
|
||||
U: std::fmt::Display + ?Sized,
|
||||
I: std::fmt::Display + ?Sized,
|
||||
> std::fmt::Display for Hyperlink<'a, 'b, 'i, T, U, I>
|
||||
> std::fmt::Display for Hyperlink<'_, '_, '_, T, U, I>
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
let id: &dyn std::fmt::Display = if let Some(ref id) = self.id { id } else { &"" };
|
||||
|
@ -453,10 +447,8 @@ impl<'m> Ask<'m> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn run(self) -> bool {
|
||||
pub fn run(self, writer: &mut impl std::io::Write, reader: &mut impl std::io::BufRead) -> bool {
|
||||
let mut buffer = String::new();
|
||||
let stdin = std::io::stdin();
|
||||
let mut handle = stdin.lock();
|
||||
|
||||
let default = match self.default {
|
||||
None => "y/n",
|
||||
|
@ -464,11 +456,11 @@ impl<'m> Ask<'m> {
|
|||
Some(false) => "y/N",
|
||||
};
|
||||
|
||||
print!("{} [{default}] ", self.message.as_ref());
|
||||
let _ = std::io::stdout().flush();
|
||||
_ = write!(writer, "{} [{default}] ", self.message.as_ref());
|
||||
_ = writer.flush();
|
||||
loop {
|
||||
buffer.clear();
|
||||
handle
|
||||
reader
|
||||
.read_line(&mut buffer)
|
||||
.expect("Could not read from stdin.");
|
||||
|
||||
|
@ -481,8 +473,8 @@ impl<'m> Ask<'m> {
|
|||
return false;
|
||||
}
|
||||
_ => {
|
||||
print!("\n{} [{default}] ", self.message.as_ref());
|
||||
let _ = std::io::stdout().flush();
|
||||
_ = write!(writer, "\n{} [{default}] ", self.message.as_ref());
|
||||
_ = writer.flush();
|
||||
}
|
||||
}
|
||||
}
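The `Ask::run` change above threads the output and input streams through the call, so the prompt can be exercised without a real TTY. A hedged sketch of the new call shape (the prompt text is illustrative; any `Write` plus `BufRead` pair works):

```rust
#[test]
fn ask_reads_from_any_bufread() {
    let mut output: Vec<u8> = Vec::new();
    let mut input = std::io::Cursor::new(b"y\n".to_vec());
    // "y" on the simulated stdin answers the prompt affirmatively.
    let answer = Ask::new("Perform 1 migration?").run(&mut output, &mut input);
    assert!(answer);
}
```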
|
||||
|
|
|
@ -209,11 +209,55 @@ impl PartialEq<Key> for &Key {
|
|||
}
|
||||
}
|
||||
|
||||
/// Setting mode value in ANSI or DEC report sequences.
|
||||
///
|
||||
/// See <https://vt100.net/docs/vt510-rm/DECRPM.html>.
|
||||
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
|
||||
#[repr(u8)]
|
||||
enum ANSIDECModeSetting {
|
||||
#[default]
|
||||
ModeNotRecognized = 0,
|
||||
Set = 1,
|
||||
Reset = 2,
|
||||
PermanentlySet = 3,
|
||||
PermanentlyReset = 4,
|
||||
}
|
||||
|
||||
/// Report Mode, Terminal to Host.
|
||||
///
|
||||
/// See <https://vt100.net/docs/vt510-rm/DECRPM.html>.
|
||||
///
|
||||
/// Format is:
|
||||
///
|
||||
/// ```text
|
||||
/// CSI ? Pd ; Ps $ y
|
||||
/// ```
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
enum DECRPMReport {
|
||||
WaitingForSemicolon {
|
||||
mode: u16,
|
||||
},
|
||||
Semicolon {
|
||||
mode: u16,
|
||||
},
|
||||
WaitingForDollar {
|
||||
mode: u16,
|
||||
setting: ANSIDECModeSetting,
|
||||
},
|
||||
WaitingForEnd {
|
||||
mode: u16,
|
||||
setting: ANSIDECModeSetting,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
/// Keep track of whether we're accepting normal user input or a pasted string.
|
||||
enum InputMode {
|
||||
Normal,
|
||||
EscapeSequence(Vec<u8>),
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
/// Report Mode, Terminal to Host.
|
||||
DECRPM(DECRPMReport),
|
||||
Paste(Vec<u8>),
|
||||
}
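As a standalone illustration of the `CSI ? Pd ; Ps $ y` format that the new `DECRPM` states decode, here is a pure-`std` sketch of parsing just the `Pd ; Ps $ y` tail; it is not meli's actual state machine, only the same grammar expressed directly:

```rust
/// Parse the `Pd ; Ps $ y` tail of a DECRPM report, e.g. "2026;1$y".
fn parse_decrpm_tail(tail: &str) -> Option<(u16, u8)> {
    let (mode, rest) = tail.split_once(';')?;
    let (setting, end) = rest.split_once('$')?;
    if end != "y" {
        return None;
    }
    let mode: u16 = mode.parse().ok()?;
    let setting: u8 = setting.parse().ok()?;
    // Settings above 4 are not defined by DECRPM.
    (setting <= 4).then_some((mode, setting))
}

fn main() {
    // Mode 2026 reported as "Set" (1).
    assert_eq!(parse_decrpm_tail("2026;1$y"), Some((2026, 1)));
    assert_eq!(parse_decrpm_tail("2026;9$y"), None);
}
```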
|
||||
|
||||
|
@ -319,6 +363,72 @@ pub fn get_events(
|
|||
closure((Key::Mouse(mev.into()), bytes));
|
||||
continue 'poll_while;
|
||||
}
|
||||
(Ok((TEvent::Unsupported(ref k,), _)), InputMode::Normal) if k.as_slice() == [27, 91, 63] => {
|
||||
// DECRPM - Report Mode - Terminal To Host
|
||||
esc_seq_buf.clear();
|
||||
input_mode = InputMode::DECRPM(DECRPMReport::WaitingForSemicolon { mode: 0});
|
||||
}
|
||||
(Ok((TEvent::Key(TKey::Char(k)), _)), InputMode::DECRPM(ref report_state)) => {
|
||||
// CSI ? Pd ; Ps $ y
|
||||
match (k, report_state) {
|
||||
(d, DECRPMReport::WaitingForSemicolon { mode }) if d.is_ascii_digit() => {
|
||||
let mut mode = *mode;
|
||||
mode *= 10;
|
||||
// SAFETY: we performed a char::is_ascii_digit() check in
|
||||
// the guard above.
|
||||
mode += (d as u8 - b'0') as u16;
|
||||
input_mode = InputMode::DECRPM(DECRPMReport::WaitingForSemicolon { mode });
|
||||
},
|
||||
(';', DECRPMReport::WaitingForSemicolon { mode }) => {
|
||||
input_mode = InputMode::DECRPM(DECRPMReport::Semicolon { mode: *mode });
|
||||
},
|
||||
(other, DECRPMReport::WaitingForSemicolon { mode }) => {
|
||||
log::trace!("Received invalid DECRPM response: Was waiting for an ASCII digit or `;` after `Pd` argument (mode, whose value was currently {mode:?} but instead got character {other:?}");
|
||||
// Revert to normal input mode, to prevent locking
|
||||
// up the user's terminal input
|
||||
input_mode = InputMode::Normal;
|
||||
}
|
||||
(d, DECRPMReport::Semicolon { mode }) if d.is_ascii_digit() => {
|
||||
let setting = match d {
|
||||
'0' => ANSIDECModeSetting::ModeNotRecognized,
|
||||
'1' => ANSIDECModeSetting::Set,
|
||||
'2' => ANSIDECModeSetting::Reset,
|
||||
'3' => ANSIDECModeSetting::PermanentlySet,
|
||||
'4' => ANSIDECModeSetting::PermanentlyReset,
|
||||
other => {
|
||||
log::trace!("Received invalid DECRPM setting value: {:?}: expected one of {{0, 1, 2, 3, 4}}", other);
|
||||
ANSIDECModeSetting::default()
|
||||
}
|
||||
};
|
||||
input_mode = InputMode::DECRPM(DECRPMReport::WaitingForDollar { mode: *mode, setting });
|
||||
},
|
||||
(other, DECRPMReport::Semicolon { ref mode }) => {
|
||||
log::trace!("Received invalid DECRPM response: Was waiting for an ASCII digit reporting setting value (`Ps` argument), for mode {mode:?} but instead got character {other:?}");
|
||||
// Revert to normal input mode, to prevent locking
|
||||
// up the user's terminal input
|
||||
input_mode = InputMode::Normal;
|
||||
}
|
||||
('$', DECRPMReport::WaitingForDollar { mode, setting }) => {
|
||||
input_mode = InputMode::DECRPM(DECRPMReport::WaitingForEnd { mode: *mode, setting: *setting });
|
||||
},
|
||||
(other, DECRPMReport::WaitingForDollar { mode, setting }) => {
|
||||
log::trace!("Received invalid DECRPM response: Was waiting for an ASCII `$` character (`Pm` argument was {mode:?} and `Ps` argument was {setting:?}) but instead got character {other:?}");
|
||||
// Revert to normal input mode, to prevent locking
|
||||
// up the user's terminal input
|
||||
input_mode = InputMode::Normal;
|
||||
}
|
||||
(c, DECRPMReport::WaitingForEnd { mode, setting }) => {
|
||||
if c != 'y' {
|
||||
log::trace!("Received invalid DECRPM response: Was waiting for an ASCII `y` character (`Pm` argument was {mode:?} and `Ps` argument was {setting:?}) but instead got character {c:?}");
|
||||
} else {
|
||||
log::trace!("Got an DECRPM Terminal mode report: Mode {mode:?} is set to {setting:?}");
|
||||
}
|
||||
// end of report sequence.
|
||||
input_mode = InputMode::Normal;
|
||||
},
|
||||
|
||||
}
|
||||
}
|
||||
other => {
|
||||
log::trace!("get_events other = {:?}", other);
|
||||
continue 'poll_while;
|
||||
|
@ -363,7 +473,7 @@ impl<'de> Deserialize<'de> for Key {
|
|||
{
|
||||
struct KeyVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for KeyVisitor {
|
||||
impl Visitor<'_> for KeyVisitor {
|
||||
type Value = Key;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
|
|
|
@ -45,7 +45,7 @@ mod tables;
|
|||
pub use self::tables::*;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
pub mod tests;
|
||||
|
||||
pub type AutoCompleteFn = Box<dyn Fn(&Context, &str) -> Vec<AutoCompleteEntry> + Send + Sync>;
|
||||
|
||||
|
|
|
@ -176,3 +176,69 @@ fn test_utilities_text_input_field() {
|
|||
assert_eq!(field.as_str(), EMOJIGRAM);
|
||||
_ = tmpdir.close();
|
||||
}
|
||||
|
||||
/// Returns a closure that prints the string " OK\n" to `stderr`.
|
||||
///
|
||||
/// If `stderr` is a TTY, the output will contain escape code sequences to
|
||||
/// change the foreground color of the text with the second indexed color
|
||||
/// (usually green).
|
||||
pub fn eprintln_ok_fn() -> Box<dyn Fn()> {
|
||||
if crate::terminal::is_tty() && std::env::var_os("NO_COLOR").is_none() {
|
||||
struct SetAf2(String);
|
||||
struct Sgr0(String);
|
||||
fn get_tput_sequences() -> Option<(SetAf2, Sgr0)> {
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
let setaf_2 = SetAf2(
|
||||
String::from_utf8(
|
||||
Command::new("tput")
|
||||
.args(["setaf", "2"])
|
||||
.stdout(Stdio::piped())
|
||||
.stdin(Stdio::null())
|
||||
.stderr(Stdio::inherit())
|
||||
.output()
|
||||
.ok()?
|
||||
.stdout,
|
||||
)
|
||||
.ok()?,
|
||||
);
|
||||
|
||||
let sgr0 = Sgr0(
|
||||
String::from_utf8(
|
||||
Command::new("tput")
|
||||
.arg("sgr0")
|
||||
.stdout(Stdio::piped())
|
||||
.stdin(Stdio::null())
|
||||
.stderr(Stdio::inherit())
|
||||
.output()
|
||||
.ok()?
|
||||
.stdout,
|
||||
)
|
||||
.ok()?,
|
||||
);
|
||||
Some((setaf_2, sgr0))
|
||||
}
|
||||
|
||||
if let Some((SetAf2(setaf_2), Sgr0(sgr0))) = get_tput_sequences() {
|
||||
return Box::new(move || {
|
||||
eprintln!(" {setaf_2}OK{sgr0}");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Box::new(|| {
|
||||
eprintln!(" OK");
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns a closure that prints a formatted message, without a trailing
|
||||
/// newline, and prefixed with an increasing counter.
|
||||
pub fn eprint_step_fn() -> Box<dyn FnMut(std::fmt::Arguments) -> usize> {
|
||||
let mut counter = 1;
|
||||
Box::new(move |args: std::fmt::Arguments| {
|
||||
let step = counter;
|
||||
eprint!("{step}. {}", args);
|
||||
counter += 1;
|
||||
step
|
||||
})
|
||||
}
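The two helpers above are meant to be combined in integration tests, as the sqlite3 test in this changeset does; a minimal usage sketch:

```rust
#[test]
fn demo_step_output_helpers() {
    let eprintln_ok = eprintln_ok_fn();
    let mut eprint_step = eprint_step_fn();

    // The closure numbers each step and returns the step index it printed.
    let step = eprint_step(format_args!("Setting up test fixtures..."));
    assert_eq!(step, 1);
    // ... perform the step's work here ...
    eprintln_ok();
}
```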
|
||||
|
|
|
@ -352,12 +352,8 @@ impl<T: 'static + std::fmt::Debug + Copy + Default + Send + Sync> FormWidget<T>
|
|||
&mut self.fields
|
||||
}
|
||||
|
||||
pub fn collect(self) -> Option<HashMap<Cow<'static, str>, Field>> {
|
||||
if self.buttons.result.is_some() {
|
||||
Some(self.fields)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
pub fn collect(self) -> HashMap<Cow<'static, str>, Field> {
|
||||
self.fields
|
||||
}
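After this change `collect()` no longer encodes whether a button was pressed; callers query `buttons_result()` separately. A hedged sketch of the new call pattern (the `submit` helper is hypothetical; the bounds mirror the `impl` header above):

```rust
// Hypothetical caller; `FormWidget<T>` and `Field` come from this module.
fn submit<T: 'static + std::fmt::Debug + Copy + Default + Send + Sync>(
    mut form: FormWidget<T>,
) -> Option<HashMap<Cow<'static, str>, Field>> {
    // Button state is now queried separately from the field values.
    form.buttons_result()?;
    // `collect()` always hands back the fields unconditionally.
    Some(form.collect())
}
```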
|
||||
|
||||
pub fn buttons_result(&mut self) -> Option<T> {
|
||||
|
@ -913,17 +909,41 @@ impl AutoComplete {
|
|||
}
|
||||
}
|
||||
|
||||
/// A widget that draws a scrollbar.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// # use meli::{Area, Component, CellBuffer, Context, utilities::ScrollBar};
|
||||
/// // Mock `Component::draw` impl
|
||||
/// fn draw(grid: &mut CellBuffer, area: Area, context: &mut Context) {
|
||||
/// let position = 0;
|
||||
/// let visible_rows = area.height();
|
||||
/// let scrollbar_area = area.nth_col(area.width());
|
||||
/// let total_rows = 100;
|
||||
/// ScrollBar::default().set_show_arrows(true).draw(
|
||||
/// grid,
|
||||
/// scrollbar_area,
|
||||
/// context,
|
||||
/// position,
|
||||
/// visible_rows,
|
||||
/// total_rows,
|
||||
/// );
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Clone, Copy, Default)]
|
||||
pub struct ScrollBar {
|
||||
pub show_arrows: bool,
|
||||
}
|
||||
|
||||
impl ScrollBar {
|
||||
/// Update `self.show_arrows` field.
|
||||
pub fn set_show_arrows(&mut self, new_val: bool) -> &mut Self {
|
||||
self.show_arrows = new_val;
|
||||
self
|
||||
}
|
||||
|
||||
/// Draw `self` vertically.
|
||||
pub fn draw(
|
||||
&self,
|
||||
grid: &mut CellBuffer,
|
||||
|
@ -980,6 +1000,7 @@ impl ScrollBar {
|
|||
}
|
||||
}
|
||||
|
||||
/// Draw `self` horizontally.
|
||||
pub fn draw_horizontal(
|
||||
&self,
|
||||
grid: &mut CellBuffer,
|
||||
|
@ -1037,6 +1058,82 @@ impl ScrollBar {
|
|||
}
|
||||
}
|
||||
|
||||
/// A widget that displays a customizable progress spinner.
|
||||
///
|
||||
/// It uses a [`Timer`](crate::jobs::Timer) and each time its timer fires, it
|
||||
/// cycles to the next stage of its `kind` sequence.
|
||||
///
|
||||
/// `kind` is an array of strings/string slices and an
|
||||
/// [`Duration` interval](std::time::Duration), and each item represents a stage
|
||||
/// or frame of the progress spinner. For example, a
|
||||
/// `(Duration::from_millis(130), &["-", "\\", "|", "/"])` value would cycle
|
||||
/// through the sequence `-`, `\`, `|`, `/`, `-`, `\`, `|` and so on roughly
|
||||
/// every 130 milliseconds.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::collections::HashSet;
|
||||
///
|
||||
/// use meli::{jobs::JobId, utilities::ProgressSpinner, Component, Context, StatusEvent, UIEvent};
|
||||
///
|
||||
/// struct JobMonitoringWidget {
|
||||
/// progress_spinner: ProgressSpinner,
|
||||
/// in_progress_jobs: HashSet<JobId>,
|
||||
/// }
|
||||
///
|
||||
/// impl JobMonitoringWidget {
|
||||
/// fn new(context: &Context, container: Box<dyn Component>) -> Self {
|
||||
/// let mut progress_spinner = ProgressSpinner::new(20, context);
|
||||
/// match context.settings.terminal.progress_spinner_sequence.as_ref() {
|
||||
/// Some(meli::conf::terminal::ProgressSpinnerSequence::Integer(k)) => {
|
||||
/// progress_spinner.set_kind(*k);
|
||||
/// }
|
||||
/// Some(meli::conf::terminal::ProgressSpinnerSequence::Custom {
|
||||
/// ref frames,
|
||||
/// ref interval_ms,
|
||||
/// }) => {
|
||||
/// progress_spinner.set_custom_kind(frames.clone(), *interval_ms);
|
||||
/// }
|
||||
/// None => {}
|
||||
/// }
|
||||
/// Self {
|
||||
/// progress_spinner,
|
||||
/// in_progress_jobs: Default::default(),
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// // Mock `Component::process_event` impl.
|
||||
/// fn process_event(&mut self, event: &mut UIEvent, context: &mut Context) -> bool {
|
||||
/// match event {
|
||||
/// UIEvent::StatusEvent(StatusEvent::JobCanceled(ref job_id))
|
||||
/// | UIEvent::StatusEvent(StatusEvent::JobFinished(ref job_id)) => {
|
||||
/// self.in_progress_jobs.remove(job_id);
|
||||
/// if self.in_progress_jobs.is_empty() {
|
||||
/// self.progress_spinner.stop();
|
||||
/// }
|
||||
/// self.progress_spinner.set_dirty(true);
|
||||
/// false
|
||||
/// }
|
||||
/// UIEvent::StatusEvent(StatusEvent::NewJob(ref job_id)) => {
|
||||
/// if self.in_progress_jobs.is_empty() {
|
||||
/// self.progress_spinner.start();
|
||||
/// }
|
||||
/// self.progress_spinner.set_dirty(true);
|
||||
/// self.in_progress_jobs.insert(*job_id);
|
||||
/// false
|
||||
/// }
|
||||
/// UIEvent::Timer(_) => {
|
||||
/// if self.progress_spinner.process_event(event, context) {
|
||||
/// return true;
|
||||
/// }
|
||||
/// false
|
||||
/// }
|
||||
/// _ => false,
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Debug)]
|
||||
pub struct ProgressSpinner {
|
||||
timer: crate::jobs::Timer,
|
||||
|
@ -1124,6 +1221,7 @@ impl ProgressSpinner {
|
|||
pub const INTERVAL_MS: u64 = 50;
|
||||
const INTERVAL: std::time::Duration = std::time::Duration::from_millis(Self::INTERVAL_MS);
|
||||
|
||||
/// See source code of [`Self::KINDS`].
|
||||
pub fn new(kind: usize, context: &Context) -> Self {
|
||||
let kind = kind % Self::KINDS.len();
|
||||
let width = Self::KINDS[kind]
|
||||
|
@ -1155,10 +1253,12 @@ impl ProgressSpinner {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn is_active(&self) -> bool {
|
||||
self.active
|
||||
}
|
||||
|
||||
/// See source code of [`Self::KINDS`].
|
||||
pub fn set_kind(&mut self, kind: usize) {
|
||||
self.stage = 0;
|
||||
self.width = Self::KINDS[kind % Self::KINDS.len()]
|
||||
|
@ -1201,11 +1301,12 @@ impl ProgressSpinner {
|
|||
|
||||
impl std::fmt::Display for ProgressSpinner {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(f, "progress bar")
|
||||
write!(f, "progress spinner")
|
||||
}
|
||||
}
|
||||
|
||||
impl Component for ProgressSpinner {
|
||||
/// Draw current stage, if `self` is dirty.
|
||||
fn draw(&mut self, grid: &mut CellBuffer, area: Area, context: &mut Context) {
|
||||
if self.dirty {
|
||||
grid.clear_area(area, self.theme_attr);
|
||||
|
@ -1228,6 +1329,7 @@ impl Component for ProgressSpinner {
|
|||
}
|
||||
}
|
||||
|
||||
/// If the `event` is our timer firing, proceed to next stage.
|
||||
fn process_event(&mut self, event: &mut UIEvent, _context: &mut Context) -> bool {
|
||||
match event {
|
||||
UIEvent::Timer(id) if *id == self.timer.id() => {
|
||||
|
|
|
@ -21,194 +21,293 @@
|
|||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
//! Helping users move to newer `meli` versions.
|
||||
//!
|
||||
//! # How version information is stored in the filesystem and examined
|
||||
//!
|
||||
//! On start-up, `meli` checks the contents of `${XDG_DATA_HOME}/meli/.version`
|
||||
//! (the "version file") to determine if there has been a version upgrade since
|
||||
//! the last time it was launched, if any. Regardless of the version file
|
||||
//! existence or its contents, it will write the latest version string as
|
||||
//! recorded in the module `const` global [`LATEST`] **unless** the version
|
||||
//! file's content match `LATEST`.
|
||||
//!
|
||||
//! [`LATEST`] is verified to contain the actual version at compile-time using
|
||||
//! Cargo's environment variable `CARGO_PKG_VERSION`.
|
||||
//!
|
||||
//! If the version file does not exist, no migrations need to be performed.
|
||||
//!
|
||||
//! If the version file is determined to be a previous version,
|
||||
//! [`calculate_migrations`] is called which examines every migration in the
|
||||
//! version range starting from the previous version up to the latest. If any
|
||||
//! migration is applicable, it asks the user interactively whether to perform
|
||||
//! them. This happens in [`version_setup`].
|
||||
//!
|
||||
//! # How `meli` encodes version information statically with types and modules
|
||||
//!
|
||||
//! Every release **MUST** have a module associated with it. The module
|
||||
//! contains:
|
||||
//!
|
||||
//! - a public [`VersionIdentifier`] `const` item
|
||||
//! - a `struct` that represents the version, named by convention `VX_Y_Z[..]`.
|
||||
//! The `struct` definition can be empty (e.g. `pub struct V0_0_1;`). The
|
||||
//! `struct` **MUST** implement the [`Version`] trait, which can be used to
|
||||
//! retrieve the version identifier and the migrations.
|
||||
//! - Any number of structs representing migrations, which implement the
|
||||
//! [`Migration`] trait, and which are returned by the [`Version::migrations`]
|
||||
//! method.
|
||||
//!
|
||||
//! All versions must be stored in an `IndexMap` (type alias [`VersionMap`])
|
||||
//! which is retrieved by the function [`versions`].
|
||||
//!
|
||||
//! # How migrations work
|
||||
//!
|
||||
//! Migrations are **not** guaranteed to be lossless; stored metadata
|
||||
//! information in the filesystem may be lost.
|
||||
//!
|
||||
//! Migrations can optionally claim they are not applicable, which means they
|
||||
//! will be skipped entirely if migrations are to be applied. The check is done
|
||||
//! in the [`Migration::is_applicable`] trait method which can opt-in to make
|
||||
//! checks in the configuration file.
|
||||
//!
|
||||
//! Migrations can be performed using the [`Migration::perform`] method, which
|
||||
//! can optionally attempt to make a "dry run" application, which can check for
|
||||
//! errors but not make any actual changes in the filesystem.
|
||||
//!
|
||||
//! If a migration can be reverted, it can implement the revert logic in
|
||||
//! [`Migration::revert`] which follows the same logic as [`Migration::perform`]
|
||||
//! but in reverse. It is not always possible a migration can be reverted, since
|
||||
//! migrations are not necessarily lossless.
|
||||
|
||||
mod v0_8_8;
|
||||
use v0_8_8::V0_8_8;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
type VersionMap = IndexMap<VersionIdentifier, Box<dyn Version + Send + Sync + 'static>>;
|
||||
/// A container for [`Version`]s indexed by their [`VersionIdentifier`].
|
||||
///
|
||||
/// Internally it is returned by the [`versions`] function in this
|
||||
/// module.
|
||||
pub type VersionMap = IndexMap<VersionIdentifier, Box<dyn Version + Send + Sync + 'static>>;
|
||||
|
||||
fn versions() -> &'static VersionMap {
|
||||
use std::sync::OnceLock;
|
||||
/// Utility macro to define version module imports and a function `versions() ->
|
||||
/// &'static VersionMap`.
|
||||
///
|
||||
/// Version arguments must be given in sorted, ascending order:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use meli::{decl_version_map, version_migrations::*};
|
||||
/// decl_version_map! {
|
||||
/// v0_8_8::V0_8_8_ID => v0_8_8::V0_8_8,
|
||||
/// v0_8_9::V0_8_9_ID => v0_8_9::V0_8_9,
|
||||
/// v0_8_10::V0_8_10_ID => v0_8_10::V0_8_10,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Otherwise compilation will fail:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// # use meli::{decl_version_map, version_migrations::*};
|
||||
/// mod v0_0_0 {
|
||||
/// use meli::version_migrations::*;
|
||||
///
|
||||
/// pub const V0_0_0_ID: VersionIdentifier = VersionIdentifier::NULL;
|
||||
///
|
||||
/// #[derive(Clone, Copy, Debug)]
|
||||
/// pub struct V0_0_0;
|
||||
///
|
||||
/// impl Version for V0_0_0 {
|
||||
/// fn version(&self) -> &VersionIdentifier {
|
||||
/// &V0_0_0_ID
|
||||
/// }
|
||||
///
|
||||
/// fn migrations(&self) -> Vec<Box<dyn Migration + Send + Sync + 'static>> {
|
||||
/// vec![]
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// decl_version_map! {
|
||||
/// v0_8_8::V0_8_8_ID => v0_8_8::V0_8_8,
|
||||
/// v0_0_0::V0_0_0_ID => v0_0_0::V0_0_0,
|
||||
/// v0_8_9::V0_8_9_ID => v0_8_9::V0_8_9
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// # use meli::{decl_version_map, version_migrations::*};
|
||||
/// decl_version_map! {
|
||||
/// v0_8_9::V0_8_9_ID => v0_8_9::V0_8_9,
|
||||
/// v0_8_8::V0_8_8_ID => v0_8_8::V0_8_8
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! decl_version_map {
|
||||
($($version_id:path => $m:ident::$v:ident),*$(,)?) => {
|
||||
/// Return all versions in a [`VersionMap`] container.
|
||||
///
|
||||
/// The value is lazily initialized on first access.
|
||||
pub fn versions() -> &'static VersionMap {
|
||||
use std::sync::OnceLock;
|
||||
#[allow(dead_code)]
|
||||
const fn const_bytes_cmp(lhs: &[u8], rhs: &[u8]) -> std::cmp::Ordering {
|
||||
if lhs.len() < rhs.len() {
|
||||
return std::cmp::Ordering::Less;
|
||||
} else if lhs.len() > rhs.len() {
|
||||
return std::cmp::Ordering::Greater;
|
||||
};
|
||||
let mut i = 0;
|
||||
while i < lhs.len() {
|
||||
if lhs[i] < rhs[i] {
|
||||
return std::cmp::Ordering::Less;
|
||||
} else if lhs[i] > rhs[i] {
|
||||
return std::cmp::Ordering::Greater;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
std::cmp::Ordering::Equal
|
||||
}
|
||||
|
||||
static VERSIONS: OnceLock<VersionMap> = OnceLock::new();
|
||||
VERSIONS.get_or_init(|| {
|
||||
indexmap::indexmap! {
|
||||
v0_8_8::V0_8_8_ID => Box::new(V0_8_8) as Box<dyn Version + Send + Sync + 'static>,
|
||||
#[allow(dead_code)]
|
||||
const fn const_str_cmp(lhs: &str, rhs: &str) -> std::cmp::Ordering {
|
||||
const_bytes_cmp(lhs.as_bytes(), rhs.as_bytes())
|
||||
}
|
||||
|
||||
macro_rules! v_ids_cmp {
|
||||
($v2:expr, $v1:expr) => {{
|
||||
|
||||
$v2.major() >= $v1.major()
|
||||
&& ($v2.minor() >= $v1.minor())
|
||||
&& ($v2.patch() >= $v1.patch())
|
||||
&& ((const_str_cmp($v2.pre(), $v1.pre()) as i8 == std::cmp::Ordering::Greater as i8) || (const_str_cmp($v2.pre(), $v1.pre()) as i8 == std::cmp::Ordering::Equal as i8))
|
||||
&& !($v2.major() == $v1.major()
|
||||
&& ($v2.minor() == $v1.minor())
|
||||
&& ($v2.patch() == $v1.patch()))
|
||||
}}
|
||||
}
|
||||
macro_rules! is_version_ids_sorted {
|
||||
() => {
|
||||
true
|
||||
};
|
||||
($v0:expr) => {
|
||||
$v0 > VersionIdentifier::NULL && true
|
||||
};
|
||||
($v1:expr, $v2:expr) => {{
|
||||
v_ids_cmp!($v2, $v1)
|
||||
}};
|
||||
($v1:expr, $v2:expr, $tail_v:tt) => {{
|
||||
v_ids_cmp!($v2, $v1) && is_version_ids_sorted! { $v1, $tail_v }
|
||||
}};
|
||||
}
|
||||
const fn __assert_sorted() -> () {
|
||||
assert!(is_version_ids_sorted! { $($version_id),* }, "Version ids in decl_version_mods are not sorted! Please fix it.");
|
||||
}
|
||||
const _SORT_ASSERTION: () = __assert_sorted();
|
||||
const fn __assert_latest() -> () {
|
||||
macro_rules! latest_version_id {
|
||||
($v0:expr) => {
|
||||
$v0
|
||||
};
|
||||
($v1:expr, $v2:expr) => {{
|
||||
$v2
|
||||
}};
|
||||
($v1:expr, $v2:expr, $tail_v:tt) => {{
|
||||
latest_version_id! { $tail_v }
|
||||
}};
|
||||
}
|
||||
if let Some(current_version) = option_env!("CARGO_PKG_VERSION") {
|
||||
let latest_version = latest_version_id!{ $($version_id),* };
|
||||
if const_str_cmp(current_version, latest_version.as_str()) as i8 != std::cmp::Ordering::Equal as i8 {
|
||||
panic!("Current version does not match latest version from version migrations map declaration, please fix it.");
|
||||
}
|
||||
if const_str_cmp(current_version, LATEST.as_str()) as i8 != std::cmp::Ordering::Equal as i8 {
|
||||
panic!("Current version does not match latest version const `LATEST` in meli::version_migrations, please fix it.");
|
||||
}
|
||||
}
|
||||
}
|
||||
const _LATEST_ASSERTION: () = __assert_latest();
|
||||
|
||||
|
||||
static VERSIONS: OnceLock<VersionMap> = OnceLock::new();
|
||||
VERSIONS.get_or_init(|| {
|
||||
let val = indexmap::indexmap! {
|
||||
$(
|
||||
$version_id => Box::new($m::$v) as Box<dyn Version + Send + Sync + 'static>
|
||||
),*
|
||||
};
|
||||
{
|
||||
let version_ids = val.keys().collect::<Vec<_>>();
|
||||
let mut version_ids_sorted = version_ids.clone();
|
||||
version_ids_sorted.sort();
|
||||
assert_eq!(version_ids, version_ids_sorted, "Version map returned by versions() is not sorted! Check out decl_version_mods! invocation. Version ids were: {:?}", version_ids);
|
||||
}
|
||||
val
|
||||
})
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
use std::{cmp::Ordering, path::Path};
|
||||
/// Wrapper macro over [`decl_version_map`] that also defines the arguments as
|
||||
/// modules.
|
||||
macro_rules! decl_version_mods {
|
||||
($($version_id:path => $m:ident::$v:ident),*$(,)?) => {
|
||||
$(
|
||||
pub mod $m;
|
||||
pub use $m::$v;
|
||||
)*
|
||||
|
||||
decl_version_map! {
|
||||
$($version_id => $m::$v),*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
decl_version_mods! {
|
||||
v0_8_8::V0_8_8_ID => v0_8_8::V0_8_8,
|
||||
v0_8_9::V0_8_9_ID => v0_8_9::V0_8_9,
|
||||
v0_8_10::V0_8_10_ID => v0_8_10::V0_8_10
|
||||
}
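Following the conventions documented in this module (one module per release exposing a `const` identifier and a `Version` impl), a new release slot would look roughly like the hypothetical sketch below; `V9_9_9` and its identifier are placeholders, mirroring the macro's own doctest:

```rust
use meli::version_migrations::*;

// Placeholder identifier, as in the `decl_version_map!` doctest above; a real
// release would define the proper version fields instead of reusing `NULL`.
pub const V9_9_9_ID: VersionIdentifier = VersionIdentifier::NULL;

#[derive(Clone, Copy, Debug)]
pub struct V9_9_9;

impl Version for V9_9_9 {
    fn version(&self) -> &VersionIdentifier {
        &V9_9_9_ID
    }

    fn migrations(&self) -> Vec<Box<dyn Migration + Send + Sync + 'static>> {
        // No migrations shipped with this hypothetical release.
        vec![]
    }
}
```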
|
||||
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use indexmap::{self, IndexMap};
|
||||
use melib::{error::*, log};
|
||||
|
||||
use crate::{conf::FileSettings, terminal::Ask};
|
||||
|
||||
pub const LATEST: VersionIdentifier = v0_8_8::V0_8_8_ID;
|
||||
/// The latest version as defined in the Cargo manifest file of `meli`.
|
||||
///
|
||||
/// On compile-time if the `CARGO_PKG_VERSION` environment variable is
|
||||
/// available, the macro [`decl_version_map`] asserts that it matches the actual
|
||||
/// latest version string.
|
||||
pub const LATEST: VersionIdentifier = v0_8_10::V0_8_10_ID;
|
||||
|
||||
/// Inspect current/previous version setup, perform migrations if necessary,
|
||||
/// etc.
|
||||
pub fn version_setup(config: &Path) -> Result<()> {
|
||||
if let Ok(xdg_dirs) = xdg::BaseDirectories::with_prefix("meli") {
|
||||
let version_file = match xdg_dirs.place_data_file(".version") {
|
||||
Ok(v) => v,
|
||||
Err(err) => {
|
||||
log::debug!(
|
||||
"Could not place file with version metadata, .version, in your \
|
||||
${{XDG_DATA_HOME}}: {}",
|
||||
err
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
let stored_version = if !version_file.try_exists().unwrap_or(false) {
|
||||
None
|
||||
} else {
|
||||
let mut stored_version =
|
||||
std::fs::read_to_string(&version_file).chain_err_related_path(&version_file)?;
|
||||
while stored_version.ends_with(['\r', '\n', ' ', '\t']) {
|
||||
stored_version.pop();
|
||||
}
|
||||
if LATEST.as_str() == stored_version {
|
||||
return Ok(());
|
||||
}
|
||||
Some(stored_version)
|
||||
};
|
||||
let version_map = versions();
|
||||
if let Some(newer_versions) = stored_version
|
||||
.as_ref()
|
||||
.and_then(|v| version_map.get_index_of(v.as_str()))
|
||||
.or(Some(0))
|
||||
.and_then(|i| version_map.get_range(i..))
|
||||
{
|
||||
let mut migrations = vec![];
|
||||
for (k, v) in newer_versions {
|
||||
let vec = v.migrations();
|
||||
if !vec.is_empty() {
|
||||
migrations.push((k, vec));
|
||||
}
|
||||
}
|
||||
if migrations.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
if let Some(prev) = stored_version {
|
||||
println!(
|
||||
"meli appears updated; file {} contains the value {:?} and the latest version \
|
||||
is {}",
|
||||
version_file.display(),
|
||||
prev,
|
||||
LATEST
|
||||
);
|
||||
} else {
|
||||
// Check if any migrations are applicable; they might not be any (for example if
|
||||
// user runs meli for the first time).
|
||||
if !migrations.iter().any(|(_, migrs)| {
|
||||
migrs
|
||||
.iter()
|
||||
.any(|migr| migr.is_applicable(config) != Some(false))
|
||||
}) {
|
||||
log::info!(
|
||||
"Creating version info file {} with value {}",
|
||||
version_file.display(),
|
||||
LATEST
|
||||
);
|
||||
std::fs::write(&version_file, LATEST.as_str())
|
||||
.chain_err_related_path(&version_file)?;
|
||||
return Ok(());
|
||||
}
|
||||
println!(
|
||||
"meli appears updated; version file {} was not found and there are potential \
|
||||
migrations to be made.",
|
||||
version_file.display()
|
||||
);
|
||||
}
|
||||
println!(
|
||||
"You might need to migrate your configuration data for the new version to \
|
||||
work.\nYou can skip any changes you don't want to happen and you can quit at any \
|
||||
time."
|
||||
);
|
||||
println!(
|
||||
"{} migration{} {} about to be performed:",
|
||||
migrations.len(),
|
||||
if migrations.len() == 1 { "" } else { "s" },
|
||||
if migrations.len() == 1 { "is" } else { "are" }
|
||||
);
|
||||
for (vers, migrs) in &migrations {
|
||||
for m in migrs {
|
||||
println!("v{}/{}: {}", vers, m.id(), m.description());
|
||||
}
|
||||
}
|
||||
let ask = Ask::new(format!(
|
||||
"Perform {} migration{}?",
|
||||
migrations.len(),
|
||||
if migrations.len() == 1 { "" } else { "s" }
|
||||
));
|
||||
if !ask.run() {
|
||||
let ask = Ask::new("Update .version file despite not attempting migrations?")
|
||||
.yes_by_default(false);
|
||||
if ask.run() {
|
||||
std::fs::write(&version_file, LATEST.as_str())
|
||||
.chain_err_related_path(&version_file)?;
|
||||
return Ok(());
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
let mut perform_history: Vec<Box<dyn Migration + Send + Sync + 'static>> = vec![];
|
||||
for (vers, migrs) in migrations {
|
||||
println!("Updating to {}...", vers);
|
||||
'migrations: for m in migrs {
|
||||
let ask = Ask::new(m.question());
|
||||
if ask.run() {
|
||||
if let Err(err) = m.perform(config, false, true) {
|
||||
println!("\nCould not perform migration: {}", err);
|
||||
let ask = Ask::new("Continue?");
|
||||
if ask.run() {
|
||||
continue 'migrations;
|
||||
}
|
||||
if !perform_history.is_empty() {
|
||||
let ask =
|
||||
Ask::new("Undo already performed migrations before exiting?")
|
||||
.without_default();
|
||||
if ask.run() {
|
||||
while let Some(m) = perform_history.pop() {
|
||||
print!("Undoing {}...", m.id());
|
||||
if let Err(err) = m.revert(config, false, true) {
|
||||
println!(
|
||||
" [ERROR] could not revert migration: {}",
|
||||
err
|
||||
);
|
||||
} else {
|
||||
println!(" [OK]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
println!("v{}/{} [OK]", vers, m.id());
|
||||
perform_history.push(m);
|
||||
}
|
||||
}
|
||||
}
|
||||
std::fs::write(&version_file, LATEST.as_str()).chain_err_related_path(&version_file)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// An application version identifier.
|
||||
/// An application version identifier with [Semantic Versioning v2.0.0]
|
||||
/// semantics.
|
||||
///
|
||||
/// There's no support for "Build metadata" of the specification since we're not
|
||||
/// using those.
|
||||
///
|
||||
/// [Semantic Versioning v2.0.0]: https://semver.org/spec/v2.0.0.html
|
||||
#[derive(Clone, Copy, Debug, Eq)]
|
||||
pub struct VersionIdentifier {
|
||||
string: &'static str,
|
||||
major: u8,
|
||||
minor: u8,
|
||||
patch: u8,
|
||||
pre: Option<&'static str>,
|
||||
pre: &'static str,
|
||||
}
|
||||
|
||||
impl VersionIdentifier {
|
||||
/// An invalid non-existent release, `v0.0.0`, used for comparison with
|
||||
/// other identifiers.
|
||||
pub const NULL: Self = Self {
|
||||
string: "0.0.0",
|
||||
major: 0,
|
||||
minor: 0,
|
||||
patch: 0,
|
||||
pre: "",
|
||||
};
|
||||
|
||||
/// The identifier as a string.
|
||||
pub const fn as_str(&self) -> &'static str {
|
||||
self.string
|
||||
|
@ -230,7 +329,7 @@ impl VersionIdentifier {
|
|||
}
|
||||
|
||||
/// The pre-release part of the version (`MAJOR.MINOR.PATCH[-PRE]`).
|
||||
pub const fn pre(&self) -> Option<&'static str> {
|
||||
pub const fn pre(&self) -> &'static str {
|
||||
self.pre
|
||||
}
|
||||
}
|
||||
|
@ -241,6 +340,8 @@ impl std::fmt::Display for VersionIdentifier {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compare `(self.major, self.minor, self.patch, self.pre)` fields with another
|
||||
/// [`VersionIdentifier`].
|
||||
impl Ord for VersionIdentifier {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
(self.major, self.minor, self.patch, self.pre).cmp(&(
|
||||
|
@ -252,6 +353,8 @@ impl Ord for VersionIdentifier {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compare `(self.major, self.minor, self.patch, self.pre)` fields with another
|
||||
/// [`VersionIdentifier`].
|
||||
impl PartialOrd for VersionIdentifier {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
|
@ -269,14 +372,15 @@ impl std::borrow::Borrow<str> for VersionIdentifier {
|
|||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for VersionIdentifier {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
(self.major, self.minor, self.patch, self.pre).hash(state)
|
||||
self.as_str().hash(state)
|
||||
}
|
||||
}
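Since ordering and hashing derive from the `(major, minor, patch, pre)` tuple and the string form respectively, identifiers compare the way the docs above describe. A hedged sketch (the module paths are assumed from the `decl_version_mods!` invocation earlier in this diff):

```rust
use meli::version_migrations::{v0_8_10::V0_8_10_ID, v0_8_8::V0_8_8_ID, VersionIdentifier};

#[test]
fn versions_order_as_tuples() {
    assert!(VersionIdentifier::NULL < V0_8_8_ID);
    assert!(V0_8_8_ID < V0_8_10_ID);
    // `pre()` is now a plain `&'static str`; it is empty for the NULL identifier.
    assert_eq!(VersionIdentifier::NULL.pre(), "");
}
```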
|
||||
|
||||
/// A migration contained in a released version.
pub trait Migration {
pub trait Migration: Send + Sync {
    /// The source code identifier of the migration; usually the `struct`'s Rust
    /// identifier.
    fn id(&self) -> &'static str;

@ -297,13 +401,246 @@ pub trait Migration {
}

/// A released application version.
pub trait Version {
pub trait Version: Send + Sync {
    /// Associated version identifier.
    fn version(&self) -> &VersionIdentifier;
    /// Associated migrations, if any.
    fn migrations(&self) -> Vec<Box<dyn Migration + Send + Sync + 'static>>;
    // /// Associated changelog, if any.
    // fn changelog(&self) -> &str;
    // /// Important notice messagese for users, if any.
    // /// Important notice messages for users, if any.
    // fn notices(&self) -> &[&str];
}

impl std::fmt::Debug for dyn Version + Send + Sync {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        fmt.debug_struct(melib::identify!(dyn Version + Send + Sync))
            .field("version", &self.version())
            .field("migrations", &self.migrations())
            .finish_non_exhaustive()
    }
}

impl std::fmt::Debug for dyn Migration + Send + Sync {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        fmt.debug_struct(melib::identify!(dyn Migration + Send + Sync))
            .field("id", &self.id())
            .field("version", &self.version())
            .field("description", &self.description())
            .finish_non_exhaustive()
    }
}
|
||||
|
||||
/// Return the path to the `.version` file, a plain text file that contains the
/// version of meli that "owns" the configuration and data files.
///
/// The actual path examined is `${XDG_DATA_HOME}/meli/.version`.
pub fn version_file() -> Result<PathBuf> {
    let xdg_dirs = xdg::BaseDirectories::with_prefix("meli")?;
    Ok(xdg_dirs.place_data_file(".version")?)
}
|
||||
|
||||
/// Inspect current/previous version setup, perform migrations if necessary,
|
||||
/// etc.
|
||||
///
|
||||
/// This function requires an interactive user session, if stdout is not an
|
||||
/// interactive TTY, the process caller must ensure `stdin` contains the
|
||||
/// necessary input (`y`, `n`, newline) otherwise this function _blocks_.
|
||||
pub fn version_setup(
|
||||
config: &Path,
|
||||
writer: &mut impl std::io::Write,
|
||||
reader: &mut impl std::io::BufRead,
|
||||
) -> Result<()> {
|
||||
let version_file = match version_file() {
|
||||
Ok(v) => v,
|
||||
Err(err) => {
|
||||
log::debug!(
|
||||
"Could not place file with version metadata, .version, in your \
|
||||
${{XDG_DATA_HOME}}: {}",
|
||||
err
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
let stored_version = if !version_file.try_exists().unwrap_or(false) {
|
||||
None
|
||||
} else {
|
||||
let mut stored_version =
|
||||
std::fs::read_to_string(&version_file).chain_err_related_path(&version_file)?;
|
||||
while stored_version.ends_with(['\r', '\n', ' ', '\t']) {
|
||||
stored_version.pop();
|
||||
}
|
||||
if LATEST.as_str() == stored_version {
|
||||
return Ok(());
|
||||
}
|
||||
Some(stored_version)
|
||||
};
|
||||
let version_map = versions();
|
||||
let migrations = calculate_migrations(stored_version.as_deref(), version_map);
|
||||
if !migrations.is_empty() {
|
||||
if let Some(prev) = stored_version {
|
||||
if prev.as_str() < LATEST.as_str() {
|
||||
writeln!(
|
||||
writer,
|
||||
"meli appears updated; file {} contains the value {:?} and the latest version \
|
||||
is {}",
|
||||
version_file.display(),
|
||||
prev,
|
||||
LATEST
|
||||
)?;
|
||||
writer.flush()?;
|
||||
} else {
|
||||
writeln!(
|
||||
writer,
|
||||
"This version of meli, {}, appears to be older than the previously used one \
|
||||
stored in the file {}: {}.",
|
||||
LATEST,
|
||||
version_file.display(),
|
||||
prev,
|
||||
)?;
|
||||
writeln!(
|
||||
writer,
|
||||
"Certain configuration options might not be compatible with this version, \
|
||||
refer to release changelogs if you need to troubleshoot configuration \
|
||||
options problems."
|
||||
)?;
|
||||
writer.flush()?;
|
||||
let ask = Ask::new(
|
||||
"Update .version file to make this warning go away? (CAUTION: current \
|
||||
configuration and stored data might not be compatible with this version!!)",
|
||||
)
|
||||
.yes_by_default(false);
|
||||
if ask.run(writer, reader) {
|
||||
std::fs::write(&version_file, LATEST.as_str())
|
||||
.chain_err_related_path(&version_file)?;
|
||||
return Ok(());
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
} else {
|
||||
// Check if any migrations are applicable; they might not be any (for example if
|
||||
// user runs meli for the first time).
|
||||
if !migrations.iter().any(|(_, migrs)| {
|
||||
migrs
|
||||
.iter()
|
||||
.any(|migr| migr.is_applicable(config) != Some(false))
|
||||
}) {
|
||||
log::info!(
|
||||
"Creating version info file {} with value {}",
|
||||
version_file.display(),
|
||||
LATEST
|
||||
);
|
||||
std::fs::write(&version_file, LATEST.as_str())
|
||||
.chain_err_related_path(&version_file)?;
|
||||
return Ok(());
|
||||
}
|
||||
writeln!(
|
||||
writer,
|
||||
"meli appears updated; version file {} was not found and there are potential \
|
||||
migrations to be made.",
|
||||
version_file.display()
|
||||
)?;
|
||||
writer.flush()?;
|
||||
}
|
||||
writeln!(
|
||||
writer,
|
||||
"You might need to migrate your configuration data for the new version to work.\nYou \
|
||||
can skip any changes you don't want to happen and you can quit at any time."
|
||||
)?;
|
||||
writeln!(
|
||||
writer,
|
||||
"{} migration{} {} about to be performed:",
|
||||
migrations.len(),
|
||||
if migrations.len() == 1 { "" } else { "s" },
|
||||
if migrations.len() == 1 { "is" } else { "are" }
|
||||
)?;
|
||||
for (vers, migrs) in &migrations {
|
||||
for m in migrs {
|
||||
writeln!(writer, "v{}/{}: {}", vers, m.id(), m.description())?;
|
||||
}
|
||||
}
|
||||
writer.flush()?;
|
||||
let ask = Ask::new(format!(
|
||||
"Perform {} migration{}?",
|
||||
migrations.len(),
|
||||
if migrations.len() == 1 { "" } else { "s" }
|
||||
));
|
||||
if !ask.run(writer, reader) {
|
||||
let ask = Ask::new("Update .version file despite not attempting migrations?")
|
||||
.yes_by_default(false);
|
||||
if ask.run(writer, reader) {
|
||||
std::fs::write(&version_file, LATEST.as_str())
|
||||
.chain_err_related_path(&version_file)?;
|
||||
return Ok(());
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
let mut perform_history: Vec<Box<dyn Migration + 'static>> = vec![];
|
||||
for (vers, migrs) in migrations {
|
||||
writeln!(writer, "Updating to {}...", vers)?;
|
||||
writer.flush()?;
|
||||
'migrations: for m in migrs {
|
||||
let ask = Ask::new(m.question());
|
||||
if ask.run(writer, reader) {
|
||||
if let Err(err) = m.perform(config, false, true) {
|
||||
writeln!(writer, "\nCould not perform migration: {}", err)?;
|
||||
writer.flush()?;
|
||||
let ask = Ask::new("Continue?");
|
||||
if ask.run(writer, reader) {
|
||||
continue 'migrations;
|
||||
}
|
||||
if !perform_history.is_empty() {
|
||||
let ask = Ask::new("Undo already performed migrations before exiting?")
|
||||
.without_default();
|
||||
if ask.run(writer, reader) {
|
||||
while let Some(m) = perform_history.pop() {
|
||||
write!(writer, "Undoing {}...", m.id())?;
|
||||
writer.flush()?;
|
||||
if let Err(err) = m.revert(config, false, true) {
|
||||
writeln!(
|
||||
writer,
|
||||
" [ERROR] could not revert migration: {}",
|
||||
err
|
||||
)?;
|
||||
} else {
|
||||
writeln!(writer, " [OK]")?;
|
||||
}
|
||||
writer.flush()?;
|
||||
}
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
writeln!(writer, "v{}/{} [OK]", vers, m.id())?;
|
||||
writer.flush()?;
|
||||
perform_history.push(m);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
std::fs::write(&version_file, LATEST.as_str()).chain_err_related_path(&version_file)?;
|
||||
|
||||
Ok(())
|
||||
}
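`version_setup` takes its output and input streams as parameters instead of touching stdout and stdin directly, which is what lets the forked tests in `tests.rs` below drive the prompts with in-memory buffers. A minimal sketch of that injected-I/O style, using a hypothetical `confirm` helper rather than meli's `Ask` type:

```rust
use std::io::{BufRead, Write};

// Hypothetical helper demonstrating the pattern; meli's own prompt type is `Ask`.
fn confirm(
    question: &str,
    writer: &mut impl Write,
    reader: &mut impl BufRead,
) -> std::io::Result<bool> {
    write!(writer, "{} [y/N] ", question)?;
    writer.flush()?;
    let mut answer = String::new();
    reader.read_line(&mut answer)?;
    Ok(matches!(answer.trim(), "y" | "Y" | "yes"))
}

fn main() -> std::io::Result<()> {
    // Tests can substitute a byte buffer for stdout and a byte slice for stdin.
    let mut out = Vec::new();
    let mut input = &b"y\n"[..];
    let mut reader = std::io::BufReader::new(&mut input);
    assert!(confirm("Proceed?", &mut out, &mut reader)?);
    assert_eq!(String::from_utf8_lossy(&out), "Proceed? [y/N] ");
    Ok(())
}
```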
|
||||
|
||||
/// Return any migrations between current version and latest version, if any.
pub fn calculate_migrations<'v>(
    current_version: Option<&str>,
    version_map: &'v VersionMap,
) -> Vec<(&'v VersionIdentifier, Vec<Box<dyn Migration + Send + Sync>>)> {
    let mut migrations = vec![];
    if let Some(newer_versions) = current_version
        .and_then(|v| version_map.get_index_of(v))
        .map(|i| i + 1)
        .or(Some(0))
        .and_then(|i| version_map.get_range(i..))
    {
        for (k, v) in newer_versions {
            let vec = v.migrations();
            if !vec.is_empty() {
                migrations.push((k, vec));
            }
        }
    }
    migrations
}
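The lookup above leans on the insertion order of the version map: find the index of the previously stored version, step one past it, and take every remaining entry. A small sketch of that slicing with a toy map (illustrative keys and values, assuming `indexmap` 2.x for `get_range`):

```rust
use indexmap::{indexmap, IndexMap};

fn main() {
    // Toy ordered map of version string -> release label.
    let releases: IndexMap<&str, &str> = indexmap! {
        "0.8.8" => "v0.8.8",
        "0.8.9" => "v0.8.9",
        "0.8.10" => "v0.8.10",
    };

    // Known previous version: start right after its index.
    let start = releases.get_index_of("0.8.8").map(|i| i + 1).unwrap_or(0);
    let mut newer = Vec::new();
    if let Some(slice) = releases.get_range(start..) {
        for (_version, label) in slice.iter() {
            newer.push(*label);
        }
    }
    assert_eq!(newer, ["v0.8.9", "v0.8.10"]);

    // Unknown previous version (e.g. a fresh install): start stays 0, so every
    // entry counts as newer.
    assert_eq!(releases.get_index_of("0.0.1"), None);
}
```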
|
||||
|
|
145 meli/src/version_migrations/tests.rs Normal file
|
@ -0,0 +1,145 @@
|
|||
//
|
||||
// meli
|
||||
//
|
||||
// Copyright 2024 Emmanouil Pitsidianakis <manos@pitsidianak.is>
|
||||
//
|
||||
// This file is part of meli.
|
||||
//
|
||||
// meli is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// meli is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with meli. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
use rusty_fork::rusty_fork_test;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_version_migrations_version_map() {
|
||||
let version_map = indexmap::indexmap! {
|
||||
v0_8_8::V0_8_8_ID => Box::new(V0_8_8) as Box<dyn Version + Send + Sync + 'static>,
|
||||
v0_8_9::V0_8_9_ID => Box::new(V0_8_9) as Box<dyn Version + Send + Sync + 'static>,
|
||||
};
|
||||
assert!(
|
||||
version_map.contains_key("0.8.8"),
|
||||
"Could not access Version identifier by &str key in version map"
|
||||
);
|
||||
assert!(
|
||||
version_map.contains_key("0.8.9"),
|
||||
"Could not access Version identifier by &str key in version map"
|
||||
);
|
||||
assert!(!version_map.contains_key("0.0.0"),);
|
||||
assert!(
|
||||
version_map.contains_key(&v0_8_8::V0_8_8_ID),
|
||||
"Could not access Version identifier by VersionIdentifier key in version map"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_version_migrations_returns_correct_migration() {
|
||||
let version_map = indexmap::indexmap! {
|
||||
v0_8_8::V0_8_8_ID => Box::new(V0_8_8) as Box<dyn Version + Send + Sync + 'static>,
|
||||
v0_8_9::V0_8_9_ID => Box::new(V0_8_9) as Box<dyn Version + Send + Sync + 'static>,
|
||||
};
|
||||
let migrations = calculate_migrations(Some("0.8.8"), &version_map);
|
||||
assert!(
|
||||
migrations.is_empty(),
|
||||
"Calculated migrations between 0.8.8 and 0.8.9 are not empty: {:?}",
|
||||
migrations
|
||||
);
|
||||
let migrations = calculate_migrations(None, &version_map);
|
||||
assert!(
|
||||
!migrations.is_empty(),
|
||||
"Calculated migrations between no version and 0.8.8 are empty",
|
||||
);
|
||||
}
|
||||
|
||||
rusty_fork_test! {
|
||||
#[test]
|
||||
fn test_version_migrations_ignores_newer_version() {
|
||||
const MAX: VersionIdentifier = VersionIdentifier {
|
||||
string: "255.255.255",
|
||||
major: u8::MAX,
|
||||
minor: u8::MAX,
|
||||
patch: u8::MAX,
|
||||
pre: "",
|
||||
};
|
||||
let tempdir = tempfile::tempdir().unwrap();
|
||||
for var in [
|
||||
"MELI_CONFIG",
|
||||
"HOME",
|
||||
"XDG_CACHE_HOME",
|
||||
"XDG_STATE_HOME",
|
||||
"XDG_CONFIG_DIRS",
|
||||
"XDG_CONFIG_HOME",
|
||||
"XDG_DATA_DIRS",
|
||||
"XDG_DATA_HOME",
|
||||
] {
|
||||
std::env::remove_var(var);
|
||||
}
|
||||
std::env::set_var("HOME", tempdir.path());
|
||||
std::env::set_var("XDG_DATA_HOME", tempdir.path());
|
||||
let version_file = version_file().unwrap();
|
||||
std::fs::write(&version_file, MAX.as_str()).unwrap();
|
||||
let config_path = tempdir.path().join("meli.toml");
|
||||
std::env::set_var("MELI_CONFIG", config_path.as_path());
|
||||
std::fs::write(&config_path,
|
||||
br#"
|
||||
[accounts.imap]
|
||||
root_mailbox = "INBOX"
|
||||
format = "imap"
|
||||
send_mail = 'false'
|
||||
identity="username@example.com"
|
||||
server_username = "null"
|
||||
server_hostname = "example.com"
|
||||
server_password_command = "false"
|
||||
"#).unwrap();
|
||||
|
||||
{
|
||||
let mut stdout = vec![];
|
||||
let mut stdin = &b"y\n"[..];
|
||||
let mut stdin_buf_reader = std::io::BufReader::new(&mut stdin);
|
||||
version_setup(&config_path, &mut stdout, &mut stdin_buf_reader).unwrap();
|
||||
let expected_output = format!("This version of meli, {latest}, appears to be older than the previously used one stored in the file {version_file}: {max_version}.\nCertain configuration options might not be compatible with this version, refer to release changelogs if you need to troubleshoot configuration options problems.\nUpdate .version file to make this warning go away? (CAUTION: current configuration and stored data might not be compatible with this version!!) [y/N] ", latest = LATEST.as_str(), version_file = version_file.display(), max_version = MAX.as_str());
|
||||
assert_eq!(String::from_utf8_lossy(&stdout).as_ref(), &expected_output);
|
||||
assert_eq!(stdin_buf_reader.buffer(), b"");
|
||||
let updated_version =
|
||||
std::fs::read_to_string(&version_file).unwrap();
|
||||
assert_eq!(updated_version.trim(), LATEST.as_str());
|
||||
}
|
||||
{
|
||||
use std::io::BufRead;
|
||||
|
||||
let mut stdout = vec![];
|
||||
let mut stdin = &b"N\n"[..];
|
||||
let mut stdin_buf_reader = std::io::BufReader::new(&mut stdin);
|
||||
|
||||
version_setup(&config_path, &mut stdout, &mut stdin_buf_reader).unwrap();
|
||||
assert_eq!(String::from_utf8_lossy(&stdout).as_ref(), "");
|
||||
assert_eq!(stdin_buf_reader.fill_buf().unwrap(), b"N\n");
|
||||
}
|
||||
{
|
||||
std::fs::write(&version_file, MAX.as_str()).unwrap();
|
||||
let mut stdout = vec![];
|
||||
let mut stdin = &b"n\n"[..];
|
||||
let mut stdin_buf_reader = std::io::BufReader::new(&mut stdin);
|
||||
version_setup(&config_path, &mut stdout, &mut stdin_buf_reader).unwrap();
|
||||
let expected_output = format!("This version of meli, {latest}, appears to be older than the previously used one stored in the file {version_file}: {max_version}.\nCertain configuration options might not be compatible with this version, refer to release changelogs if you need to troubleshoot configuration options problems.\nUpdate .version file to make this warning go away? (CAUTION: current configuration and stored data might not be compatible with this version!!) [y/N] ", latest = LATEST.as_str(), version_file = version_file.display(), max_version = MAX.as_str());
|
||||
assert_eq!(String::from_utf8_lossy(&stdout).as_ref(), &expected_output);
|
||||
assert_eq!(stdin_buf_reader.buffer(), b"");
|
||||
let stored_version =
|
||||
std::fs::read_to_string(&version_file).unwrap();
|
||||
assert_eq!(stored_version.trim(), MAX.as_str());
|
||||
}
|
||||
}
|
||||
}
|
48 meli/src/version_migrations/v0_8_10.rs Normal file
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
// meli
|
||||
//
|
||||
// Copyright 2024 Emmanouil Pitsidianakis <manos@pitsidianak.is>
|
||||
//
|
||||
// This file is part of meli.
|
||||
//
|
||||
// meli is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// meli is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with meli. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
//! <https://release.meli-email.org/v0.8.10>
|
||||
|
||||
use crate::version_migrations::*;
|
||||
|
||||
/// <https://release.meli-email.org/v0.8.10>
|
||||
pub const V0_8_10_ID: VersionIdentifier = VersionIdentifier {
|
||||
string: "0.8.10",
|
||||
major: 0,
|
||||
minor: 8,
|
||||
patch: 10,
|
||||
pre: "",
|
||||
};
|
||||
|
||||
/// <https://release.meli-email.org/v0.8.10>
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct V0_8_10;
|
||||
|
||||
impl Version for V0_8_10 {
|
||||
fn version(&self) -> &VersionIdentifier {
|
||||
&V0_8_10_ID
|
||||
}
|
||||
|
||||
fn migrations(&self) -> Vec<Box<dyn Migration + Send + Sync + 'static>> {
|
||||
vec![]
|
||||
}
|
||||
}
|
|
@ -20,18 +20,22 @@
|
|||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
//! <https://release.meli-email.org/v0.8.8>
|
||||
|
||||
use crate::version_migrations::*;
|
||||
|
||||
pub(super) const V0_8_8_ID: VersionIdentifier = VersionIdentifier {
|
||||
/// <https://release.meli-email.org/v0.8.8>
|
||||
pub const V0_8_8_ID: VersionIdentifier = VersionIdentifier {
|
||||
string: "0.8.8",
|
||||
major: 0,
|
||||
minor: 8,
|
||||
patch: 8,
|
||||
pre: None,
|
||||
pre: "",
|
||||
};
|
||||
|
||||
/// <https://release.meli-email.org/v0.8.8>
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub(super) struct V0_8_8;
|
||||
pub struct V0_8_8;
|
||||
|
||||
impl Version for V0_8_8 {
|
||||
fn version(&self) -> &VersionIdentifier {
|
||||
|
@ -43,6 +47,9 @@ impl Version for V0_8_8 {
|
|||
}
|
||||
}
|
||||
|
||||
/// Rename `addressbook` to `contacts`.
|
||||
///
|
||||
/// "The storage file for contacts, stored in the application's data folder, was renamed from `addressbook` to `contacts` to better reflect its purpose."
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
struct AddressbookRename;
|
||||
|
||||
|
|
48 meli/src/version_migrations/v0_8_9.rs Normal file
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
// meli
|
||||
//
|
||||
// Copyright 2024 Emmanouil Pitsidianakis <manos@pitsidianak.is>
|
||||
//
|
||||
// This file is part of meli.
|
||||
//
|
||||
// meli is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// meli is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with meli. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
//! <https://release.meli-email.org/v0.8.9>
|
||||
|
||||
use crate::version_migrations::*;
|
||||
|
||||
/// <https://release.meli-email.org/v0.8.9>
|
||||
pub const V0_8_9_ID: VersionIdentifier = VersionIdentifier {
|
||||
string: "0.8.9",
|
||||
major: 0,
|
||||
minor: 8,
|
||||
patch: 9,
|
||||
pre: "",
|
||||
};
|
||||
|
||||
/// <https://release.meli-email.org/v0.8.9>
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct V0_8_9;
|
||||
|
||||
impl Version for V0_8_9 {
|
||||
fn version(&self) -> &VersionIdentifier {
|
||||
&V0_8_9_ID
|
||||
}
|
||||
|
||||
fn migrations(&self) -> Vec<Box<dyn Migration + Send + Sync + 'static>> {
|
||||
vec![]
|
||||
}
|
||||
}
|
|
@ -24,10 +24,11 @@ use std::{io::Write, path::Path};
|
|||
|
||||
use assert_cmd::{assert::OutputAssertExt, Command};
|
||||
use predicates::prelude::*;
|
||||
use sealed_test::prelude::*;
|
||||
use rusty_fork::rusty_fork_test;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[sealed_test]
|
||||
rusty_fork_test! {
|
||||
#[test]
|
||||
fn test_cli_subcommands() {
|
||||
for var in [
|
||||
"PAGER",
|
||||
|
@ -103,6 +104,7 @@ fn test_cli_subcommands() {
|
|||
for (man, dir) in [
|
||||
("meli.1", "man1"),
|
||||
("meli.conf.5", "man5"),
|
||||
("meli.conf.examples.5", "man5"),
|
||||
("meli-themes.5", "man5"),
|
||||
("meli.7", "man7"),
|
||||
] {
|
||||
|
@ -150,6 +152,7 @@ server_password_command = "false"
|
|||
for (man, title) in [
|
||||
("meli.1", "MELI(1)"),
|
||||
("meli.conf.5", "MELI.CONF(5)"),
|
||||
("meli.conf.examples.5", "MELI.CONF.EXAMPLES(5)"),
|
||||
("meli-themes.5", "MELI-THEMES(5)"),
|
||||
("meli.7", "MELI(7)"),
|
||||
] {
|
||||
|
@ -278,3 +281,4 @@ server_password_command = "false"
|
|||
|
||||
tmp_dir.close().unwrap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "melib"
|
||||
version = "0.8.8"
|
||||
version = "0.8.10"
|
||||
authors = ["Manos Pitsidianakis <manos@pitsidianak.is>"]
|
||||
homepage = "https://meli-email.org"
|
||||
repository = "https://git.meli-email.org/meli/meli.git"
|
||||
|
@ -25,7 +25,6 @@ bitflags = { version = "2.4", features = ["serde"] }
|
|||
cfg-if = { version = "^1.0.0" }
|
||||
chrono = { version = "^0.4", default-features = false }
|
||||
data-encoding = { version = "2.1.1" }
|
||||
encoding = { version = "0.2.33", default-features = false }
|
||||
encoding_rs = { version = "^0.8" }
|
||||
flate2 = { version = "1.0.16" }
|
||||
futures = { version = "0.3.30", default-features = false, features = ["async-await", "executor", "std"] }
|
||||
|
@ -81,7 +80,6 @@ tls-static = ["tls", "native-tls/vendored"]
|
|||
flate2 = { version = "1.0.16" }
|
||||
|
||||
[dev-dependencies]
|
||||
sealed_test = { version = "1.1.0" }
|
||||
stderrlog = { version = "^0.5" }
|
||||
rusty-fork = { version = "0.3.0" }
|
||||
tempfile = { version = "3.3" }
|
||||
toml = { version = "0.8", default-features = false, features = ["display","preserve_order","parse"] }
|
||||
|
|
|
@ -1,30 +1,45 @@
|
|||
<!-- SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later -->
|
||||
# melib
|
||||
|
||||
[](https://github.com/meli/meli/blob/master/COPYING) [](https://crates.io/crates/melib) [](https://docs.rs/melib)
|
||||
|
||||
Library for handling email for email clients.
|
||||
|
||||
## Optional Features
|
||||
## Cargo Compile-time Features
|
||||
|
||||
| Feature flag | Dependencies | Notes |
|
||||
|---------------|--------------------------------------------------|------------------------------------------------------|
|
||||
| `maildir` | `notify` crate | Provides the *maildir* backend |
|
||||
| `mbox-notify` | `notify` crate | Provides notification support for the *mbox* backend |
|
||||
| `notmuch` | `maildir` feature | Provides the *notmuch* backend |
|
||||
| `imap` | `imap-codec` crate, `tls` feature | Provides the *IMAP* backend |
|
||||
| `jmap` | `http` feature, `url` crate with `serde` feature | Provides the *JMAP* backend |
|
||||
| `nntp` | `tls` feature | Provides the *NNTP* (Usenet) backend |
|
||||
| `smtp` | `tls` feature | Integrated async *SMTP* client |
|
||||
| `sqlite3` | `rusqlite` crate with `bundled-full` feature | Used in caches |
|
||||
| `gpgme` | | *GPG* use by dynamically loading `libgpgme.so` |
|
||||
| `http` | `isahc` crate | Used for *HTTP* client needs, notably JMAP` |
|
||||
| `tls` | `native-tls` crate | |
|
||||
| `http-static` | `isahc` crate with `static-curl` feature | Links with `curl` statically |
|
||||
| `tls-static` | `native-tls` crate with `vendored` feature | Links with `OpenSSL` statically where it's used |
|
||||
| `imap-trace` | `imap` feature | Connection trace logs on the `trace` logging level |
|
||||
| `jmap-trace` | `jmap` feature | Connection trace logs on the `trace` logging level |
|
||||
| `nntp-trace` | `nntp` feature | Connection trace logs on the `trace` logging level |
|
||||
| `smtp-trace` | `smtp` feature | Connection trace logs on the `trace` logging level |
|
||||
`melib` supports opting in and out of features at compile time with cargo features.
|
||||
|
||||
The contents of the `default` feature are:
|
||||
|
||||
```toml
|
||||
default = ["imap", "nntp", "maildir", "mbox-notify", "smtp"]
|
||||
```
|
||||
|
||||
A list of all the features and a description for each follows:
|
||||
|
||||
| Feature flag | Dependencies | Notes |
|
||||
|-------------------------------------------------------|--------------------------------------------------|------------------------------------------------------|
|
||||
| <a name="maildir-feature">`maildir`</a> | `notify` crate | Provides the *maildir* backend |
|
||||
| <a name="mbox-notify-feature">`mbox-notify`</a> | `notify` crate | Provides notification support for the *mbox* backend |
|
||||
| <a name="notmuch-feature">`notmuch`</a> | `maildir` feature | Provides the *notmuch* backend |
|
||||
| <a name="imap-feature">`imap`</a> | `imap-codec` crate, `tls` feature | Provides the *IMAP* backend |
|
||||
| <a name="jmap-feature">`jmap`</a> | `http` feature, `url` crate with `serde` feature | Provides the *JMAP* backend |
|
||||
| <a name="nntp-feature">`nntp`</a> | `tls` feature | Provides the *NNTP* (Usenet) backend |
|
||||
| <a name="smtp-feature">`smtp`</a> | `tls` feature | Integrated async *SMTP* client |
|
||||
| <a name="sqlite3-feature">`sqlite3`</a> | `rusqlite` crate with `bundled-full` feature | Used in caches |
|
||||
| <a name="sqlite3-static-feature">`sqlite3-static`</a> | `rusqlite` crate with `bundled-full` feature | Same as `sqlite3` feature but provided for consistency and in case `sqlite3` feature stops bundling libsqlite3 statically in the future.
|
||||
| <a name="gpgme-feature">`gpgme`</a> | | *GPG* use by dynamically loading `libgpgme.so` |
|
||||
| <a name="http-feature">`http`</a> | `isahc` crate | Used for *HTTP* client needs, notably JMAP` |
|
||||
| <a name="tls-feature">`tls`</a> | `native-tls` crate | |
|
||||
| <a name="http-static-feature">`http-static`</a> | `isahc` crate with `static-curl` feature | Links with `curl` statically |
|
||||
| <a name="tls-static-feature">`tls-static`</a> | `native-tls` crate with `vendored` feature | Links with `OpenSSL` statically where it's used |
|
||||
| <a name="imap-trace-feature">`imap-trace`</a> | `imap` feature | Connection trace logs on the `trace` logging level |
|
||||
| <a name="jmap-trace-feature">`jmap-trace`</a> | `jmap` feature | Connection trace logs on the `trace` logging level |
|
||||
| <a name="nntp-trace-feature">`nntp-trace`</a> | `nntp` feature | Connection trace logs on the `trace` logging level |
|
||||
| <a name="smtp-trace-feature">`smtp-trace`</a> | `smtp` feature | Connection trace logs on the `trace` logging level |
|
||||
|
||||
Though not a feature, the presence of the environment variable `UNICODE_REGENERATE_TABLES` at compile-time of the `melib` crate will force the regeneration of Unicode tables from the crate's `build.rs` script.
|
||||
Otherwise the tables are already included with the source code, and there's no real reason to regenerate them unless you intend to modify the code or update to a new Unicode version.
|
||||
|
||||
## Example: Parsing bytes into an `Envelope`
|
||||
|
||||
|
@ -33,7 +48,7 @@ and body structure. Addresses in `To`, `From` fields etc are parsed into
|
|||
`Address` types.
|
||||
|
||||
```rust
|
||||
use melib::{Attachment, Envelope};
|
||||
use melib::{email::attachment_types::Text, Attachment, Envelope};
|
||||
|
||||
let raw_mail = r#"From: "some name" <some@example.com>
|
||||
To: "me" <myself@example.com>
|
||||
|
@ -80,15 +95,19 @@ ouiijDaaCCGQRgrpH3q4QYYXWDihxBE+7KCDDjnUIEVAADs=
|
|||
|
||||
let envelope = Envelope::from_bytes(raw_mail.as_bytes(), None).expect("Could not parse mail");
|
||||
assert_eq!(envelope.subject().as_ref(), "gratuitously encoded subject");
|
||||
assert_eq!(&envelope.message_id().display_bracket().to_string(), "<h2g7f.z0gy2pgaen5m@example.com>");
|
||||
assert_eq!(envelope.message_id(), "h2g7f.z0gy2pgaen5m@example.com");
|
||||
assert_eq!(&envelope.message_id().display_brackets().to_string(), "<h2g7f.z0gy2pgaen5m@example.com>");
|
||||
|
||||
let body = envelope.body_bytes(raw_mail.as_bytes());
|
||||
assert_eq!(body.content_type().to_string().as_str(), "multipart/mixed");
|
||||
|
||||
let body_text = body.text();
|
||||
let body_text = body.text(Text::Plain);
|
||||
assert_eq!(body_text.as_str(), "hello world.");
|
||||
|
||||
let subattachments: Vec<Attachment> = body.attachments();
|
||||
assert_eq!(subattachments.len(), 3);
|
||||
assert_eq!(subattachments[2].content_type().name().unwrap(), "test_image.gif");
|
||||
assert_eq!(
|
||||
subattachments[2].content_type().name().unwrap(),
|
||||
"test_image.gif"
|
||||
);
|
||||
```
|
||||
|
|
|
@ -508,7 +508,7 @@ pub trait MailBackend: ::std::fmt::Debug + Send + Sync {
|
|||
&self,
|
||||
query: crate::search::Query,
|
||||
mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>>;
|
||||
) -> ResultFuture<Vec<EnvelopeHash>>;
|
||||
|
||||
fn submit(
|
||||
&self,
|
||||
|
@ -650,7 +650,7 @@ impl SpecialUsageMailbox {
|
|||
}
|
||||
}
|
||||
|
||||
pub trait BackendMailbox: std::fmt::Debug {
|
||||
pub trait BackendMailbox: std::fmt::Debug + std::any::Any {
|
||||
fn hash(&self) -> MailboxHash;
|
||||
/// Final component of `path`.
|
||||
fn name(&self) -> &str;
|
||||
|
@ -665,6 +665,8 @@ pub trait BackendMailbox: std::fmt::Debug {
|
|||
fn special_usage(&self) -> SpecialUsageMailbox;
|
||||
fn permissions(&self) -> MailboxPermissions;
|
||||
fn count(&self) -> Result<(usize, usize)>;
|
||||
fn as_any(&self) -> &dyn Any;
|
||||
fn as_any_mut(&mut self) -> &mut dyn Any;
|
||||
}
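Adding `as_any`/`as_any_mut` to the trait lets callers that only hold a `dyn BackendMailbox` recover the backend-specific concrete type when they need to. A minimal sketch of the downcast pattern with a hypothetical mailbox type (not one of melib's real backends):

```rust
use std::any::Any;

trait BackendMailbox {
    fn name(&self) -> &str;
    fn as_any(&self) -> &dyn Any;
}

// Hypothetical concrete type standing in for a backend's mailbox struct.
struct ExampleMailbox {
    name: String,
    unseen: usize,
}

impl BackendMailbox for ExampleMailbox {
    fn name(&self) -> &str {
        &self.name
    }

    // Returning `self` as `&dyn Any` is what makes downcasting possible.
    fn as_any(&self) -> &dyn Any {
        self
    }
}

fn main() {
    let mbox: Box<dyn BackendMailbox> = Box::new(ExampleMailbox {
        name: "INBOX".into(),
        unseen: 3,
    });
    // Generic code sees only the trait; backend-specific code can get the
    // concrete type back via downcast_ref.
    if let Some(concrete) = mbox.as_any().downcast_ref::<ExampleMailbox>() {
        assert_eq!(concrete.unseen, 3);
        assert_eq!(concrete.name(), "INBOX");
    }
}
```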
|
||||
|
||||
crate::declare_u64_hash!(AccountHash);
|
||||
|
|
|
@ -365,6 +365,10 @@ impl Collection {
|
|||
|
||||
pub fn insert(&self, envelope: Envelope, mailbox_hash: MailboxHash) -> bool {
|
||||
let hash = envelope.hash();
|
||||
|
||||
// Ensure mailbox exists in collection.
|
||||
self.new_mailbox(mailbox_hash);
|
||||
|
||||
self.mailboxes
|
||||
.write()
|
||||
.unwrap()
|
||||
|
|
|
@ -97,6 +97,12 @@ impl AccountSettings {
|
|||
self.extra.get("vcard_folder").map(String::as_str)
|
||||
}
|
||||
|
||||
pub fn notmuch_address_book_query(&self) -> Option<&str> {
|
||||
self.extra
|
||||
.get("notmuch_address_book_query")
|
||||
.map(String::as_str)
|
||||
}
|
||||
|
||||
/// Get the server password, either directly from the `server_password`
|
||||
/// settings value, or by running the `server_password_command` and reading
|
||||
/// the output.
|
||||
|
@ -152,6 +158,29 @@ impl AccountSettings {
|
|||
.set_kind(ErrorKind::Configuration));
|
||||
}
|
||||
}
|
||||
_ = self.extra.swap_remove("notmuch_address_book_query");
|
||||
}
|
||||
{
|
||||
if let Some(mutt_alias_file) = self.extra.swap_remove("mutt_alias_file") {
|
||||
let path = Path::new(&mutt_alias_file).expand();
|
||||
|
||||
if !matches!(path.try_exists(), Ok(true)) {
|
||||
return Err(Error::new(format!(
|
||||
"`mutt_alias_file` path {} does not exist",
|
||||
path.display()
|
||||
))
|
||||
.set_details("`mutt_alias_file` must be an existing path of a mutt alias file")
|
||||
.set_kind(ErrorKind::Configuration));
|
||||
}
|
||||
if !path.is_file() {
|
||||
return Err(Error::new(format!(
|
||||
"`mutt_alias_file` path {} is not a file",
|
||||
path.display()
|
||||
))
|
||||
.set_details("`mutt_alias_file` must be a path of a mutt alias file")
|
||||
.set_kind(ErrorKind::Configuration));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
|
||||
pub mod jscontact;
|
||||
pub mod mutt;
|
||||
pub mod notmuchcontact;
|
||||
pub mod vcard;
|
||||
|
||||
mod card;
|
||||
|
@ -143,6 +144,60 @@ impl Contacts {
|
|||
}
|
||||
}
|
||||
}
|
||||
use std::process::Command;
|
||||
if let Some(notmuch_addressbook_query) = s.notmuch_address_book_query() {
|
||||
match Command::new("sh")
|
||||
.args([
|
||||
"-c",
|
||||
&format!(
|
||||
"notmuch address --format=json {}",
|
||||
notmuch_addressbook_query
|
||||
),
|
||||
])
|
||||
.stdin(std::process::Stdio::null())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.output()
|
||||
{
|
||||
Ok(notmuch_addresses) => {
|
||||
if notmuch_addresses.status.success() {
|
||||
match std::str::from_utf8(¬much_addresses.stdout) {
|
||||
Ok(notmuch_address_out) => {
|
||||
match notmuchcontact::parse_notmuch_contacts(notmuch_address_out) {
|
||||
Ok(contacts) => {
|
||||
for c in contacts {
|
||||
ret.add_card(c.clone());
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::warn!(
|
||||
"Unable to parse notmuch contact result into cards: {} {}",
|
||||
notmuch_address_out,
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::warn!(
|
||||
"Unable to read from notmuch address query: {} {}",
|
||||
notmuch_addressbook_query,
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"Error ({}) running notmuch address: {} {}",
|
||||
notmuch_addresses.status,
|
||||
String::from_utf8_lossy(¬much_addresses.stdout),
|
||||
String::from_utf8_lossy(¬much_addresses.stderr)
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => log::warn!("Unable to run notmuch address command: {}", e),
|
||||
}
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
|
|
63 melib/src/contacts/notmuchcontact.rs Normal file
|
@ -0,0 +1,63 @@
|
|||
//
|
||||
// meli
|
||||
//
|
||||
// Copyright 2024 Emmanouil Pitsidianakis <manos@pitsidianak.is>
|
||||
//
|
||||
// This file is part of meli.
|
||||
//
|
||||
// meli is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// meli is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with meli. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
use crate::{contacts::Card, error::Result};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
|
||||
pub struct NotmuchContact {
|
||||
pub name: String,
|
||||
pub address: String,
|
||||
#[serde(rename = "name-addr")]
|
||||
pub name_addr: String,
|
||||
}
|
||||
|
||||
pub fn parse_notmuch_contacts(input: &str) -> Result<Vec<Card>> {
|
||||
let mut cards = Vec::new();
|
||||
let abook = serde_json::from_str::<Vec<NotmuchContact>>(input)?;
|
||||
|
||||
for c in abook.iter() {
|
||||
cards.push(
|
||||
Card::new()
|
||||
.set_title(c.name_addr.clone())
|
||||
.set_email(c.address.clone())
|
||||
.set_name(c.name.clone())
|
||||
.set_external_resource(true)
|
||||
.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(cards)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_addressbook_notmuchcontact() {
|
||||
let cards = parse_notmuch_contacts(
|
||||
r#"[{"name": "Full Name", "address": "user@example.com", "name-addr": "Full Name <user@example.com>"},
|
||||
{"name": "Full2 Name", "address": "user2@example.com", "name-addr": "Full2 Name <user2@example.com>"}]"#
|
||||
).unwrap();
|
||||
assert_eq!(cards[0].name(), "Full Name");
|
||||
assert_eq!(cards[0].title(), "Full Name <user@example.com>");
|
||||
assert_eq!(cards[0].email(), "user@example.com");
|
||||
assert_eq!(cards[1].name(), "Full2 Name");
|
||||
assert_eq!(cards[1].title(), "Full2 Name <user2@example.com>");
|
||||
assert_eq!(cards[1].email(), "user2@example.com");
|
||||
}
|
|
@ -604,7 +604,7 @@ impl StrBuild for MessageID {
|
|||
|
||||
struct MessageIDBracket<'a>(&'a MessageID);
|
||||
|
||||
impl<'a> std::fmt::Display for MessageIDBracket<'a> {
|
||||
impl std::fmt::Display for MessageIDBracket<'_> {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(fmt, "<")?;
|
||||
write!(fmt, "{}", self.0)?;
|
||||
|
|
|
@ -50,7 +50,7 @@ pub struct DecodeOptions<'att> {
|
|||
pub force_charset: Option<Charset>,
|
||||
}
|
||||
|
||||
impl<'att> From<Option<Charset>> for DecodeOptions<'att> {
|
||||
impl From<Option<Charset>> for DecodeOptions<'_> {
|
||||
fn from(force_charset: Option<Charset>) -> Self {
|
||||
Self {
|
||||
filter: None,
|
||||
|
@ -813,6 +813,9 @@ impl Attachment {
|
|||
ret.push_str(&boundary_start);
|
||||
into_raw_helper(p, ret);
|
||||
}
|
||||
if !ret.ends_with("\r\n") {
|
||||
ret.push_str("\r\n");
|
||||
}
|
||||
ret.push_str(&format!("--{}--\r\n\r\n", boundary));
|
||||
}
|
||||
ContentType::MessageRfc822 => {
|
||||
|
|
|
@ -340,7 +340,7 @@ impl<'a> PartialEq<&'a Self> for HeaderName {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<HeaderName> for &'a HeaderName {
|
||||
impl PartialEq<HeaderName> for &HeaderName {
|
||||
#[inline]
|
||||
fn eq(&self, other: &HeaderName) -> bool {
|
||||
*other == *self
|
||||
|
@ -400,7 +400,7 @@ impl<'a> PartialEq<&'a str> for HeaderName {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<HeaderName> for &'a str {
|
||||
impl PartialEq<HeaderName> for &str {
|
||||
/// Performs a case-insensitive comparison of the string against the header
|
||||
/// name
|
||||
#[inline]
|
||||
|
@ -491,7 +491,7 @@ struct AsciiIgnoreCaseCmp<'a, 'b> {
|
|||
b: &'b [u8],
|
||||
}
|
||||
|
||||
impl<'a, 'b> Iterator for AsciiIgnoreCaseCmp<'a, 'b> {
|
||||
impl Iterator for AsciiIgnoreCaseCmp<'_, '_> {
|
||||
type Item = ();
|
||||
|
||||
fn next(&mut self) -> Option<()> {
|
||||
|
|
|
@ -400,7 +400,7 @@ pub trait BytesIterExt {
|
|||
fn join(&mut self, sep: u8) -> Vec<u8>;
|
||||
}
|
||||
|
||||
impl<'a, P: for<'r> FnMut(&'r u8) -> bool> BytesIterExt for std::slice::Split<'a, u8, P> {
|
||||
impl<P: for<'r> FnMut(&'r u8) -> bool> BytesIterExt for std::slice::Split<'_, u8, P> {
|
||||
fn join(&mut self, sep: u8) -> Vec<u8> {
|
||||
self.fold(vec![], |mut acc, el| {
|
||||
if !acc.is_empty() {
|
||||
|
@ -451,22 +451,22 @@ pub mod dates {
|
|||
/// In the obsolete time zone, "UT" and "GMT" are indications of
|
||||
/// "Universal Time" and "Greenwich Mean Time", respectively, and are
|
||||
/// both semantically identical to "+0000".
|
||||
|
||||
///
|
||||
/// The remaining three character zones are the US time zones. The first
|
||||
/// letter, "E", "C", "M", or "P" stands for "Eastern", "Central",
|
||||
/// "Mountain", and "Pacific". The second letter is either "S" for
|
||||
/// "Standard" time, or "D" for "Daylight Savings" (or summer) time.
|
||||
/// Their interpretations are as follows:
|
||||
|
||||
/// EDT is semantically equivalent to -0400
|
||||
/// EST is semantically equivalent to -0500
|
||||
/// CDT is semantically equivalent to -0500
|
||||
/// CST is semantically equivalent to -0600
|
||||
/// MDT is semantically equivalent to -0600
|
||||
/// MST is semantically equivalent to -0700
|
||||
/// PDT is semantically equivalent to -0700
|
||||
/// PST is semantically equivalent to -0800
|
||||
|
||||
///
|
||||
/// - EDT is semantically equivalent to `-0400`
|
||||
/// - EST is semantically equivalent to `-0500`
|
||||
/// - CDT is semantically equivalent to `-0500`
|
||||
/// - CST is semantically equivalent to `-0600`
|
||||
/// - MDT is semantically equivalent to `-0600`
|
||||
/// - MST is semantically equivalent to `-0700`
|
||||
/// - PDT is semantically equivalent to `-0700`
|
||||
/// - PST is semantically equivalent to `-0800`
|
||||
///
|
||||
/// The 1 character military time zones were defined in a non-standard
|
||||
/// way in RFC0822 and are therefore unpredictable in their meaning.
|
||||
/// The original definitions of the military zones "A" through "I" are
|
||||
|
@ -477,7 +477,7 @@ pub mod dates {
|
|||
/// the error in RFC0822, they SHOULD all be considered equivalent to
|
||||
/// "-0000" unless there is out-of-band information confirming their
|
||||
/// meaning.
|
||||
|
||||
///
|
||||
/// Other multi-character (usually between 3 and 5) alphabetic time zones
|
||||
/// have been used in Internet messages. Any such time zone whose
|
||||
/// meaning is not known SHOULD be considered equivalent to "-0000"
|
||||
|
@ -2030,7 +2030,7 @@ pub mod attachments {
|
|||
pub mod encodings {
|
||||
//! Email encodings (quoted printable, `MIME`).
|
||||
use data_encoding::BASE64_MIME;
|
||||
use encoding::{all::*, DecoderTrap, Encoding};
|
||||
use encoding_rs::*;
|
||||
|
||||
use super::*;
|
||||
use crate::email::attachment_types::Charset;
|
||||
|
@ -2166,38 +2166,31 @@ pub mod encodings {
|
|||
pub fn decode_charset(s: &[u8], charset: Charset) -> Result<String> {
|
||||
match charset {
|
||||
Charset::UTF8 | Charset::Ascii => Ok(String::from_utf8_lossy(s).to_string()),
|
||||
Charset::ISO8859_1 => Ok(ISO_8859_1.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_2 => Ok(ISO_8859_2.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_3 => Ok(ISO_8859_3.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_4 => Ok(ISO_8859_4.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_5 => Ok(ISO_8859_5.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_6 => Ok(ISO_8859_6.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_7 => Ok(ISO_8859_7.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_8 => Ok(ISO_8859_8.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_10 => Ok(ISO_8859_10.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_13 => Ok(ISO_8859_13.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_14 => Ok(ISO_8859_14.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_15 => Ok(ISO_8859_15.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_16 => Ok(ISO_8859_16.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::GBK => Ok(GBK.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::Windows1250 => Ok(WINDOWS_1250.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::Windows1251 => Ok(WINDOWS_1251.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::Windows1252 => Ok(WINDOWS_1252.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::Windows1253 => Ok(WINDOWS_1253.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::KOI8R => Ok(KOI8_R.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::KOI8U => Ok(KOI8_U.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::BIG5 => Ok(BIG5_2003.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::GB2312 => {
|
||||
Ok(encoding::codec::simpchinese::GBK_ENCODING.decode(s, DecoderTrap::Strict)?)
|
||||
}
|
||||
Charset::GB18030 => Ok(
|
||||
encoding::codec::simpchinese::GB18030_ENCODING.decode(s, DecoderTrap::Strict)?
|
||||
),
|
||||
Charset::UTF16 => {
|
||||
Ok(encoding::codec::utf_16::UTF_16LE_ENCODING.decode(s, DecoderTrap::Strict)?)
|
||||
}
|
||||
Charset::ISO2022JP => Ok(ISO_2022_JP.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::EUCJP => Ok(EUC_JP.decode(s, DecoderTrap::Strict)?),
|
||||
Charset::ISO8859_2 => Ok(ISO_8859_2.decode(s).0.to_string()),
|
||||
Charset::ISO8859_3 => Ok(ISO_8859_3.decode(s).0.to_string()),
|
||||
Charset::ISO8859_4 => Ok(ISO_8859_4.decode(s).0.to_string()),
|
||||
Charset::ISO8859_5 => Ok(ISO_8859_5.decode(s).0.to_string()),
|
||||
Charset::ISO8859_6 => Ok(ISO_8859_6.decode(s).0.to_string()),
|
||||
Charset::ISO8859_7 => Ok(ISO_8859_7.decode(s).0.to_string()),
|
||||
Charset::ISO8859_8 => Ok(ISO_8859_8.decode(s).0.to_string()),
|
||||
Charset::ISO8859_10 => Ok(ISO_8859_10.decode(s).0.to_string()),
|
||||
Charset::ISO8859_13 => Ok(ISO_8859_13.decode(s).0.to_string()),
|
||||
Charset::ISO8859_14 => Ok(ISO_8859_14.decode(s).0.to_string()),
|
||||
Charset::ISO8859_15 => Ok(ISO_8859_15.decode(s).0.to_string()),
|
||||
Charset::ISO8859_16 => Ok(ISO_8859_16.decode(s).0.to_string()),
|
||||
Charset::GBK => Ok(GBK.decode(s).0.to_string()),
|
||||
Charset::Windows1250 => Ok(WINDOWS_1250.decode(s).0.to_string()),
|
||||
Charset::Windows1251 => Ok(WINDOWS_1251.decode(s).0.to_string()),
|
||||
Charset::ISO8859_1 | Charset::Windows1252 => Ok(WINDOWS_1252.decode(s).0.to_string()),
|
||||
Charset::Windows1253 => Ok(WINDOWS_1253.decode(s).0.to_string()),
|
||||
Charset::KOI8R => Ok(KOI8_R.decode(s).0.to_string()),
|
||||
Charset::KOI8U => Ok(KOI8_U.decode(s).0.to_string()),
|
||||
Charset::BIG5 => Ok(BIG5.decode(s).0.to_string()),
|
||||
Charset::GB2312 => Ok(GBK.decode(s).0.to_string()),
|
||||
Charset::GB18030 => Ok(GB18030.decode(s).0.to_string()),
|
||||
Charset::UTF16 => Ok(UTF_16LE.decode(s).0.to_string()),
|
||||
Charset::ISO2022JP => Ok(ISO_2022_JP.decode(s).0.to_string()),
|
||||
Charset::EUCJP => Ok(EUC_JP.decode(s).0.to_string()),
|
||||
}
|
||||
}
|
||||
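The rewritten match arms all follow the same `encoding_rs` call shape: `decode` returns the decoded text as a `Cow<str>`, the encoding actually used, and a malformed-sequence flag, and `.0.to_string()` keeps only the text. A standalone sketch of that call (assumes the `encoding_rs` crate; the bytes are illustrative):

```rust
fn main() {
    use encoding_rs::WINDOWS_1252;

    // 0xE9 is "é" in windows-1252.
    let bytes = b"caf\xe9";
    // `decode` returns (decoded text, encoding actually used, had_errors).
    let (cow, _encoding_used, had_errors) = WINDOWS_1252.decode(bytes);
    assert!(!had_errors);
    assert_eq!(cow.as_ref(), "café");
    // The match arms above keep only the text, i.e. cow.to_string().
}
```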
|
||||
|
|
|
@ -464,7 +464,7 @@ impl From<io::Error> for Error {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> From<Cow<'a, str>> for Error {
|
||||
impl From<Cow<'_, str>> for Error {
|
||||
#[inline]
|
||||
fn from(err: Cow<'_, str>) -> Self {
|
||||
Self::new(err.to_string())
|
||||
|
|
32 melib/src/gpgme/README.md Normal file
|
@ -0,0 +1,32 @@
|
|||
<!-- SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later -->
# Interfacing with `libgpgme`

In order to support both 32-bit and 64-bit compilation targets, the bindings
for `libgpgme` have been generated twice: once on a 64-bit host and once on a
32-bit host (specifically `arm-unknown-linux-gnueabihf`).

There are two shell scripts in this directory:

- `./bindgen-gpgme-funcs.sh`: This script invokes the `bindgen` CLI to generate **only** bindings for the
  functions we need from `libgpgme`.
  The output is included inside `./bindings.rs` and wrapped with a declarative
  macro that converts the function declarations into type definitions, because this is the only way we
  can access functions via symbols with the `libloading` crate.
  Otherwise, the conversion would have to be done manually.

  Note that running this script on 32-bit and 64-bit hosts should produce the same output.
- `./bindgen-gpgme-rest.sh`: This script invokes the `bindgen` CLI to generate
  bindings for types and global variables from the `libgpgme` header files.
  This part is where the target pointer width matters, so as a result we have
  checked in two source files: `./bindings_rest.rs` for the "normal" world and
  `./bindings_rest_32.rs` for the still 32-bit world.

`./bindings.rs` includes the correct version based on the `target_pointer_width` value at compile time.

Is this the best we can do? No, but it's the best we can do for now.

*NOTE*: Generating bindings with `bindgen` on 32-bit hosts should require
appending `-- -D_FILE_OFFSET_BITS=64` to the `bindgen` invocation in those
scripts; the `-- [...]` part means these arguments get redirected to `clang`,
and what happens is we define a preprocessor symbol `_FILE_OFFSET_BITS` with
the value `64`.
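The `target_pointer_width` dispatch mentioned above is an ordinary `cfg` switch. A self-contained sketch of the mechanism, using inline modules instead of the crate's actual `bindings_rest*.rs` files:

```rust
// Inline stand-ins for the two generated bindings files.
#[cfg(target_pointer_width = "64")]
mod bindings_rest {
    pub const WORD_BITS: u32 = 64;
}

#[cfg(target_pointer_width = "32")]
mod bindings_rest {
    pub const WORD_BITS: u32 = 32;
}

fn main() {
    // Exactly one of the modules above is compiled in, selected at compile time.
    println!("compiled against {}-bit bindings", bindings_rest::WORD_BITS);
}
```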
|
73 melib/src/gpgme/bindgen-gpgme-funcs.sh Normal file
|
@ -0,0 +1,73 @@
|
|||
#!/bin/zsh
|
||||
# SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
# Find out which functions we call with:
|
||||
#
|
||||
# rg call melib/src/gpgme | sed -r -e 's/^.+, (gpgme_[^)]+)[)].*$/\1/p' | sort | sort -u
|
||||
|
||||
bindgen \
|
||||
-o bindings_funcs.rs \
|
||||
--raw-line "// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later" \
|
||||
--raw-line "" \
|
||||
--raw-line "" \
|
||||
--raw-line "convert_to_typedefs! {" \
|
||||
--generate "functions" \
|
||||
--generate-block \
|
||||
--rust-target "1.68" \
|
||||
--use-core \
|
||||
--rustfmt-configuration-file `realpath ../../../rustfmt.toml` \
|
||||
--merge-extern-blocks \
|
||||
--sort-semantically \
|
||||
--flexarray-dst \
|
||||
--wrap-unsafe-ops \
|
||||
--no-prepend-enum-name \
|
||||
--blocklist-type FILE \
|
||||
--blocklist-type _IO_FILE \
|
||||
--blocklist-type _IO_lock_t \
|
||||
--allowlist-function gpgme_check_version \
|
||||
--allowlist-function gpgme_ctx_get_engine_info \
|
||||
--allowlist-function gpgme_ctx_set_engine_info \
|
||||
--allowlist-function gpgme_data_new \
|
||||
--allowlist-function gpgme_data_new_from_file \
|
||||
--allowlist-function gpgme_data_new_from_mem \
|
||||
--allowlist-function gpgme_data_read \
|
||||
--allowlist-function gpgme_data_release \
|
||||
--allowlist-function gpgme_data_seek \
|
||||
--allowlist-function gpgme_data_write \
|
||||
--allowlist-function gpgme_get_armor \
|
||||
--allowlist-function gpgme_get_ctx_flag \
|
||||
--allowlist-function gpgme_get_offline \
|
||||
--allowlist-function gpgme_get_pinentry_mode \
|
||||
--allowlist-function gpgme_key_ref \
|
||||
--allowlist-function gpgme_key_unref \
|
||||
--allowlist-function gpgme_new \
|
||||
--allowlist-function gpgme_op_decrypt_result \
|
||||
--allowlist-function gpgme_op_decrypt_start \
|
||||
--allowlist-function gpgme_op_encrypt_result \
|
||||
--allowlist-function gpgme_op_encrypt_start \
|
||||
--allowlist-function gpgme_op_import \
|
||||
--allowlist-function gpgme_op_import_result \
|
||||
--allowlist-function gpgme_op_keylist_end \
|
||||
--allowlist-function gpgme_op_keylist_start \
|
||||
--allowlist-function gpgme_op_sign_start \
|
||||
--allowlist-function gpgme_op_verify_result \
|
||||
--allowlist-function gpgme_op_verify_start \
|
||||
--allowlist-function gpgme_release \
|
||||
--allowlist-function gpgme_set_armor \
|
||||
--allowlist-function gpgme_set_ctx_flag \
|
||||
--allowlist-function gpgme_set_io_cbs \
|
||||
--allowlist-function gpgme_set_offline \
|
||||
--allowlist-function gpgme_set_passphrase_cb \
|
||||
--allowlist-function gpgme_set_pinentry_mode \
|
||||
--allowlist-function gpgme_set_protocol \
|
||||
--allowlist-function gpgme_signers_add \
|
||||
--allowlist-function gpgme_signers_clear \
|
||||
--allowlist-function gpgme_strerror_r \
|
||||
--allowlist-function gpgme_strerror \
|
||||
--no-size_t-is-usize \
|
||||
--disable-header-comment \
|
||||
--emit-diagnostics \
|
||||
--experimental \
|
||||
/usr/include/gpgme.h
|
||||
|
||||
sed --in-place -e 's/\s*extern "C" [{]//' bindings_funcs.rs
|
56 melib/src/gpgme/bindgen-gpgme-rest.sh Normal file
|
@ -0,0 +1,56 @@
|
|||
#!/bin/zsh
|
||||
# SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
bindgen \
|
||||
-o bindings_rest.rs \
|
||||
--raw-line "// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later" \
|
||||
--ignore-functions \
|
||||
--bitfield-enum 'gpgme_encrypt_flags_t' \
|
||||
--bitfield-enum 'gpgme_decrypt_flags_t' \
|
||||
--bitfield-enum 'gpgme_sigsum_t' \
|
||||
--impl-debug \
|
||||
--impl-partialeq \
|
||||
--with-derive-default \
|
||||
--with-derive-hash \
|
||||
--with-derive-partialeq \
|
||||
--with-derive-partialord \
|
||||
--with-derive-eq \
|
||||
--with-derive-ord \
|
||||
--generate-block \
|
||||
--generate-cstr \
|
||||
--rust-target "1.68" \
|
||||
--use-core \
|
||||
--rustfmt-configuration-file `realpath ../../../rustfmt.toml` \
|
||||
--merge-extern-blocks \
|
||||
--sort-semantically \
|
||||
--flexarray-dst \
|
||||
--wrap-unsafe-ops \
|
||||
--no-prepend-enum-name \
|
||||
--rustified-enum 'gpgme_status_code_t.*' \
|
||||
--rustified-enum 'gpg_err_source_t.*' \
|
||||
--rustified-enum 'gpg_err_code_t.*' \
|
||||
--rustified-enum 'gpgme_data_encoding_t.*' \
|
||||
--rustified-enum 'gpgme_data_type_t' \
|
||||
--rustified-enum 'gpgme_pubkey_algo_t' \
|
||||
--rustified-enum 'gpgme_hash_algo_t' \
|
||||
--rustified-enum 'gpgme_sig_mode_t' \
|
||||
--rustified-enum 'gpgme_validity_t' \
|
||||
--rustified-enum 'gpgme_tofu_policy_t' \
|
||||
--rustified-enum 'gpgme_keyorg_t' \
|
||||
--rustified-enum 'gpgme_protocol_t' \
|
||||
--rustified-enum 'gpgme_pinentry_mode_t' \
|
||||
--rustified-enum 'gpgme_event_io_t' \
|
||||
--rustified-enum 'gpgme_conf_level_t' \
|
||||
--rustified-enum 'gpgme_conf_type_t' \
|
||||
--rustified-enum '_gpgme_sig_stat_t' \
|
||||
--rustified-enum '_gpgme_attr_t' \
|
||||
--blocklist-type FILE \
|
||||
--blocklist-type _IO_FILE \
|
||||
--blocklist-type _IO_lock_t \
|
||||
--allowlist-var GPGME_VERSION \
|
||||
--allowlist-type gpgme_io_event_done_data \
|
||||
--no-size_t-is-usize \
|
||||
--emit-diagnostics \
|
||||
--experimental \
|
||||
--allowlist-file /usr/include/gpgme.h \
|
||||
/usr/include/gpgme.h
|
File diff suppressed because it is too large
110 melib/src/gpgme/bindings_funcs.rs Normal file
|
@ -0,0 +1,110 @@
|
|||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
|
||||
convert_to_typedefs! {
|
||||
|
||||
|
||||
pub fn gpgme_strerror(err: gpgme_error_t) -> *const ::core::ffi::c_char;
|
||||
pub fn gpgme_strerror_r(
|
||||
err: gpg_error_t,
|
||||
buf: *mut ::core::ffi::c_char,
|
||||
buflen: size_t,
|
||||
) -> ::core::ffi::c_int;
|
||||
pub fn gpgme_new(ctx: *mut gpgme_ctx_t) -> gpgme_error_t;
|
||||
pub fn gpgme_release(ctx: gpgme_ctx_t);
|
||||
pub fn gpgme_set_ctx_flag(
|
||||
ctx: gpgme_ctx_t,
|
||||
name: *const ::core::ffi::c_char,
|
||||
value: *const ::core::ffi::c_char,
|
||||
) -> gpgme_error_t;
|
||||
pub fn gpgme_get_ctx_flag(
|
||||
ctx: gpgme_ctx_t,
|
||||
name: *const ::core::ffi::c_char,
|
||||
) -> *const ::core::ffi::c_char;
|
||||
pub fn gpgme_set_protocol(ctx: gpgme_ctx_t, proto: gpgme_protocol_t) -> gpgme_error_t;
pub fn gpgme_set_armor(ctx: gpgme_ctx_t, yes: ::core::ffi::c_int);
pub fn gpgme_get_armor(ctx: gpgme_ctx_t) -> ::core::ffi::c_int;
pub fn gpgme_set_offline(ctx: gpgme_ctx_t, yes: ::core::ffi::c_int);
pub fn gpgme_get_offline(ctx: gpgme_ctx_t) -> ::core::ffi::c_int;
pub fn gpgme_set_pinentry_mode(ctx: gpgme_ctx_t, mode: gpgme_pinentry_mode_t) -> gpgme_error_t;
pub fn gpgme_get_pinentry_mode(ctx: gpgme_ctx_t) -> gpgme_pinentry_mode_t;
pub fn gpgme_set_passphrase_cb(ctx: gpgme_ctx_t, cb: gpgme_passphrase_cb_t, hook_value: *mut ::core::ffi::c_void);
pub fn gpgme_ctx_get_engine_info(ctx: gpgme_ctx_t) -> gpgme_engine_info_t;
pub fn gpgme_ctx_set_engine_info(ctx: gpgme_ctx_t, proto: gpgme_protocol_t, file_name: *const ::core::ffi::c_char, home_dir: *const ::core::ffi::c_char) -> gpgme_error_t;
pub fn gpgme_signers_clear(ctx: gpgme_ctx_t);
pub fn gpgme_signers_add(ctx: gpgme_ctx_t, key: gpgme_key_t) -> gpgme_error_t;
pub fn gpgme_set_io_cbs(ctx: gpgme_ctx_t, io_cbs: gpgme_io_cbs_t);
pub fn gpgme_data_read(dh: gpgme_data_t, buffer: *mut ::core::ffi::c_void, size: size_t) -> ssize_t;
pub fn gpgme_data_write(dh: gpgme_data_t, buffer: *const ::core::ffi::c_void, size: size_t) -> ssize_t;
pub fn gpgme_data_seek(dh: gpgme_data_t, offset: off_t, whence: ::core::ffi::c_int) -> off_t;
pub fn gpgme_data_new(r_dh: *mut gpgme_data_t) -> gpgme_error_t;
pub fn gpgme_data_release(dh: gpgme_data_t);
pub fn gpgme_data_new_from_mem(r_dh: *mut gpgme_data_t, buffer: *const ::core::ffi::c_char, size: size_t, copy: ::core::ffi::c_int) -> gpgme_error_t;
pub fn gpgme_data_new_from_file(r_dh: *mut gpgme_data_t, fname: *const ::core::ffi::c_char, copy: ::core::ffi::c_int) -> gpgme_error_t;
pub fn gpgme_key_ref(key: gpgme_key_t);
pub fn gpgme_key_unref(key: gpgme_key_t);
pub fn gpgme_op_encrypt_result(ctx: gpgme_ctx_t) -> gpgme_encrypt_result_t;
pub fn gpgme_op_encrypt_start(ctx: gpgme_ctx_t, recp: *mut gpgme_key_t, flags: gpgme_encrypt_flags_t, plain: gpgme_data_t, cipher: gpgme_data_t) -> gpgme_error_t;
pub fn gpgme_op_decrypt_result(ctx: gpgme_ctx_t) -> gpgme_decrypt_result_t;
pub fn gpgme_op_decrypt_start(ctx: gpgme_ctx_t, cipher: gpgme_data_t, plain: gpgme_data_t) -> gpgme_error_t;
pub fn gpgme_op_sign_start(ctx: gpgme_ctx_t, plain: gpgme_data_t, sig: gpgme_data_t, flags: gpgme_sig_mode_t) -> gpgme_error_t;
pub fn gpgme_op_verify_result(ctx: gpgme_ctx_t) -> gpgme_verify_result_t;
pub fn gpgme_op_verify_start(ctx: gpgme_ctx_t, sig: gpgme_data_t, signed_text: gpgme_data_t, plaintext: gpgme_data_t) -> gpgme_error_t;
pub fn gpgme_op_import_result(ctx: gpgme_ctx_t) -> gpgme_import_result_t;
pub fn gpgme_op_import(ctx: gpgme_ctx_t, keydata: gpgme_data_t) -> gpgme_error_t;
pub fn gpgme_op_keylist_start(ctx: gpgme_ctx_t, pattern: *const ::core::ffi::c_char, secret_only: ::core::ffi::c_int) -> gpgme_error_t;
pub fn gpgme_op_keylist_end(ctx: gpgme_ctx_t) -> gpgme_error_t;
pub fn gpgme_check_version(req_version: *const ::core::ffi::c_char) -> *const ::core::ffi::c_char;
}
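These are plain extern "C" declarations; in this crate the gpgme symbols appear to be resolved at runtime through libloading (see the call! macro in the diffs further down). A minimal sketch, in Rust, of resolving and calling one such symbol with libloading; the library file name, the helper function, and the error handling are illustrative assumptions, not the crate's actual loading code.

use std::ffi::CStr;

use libloading::{Library, Symbol};

type CheckVersionFn =
    unsafe extern "C" fn(*const std::os::raw::c_char) -> *const std::os::raw::c_char;

fn gpgme_version(lib: &Library) -> Result<String, Box<dyn std::error::Error>> {
    unsafe {
        // Look up the symbol by its NUL-terminated name.
        let check_version: Symbol<CheckVersionFn> = lib.get(b"gpgme_check_version\0")?;
        // Passing NULL asks gpgme for the installed library's version string
        // instead of performing a minimum-version check.
        let ptr = check_version(std::ptr::null());
        if ptr.is_null() {
            return Err("gpgme_check_version returned NULL".into());
        }
        Ok(CStr::from_ptr(ptr).to_string_lossy().into_owned())
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The shared object name is an assumption; on most Linux systems it is
    // libgpgme.so.11.
    let lib = unsafe { Library::new("libgpgme.so.11")? };
    println!("gpgme version: {}", gpgme_version(&lib)?);
    Ok(())
}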
melib/src/gpgme/bindings_rest.rs (new file, 5100 lines): diff suppressed because it is too large.
melib/src/gpgme/bindings_rest_32.rs (new file, 5100 lines): diff suppressed because it is too large.
@ -27,7 +27,7 @@ use std::{
|
|||
ptr::NonNull,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
use super::{bindings::gpgme_io_event_done_data, *};
|
||||
|
||||
#[derive(Debug)]
|
||||
#[repr(C)]
|
||||
|
@ -142,14 +142,14 @@ pub unsafe extern "C" fn gpgme_event_io_cb(
|
|||
r#type: gpgme_event_io_t,
|
||||
type_data: *mut c_void,
|
||||
) {
|
||||
if r#type == gpgme_event_io_t_GPGME_EVENT_START {
|
||||
if r#type == gpgme_event_io_t::GPGME_EVENT_START {
|
||||
return;
|
||||
}
|
||||
|
||||
// SAFETY: This is the iostate reference that was leaked in `Context::new`.
|
||||
let io_state: IoStateWrapper = unsafe { IoStateWrapper::from_raw(data) };
|
||||
|
||||
if r#type == gpgme_event_io_t_GPGME_EVENT_DONE {
|
||||
if r#type == gpgme_event_io_t::GPGME_EVENT_DONE {
|
||||
let Some(status) = NonNull::new(type_data.cast::<gpgme_io_event_done_data>()) else {
|
||||
log::error!("gpgme_event_io_cb DONE event with NULL type_data. This is a gpgme bug.",);
|
||||
return;
|
||||
|
@ -166,7 +166,7 @@ pub unsafe extern "C" fn gpgme_event_io_cb(
|
|||
return;
|
||||
}
|
||||
|
||||
if r#type == gpgme_event_io_t_GPGME_EVENT_NEXT_KEY {
|
||||
if r#type == gpgme_event_io_t::GPGME_EVENT_NEXT_KEY {
|
||||
let Some(ptr) = NonNull::new(type_data.cast::<_gpgme_key>()) else {
|
||||
log::error!(
|
||||
"gpgme_event_io_cb NEXT_KEY event with NULL type_data. This is a gpgme bug.",
|
||||
|
@ -181,7 +181,7 @@ pub unsafe extern "C" fn gpgme_event_io_cb(
|
|||
|
||||
log::error!(
|
||||
"gpgme_event_io_cb called with unexpected event type: {}",
|
||||
r#type
|
||||
r#type as u32
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -190,7 +190,12 @@ impl Read for Data {
|
|||
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||
let result = unsafe {
|
||||
let (buf, len) = (buf.as_mut_ptr() as *mut _, buf.len());
|
||||
call!(self.lib, gpgme_data_read)(self.inner.as_ptr(), buf, len)
|
||||
call!(self.lib, gpgme_data_read)(
|
||||
self.inner.as_ptr(),
|
||||
buf,
|
||||
len.try_into()
|
||||
.map_err(|_| io::Error::from_raw_os_error(libc::EOVERFLOW))?,
|
||||
)
|
||||
};
|
||||
if result >= 0 {
|
||||
Ok(result as usize)
|
||||
|
@ -205,7 +210,12 @@ impl Write for Data {
|
|||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
let result = unsafe {
|
||||
let (buf, len) = (buf.as_ptr() as *const _, buf.len());
|
||||
call!(self.lib, gpgme_data_write)(self.inner.as_ptr(), buf, len)
|
||||
call!(self.lib, gpgme_data_write)(
|
||||
self.inner.as_ptr(),
|
||||
buf,
|
||||
len.try_into()
|
||||
.map_err(|_| io::Error::from_raw_os_error(libc::EOVERFLOW))?,
|
||||
)
|
||||
};
|
||||
if result >= 0 {
|
||||
Ok(result as usize)
|
||||
|
@ -233,10 +243,13 @@ impl Seek for Data {
|
|||
io::SeekFrom::Current(off) => (off, libc::SEEK_CUR),
|
||||
};
|
||||
let result = unsafe {
|
||||
// Allow .into() for both 32bit and 64bit targets
|
||||
#[allow(clippy::useless_conversion)]
|
||||
call!(self.lib, gpgme_data_seek)(
|
||||
self.inner.as_ptr(),
|
||||
libc::off_t::try_from(off)
|
||||
.map_err(|_| io::Error::from_raw_os_error(libc::EOVERFLOW))?,
|
||||
.map_err(|_| io::Error::from_raw_os_error(libc::EOVERFLOW))?
|
||||
.into(),
|
||||
whence,
|
||||
)
|
||||
};
|
||||
|
|
|
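The Read and Write implementations above now convert the buffer length with try_into before handing it to gpgme_data_read/gpgme_data_write, surfacing overflow as EOVERFLOW instead of truncating. A small standalone sketch of that conversion pattern; Size is a stand-in for the generated bindings::size_t and the use of the libc crate mirrors the error mapping in the diff.

use std::io;

// Stand-in for bindings::size_t, which can be 32 bits wide on 32-bit targets;
// that narrowing is the reason the conversion is fallible at all.
type Size = u32;

fn ffi_len(len: usize) -> io::Result<Size> {
    // Map an out-of-range length to EOVERFLOW, as the Read/Write impls do.
    Size::try_from(len).map_err(|_| io::Error::from_raw_os_error(libc::EOVERFLOW))
}

fn main() {
    assert_eq!(ffi_len(1024).unwrap(), 1024);
}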
@ -128,6 +128,67 @@ impl Key {
|
|||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Key {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(fmt, "{} ", self.fingerprint())?;
|
||||
if let Some(uid) = self.primary_uid() {
|
||||
write!(fmt, "{}", uid)?;
|
||||
} else {
|
||||
write!(fmt, "(missing primary uid)")?;
|
||||
}
|
||||
// Write some properties as a list inside square brackets
|
||||
write!(fmt, " [")?;
|
||||
{
|
||||
let revoked = self.revoked();
|
||||
let expired = self.expired();
|
||||
let disabled = self.disabled();
|
||||
let invalid = self.invalid();
|
||||
let can_encrypt = self.can_encrypt();
|
||||
let can_sign = self.can_sign();
|
||||
let secret = self.secret();
|
||||
let mut empty = true;
|
||||
macro_rules! write_property {
|
||||
($cond:ident, $lit:literal, $else:literal$(,)?) => {{
|
||||
if !empty {
|
||||
write!(fmt, ",")?;
|
||||
}
|
||||
if $cond {
|
||||
write!(fmt, $lit)?;
|
||||
} else {
|
||||
write!(fmt, $else)?;
|
||||
}
|
||||
empty = false;
|
||||
}};
|
||||
($cond:ident, $lit:literal$(,)?) => {{
|
||||
if $cond {
|
||||
if !empty {
|
||||
write!(fmt, ",")?;
|
||||
}
|
||||
write!(fmt, $lit)?;
|
||||
empty = false;
|
||||
}
|
||||
}};
|
||||
}
|
||||
macro_rules! write_properties {
|
||||
($(($cond:ident, $lit:literal $(, $else:literal)?)),*$(,)?) => {{
|
||||
$(write_property!($cond, $lit $(, $else)*);)*
|
||||
}};
|
||||
}
|
||||
write_properties! {
|
||||
(revoked, "revoked"),
|
||||
(expired, "expired"),
|
||||
(disabled, "disabled"),
|
||||
(invalid, "invalid"),
|
||||
(can_encrypt, "can encrypt"),
|
||||
(can_sign, "can sign"),
|
||||
(secret, "secret", "public"),
|
||||
}
|
||||
_ = empty;
|
||||
}
|
||||
write!(fmt, "]")
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Key {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fmt.debug_struct(crate::identify!(Key))
|
||||
|
@ -171,7 +232,7 @@ pub(super) struct InvalidKeysIter<'a> {
|
|||
_ph: std::marker::PhantomData<&'a _gpgme_invalid_key>,
|
||||
}
|
||||
|
||||
impl<'a> InvalidKeysIter<'a> {
|
||||
impl InvalidKeysIter<'_> {
|
||||
pub(super) fn new(ptr: gpgme_invalid_key_t, lib: Arc<libloading::Library>) -> Self {
|
||||
Self {
|
||||
lib,
|
||||
|
@ -181,7 +242,7 @@ impl<'a> InvalidKeysIter<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for InvalidKeysIter<'a> {
|
||||
impl Iterator for InvalidKeysIter<'_> {
|
||||
type Item = InvalidKeyError;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
|
|
|
@ -69,6 +69,14 @@ macro_rules! call {
|
|||
}};
|
||||
}
|
||||
|
||||
#[allow(
|
||||
non_camel_case_types,
|
||||
non_upper_case_globals,
|
||||
non_snake_case,
|
||||
clippy::useless_transmute,
|
||||
clippy::too_many_arguments,
|
||||
clippy::use_self
|
||||
)]
|
||||
pub mod bindings;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
@ -249,11 +257,10 @@ impl Context {
|
|||
}
|
||||
},
|
||||
);
|
||||
if unsafe { call!(&lib, gpgme_check_version)(GPGME_VERSION.as_bytes().as_ptr()) }.is_null()
|
||||
{
|
||||
if unsafe { call!(&lib, gpgme_check_version)(GPGME_VERSION.as_ptr()) }.is_null() {
|
||||
return Err(Error::new(format!(
|
||||
"Could not use libgpgme: requested version compatible with {} but got {}",
|
||||
GPGME_VERSION,
|
||||
GPGME_VERSION.to_string_lossy(),
|
||||
unsafe {
|
||||
CStr::from_ptr(call!(&lib, gpgme_check_version)(std::ptr::null_mut()))
|
||||
.to_string_lossy()
|
||||
|
@ -437,7 +444,10 @@ impl Context {
|
|||
call!(&self.inner.lib, gpgme_data_new_from_mem)(
|
||||
&mut ptr,
|
||||
bytes.as_ptr() as *const ::std::os::raw::c_char,
|
||||
bytes.len(),
|
||||
bytes
|
||||
.len()
|
||||
.try_into()
|
||||
.map_err(|_| std::io::Error::from_raw_os_error(libc::EOVERFLOW))?,
|
||||
1,
|
||||
),
|
||||
)?;
|
||||
|
@ -465,7 +475,7 @@ impl Context {
|
|||
let bytes = Pin::new(os_str.as_bytes().to_vec());
|
||||
let mut ptr = std::ptr::null_mut();
|
||||
unsafe {
|
||||
let ret: GpgmeError = call!(&self.inner.lib, gpgme_data_new_from_file)(
|
||||
let ret: gpgme_error_t = call!(&self.inner.lib, gpgme_data_new_from_file)(
|
||||
&mut ptr,
|
||||
bytes.as_ptr() as *const ::std::os::raw::c_char,
|
||||
1,
|
||||
|
@ -719,7 +729,7 @@ impl Context {
|
|||
self.inner.ptr.as_ptr(),
|
||||
text.inner.as_mut(),
|
||||
sig,
|
||||
gpgme_sig_mode_t_GPGME_SIG_MODE_DETACH,
|
||||
gpgme_sig_mode_t::GPGME_SIG_MODE_DETACH,
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
@ -988,9 +998,9 @@ impl Context {
|
|||
call!(&self.inner.lib, gpgme_op_encrypt_start)(
|
||||
self.inner.ptr.as_ptr(),
|
||||
raw_keys.as_mut_slice().as_mut_ptr(),
|
||||
gpgme_encrypt_flags_t_GPGME_ENCRYPT_NO_ENCRYPT_TO
|
||||
| gpgme_encrypt_flags_t_GPGME_ENCRYPT_NO_COMPRESS
|
||||
| gpgme_encrypt_flags_t_GPGME_ENCRYPT_ALWAYS_TRUST,
|
||||
gpgme_encrypt_flags_t::GPGME_ENCRYPT_NO_ENCRYPT_TO
|
||||
| gpgme_encrypt_flags_t::GPGME_ENCRYPT_NO_COMPRESS
|
||||
| gpgme_encrypt_flags_t::GPGME_ENCRYPT_ALWAYS_TRUST,
|
||||
plain.inner.as_mut(),
|
||||
cipher,
|
||||
),
|
||||
|
@ -1152,7 +1162,7 @@ impl Context {
|
|||
&self.inner.lib,
|
||||
call!(&self.inner.lib, gpgme_ctx_set_engine_info)(
|
||||
self.inner.ptr.as_ptr(),
|
||||
protocol as u32,
|
||||
protocol.into(),
|
||||
file_name
|
||||
.as_ref()
|
||||
.map(|c| c.as_ptr())
|
||||
|
@ -1173,7 +1183,7 @@ impl Context {
|
|||
&self.inner.lib,
|
||||
call!(&self.inner.lib, gpgme_set_protocol)(
|
||||
self.inner.ptr.as_ptr(),
|
||||
protocol as u32,
|
||||
protocol.into(),
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
@ -1216,9 +1226,9 @@ impl Context {
|
|||
call!(&self.inner.lib, gpgme_set_pinentry_mode)(
|
||||
self.inner.ptr.as_ptr(),
|
||||
if cb.is_none() {
|
||||
gpgme_pinentry_mode_t_GPGME_PINENTRY_MODE_DEFAULT
|
||||
gpgme_pinentry_mode_t::GPGME_PINENTRY_MODE_DEFAULT
|
||||
} else {
|
||||
gpgme_pinentry_mode_t_GPGME_PINENTRY_MODE_LOOPBACK
|
||||
gpgme_pinentry_mode_t::GPGME_PINENTRY_MODE_LOOPBACK
|
||||
},
|
||||
),
|
||||
)?;
|
||||
|
@ -1260,22 +1270,54 @@ pub enum Protocol {
|
|||
impl From<u32> for Protocol {
|
||||
fn from(val: u32) -> Self {
|
||||
match val {
|
||||
0 => Self::OpenPGP,
|
||||
1 => Self::CMS,
|
||||
2 => Self::GPGCONF,
|
||||
3 => Self::ASSUAN,
|
||||
4 => Self::G13,
|
||||
5 => Self::UISERVER,
|
||||
6 => Self::SPAWN,
|
||||
254 => Self::DEFAULT,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_OpenPGP as u32 => Self::OpenPGP,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_CMS as u32 => Self::CMS,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_GPGCONF as u32 => Self::GPGCONF,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_ASSUAN as u32 => Self::ASSUAN,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_G13 as u32 => Self::G13,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_UISERVER as u32 => Self::UISERVER,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_SPAWN as u32 => Self::SPAWN,
|
||||
val if val == gpgme_protocol_t::GPGME_PROTOCOL_DEFAULT as u32 => Self::DEFAULT,
|
||||
_ => Self::UNKNOWN,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gpgme_error_to_string(lib: &libloading::Library, error_code: GpgmeError) -> String {
|
||||
const ERR_MAX_LEN: usize = 256;
|
||||
let mut buf: Vec<u8> = vec![0; ERR_MAX_LEN];
|
||||
impl From<gpgme_protocol_t> for Protocol {
|
||||
fn from(val: gpgme_protocol_t) -> Self {
|
||||
match val {
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_OpenPGP => Self::OpenPGP,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_CMS => Self::CMS,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_GPGCONF => Self::GPGCONF,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_ASSUAN => Self::ASSUAN,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_G13 => Self::G13,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_UISERVER => Self::UISERVER,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_SPAWN => Self::SPAWN,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_DEFAULT => Self::DEFAULT,
|
||||
gpgme_protocol_t::GPGME_PROTOCOL_UNKNOWN => Self::UNKNOWN,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Protocol> for gpgme_protocol_t {
|
||||
fn from(val: Protocol) -> Self {
|
||||
match val {
|
||||
Protocol::OpenPGP => Self::GPGME_PROTOCOL_OpenPGP,
|
||||
Protocol::CMS => Self::GPGME_PROTOCOL_CMS,
|
||||
Protocol::GPGCONF => Self::GPGME_PROTOCOL_GPGCONF,
|
||||
Protocol::ASSUAN => Self::GPGME_PROTOCOL_ASSUAN,
|
||||
Protocol::G13 => Self::GPGME_PROTOCOL_G13,
|
||||
Protocol::UISERVER => Self::GPGME_PROTOCOL_UISERVER,
|
||||
Protocol::SPAWN => Self::GPGME_PROTOCOL_SPAWN,
|
||||
Protocol::DEFAULT => Self::GPGME_PROTOCOL_DEFAULT,
|
||||
Protocol::UNKNOWN => Self::GPGME_PROTOCOL_UNKNOWN,
|
||||
}
|
||||
}
|
||||
}
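A small illustrative check, not part of this patch, of what the conversions above are meant to guarantee: a Protocol converted into the raw gpgme_protocol_t and back should land on the same variant. It assumes both types are in scope as in this module and could live next to the module's other tests; discriminants are compared so the sketch does not assume Protocol implements PartialEq or Copy.

/// Returns true if `p` survives a round trip through the raw FFI enum.
fn protocol_round_trips(p: Protocol) -> bool {
    let before = std::mem::discriminant(&p);
    let raw: gpgme_protocol_t = p.into();
    std::mem::discriminant(&Protocol::from(raw)) == before
}

#[test]
fn test_protocol_round_trip() {
    assert!(protocol_round_trips(Protocol::OpenPGP));
    assert!(protocol_round_trips(Protocol::DEFAULT));
    assert!(protocol_round_trips(Protocol::UNKNOWN));
}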
|
||||
|
||||
fn gpgme_error_to_string(lib: &libloading::Library, error_code: gpgme_error_t) -> String {
|
||||
const ERR_MAX_LEN: bindings::size_t = 256;
|
||||
let mut buf: Vec<u8> = vec![0; ERR_MAX_LEN as usize];
|
||||
unsafe {
|
||||
call!(lib, gpgme_strerror_r)(
|
||||
error_code,
|
||||
|
@ -1290,7 +1332,7 @@ fn gpgme_error_to_string(lib: &libloading::Library, error_code: GpgmeError) -> S
|
|||
.unwrap_or_else(|err| String::from_utf8_lossy(&err.into_bytes()).to_string())
|
||||
}
|
||||
|
||||
fn gpgme_error_try(lib: &libloading::Library, error_code: GpgmeError) -> Result<()> {
|
||||
fn gpgme_error_try(lib: &libloading::Library, error_code: gpgme_error_t) -> Result<()> {
|
||||
if error_code == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -1337,7 +1379,7 @@ impl Drop for Data {
|
|||
#[repr(C)]
|
||||
struct GpgmeFd {
|
||||
fd: Arc<ManuallyDrop<OwnedFd>>,
|
||||
fnc: GpgmeIOCb,
|
||||
fnc: gpgme_io_cb_t,
|
||||
fnc_data: *mut c_void,
|
||||
idx: usize,
|
||||
write: bool,
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
use std::{borrow::Cow, ffi::CString, future::Future};
|
||||
|
||||
use sealed_test::prelude::*;
|
||||
use rusty_fork::rusty_fork_test;
|
||||
|
||||
use crate::{
|
||||
gpgme::{Context, EngineInfo, Key, LocateKey, Protocol},
|
||||
|
@ -33,7 +33,8 @@ const PUBKEY: &[u8]=b"-----BEGIN PGP PUBLIC KEY BLOCK-----\r\nVersion: GnuPG v2.
|
|||
|
||||
const SECKEY: &[u8] = b"-----BEGIN PGP PRIVATE KEY BLOCK-----\r\nVersion: GnuPG v2.1.0-gitb3c71eb (GNU/Linux)\r\n\r\nlQHpBDo41NoRBADSfQazKGYf8nokq6zUKH/6INtV6MypSzSGmX2XErnARkIIPPYj\r\ncQRQ8zCbGV7ZU2ezVbzhFLUSJveE8PZUzzCrLp1O2NSyBTRcR5HVSXW95nJfY8eV\r\npOvZRAKul0BVLh81kYTsrfzaaCjh9VWNP26LoeN2r+PjZyktXe7gM3C4SwCgoTxK\r\nWUVi9HoT2HCLY7p7oig5hEcEALdCJal0UYomX3nJapIVLVZg3vkidr1RICYMb2vz\r\n58i17h8sxEtobD1vdIKNejulntaRAXs4n0tDYD9z7pRlwG1CLz1R9WxYzeOOqUDr\r\nfnVXdmU8L/oVWABat8v1V7QQhjMMf+41fuzVwDMMGqjVPLhu4X6wp3A8uyM3YDnQ\r\nVMN1A/4n2G5gHoOvjqxn8Ch5tBAdMGfO8gH4RjQOwzm2R1wPQss/yzUN1+tlMZGX\r\nK2dQ2FCWC/hDUSNaEQRlI15wxxBNZ2RQwlzE2A8v113DpvyzOtv0QO95gJ1teCXC\r\n7j/BN9asgHaBBc39JLO/TcpuI7Hf8PQ5VcP2F0UE3lczGhXbLP4HAwL0A7A1a/jY\r\n6s5JxysLUpKA31U2SrKxePmkmzYSuAiValUVdfkmLRrLSwmNJSy5NcrBHGimja1O\r\nfUUmPTg465j1+vD/tERKb2UgUmFuZG9tIEhhY2tlciAodGVzdCBrZXkgd2l0aCBw\r\nYXNzcGhyYXNlICJhYmMiKSA8am9lQGV4YW1wbGUuY29tPohiBBMRAgAiBQJNt1ep\r\nAhsjBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRCvgiRPnNn9VRwIAJ9C9I+L\r\n//3+AG/xJWsro7gOLIP6MACfcsSd8XXiPoyPCWpdOqiXZWBCfXKdAWAEOjjU3RAE\r\nAJ50lvtCGbnQlI97VX6tJkosdPmdzeXaTWfv//A2wmSANbYnuychGMa1LN43Ew+H\r\n6FXMWJ3MB/exs6UBFCgGsw88qmcla2bosQN/aVLA7fqXT9ujqoNGaIVEmgdbK1Mk\r\nSPFXBFyVW3hteod83D0UqFlltwp4A3ageCYFVJTp50d3AAMFA/44YCQQbg9x9Jvz\r\nHX3VH7CRX+raEDkDL3Pbz0PHas7bwI7gzZ+GFyNKaCvrHQOyuR8RIKIbjtQYnXr1\r\n675ConCTceIXhysY32sTn5V6UFUW2t0xaRfas8sZBbLDyIJkpt4fyD+6OaRoui9K\r\nZqXMNwt7i/XFIto/sWd/OK3SIgZkAf4HAwIoimqPHVJZM85dNw6JtvLKFvvmkm3X\r\nuoCUG5nU6cgk6vetUYiykuKpU4zG3mDtdZdIZf76hJJ6lZTSHH9frLy7bRYPfu/k\r\nU1AFd1T1OxENiEYEGBECAAYFAjo41N0ACgkQr4IkT5zZ/VVUmgCffq49ZSEJ0Zdt\r\nIvEaJth8G3t63vUAn0mBZ5GVMNMihdg3wn6qw83RgLG2iEYEGBECAAYFAjo41N4A\r\nCgkQr4IkT5zZ/VXuIgCfRaxjJRUe7Phln9bHE3nsq7YSqz0An0qXP2KbO9mhSO4T\r\n38iGvTLnq1We\r\n=m0YJ\r\n-----END PGP PRIVATE KEY BLOCK-----\r\n";
|
||||
|
||||
#[sealed_test]
|
||||
rusty_fork_test! {
|
||||
#[test]
|
||||
fn test_gpgme_verify_sig() {
|
||||
fn make_fut(
|
||||
secret: bool,
|
||||
|
@ -151,3 +152,4 @@ fn test_gpgme_verify_sig() {
|
|||
.import_key(gpgme_ctx.new_data_mem(SECKEY).unwrap())
|
||||
.unwrap_err();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -122,4 +122,12 @@ impl BackendMailbox for ImapMailbox {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
Ok((self.unseen.lock()?.len(), self.exists.lock()?.len()))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1307,7 +1307,7 @@ impl MailBackend for ImapType {
|
|||
&self,
|
||||
query: crate::search::Query,
|
||||
mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
if mailbox_hash.is_none() {
|
||||
return Err(Error::new(
|
||||
"Cannot search without specifying mailbox on IMAP",
|
||||
|
@ -1340,7 +1340,7 @@ impl MailBackend for ImapType {
|
|||
for l in response.split_rn() {
|
||||
if l.starts_with(b"* SEARCH") {
|
||||
let uid_index = uid_store.uid_index.lock()?;
|
||||
return Ok(SmallVec::from_iter(
|
||||
return Ok(Vec::from_iter(
|
||||
String::from_utf8_lossy(l[b"* SEARCH".len()..].trim())
|
||||
.split_whitespace()
|
||||
.map(UID::from_str)
|
||||
|
|
|
@ -158,14 +158,8 @@ fn test_imap_untagged_responses() {
|
|||
#[test]
|
||||
fn test_imap_fetch_response() {
|
||||
#[rustfmt::skip]
|
||||
let input: &[u8] = b"* 198 FETCH (UID 7608 FLAGS (\\Seen) ENVELOPE (\"Fri, 24 Jun 2011 10:09:10 +0000\" \"xxxx/xxxx\" ((\"xx@xx.com\" NIL \"xx\" \"xx.com\")) NIL NIL ((\"xx@xx\" NIL \"xx\" \"xx.com\")) ((\"'xx, xx'\" NIL \"xx.xx\" \"xx.com\")(\"xx.xx@xx.com\" NIL \"xx.xx\" \"xx.com\")(\"'xx'\" NIL \"xx.xx\" \"xx.com\")(\"'xx xx'\" NIL \"xx.xx\" \"xx.com\")(\"xx.xx@xx.com\" NIL \"xx.xx\" \"xx.com\")) NIL NIL \"<xx@xx.com>\") BODY[HEADER.FIELDS (REFERENCES)] {2}\r\n\r\n BODYSTRUCTURE ((\"text\" \"html\" (\"charset\" \"us-ascii\") \"<xx@xx>\" NIL \"7BIT\" 17236 232 NIL NIL NIL NIL)(\"image\" \"jpeg\" (\"name\" \"image001.jpg\") \"<image001.jpg@xx.xx>\" \"image001.jpg\" \"base64\" 1918 NIL (\"inline\" (\"filename\" \"image001.jpg\" \"size\" \"1650\" \"creation-date\" \"Sun, 09 Aug 2015 20:56:04 GMT\" \"modification-date\" \"Sun, 14 Aug 2022 22:11:45 GMT\")) NIL NIL) \"related\" (\"boundary\" \"xx--xx\" \"type\" \"text/html\") NIL \"en-US\"))\r\n";
|
||||
#[rustfmt::skip]
|
||||
// ----------------------------------- ------------- --------------------------------------- --- --- ----------------------------------- -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- --- --- ---------------
|
||||
// date subject from | | to cc bcc irt message-id
|
||||
// | reply-to
|
||||
// sender
|
||||
|
||||
let mut address = SmallVec::new();
|
||||
let input: &[u8] = b"* 198 FETCH (UID 7608 FLAGS (\\Seen) ENVELOPE (\"Fri, 24 Jun 2011 10:09:10 +0000\" \"xxxx/xxxx\" ((\"xx@xx.com\" NIL \"xx\" \"xx.com\")) NIL NIL ((\"xx@xx\" NIL \"xx\" \"xx.com\")) ((\"'xx, xx'\" NIL \"xx.xx\" \"xx.com\")(\"xx.xx@xx.com\" NIL \"xx.xx\" \"xx.com\")(\"'xx'\" NIL \"xx.xx\" \"xx.com\")(\"'xx xx'\" NIL \"xx.xx\" \"xx.com\")(\"xx.xx@xx.com\" NIL \"xx.xx\" \"xx.com\")) NIL NIL \"<xx@xx.com>\") BODY[HEADER.FIELDS (REFERENCES)] {2}\r\n\r\n BODYSTRUCTURE ((\"text\" \"html\" (\"charset\" \"us-ascii\") \"<xx@xx>\" NIL \"7BIT\" 17236 232 NIL NIL NIL NIL)(\"image\" \"jpeg\" (\"name\" \"image001.jpg\") \"<image001.jpg@xx.xx>\" \"image001.jpg\" \"base64\" 1918 NIL (\"inline\" (\"filename\" \"image001.jpg\" \"size\" \"1650\" \"creation-date\" \"Sun, 09 Aug 2015 20:56:04 GMT\" \"modification-date\" \"Sun, 14 Aug 2022 22:11:45 GMT\")) NIL NIL) \"related\" (\"boundary\" \"xx--xx\" \"type\" \"text/html\") NIL \"en-US\"))\r\n";
|
||||
let mut address = SmallVec::new();
|
||||
address.push(Address::new(None, "xx@xx.com".to_string()));
|
||||
let mut env = Envelope::new(EnvelopeHash::default());
|
||||
env.set_subject(b"xxxx/xxxx".to_vec());
|
||||
|
|
|
@ -122,4 +122,12 @@ impl BackendMailbox for JmapMailbox {
|
|||
self.total_emails.lock()?.len(),
|
||||
))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -774,7 +774,7 @@ impl<'de> ::serde::de::Deserialize<'de> for RequestUrlTemplate {

struct _Visitor;

impl<'de> Visitor<'de> for _Visitor {
impl Visitor<'_> for _Visitor {
type Value = RequestUrlTemplate;

fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
@ -626,7 +626,7 @@ impl MailBackend for JmapType {
|
|||
&self,
|
||||
q: crate::search::Query,
|
||||
mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
let store = self.store.clone();
|
||||
let connection = self.connection.clone();
|
||||
let filter = if let Some(mailbox_hash) = mailbox_hash {
|
||||
|
|
|
@ -360,7 +360,7 @@ pub struct EnvelopeObject {
|
|||
/// submission, plus any parameters to pass with the MAIL FROM
|
||||
/// address. The JMAP server **MAY** allow the address to be the empty
|
||||
/// string.
|
||||
|
||||
///
|
||||
/// When a JMAP server performs an SMTP message submission, it MAY
|
||||
/// use the same id string for the ENVID parameter `[RFC3461]` and
|
||||
/// the [`EmailSubmission`](`EmailSubmissionObject`) object id. Servers
|
||||
|
|
|
@ -78,7 +78,10 @@
|
|||
* - [tag:VERIFY] Verify whether this is the correct way to do something
|
||||
* - [tag:DEBT] Technical debt
|
||||
*/
|
||||
|
||||
#![doc = include_str!("../README.md")]
|
||||
//!
|
||||
//! ## Description
|
||||
//!
|
||||
//! A crate that performs mail client operations such as
|
||||
//! - Hold an [`Envelope`] with methods convenient for mail client use. (see
|
||||
//! module [`email`])
|
||||
|
@ -183,7 +186,6 @@ extern crate serde_derive;
|
|||
pub extern crate log;
|
||||
/* parser */
|
||||
extern crate data_encoding;
|
||||
extern crate encoding;
|
||||
pub extern crate nom;
|
||||
|
||||
#[macro_use]
|
||||
|
|
|
@ -41,7 +41,7 @@ use regex::Regex;
|
|||
use super::{watch, MaildirMailbox, MaildirOp, MaildirPathTrait};
|
||||
use crate::{
|
||||
backends::{prelude::*, RefreshEventKind::*},
|
||||
error::{Error, ErrorKind, IntoError, Result},
|
||||
error::{Error, ErrorKind, IntoError, Result, ResultIntoError},
|
||||
utils::shellexpand::ShellExpandTrait,
|
||||
};
|
||||
|
||||
|
@@ -106,9 +106,15 @@ impl DerefMut for HashIndex {

pub type HashIndexes = Arc<Mutex<HashMap<MailboxHash, HashIndex>>>;

#[derive(Debug)]
#[derive(Debug, Default)]
pub struct Configuration {
pub rename_regex: Option<Regex>,
pub path: PathBuf,
pub root_mailbox_name: String,
/// Is `root_mailbox` a valid maildir folder or just a folder containing
/// valid maildir folders?
pub is_root_a_mailbox: bool,
pub settings: AccountSettings,
}

impl Configuration {
@ -130,21 +136,25 @@ impl Configuration {
|
|||
None
|
||||
};
|
||||
|
||||
Ok(Self { rename_regex })
|
||||
Ok(Self {
|
||||
rename_regex,
|
||||
settings: settings.clone(),
|
||||
..Self::default()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The maildir backend instance type.
|
||||
#[derive(Debug)]
|
||||
pub struct MaildirType {
|
||||
pub name: String,
|
||||
pub account_name: String,
|
||||
pub account_hash: AccountHash,
|
||||
pub mailboxes: HashMap<MailboxHash, MaildirMailbox>,
|
||||
pub mailbox_index: Arc<Mutex<HashMap<EnvelopeHash, MailboxHash>>>,
|
||||
pub hash_indexes: HashIndexes,
|
||||
pub event_consumer: BackendEventConsumer,
|
||||
pub is_subscribed: IsSubscribedFn,
|
||||
pub collection: Collection,
|
||||
pub path: PathBuf,
|
||||
pub config: Arc<Configuration>,
|
||||
}
|
||||
|
||||
|
@ -210,7 +220,7 @@ impl MailBackend for MaildirType {
|
|||
}
|
||||
|
||||
fn refresh(&mut self, mailbox_hash: MailboxHash) -> ResultFuture<()> {
|
||||
let account_hash = AccountHash::from_bytes(self.name.as_bytes());
|
||||
let account_hash = self.account_hash;
|
||||
let sender = self.event_consumer.clone();
|
||||
|
||||
let mailbox: &MaildirMailbox = &self.mailboxes[&mailbox_hash];
|
||||
|
@ -293,7 +303,7 @@ impl MailBackend for MaildirType {
|
|||
}
|
||||
|
||||
fn watch(&self) -> ResultFuture<()> {
|
||||
let root_mailbox = self.path.to_path_buf();
|
||||
let root_mailbox = self.config.path.to_path_buf();
|
||||
let (tx, rx) = channel();
|
||||
let watcher = RecommendedWatcher::new(
|
||||
tx,
|
||||
|
@ -317,7 +327,7 @@ impl MailBackend for MaildirType {
|
|||
.collect::<HashMap<MailboxHash, (Arc<Mutex<usize>>, Arc<Mutex<usize>>)>>();
|
||||
let watch_state = watch::MaildirWatch {
|
||||
watcher,
|
||||
account_hash: AccountHash::from_bytes(self.name.as_bytes()),
|
||||
account_hash: self.account_hash,
|
||||
event_consumer: self.event_consumer.clone(),
|
||||
root_mailbox,
|
||||
rx,
|
||||
|
@ -487,52 +497,13 @@ impl MailBackend for MaildirType {
|
|||
|
||||
fn create_mailbox(
|
||||
&mut self,
|
||||
new_path: String,
|
||||
name: String,
|
||||
) -> ResultFuture<(MailboxHash, HashMap<MailboxHash, Mailbox>)> {
|
||||
let mut path = self.path.clone();
|
||||
path.push(&new_path);
|
||||
if !path.starts_with(&self.path) {
|
||||
return Err(Error::new(format!(
|
||||
"Path given (`{}`) is absolute. Please provide a path relative to the account's \
|
||||
root mailbox.",
|
||||
&new_path
|
||||
)));
|
||||
}
|
||||
|
||||
std::fs::create_dir(&path)?;
|
||||
/* create_dir does not create intermediate directories (like `mkdir -p`), so
|
||||
* the parent must be a valid mailbox at this point. */
|
||||
|
||||
let parent = path.parent().and_then(|p| {
|
||||
self.mailboxes
|
||||
.iter()
|
||||
.find(|(_, f)| f.fs_path == p)
|
||||
.map(|item| *item.0)
|
||||
});
|
||||
|
||||
let mailbox_hash: MailboxHash = path.to_mailbox_hash();
|
||||
if let Some(parent) = parent {
|
||||
self.mailboxes
|
||||
.entry(parent)
|
||||
.and_modify(|entry| entry.children.push(mailbox_hash));
|
||||
}
|
||||
let new_mailbox = MaildirMailbox {
|
||||
hash: mailbox_hash,
|
||||
path: PathBuf::from(&new_path),
|
||||
name: new_path,
|
||||
fs_path: path,
|
||||
parent,
|
||||
children: vec![],
|
||||
usage: Default::default(),
|
||||
is_subscribed: true,
|
||||
permissions: Default::default(),
|
||||
unseen: Default::default(),
|
||||
total: Default::default(),
|
||||
};
|
||||
|
||||
self.mailboxes.insert(mailbox_hash, new_mailbox);
|
||||
let ret = self.mailboxes()?;
|
||||
Ok(Box::pin(async move { Ok((mailbox_hash, ret.await?)) }))
|
||||
let mailbox_hash = self.create_mailbox_sync(name);
|
||||
let mailboxes_fut = self.mailboxes();
|
||||
Ok(Box::pin(async move {
|
||||
Ok((mailbox_hash?, mailboxes_fut?.await?))
|
||||
}))
|
||||
}
|
||||
|
||||
fn delete_mailbox(
|
||||
|
@ -578,7 +549,7 @@ impl MailBackend for MaildirType {
|
|||
&self,
|
||||
_query: crate::search::Query,
|
||||
_mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
Err(
|
||||
Error::new("Search is unimplemented for the maildir backend.")
|
||||
.set_kind(ErrorKind::NotImplemented),
|
||||
|
@ -600,12 +571,10 @@ impl MaildirType {
|
|||
is_subscribed: IsSubscribedFn,
|
||||
event_consumer: BackendEventConsumer,
|
||||
) -> Result<Box<Self>> {
|
||||
let config = Arc::new(Configuration::new(settings)?);
|
||||
|
||||
let mut mailboxes: HashMap<MailboxHash, MaildirMailbox> = Default::default();
|
||||
fn recurse_mailboxes<P: AsRef<Path>>(
|
||||
mailboxes: &mut HashMap<MailboxHash, MaildirMailbox>,
|
||||
settings: &AccountSettings,
|
||||
config: &Configuration,
|
||||
p: P,
|
||||
) -> Result<Vec<MailboxHash>> {
|
||||
if !p.as_ref().try_exists().unwrap_or(false) || !p.as_ref().is_dir() {
|
||||
|
@ -634,9 +603,9 @@ impl MaildirType {
|
|||
None,
|
||||
Vec::new(),
|
||||
false,
|
||||
settings,
|
||||
config,
|
||||
) {
|
||||
f.children = recurse_mailboxes(mailboxes, settings, &path)?;
|
||||
f.children = recurse_mailboxes(mailboxes, config, &path)?;
|
||||
for c in &f.children {
|
||||
if let Some(f) = mailboxes.get_mut(c) {
|
||||
f.parent = Some(f.hash);
|
||||
|
@ -650,7 +619,7 @@ impl MaildirType {
|
|||
* it contains subdirs of any depth that are
|
||||
* valid maildir paths
|
||||
*/
|
||||
let subdirs = recurse_mailboxes(mailboxes, settings, &path)?;
|
||||
let subdirs = recurse_mailboxes(mailboxes, config, &path)?;
|
||||
if !subdirs.is_empty() {
|
||||
if let Ok(f) = MaildirMailbox::new(
|
||||
path.to_str().unwrap().to_string(),
|
||||
|
@ -658,7 +627,7 @@ impl MaildirType {
|
|||
None,
|
||||
subdirs,
|
||||
true,
|
||||
settings,
|
||||
config,
|
||||
) {
|
||||
for c in &f.children {
|
||||
if let Some(f) = mailboxes.get_mut(c) {
|
||||
|
@ -691,7 +660,7 @@ impl MaildirType {
|
|||
)));
|
||||
}
|
||||
|
||||
if let Ok(f) = MaildirMailbox::new(
|
||||
let (is_root_a_mailbox, root_mailbox_name) = if let Ok(f) = MaildirMailbox::new_root_mailbox(
|
||||
root_mailbox.to_str().unwrap().to_string(),
|
||||
root_mailbox
|
||||
.file_name()
|
||||
|
@ -704,11 +673,30 @@ impl MaildirType {
|
|||
false,
|
||||
settings,
|
||||
) {
|
||||
let name = f.name.clone();
|
||||
mailboxes.insert(f.hash, f);
|
||||
}
|
||||
(true, name)
|
||||
} else {
|
||||
(
|
||||
false,
|
||||
root_mailbox
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
let config = Arc::new(Configuration {
|
||||
path: root_mailbox.clone(),
|
||||
root_mailbox_name,
|
||||
is_root_a_mailbox,
|
||||
..Configuration::new(settings)?
|
||||
});
|
||||
|
||||
if mailboxes.is_empty() {
|
||||
let children = recurse_mailboxes(&mut mailboxes, settings, &root_mailbox)?;
|
||||
let children = recurse_mailboxes(&mut mailboxes, &config, &root_mailbox)?;
|
||||
for c in &children {
|
||||
if let Some(f) = mailboxes.get_mut(c) {
|
||||
f.parent = None;
|
||||
|
@ -716,7 +704,7 @@ impl MaildirType {
|
|||
}
|
||||
} else {
|
||||
let root_hash = *mailboxes.keys().next().unwrap();
|
||||
let children = recurse_mailboxes(&mut mailboxes, settings, &root_mailbox)?;
|
||||
let children = recurse_mailboxes(&mut mailboxes, &config, &root_mailbox)?;
|
||||
for c in &children {
|
||||
if let Some(f) = mailboxes.get_mut(c) {
|
||||
f.parent = Some(root_hash);
|
||||
|
@ -743,15 +731,16 @@ impl MaildirType {
|
|||
},
|
||||
);
|
||||
}
|
||||
|
||||
Ok(Box::new(Self {
|
||||
name: settings.name.to_string(),
|
||||
account_name: settings.name.to_string(),
|
||||
account_hash: AccountHash::from_bytes(settings.name.as_bytes()),
|
||||
mailboxes,
|
||||
is_subscribed,
|
||||
hash_indexes: Arc::new(Mutex::new(hash_indexes)),
|
||||
mailbox_index: Default::default(),
|
||||
event_consumer,
|
||||
collection: Default::default(),
|
||||
path: root_mailbox,
|
||||
config,
|
||||
}))
|
||||
}
|
||||
|
@ -866,4 +855,94 @@ impl MaildirType {
|
|||
}
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
pub fn create_mailbox_sync(&mut self, name: String) -> Result<MailboxHash> {
|
||||
let (fs_path, suffix) = {
|
||||
let mut fs_path = self.config.path.clone();
|
||||
let mut suffix = name.clone();
|
||||
let root_mailbox_path_str = PathBuf::from(&self.config.settings.root_mailbox)
|
||||
.expand()
|
||||
.display()
|
||||
.to_string();
|
||||
if suffix.starts_with(&root_mailbox_path_str)
|
||||
&& suffix.get(root_mailbox_path_str.len()..).is_some()
|
||||
&& suffix[root_mailbox_path_str.len()..].starts_with("/")
|
||||
{
|
||||
suffix.replace_range(0..=root_mailbox_path_str.len(), "");
|
||||
}
|
||||
if suffix.starts_with(&self.config.root_mailbox_name)
|
||||
&& suffix.get(self.config.root_mailbox_name.len()..).is_some()
|
||||
&& suffix[self.config.root_mailbox_name.len()..].starts_with("/")
|
||||
{
|
||||
suffix.replace_range(0..=self.config.root_mailbox_name.len(), "");
|
||||
}
|
||||
fs_path.push(&suffix);
|
||||
let not_in_root = self
|
||||
.config
|
||||
.path
|
||||
.parent()
|
||||
.map(|p| fs_path.starts_with(p) && !fs_path.starts_with(&self.config.path))
|
||||
.unwrap_or(false);
|
||||
if not_in_root {
|
||||
return Err(Error::new(format!(
|
||||
"Path given, `{}`, is not included in the root mailbox path `{}`. A maildir \
|
||||
backend cannot contain mailboxes outside of its root path.",
|
||||
&name,
|
||||
self.config.path.display()
|
||||
)));
|
||||
}
|
||||
if !fs_path.starts_with(&self.config.path) {
|
||||
return Err(Error::new(format!(
|
||||
"Path given (`{}`) is absolute. Please provide a path relative to the \
|
||||
account's root mailbox.",
|
||||
&name
|
||||
)));
|
||||
}
|
||||
(fs_path, suffix)
|
||||
};
|
||||
|
||||
std::fs::create_dir(&fs_path)
|
||||
.chain_err_summary(|| "Could not create new mailbox")
|
||||
.chain_err_related_path(&fs_path)?;
|
||||
|
||||
// std::fs::create_dir does not create intermediate directories (like `mkdir
|
||||
// -p`), so the parent must be a valid mailbox at this point.
|
||||
let parent = fs_path.parent().and_then(|p| {
|
||||
self.mailboxes
|
||||
.iter()
|
||||
.find(|(_, f)| f.fs_path == p)
|
||||
.map(|item| *item.0)
|
||||
});
|
||||
|
||||
let mailbox_hash: MailboxHash = fs_path.to_mailbox_hash();
|
||||
if let Some(parent) = parent {
|
||||
self.mailboxes
|
||||
.entry(parent)
|
||||
.and_modify(|entry| entry.children.push(mailbox_hash));
|
||||
}
|
||||
let path = if self.config.is_root_a_mailbox {
|
||||
let mut path = PathBuf::from(&self.config.root_mailbox_name);
|
||||
path.push(suffix);
|
||||
path
|
||||
} else {
|
||||
PathBuf::from(&suffix)
|
||||
};
|
||||
let name = fs_path.file_name().unwrap().to_str().unwrap().to_string();
|
||||
let new_mailbox = MaildirMailbox {
|
||||
hash: mailbox_hash,
|
||||
path,
|
||||
name,
|
||||
fs_path,
|
||||
parent,
|
||||
children: vec![],
|
||||
usage: Default::default(),
|
||||
is_subscribed: true,
|
||||
permissions: Default::default(),
|
||||
unseen: Default::default(),
|
||||
total: Default::default(),
|
||||
};
|
||||
|
||||
self.mailboxes.insert(mailbox_hash, new_mailbox);
|
||||
Ok(mailbox_hash)
|
||||
}
|
||||
}
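Both create_mailbox_sync above and MaildirMailbox::new below apply the same normalization: the requested mailbox name may arrive either prefixed with the root mailbox's display name (for example "INBOX/Archive") or as a filesystem path under the root, and both forms are reduced to the suffix below the root. A standalone sketch of that rule follows; the helper name and the prefixes in the test are hypothetical, chosen only to mirror the replace_range calls in the diff.

/// Hypothetical helper illustrating the prefix-stripping rule used above:
/// remove a recognized root prefix (filesystem path or root mailbox name)
/// together with the '/' separator from a requested mailbox name.
fn strip_root_prefix(name: &str, prefixes: &[&str]) -> String {
    let mut suffix = name.to_string();
    for prefix in prefixes {
        if suffix.starts_with(prefix) && suffix[prefix.len()..].starts_with('/') {
            // Remove the prefix plus the following slash, like the
            // `replace_range(0..=prefix.len(), "")` calls above.
            suffix.replace_range(0..=prefix.len(), "");
        }
    }
    suffix
}

#[test]
fn test_strip_root_prefix() {
    assert_eq!(strip_root_prefix("INBOX/Archive", &["INBOX"]), "Archive");
    assert_eq!(
        strip_root_prefix("/tmp/account/INBOX/Archive", &["/tmp/account/INBOX", "INBOX"]),
        "Archive"
    );
    // A name carrying no recognized prefix is left untouched.
    assert_eq!(strip_root_prefix("Archive", &["INBOX"]), "Archive");
}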
|
||||
|
|
|
@ -142,39 +142,128 @@ pub struct MaildirMailbox {
|
|||
|
||||
impl MaildirMailbox {
|
||||
pub fn new(
|
||||
path: String,
|
||||
given_path: String,
|
||||
file_name: String,
|
||||
parent: Option<MailboxHash>,
|
||||
children: Vec<MailboxHash>,
|
||||
accept_invalid: bool,
|
||||
settings: &AccountSettings,
|
||||
config: &Configuration,
|
||||
) -> Result<Self> {
|
||||
let pathbuf = PathBuf::from(&path).expand();
|
||||
let mut h = DefaultHasher::new();
|
||||
pathbuf.hash(&mut h);
|
||||
let (fs_path, suffix) = {
|
||||
let mut fs_path = config.path.clone();
|
||||
let mut suffix = given_path.clone();
|
||||
let root_mailbox_path_str = PathBuf::from(&config.settings.root_mailbox)
|
||||
.expand()
|
||||
.display()
|
||||
.to_string();
|
||||
if suffix.starts_with(&root_mailbox_path_str)
|
||||
&& suffix.get(root_mailbox_path_str.len()..).is_some()
|
||||
&& suffix[root_mailbox_path_str.len()..].starts_with("/")
|
||||
{
|
||||
suffix.replace_range(0..=root_mailbox_path_str.len(), "");
|
||||
}
|
||||
if suffix.starts_with(&config.root_mailbox_name)
|
||||
&& suffix.get(config.root_mailbox_name.len()..).is_some()
|
||||
&& suffix[config.root_mailbox_name.len()..].starts_with("/")
|
||||
{
|
||||
suffix.replace_range(0..=config.root_mailbox_name.len(), "");
|
||||
}
|
||||
fs_path.push(&suffix);
|
||||
if !fs_path.starts_with(&config.path) && fs_path != config.path {
|
||||
return Err(Error::new(format!(
|
||||
"Path given, `{}`, is is not included in the root mailbox path `{}`.",
|
||||
&given_path,
|
||||
config.path.display()
|
||||
)));
|
||||
}
|
||||
(fs_path, suffix)
|
||||
};
|
||||
|
||||
/* Check if mailbox path (Eg `INBOX/Lists/luddites`) is included in the
|
||||
* subscribed mailboxes in user configuration */
|
||||
let fname = pathbuf
|
||||
.strip_prefix(
|
||||
PathBuf::from(&settings.root_mailbox)
|
||||
.expand()
|
||||
.parent()
|
||||
.unwrap_or_else(|| Path::new("/")),
|
||||
)
|
||||
.ok();
|
||||
let hash = {
|
||||
let mut h = DefaultHasher::new();
|
||||
fs_path.hash(&mut h);
|
||||
MailboxHash(h.finish())
|
||||
};
|
||||
|
||||
let read_only = if let Ok(metadata) = std::fs::metadata(&pathbuf) {
|
||||
let path = if config.is_root_a_mailbox {
|
||||
let mut path = PathBuf::from(&config.root_mailbox_name);
|
||||
path.push(suffix);
|
||||
path
|
||||
} else {
|
||||
PathBuf::from(&suffix)
|
||||
};
|
||||
|
||||
let read_only = if let Ok(metadata) = std::fs::metadata(&fs_path) {
|
||||
metadata.permissions().readonly()
|
||||
} else {
|
||||
true
|
||||
};
|
||||
|
||||
let ret = Self {
|
||||
hash: MailboxHash(h.finish()),
|
||||
hash,
|
||||
name: file_name,
|
||||
path: fname.unwrap().to_path_buf(),
|
||||
fs_path: pathbuf,
|
||||
path,
|
||||
fs_path,
|
||||
parent,
|
||||
children,
|
||||
usage: Arc::new(RwLock::new(SpecialUsageMailbox::Normal)),
|
||||
is_subscribed: false,
|
||||
permissions: MailboxPermissions {
|
||||
create_messages: !read_only,
|
||||
remove_messages: !read_only,
|
||||
set_flags: !read_only,
|
||||
create_child: !read_only,
|
||||
rename_messages: !read_only,
|
||||
delete_messages: !read_only,
|
||||
delete_mailbox: !read_only,
|
||||
change_permissions: false,
|
||||
},
|
||||
unseen: Arc::new(Mutex::new(0)),
|
||||
total: Arc::new(Mutex::new(0)),
|
||||
};
|
||||
if !accept_invalid {
|
||||
ret.is_valid()?;
|
||||
}
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
pub fn new_root_mailbox(
|
||||
given_path: String,
|
||||
file_name: String,
|
||||
parent: Option<MailboxHash>,
|
||||
children: Vec<MailboxHash>,
|
||||
accept_invalid: bool,
|
||||
settings: &AccountSettings,
|
||||
) -> Result<Self> {
|
||||
let fs_path = PathBuf::from(&given_path).expand();
|
||||
let hash = {
|
||||
let mut h = DefaultHasher::new();
|
||||
fs_path.hash(&mut h);
|
||||
MailboxHash(h.finish())
|
||||
};
|
||||
|
||||
let path = fs_path
|
||||
.strip_prefix(
|
||||
PathBuf::from(&settings.root_mailbox)
|
||||
.expand()
|
||||
.parent()
|
||||
.unwrap_or_else(|| Path::new("/")),
|
||||
)
|
||||
.ok()
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
|
||||
let read_only = if let Ok(metadata) = std::fs::metadata(&fs_path) {
|
||||
metadata.permissions().readonly()
|
||||
} else {
|
||||
true
|
||||
};
|
||||
|
||||
let ret = Self {
|
||||
hash,
|
||||
name: file_name,
|
||||
path,
|
||||
fs_path,
|
||||
parent,
|
||||
children,
|
||||
usage: Arc::new(RwLock::new(SpecialUsageMailbox::Normal)),
|
||||
|
@ -267,6 +356,14 @@ impl BackendMailbox for MaildirMailbox {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
Ok((*self.unseen.lock()?, *self.total.lock()?))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MaildirPathTrait {
|
||||
|
|
|
@ -20,15 +20,19 @@
|
|||
//
|
||||
// SPDX-License-Identifier: EUPL-1.2 OR GPL-3.0-or-later
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use regex::Regex;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::{
|
||||
backends::FlagOp,
|
||||
backends::prelude::*,
|
||||
email::Flag,
|
||||
error::Result,
|
||||
maildir::{move_to_cur, Configuration, MaildirPathTrait},
|
||||
maildir::{move_to_cur, Configuration, MaildirMailbox, MaildirPathTrait, MaildirType},
|
||||
};
|
||||
|
||||
fn set_flags(config: &Configuration, path: &Path, flag_ops: &[FlagOp]) -> Result<PathBuf> {
|
||||
|
@ -44,7 +48,7 @@ fn set_flags(config: &Configuration, path: &Path, flag_ops: &[FlagOp]) -> Result
|
|||
|
||||
#[test]
|
||||
fn test_maildir_move_to_cur_rename() {
|
||||
let config = Configuration { rename_regex: None };
|
||||
let config = Configuration::default();
|
||||
assert_eq!(
|
||||
move_to_cur(&config, Path::new("/path/to/new/1423819205.29514_1:2,FRS")).unwrap(),
|
||||
Path::new("/path/to/cur/1423819205.29514_1:2,FRS")
|
||||
|
@ -67,6 +71,7 @@ fn test_maildir_move_to_cur_rename() {
|
|||
fn test_maildir_move_to_cur_rename_regexp() {
|
||||
let config = Configuration {
|
||||
rename_regex: Some(Regex::new(r",U=\d\d*").unwrap()),
|
||||
..Configuration::default()
|
||||
};
|
||||
assert_eq!(
|
||||
move_to_cur(
|
||||
|
@ -104,7 +109,7 @@ fn test_maildir_move_to_cur_rename_regexp() {
|
|||
|
||||
#[test]
|
||||
fn test_maildir_set_flags() {
|
||||
let config = Configuration { rename_regex: None };
|
||||
let config = Configuration::default();
|
||||
|
||||
assert_eq!(
|
||||
set_flags(
|
||||
|
@ -139,6 +144,7 @@ fn test_maildir_set_flags() {
|
|||
fn test_maildir_set_flags_regexp() {
|
||||
let config = Configuration {
|
||||
rename_regex: Some(Regex::new(r",U=\d\d*").unwrap()),
|
||||
..Configuration::default()
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
|
@ -172,7 +178,7 @@ fn test_maildir_set_flags_regexp() {
|
|||
|
||||
#[test]
|
||||
fn test_maildir_place_in_dir() {
|
||||
let config = Configuration { rename_regex: None };
|
||||
let config = Configuration::default();
|
||||
|
||||
assert_eq!(
|
||||
Path::new("/path/to/new/1423819205.29514_1:2,")
|
||||
|
@ -203,6 +209,7 @@ fn test_maildir_place_in_dir() {
|
|||
fn test_maildir_place_in_dir_regexp() {
|
||||
let config = Configuration {
|
||||
rename_regex: Some(Regex::new(r",U=\d\d*").unwrap()),
|
||||
..Configuration::default()
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
|
@ -229,3 +236,312 @@ fn test_maildir_place_in_dir_regexp() {
|
|||
"place_in_dir() should add missing `:2,` substring"
|
||||
);
|
||||
}
|
||||
|
||||
fn new_maildir_backend(
|
||||
temp_dir: &TempDir,
|
||||
acc_name: &str,
|
||||
event_consumer: BackendEventConsumer,
|
||||
with_root_mailbox: bool,
|
||||
) -> Result<(PathBuf, AccountSettings, Box<MaildirType>)> {
|
||||
let root_mailbox = temp_dir.path().join("INBOX");
|
||||
{
|
||||
std::fs::create_dir(&root_mailbox).expect("Could not create root mailbox directory.");
|
||||
if with_root_mailbox {
|
||||
for d in &["cur", "new", "tmp"] {
|
||||
std::fs::create_dir(root_mailbox.join(d))
|
||||
.expect("Could not create root mailbox directory contents.");
|
||||
}
|
||||
}
|
||||
}
|
||||
let subscribed_mailboxes = if with_root_mailbox {
|
||||
vec!["INBOX".into()]
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
let mailboxes = if with_root_mailbox {
|
||||
vec![(
|
||||
"INBOX".into(),
|
||||
crate::conf::MailboxConf {
|
||||
extra: indexmap::indexmap! {
|
||||
"path".into() => root_mailbox.display().to_string(),
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
)]
|
||||
.into_iter()
|
||||
.collect()
|
||||
} else {
|
||||
indexmap::indexmap! {}
|
||||
};
|
||||
let extra = if with_root_mailbox {
|
||||
indexmap::indexmap! {
|
||||
"root_mailbox".into() => root_mailbox.display().to_string(),
|
||||
}
|
||||
} else {
|
||||
indexmap::indexmap! {}
|
||||
};
|
||||
|
||||
let account_conf = AccountSettings {
|
||||
name: acc_name.to_string(),
|
||||
root_mailbox: root_mailbox.display().to_string(),
|
||||
format: "maildir".to_string(),
|
||||
identity: "user@localhost".to_string(),
|
||||
extra_identities: vec![],
|
||||
read_only: false,
|
||||
display_name: None,
|
||||
order: Default::default(),
|
||||
subscribed_mailboxes,
|
||||
mailboxes,
|
||||
manual_refresh: true,
|
||||
extra,
|
||||
};
|
||||
|
||||
let maildir = MaildirType::new(&account_conf, Default::default(), event_consumer)?;
|
||||
Ok((root_mailbox, account_conf, maildir))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_maildir_mailbox_paths() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let backend_event_queue = Arc::new(std::sync::Mutex::new(
|
||||
std::collections::VecDeque::with_capacity(16),
|
||||
));
|
||||
|
||||
let backend_event_consumer = {
|
||||
let backend_event_queue = Arc::clone(&backend_event_queue);
|
||||
|
||||
BackendEventConsumer::new(Arc::new(move |ah, be| {
|
||||
backend_event_queue.lock().unwrap().push_back((ah, be));
|
||||
}))
|
||||
};
|
||||
|
||||
// Perform tests on a maildir backend where the root mailbox is not a valid
// maildir mailbox (e.g. has no cur, new, tmp sub directories).
|
||||
{
|
||||
let (root_mailbox, _settings, maildir) =
|
||||
new_maildir_backend(&temp_dir, "maildir", backend_event_consumer.clone(), false)
|
||||
.unwrap();
|
||||
assert!(!maildir.config.is_root_a_mailbox);
|
||||
// Assert that giving a file system path to MaildirBox::new is valid
|
||||
let new_mailbox = MaildirMailbox::new(
|
||||
root_mailbox.join("Archive").display().to_string(),
|
||||
"Archive".into(),
|
||||
None,
|
||||
vec![],
|
||||
true,
|
||||
&maildir.config,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(new_mailbox.name, "Archive");
|
||||
assert_eq!(&new_mailbox.path, &Path::new("Archive"));
|
||||
assert_eq!(&new_mailbox.fs_path, &root_mailbox.join("Archive"));
|
||||
// Assert that giving a mailbox path to MaildirBox::new is valid
|
||||
let new_mailbox = MaildirMailbox::new(
|
||||
"INBOX/Archive".into(),
|
||||
"Archive".into(),
|
||||
None,
|
||||
vec![],
|
||||
true,
|
||||
&maildir.config,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(new_mailbox.name, "Archive");
|
||||
assert_eq!(&new_mailbox.path, &Path::new("Archive"));
|
||||
assert_eq!(&new_mailbox.fs_path, &root_mailbox.join("Archive"));
|
||||
let mut backend = maildir as Box<dyn MailBackend>;
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
// Assert that backend has no mailboxes at all
|
||||
assert!(
|
||||
ref_mailboxes.is_empty(),
|
||||
"ref_mailboxes were not empty: {:?}",
|
||||
ref_mailboxes
|
||||
);
|
||||
let (new_hash, ref_mailboxes) =
|
||||
smol::block_on(backend.create_mailbox("Archive".into()).unwrap()).unwrap();
|
||||
assert_eq!(ref_mailboxes[&new_hash].name(), "Archive");
|
||||
assert_eq!(ref_mailboxes[&new_hash].path(), "Archive");
|
||||
assert_eq!(
|
||||
ref_mailboxes[&new_hash]
|
||||
.as_any()
|
||||
.downcast_ref::<MaildirMailbox>()
|
||||
.unwrap()
|
||||
.fs_path(),
|
||||
&root_mailbox.join("Archive")
|
||||
);
|
||||
// Assert that even if we accidentally give a file system path to a maildir
|
||||
// backend's create_mailbox() method, it still does the correct thing.
|
||||
let (new_hash, ref_mailboxes) = smol::block_on(
|
||||
backend
|
||||
.create_mailbox(root_mailbox.join("Archive2").display().to_string())
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(ref_mailboxes[&new_hash].name(), "Archive2");
|
||||
assert_eq!(ref_mailboxes[&new_hash].path(), "Archive2");
|
||||
assert_eq!(
|
||||
ref_mailboxes[&new_hash]
|
||||
.as_any()
|
||||
.downcast_ref::<MaildirMailbox>()
|
||||
.unwrap()
|
||||
.fs_path(),
|
||||
&root_mailbox.join("Archive2")
|
||||
);
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
// Assert that backend has all the created mailboxes so far
|
||||
assert_eq!(
|
||||
ref_mailboxes.len(),
|
||||
2,
|
||||
"mailboxes() return value content not what expected: {:?}",
|
||||
ref_mailboxes
|
||||
);
|
||||
// Assert that giving an absolute path returns an error
|
||||
assert_eq!(
|
||||
&smol::block_on(backend.create_mailbox("/Archive3".to_string()).unwrap())
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
"Path given (`/Archive3`) is absolute. Please provide a path relative to the \
|
||||
account's root mailbox."
|
||||
);
|
||||
// Assert that attempting to create a mailbox outside of the root mailbox
|
||||
// returns an error
|
||||
assert_eq!(
|
||||
&smol::block_on(
|
||||
backend
|
||||
.create_mailbox(temp_dir.path().join("Archive3").display().to_string())
|
||||
.unwrap()
|
||||
)
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
&format!(
|
||||
"Path given, `{}`, is not included in the root mailbox path `{}`. A maildir \
|
||||
backend cannot contain mailboxes outside of its root path.",
|
||||
temp_dir.path().join("Archive3").display(),
|
||||
root_mailbox.display(),
|
||||
)
|
||||
);
|
||||
|
||||
std::fs::remove_dir_all(root_mailbox).unwrap();
|
||||
}
|
||||
|
||||
// Perform the same tests on a maildir backend where the root mailbox is a valid
// maildir mailbox (e.g. has cur, new, tmp sub directories).
|
||||
{
|
||||
let (root_mailbox, _settings, maildir) =
|
||||
new_maildir_backend(&temp_dir, "maildir", backend_event_consumer, true).unwrap();
|
||||
assert!(maildir.config.is_root_a_mailbox);
|
||||
// Assert that giving a file system path to MaildirBox::new is valid
|
||||
let new_mailbox = MaildirMailbox::new(
|
||||
root_mailbox.join("Archive").display().to_string(),
|
||||
"Archive".into(),
|
||||
None,
|
||||
vec![],
|
||||
true,
|
||||
&maildir.config,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(new_mailbox.name, "Archive");
|
||||
assert_eq!(&new_mailbox.path, &Path::new("INBOX/Archive"));
|
||||
assert_eq!(&new_mailbox.fs_path, &root_mailbox.join("Archive"));
|
||||
// Assert that giving a mailbox path to MaildirBox::new is valid
|
||||
let new_mailbox = MaildirMailbox::new(
|
||||
"INBOX/Archive".into(),
|
||||
"Archive".into(),
|
||||
None,
|
||||
vec![],
|
||||
true,
|
||||
&maildir.config,
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(new_mailbox.name, "Archive");
|
||||
assert_eq!(&new_mailbox.path, &Path::new("INBOX/Archive"));
|
||||
assert_eq!(&new_mailbox.fs_path, &root_mailbox.join("Archive"));
|
||||
let mut backend = maildir as Box<dyn MailBackend>;
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
// Assert that backend has only INBOX as a mailbox
|
||||
assert_eq!(
|
||||
ref_mailboxes.len(),
|
||||
1,
|
||||
"ref_mailboxes is not just INBOX: {:?}",
|
||||
ref_mailboxes
|
||||
);
|
||||
// Assert that creating a mailbox without the root mailbox as a prefix does the
|
||||
// correct thing.
|
||||
let (new_hash, ref_mailboxes) =
|
||||
smol::block_on(backend.create_mailbox("Archive".into()).unwrap()).unwrap();
|
||||
assert_eq!(ref_mailboxes[&new_hash].name(), "Archive");
|
||||
assert_eq!(ref_mailboxes[&new_hash].path(), "INBOX/Archive");
|
||||
assert_eq!(
|
||||
ref_mailboxes[&new_hash]
|
||||
.as_any()
|
||||
.downcast_ref::<MaildirMailbox>()
|
||||
.unwrap()
|
||||
.fs_path(),
|
||||
&root_mailbox.join("Archive")
|
||||
);
|
||||
// Assert that creating a mailbox with the root mailbox as a prefix does the
|
||||
// correct thing.
|
||||
let (new_hash, ref_mailboxes) =
|
||||
smol::block_on(backend.create_mailbox("INBOX/Archive2".into()).unwrap()).unwrap();
|
||||
assert_eq!(ref_mailboxes[&new_hash].name(), "Archive2");
|
||||
assert_eq!(ref_mailboxes[&new_hash].path(), "INBOX/Archive2");
|
||||
assert_eq!(
|
||||
ref_mailboxes[&new_hash]
|
||||
.as_any()
|
||||
.downcast_ref::<MaildirMailbox>()
|
||||
.unwrap()
|
||||
.fs_path(),
|
||||
&root_mailbox.join("Archive2")
|
||||
);
|
||||
// Assert that even if we accidentally give a file system path to a maildir
|
||||
// backend's create_mailbox() method, it still does the correct thing.
|
||||
let (new_hash, ref_mailboxes) = smol::block_on(
|
||||
backend
|
||||
.create_mailbox(root_mailbox.join("Archive3").display().to_string())
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(ref_mailboxes[&new_hash].name(), "Archive3");
|
||||
assert_eq!(ref_mailboxes[&new_hash].path(), "INBOX/Archive3");
|
||||
assert_eq!(
|
||||
ref_mailboxes[&new_hash]
|
||||
.as_any()
|
||||
.downcast_ref::<MaildirMailbox>()
|
||||
.unwrap()
|
||||
.fs_path(),
|
||||
&root_mailbox.join("Archive3")
|
||||
);
|
||||
let ref_mailboxes = smol::block_on(backend.mailboxes().unwrap()).unwrap();
|
||||
// Assert that backend has all the created mailboxes so far
|
||||
assert_eq!(
|
||||
ref_mailboxes.len(),
|
||||
4,
|
||||
"mailboxes() return value content not what expected: {:?}",
|
||||
ref_mailboxes
|
||||
);
|
||||
// Assert that giving an absolute path returns an error
|
||||
assert_eq!(
|
||||
&smol::block_on(backend.create_mailbox("/Archive4".to_string()).unwrap())
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
"Path given (`/Archive4`) is absolute. Please provide a path relative to the \
|
||||
account's root mailbox."
|
||||
);
|
||||
// Assert that attempting to create a mailbox outside of the root mailbox
|
||||
// returns an error
|
||||
assert_eq!(
|
||||
&smol::block_on(
|
||||
backend
|
||||
.create_mailbox(temp_dir.path().join("Archive4").display().to_string())
|
||||
.unwrap()
|
||||
)
|
||||
.unwrap_err()
|
||||
.to_string(),
|
||||
&format!(
|
||||
"Path given, `{}`, is not included in the root mailbox path `{}`. A maildir \
|
||||
backend cannot contain mailboxes outside of its root path.",
|
||||
temp_dir.path().join("Archive4").display(),
|
||||
root_mailbox.display(),
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -240,6 +240,14 @@ impl BackendMailbox for MboxMailbox {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
Ok((*self.unseen.lock()?, *self.total.lock()?))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// `BackendOp` implementor for Mbox
|
||||
|
@ -730,7 +738,7 @@ pub struct MessageIterator<'a> {
|
|||
pub format: Option<MboxFormat>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for MessageIterator<'a> {
|
||||
impl Iterator for MessageIterator<'_> {
|
||||
type Item = Result<Envelope>;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.input.is_empty() {
|
||||
|
@ -1243,7 +1251,7 @@ impl MailBackend for MboxType {
|
|||
&self,
|
||||
_query: crate::search::Query,
|
||||
_mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
Err(Error::new("Search is unimplemented for the mbox backend.")
|
||||
.set_kind(ErrorKind::NotImplemented))
|
||||
}
|
||||
|
|
|
@ -97,4 +97,12 @@ impl BackendMailbox for NntpMailbox {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
Ok((self.unseen.lock()?.len(), self.exists.lock()?.len()))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
@ -574,7 +574,7 @@ impl MailBackend for NntpType {
|
|||
&self,
|
||||
_query: crate::search::Query,
|
||||
_mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
Err(Error::new("Searching is not supported for nntp backend.")
|
||||
.set_kind(ErrorKind::NotSupported))
|
||||
}
|
||||
|
|
|
@@ -33,7 +33,7 @@ pub struct NntpLineIterator<'a> {
slice: &'a str,
}

impl<'a> std::iter::DoubleEndedIterator for NntpLineIterator<'a> {
impl std::iter::DoubleEndedIterator for NntpLineIterator<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
if self.slice.is_empty() {
None
@ -104,4 +104,12 @@ impl BackendMailbox for NotmuchMailbox {
|
|||
fn count(&self) -> Result<(usize, usize)> {
|
||||
Ok((*self.unseen.lock()?, *self.total.lock()?))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
@ -954,7 +954,7 @@ impl MailBackend for NotmuchDb {
|
|||
&self,
|
||||
melib_query: crate::search::Query,
|
||||
mailbox_hash: Option<MailboxHash>,
|
||||
) -> ResultFuture<SmallVec<[EnvelopeHash; 512]>> {
|
||||
) -> ResultFuture<Vec<EnvelopeHash>> {
|
||||
let database = Self::new_connection(
|
||||
self.path.as_path(),
|
||||
self.revision_uuid.clone(),
|
||||
|
@ -963,7 +963,6 @@ impl MailBackend for NotmuchDb {
|
|||
)?;
|
||||
let mailboxes = self.mailboxes.clone();
|
||||
Ok(Box::pin(async move {
|
||||
let mut ret = SmallVec::new();
|
||||
let mut query_s = if let Some(mailbox_hash) = mailbox_hash {
|
||||
if let Some(m) = mailboxes.read().unwrap().get(&mailbox_hash) {
|
||||
let mut s = m.query_str.clone();
|
||||
|
@ -981,12 +980,7 @@ impl MailBackend for NotmuchDb {
|
|||
};
|
||||
melib_query.query_to_string(&mut query_s)?;
|
||||
let query: Query = Query::new(&database, &query_s)?;
|
||||
let iter = query.search()?;
|
||||
for message in iter {
|
||||
ret.push(message.env_hash());
|
||||
}
|
||||
|
||||
Ok(ret)
|
||||
Ok(query.search()?.map(|message| message.env_hash()).collect())
|
||||
}))
|
||||
}
Some files were not shown because too many files have changed in this diff.