Compare commits

...

18 Commits

Author SHA1 Message Date
Paolo Barbolini
1ec1b705c9 Prepare 0.10.0-rc.7 (#777) 2022-06-04 11:47:50 +02:00
Paolo Barbolini
e4006518fe Stop using the regex crate for parsing addresses (#776) 2022-06-03 13:39:57 +00:00
Paolo Barbolini
b33dd562fc Fix and improve header wrapping (#774)
Instead of injecting spaces to ensure that lines stay under 76 characters, only wrap at whitespace characters. This avoids altering the header contents.

A best-effort attempt to keep lines under 76 characters is still made, but only at existing whitespace. Notably, no hard wrap is enforced, so it is possible for headers to exceed the 1000 character line-length limit in the specification. In that case it is simply hoped that the receiver will accept long lines.

Closes #688

Co-authored-by: Kevin Cox <kevincox@kevincox.ca>
2022-06-03 15:24:53 +02:00
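For illustration, here is a minimal sketch using the public builder API; the folded output in the comments mirrors the updated header tests further down in this diff, though the exact wrapping points are illustrative:

```rust
use lettre::Message;

fn main() {
    let email = Message::builder()
        .from("Test <test@example.net>".parse().unwrap())
        .to("Test2 <test2@example.org>".parse().unwrap())
        .subject(
            "Hello! This is lettre, and this \
             IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. \
             Ok I guess that's it!",
        )
        .body(String::from("A test body"))
        .unwrap();

    // The Subject is now folded only at existing spaces:
    //
    //   Subject: Hello! This is lettre, and this
    //    IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. Ok I
    //    guess that's it!
    //
    // and no padding spaces are injected into the value itself.
    println!("{}", String::from_utf8(email.formatted()).unwrap());
}
```
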
Paolo Barbolini
65958df14f Use pretty_assertions for all message tests (#775) 2022-06-02 12:20:06 +00:00
James Hillyerd
50628af5fd README.md: Use IPv4 notation for localhost (#771) 2022-05-30 17:20:51 +00:00
Paolo Barbolini
cf858cc682 Move most email body encoding to email-encoding (#769) 2022-05-30 15:12:42 +00:00
Paolo Barbolini
f9a4b5ba89 Work around async-global-executor bumping MSRV too early (#773) 2022-05-30 15:01:40 +00:00
Jacob Halsey
1391a834ce #715: Support setting the local IP address to connect from (#762)
This adds a `local_address: Option<IpAddr>` parameter to the synchronous and tokio connect functions.

(As far as I can see, there is currently no way to support this with async-std, because the library doesn't provide a way to do an async connect on an existing socket.)
2022-05-29 07:05:39 +00:00
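Under the hood this relies on the newly added socket2 dependency (see the Cargo.toml hunk further down). Roughly, the connection setup now works along these lines — a simplified sketch with a hypothetical `connect_from` helper, not the actual lettre internals:

```rust
use std::net::{IpAddr, SocketAddr};

use socket2::{Domain, Protocol, Socket, Type};

/// Hypothetical helper: connect to `server`, optionally binding the outgoing
/// socket to `local_address` first (port 0 lets the OS pick any free port).
fn connect_from(server: SocketAddr, local_address: Option<IpAddr>) -> std::io::Result<Socket> {
    let domain = if server.is_ipv4() { Domain::IPV4 } else { Domain::IPV6 };
    let socket = Socket::new(domain, Type::STREAM, Some(Protocol::TCP))?;
    if let Some(ip) = local_address {
        socket.bind(&SocketAddr::new(ip, 0).into())?;
    }
    socket.connect(&server.into())?;
    Ok(socket)
}
```
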
André Cruz
e6b4529896 use email_address crate for checking formats (#763)
The email_address crate is stricter when validating the user and domain
parts of email addresses. For example, it verifies the total size of the
local part, which the current method does not; this has caused upstream
servers to reject emails.
2022-05-26 19:21:14 +02:00
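The checks now delegate to email_address (see the address.rs hunk further down). A minimal sketch of the difference, using the crate's `is_valid_*` helpers that the new code calls:

```rust
use email_address::EmailAddress;

fn main() {
    // The old regex accepted arbitrarily long local parts; email_address
    // enforces the size limits, so over-long users are now rejected.
    let too_long = "a".repeat(100);
    assert!(!EmailAddress::is_valid_local_part(&too_long));

    assert!(EmailAddress::is_valid_local_part("something"));
    assert!(EmailAddress::is_valid_domain("example.com"));
}
```
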
Kevin Cox
ca5cb3f8f7 Fix encoded header signing. (#765)
The header needs to be properly formatted so that Unicode characters are encoded the same way they will be in the final message. Previously, the logical (unencoded) header value was the one being signed.

A notable example is a `'` in the `To:` header, which needs to be encoded; this was previously being done incorrectly.
2022-05-26 07:44:14 +02:00
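In other words, the DKIM hashes must cover the header exactly as it is serialized into the outgoing message. A tiny sketch of the distinction, using values taken from the updated dkim tests further down:

```rust
fn main() {
    // Logical From value, as set through the builder:
    let raw = "Test O'Leary <test+ezrz@example.net>";
    // What is actually written into the message (RFC 2047 encoded):
    let encoded = "=?utf-8?b?VGVzdCBPJ0xlYXJ5?= <test+ezrz@example.net>";

    // Signing the logical value produces a signature that doesn't match the
    // message the receiver sees; the fix signs the encoded form instead.
    assert_ne!(raw, encoded);
}
```
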
Kevin Cox
1e2279457e Add editorconfig file. (#766)
Makes it easy for everyone to use the preferred settings.

https://editorconfig.org/
2022-05-18 13:07:38 +00:00
Kevin Cox
961364cc29 Remove unnecessary clone. (#767)
This is backwards-incompatible, but hopefully an acceptable change for a pre-release. The upgrade path is straightforward.
2022-05-18 10:51:02 +02:00
Paolo Barbolini
b0db759e5f Prepare 0.10.0-rc.6 (#761) 2022-04-29 15:59:36 +02:00
Paolo Barbolini
5daf5d397a Fix parsing Mailboxes with a comma in the name (#760) 2022-04-26 12:18:12 +02:00
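The fix is exercised by the new mailbox tests further down in this diff; in terms of the public API it behaves roughly like the sketch below (method names are approximate):

```rust
use lettre::message::Mailboxes;

fn main() {
    // Display names may themselves contain commas; the parser no longer
    // splits blindly on every ',' but retries at the next comma instead.
    let mailboxes: Mailboxes = "Test, test <1@example.com>, Test2, test2 <2@example.com>"
        .parse()
        .unwrap();
    assert_eq!(mailboxes.into_iter().count(), 2);
}
```
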
Paolo Barbolini
3f1647fa48 Bump dependencies (#759) 2022-04-25 09:17:58 +00:00
Paolo Barbolini
fd106d9b0c Bump rsa crate to the final 0.6.0 release (#758) 2022-04-14 09:39:30 +00:00
Vincent Breitmoser
c1d37d54b4 Use +0000 timezone format in Date header (#756)
Since the Date we emit is UTC, it's correct to use "+0000". The
previously used -0000 timezone indicator means "no timezone info".
2022-04-10 08:34:50 +02:00
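A small sketch of the visible effect, reusing the timestamp from the date.rs tests further down (builder calls as in the README example):

```rust
use std::time::{Duration, SystemTime};

use lettre::Message;

fn main() {
    let email = Message::builder()
        .from("Test <test@example.net>".parse().unwrap())
        .to("Test2 <test2@example.org>".parse().unwrap())
        // Tue, 15 Nov 1994 08:12:31 UTC, as used in the Date header tests
        .date(SystemTime::UNIX_EPOCH + Duration::from_secs(784_887_151))
        .subject("Date format")
        .body(String::from("test"))
        .unwrap();

    let formatted = String::from_utf8(email.formatted()).unwrap();
    // The Date header now reads "Tue, 15 Nov 1994 08:12:31 +0000"
    // instead of ending in the "-0000" (no timezone info) marker.
    assert!(formatted.contains("+0000"));
}
```
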
David Krasnitsky
efa0d58778 Improve compiler error messages (#754) 2022-04-07 05:03:28 +00:00
29 changed files with 694 additions and 624 deletions

.editorconfig (new file)

@@ -0,0 +1,8 @@
root = true
[*]
insert_final_newline = true
[*.rs]
indent_size = 4
indent_style = space


@@ -124,6 +124,9 @@ jobs:
- name: Install dkimverify
run: sudo apt -y install python3-dkim
- name: Work around early dependencies MSRV bump
run: cargo update -p async-global-executor --precise 2.0.4
- name: Test with no default features
run: cargo test --no-default-features


@@ -1,7 +1,7 @@
[package]
name = "lettre"
# remember to update html_root_url and README.md (Cargo.toml example and deps.rs badge)
version = "0.10.0-rc.5"
version = "0.10.0-rc.7"
description = "Email client"
readme = "README.md"
homepage = "https://lettre.rs"
@@ -20,7 +20,7 @@ maintenance = { status = "actively-developed" }
[dependencies]
idna = "0.2"
once_cell = "1"
once_cell = { version = "1", optional = true }
tracing = { version = "0.1.16", default-features = false, features = ["std"], optional = true } # feature
# builder
@@ -29,22 +29,22 @@ mime = { version = "0.3.4", optional = true }
fastrand = { version = "1.4", optional = true }
quoted_printable = { version = "0.4", optional = true }
base64 = { version = "0.13", optional = true }
regex = { version = "1", default-features = false, features = ["std", "unicode-case"] }
email-encoding = { version = "0.1", optional = true }
email-encoding = { version = "0.1.1", optional = true }
# file transport
uuid = { version = "0.8", features = ["v4"], optional = true }
uuid = { version = "1", features = ["v4"], optional = true }
serde = { version = "1", optional = true, features = ["derive"] }
serde_json = { version = "1", optional = true }
# smtp
# smtp-transport
nom = { version = "7", optional = true }
hostname = { version = "0.3", optional = true } # feature
socket2 = { version = "0.4.4", optional = true }
## tls
native-tls = { version = "0.2", optional = true } # feature
rustls = { version = "0.20", features = ["dangerous_configuration"], optional = true }
rustls-pemfile = { version = "0.3", optional = true }
rustls-pemfile = { version = "1", optional = true }
webpki-roots = { version = "0.22", optional = true }
# async
@@ -64,10 +64,15 @@ tokio1_rustls = { package = "tokio-rustls", version = "0.23", optional = true }
## dkim
sha2 = { version = "0.10", optional = true }
rsa = { version = "0.6.0-pre", optional = true }
rsa = { version = "0.6.0", optional = true }
ed25519-dalek = { version = "1.0.1", optional = true }
regex = { version = "1", default-features = false, features = ["std"], optional = true }
# email formats
email_address = { version = "0.2.1", default-features = false }
[dev-dependencies]
pretty_assertions = "1"
criterion = "0.3"
tracing-subscriber = "0.3"
glob = "0.3"
@@ -75,7 +80,7 @@ walkdir = "2"
tokio1_crate = { package = "tokio", version = "1", features = ["macros", "rt-multi-thread"] }
async-std = { version = "1.8", features = ["attributes"] }
serde_json = "1"
maud = "0.22.1"
maud = "0.23"
[[bench]]
harness = false
@@ -83,14 +88,14 @@ name = "transport_smtp"
[features]
default = ["smtp-transport", "pool", "native-tls", "hostname", "builder"]
builder = ["httpdate", "mime", "base64", "fastrand", "quoted_printable", "email-encoding"]
builder = ["httpdate", "mime", "fastrand", "quoted_printable", "email-encoding"]
mime03 = ["mime"]
# transports
file-transport = ["uuid"]
file-transport-envelope = ["serde", "serde_json", "file-transport"]
sendmail-transport = []
smtp-transport = ["base64", "nom"]
smtp-transport = ["base64", "nom", "socket2", "once_cell"]
pool = ["futures-util"]
@@ -104,7 +109,7 @@ tokio1 = ["tokio1_crate", "async-trait", "futures-io", "futures-util"]
tokio1-native-tls = ["tokio1", "native-tls", "tokio1_native_tls_crate"]
tokio1-rustls-tls = ["tokio1", "rustls-tls", "tokio1_rustls"]
dkim = ["sha2", "rsa", "ed25519-dalek"]
dkim = ["base64", "sha2", "rsa", "ed25519-dalek", "regex", "once_cell"]
[package.metadata.docs.rs]
all-features = true


@@ -28,8 +28,8 @@
</div>
<div align="center">
<a href="https://deps.rs/crate/lettre/0.10.0-rc.5">
<img src="https://deps.rs/crate/lettre/0.10.0-rc.5/status.svg"
<a href="https://deps.rs/crate/lettre/0.10.0-rc.7">
<img src="https://deps.rs/crate/lettre/0.10.0-rc.7/status.svg"
alt="dependency status" />
</a>
</div>
@@ -71,7 +71,7 @@ To use this library, add the following to your `Cargo.toml`:
```toml
[dependencies]
lettre = "0.10.0-rc.5"
lettre = "0.10.0-rc.7"
```
```rust,no_run
@@ -104,7 +104,7 @@ match mailer.send(&email) {
## Testing
The `lettre` tests require an open mail server listening locally on port 2525 and the `sendmail` command. If you have python installed
such a server can be launched with `python -m smtpd -n -c DebuggingServer localhost:2525`
such a server can be launched with `python -m smtpd -n -c DebuggingServer 127.0.0.1:2525`
Alternatively only unit tests can be run by doing `cargo test --lib`.


@@ -8,9 +8,8 @@ use std::{
str::FromStr,
};
use email_address::EmailAddress;
use idna::domain_to_ascii;
use once_cell::sync::Lazy;
use regex::Regex;
/// Represents an email address with a user and a domain name.
///
@@ -55,20 +54,6 @@ pub struct Address {
at_start: usize,
}
// Regex from the specs
// https://html.spec.whatwg.org/multipage/forms.html#valid-e-mail-address
// It will mark esoteric email addresses like quoted string as invalid
static USER_RE: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^(?i)[a-z0-9.!#$%&'*+/=?^_`{|}~-]+\z").unwrap());
static DOMAIN_RE: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?i)^[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$",
)
.unwrap()
});
// literal form, ipv4 or ipv6 address (SMTP 4.1.3)
static LITERAL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)\[([A-f0-9:\.]+)\]\z").unwrap());
impl Address {
/// Creates a new email address from a user and domain.
///
@@ -126,7 +111,7 @@ impl Address {
}
pub(super) fn check_user(user: &str) -> Result<(), AddressError> {
if USER_RE.is_match(user) {
if EmailAddress::is_valid_local_part(user) {
Ok(())
} else {
Err(AddressError::InvalidUser)
@@ -142,16 +127,19 @@ impl Address {
}
fn check_domain_ascii(domain: &str) -> Result<(), AddressError> {
if DOMAIN_RE.is_match(domain) {
// Domain
if EmailAddress::is_valid_domain(domain) {
return Ok(());
}
if let Some(caps) = LITERAL_RE.captures(domain) {
if let Some(cap) = caps.get(1) {
if cap.as_str().parse::<IpAddr>().is_ok() {
return Ok(());
}
}
// IP
let ip = domain
.strip_prefix('[')
.and_then(|ip| ip.strip_suffix(']'))
.unwrap_or(domain);
if ip.parse::<IpAddr>().is_ok() {
return Ok(());
}
Err(AddressError::InvalidDomain)
@@ -269,7 +257,7 @@ mod tests {
use super::*;
#[test]
fn parse_address() {
fn ascii_address() {
let addr_str = "something@example.com";
let addr = Address::from_str(addr_str).unwrap();
let addr2 = Address::new("something", "example.com").unwrap();
@@ -279,4 +267,36 @@ mod tests {
assert_eq!(addr2.user(), "something");
assert_eq!(addr2.domain(), "example.com");
}
#[test]
fn ascii_address_ipv4() {
let addr_str = "something@1.1.1.1";
let addr = Address::from_str(addr_str).unwrap();
let addr2 = Address::new("something", "1.1.1.1").unwrap();
assert_eq!(addr, addr2);
assert_eq!(addr.user(), "something");
assert_eq!(addr.domain(), "1.1.1.1");
assert_eq!(addr2.user(), "something");
assert_eq!(addr2.domain(), "1.1.1.1");
}
#[test]
fn ascii_address_ipv6() {
let addr_str = "something@[2606:4700:4700::1111]";
let addr = Address::from_str(addr_str).unwrap();
let addr2 = Address::new("something", "[2606:4700:4700::1111]").unwrap();
assert_eq!(addr, addr2);
assert_eq!(addr.user(), "something");
assert_eq!(addr.domain(), "[2606:4700:4700::1111]");
assert_eq!(addr2.user(), "something");
assert_eq!(addr2.domain(), "[2606:4700:4700::1111]");
}
#[test]
fn check_parts() {
assert!(Address::check_user("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa").is_err());
assert!(
Address::check_domain("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.com").is_err()
);
}
}


@@ -146,6 +146,7 @@ impl Executor for Tokio1Executor {
timeout,
hello_name,
tls_parameters,
None,
)
.await?;


@@ -100,7 +100,7 @@
//! [mime 0.3]: https://docs.rs/mime/0.3
//! [DKIM]: https://datatracker.ietf.org/doc/html/rfc6376
#![doc(html_root_url = "https://docs.rs/crate/lettre/0.10.0-rc.5")]
#![doc(html_root_url = "https://docs.rs/crate/lettre/0.10.0-rc.7")]
#![doc(html_favicon_url = "https://lettre.rs/favicon.ico")]
#![doc(html_logo_url = "https://avatars0.githubusercontent.com/u/15113230?v=4")]
#![forbid(unsafe_code)]
@@ -124,8 +124,8 @@ mod compiletime_checks {
not(feature = "tokio1-native-tls")
))]
compile_error!("Lettre is being built with the `tokio1` and the `native-tls` features, but the `tokio1-native-tls` feature hasn't been turned on.
If you'd like to use rustls make sure that the `native-tls` hasn't been enabled by mistake (you may need to import lettre without default features)
If you're building a library which depends on lettre import it without default features and enable just the features you need.");
If you were trying to opt into `rustls-tls` and did not activate `native-tls`, disable the default-features of lettre in `Cargo.toml` and manually add the required features.
Make sure to apply the same to any of your crate dependencies that use the `lettre` crate.");
#[cfg(all(
feature = "tokio1",
@@ -133,8 +133,8 @@ If you're building a library which depends on lettre import it without default f
not(feature = "tokio1-rustls-tls")
))]
compile_error!("Lettre is being built with the `tokio1` and the `rustls-tls` features, but the `tokio1-rustls-tls` feature hasn't been turned on.
If you'd like to use native-tls make sure that the `rustls-tls` hasn't been enabled by mistake.
If you're building a library which depends on lettre import it without default features and enable just the features you need.");
If you'd like to use `native-tls` make sure that the `rustls-tls` feature hasn't been enabled by mistake.
Make sure to apply the same to any of your crate dependencies that use the `lettre` crate.");
/*
#[cfg(all(
@@ -153,8 +153,8 @@ If you're building a library which depends on lettre import it without default f
))]
compile_error!("Lettre is being built with the `async-std1` and the `native-tls` features, but the async-std integration doesn't support native-tls yet.
If you'd like to work on the issue please take a look at https://github.com/lettre/lettre/issues/576.
If you'd like to use rustls make sure that the `native-tls` hasn't been enabled by mistake (you may need to import lettre without default features)
If you're building a library which depends on lettre import lettre without default features and enable just the features you need.");
If you were trying to opt into `rustls-tls` and did not activate `native-tls`, disable the default-features of lettre in `Cargo.toml` and manually add the required features.
Make sure to apply the same to any of your crate dependencies that use the `lettre` crate.");
#[cfg(all(
feature = "async-std1",
@@ -162,8 +162,8 @@ If you're building a library which depends on lettre import lettre without defau
not(feature = "async-std1-rustls-tls")
))]
compile_error!("Lettre is being built with the `async-std1` and the `rustls-tls` features, but the `async-std1-rustls-tls` feature hasn't been turned on.
If you'd like to use native-tls make sure that the `rustls-tls` hasn't been enabled by mistake (you may need to import lettre without default features)
If you're building a library which depends on lettre import it without default features and enable just the features you need.");
If you'd like to use `native-tls` make sure that the `rustls-tls` hasn't been enabled by mistake.
Make sure to apply the same to any of your crate dependencies that use the `lettre` crate.");
}
pub mod address;


@@ -1,8 +1,4 @@
use std::{
io::{self, Write},
mem,
ops::Deref,
};
use std::{mem, ops::Deref};
use crate::message::header::ContentTransferEncoding;
@@ -41,7 +37,7 @@ impl Body {
pub fn new<B: Into<MaybeString>>(buf: B) -> Self {
let mut buf: MaybeString = buf.into();
let encoding = buf.encoding();
let encoding = buf.encoding(false);
buf.encode_crlf();
Self::new_impl(buf.into(), encoding)
}
@@ -61,7 +57,22 @@ impl Body {
) -> Result<Self, Vec<u8>> {
let mut buf: MaybeString = buf.into();
if !buf.is_encoding_ok(encoding) {
let best_encoding = buf.encoding(true);
let ok = match (encoding, best_encoding) {
(ContentTransferEncoding::SevenBit, ContentTransferEncoding::SevenBit) => true,
(
ContentTransferEncoding::EightBit,
ContentTransferEncoding::SevenBit | ContentTransferEncoding::EightBit,
) => true,
(ContentTransferEncoding::SevenBit | ContentTransferEncoding::EightBit, _) => false,
(
ContentTransferEncoding::QuotedPrintable
| ContentTransferEncoding::Base64
| ContentTransferEncoding::Binary,
_,
) => true,
};
if !ok {
return Err(buf.into());
}
@@ -91,36 +102,13 @@ impl Body {
Self::dangerous_pre_encoded(encoded, ContentTransferEncoding::QuotedPrintable)
}
ContentTransferEncoding::Base64 => {
let base64_len = buf.len() * 4 / 3 + 4;
let base64_endings_len = base64_len + base64_len / LINE_MAX_LENGTH;
let len = email_encoding::body::base64::encoded_len(buf.len());
let mut out = Vec::with_capacity(base64_endings_len);
{
let writer = LineWrappingWriter::new(&mut out, LINE_MAX_LENGTH);
let mut writer = base64::write::EncoderWriter::new(writer, base64::STANDARD);
let mut out = String::with_capacity(len);
email_encoding::body::base64::encode(&buf, &mut out)
.expect("encode body as base64");
// TODO: use writer.write_all(self.as_ref()).expect("base64 encoding never fails");
// modified Write::write_all to work around base64 crate bug
// TODO: remove once https://github.com/marshallpierce/rust-base64/issues/148 is fixed
{
let mut buf: &[u8] = buf.as_ref();
while !buf.is_empty() {
match writer.write(buf) {
Ok(0) => {
// ignore 0 writes
}
Ok(n) => {
buf = &buf[n..];
}
Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {}
Err(e) => panic!("base64 encoding never fails: {}", e),
}
}
}
}
Self::dangerous_pre_encoded(out, ContentTransferEncoding::Base64)
Self::dangerous_pre_encoded(out.into_bytes(), ContentTransferEncoding::Base64)
}
}
}
@@ -153,21 +141,20 @@ impl Body {
impl MaybeString {
/// Suggests the best `Content-Transfer-Encoding` to be used for this `MaybeString`
///
/// If the `MaybeString` was created from a `String` composed only of US-ASCII
/// characters, with no lines longer than 1000 characters, then 7bit
/// encoding will be used, else quoted-printable will be chosen.
///
/// If the `MaybeString` was instead created from a `Vec<u8>`, base64 encoding is always
/// chosen.
///
/// `8bit` and `binary` encodings are never returned, as they may not be
/// supported by all SMTP servers.
pub fn encoding(&self) -> ContentTransferEncoding {
match &self {
Self::String(s) if is_7bit_encoded(s.as_ref()) => ContentTransferEncoding::SevenBit,
// TODO: consider when base64 would be a better option because of output size
Self::String(_) => ContentTransferEncoding::QuotedPrintable,
Self::Binary(_) => ContentTransferEncoding::Base64,
/// The `binary` encoding is never returned
fn encoding(&self, supports_utf8: bool) -> ContentTransferEncoding {
use email_encoding::body::Encoding;
let output = match self {
Self::String(s) => Encoding::choose(s.as_str(), supports_utf8),
Self::Binary(b) => Encoding::choose(b.as_slice(), supports_utf8),
};
match output {
Encoding::SevenBit => ContentTransferEncoding::SevenBit,
Encoding::EightBit => ContentTransferEncoding::EightBit,
Encoding::QuotedPrintable => ContentTransferEncoding::QuotedPrintable,
Encoding::Base64 => ContentTransferEncoding::Base64,
}
}
@@ -178,18 +165,6 @@ impl MaybeString {
Self::Binary(_) => {}
}
}
/// Returns `true` if using `encoding` to encode this `MaybeString`
/// would result into an invalid encoded body.
fn is_encoding_ok(&self, encoding: ContentTransferEncoding) -> bool {
match encoding {
ContentTransferEncoding::SevenBit => is_7bit_encoded(self),
ContentTransferEncoding::EightBit => is_8bit_encoded(self),
ContentTransferEncoding::Binary
| ContentTransferEncoding::QuotedPrintable
| ContentTransferEncoding::Base64 => true,
}
}
}
/// A trait for something that takes an encoded [`Body`].
@@ -273,73 +248,6 @@ impl Deref for MaybeString {
}
}
/// Checks whether it contains only US-ASCII characters,
/// and no lines are longer than 1000 characters including the `\n` character.
///
/// Most efficient content encoding available
fn is_7bit_encoded(buf: &[u8]) -> bool {
buf.is_ascii() && !contains_too_long_lines(buf)
}
/// Checks that no lines are longer than 1000 characters,
/// including the `\n` character.
/// NOTE: 8bit isn't supported by all SMTP servers.
fn is_8bit_encoded(buf: &[u8]) -> bool {
!contains_too_long_lines(buf)
}
/// Checks if there are lines that are longer than 1000 characters,
/// including the `\n` character.
fn contains_too_long_lines(buf: &[u8]) -> bool {
buf.len() > 1000 && buf.split(|&b| b == b'\n').any(|line| line.len() > 999)
}
const LINE_SEPARATOR: &[u8] = b"\r\n";
const LINE_MAX_LENGTH: usize = 78 - LINE_SEPARATOR.len();
/// A `Write`r that inserts a line separator `\r\n` every `max_line_length` bytes.
struct LineWrappingWriter<'a, W> {
writer: &'a mut W,
current_line_length: usize,
max_line_length: usize,
}
impl<'a, W> LineWrappingWriter<'a, W> {
pub fn new(writer: &'a mut W, max_line_length: usize) -> Self {
Self {
writer,
current_line_length: 0,
max_line_length,
}
}
}
impl<'a, W> Write for LineWrappingWriter<'a, W>
where
W: Write,
{
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let remaining_line_len = self.max_line_length - self.current_line_length;
let write_len = std::cmp::min(buf.len(), remaining_line_len);
self.writer.write_all(&buf[..write_len])?;
if remaining_line_len == write_len {
self.writer.write_all(LINE_SEPARATOR)?;
self.current_line_length = 0;
} else {
self.current_line_length += write_len;
}
Ok(write_len)
}
fn flush(&mut self) -> io::Result<()> {
self.writer.flush()
}
}
/// In place conversion to CRLF line endings
fn in_place_crlf_line_endings(string: &mut String) {
let indices = find_all_lf_char_indices(string);
@@ -377,6 +285,8 @@ fn find_all_lf_char_indices(s: &str) -> Vec<usize> {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::{in_place_crlf_line_endings, Body, ContentTransferEncoding};
#[test]
@@ -509,13 +419,10 @@ mod test {
#[test]
fn quoted_printable_detect() {
let encoded = Body::new(String::from("Привет, мир!"));
let encoded = Body::new(String::from("Questo messaggio è corto"));
assert_eq!(encoded.encoding(), ContentTransferEncoding::QuotedPrintable);
assert_eq!(
encoded.as_ref(),
b"=D0=9F=D1=80=D0=B8=D0=B2=D0=B5=D1=82, =D0=BC=D0=B8=D1=80!".as_ref()
);
assert_eq!(encoded.as_ref(), b"Questo messaggio =C3=A8 corto");
}
#[test]
@@ -547,14 +454,17 @@ mod test {
#[test]
fn quoted_printable_encode_line_wrap() {
let encoded = Body::new(String::from("Текст письма в уникоде"));
let encoded = Body::new(String::from(
"Se lo standard 📬 fosse stato più semplice avremmo finito molto prima.",
));
assert_eq!(encoded.encoding(), ContentTransferEncoding::QuotedPrintable);
println!("{}", std::str::from_utf8(encoded.as_ref()).unwrap());
assert_eq!(
encoded.as_ref(),
concat!(
"=D0=A2=D0=B5=D0=BA=D1=81=D1=82 =D0=BF=D0=B8=D1=81=D1=8C=D0=BC=D0=B0 =D0=B2 =\r\n",
"=D1=83=D0=BD=D0=B8=D0=BA=D0=BE=D0=B4=D0=B5"
"Se lo standard =F0=9F=93=AC fosse stato pi=C3=B9 semplice avremmo finito mo=\r\n",
"lto prima."
)
.as_bytes()
);
@@ -562,21 +472,25 @@ mod test {
#[test]
fn base64_detect() {
let input = Body::new(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
let input = Body::new(vec![0; 80]);
let encoding = input.encoding();
assert_eq!(encoding, ContentTransferEncoding::Base64);
}
#[test]
fn base64_encode_bytes() {
let encoded = Body::new_with_encoding(
vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
ContentTransferEncoding::Base64,
)
.unwrap();
let encoded =
Body::new_with_encoding(vec![0; 80], ContentTransferEncoding::Base64).unwrap();
assert_eq!(encoded.encoding(), ContentTransferEncoding::Base64);
assert_eq!(encoded.as_ref(), b"AAECAwQFBgcICQ==");
assert_eq!(
encoded.as_ref(),
concat!(
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\r\n",
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
)
.as_bytes()
);
}
#[test]


@@ -1,14 +1,14 @@
use std::{
borrow::Cow,
error::Error as StdError,
fmt::{self, Display, Write},
fmt::{self, Display},
iter::IntoIterator,
time::SystemTime,
};
use ed25519_dalek::Signer;
use once_cell::sync::Lazy;
use regex::{bytes::Regex as BRegex, Regex};
use regex::bytes::Regex;
use rsa::{pkcs1::DecodeRsaPrivateKey, Hash, PaddingScheme, RsaPrivateKey};
use sha2::{Digest, Sha256};
@@ -115,12 +115,12 @@ enum InnerDkimSigningKey {
impl DkimSigningKey {
pub fn new(
private_key: String,
private_key: &str,
algorithm: DkimSigningAlgorithm,
) -> Result<DkimSigningKey, DkimSigningKeyError> {
Ok(Self(match algorithm {
DkimSigningAlgorithm::Rsa => InnerDkimSigningKey::Rsa(
RsaPrivateKey::from_pkcs1_pem(&private_key)
RsaPrivateKey::from_pkcs1_pem(private_key)
.map_err(|err| DkimSigningKeyError(InnerDkimSigningKeyError::Rsa(err)))?,
),
DkimSigningAlgorithm::Ed25519 => {
@@ -222,9 +222,9 @@ fn dkim_canonicalize_body(
body: &[u8],
canonicalization: DkimCanonicalizationType,
) -> Cow<'_, [u8]> {
static RE: Lazy<BRegex> = Lazy::new(|| BRegex::new("(\r\n)+$").unwrap());
static RE_DOUBLE_SPACE: Lazy<BRegex> = Lazy::new(|| BRegex::new("[\\t ]+").unwrap());
static RE_SPACE_EOL: Lazy<BRegex> = Lazy::new(|| BRegex::new("[\t ]\r\n").unwrap());
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("(\r\n)+$").unwrap());
static RE_DOUBLE_SPACE: Lazy<Regex> = Lazy::new(|| Regex::new("[\\t ]+").unwrap());
static RE_SPACE_EOL: Lazy<Regex> = Lazy::new(|| Regex::new("[\t ]\r\n").unwrap());
match canonicalization {
DkimCanonicalizationType::Simple => RE.replace(body, &b"\r\n"[..]),
DkimCanonicalizationType::Relaxed => {
@@ -241,23 +241,59 @@ fn dkim_canonicalize_body(
}
}
/// Canonicalize the value of an header
fn dkim_canonicalize_header_value(
value: &str,
canonicalization: DkimCanonicalizationType,
) -> Cow<'_, str> {
match canonicalization {
DkimCanonicalizationType::Simple => Cow::Borrowed(value),
DkimCanonicalizationType::Relaxed => {
static RE_EOL: Lazy<Regex> = Lazy::new(|| Regex::new("\r\n").unwrap());
static RE_SPACES: Lazy<Regex> = Lazy::new(|| Regex::new("[\\t ]+").unwrap());
let value = RE_EOL.replace_all(value, "");
Cow::Owned(format!(
"{}\r\n",
RE_SPACES.replace_all(&value, " ").trim_end()
))
fn dkim_canonicalize_headers_relaxed(headers: &str) -> String {
let mut r = String::with_capacity(headers.len());
fn skip_whitespace(h: &str) -> &str {
match h.as_bytes().get(0) {
Some(b' ' | b'\t') => skip_whitespace(&h[1..]),
_ => h,
}
}
fn name(h: &str, out: &mut String) {
if let Some(name_end) = h.bytes().position(|c| c == b':') {
let (name, rest) = h.split_at(name_end + 1);
*out += name;
// Space after header colon is stripped.
value(skip_whitespace(rest), out);
} else {
// This should never happen.
*out += h;
}
}
fn value(h: &str, out: &mut String) {
match h.as_bytes() {
// Continuation lines.
[b'\r', b'\n', b' ' | b'\t', ..] => {
out.push(' ');
value(skip_whitespace(&h[2..]), out);
}
// End of header.
[b'\r', b'\n', ..] => {
*out += "\r\n";
name(&h[2..], out)
}
// Sequential whitespace.
[b' ' | b'\t', b' ' | b'\t' | b'\r', ..] => value(&h[1..], out),
// All whitespace becomes spaces.
[b'\t', ..] => {
out.push(' ');
value(&h[1..], out)
}
[_, ..] => {
let mut chars = h.chars();
out.push(chars.next().unwrap());
value(chars.as_str(), out)
}
[] => {}
}
}
name(headers, &mut r);
r
}
/// Canonicalize header tag
@@ -277,47 +313,35 @@ fn dkim_canonicalize_headers<'a>(
mail_headers: &Headers,
canonicalization: DkimCanonicalizationType,
) -> String {
let mut covered_headers = Headers::new();
for name in headers_list {
if let Some(h) = mail_headers.find_header(name) {
let name = dkim_canonicalize_header_tag(name, canonicalization);
covered_headers.insert_raw(HeaderValue::dangerous_new_pre_encoded(
HeaderName::new_from_ascii(name.into()).unwrap(),
h.get_raw().into(),
h.get_encoded().into(),
));
}
}
let serialized = covered_headers.to_string();
match canonicalization {
DkimCanonicalizationType::Simple => {
let mut signed_headers = Headers::new();
for h in headers_list {
let h = dkim_canonicalize_header_tag(h, canonicalization);
if let Some(value) = mail_headers.get_raw(&h) {
signed_headers.insert_raw(HeaderValue::new(
HeaderName::new_from_ascii(h.into()).unwrap(),
dkim_canonicalize_header_value(value, canonicalization).to_string(),
))
}
}
signed_headers.to_string()
}
DkimCanonicalizationType::Relaxed => {
let mut signed_headers = String::new();
for h in headers_list {
let h = dkim_canonicalize_header_tag(h, canonicalization);
if let Some(value) = mail_headers.get_raw(&h) {
write!(
signed_headers,
"{}:{}",
h,
dkim_canonicalize_header_value(value, canonicalization)
)
.expect("write implementation returned an error")
}
}
signed_headers
}
DkimCanonicalizationType::Simple => serialized,
DkimCanonicalizationType::Relaxed => dkim_canonicalize_headers_relaxed(&serialized),
}
}
/// Sign with Dkim a message by adding Dkim-Signture header created with configuration expressed by
/// dkim_config
pub(super) fn dkim_sign(message: &mut Message, dkim_config: &DkimConfig) {
let timestamp = SystemTime::now()
pub fn dkim_sign(message: &mut Message, dkim_config: &DkimConfig) {
dkim_sign_fixed_time(message, dkim_config, SystemTime::now())
}
fn dkim_sign_fixed_time(message: &mut Message, dkim_config: &DkimConfig, timestamp: SystemTime) {
let timestamp = timestamp
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
@@ -385,21 +409,46 @@ pub(super) fn dkim_sign(message: &mut Message, dkim_config: &DkimConfig) {
#[cfg(test)]
mod test {
use std::{
io::Write,
process::{Command, Stdio},
};
use pretty_assertions::assert_eq;
use super::{
super::{
header::{HeaderName, HeaderValue},
Header, Message,
},
dkim_canonicalize_body, dkim_canonicalize_header_value, dkim_canonicalize_headers,
dkim_canonicalize_headers, dkim_sign_fixed_time, DkimCanonicalization,
DkimCanonicalizationType, DkimConfig, DkimSigningAlgorithm, DkimSigningKey,
};
use crate::StdError;
const KEY_RSA: &str = "-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAwOsW7UFcWn1ch3UM8Mll5qZH5hVHKJQ8Z0tUlebUECq0vjw6
VcsIucZ/B70VpCN63whyi7oApdCIS1o0zad7f0UaW/BfxXADqdcFL36uMaG0RHer
uSASjQGnsl9Kozt/dXiDZX5ngjr/arLJhNZSNR4/9VSwqbE2OPXaSaQ9BsqneD0P
8dCVSfkkDZCcfC2864z7hvC01lFzWQKF36ZAoGBERHScHtFMAzUOgGuqqPiP5khw
DQB3Ffccf+BsWLU2OOteshUwTGjpoangbPCYj6kckwNm440lQwuqTinpC92yyIE5
Ol8psNMW49DLowAeZb6JrjLhD+wY9bghTaOkcwIDAQABAoIBAHTZ8LkkrdvhsvoZ
XA088AwVC9fBa6iYoT2v0zw45JomQ/Q2Zt8wa8ibAradQU56byJI65jWwS2ucd+y
c+ldWOBt6tllb50XjCCDrRBnmvtVBuux0MIBOztNlVXlgj/8+ecdZ/lB51Bqi+sF
ACsF5iVmfTcMZTVjsYQu5llUseI6Lwgqpx6ktaXD2PVsVo9Gf01ssZ4GCy69wB/3
20CsOz4LEpSYkq1oE98lMMGCfD7py3L9kWHYNNisam78GM+1ynRxRGwEDUbz6pxs
fGPIAwHLaZsOmibPkBB0PJTW742w86qQ8KAqC6ZbRYOF19rSMj3oTfRnPMHn9Uu5
N8eQcoECgYEA97SMUrz2hqII5i8igKylO9kV8pjcIWKI0rdt8MKj4FXTNYjjO9I+
41ONOjhUOpFci/G3YRKi8UiwbKxIRTvIxNMh2xj6Ws3iO9gQHK1j8xTWxJdjEBEz
EuZI59Mi5H7fxSL1W+n8nS8JVsaH93rvQErngqTUAsihAzjxHWdFwm0CgYEAx2Dh
claESJP2cOKgYp+SUNwc26qMaqnl1f37Yn+AflrQOfgQqJe5TRbicEC+nFlm6XUt
3st1Nj29H0uOMmMZDmDCO+cOs5Qv5A9pG6jSC6wM+2KNHQDtrxlakBFygePEPVVy
GXaY9DRa9Q4/4ataxDR2/VvIAWfEEtMTJIBDtl8CgYAIXEuwLziS6r0qJ8UeWrVp
A7a97XLgnZbIpfBMBAXL+JmcYPZqenos6hEGOgh9wZJCFvJ9kEd3pWBvCpGV5KKu
IgIuhvVMQ06zfmNs1F1fQwDMud9aF3qF1Mf5KyMuWynqWXe2lns0QvYpu6GzNK8G
mICf5DhTr7nfhfh9aZLtMQKBgCxKsmqzG5n//MxhHB4sstVxwJtwDNeZPKzISnM8
PfBT/lQSbqj1Y73japRjXbTgC4Ore3A2JKjTGFN+dm1tJGDUT/H8x4BPWEBCyCfT
3i2noA6sewrJbQPsDvlYVubSEYNKmxlbBmmhw98StlBMv9I8kX6BSDI/uggwid0e
/WvjAoGBAKpZ0UOKQyrl9reBiUfrpRCvIMakBMd79kNiH+5y0Soq/wCAnAuABayj
XEIBhFv+HxeLEnT7YV+Zzqp5L9kKw/EU4ik3JX/XsEihdSxEuGX00ZYOw05FEfpW
cJ5Ku0OTwRtSMaseRPX+T4EfG1Caa/eunPPN4rh+CSup2BVVarOT
-----END RSA PRIVATE KEY-----";
#[derive(Clone)]
struct TestHeader(String);
@@ -417,47 +466,11 @@ mod test {
}
}
#[test]
fn test_body_simple_canonicalize() {
let body = b"test\r\n\r\ntest \ttest\r\n\r\n\r\n";
let expected: &[u8] = b"test\r\n\r\ntest \ttest\r\n";
assert_eq!(
dkim_canonicalize_body(body, DkimCanonicalizationType::Simple),
expected
)
}
#[test]
fn test_body_relaxed_canonicalize() {
let body = b"test\r\n\r\ntest \ttest\r\n\r\n\r\n";
let expected: &[u8] = b"test\r\n\r\ntest test\r\n";
assert_eq!(
dkim_canonicalize_body(body, DkimCanonicalizationType::Relaxed),
expected
)
}
#[test]
fn test_header_simple_canonicalize() {
let value = "test\r\n\r\ntest \ttest\r\n";
let expected = "test\r\n\r\ntest \ttest\r\n";
assert_eq!(
dkim_canonicalize_header_value(value, DkimCanonicalizationType::Simple),
expected
)
}
#[test]
fn test_header_relaxed_canonicalize() {
let value = "test\r\n\r\ntest \ttest\r\n";
let expected = "testtest test\r\n";
assert_eq!(
dkim_canonicalize_header_value(value, DkimCanonicalizationType::Relaxed),
expected
)
}
fn test_message() -> Message {
Message::builder()
.from("Test <test+ezrz@example.net>".parse().unwrap())
.from("Test O'Leary <test+ezrz@example.net>".parse().unwrap())
.to("Test2 <test2@example.org>".parse().unwrap())
.date(std::time::UNIX_EPOCH)
.header(TestHeader("test test very very long with spaces and extra spaces \twill be folded to several lines ".to_string()))
.subject("Test with utf-8 ë")
.body("test\r\n\r\ntest \ttest\r\n\r\n\r\n".to_string()).unwrap()
@@ -466,63 +479,112 @@ mod test {
#[test]
fn test_headers_simple_canonicalize() {
let message = test_message();
assert_eq!(dkim_canonicalize_headers(["From", "Test"], &message.headers, DkimCanonicalizationType::Simple),"From: Test <test+ezrz@example.net>\r\nTest: test test very very long with spaces and extra spaces \twill be \r\n folded to several lines \r\n")
dbg!(message.headers.to_string());
assert_eq!(dkim_canonicalize_headers(["From", "Test"], &message.headers, DkimCanonicalizationType::Simple), "From: =?utf-8?b?VGVzdCBPJ0xlYXJ5?= <test+ezrz@example.net>\r\nTest: test test very very long with spaces and extra spaces \twill be\r\n folded to several lines \r\n")
}
#[test]
fn test_headers_relaxed_canonicalize() {
let message = test_message();
assert_eq!(dkim_canonicalize_headers(["From", "Test"], &message.headers, DkimCanonicalizationType::Relaxed),"from:Test <test+ezrz@example.net>\r\ntest:test test very very long with spaces and extra spaces will be folded to several lines\r\n")
dbg!(message.headers.to_string());
assert_eq!(dkim_canonicalize_headers(["From", "Test"], &message.headers, DkimCanonicalizationType::Relaxed),"from:=?utf-8?b?VGVzdCBPJ0xlYXJ5?= <test+ezrz@example.net>\r\ntest:test test very very long with spaces and extra spaces will be folded to several lines\r\n")
}
#[test]
fn test_signature_rsa() {
fn test_signature_rsa_simple() {
let mut message = test_message();
let key = "-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAz+FHbM8BwkBBz/Ux5OYLQ5Bp1HVuCHTP6Rr3HXTnome/2cGl
/ze0tsmmFbCjjsS89MXbMGs9xJhjv18LmL1N0UTllblOizzVjorQyN4RwBOfG34j
7SS56pwzrA738Ry8FAbL5InPWEgVzbOhXuTCs8yuzcqTnm4sH/csnIl7cMWeQkVn
1FR9LKMtUG0fjhDPkdX0jx3qTX1L3Z7a7gX6geY191yNd9i9DvE2/+wMigMYz1LA
ts4alk2g86MQhtbjc8AOR7EC15hSw37/lmamlunYLa3wC+PzHNMA8sAfnmkgNvip
ssjh8LnelD9qn+VtsjQB5ppkeQx3TcUPvz5z+QIDAQABAoIBAQCzRa5ZEbSMlumq
s+PRaOox3CrIRHUd6c8bUlvmFVllX1++JRhInvvD3ubSMcD7cIMb/D1o5jMgheMP
uKHBmQ+w91+e3W30+gOZp/EiKRDZupIuHXxSGKgUwZx2N3pvfr5b7viLIKWllpTn
DpCNy251rIDbjGX97Tk0X+8jGBVSTCxtruGJR5a+hz4t9Z7bz7JjZWcRNJC+VA+Q
ATjnV7AHO1WR+0tAdPJaHsRLI7drKFSqTYq0As+MksZ40p7T6blZW8NUXA09fJRn
3mP2TZdWjjfBXZje026v4T7TZl+TELKw5WirL/UJ8Zw8dGGV6EZvbfMacZuUB1YQ
0vZnGe4BAoGBAO63xWP3OV8oLAMF90umuusPaQNSc6DnpjnP+sTAcXEYJA0Sa4YD
y8dpTAdFJ4YvUQhLxtbZFK5Ih3x7ZhuerLSJiZiDPC2IJJb7j/812zQQriOi4mQ8
bimxM4Nzql8FKGaXMppE5grFLsy8tw7neIM9KE4uwe9ajwJrRrOTUY8ZAoGBAN7t
+xFeuhg4F9expyaPpCvKT2YNAdMcDzpm7GtLX292u+DQgBfg50Ur9XmbS+RPlx1W
r2Sw3bTjRjJU9QnSZLL2w3hiii/wdaePI4SCaydHdLi4ZGz/pNUsUY+ck2pLptS0
F7rL+s9MV9lUyhvX+pIh+O3idMWAdaymzs7ZlgfhAoGAVoFn2Wrscmw3Tr0puVNp
JudFsbt+RU/Mr+SLRiNKuKX74nTLXBwiC1hAAd5wjTK2VaBIJPEzilikKFr7TIT6
ps20e/0KoKFWSRROQTh9/+cPg8Bx88rmTNt3BGq00Ywn8M1XvAm9pyd/Zxf36kG9
LSnLYlGVW6xgaIsBau+2vXkCgYAeChVdxtTutIhJ8U9ju9FUcUN3reMEDnDi3sGW
x6ZJf8dbSN0p2o1vXbgLNejpD+x98JNbzxVg7Ysk9xu5whb9opC+ZRDX2uAPvxL7
JRPJTDCnP3mQ0nXkn78xydh3Z1BIsyfLbPcT/eaMi4dcbyL9lARWEcDIaEHzDNsr
NlioIQKBgQCXIZp5IBfG5WSXzFk8xvP4BUwHKEI5bttClBmm32K+vaSz8qO6ak6G
4frg+WVopFg3HBHdK9aotzPEd0eHMXJv3C06Ynt2lvF+Rgi/kwGbkuq/mFVnmYYR
Fz0TZ6sKrTAF3fdkN3bcQv6JG1CfnWENDGtekemwcCEA9v46/RsOfg==
-----END RSA PRIVATE KEY-----";
let signing_key = DkimSigningKey::new(key.to_string(), DkimSigningAlgorithm::Rsa).unwrap();
message.sign(&DkimConfig::default_config(
"dkimtest".to_string(),
"example.org".to_string(),
signing_key,
));
println!("{}", std::str::from_utf8(&message.formatted()).unwrap());
let mut verify_command = Command::new("dkimverify")
.stdin(Stdio::piped())
.spawn()
.expect("Fail to verify message signature");
let mut stdin = verify_command.stdin.take().expect("Failed to open stdin");
std::thread::spawn(move || {
stdin
.write_all(&message.formatted())
.expect("Failed to write to stdin");
});
assert!(verify_command
.wait()
.expect("Command did not run")
.success());
let signing_key = DkimSigningKey::new(KEY_RSA, DkimSigningAlgorithm::Rsa).unwrap();
dkim_sign_fixed_time(
&mut message,
&DkimConfig::new(
"dkimtest".to_string(),
"example.org".to_string(),
signing_key,
vec![
HeaderName::new_from_ascii_str("Date"),
HeaderName::new_from_ascii_str("From"),
HeaderName::new_from_ascii_str("Subject"),
HeaderName::new_from_ascii_str("To"),
],
DkimCanonicalization {
header: DkimCanonicalizationType::Simple,
body: DkimCanonicalizationType::Simple,
},
),
std::time::UNIX_EPOCH,
);
let signed = message.formatted();
let signed = std::str::from_utf8(&signed).unwrap();
assert_eq!(
signed,
std::concat!(
"From: =?utf-8?b?VGVzdCBPJ0xlYXJ5?= <test+ezrz@example.net>\r\n",
"To: Test2 <test2@example.org>\r\n",
"Date: Thu, 01 Jan 1970 00:00:00 +0000\r\n",
"Test: test test very very long with spaces and extra spaces \twill be\r\n",
" folded to several lines \r\n",
"Subject: Test with utf-8 =?utf-8?b?w6s=?=\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"DKIM-Signature: v=1; a=rsa-sha256; d=example.org; s=dkimtest;\r\n",
" c=simple/simple; q=dns/txt; t=0; h=Date:From:Subject:To;\r\n",
" bh=f3Zksdcjqa/xRBwdyFzIXWCcgP7XTgxjCgYsXOMKQl4=;\r\n",
" b=NhoIMMAALoSgu5lKAR0+MUQunOWnU7wpF9ORUFtpxq9sGZDo9AX43AMhFemyM5W204jpFwMU6pm7AMR1nOYBdSYye4yUALtvT2nqbJBwSh7JeYu+z22t1RFKp7qQR1il8aSrkbZuNMFHYuSEwW76QtKwcNqP4bQOzS9CzgQp0ABu8qwYPBr/EypykPTfqjtyN+ywrfdqjjGOzTpRGolH0hc3CrAETNjjHbNBgKgucXmXTN7hMRdzqWjeFPxizXwouwNAavFClPG0l33gXVArFWn+CkgA84G/s4zuJiF7QPZR87Pu4pw/vIlSXxH4a42W3tT19v9iBTH7X7ldYegtmQ==\r\n",
"\r\n",
"test\r\n",
"\r\n",
"test \ttest\r\n",
"\r\n",
"\r\n",
)
);
}
#[test]
fn test_signature_rsa_relaxed() {
let mut message = test_message();
let signing_key = DkimSigningKey::new(KEY_RSA, DkimSigningAlgorithm::Rsa).unwrap();
dkim_sign_fixed_time(
&mut message,
&DkimConfig::new(
"dkimtest".to_string(),
"example.org".to_string(),
signing_key,
vec![
HeaderName::new_from_ascii_str("Date"),
HeaderName::new_from_ascii_str("From"),
HeaderName::new_from_ascii_str("Subject"),
HeaderName::new_from_ascii_str("To"),
],
DkimCanonicalization {
header: DkimCanonicalizationType::Relaxed,
body: DkimCanonicalizationType::Relaxed,
},
),
std::time::UNIX_EPOCH,
);
let signed = message.formatted();
let signed = std::str::from_utf8(&signed).unwrap();
println!("{}", signed);
assert_eq!(
signed,
std::concat!(
"From: =?utf-8?b?VGVzdCBPJ0xlYXJ5?= <test+ezrz@example.net>\r\n",
"To: Test2 <test2@example.org>\r\n",
"Date: Thu, 01 Jan 1970 00:00:00 +0000\r\n",
"Test: test test very very long with spaces and extra spaces \twill be\r\n",
" folded to several lines \r\n","Subject: Test with utf-8 =?utf-8?b?w6s=?=\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"DKIM-Signature: v=1; a=rsa-sha256; d=example.org; s=dkimtest;\r\n",
" c=relaxed/relaxed; q=dns/txt; t=0; h=date:from:subject:to;\r\n",
" bh=qN8je6qJgWFGSnN2MycC/XKPbN6BOrMJyAX2h4m19Ss=;\r\n",
" b=YaVfmH8dbGEywoLJ4uhbvYqDyQG1UGKFH3PE7zXGgk+YFxUgkwWjoA3aQupDNQtfTjfUsNe0dnrjyZP+ylnESpZBpbCIf5/n3FEh6j3RQthqNbQblcfH/U8mazTuRbVjYBbTZQDaQCMPTz+8D+ZQfXo2oq6dGzTuGvmuYft0CVsq/BIp/EkhZHqiphDeVJSHD4iKW8+L2XwEWThoY92xOYc1G0TtBwz2UJgtiHX2YulH/kRBHeK3dKn9RTNVL3VZ+9ZrnFwIhET9TPGtU2I+q0EMSWF9H9bTrASMgW/U+E0VM2btqJlrTU6rQ7wlQeHdwecLnzXcyhCUInF1+veMNw==\r\n",
"\r\n",
"test\r\n",
"\r\n",
"test \ttest\r\n",
"\r\n",
"\r\n",
)
);
}
}


@@ -75,6 +75,8 @@ impl Default for ContentTransferEncoding {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::ContentTransferEncoding;
use crate::message::header::{HeaderName, HeaderValue, Headers};


@@ -77,6 +77,8 @@ impl Header for ContentDisposition {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::ContentDisposition;
use crate::message::header::{HeaderName, HeaderValue, Headers};


@@ -149,6 +149,8 @@ mod serde {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::ContentType;
use crate::message::header::{HeaderName, HeaderValue, Headers};


@@ -32,11 +32,11 @@ impl Header for Date {
fn parse(s: &str) -> Result<Self, BoxError> {
let mut s = String::from(s);
if s.ends_with(" -0000") {
if s.ends_with("+0000") {
// The httpdate crate expects the `Date` to end in ` GMT`, but email
// uses `-0000`, so we crudely fix this issue here.
// uses `+0000` to indicate UTC, so we crudely fix this issue here.
s.truncate(s.len() - "-0000".len());
s.truncate(s.len() - "+0000".len());
s.push_str("GMT");
}
@@ -49,9 +49,9 @@ impl Header for Date {
// The httpdate crate always appends ` GMT` to the end of the string,
// but this is considered an obsolete date format for email
// https://tools.ietf.org/html/rfc2822#appendix-A.6.2,
// so we replace `GMT` with `-0000`
// so we replace `GMT` with `+0000`
val.truncate(val.len() - "GMT".len());
val.push_str("-0000");
val.push_str("+0000");
}
HeaderValue::dangerous_new_pre_encoded(Self::name(), val.clone(), val)
@@ -74,6 +74,8 @@ impl From<Date> for SystemTime {
mod test {
use std::time::{Duration, SystemTime};
use pretty_assertions::assert_eq;
use super::Date;
use crate::message::header::{HeaderName, HeaderValue, Headers};
@@ -88,7 +90,7 @@ mod test {
assert_eq!(
headers.to_string(),
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n".to_string()
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n".to_string()
);
// Tue, 15 Nov 1994 08:12:32 GMT
@@ -98,7 +100,7 @@ mod test {
assert_eq!(
headers.to_string(),
"Date: Tue, 15 Nov 1994 08:12:32 -0000\r\n"
"Date: Tue, 15 Nov 1994 08:12:32 +0000\r\n"
);
}
@@ -108,7 +110,7 @@ mod test {
headers.insert_raw(HeaderValue::new(
HeaderName::new_from_ascii_str("Date"),
"Tue, 15 Nov 1994 08:12:31 -0000".to_string(),
"Tue, 15 Nov 1994 08:12:31 +0000".to_string(),
));
assert_eq!(
@@ -120,7 +122,7 @@ mod test {
headers.insert_raw(HeaderValue::new(
HeaderName::new_from_ascii_str("Date"),
"Tue, 15 Nov 1994 08:12:32 -0000".to_string(),
"Tue, 15 Nov 1994 08:12:32 +0000".to_string(),
));
assert_eq!(


@@ -172,6 +172,8 @@ mailboxes_header! {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::{From, Mailbox, Mailboxes};
use crate::message::header::{HeaderName, HeaderValue, Headers};
@@ -296,4 +298,31 @@ mod test {
assert_eq!(headers.get::<From>(), Some(From(from.into())));
}
#[test]
fn parse_multi_with_name_containing_comma() {
let from: Vec<Mailbox> = vec![
"Test, test <1@example.com>".parse().unwrap(),
"Test2, test2 <2@example.com>".parse().unwrap(),
];
let mut headers = Headers::new();
headers.insert_raw(HeaderValue::new(
HeaderName::new_from_ascii_str("From"),
"Test, test <1@example.com>, Test2, test2 <2@example.com>".to_string(),
));
assert_eq!(headers.get::<From>(), Some(From(from.into())));
}
#[test]
fn parse_multi_with_name_containing_comma_last_broken() {
let mut headers = Headers::new();
headers.insert_raw(HeaderValue::new(
HeaderName::new_from_ascii_str("From"),
"Test, test <1@example.com>, Test2, test2".to_string(),
));
assert_eq!(headers.get::<From>(), None);
}
}


@@ -3,10 +3,12 @@
use std::{
borrow::Cow,
error::Error,
fmt::{self, Display, Formatter},
fmt::{self, Display, Formatter, Write},
ops::Deref,
};
use email_encoding::headers::EmailWriter;
pub use self::{
content::*,
content_disposition::ContentDisposition,
@@ -121,7 +123,7 @@ impl Headers {
self.find_header_index(name).map(|i| self.headers.remove(i))
}
fn find_header(&self, name: &str) -> Option<&HeaderValue> {
pub(crate) fn find_header(&self, name: &str) -> Option<&HeaderValue> {
self.headers
.iter()
.find(|value| name.eq_ignore_ascii_case(&value.name))
@@ -305,57 +307,46 @@ impl HeaderValue {
encoded_value,
}
}
pub(crate) fn get_raw(&self) -> &str {
&self.raw_value
}
pub(crate) fn get_encoded(&self) -> &str {
&self.encoded_value
}
}
const ENCODING_START_PREFIX: &str = "=?utf-8?b?";
const ENCODING_END_SUFFIX: &str = "?=";
const MAX_LINE_LEN: usize = 76;
/// [RFC 1522](https://tools.ietf.org/html/rfc1522) header value encoder
struct HeaderValueEncoder {
line_len: usize,
struct HeaderValueEncoder<'a> {
writer: EmailWriter<'a>,
encode_buf: String,
}
impl HeaderValueEncoder {
fn encode(name: &str, value: &str, f: &mut impl fmt::Write) -> fmt::Result {
let (words_iter, encoder) = Self::new(name, value);
encoder.format(words_iter, f)
impl<'a> HeaderValueEncoder<'a> {
fn encode(name: &str, value: &'a str, f: &'a mut impl fmt::Write) -> fmt::Result {
let (words_iter, encoder) = Self::new(name, value, f);
encoder.format(words_iter)
}
fn new<'a>(name: &str, value: &'a str) -> (WordsPlusFillIterator<'a>, Self) {
fn new(
name: &str,
value: &'a str,
writer: &'a mut dyn Write,
) -> (WordsPlusFillIterator<'a>, Self) {
let line_len = name.len() + ": ".len();
let writer = EmailWriter::new(writer, line_len, false);
(
WordsPlusFillIterator { s: value },
Self {
line_len: name.len() + ": ".len(),
writer,
encode_buf: String::new(),
},
)
}
fn format(
mut self,
words_iter: WordsPlusFillIterator<'_>,
f: &mut impl fmt::Write,
) -> fmt::Result {
/// Estimate if an encoded string of `len` would fix in an empty line
fn would_fit_new_line(len: usize) -> bool {
len < (MAX_LINE_LEN - " ".len())
}
/// Estimate how long a string of `len` would be after base64 encoding plus
/// adding the encoding prefix and suffix to it
fn base64_len(len: usize) -> usize {
ENCODING_START_PREFIX.len() + (len * 4 / 3 + 4) + ENCODING_END_SUFFIX.len()
}
/// Estimate how many more bytes we can fit in the current line
fn available_len_to_max_encode_len(len: usize) -> usize {
len.saturating_sub(
ENCODING_START_PREFIX.len() + (len * 3 / 4 + 4) + ENCODING_END_SUFFIX.len(),
)
}
fn format(mut self, words_iter: WordsPlusFillIterator<'_>) -> fmt::Result {
for next_word in words_iter {
let allowed = allowed_str(next_word);
@@ -363,161 +354,64 @@ impl HeaderValueEncoder {
// This word only contains allowed characters
// the next word is allowed, but we may have accumulated some words to encode
self.flush_encode_buf(f, true)?;
self.flush_encode_buf()?;
if next_word.len() > self.remaining_line_len() {
// not enough space left on this line to encode word
if self.something_written_to_this_line() && would_fit_new_line(next_word.len())
{
// word doesn't fit this line, but something had already been written to it,
// and word would fit the next line, so go to a new line
// so go to new line
self.new_line(f)?;
} else {
// word neither fits this line and the next one, cut it
// in the middle and make it fit
let mut next_word = next_word;
while !next_word.is_empty() {
if self.remaining_line_len() == 0 {
self.new_line(f)?;
}
let len = self.remaining_line_len().min(next_word.len());
let first_part = &next_word[..len];
next_word = &next_word[len..];
f.write_str(first_part)?;
self.line_len += first_part.len();
}
continue;
}
}
// word fits, write it!
f.write_str(next_word)?;
self.line_len += next_word.len();
self.writer.folding().write_str(next_word)?;
} else {
// This word contains unallowed characters
if self.remaining_line_len() >= base64_len(self.encode_buf.len() + next_word.len())
{
// next_word fits
self.encode_buf.push_str(next_word);
continue;
}
// next_word doesn't fit this line
if would_fit_new_line(base64_len(next_word.len())) {
// ...but it would fit the next one
self.flush_encode_buf(f, false)?;
self.new_line(f)?;
self.encode_buf.push_str(next_word);
continue;
}
// ...and also wouldn't fit the next one.
// chop it up into pieces
let mut next_word = next_word;
while !next_word.is_empty() {
let mut len = available_len_to_max_encode_len(self.remaining_line_len())
.min(next_word.len());
if len == 0 {
self.flush_encode_buf(f, false)?;
self.new_line(f)?;
}
// avoid slicing on a char boundary
while !next_word.is_char_boundary(len) {
len += 1;
}
let first_part = &next_word[..len];
next_word = &next_word[len..];
self.encode_buf.push_str(first_part);
}
self.encode_buf.push_str(next_word);
}
}
self.flush_encode_buf(f, false)?;
self.flush_encode_buf()?;
Ok(())
}
/// Returns the number of bytes left for the current line
fn remaining_line_len(&self) -> usize {
MAX_LINE_LEN - self.line_len
}
/// Returns true if something has been written to the current line
fn something_written_to_this_line(&self) -> bool {
self.line_len > 1
}
fn flush_encode_buf(
&mut self,
f: &mut impl fmt::Write,
switching_to_allowed: bool,
) -> fmt::Result {
fn flush_encode_buf(&mut self) -> fmt::Result {
if self.encode_buf.is_empty() {
// nothing to encode
return Ok(());
}
let mut write_after = None;
// It is important that we don't encode leading whitespace otherwise it breaks wrapping.
let first_not_allowed = self
.encode_buf
.bytes()
.enumerate()
.find(|(_i, c)| !allowed_char(*c))
.map(|(i, _)| i);
// May as well also write the tail in plain text.
let last_not_allowed = self
.encode_buf
.bytes()
.enumerate()
.rev()
.find(|(_i, c)| !allowed_char(*c))
.map(|(i, _)| i + 1);
if switching_to_allowed {
// If the next word only contains allowed characters, and the string to encode
// ends with a space, take the space out of the part to encode
let (prefix, to_encode, suffix) = match first_not_allowed {
Some(first_not_allowed) => {
let last_not_allowed = last_not_allowed.unwrap();
let last_char = self.encode_buf.pop().expect("self.encode_buf isn't empty");
if is_space_like(last_char) {
write_after = Some(last_char);
} else {
self.encode_buf.push(last_char);
let (remaining, suffix) = self.encode_buf.split_at(last_not_allowed);
let (prefix, to_encode) = remaining.split_at(first_not_allowed);
(prefix, to_encode, suffix)
}
}
None => ("", self.encode_buf.as_str(), ""),
};
f.write_str(ENCODING_START_PREFIX)?;
let encoded = base64::display::Base64Display::with_config(
self.encode_buf.as_bytes(),
base64::STANDARD,
);
write!(f, "{}", encoded)?;
f.write_str(ENCODING_END_SUFFIX)?;
self.line_len += ENCODING_START_PREFIX.len();
self.line_len += self.encode_buf.len() * 4 / 3 + 4;
self.line_len += ENCODING_END_SUFFIX.len();
if let Some(write_after) = write_after {
f.write_char(write_after)?;
self.line_len += 1;
}
self.writer.folding().write_str(prefix)?;
email_encoding::headers::rfc2047::encode(to_encode, &mut self.writer)?;
self.writer.folding().write_str(suffix)?;
self.encode_buf.clear();
Ok(())
}
fn new_line(&mut self, f: &mut impl fmt::Write) -> fmt::Result {
f.write_str("\r\n ")?;
self.line_len = 1;
Ok(())
}
}
/// Iterator yielding a string split space by space, but including all space
/// characters between it and the next word
/// Iterator yielding a string split by space, but spaces are included before the next word.
struct WordsPlusFillIterator<'a> {
s: &'a str,
}
@@ -532,35 +426,31 @@ impl<'a> Iterator for WordsPlusFillIterator<'a> {
let next_word = self
.s
.char_indices()
.bytes()
.enumerate()
.skip(1)
.skip_while(|&(_i, c)| !is_space_like(c))
.find(|&(_i, c)| !is_space_like(c))
.map(|(i, _)| i);
.find(|&(_i, c)| c == b' ')
.map(|(i, _)| i)
.unwrap_or(self.s.len());
let word = &self.s[..next_word.unwrap_or(self.s.len())];
let word = &self.s[..next_word];
self.s = &self.s[word.len()..];
Some(word)
}
}
const fn is_space_like(c: char) -> bool {
c == ',' || c == ' '
}
fn allowed_str(s: &str) -> bool {
s.chars().all(allowed_char)
s.bytes().all(allowed_char)
}
const fn allowed_char(c: char) -> bool {
c >= 1 as char && c <= 9 as char
|| c == 11 as char
|| c == 12 as char
|| c >= 14 as char && c <= 127 as char
const fn allowed_char(c: u8) -> bool {
c >= 1 && c <= 9 || c == 11 || c == 12 || c >= 14 && c <= 127
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use super::{HeaderName, HeaderValue, Headers};
#[test]
@@ -644,8 +534,8 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"To: Ascii <example@example.com>, John Doe <johndoe@example.com, John Smith \r\n",
" <johnsmith@example.com>, Pinco Pallino <pincopallino@example.com>, Jemand \r\n",
"To: Ascii <example@example.com>, John Doe <johndoe@example.com, John Smith\r\n",
" <johnsmith@example.com>, Pinco Pallino <pincopallino@example.com>, Jemand\r\n",
" <jemand@example.com>, Jean Dupont <jean@example.com>\r\n"
)
);
@@ -662,8 +552,8 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"Subject: Hello! This is lettre, and this \r\n ",
"IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. Ok I \r\n",
"Subject: Hello! This is lettre, and this\r\n",
" IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. Ok I\r\n",
" guess that's it!\r\n"
)
);
@@ -681,8 +571,9 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"Subject: Hello! IGuessTheLastLineWasntLongEnoughSoLetsTryAgainShallWeWhatDoY\r\n",
" ouThinkItsGoingToHappenIGuessWereAboutToFindOut! I don't know\r\n",
"Subject: Hello!\r\n",
" IGuessTheLastLineWasntLongEnoughSoLetsTryAgainShallWeWhatDoYouThinkItsGoingToHappenIGuessWereAboutToFindOut!\r\n",
" I don't know\r\n",
)
);
}
@@ -697,11 +588,7 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"Subject: 1abcdefghijklmnopqrstuvwxyz2abcdefghijklmnopqrstuvwxyz3abcdefghijkl\r\n",
" mnopqrstuvwxyz4abcdefghijklmnopqrstuvwxyz5abcdefghijklmnopqrstuvwxyz6abcdef\r\n",
" ghijklmnopqrstuvwxyz\r\n",
)
"Subject: 1abcdefghijklmnopqrstuvwxyz2abcdefghijklmnopqrstuvwxyz3abcdefghijklmnopqrstuvwxyz4abcdefghijklmnopqrstuvwxyz5abcdefghijklmnopqrstuvwxyz6abcdefghijklmnopqrstuvwxyz\r\n",
);
}
@@ -715,7 +602,7 @@ mod tests {
assert_eq!(
headers.to_string(),
"To: =?utf-8?b?U2XDoW4=?= <sean@example.com>\r\n"
"To: Se=?utf-8?b?w6E=?=n <sean@example.com>\r\n"
);
}
@@ -744,11 +631,11 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"To: =?utf-8?b?8J+MjQ==?= <world@example.com>, =?utf-8?b?8J+mhg==?= \r\n",
" Everywhere <ducks@example.com>, =?utf-8?b?0JjQstCw0L3QvtCyIA==?=\r\n",
" =?utf-8?b?0JjQstCw0L0g0JjQstCw0L3QvtCy0LjRhw==?= <ivanov@example.com>, \r\n",
" =?utf-8?b?SsSBbmlzIELEk3J6acWGxaE=?= <janis@example.com>, \r\n",
" =?utf-8?b?U2XDoW4gw5MgUnVkYcOt?= <sean@example.com>\r\n"
"To: =?utf-8?b?8J+MjQ==?= <world@example.com>, =?utf-8?b?8J+mhg==?= Everywhere\r\n",
" <ducks@example.com>, =?utf-8?b?0JjQstCw0L3QvtCyINCY0LLQsNC9INCY0LLQsNC9?=\r\n",
" =?utf-8?b?0L7QstC40Yc=?= <ivanov@example.com>, J=?utf-8?b?xIFuaXMgQsST?=\r\n",
" =?utf-8?b?cnppxYbFoQ==?= <janis@example.com>, Se=?utf-8?b?w6FuIMOTIFJ1?=\r\n",
" =?utf-8?b?ZGHDrQ==?= <sean@example.com>\r\n",
)
);
}
@@ -764,7 +651,14 @@ mod tests {
assert_eq!(
headers.to_string(),
"Subject: =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz?=\r\n"
concat!(
"Subject: =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz?=\r\n",
" =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbM=?=\r\n",
" =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbM=?=\r\n",
" =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbM=?=\r\n",
" =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+ls/CfpbM=?=\r\n",
" =?utf-8?b?8J+ls/CfpbPwn6Wz8J+ls/CfpbPwn6Wz8J+lsw==?=\r\n"
)
);
}
@@ -809,14 +703,14 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"Subject: Hello! This is lettre, and this \r\n",
" IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. Ok I \r\n",
"Subject: Hello! This is lettre, and this\r\n",
" IsAVeryLongLineDoYouKnowWhatsGoingToHappenIGuessWeAreGoingToFindOut. Ok I\r\n",
" guess that's it!\r\n",
"To: =?utf-8?b?8J+MjQ==?= <world@example.com>, =?utf-8?b?8J+mhg==?= \r\n",
" Everywhere <ducks@example.com>, =?utf-8?b?0JjQstCw0L3QvtCyIA==?=\r\n",
" =?utf-8?b?0JjQstCw0L0g0JjQstCw0L3QvtCy0LjRhw==?= <ivanov@example.com>, \r\n",
" =?utf-8?b?SsSBbmlzIELEk3J6acWGxaE=?= <janis@example.com>, \r\n",
" =?utf-8?b?U2XDoW4gw5MgUnVkYcOt?= <sean@example.com>\r\n",
"To: =?utf-8?b?8J+MjQ==?= <world@example.com>, =?utf-8?b?8J+mhg==?= Everywhere\r\n",
" <ducks@example.com>, =?utf-8?b?0JjQstCw0L3QvtCyINCY0LLQsNC9INCY0LLQsNC9?=\r\n",
" =?utf-8?b?0L7QstC40Yc=?= <ivanov@example.com>, J=?utf-8?b?xIFuaXMgQsST?=\r\n",
" =?utf-8?b?cnppxYbFoQ==?= <janis@example.com>, Se=?utf-8?b?w6FuIMOTIFJ1?=\r\n",
" =?utf-8?b?ZGHDrQ==?= <sean@example.com>\r\n",
"From: Someone <somewhere@example.com>\r\n",
"Content-Transfer-Encoding: quoted-printable\r\n",
)
@@ -834,8 +728,8 @@ mod tests {
assert_eq!(
headers.to_string(),
concat!(
"Subject: =?utf-8?b?77yL5Luu5ZCN?= :a;go; \r\n",
" =?utf-8?b?Ozs7OztzOzs7Ozs7Ozs7Ozs7Ozs7O2ZmZmVpbm1qZ2dnZ2dnZ2dn772G44Gj?=\r\n"
"Subject: =?utf-8?b?77yL5Luu5ZCN?= :a;go;\r\n",
" ;;;;;s;;;;;;;;;;;;;;;;fffeinmjggggggggg=?utf-8?b?772G44Gj?=\r\n"
)
);
}

View File

@@ -64,6 +64,8 @@ impl Default for MimeVersion {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::{MimeVersion, MIME_VERSION_1_0};
use crate::message::header::{HeaderName, HeaderValue, Headers};

View File

@@ -85,6 +85,8 @@ text_header! {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::Subject;
use crate::message::header::{HeaderName, HeaderValue, Headers};

View File

@@ -154,6 +154,7 @@ impl<'de> Deserialize<'de> for Mailboxes {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use serde_json::from_str;
use super::*;

View File

@@ -352,11 +352,37 @@ impl Display for Mailboxes {
impl FromStr for Mailboxes {
type Err = AddressError;
fn from_str(src: &str) -> Result<Self, Self::Err> {
src.split(',')
.map(|m| m.trim().parse())
.collect::<Result<Vec<_>, _>>()
.map(Mailboxes)
fn from_str(mut src: &str) -> Result<Self, Self::Err> {
let mut mailboxes = Vec::new();
if !src.is_empty() {
// n-1 elements
let mut skip = 0;
while let Some(i) = src[skip..].find(',') {
let left = &src[..skip + i];
match left.trim().parse() {
Ok(mailbox) => {
mailboxes.push(mailbox);
src = &src[left.len() + ",".len()..];
skip = 0;
}
Err(AddressError::MissingParts) => {
skip = left.len() + ",".len();
}
Err(err) => {
return Err(err);
}
}
}
// last element
let mailbox = src.trim().parse()?;
mailboxes.push(mailbox);
}
Ok(Mailboxes(mailboxes))
}
}
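The rewritten FromStr above is what lets a comma survive inside a display name: a segment that fails with AddressError::MissingParts is merged with the text after the next comma instead of failing the whole list. A minimal sketch of the intended behavior, assuming the usual lettre::message::Mailboxes re-export (the addresses are made up):

use lettre::message::Mailboxes;

// "Lastname" alone fails with MissingParts, so the parser keeps scanning past
// the comma and treats "Lastname, Firstname <user@example.com>" as one mailbox;
// "other@example.com" becomes the second entry.
let _parsed: Mailboxes = "Lastname, Firstname <user@example.com>, other@example.com"
    .parse()
    .expect("a comma inside a display name should now be accepted");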
@@ -445,6 +471,8 @@ fn write_quoted_string_char(f: &mut Formatter<'_>, c: u8) -> FmtResult {
mod test {
use std::convert::TryInto;
use pretty_assertions::assert_eq;
use super::Mailbox;
#[test]

View File

@@ -398,6 +398,8 @@ impl EmailFormat for MultiPart {
#[cfg(test)]
mod test {
use pretty_assertions::assert_eq;
use super::*;
use crate::message::header;
@@ -477,7 +479,7 @@ mod test {
assert_eq!(
String::from_utf8(part.formatted()).unwrap(),
concat!(
"Content-Type: multipart/mixed; \r\n",
"Content-Type: multipart/mixed;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"\r\n",
"\r\n",
"--0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\r\n",
@@ -524,8 +526,8 @@ mod test {
assert_eq!(
String::from_utf8(part.formatted()).unwrap(),
concat!(
"Content-Type: multipart/encrypted; \r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"; \r\n",
"Content-Type: multipart/encrypted;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\";\r\n",
" protocol=\"application/pgp-encrypted\"\r\n",
"\r\n",
"--0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\r\n",
@@ -580,8 +582,8 @@ mod test {
assert_eq!(
String::from_utf8(part.formatted()).unwrap(),
concat!(
"Content-Type: multipart/signed; \r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"; \r\n",
"Content-Type: multipart/signed;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\";\r\n",
" protocol=\"application/pgp-signature\";",
" micalg=\"pgp-sha256\"\r\n",
"\r\n",
@@ -622,7 +624,7 @@ mod test {
.body(String::from("<p>Текст <em>письма</em> в <a href=\"https://ru.wikipedia.org/wiki/Юникод\">уникоде</a><p>")));
assert_eq!(String::from_utf8(part.formatted()).unwrap(),
concat!("Content-Type: multipart/alternative; \r\n",
concat!("Content-Type: multipart/alternative;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"\r\n",
"\r\n",
"--0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\r\n",
@@ -660,11 +662,11 @@ mod test {
.body(String::from("int main() { return 0; }")));
assert_eq!(String::from_utf8(part.formatted()).unwrap(),
concat!("Content-Type: multipart/mixed; \r\n",
concat!("Content-Type: multipart/mixed;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"\r\n",
"\r\n",
"--0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\r\n",
"Content-Type: multipart/related; \r\n",
"Content-Type: multipart/related;\r\n",
" boundary=\"0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\"\r\n",
"\r\n",
"--0oVZ2r6AoLAhLlb0gPNSKy6BEqdS2IfwxrcbUuo1\r\n",

View File

@@ -550,7 +550,7 @@ impl Message {
/// JcaBbL6ZSBIMA3AdaIjtvNRiomueHqh0GspTgOeCE2585TSFnw6vEOJ8RlR4A0Mw
/// I45fbR4l+3D/30WMfZlM6bzZbwPXEnr2s1mirmuQpjumY9wLhK25
/// -----END RSA PRIVATE KEY-----";
/// let signing_key = DkimSigningKey::new(key.to_string(), DkimSigningAlgorithm::Rsa).unwrap();
/// let signing_key = DkimSigningKey::new(key, DkimSigningAlgorithm::Rsa).unwrap();
/// message.sign(&DkimConfig::default_config(
/// "dkimtest".to_string(),
/// "example.org".to_string(),
@@ -598,6 +598,8 @@ fn make_message_id() -> String {
mod test {
use std::time::{Duration, SystemTime};
use pretty_assertions::assert_eq;
use super::{header, mailbox::Mailbox, make_message_id, Message, MultiPart, SinglePart};
#[test]
@@ -650,10 +652,10 @@ mod test {
assert_eq!(
String::from_utf8(email.formatted()).unwrap(),
concat!(
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n",
"From: =?utf-8?b?0JrQsNC4?= <kayo@example.com>\r\n",
"To: \"Pony O.P.\" <pony@domain.tld>\r\n",
"Subject: =?utf-8?b?0Y/So9CwINC10Lsg0LHQtdC705nQvSE=?=\r\n",
"Subject: =?utf-8?b?0Y/So9CwINC10Lsg0LHQtdC705nQvQ==?=!\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"\r\n",
"Happy new year!"

View File

@@ -1,4 +1,4 @@
use std::{fmt::Display, time::Duration};
use std::{fmt::Display, net::IpAddr, time::Duration};
use futures_util::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
@@ -54,8 +54,11 @@ impl AsyncSmtpConnection {
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<TlsParameters>,
local_address: Option<IpAddr>,
) -> Result<AsyncSmtpConnection, Error> {
let stream = AsyncNetworkStream::connect_tokio1(server, timeout, tls_parameters).await?;
let stream =
AsyncNetworkStream::connect_tokio1(server, timeout, tls_parameters, local_address)
.await?;
Self::connect_impl(stream, hello_name).await
}
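With the new local_address argument the tokio connector can pin the source IP of the outgoing socket. A hedged sketch of a call site, assuming the tokio1 feature and the re-export paths lettre::transport::smtp::client::AsyncSmtpConnection and lettre::transport::smtp::extension::ClientId (host, timeout and IP are placeholders):

use std::net::{IpAddr, Ipv4Addr};
use std::time::Duration;

use lettre::transport::smtp::client::AsyncSmtpConnection;
use lettre::transport::smtp::extension::ClientId;

async fn connect_from_local_ip() -> Result<AsyncSmtpConnection, lettre::transport::smtp::Error> {
    AsyncSmtpConnection::connect_tokio1(
        ("smtp.example.com", 25),
        Some(Duration::from_secs(30)),                 // connect timeout
        &ClientId::Domain("my_hostname".to_string()),
        None,                                          // no TLS upgrade in this sketch
        Some(IpAddr::V4(Ipv4Addr::new(10, 0, 0, 5))),  // bind the outgoing socket to this IP
    )
    .await
}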

View File

@@ -1,6 +1,6 @@
use std::{
io, mem,
net::SocketAddr,
net::{IpAddr, SocketAddr},
pin::Pin,
task::{Context, Poll},
time::Duration,
@@ -19,7 +19,10 @@ use futures_rustls::client::TlsStream as AsyncStd1RustlsTlsStream;
#[cfg(feature = "tokio1")]
use tokio1_crate::io::{AsyncRead as _, AsyncWrite as _, ReadBuf as Tokio1ReadBuf};
#[cfg(feature = "tokio1")]
use tokio1_crate::net::{TcpStream as Tokio1TcpStream, ToSocketAddrs as Tokio1ToSocketAddrs};
use tokio1_crate::net::{
TcpSocket as Tokio1TcpSocket, TcpStream as Tokio1TcpStream,
ToSocketAddrs as Tokio1ToSocketAddrs,
};
#[cfg(feature = "tokio1-native-tls")]
use tokio1_native_tls_crate::TlsStream as Tokio1TlsStream;
#[cfg(feature = "tokio1-rustls-tls")]
@@ -33,6 +36,8 @@ use tokio1_rustls::client::TlsStream as Tokio1RustlsTlsStream;
))]
use super::InnerTlsParameters;
use super::TlsParameters;
#[cfg(feature = "tokio1")]
use crate::transport::smtp::client::net::resolved_address_filter;
use crate::transport::smtp::{error, Error};
/// A network stream
@@ -109,44 +114,59 @@ impl AsyncNetworkStream {
server: T,
timeout: Option<Duration>,
tls_parameters: Option<TlsParameters>,
local_addr: Option<IpAddr>,
) -> Result<AsyncNetworkStream, Error> {
async fn try_connect_timeout<T: Tokio1ToSocketAddrs>(
async fn try_connect<T: Tokio1ToSocketAddrs>(
server: T,
timeout: Duration,
timeout: Option<Duration>,
local_addr: Option<IpAddr>,
) -> Result<Tokio1TcpStream, Error> {
let addrs = tokio1_crate::net::lookup_host(server)
.await
.map_err(error::connection)?;
.map_err(error::connection)?
.filter(|resolved_addr| resolved_address_filter(resolved_addr, local_addr));
let mut last_err = None;
for addr in addrs {
let connect_future = Tokio1TcpStream::connect(&addr);
match tokio1_crate::time::timeout(timeout, connect_future).await {
Ok(Ok(stream)) => return Ok(stream),
Ok(Err(err)) => last_err = Some(err),
Err(_) => {
last_err = Some(io::Error::new(
io::ErrorKind::TimedOut,
"connection timed out",
))
let socket = match addr.ip() {
IpAddr::V4(_) => Tokio1TcpSocket::new_v4(),
IpAddr::V6(_) => Tokio1TcpSocket::new_v6(),
}
.map_err(error::connection)?;
if let Some(local_addr) = local_addr {
socket
.bind(SocketAddr::new(local_addr, 0))
.map_err(error::connection)?;
}
let connect_future = socket.connect(addr);
if let Some(timeout) = timeout {
match tokio1_crate::time::timeout(timeout, connect_future).await {
Ok(Ok(stream)) => return Ok(stream),
Ok(Err(err)) => last_err = Some(err),
Err(_) => {
last_err = Some(io::Error::new(
io::ErrorKind::TimedOut,
"connection timed out",
))
}
}
} else {
match connect_future.await {
Ok(stream) => return Ok(stream),
Err(err) => last_err = Some(err),
}
}
}
Err(match last_err {
Some(last_err) => error::connection(last_err),
None => error::connection("could not resolve to any address"),
None => error::connection("could not resolve to any supported address"),
})
}
let tcp_stream = match timeout {
Some(t) => try_connect_timeout(server, t).await?,
None => Tokio1TcpStream::connect(server)
.await
.map_err(error::connection)?,
};
let tcp_stream = try_connect(server, timeout, local_addr).await?;
let mut stream = AsyncNetworkStream::new(InnerAsyncNetworkStream::Tokio1Tcp(tcp_stream));
if let Some(tls_parameters) = tls_parameters {
stream.upgrade_tls(tls_parameters).await?;
@@ -160,6 +180,9 @@ impl AsyncNetworkStream {
timeout: Option<Duration>,
tls_parameters: Option<TlsParameters>,
) -> Result<AsyncNetworkStream, Error> {
// Unfortunately there doesn't currently seem to be a way to set the local address.
// Whilst we can create an AsyncStd1TcpStream from an existing socket, the socket needs to
// have connected first, which is a blocking operation.
async fn try_connect_timeout<T: AsyncStd1ToSocketAddrs>(
server: T,
timeout: Duration,

View File

@@ -1,7 +1,7 @@
use std::{
fmt::Display,
io::{self, BufRead, BufReader, Write},
net::ToSocketAddrs,
net::{IpAddr, ToSocketAddrs},
time::Duration,
};
@@ -58,8 +58,9 @@ impl SmtpConnection {
timeout: Option<Duration>,
hello_name: &ClientId,
tls_parameters: Option<&TlsParameters>,
local_address: Option<IpAddr>,
) -> Result<SmtpConnection, Error> {
let stream = NetworkStream::connect(server, timeout, tls_parameters)?;
let stream = NetworkStream::connect(server, timeout, tls_parameters, local_address)?;
let stream = BufReader::new(stream);
let mut conn = SmtpConnection {
stream,

View File

@@ -12,7 +12,7 @@
//! };
//!
//! let hello = ClientId::Domain("my_hostname".to_string());
//! let mut client = SmtpConnection::connect(&("localhost", SMTP_PORT), None, &hello, None)?;
//! let mut client = SmtpConnection::connect(&("localhost", SMTP_PORT), None, &hello, None, None)?;
//! client.command(Mail::new(Some("user@example.com".parse()?), vec![]))?;
//! client.command(Rcpt::new("user@example.org".parse()?, vec![]))?;
//! client.command(Data)?;
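The last None in the updated doc example is where a caller can now pass the local bind address. A hedged sketch of the same call with an explicit source IP (the address is a placeholder; the import paths for SmtpConnection, ClientId and SMTP_PORT are assumed, mirroring the module shown above):

use std::net::{IpAddr, Ipv4Addr};

use lettre::transport::smtp::client::SmtpConnection;
use lettre::transport::smtp::extension::ClientId;
use lettre::transport::smtp::SMTP_PORT;

fn connect_from_local_ip() -> Result<(), lettre::transport::smtp::Error> {
    let hello = ClientId::Domain("my_hostname".to_string());
    let mut client = SmtpConnection::connect(
        &("localhost", SMTP_PORT),
        None,                                              // no timeout
        &hello,
        None,                                              // plain connection, no TLS
        Some(IpAddr::V4(Ipv4Addr::new(192, 168, 1, 10))),  // local IP to bind before connecting
    )?;
    client.quit()?;
    Ok(())
}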

View File

@@ -1,7 +1,7 @@
use std::{
io::{self, Read, Write},
mem,
net::{Ipv4Addr, Shutdown, SocketAddr, SocketAddrV4, TcpStream, ToSocketAddrs},
net::{IpAddr, Ipv4Addr, Shutdown, SocketAddr, SocketAddrV4, TcpStream, ToSocketAddrs},
time::Duration,
};
@@ -9,6 +9,7 @@ use std::{
use native_tls::TlsStream;
#[cfg(feature = "rustls-tls")]
use rustls::{ClientConnection, ServerName, StreamOwned};
use socket2::{Domain, Protocol, Type};
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
use super::InnerTlsParameters;
@@ -83,19 +84,39 @@ impl NetworkStream {
server: T,
timeout: Option<Duration>,
tls_parameters: Option<&TlsParameters>,
local_addr: Option<IpAddr>,
) -> Result<NetworkStream, Error> {
fn try_connect_timeout<T: ToSocketAddrs>(
fn try_connect<T: ToSocketAddrs>(
server: T,
timeout: Duration,
timeout: Option<Duration>,
local_addr: Option<IpAddr>,
) -> Result<TcpStream, Error> {
let addrs = server.to_socket_addrs().map_err(error::connection)?;
let addrs = server
.to_socket_addrs()
.map_err(error::connection)?
.filter(|resolved_addr| resolved_address_filter(resolved_addr, local_addr));
let mut last_err = None;
for addr in addrs {
match TcpStream::connect_timeout(&addr, timeout) {
Ok(stream) => return Ok(stream),
Err(err) => last_err = Some(err),
let socket = socket2::Socket::new(
Domain::for_address(addr),
Type::STREAM,
Some(Protocol::TCP),
)
.map_err(error::connection)?;
bind_local_address(&socket, &addr, local_addr)?;
if let Some(timeout) = timeout {
match socket.connect_timeout(&addr.into(), timeout) {
Ok(_) => return Ok(socket.into()),
Err(err) => last_err = Some(err),
}
} else {
match socket.connect(&addr.into()) {
Ok(_) => return Ok(socket.into()),
Err(err) => last_err = Some(err),
}
}
}
@@ -105,11 +126,7 @@ impl NetworkStream {
})
}
let tcp_stream = match timeout {
Some(t) => try_connect_timeout(server, t)?,
None => TcpStream::connect(server).map_err(error::connection)?,
};
let tcp_stream = try_connect(server, timeout, local_addr)?;
let mut stream = NetworkStream::new(InnerNetworkStream::Tcp(tcp_stream));
if let Some(tls_parameters) = tls_parameters {
stream.upgrade_tls(tls_parameters)?;
@@ -289,3 +306,47 @@ impl Write for NetworkStream {
}
}
}
/// If the local address is set, binds the socket to this address.
/// If the local address is not set, the destination address is needed to determine the default
/// local address on some platforms.
/// See: https://github.com/hyperium/hyper/blob/faf24c6ad8eee1c3d5ccc9a4d4835717b8e2903f/src/client/connect/http.rs#L560
fn bind_local_address(
socket: &socket2::Socket,
dst_addr: &SocketAddr,
local_addr: Option<IpAddr>,
) -> Result<(), Error> {
match local_addr {
Some(local_addr) => {
socket
.bind(&SocketAddr::new(local_addr, 0).into())
.map_err(error::connection)?;
}
_ => {
if cfg!(windows) {
// Windows requires a socket be bound before calling connect
let any: SocketAddr = match dst_addr {
SocketAddr::V4(_) => ([0, 0, 0, 0], 0).into(),
SocketAddr::V6(_) => ([0, 0, 0, 0, 0, 0, 0, 0], 0).into(),
};
socket.bind(&any.into()).map_err(error::connection)?;
}
}
}
Ok(())
}
/// When we have an iterator of resolved remote addresses, we keep only those in the same
/// address family as the local bind address. If no local address is set, all addresses pass.
pub(crate) fn resolved_address_filter(
resolved_addr: &SocketAddr,
local_addr: Option<IpAddr>,
) -> bool {
match local_addr {
Some(local_addr) => match resolved_addr.ip() {
IpAddr::V4(_) => local_addr.is_ipv4(),
IpAddr::V6(_) => local_addr.is_ipv6(),
},
None => true,
}
}
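A small illustration of the contract this helper encodes: with an IPv4 local bind address only IPv4 candidates survive the filter, and with no local address nothing is dropped (the remote addresses are documentation placeholders; the function is crate-private, so this is a sketch of its behavior rather than public API):

use std::net::{IpAddr, Ipv4Addr, SocketAddr};

let local = Some(IpAddr::V4(Ipv4Addr::new(192, 168, 1, 10)));
let v4: SocketAddr = "203.0.113.7:25".parse().unwrap();
let v6: SocketAddr = "[2001:db8::7]:25".parse().unwrap();

assert!(resolved_address_filter(&v4, local));   // same family as the bind address: kept
assert!(!resolved_address_filter(&v6, local));  // family mismatch: dropped
assert!(resolved_address_filter(&v6, None));    // no local bind address: everything passes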

View File

@@ -221,6 +221,7 @@ impl SmtpClient {
self.info.timeout,
&self.info.hello_name,
tls_parameters,
None,
)?;
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]

View File

@@ -1,4 +1,4 @@
Date: Tue, 15 Nov 1994 08:12:31 -0000
Date: Tue, 15 Nov 1994 08:12:31 +0000
From: NoBody <nobody@domain.tld>
Reply-To: Yuin <yuin@domain.tld>
To: Hei <hei@domain.tld>

View File

@@ -43,7 +43,7 @@ mod sync {
"Reply-To: Yuin <yuin@domain.tld>\r\n",
"To: Hei <hei@domain.tld>\r\n",
"Subject: Happy new year\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"\r\n",
"Be happy!"
@@ -81,7 +81,7 @@ mod sync {
"Reply-To: Yuin <yuin@domain.tld>\r\n",
"To: Hei <hei@domain.tld>\r\n",
"Subject: Happy new year\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"\r\n",
"Be happy!"
@@ -141,7 +141,7 @@ mod tokio_1 {
"Reply-To: Yuin <yuin@domain.tld>\r\n",
"To: Hei <hei@domain.tld>\r\n",
"Subject: Happy new year\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"\r\n",
"Be happy!"
@@ -192,7 +192,7 @@ mod asyncstd_1 {
"Reply-To: Yuin <yuin@domain.tld>\r\n",
"To: Hei <hei@domain.tld>\r\n",
"Subject: Happy new year\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 -0000\r\n",
"Date: Tue, 15 Nov 1994 08:12:31 +0000\r\n",
"Content-Transfer-Encoding: 7bit\r\n",
"\r\n",
"Be happy!"