Skip to content

Commit df613e5

Browse files
committed
Merge #424: bitreq: Remove urlencoding dependence
0357727 Update lock files (Jamil Lambert, PhD) de77534 Remove urlencoding feature (Jamil Lambert, PhD) 332b299 Remove urlencoding dependence (Jamil Lambert, PhD) 0f07b6b Move std feature gate to individual functions (Jamil Lambert, PhD) 77cf501 Refactor percent encoding (Jamil Lambert, PhD) bde21be Split out char percent encoding (Jamil Lambert, PhD) bd8f133 Run the formatter on master (Jamil Lambert, PhD) Pull request description: `bitreq` has an `http_url` module that has encoding functionality but `request` depends on `urlencoding` to do the encoding. `http_url` can be expanded to do all of the encoding, and then the dependency on `urlencoding` is no longer needed. Addresses one of the points in #402. - Format master - Split out the character encoding from the `HttpUrl::parse` function into its own function. - Refactor the character encoding function. - Move the `std` feature gate off the `http_url` module and onto the individual items in it so that the character encoding function can be accessed in a no-std build. - Replace the use of `urlencoding` in request with the character encoding in `http_url` by adding a string parser that calls the existing char parser. Remove the `urlencoding` dependence. - Remove the `urlencoding` feature. This was done separately from the previous patch in case it should instead be left in. - Update the lock files. ACKs for top commit: tcharding: ACK 0357727 Tree-SHA512: 0312990439dbdeb80bf5e75bfc0b06dd07f6e08288e632488eccb3e98e2aa53ad2e787a0955dbca244811e4bcba9d921d4661478c7d6228de4382f8a58c6a024
2 parents 0f0dec1 + 0357727 commit df613e5

File tree

7 files changed

+76
-112
lines changed

7 files changed

+76
-112
lines changed

Cargo-minimal.lock

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,6 @@ dependencies = [
170170
"tiny_http",
171171
"tokio",
172172
"tokio-rustls",
173-
"urlencoding",
174173
"webpki-roots",
175174
]
176175

@@ -850,12 +849,6 @@ version = "0.9.0"
850849
source = "registry+https://github.com/rust-lang/crates.io-index"
851850
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
852851

853-
[[package]]
854-
name = "urlencoding"
855-
version = "2.1.3"
856-
source = "registry+https://github.com/rust-lang/crates.io-index"
857-
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
858-
859852
[[package]]
860853
name = "wasi"
861854
version = "0.11.0+wasi-snapshot-preview1"

Cargo-recent.lock

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,6 @@ dependencies = [
170170
"tiny_http",
171171
"tokio",
172172
"tokio-rustls",
173-
"urlencoding",
174173
"webpki-roots",
175174
]
176175

@@ -883,12 +882,6 @@ version = "0.9.0"
883882
source = "registry+https://github.com/rust-lang/crates.io-index"
884883
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
885884

886-
[[package]]
887-
name = "urlencoding"
888-
version = "2.1.3"
889-
source = "registry+https://github.com/rust-lang/crates.io-index"
890-
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
891-
892885
[[package]]
893886
name = "wasi"
894887
version = "0.11.0+wasi-snapshot-preview1"

bitreq/Cargo.toml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,6 @@ rust-version = "1.75.0"
1616
maintenance = { status = "experimental" }
1717

1818
[dependencies]
19-
# For the urlencoding feature:
20-
urlencoding = { version = "2.1.0", optional = true }
2119
# For the punycode feature:
2220
punycode = { version = "0.4.1", optional = true }
2321
# For the json-using-serde feature:

bitreq/src/http_url.rs

Lines changed: 40 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,18 @@
1+
#[cfg(feature = "std")]
12
use core::fmt::{self, Write};
23

4+
#[cfg(feature = "std")]
35
use crate::Error;
46

7+
#[cfg(feature = "std")]
58
#[derive(Clone, Copy, PartialEq)]
69
pub(crate) enum Port {
710
ImplicitHttp,
811
ImplicitHttps,
912
Explicit(u32),
1013
}
1114

15+
#[cfg(feature = "std")]
1216
impl Port {
1317
pub(crate) fn port(self) -> u32 {
1418
match self {
@@ -27,6 +31,7 @@ impl Port {
2731
/// ```text
2832
/// scheme "://" host [ ":" port ] path [ "?" query ] [ "#" fragment ]
2933
/// ```
34+
#[cfg(feature = "std")]
3035
#[derive(Clone, PartialEq)]
3136
pub(crate) struct HttpUrl {
3237
/// If scheme is "https", true, if "http", false.
@@ -41,6 +46,7 @@ pub(crate) struct HttpUrl {
4146
pub(crate) fragment: Option<String>,
4247
}
4348

49+
#[cfg(feature = "std")]
4450
impl HttpUrl {
4551
pub(crate) fn parse(url: &str, redirected_from: Option<&HttpUrl>) -> Result<HttpUrl, Error> {
4652
enum UrlParseStatus {
@@ -96,9 +102,6 @@ impl HttpUrl {
96102
path_and_query = Some(resource);
97103
resource = String::new();
98104
}
99-
#[cfg(not(feature = "urlencoding"))]
100-
UrlParseStatus::PathAndQuery | UrlParseStatus::Fragment => resource.push(c),
101-
#[cfg(feature = "urlencoding")]
102105
UrlParseStatus::PathAndQuery | UrlParseStatus::Fragment => match c {
103106
// All URL-'safe' characters, plus URL 'special
104107
// characters' like &, #, =, / ,?
@@ -116,25 +119,8 @@ impl HttpUrl {
116119
| '?' => {
117120
resource.push(c);
118121
}
119-
// There is probably a simpler way to do this, but this
120-
// method avoids any heap allocations (except extending
121-
// `resource`)
122-
_ => {
123-
// Any UTF-8 character can fit in 4 bytes
124-
let mut utf8_buf = [0u8; 4];
125-
// Bytes fill buffer from the front
126-
c.encode_utf8(&mut utf8_buf);
127-
// Slice disregards the unused portion of the buffer
128-
utf8_buf[..c.len_utf8()].iter().for_each(|byte| {
129-
// Convert byte to URL escape, e.g. %21 for b'!'
130-
let rem = *byte % 16;
131-
let right_char = to_hex_digit(rem);
132-
let left_char = to_hex_digit((*byte - rem) >> 4);
133-
resource.push('%');
134-
resource.push(left_char);
135-
resource.push(right_char);
136-
});
137-
}
122+
// Every other character gets percent-encoded.
123+
_ => percent_encode_char(c, &mut resource),
138124
},
139125
}
140126
}
@@ -191,12 +177,37 @@ impl HttpUrl {
191177
}
192178
}
193179

194-
// https://github.com/kornelski/rust_urlencoding/blob/a4df8027ab34a86a63f1be727965cf101556403f/src/enc.rs#L130-L136
195-
// Converts a UTF-8 byte to a single hexadecimal character
196-
#[cfg(feature = "urlencoding")]
197-
fn to_hex_digit(digit: u8) -> char {
198-
match digit {
199-
0..=9 => (b'0' + digit) as char,
200-
10..=255 => (b'A' - 10 + digit) as char,
180+
/// Returns the `%HH` triplet representing `byte` for percent encoding.
181+
fn percent_encoded_triplet(byte: u8) -> [char; 3] {
182+
const HEX: &[u8; 16] = b"0123456789ABCDEF";
183+
['%', HEX[(byte >> 4) as usize] as char, HEX[(byte & 0x0F) as usize] as char]
184+
}
185+
186+
/// Percent-encodes a char and appends it to `result`.
187+
/// Unreserved characters (0-9, A-Z, a-z, -, ., _, ~) are not encoded.
188+
pub(crate) fn percent_encode_char(c: char, result: &mut String) {
189+
match c {
190+
// All URL-'safe' characters are not encoded
191+
'0'..='9' | 'A'..='Z' | 'a'..='z' | '-' | '.' | '_' | '~' => {
192+
result.push(c);
193+
}
194+
_ => {
195+
// Any UTF-8 character can fit in 4 bytes
196+
let mut utf8_buf = [0u8; 4];
197+
c.encode_utf8(&mut utf8_buf).as_bytes().iter().for_each(|byte| {
198+
for ch in percent_encoded_triplet(*byte) {
199+
result.push(ch);
200+
}
201+
});
202+
}
203+
}
204+
}
205+
206+
/// Percent-encodes the entire input string and returns the encoded version.
207+
pub(crate) fn percent_encode_string(input: &str) -> String {
208+
let mut encoded = String::with_capacity(input.len());
209+
for ch in input.chars() {
210+
percent_encode_char(ch, &mut encoded);
201211
}
212+
encoded
202213
}

bitreq/src/lib.rs

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -72,12 +72,6 @@
7272
//!
7373
//! This feature enables HTTP proxy support. See [Proxy].
7474
//!
75-
//! ## `urlencoding`
76-
//!
77-
//! This feature enables percent-encoding for the URL resource when
78-
//! creating a request and any subsequently added parameters from
79-
//! [`Request::with_param`].
80-
//!
8175
//! # Examples
8276
//!
8377
//! ## Get
@@ -231,7 +225,6 @@ extern crate alloc;
231225
#[cfg(feature = "std")]
232226
mod connection;
233227
mod error;
234-
#[cfg(feature = "std")]
235228
mod http_url;
236229
#[cfg(feature = "proxy")]
237230
mod proxy;

bitreq/src/request.rs

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ use core::fmt::Write;
77
use crate::connection::AsyncConnection;
88
#[cfg(feature = "std")]
99
use crate::connection::Connection;
10+
use crate::http_url::percent_encode_string;
1011
#[cfg(feature = "std")]
1112
use crate::http_url::{HttpUrl, Port};
1213
#[cfg(feature = "proxy")]
@@ -98,12 +99,8 @@ impl Request {
9899
/// This is only the request's data, it is not sent yet. For
99100
/// sending the request, see [`send`](struct.Request.html#method.send).
100101
///
101-
/// If `urlencoding` is not enabled, it is the responsibility of the
102-
/// user to ensure there are no illegal characters in the URL.
103-
///
104-
/// If `urlencoding` is enabled, the resource part of the URL will be
105-
/// encoded. Any URL special characters (e.g. &, #, =) are not encoded
106-
/// as they are assumed to be meaningful parameters etc.
102+
/// The resource part of the URL will be encoded. Any URL special characters (e.g. &, #, =) are
103+
/// not encoded as they are assumed to be meaningful parameters etc.
107104
pub fn new<T: Into<URL>>(method: Method, url: T) -> Request {
108105
Request {
109106
method,
@@ -151,18 +148,12 @@ impl Request {
151148
/// Adds given key and value as query parameter to request url
152149
/// (resource).
153150
///
154-
/// If `urlencoding` is not enabled, it is the responsibility
155-
/// of the user to ensure there are no illegal characters in the
156-
/// key or value.
157-
///
158-
/// If `urlencoding` is enabled, the key and value are both encoded.
151+
/// The key and value are both encoded.
159152
pub fn with_param<T: Into<String>, U: Into<String>>(mut self, key: T, value: U) -> Request {
160153
let key = key.into();
161-
#[cfg(feature = "urlencoding")]
162-
let key = urlencoding::encode(&key);
154+
let key = percent_encode_string(&key);
163155
let value = value.into();
164-
#[cfg(feature = "urlencoding")]
165-
let value = urlencoding::encode(&value);
156+
let value = percent_encode_string(&value);
166157

167158
if !self.params.is_empty() {
168159
self.params.push('&');
@@ -610,7 +601,7 @@ mod parsing_tests {
610601
}
611602
}
612603

613-
#[cfg(all(test, feature = "urlencoding"))]
604+
#[cfg(all(test, feature = "std"))]
614605
mod encoding_tests {
615606
use super::{get, ParsedRequest};
616607

verify/src/reexports.rs

Lines changed: 29 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -52,22 +52,19 @@ pub fn check_type_reexports(version: Version) -> Result<()> {
5252
};
5353

5454
for type_name in version_defs.keys() {
55-
let exported = export_map.values().any(|info| {
56-
info.source_version == version_name && type_name == &info.source_ident
57-
});
55+
let exported = export_map
56+
.values()
57+
.any(|info| info.source_version == version_name && type_name == &info.source_ident);
5858
if !exported {
59-
missing.push(format!(
60-
"{} defines {} but does not re-export it",
61-
version_name, type_name
62-
));
59+
missing
60+
.push(format!("{} defines {} but does not re-export it", version_name, type_name));
6361
}
6462
}
6563

6664
// Checks all auxiliary types are re-exported.
6765
for (exported_name, export) in &export_map {
68-
if let Some(deps) = definitions
69-
.get(&export.source_version)
70-
.and_then(|map| map.get(&export.source_ident))
66+
if let Some(deps) =
67+
definitions.get(&export.source_version).and_then(|map| map.get(&export.source_ident))
7168
{
7269
for dep in deps {
7370
if !export_map.contains_key(dep) {
@@ -108,10 +105,7 @@ fn collect_version_dirs(src_dir: &Path) -> Result<Vec<String>> {
108105
}
109106

110107
/// Parses all versioned source files and records every public struct/enum name.
111-
fn collect_type_files_and_names(
112-
src_dir: &Path,
113-
versions: &[String],
114-
) -> Result<ParsedTypeFiles> {
108+
fn collect_type_files_and_names(src_dir: &Path, versions: &[String]) -> Result<ParsedTypeFiles> {
115109
let mut files = Vec::new();
116110
let mut names = HashSet::new();
117111

@@ -162,14 +156,18 @@ fn collect_type_definitions(
162156
match item {
163157
Item::Struct(item_struct) if is_public(&item_struct.vis) => {
164158
let deps = collect_deps_from_fields(&item_struct.fields, known_names);
165-
defs.entry(version.clone()).or_default().insert(item_struct.ident.to_string(), deps);
159+
defs.entry(version.clone())
160+
.or_default()
161+
.insert(item_struct.ident.to_string(), deps);
166162
}
167163
Item::Enum(item_enum) if is_public(&item_enum.vis) => {
168164
let mut deps = BTreeSet::new();
169165
for variant in item_enum.variants {
170166
deps.extend(collect_deps_from_fields(&variant.fields, known_names));
171167
}
172-
defs.entry(version.clone()).or_default().insert(item_enum.ident.to_string(), deps);
168+
defs.entry(version.clone())
169+
.or_default()
170+
.insert(item_enum.ident.to_string(), deps);
173171
}
174172
_ => {}
175173
}
@@ -180,15 +178,12 @@ fn collect_type_definitions(
180178
}
181179

182180
/// Reads `mod.rs` for the chosen version and lists its public re-exports.
183-
fn collect_exports(
184-
src_dir: &Path,
185-
version: &str,
186-
) -> Result<HashMap<String, ExportInfo>> {
181+
fn collect_exports(src_dir: &Path, version: &str) -> Result<HashMap<String, ExportInfo>> {
187182
let mod_path = src_dir.join(version).join("mod.rs");
188-
let content = fs::read_to_string(&mod_path)
189-
.with_context(|| format!("reading {}", mod_path.display()))?;
190-
let syntax = syn::parse_file(&content)
191-
.with_context(|| format!("parsing {}", mod_path.display()))?;
183+
let content =
184+
fs::read_to_string(&mod_path).with_context(|| format!("reading {}", mod_path.display()))?;
185+
let syntax =
186+
syn::parse_file(&content).with_context(|| format!("parsing {}", mod_path.display()))?;
192187
let mut exports = HashMap::new();
193188

194189
for item in syntax.items {
@@ -213,16 +208,14 @@ fn collect_exports(
213208
fn collect_deps_from_fields(fields: &Fields, known_names: &HashSet<String>) -> BTreeSet<String> {
214209
let mut deps = BTreeSet::new();
215210
match fields {
216-
Fields::Named(named) => {
211+
Fields::Named(named) =>
217212
for field in &named.named {
218213
collect_type_dependencies(&field.ty, known_names, &mut deps);
219-
}
220-
}
221-
Fields::Unnamed(unnamed) => {
214+
},
215+
Fields::Unnamed(unnamed) =>
222216
for field in &unnamed.unnamed {
223217
collect_type_dependencies(&field.ty, known_names, &mut deps);
224-
}
225-
}
218+
},
226219
Fields::Unit => {}
227220
}
228221
deps
@@ -255,11 +248,10 @@ fn collect_type_dependencies(
255248
Type::Reference(reference) => collect_type_dependencies(&reference.elem, known_names, deps),
256249
Type::Paren(paren) => collect_type_dependencies(&paren.elem, known_names, deps),
257250
Type::Group(group) => collect_type_dependencies(&group.elem, known_names, deps),
258-
Type::Tuple(tuple) => {
251+
Type::Tuple(tuple) =>
259252
for elem in &tuple.elems {
260253
collect_type_dependencies(elem, known_names, deps);
261-
}
262-
}
254+
},
263255
Type::Array(array) => collect_type_dependencies(&array.elem, known_names, deps),
264256
Type::Slice(slice) => collect_type_dependencies(&slice.elem, known_names, deps),
265257
Type::Ptr(ptr) => collect_type_dependencies(&ptr.elem, known_names, deps),
@@ -278,21 +270,17 @@ fn flatten_use_tree(prefix: Vec<String>, tree: &UseTree, acc: &mut Vec<UseEntry>
278270
UseTree::Rename(rename) => {
279271
let mut path = prefix;
280272
path.push(rename.ident.to_string());
281-
acc.push(UseEntry {
282-
path,
283-
rename: Some(rename.rename.to_string()),
284-
});
273+
acc.push(UseEntry { path, rename: Some(rename.rename.to_string()) });
285274
}
286275
UseTree::Path(path) => {
287276
let mut new_prefix = prefix;
288277
new_prefix.push(path.ident.to_string());
289278
flatten_use_tree(new_prefix, &path.tree, acc);
290279
}
291-
UseTree::Group(group) => {
280+
UseTree::Group(group) =>
292281
for item in &group.items {
293282
flatten_use_tree(prefix.clone(), item, acc);
294-
}
295-
}
283+
},
296284
UseTree::Glob(_) => {}
297285
}
298286
}
@@ -303,10 +291,7 @@ fn interpret_flat_use(target_version: &str, entry: &UseEntry) -> Option<ExportIn
303291
return None;
304292
}
305293
let source_ident = entry.path.last()?.clone();
306-
let exported_ident = entry
307-
.rename
308-
.clone()
309-
.unwrap_or_else(|| source_ident.clone());
294+
let exported_ident = entry.rename.clone().unwrap_or_else(|| source_ident.clone());
310295

311296
match entry.path.first()?.as_str() {
312297
"self" => Some(ExportInfo {

0 commit comments

Comments
 (0)