Fix remaining typos

This commit is contained in:
DaniPopes 2023-04-10 21:02:49 +02:00
parent b410f3f7ae
commit f470c29936
No known key found for this signature in database
GPG Key ID: 0F09640DDB7AC692
13 changed files with 68 additions and 68 deletions

View File

@ -257,7 +257,7 @@ changelog-seen = 2
#python = "python" #python = "python"
# The path to the REUSE executable to use. Note that REUSE is not required in # The path to the REUSE executable to use. Note that REUSE is not required in
# most cases, as our tooling relies on a cached (and shrinked) copy of the # most cases, as our tooling relies on a cached (and shrunk) copy of the
# REUSE output present in the git repository and in our source tarballs. # REUSE output present in the git repository and in our source tarballs.
# #
# REUSE is only needed if your changes caused the overall licensing of the # REUSE is only needed if your changes caused the overall licensing of the

View File

@ -139,7 +139,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
sha: sha.to_owned(), sha: sha.to_owned(),
short_sha: short_sha.to_owned(), short_sha: short_sha.to_owned(),
}, },
_ => panic!("the `git-comit-info` file is malformed"), _ => panic!("the `git-commit-info` file is malformed"),
}; };
Some(info) Some(info)
} else { } else {

View File

@ -643,7 +643,7 @@ Examples:
```rust ```rust
match foo { match foo {
foo => bar, foo => bar,
a_very_long_patten | another_pattern if an_expression() => { a_very_long_pattern | another_pattern if an_expression() => {
no_room_for_this_expression() no_room_for_this_expression()
} }
foo => { foo => {

View File

@ -3,4 +3,4 @@
-------------------- --------------------
The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`. The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`.
The default is markdown; currently JSON is also supported. JSON can be useful for programatically manipulating the results (e.g. to find the item that took the longest to compile). The default is markdown; currently JSON is also supported. JSON can be useful for programmatically manipulating the results (e.g. to find the item that took the longest to compile).

View File

@ -119,7 +119,7 @@
<SetProperty Sequence="ui" Before="CostFinalize" <SetProperty Sequence="ui" Before="CostFinalize"
Id="WixAppFolder" Value="WixPerUserFolder">NOT ALLUSERS</SetProperty> Id="WixAppFolder" Value="WixPerUserFolder">NOT ALLUSERS</SetProperty>
<!-- UI sets ALLUSERS per user selection; progagate this choice to MSIINSTALLPERUSER before executing installation actions --> <!-- UI sets ALLUSERS per user selection; propagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<SetProperty Sequence="ui" Before="ExecuteAction" <SetProperty Sequence="ui" Before="ExecuteAction"
Id="MSIINSTALLPERUSER" Value="1">NOT ALLUSERS</SetProperty> Id="MSIINSTALLPERUSER" Value="1">NOT ALLUSERS</SetProperty>

View File

@ -550,7 +550,7 @@ pub enum Type {
DynTrait(DynTrait), DynTrait(DynTrait),
/// Parameterized types /// Parameterized types
Generic(String), Generic(String),
/// Built in numberic (i*, u*, f*) types, bool, and char /// Built in numeric (i*, u*, f*) types, bool, and char
Primitive(String), Primitive(String),
/// `extern "ABI" fn` /// `extern "ABI" fn`
FunctionPointer(Box<FunctionPointer>), FunctionPointer(Box<FunctionPointer>),

View File

@ -10,8 +10,8 @@ use std::path::{Path, PathBuf};
#[derive(serde::Serialize)] #[derive(serde::Serialize)]
#[serde(rename_all = "kebab-case", tag = "type")] #[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node<L> { pub(crate) enum Node<L> {
Root { childs: Vec<Node<L>> }, Root { children: Vec<Node<L>> },
Directory { name: PathBuf, childs: Vec<Node<L>>, license: Option<L> }, Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
File { name: PathBuf, license: L }, File { name: PathBuf, license: L },
Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L }, Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
Empty, Empty,
@ -48,14 +48,14 @@ impl Node<LicenseId> {
/// ``` /// ```
fn merge_directories(&mut self) { fn merge_directories(&mut self) {
match self { match self {
Node::Root { childs } | Node::Directory { childs, license: None, .. } => { Node::Root { children } | Node::Directory { children, license: None, .. } => {
let mut directories = BTreeMap::new(); let mut directories = BTreeMap::new();
let mut files = Vec::new(); let mut files = Vec::new();
for child in childs.drain(..) { for child in children.drain(..) {
match child { match child {
Node::Directory { name, mut childs, license: None } => { Node::Directory { name, mut children, license: None } => {
directories.entry(name).or_insert_with(Vec::new).append(&mut childs); directories.entry(name).or_insert_with(Vec::new).append(&mut children);
} }
file @ Node::File { .. } => { file @ Node::File { .. } => {
files.push(file); files.push(file);
@ -73,14 +73,14 @@ impl Node<LicenseId> {
} }
} }
childs.extend(directories.into_iter().map(|(name, childs)| Node::Directory { children.extend(directories.into_iter().map(|(name, children)| Node::Directory {
name, name,
childs, children,
license: None, license: None,
})); }));
childs.append(&mut files); children.append(&mut files);
for child in &mut *childs { for child in &mut *children {
child.merge_directories(); child.merge_directories();
} }
} }
@ -105,13 +105,13 @@ impl Node<LicenseId> {
/// our inclusion of LLVM. /// our inclusion of LLVM.
fn collapse_in_licensed_directories(&mut self) { fn collapse_in_licensed_directories(&mut self) {
match self { match self {
Node::Directory { childs, license, .. } => { Node::Directory { children, license, .. } => {
for child in &mut *childs { for child in &mut *children {
child.collapse_in_licensed_directories(); child.collapse_in_licensed_directories();
} }
let mut licenses_count = BTreeMap::new(); let mut licenses_count = BTreeMap::new();
for child in &*childs { for child in &*children {
let Some(license) = child.license() else { continue }; let Some(license) = child.license() else { continue };
*licenses_count.entry(license).or_insert(0) += 1; *licenses_count.entry(license).or_insert(0) += 1;
} }
@ -122,12 +122,12 @@ impl Node<LicenseId> {
.map(|(license, _)| license); .map(|(license, _)| license);
if let Some(most_popular_license) = most_popular_license { if let Some(most_popular_license) = most_popular_license {
childs.retain(|child| child.license() != Some(most_popular_license)); children.retain(|child| child.license() != Some(most_popular_license));
*license = Some(most_popular_license); *license = Some(most_popular_license);
} }
} }
Node::Root { childs } => { Node::Root { children } => {
for child in &mut *childs { for child in &mut *children {
child.collapse_in_licensed_directories(); child.collapse_in_licensed_directories();
} }
} }
@ -138,29 +138,29 @@ impl Node<LicenseId> {
} }
/// Reduce the depth of the tree by merging subdirectories with the same license as their /// Reduce the depth of the tree by merging subdirectories with the same license as their
/// parent directory into their parent, and adjusting the paths of the childs accordingly. /// parent directory into their parent, and adjusting the paths of the children accordingly.
fn merge_directory_licenses(&mut self) { fn merge_directory_licenses(&mut self) {
match self { match self {
Node::Root { childs } => { Node::Root { children } => {
for child in &mut *childs { for child in &mut *children {
child.merge_directory_licenses(); child.merge_directory_licenses();
} }
} }
Node::Directory { childs, license, .. } => { Node::Directory { children, license, .. } => {
let mut to_add = Vec::new(); let mut to_add = Vec::new();
for child in &mut *childs { for child in &mut *children {
child.merge_directory_licenses(); child.merge_directory_licenses();
let Node::Directory { let Node::Directory {
name: child_name, name: child_name,
childs: child_childs, children: child_children,
license: child_license, license: child_license,
} = child else { continue }; } = child else { continue };
if child_license != license { if child_license != license {
continue; continue;
} }
for mut child_child in child_childs.drain(..) { for mut child_child in child_children.drain(..) {
match &mut child_child { match &mut child_child {
Node::Root { .. } => { Node::Root { .. } => {
panic!("can't have a root inside another element"); panic!("can't have a root inside another element");
@ -181,7 +181,7 @@ impl Node<LicenseId> {
*child = Node::Empty; *child = Node::Empty;
} }
childs.append(&mut to_add); children.append(&mut to_add);
} }
Node::Empty => {} Node::Empty => {}
Node::File { .. } => {} Node::File { .. } => {}
@ -203,14 +203,14 @@ impl Node<LicenseId> {
directories: Vec<PathBuf>, directories: Vec<PathBuf>,
} }
match self { match self {
Node::Root { childs } | Node::Directory { childs, .. } => { Node::Root { children } | Node::Directory { children, .. } => {
let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new(); let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new();
for child in &mut *childs { for child in &mut *children {
child.merge_groups(); child.merge_groups();
match child { match child {
Node::Directory { name, childs, license: Some(license) } => { Node::Directory { name, children, license: Some(license) } => {
if childs.is_empty() { if children.is_empty() {
grouped grouped
.entry(*license) .entry(*license)
.or_insert_with(Grouped::default) .or_insert_with(Grouped::default)
@ -234,16 +234,16 @@ impl Node<LicenseId> {
for (license, mut grouped) in grouped.into_iter() { for (license, mut grouped) in grouped.into_iter() {
if grouped.files.len() + grouped.directories.len() <= 1 { if grouped.files.len() + grouped.directories.len() <= 1 {
if let Some(name) = grouped.files.pop() { if let Some(name) = grouped.files.pop() {
childs.push(Node::File { license, name }); children.push(Node::File { license, name });
} else if let Some(name) = grouped.directories.pop() { } else if let Some(name) = grouped.directories.pop() {
childs.push(Node::Directory { children.push(Node::Directory {
name, name,
childs: Vec::new(), children: Vec::new(),
license: Some(license), license: Some(license),
}); });
} }
} else { } else {
childs.push(Node::Group { children.push(Node::Group {
license, license,
files: grouped.files, files: grouped.files,
directories: grouped.directories, directories: grouped.directories,
@ -261,11 +261,11 @@ impl Node<LicenseId> {
/// sure to remove them from the tree. /// sure to remove them from the tree.
fn remove_empty(&mut self) { fn remove_empty(&mut self) {
match self { match self {
Node::Root { childs } | Node::Directory { childs, .. } => { Node::Root { children } | Node::Directory { children, .. } => {
for child in &mut *childs { for child in &mut *children {
child.remove_empty(); child.remove_empty();
} }
childs.retain(|child| !matches!(child, Node::Empty)); children.retain(|child| !matches!(child, Node::Empty));
} }
Node::Group { .. } => {} Node::Group { .. } => {}
Node::File { .. } => {} Node::File { .. } => {}
@ -275,7 +275,7 @@ impl Node<LicenseId> {
fn license(&self) -> Option<LicenseId> { fn license(&self) -> Option<LicenseId> {
match self { match self {
Node::Directory { childs, license: Some(license), .. } if childs.is_empty() => { Node::Directory { children, license: Some(license), .. } if children.is_empty() => {
Some(*license) Some(*license)
} }
Node::File { license, .. } => Some(*license), Node::File { license, .. } => Some(*license),
@ -285,7 +285,7 @@ impl Node<LicenseId> {
} }
pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> { pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
let mut childs = Vec::new(); let mut children = Vec::new();
// Ensure reproducibility of all future steps. // Ensure reproducibility of all future steps.
input.sort(); input.sort();
@ -295,15 +295,15 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() { for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory { node = Node::Directory {
name: component.as_os_str().into(), name: component.as_os_str().into(),
childs: vec![node], children: vec![node],
license: None, license: None,
}; };
} }
childs.push(node); children.push(node);
} }
Node::Root { childs } Node::Root { children }
} }
/// Convert a `Node<LicenseId>` into a `Node<&License>`, expanding all interned license IDs with a /// Convert a `Node<LicenseId>` into a `Node<&License>`, expanding all interned license IDs with a
@ -313,14 +313,14 @@ pub(crate) fn expand_interned_licenses(
interner: &LicensesInterner, interner: &LicensesInterner,
) -> Node<&License> { ) -> Node<&License> {
match node { match node {
Node::Root { childs } => Node::Root { Node::Root { children } => Node::Root {
childs: childs children: children
.into_iter() .into_iter()
.map(|child| expand_interned_licenses(child, interner)) .map(|child| expand_interned_licenses(child, interner))
.collect(), .collect(),
}, },
Node::Directory { name, childs, license } => Node::Directory { Node::Directory { name, children, license } => Node::Directory {
childs: childs children: children
.into_iter() .into_iter()
.map(|child| expand_interned_licenses(child, interner)) .map(|child| expand_interned_licenses(child, interner))
.collect(), .collect(),

View File

@ -20,17 +20,17 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>(); let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>();
match node { match node {
Node::Root { childs } => { Node::Root { children } => {
for child in childs { for child in children {
render_recursive(child, buffer, depth)?; render_recursive(child, buffer, depth)?;
} }
} }
Node::Directory { name, childs, license } => { Node::Directory { name, children, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?; render_license(&prefix, std::iter::once(name), license, buffer)?;
if !childs.is_empty() { if !children.is_empty() {
writeln!(buffer, "{prefix}")?; writeln!(buffer, "{prefix}")?;
writeln!(buffer, "{prefix}*Exceptions:*")?; writeln!(buffer, "{prefix}*Exceptions:*")?;
for child in childs { for child in children {
writeln!(buffer, "{prefix}")?; writeln!(buffer, "{prefix}")?;
render_recursive(child, buffer, depth + 1)?; render_recursive(child, buffer, depth + 1)?;
} }
@ -73,8 +73,8 @@ struct Metadata {
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")] #[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node { pub(crate) enum Node {
Root { childs: Vec<Node> }, Root { children: Vec<Node> },
Directory { name: String, childs: Vec<Node>, license: License }, Directory { name: String, children: Vec<Node>, license: License },
File { name: String, license: License }, File { name: String, license: License },
Group { files: Vec<String>, directories: Vec<String>, license: License }, Group { files: Vec<String>, directories: Vec<String>, license: License },
} }

View File

@ -237,7 +237,7 @@ fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
// Serde json doesn't implement Ord or Hash for Value, so we must // Serde json doesn't implement Ord or Hash for Value, so we must
// use a Vec here. While in theory that makes setwise equality // use a Vec here. While in theory that makes setwise equality
// O(n^2), in practice n will never be large enought to matter. // O(n^2), in practice n will never be large enough to matter.
let expected_values = let expected_values =
values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>(); values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
if expected_values.len() != got_values.len() { if expected_values.len() != got_values.len() {

View File

@ -1,6 +1,6 @@
use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary}; use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary};
/// A univeral way to represent an [`ItemEnum`] or [`ItemKind`] /// A universal way to represent an [`ItemEnum`] or [`ItemKind`]
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub(crate) enum Kind { pub(crate) enum Kind {
Module, Module,
@ -53,7 +53,7 @@ impl Kind {
Primitive => true, Primitive => true,
ForeignType => true, ForeignType => true,
// FIXME(adotinthevoid): I'm not sure if these are corrent // FIXME(adotinthevoid): I'm not sure if these are correct
Keyword => false, Keyword => false,
OpaqueTy => false, OpaqueTy => false,
ProcAttribute => false, ProcAttribute => false,

View File

@ -72,7 +72,7 @@ fn main() -> Result<()> {
) )
} }
[sel] => eprintln!( [sel] => eprintln!(
"{} not in index or paths, but refered to at '{}'", "{} not in index or paths, but referred to at '{}'",
err.id.0, err.id.0,
json_find::to_jsonpath(&sel) json_find::to_jsonpath(&sel)
), ),
@ -85,12 +85,12 @@ fn main() -> Result<()> {
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", "); .join(", ");
eprintln!( eprintln!(
"{} not in index or paths, but refered to at {sels}", "{} not in index or paths, but referred to at {sels}",
err.id.0 err.id.0
); );
} else { } else {
eprintln!( eprintln!(
"{} not in index or paths, but refered to at '{}' and {} more", "{} not in index or paths, but referred to at '{}' and {} more",
err.id.0, err.id.0,
json_find::to_jsonpath(&sel), json_find::to_jsonpath(&sel),
sels.len() - 1, sels.len() - 1,

View File

@ -86,7 +86,7 @@ def gh_url():
return os.environ['TOOLSTATE_ISSUES_API_URL'] return os.environ['TOOLSTATE_ISSUES_API_URL']
def maybe_delink(message): def maybe_unlink(message):
# type: (str) -> str # type: (str) -> str
if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None: if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
return message.replace("@", "") return message.replace("@", "")
@ -109,7 +109,7 @@ def issue(
else: else:
status_description = 'no longer builds' status_description = 'no longer builds'
request = json.dumps({ request = json.dumps({
'body': maybe_delink(textwrap.dedent('''\ 'body': maybe_unlink(textwrap.dedent('''\
Hello, this is your friendly neighborhood mergebot. Hello, this is your friendly neighborhood mergebot.
After merging PR {}, I observed that the tool {} {}. After merging PR {}, I observed that the tool {} {}.
A follow-up PR to the repository {} is needed to fix the fallout. A follow-up PR to the repository {} is needed to fix the fallout.
@ -285,7 +285,7 @@ try:
issue_url = gh_url() + '/{}/comments'.format(number) issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request( response = urllib2.urlopen(urllib2.Request(
issue_url, issue_url,
json.dumps({'body': maybe_delink(message)}).encode(), json.dumps({'body': maybe_unlink(message)}).encode(),
{ {
'Authorization': 'token ' + github_token, 'Authorization': 'token ' + github_token,
'Content-Type': 'application/json', 'Content-Type': 'application/json',

View File

@ -344,11 +344,11 @@ message = "Some changes occurred in `const_evaluatable.rs`"
cc = ["@BoxyUwU"] cc = ["@BoxyUwU"]
[mentions."compiler/rustc_middle/src/ty/abstract_const.rs"] [mentions."compiler/rustc_middle/src/ty/abstract_const.rs"]
message = "Some changes occured in `abstract_const.rs`" message = "Some changes occurred in `abstract_const.rs`"
cc = ["@BoxyUwU"] cc = ["@BoxyUwU"]
[mentions."compiler/rustc_ty_utils/src/consts.rs"] [mentions."compiler/rustc_ty_utils/src/consts.rs"]
message = "Some changes occured in `rustc_ty_utils::consts.rs`" message = "Some changes occurred in `rustc_ty_utils::consts.rs`"
cc = ["@BoxyUwU"] cc = ["@BoxyUwU"]
[mentions."compiler/rustc_trait_selection/src/solve/"] [mentions."compiler/rustc_trait_selection/src/solve/"]