Auto merge of #44654 - TimNN:rollup, r=TimNN

Rollup of 17 pull requests

- Successful merges: #44073, #44088, #44381, #44397, #44509, #44533, #44549, #44553, #44562, #44567, #44595, #44604, #44617, #44622, #44630, #44639, #44647
- Failed merges:
bors 2017-09-17 11:19:56 +00:00
commit 1cdd68922d
63 changed files with 880 additions and 287 deletions

.gitmodules vendored

@ -36,3 +36,6 @@
[submodule "src/tools/clippy"]
path = src/tools/clippy
url = https://github.com/rust-lang-nursery/rust-clippy.git
[submodule "src/tools/rustfmt"]
path = src/tools/rustfmt
url = https://github.com/rust-lang-nursery/rustfmt.git


@ -193,7 +193,7 @@ before_install:
install:
- case "$TRAVIS_OS_NAME" in
linux)
travis_retry curl -fo $HOME/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-unknown-linux-musl &&
travis_retry curl -fo $HOME/stamp https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-17-stamp-x86_64-unknown-linux-musl &&
chmod +x $HOME/stamp &&
export PATH=$PATH:$HOME
;;
@ -202,9 +202,9 @@ install:
travis_retry brew update &&
travis_retry brew install xz;
fi &&
travis_retry curl -fo /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
travis_retry curl -fo /usr/local/bin/sccache https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
travis_retry curl -fo /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
travis_retry curl -fo /usr/local/bin/stamp https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
;;
esac
@ -287,12 +287,12 @@ before_deploy:
deploy:
- provider: s3
bucket: rust-lang-ci
bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds
acl: public_read
region: us-east-1
region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
@ -301,12 +301,12 @@ deploy:
condition: $DEPLOY = 1
- provider: s3
bucket: rust-lang-ci
bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds-try
acl: public_read
region: us-east-1
region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
@ -317,12 +317,12 @@ deploy:
# this is the same as the above deployment provider except that it uploads to
# a slightly different directory and has a different trigger
- provider: s3
bucket: rust-lang-ci
bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds-alt
acl: public_read
region: us-east-1
region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="


@ -41,13 +41,13 @@ environment:
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: python x.py test
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
- MSYS_BITS: 64
SCRIPT: python x.py test
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
@ -68,14 +68,14 @@ environment:
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended
SCRIPT: python x.py dist
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DEPLOY: 1
- MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DEPLOY: 1
@ -133,25 +133,25 @@ install:
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
- mv 2017-05-12-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
#
# Note that this is originally from the github releases page of Ninja
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-15-ninja-win.zip
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-15-ninja-win.zip
- 7z x 2017-03-15-ninja-win.zip
- set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --enable-ninja
# - set PATH=%PATH%;%CD% -- this already happens above for sccache
# Install InnoSetup to get `iscc` used to produce installers
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-08-22-is.exe
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-08-22-is.exe
- 2017-08-22-is.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
# Help debug some handle issues on AppVeyor
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
@ -189,9 +189,9 @@ deploy:
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
bucket: rust-lang-ci
bucket: rust-lang-ci2
set_public: true
region: us-east-1
region: us-west-1
artifact: /.*/
folder: rustc-builds
on:
@ -206,9 +206,9 @@ deploy:
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
bucket: rust-lang-ci
bucket: rust-lang-ci2
set_public: true
region: us-east-1
region: us-west-1
artifact: /.*/
folder: rustc-builds-alt
on:

fn.rs

@ -1,8 +0,0 @@
fn foo(x: fn(&u8, &u8), y: Vec<&u8>, z: &u8) {
// Debruijn 1 1 1 1
// Anon-Index 0 1 0 1
// ------
// debruijn indices are shifted by 1 in here
y.push(z); // index will be zero or one
}

src/Cargo.lock generated

@ -1351,7 +1351,7 @@ dependencies = [
"rls-rustc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfmt-nightly 0.2.5",
"serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1822,7 +1822,6 @@ dependencies = [
[[package]]
name = "rustfmt-nightly"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2562,7 +2561,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd34691a510938bb67fe0444fb363103c73ffb31c121d1e16bc92d8945ea8ff"
"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7d6dbb39239e54df780a850721fba87b3fdb2e645b39041742ec111369cec6af"
"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
"checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
"checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57"


@ -18,6 +18,7 @@ members = [
"tools/cargo",
"tools/rustdoc",
"tools/rls",
"tools/rustfmt",
# FIXME(https://github.com/rust-lang/cargo/issues/4089): move these to exclude
"tools/rls/test_data/borrow_error",
"tools/rls/test_data/completion",
@ -56,5 +57,13 @@ debug-assertions = false
debug = false
debug-assertions = false
[patch.'https://github.com/rust-lang/cargo']
[patch."https://github.com/rust-lang/cargo"]
cargo = { path = "tools/cargo" }
# Override rustfmt dependencies both on the repo and the crate (the RLS
# sometimes uses either).
# FIXME should only need the crates.io patch, long term.
[patch.'https://github.com/rust-lang-nursery/rustfmt']
rustfmt-nightly = { path = "tools/rustfmt" }
[patch.crates-io]
rustfmt-nightly = { path = "tools/rustfmt" }


@ -249,11 +249,11 @@ impl<'a> Builder<'a> {
tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest,
tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
native::Llvm),
native::Llvm, tool::Rustfmt),
Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc,
check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs,
check::ErrorIndex, check::Distcheck),
check::ErrorIndex, check::Distcheck, check::Rustfmt),
Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,


@ -253,6 +253,47 @@ impl Step for Rls {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustfmt {
stage: u32,
host: Interned<String>,
}
impl Step for Rustfmt {
type Output = ();
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
run.path("src/tools/rustfmt")
}
fn make_run(run: RunConfig) {
run.builder.ensure(Rustfmt {
stage: run.builder.top_stage,
host: run.target,
});
}
/// Runs `cargo test` for rustfmt.
fn run(self, builder: &Builder) {
let build = builder.build;
let stage = self.stage;
let host = self.host;
let compiler = builder.compiler(stage, host);
builder.ensure(tool::Rustfmt { compiler, target: self.host });
let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
cargo.arg("--manifest-path").arg(build.src.join("src/tools/rustfmt/Cargo.toml"));
// Don't build tests dynamically, just a pain to work with
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
builder.add_rustc_lib_path(compiler, &mut cargo);
try_run(build, &mut cargo);
}
}
fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
// Configure PATH to find the right rustc. NB. we have to use PATH
// and not RUSTC because the Cargo test suite has tests that will

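With the `check::Rustfmt` step above wired up for the `src/tools/rustfmt` path, running rustfmt's test suite through bootstrap should look like `./x.py test src/tools/rustfmt`; that exact invocation is an assumption based on the path registered in `should_run`, not something this diff spells out.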

@ -55,6 +55,7 @@ check-aux:
src/tools/cargotest \
src/tools/cargo \
src/tools/rls \
src/tools/rustfmt \
src/test/pretty \
src/test/run-pass/pretty \
src/test/run-fail/pretty \


@ -350,7 +350,7 @@ impl Step for Openssl {
if !tarball.exists() {
let tmp = tarball.with_extension("tmp");
// originally from https://www.openssl.org/source/...
let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
let url = format!("https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}",
name);
let mut ok = false;
for _ in 0..3 {


@ -445,6 +445,40 @@ impl Step for Rls {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustfmt {
pub compiler: Compiler,
pub target: Interned<String>,
}
impl Step for Rustfmt {
type Output = PathBuf;
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
run.path("src/tools/rustfmt").default_condition(builder.build.config.extended)
}
fn make_run(run: RunConfig) {
run.builder.ensure(Rustfmt {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
target: run.target,
});
}
fn run(self, builder: &Builder) -> PathBuf {
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: "rustfmt",
mode: Mode::Librustc,
path: "src/tools/rustfmt",
})
}
}
impl<'a> Builder<'a> {
/// Get a `Command` which is ready to run `tool` in `stage` built for
/// `host`.


@ -15,7 +15,7 @@ mkdir /usr/local/mips-linux-musl
# originally from
# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/
# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
URL="https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror"
URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror"
FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2"
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2


@ -15,7 +15,7 @@ mkdir /usr/local/mipsel-linux-musl
# Note that this originally came from:
# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
URL="https://s3.amazonaws.com/rust-lang-ci/libc"
URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc"
FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2


@ -0,0 +1,49 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
autoconf \
automake \
bison \
bzip2 \
ca-certificates \
cmake \
curl \
file \
flex \
g++ \
gawk \
git \
libcurl4-openssl-dev \
libssl-dev \
make \
nasm \
pkg-config \
python2.7 \
sudo \
texinfo \
wget \
xz-utils \
zlib1g-dev
COPY dist-x86_64-haiku/llvm-config.sh /bin/llvm-config-haiku
ENV ARCH=x86_64
WORKDIR /tmp
COPY dist-x86_64-haiku/build-toolchain.sh /tmp/
RUN /tmp/build-toolchain.sh $ARCH
COPY dist-x86_64-haiku/fetch-packages.sh /tmp/
RUN /tmp/fetch-packages.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV HOST=x86_64-unknown-haiku
ENV TARGET=target.$HOST
ENV RUST_CONFIGURE_ARGS --host=$HOST --target=$HOST --disable-jemalloc \
--set=$TARGET.cc=x86_64-unknown-haiku-gcc \
--set=$TARGET.cxx=x86_64-unknown-haiku-g++ \
--set=$TARGET.llvm-config=/bin/llvm-config-haiku
ENV SCRIPT python2.7 ../x.py dist


@ -0,0 +1,74 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
ARCH=$1
TOP=$(pwd)
BUILDTOOLS=$TOP/buildtools
HAIKU=$TOP/haiku
OUTPUT=/tools
SYSROOT=$OUTPUT/cross-tools-$ARCH/sysroot
PACKAGE_ROOT=/system
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
set -x
}
# First up, build a cross-compiler
git clone --depth=1 https://git.haiku-os.org/haiku
git clone --depth=1 https://git.haiku-os.org/buildtools
cd $BUILDTOOLS/jam
hide_output make
hide_output ./jam0 install
mkdir -p $OUTPUT
cd $OUTPUT
hide_output $HAIKU/configure --build-cross-tools $ARCH $TOP/buildtools
# Set up sysroot to redirect to /system
mkdir -p $SYSROOT/boot
mkdir -p $PACKAGE_ROOT
ln -s $PACKAGE_ROOT $SYSROOT/boot/system
# Build needed packages and tools for the cross-compiler
hide_output jam -q haiku.hpkg haiku_devel.hpkg '<build>package'
# Set up our sysroot
cp $OUTPUT/objects/linux/lib/*.so /lib/x86_64-linux-gnu
cp $OUTPUT/objects/linux/x86_64/release/tools/package/package /bin/
find $SYSROOT/../bin/ -type f -exec ln -s {} /bin/ \;
# Extract packages
package extract -C $PACKAGE_ROOT $OUTPUT/objects/haiku/$ARCH/packaging/packages/haiku.hpkg
package extract -C $PACKAGE_ROOT $OUTPUT/objects/haiku/$ARCH/packaging/packages/haiku_devel.hpkg
find $OUTPUT/download/ -name '*.hpkg' -exec package extract -C $PACKAGE_ROOT {} \;
# Fix libgcc_s so we can link to it
cd $PACKAGE_ROOT/develop/lib
ln -s ../../lib/libgcc_s.so libgcc_s.so
# Clean up
rm -rf $BUILDTOOLS $HAIKU $OUTPUT/Jamfile $OUTPUT/attributes $OUTPUT/build \
$OUTPUT/build_packages $OUTPUT/download $OUTPUT/objects


@ -0,0 +1,18 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
wget http://packages.haiku-os.org/haikuports/master/hpkg/llvm-4.0.1-2-x86_64.hpkg
wget http://packages.haiku-os.org/haikuports/master/hpkg/llvm_libs-4.0.1-2-x86_64.hpkg
package extract -C /system llvm-4.0.1-2-x86_64.hpkg
package extract -C /system llvm_libs-4.0.1-2-x86_64.hpkg
rm -f *.hpkg


@ -0,0 +1,67 @@
#!/bin/sh
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
case $1 in
--version) echo 4.0.1;;
--prefix) echo $SCRATCH/haiku-cross/sysroot/boot/system;;
--bindir) echo $SCRATCH/haiku-cross/sysroot/boot/system/bin;;
--includedir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/headers;;
--libdir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib;;
--cmakedir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib/cmake/llvm;;
--cppflags) echo -I$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
-D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
--cflags) echo -I$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
-fPIC -Wall -W -Wno-unused-parameter -Wwrite-strings \
-Wno-missing-field-initializers -pedantic -Wno-long-long -Wno-comment \
-Werror=date-time -ffunction-sections -fdata-sections -O3 -DNDEBUG \
-D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
--cxxflags) echo -I/$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
-fPIC -fvisibility-inlines-hidden -Wall -W -Wno-unused-parameter \
-Wwrite-strings -Wcast-qual -Wno-missing-field-initializers -pedantic \
-Wno-long-long -Wno-maybe-uninitialized -Wdelete-non-virtual-dtor \
-Wno-comment -Werror=date-time -std=c++11 -ffunction-sections \
-fdata-sections -O3 -DNDEBUG -fno-exceptions \
-D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
--ldflags) echo -L$SCRATCH/haiku-cross/sysroot/boot/system/develop/lib ;;
--system-libs) echo ;;
--libs) echo -lLLVM-4.0;;
--libfiles) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib/libLLVM-4.0.so;;
--components) echo aarch64 aarch64asmparser aarch64asmprinter aarch64codegen \
aarch64desc aarch64disassembler aarch64info aarch64utils all \
all-targets amdgpu amdgpuasmparser amdgpuasmprinter amdgpucodegen \
amdgpudesc amdgpudisassembler amdgpuinfo amdgpuutils analysis arm \
armasmparser armasmprinter armcodegen armdesc armdisassembler \
arminfo asmparser asmprinter bitreader bitwriter bpf bpfasmprinter \
bpfcodegen bpfdesc bpfdisassembler bpfinfo codegen core coroutines \
coverage debuginfocodeview debuginfodwarf debuginfomsf debuginfopdb \
demangle engine executionengine globalisel hexagon hexagonasmparser \
hexagoncodegen hexagondesc hexagondisassembler hexagoninfo \
instcombine instrumentation interpreter ipo irreader lanai \
lanaiasmparser lanaicodegen lanaidesc lanaidisassembler lanaiinfo \
lanaiinstprinter libdriver lineeditor linker lto mc mcdisassembler \
mcjit mcparser mips mipsasmparser mipsasmprinter mipscodegen \
mipsdesc mipsdisassembler mipsinfo mirparser msp430 msp430asmprinter \
msp430codegen msp430desc msp430info native nativecodegen nvptx \
nvptxasmprinter nvptxcodegen nvptxdesc nvptxinfo objcarcopts object \
objectyaml option orcjit passes powerpc powerpcasmparser \
powerpcasmprinter powerpccodegen powerpcdesc powerpcdisassembler \
powerpcinfo profiledata riscv riscvcodegen riscvdesc riscvinfo \
runtimedyld scalaropts selectiondag sparc sparcasmparser \
sparcasmprinter sparccodegen sparcdesc sparcdisassembler sparcinfo \
support symbolize systemz systemzasmparser systemzasmprinter \
systemzcodegen systemzdesc systemzdisassembler systemzinfo tablegen \
target transformutils vectorize x86 x86asmparser x86asmprinter \
x86codegen x86desc x86disassembler x86info x86utils xcore \
xcoreasmprinter xcorecodegen xcoredesc xcoredisassembler xcoreinfo;;
--host-target) echo x86_64-unknown-haiku;;
--has-rtti) echo YES;;
--shared-mode) echo shared;;
esac


@ -13,7 +13,7 @@ set -ex
source shared.sh
VERSION=1.0.2k
URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz
curl $URL | tar xzf -


@ -13,7 +13,7 @@ set -ex
source shared.sh
VERSION=1.0.2k
URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz
curl $URL | tar xzf -


@ -35,7 +35,7 @@ cd netbsd
mkdir -p /x-tools/x86_64-unknown-netbsd/sysroot
URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
# Originally from ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-$BSD/source/sets/*.tgz
curl $URL/2017-03-17-netbsd-src.tgz | tar xzf -


@ -8,9 +8,11 @@
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# ignore-tidy-linelength
set -ex
curl -fo /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
chmod +x /usr/local/bin/sccache


@ -1,106 +0,0 @@
# `compiler_fences`
The tracking issue for this feature is: [#41091]
[#41091]: https://github.com/rust-lang/rust/issues/41091
------------------------
The `compiler_fences` feature exposes the `compiler_fence` function
in `std::sync::atomic`. This function is conceptually similar to C++'s
`atomic_signal_fence`, which can currently only be accessed in nightly
Rust using the `atomic_singlethreadfence_*` intrinsic functions in
`core`, or through the mostly equivalent literal assembly:
```rust
#![feature(asm)]
unsafe { asm!("" ::: "memory" : "volatile") };
```
A `compiler_fence` restricts the kinds of memory re-ordering the
compiler is allowed to do. Specifically, depending on the given ordering
semantics, the compiler may be disallowed from moving reads or writes
from before or after the call to the other side of the call to
`compiler_fence`. Note that it does **not** prevent the *hardware*
from doing such re-ordering. This is not a problem in a single-threaded
execution context, but when other threads may modify memory at the same
time, stronger synchronization primitives are required.
## Examples
`compiler_fence` is generally only useful for preventing a thread from
racing *with itself*. That is, if a given thread is executing one piece
of code, and is then interrupted, and starts executing code elsewhere
(while still in the same thread, and conceptually still on the same
core). In traditional programs, this can only occur when a signal
handler is registered. In more low-level code, such situations can also
arise when handling interrupts, when implementing green threads with
pre-emption, etc.
To give a straightforward example of when a `compiler_fence` is
necessary, consider the following example:
```rust
# use std::sync::atomic::{AtomicBool, AtomicUsize};
# use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
# use std::sync::atomic::Ordering;
static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
fn main() {
IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
IS_READY.store(true, Ordering::Relaxed);
}
fn signal_handler() {
if IS_READY.load(Ordering::Relaxed) {
assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
}
}
```
The way it is currently written, the `assert_eq!` is *not* guaranteed to
succeed, despite everything happening in a single thread. To see why,
remember that the compiler is free to swap the stores to
`IMPORTANT_VARIABLE` and `IS_READY` since they are both
`Ordering::Relaxed`. If it does, and the signal handler is invoked right
after `IS_READY` is updated, then the signal handler will see
`IS_READY=1`, but `IMPORTANT_VARIABLE=0`.
Using a `compiler_fence`, we can remedy this situation:
```rust
#![feature(compiler_fences)]
# use std::sync::atomic::{AtomicBool, AtomicUsize};
# use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
# use std::sync::atomic::Ordering;
use std::sync::atomic::compiler_fence;
static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
fn main() {
IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
// prevent earlier writes from being moved beyond this point
compiler_fence(Ordering::Release);
IS_READY.store(true, Ordering::Relaxed);
}
fn signal_handler() {
if IS_READY.load(Ordering::Relaxed) {
assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
}
}
```
A deeper discussion of compiler barriers with various re-ordering
semantics (such as `Ordering::SeqCst`) is beyond the scope of this text.
Curious readers are encouraged to read the Linux kernel's discussion of
[memory barriers][1], the C++ references on [`std::memory_order`][2] and
[`atomic_signal_fence`][3], and [this StackOverflow answer][4] for
further details.
[1]: https://www.kernel.org/doc/Documentation/memory-barriers.txt
[2]: http://en.cppreference.com/w/cpp/atomic/memory_order
[3]: http://www.cplusplus.com/reference/atomic/atomic_signal_fence/
[4]: http://stackoverflow.com/a/18454971/472927


@ -1,17 +0,0 @@
# `iterator_for_each`
The tracking issue for this feature is: [#42986]
[#42986]: https://github.com/rust-lang/rust/issues/42986
------------------------
To call a closure on each element of an iterator, you can use `for_each`:
```rust
#![feature(iterator_for_each)]
fn main() {
(0..10).for_each(|i| println!("{}", i));
}
```


@ -22,7 +22,7 @@ use core::borrow;
use core::fmt;
use core::cmp::Ordering;
use core::intrinsics::abort;
use core::mem::{self, size_of_val, uninitialized};
use core::mem::{self, align_of_val, size_of_val, uninitialized};
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, Shared};
@ -324,7 +324,9 @@ impl<T> Arc<T> {
Ok(elem)
}
}
}
impl<T: ?Sized> Arc<T> {
/// Consumes the `Arc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
@ -378,16 +380,21 @@ impl<T> Arc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `ArcInner` we need to subtract the offset of the
// `data` field from the pointer.
let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
// Align the unsized value to the end of the ArcInner.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<ArcInner<()>>();
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
// Reverse the offset to find the original ArcInner.
let fake_ptr = ptr as *mut ArcInner<T>;
let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Arc {
ptr: Shared::new_unchecked(ptr as *mut u8 as *mut _),
ptr: Shared::new_unchecked(arc_ptr),
}
}
}
impl<T: ?Sized> Arc<T> {
/// Creates a new [`Weak`][weak] pointer to this value.
///
/// [weak]: struct.Weak.html
@ -1491,6 +1498,28 @@ mod tests {
}
}
#[test]
fn test_into_from_raw_unsized() {
use std::fmt::Display;
use std::string::ToString;
let arc: Arc<str> = Arc::from("foo");
let ptr = Arc::into_raw(arc.clone());
let arc2 = unsafe { Arc::from_raw(ptr) };
assert_eq!(unsafe { &*ptr }, "foo");
assert_eq!(arc, arc2);
let arc: Arc<Display> = Arc::new(123);
let ptr = Arc::into_raw(arc.clone());
let arc2 = unsafe { Arc::from_raw(ptr) };
assert_eq!(unsafe { &*ptr }.to_string(), "123");
assert_eq!(arc2.to_string(), "123");
}
#[test]
fn test_cowarc_clone_make_mut() {
let mut cow0 = Arc::new(75);

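The rewritten `Arc::from_raw` above (and the matching `Rc::from_raw` change later in this diff) recovers the allocation pointer by adding the header's size to the padding needed for the value's alignment. Below is a minimal standalone sketch of that offset arithmetic, using a hypothetical `data_offset` helper rather than the real `ArcInner`/`RcBox` layout:

```rust
// Sketch only: a fixed-size header followed by a possibly-unsized value.
// The value starts at the header size rounded up to the value's alignment,
// mirroring `layout.size() + layout.padding_needed_for(align)` in the hunk above.
fn data_offset(header_size: usize, value_align: usize) -> usize {
    debug_assert!(value_align.is_power_of_two());
    // Padding required so the value lands on a multiple of its alignment.
    let padding = header_size.wrapping_neg() & (value_align - 1);
    header_size + padding
}

fn main() {
    // A 16-byte header (e.g. two reference counts) followed by an 8-aligned value:
    assert_eq!(data_offset(16, 8), 16);
    // The same header followed by a 32-aligned value needs 16 bytes of padding:
    assert_eq!(data_offset(16, 32), 32);
}
```

`from_raw` then subtracts this offset from the data pointer to get back to the start of the allocation, which is why the comments stress that the value is always the last field in memory.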

@ -105,22 +105,3 @@ macro_rules! vec {
macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format(format_args!($($arg)*)))
}
// Private macro to get the offset of a struct field in bytes from the address of the struct.
macro_rules! offset_of {
($container:path, $field:ident) => {{
// Make sure the field actually exists. This line ensures that a compile-time error is
// generated if $field is accessed through a Deref impl.
let $container { $field : _, .. };
// Create an (invalid) instance of the container and calculate the offset to its
// field. Using a null pointer might be UB if `&(*(0 as *const T)).field` is interpreted to
// be nullptr deref.
let invalid: $container = ::core::mem::uninitialized();
let offset = &invalid.$field as *const _ as usize - &invalid as *const _ as usize;
// Do not run destructors on the made up invalid instance.
::core::mem::forget(invalid);
offset as isize
}};
}


@ -253,7 +253,7 @@ use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker;
use core::marker::Unsize;
use core::mem::{self, forget, size_of_val, uninitialized};
use core::mem::{self, align_of_val, forget, size_of_val, uninitialized};
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, Shared};
@ -359,7 +359,9 @@ impl<T> Rc<T> {
Err(this)
}
}
}
impl<T: ?Sized> Rc<T> {
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
@ -413,17 +415,21 @@ impl<T> Rc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
// `value` field from the pointer.
// Align the unsized value to the end of the RcBox.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<RcBox<()>>();
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
// Reverse the offset to find the original RcBox.
let fake_ptr = ptr as *mut RcBox<T>;
let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
Rc {
ptr: Shared::new_unchecked(ptr as *mut u8 as *mut _)
ptr: Shared::new_unchecked(rc_ptr),
}
}
}
impl<T: ?Sized> Rc<T> {
/// Creates a new [`Weak`][weak] pointer to this value.
///
/// [weak]: struct.Weak.html
@ -1522,6 +1528,28 @@ mod tests {
}
}
#[test]
fn test_into_from_raw_unsized() {
use std::fmt::Display;
use std::string::ToString;
let rc: Rc<str> = Rc::from("foo");
let ptr = Rc::into_raw(rc.clone());
let rc2 = unsafe { Rc::from_raw(ptr) };
assert_eq!(unsafe { &*ptr }, "foo");
assert_eq!(rc, rc2);
let rc: Rc<Display> = Rc::new(123);
let ptr = Rc::into_raw(rc.clone());
let rc2 = unsafe { Rc::from_raw(ptr) };
assert_eq!(unsafe { &*ptr }.to_string(), "123");
assert_eq!(rc2.to_string(), "123");
}
#[test]
fn get_mut() {
let mut x = Rc::new(3);


@ -28,7 +28,6 @@
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
#![feature(needs_drop)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]

@ -1 +1 @@
Subproject commit 38ffaf97aa418cc369ca0197a72a0b927cc0f622
Subproject commit ef4951582f620c589cd9e18ec182538bf116bce3


@ -498,8 +498,6 @@ pub trait Iterator {
/// Basic usage:
///
/// ```
/// #![feature(iterator_for_each)]
///
/// use std::sync::mpsc::channel;
///
/// let (tx, rx) = channel();
@ -514,15 +512,13 @@ pub trait Iterator {
/// might be preferable to keep a functional style with longer iterators:
///
/// ```
/// #![feature(iterator_for_each)]
///
/// (0..5).flat_map(|x| x * 100 .. x * 110)
/// .enumerate()
/// .filter(|&(i, x)| (i + x) % 3 == 0)
/// .for_each(|(i, x)| println!("{}:{}", i, x));
/// ```
#[inline]
#[unstable(feature = "iterator_for_each", issue = "42986")]
#[stable(feature = "iterator_for_each", since = "1.22.0")]
fn for_each<F>(self, mut f: F) where
Self: Sized, F: FnMut(Self::Item),
{


@ -332,7 +332,6 @@ pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
/// Here's an example of how a collection might make use of needs_drop:
///
/// ```
/// #![feature(needs_drop)]
/// use std::{mem, ptr};
///
/// pub struct MyCollection<T> {
@ -359,7 +358,7 @@ pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
/// }
/// ```
#[inline]
#[unstable(feature = "needs_drop", issue = "41890")]
#[stable(feature = "needs_drop", since = "1.22.0")]
pub fn needs_drop<T>() -> bool {
unsafe { intrinsics::needs_drop::<T>() }
}

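Since the hunk above stabilizes `needs_drop`, here is a minimal, self-contained sketch of the kind of fast path it enables (my own illustration, not the doc's elided collection example):

```rust
use std::mem;

// Sketch: skip per-element teardown entirely when the element type has no drop glue.
fn clear_fast<T>(buf: &mut Vec<T>) {
    if mem::needs_drop::<T>() {
        // Elements have destructors, so drop them one by one.
        buf.clear();
    } else {
        // No drop glue: resetting the length is sufficient.
        unsafe { buf.set_len(0) };
    }
}

fn main() {
    assert!(!mem::needs_drop::<u32>());
    assert!(mem::needs_drop::<String>());

    let mut nums = vec![1u32, 2, 3];
    clear_fast(&mut nums);
    assert!(nums.is_empty());
}
```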

@ -1679,10 +1679,14 @@ pub fn fence(order: Ordering) {
/// A compiler memory fence.
///
/// `compiler_fence` does not emit any machine code, but prevents the compiler from re-ordering
/// memory operations across this point. Which reorderings are disallowed is dictated by the given
/// [`Ordering`]. Note that `compiler_fence` does *not* introduce inter-thread memory
/// synchronization; for that, a [`fence`] is needed.
/// `compiler_fence` does not emit any machine code, but restricts the kinds
/// of memory re-ordering the compiler is allowed to do. Specifically, depending on
/// the given [`Ordering`] semantics, the compiler may be disallowed from moving reads
/// or writes from before or after the call to the other side of the call to
/// `compiler_fence`. Note that it does **not** prevent the *hardware*
/// from doing such re-ordering. This is not a problem in a single-threaded
/// execution context, but when other threads may modify memory at the same
/// time, stronger synchronization primitives such as [`fence`] are required.
///
/// The re-ordering prevented by the different ordering semantics are:
///
@ -1691,10 +1695,54 @@ pub fn fence(order: Ordering) {
/// - with [`Acquire`], subsequent reads and writes cannot be moved ahead of preceding reads.
/// - with [`AcqRel`], both of the above rules are enforced.
///
/// `compiler_fence` is generally only useful for preventing a thread from
/// racing *with itself*. That is, if a given thread is executing one piece
/// of code, and is then interrupted, and starts executing code elsewhere
/// (while still in the same thread, and conceptually still on the same
/// core). In traditional programs, this can only occur when a signal
/// handler is registered. In more low-level code, such situations can also
/// arise when handling interrupts, when implementing green threads with
/// pre-emption, etc. Curious readers are encouraged to read the Linux kernel's
/// discussion of [memory barriers].
///
/// # Panics
///
/// Panics if `order` is [`Relaxed`].
///
/// # Examples
///
/// Without `compiler_fence`, the `assert_eq!` in the following code
/// is *not* guaranteed to succeed, despite everything happening in a single thread.
/// To see why, remember that the compiler is free to swap the stores to
/// `IMPORTANT_VARIABLE` and `IS_READY` since they are both
/// `Ordering::Relaxed`. If it does, and the signal handler is invoked right
/// after `IS_READY` is updated, then the signal handler will see
/// `IS_READY=1`, but `IMPORTANT_VARIABLE=0`.
/// Using a `compiler_fence` remedies this situation.
///
/// ```
/// use std::sync::atomic::{AtomicBool, AtomicUsize};
/// use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
/// use std::sync::atomic::Ordering;
/// use std::sync::atomic::compiler_fence;
///
/// static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
/// static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
///
/// fn main() {
/// IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
/// // prevent earlier writes from being moved beyond this point
/// compiler_fence(Ordering::Release);
/// IS_READY.store(true, Ordering::Relaxed);
/// }
///
/// fn signal_handler() {
/// if IS_READY.load(Ordering::Relaxed) {
/// assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
/// }
/// }
/// ```
///
/// [`fence`]: fn.fence.html
/// [`Ordering`]: enum.Ordering.html
/// [`Acquire`]: enum.Ordering.html#variant.Acquire
@ -1702,8 +1750,9 @@ pub fn fence(order: Ordering) {
/// [`Release`]: enum.Ordering.html#variant.Release
/// [`AcqRel`]: enum.Ordering.html#variant.AcqRel
/// [`Relaxed`]: enum.Ordering.html#variant.Relaxed
/// [memory barriers]: https://www.kernel.org/doc/Documentation/memory-barriers.txt
#[inline]
#[unstable(feature = "compiler_fences", issue = "41091")]
#[stable(feature = "compiler_fences", since = "1.22.0")]
pub fn compiler_fence(order: Ordering) {
unsafe {
match order {


@ -287,6 +287,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> {
found_it: false,
bound_region: self.bound_region,
hir_map: self.hir_map,
depth: self.depth,
};
intravisit::walk_ty(subvisitor, arg); // call walk_ty; as visit_ty is empty,
// this will visit only outermost type
@ -313,6 +314,7 @@ struct TyPathVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
hir_map: &'a hir::map::Map<'gcx>,
found_it: bool,
bound_region: ty::BoundRegion,
depth: u32,
}
impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
@ -321,24 +323,47 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
}
fn visit_lifetime(&mut self, lifetime: &hir::Lifetime) {
let br_index = match self.bound_region {
ty::BrAnon(index) => index,
_ => return,
};
let hir_id = self.infcx.tcx.hir.node_to_hir_id(lifetime.id);
match self.infcx.tcx.named_region(hir_id) {
match (self.infcx.tcx.named_region(hir_id), self.bound_region) {
// the lifetime of the TyPath!
Some(rl::Region::LateBoundAnon(debruijn_index, anon_index)) => {
if debruijn_index.depth == 1 && anon_index == br_index {
(Some(rl::Region::LateBoundAnon(debruijn_index, anon_index)), ty::BrAnon(br_index)) => {
if debruijn_index.depth == self.depth && anon_index == br_index {
self.found_it = true;
return;
}
}
Some(rl::Region::Static) |
Some(rl::Region::EarlyBound(_, _)) |
Some(rl::Region::LateBound(_, _)) |
Some(rl::Region::Free(_, _)) |
None => {
(Some(rl::Region::EarlyBound(_, id)), ty::BrNamed(def_id, _)) => {
debug!("EarlyBound self.infcx.tcx.hir.local_def_id(id)={:?} \
def_id={:?}",
self.infcx.tcx.hir.local_def_id(id),
def_id);
if self.infcx.tcx.hir.local_def_id(id) == def_id {
self.found_it = true;
return; // we can stop visiting now
}
}
(Some(rl::Region::LateBound(debruijn_index, id)), ty::BrNamed(def_id, _)) => {
debug!("FindNestedTypeVisitor::visit_ty: LateBound depth = {:?}",
debruijn_index.depth);
debug!("self.infcx.tcx.hir.local_def_id(id)={:?}",
self.infcx.tcx.hir.local_def_id(id));
debug!("def_id={:?}", def_id);
if debruijn_index.depth == self.depth &&
self.infcx.tcx.hir.local_def_id(id) == def_id {
self.found_it = true;
return; // we can stop visiting now
}
}
(Some(rl::Region::Static), _) |
(Some(rl::Region::EarlyBound(_, _)), _) |
(Some(rl::Region::LateBound(_, _)), _) |
(Some(rl::Region::LateBoundAnon(_, _)), _) |
(Some(rl::Region::Free(_, _)), _) |
(None, _) => {
debug!("no arg found");
}
}


@ -16,8 +16,7 @@ use std;
use llvm;
use llvm::{ValueRef};
use rustc::traits;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::{self, Ty};
use rustc::ty::layout::LayoutTyper;
use common::*;
use meth;
@ -25,42 +24,6 @@ use monomorphize;
use value::Value;
use builder::Builder;
pub fn needs_drop_glue<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, t: Ty<'tcx>) -> bool {
assert!(t.is_normalized_for_trans());
let t = scx.tcx().erase_regions(&t);
// FIXME (#22815): note that type_needs_drop conservatively
// approximates in some cases and may say a type expression
// requires drop glue when it actually does not.
//
// (In this case it is not clear whether any harm is done, i.e.
// erroneously returning `true` in some cases where we could have
// returned `false` does not appear unsound. The impact on
// code quality is unknown at this time.)
if !scx.type_needs_drop(t) {
return false;
}
match t.sty {
ty::TyAdt(def, _) if def.is_box() => {
let typ = t.boxed_ty();
if !scx.type_needs_drop(typ) && scx.type_is_sized(typ) {
let layout = t.layout(scx.tcx(), ty::ParamEnv::empty(traits::Reveal::All)).unwrap();
if layout.size(scx).bytes() == 0 {
// `Box<ZeroSizeType>` does not allocate.
false
} else {
true
}
} else {
true
}
}
_ => true
}
}
pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {:?}",


@ -10,7 +10,6 @@
use abi::Abi;
use common::*;
use glue;
use rustc::hir::def_id::DefId;
use rustc::middle::lang_items::DropInPlaceFnLangItem;
@ -189,7 +188,7 @@ pub fn resolve<'a, 'tcx>(
_ => {
if Some(def_id) == scx.tcx().lang_items().drop_in_place_fn() {
let ty = substs.type_at(0);
if glue::needs_drop_glue(scx, ty) {
if scx.type_needs_drop(ty) {
debug!(" => nontrivial drop glue");
ty::InstanceDef::DropGlue(def_id, Some(ty))
} else {


@ -473,7 +473,7 @@ impl hir::print::PpAnn for InlinedConst {
}
}
fn print_inlined_const(cx: &DocContext, did: DefId) -> String {
pub fn print_inlined_const(cx: &DocContext, did: DefId) -> String {
let body = cx.tcx.extern_const_body(did);
let inlined = InlinedConst {
nested_bodies: cx.tcx.item_body_nested_bodies(did)


@ -1793,6 +1793,12 @@ impl Clean<Type> for hir::Ty {
let n = cx.tcx.const_eval(param_env.and((def_id, substs))).unwrap();
let n = if let ConstVal::Integral(ConstInt::Usize(n)) = n.val {
n.to_string()
} else if let ConstVal::Unevaluated(def_id, _) = n.val {
if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
} else {
inline::print_inlined_const(cx, def_id)
}
} else {
format!("{:?}", n)
};
@ -1909,6 +1915,12 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
ty::TyArray(ty, n) => {
let n = if let ConstVal::Integral(ConstInt::Usize(n)) = n.val {
n.to_string()
} else if let ConstVal::Unevaluated(def_id, _) = n.val {
if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
} else {
inline::print_inlined_const(cx, def_id)
}
} else {
format!("{:?}", n)
};


@ -34,12 +34,18 @@ use syntax_pos::Span;
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
extension: Option<&str>) -> String {
extension: Option<&str>,
tooltip: Option<(&str, &str)>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
if let Some((tooltip, class)) = tooltip {
write!(out, "<div class='information'><div class='tooltip {}'>⚠<span \
class='tooltiptext'>{}</span></div></div>",
class, tooltip).unwrap();
}
write_header(class, id, &mut out).unwrap();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());


@ -160,10 +160,15 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'a, I> {
fn next(&mut self) -> Option<Self::Item> {
let event = self.inner.next();
let compile_fail;
let ignore;
if let Some(Event::Start(Tag::CodeBlock(lang))) = event {
if !LangString::parse(&lang).rust {
let parse_result = LangString::parse(&lang);
if !parse_result.rust {
return Some(Event::Start(Tag::CodeBlock(lang)));
}
compile_fail = parse_result.compile_fail;
ignore = parse_result.ignore;
} else {
return event;
}
@ -222,11 +227,22 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'a, I> {
url, test_escaped, channel
))
});
let tooltip = if ignore {
Some(("Be careful when using this code, it's not being tested!", "ignore"))
} else if compile_fail {
Some(("This code doesn't compile so be extra careful!", "compile_fail"))
} else {
None
};
s.push_str(&highlight::render_with_highlighting(
&text,
Some("rust-example-rendered"),
Some(&format!("rust-example-rendered{}",
if ignore { " ignore" }
else if compile_fail { " compile_fail" }
else { "" })),
None,
playground_button.as_ref().map(String::as_str)));
playground_button.as_ref().map(String::as_str),
tooltip));
Some(Event::Html(s.into()))
})
}
@ -556,12 +572,18 @@ pub fn render(w: &mut fmt::Formatter,
let origtext = str::from_utf8(text).unwrap();
let origtext = origtext.trim_left();
debug!("docblock: ==============\n{:?}\n=======", text);
let mut compile_fail = false;
let mut ignore = false;
let rendered = if lang.is_null() || origtext.is_empty() {
false
} else {
let rlang = (*lang).as_bytes();
let rlang = str::from_utf8(rlang).unwrap();
if !LangString::parse(rlang).rust {
let parse_result = LangString::parse(rlang);
compile_fail = parse_result.compile_fail;
ignore = parse_result.ignore;
if !parse_result.rust {
(my_opaque.dfltblk)(ob, orig_text, lang,
opaque as *const hoedown_renderer_data,
line);
@ -616,11 +638,22 @@ pub fn render(w: &mut fmt::Formatter,
url, test_escaped, channel
))
});
let tooltip = if ignore {
Some(("Be careful when using this code, it's not being tested!", "ignore"))
} else if compile_fail {
Some(("This code doesn't compile so be extra careful!", "compile_fail"))
} else {
None
};
s.push_str(&highlight::render_with_highlighting(
&text,
Some("rust-example-rendered"),
Some(&format!("rust-example-rendered{}",
if ignore { " ignore" }
else if compile_fail { " compile_fail" }
else { "" })),
None,
playground_button.as_ref().map(String::as_str)));
playground_button.as_ref().map(String::as_str),
tooltip));
hoedown_buffer_put(ob, s.as_ptr(), s.len());
})
}


@ -1819,6 +1819,7 @@ fn render_assoc_const_value(item: &clean::Item) -> String {
None,
None,
None,
None,
)
}
_ => String::new(),
@ -3647,7 +3648,8 @@ impl<'a> fmt::Display for Source<'a> {
write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", i, cols)?;
}
write!(fmt, "</pre>")?;
write!(fmt, "{}", highlight::render_with_highlighting(s, None, None, None))?;
write!(fmt, "{}",
highlight::render_with_highlighting(s, None, None, None, None))?;
Ok(())
}
}
@ -3657,6 +3659,7 @@ fn item_macro(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
w.write_str(&highlight::render_with_highlighting(&t.source,
Some("macro"),
None,
None,
None))?;
document(w, cx, it)
}


@ -1296,6 +1296,24 @@
collapseDocs(i_e.previousSibling.childNodes[0]);
});
});
onEach(document.getElementsByClassName('rust-example-rendered'), function(e) {
if (hasClass(e, 'compile_fail')) {
e.addEventListener("mouseover", function(event) {
e.previousElementSibling.childNodes[0].style.color = '#f00';
});
e.addEventListener("mouseout", function(event) {
e.previousElementSibling.childNodes[0].style.color = '';
});
} else if (hasClass(e, 'ignore')) {
e.addEventListener("mouseover", function(event) {
e.previousElementSibling.childNodes[0].style.color = '#ff9200';
});
e.addEventListener("mouseout", function(event) {
e.previousElementSibling.childNodes[0].style.color = '';
});
}
});
}());
// Sets the focus on the search bar at the top of the page


@ -612,7 +612,6 @@ pre.rust .question-mark {
font-weight: bold;
}
pre.rust { position: relative; }
a.test-arrow {
display: inline-block;
position: absolute;
@ -813,3 +812,44 @@ span.since {
display: none;
}
}
.information {
position: absolute;
left: -1px;
margin-top: 7px;
}
.tooltip {
position: relative;
display: inline-block;
cursor: pointer;
}
.tooltip .tooltiptext {
width: 120px;
display: none;
background-color: black;
color: #fff;
text-align: center;
padding: 5px 3px;
border-radius: 6px;
margin-left: 5px;
top: -5px;
left: 105%;
z-index: 1;
}
.tooltip:hover .tooltiptext {
display: inline;
}
.tooltip .tooltiptext::after {
content: " ";
position: absolute;
top: 50%;
left: 11px;
margin-top: -5px;
border-width: 5px;
border-style: solid;
border-color: transparent black transparent transparent;
}


@ -202,4 +202,36 @@ a.test-arrow:hover{
:target > code {
background: #FDFFD3;
}
}
pre.compile_fail {
border-left: 2px solid rgba(255,0,0,.4);
}
pre.compile_fail:hover, .information:hover + pre.compile_fail {
border-left: 2px solid #f00;
}
pre.ignore {
border-left: 2px solid rgba(255,142,0,.4);
}
pre.ignore:hover, .information:hover + pre.ignore {
border-left: 2px solid #ff9200;
}
.tooltip.compile_fail {
color: rgba(255,0,0,.3);
}
.information > .compile_fail:hover {
color: #f00;
}
.tooltip.ignore {
color: rgba(255,142,0,.3);
}
.information > .ignore:hover {
color: rgba(255,142,0,1);
}


@ -276,7 +276,6 @@
#![feature(macro_reexport)]
#![feature(macro_vis_matcher)]
#![feature(needs_panic_runtime)]
#![feature(needs_drop)]
#![feature(never_type)]
#![feature(num_bits_bytes)]
#![feature(old_wrapping)]


@ -1570,10 +1570,13 @@ mod tests {
#[test]
fn connect_timeout_unroutable() {
// this IP is unroutable, so connections should always time out.
// this IP is unroutable, so connections should always time out,
// provided the network is reachable to begin with.
let addr = "10.255.255.1:80".parse().unwrap();
let e = TcpStream::connect_timeout(&addr, Duration::from_millis(250)).unwrap_err();
assert_eq!(e.kind(), io::ErrorKind::TimedOut);
assert!(e.kind() == io::ErrorKind::TimedOut ||
e.kind() == io::ErrorKind::Other,
"bad error: {} {:?}", e, e.kind());
}
#[test]


@ -98,7 +98,7 @@ pub enum TryLockError<T> {
///
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`into_inner`]: ../../std/sync/struct.Mutex.html#method.into_inner
/// [`into_inner`]: ../../std/sync/struct.PoisonError.html#method.into_inner
#[stable(feature = "rust1", since = "1.0.0")]
pub type LockResult<Guard> = Result<Guard, PoisonError<Guard>>;


@ -792,7 +792,7 @@ impl<'a> ExtCtxt<'a> {
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.parse_sess.span_diagnostic.span_bug(sp, msg);
}
pub fn trace_macros_diag(&self) {
pub fn trace_macros_diag(&mut self) {
for (sp, notes) in self.expansions.iter() {
let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
for note in notes {
@ -800,6 +800,8 @@ impl<'a> ExtCtxt<'a> {
}
db.emit();
}
// Fixme: does this result in errors?
self.expansions.clear();
}
pub fn bug(&self, msg: &str) -> ! {
self.parse_sess.span_diagnostic.bug(msg);


@ -384,13 +384,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
let info = self.cx.current_expansion.mark.expn_info().unwrap();
let suggested_limit = self.cx.ecfg.recursion_limit * 2;
let mut err = self.cx.struct_span_fatal(info.call_site,
let mut err = self.cx.struct_span_err(info.call_site,
&format!("recursion limit reached while expanding the macro `{}`",
info.callee.name()));
err.help(&format!(
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
suggested_limit));
err.emit();
self.cx.trace_macros_diag();
panic!(FatalError);
}
@ -439,11 +440,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
ProcMacroDerive(..) | BuiltinDerive(..) => {
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
self.cx.trace_macros_diag();
kind.dummy(attr.span)
}
_ => {
let msg = &format!("macro `{}` may not be used in attributes", attr.path);
self.cx.span_err(attr.span, msg);
self.cx.trace_macros_diag();
kind.dummy(attr.span)
}
}
@ -482,6 +485,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s),
false, false) {
self.cx.span_err(path.span, &msg);
self.cx.trace_macros_diag();
return kind.dummy(span);
}
kind.make_from(expand.expand(self.cx, span, mac.node.stream()))
@ -497,6 +501,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
allow_internal_unstable,
allow_internal_unsafe) {
self.cx.span_err(path.span, &msg);
self.cx.trace_macros_diag();
return kind.dummy(span);
}
kind.make_from(expander.expand(self.cx, span, mac.node.stream()))
@ -506,6 +511,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
if ident.name == keywords::Invalid.name() {
self.cx.span_err(path.span,
&format!("macro {}! expects an ident argument", path));
self.cx.trace_macros_diag();
return kind.dummy(span);
};
@ -526,11 +532,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
MultiDecorator(..) | MultiModifier(..) | AttrProcMacro(..) => {
self.cx.span_err(path.span,
&format!("`{}` can only be used in attributes", path));
self.cx.trace_macros_diag();
return kind.dummy(span);
}
ProcMacroDerive(..) | BuiltinDerive(..) => {
self.cx.span_err(path.span, &format!("`{}` is a derive mode", path));
self.cx.trace_macros_diag();
return kind.dummy(span);
}
@ -539,6 +547,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let msg =
format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
self.cx.trace_macros_diag();
return kind.dummy(span);
}
@ -564,6 +573,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let msg = format!("non-{kind} macro in {kind} position: {name}",
name = path.segments[0].identifier.name, kind = kind.name());
self.cx.span_err(path.span, &msg);
self.cx.trace_macros_diag();
kind.dummy(span)
})
}
@ -617,6 +627,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
_ => {
let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
self.cx.span_err(span, msg);
self.cx.trace_macros_diag();
kind.dummy(span)
}
}
@ -629,6 +640,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
Ok(expansion) => expansion,
Err(mut err) => {
err.emit();
self.cx.trace_macros_diag();
return kind.dummy(span);
}
};
@ -739,6 +751,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
if !traits.is_empty() &&
(kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) {
self.cx.span_err(traits[0].span, "`derive` can only be applied to items");
self.cx.trace_macros_diag();
return kind.expect_from_annotatables(::std::iter::once(item));
}
self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item })


@ -172,7 +172,9 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
}
let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg);
cx.span_err(best_fail_spot.substitute_dummy(sp), &best_fail_msg);
cx.trace_macros_diag();
DummyResult::any(sp)
}
// Note that macro-by-example's input is also matched against a token tree:


@ -0,0 +1,31 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
// ignore-tidy-linelength
// @has foo/fn.bar.html '//*[@class="tooltip compile_fail"]/span' "This code doesn't compile so be extra careful!"
// @has foo/fn.bar.html '//*[@class="tooltip ignore"]/span' "Be careful when using this code, it's not being tested!"
/// foo
///
/// ```compile_fail
/// foo();
/// ```
///
/// ```ignore (tidy)
/// goo();
/// ```
///
/// ```
/// let x = 0;
/// ```
pub fn bar() -> usize { 2 }


@ -0,0 +1,21 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Ref<'a> {
x: &'a u32,
}
fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>)
where &'a (): Sized,
&'b u32: Sized
{
x.push(y);
}
fn main() {}


@ -0,0 +1,11 @@
error[E0623]: lifetime mismatch
--> $DIR/ex3-both-anon-regions-both-are-structs-earlybound-regions.rs:18:12
|
14 | fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>)
| ------- ------- these two types are declared with different lifetimes...
...
18 | x.push(y);
| ^ ...but data from `y` flows into `x` here
error: aborting due to previous error


@ -0,0 +1,18 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Ref<'a> {
x: &'a u32,
}
fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>) {
x.push(y);
}
fn main() {}


@ -0,0 +1,10 @@
error[E0623]: lifetime mismatch
--> $DIR/ex3-both-anon-regions-both-are-structs-latebound-regions.rs:15:12
|
14 | fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>) {
| ------- ------- these two types are declared with different lifetimes...
15 | x.push(y);
| ^ ...but data from `y` flows into `x` here
error: aborting due to previous error


@ -4,3 +4,5 @@ error: unexpected end of macro invocation
12 | assert_eq!(1, 1,);
| ^
error: aborting due to previous error


@ -4,3 +4,5 @@ error: unexpected end of macro invocation
12 | assert_ne!(1, 2,);
| ^
error: aborting due to previous error


@ -0,0 +1,55 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z trace-macros
#![recursion_limit="4"]
macro_rules! my_faulty_macro {
() => {
my_faulty_macro!(bcd);
};
}
macro_rules! pat_macro {
() => {
pat_macro!(A{a:a, b:0, c:_, ..});
};
($a:pat) => {
$a
};
}
macro_rules! my_recursive_macro {
() => {
my_recursive_macro!();
};
}
macro_rules! my_macro {
() => {
};
}
fn main() {
my_faulty_macro!();
my_recursive_macro!();
test!();
non_exisiting!();
derive!(Debug);
let a = pat_macro!();
}
#[my_macro]
fn use_bang_macro_as_attr(){}
#[derive(Debug)]
fn use_derive_macro_as_attr(){}


@ -0,0 +1,47 @@
error: no rules expected the token `bcd`
--> $DIR/trace_faulty_macros.rs:17:26
|
17 | my_faulty_macro!(bcd);
| ^^^
...
43 | my_faulty_macro!();
| ------------------- in this macro invocation
note: trace_macro
--> $DIR/trace_faulty_macros.rs:43:5
|
43 | my_faulty_macro!();
| ^^^^^^^^^^^^^^^^^^^
|
= note: expanding `my_faulty_macro! { }`
= note: to `my_faulty_macro ! ( bcd ) ;`
= note: expanding `my_faulty_macro! { bcd }`
error: recursion limit reached while expanding the macro `my_recursive_macro`
--> $DIR/trace_faulty_macros.rs:32:9
|
32 | my_recursive_macro!();
| ^^^^^^^^^^^^^^^^^^^^^^
...
44 | my_recursive_macro!();
| ---------------------- in this macro invocation
|
= help: consider adding a `#![recursion_limit="8"]` attribute to your crate
note: trace_macro
--> $DIR/trace_faulty_macros.rs:44:5
|
44 | my_recursive_macro!();
| ^^^^^^^^^^^^^^^^^^^^^^
|
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro ! ( ) ;`
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro ! ( ) ;`
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro ! ( ) ;`
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro ! ( ) ;`
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro ! ( ) ;`

@ -1 +1 @@
Subproject commit 33250c48b4763b01478d780e76206484a1d5b207
Subproject commit 8118b02ac5ce49b22e049ff03316d5e1574852cf


@ -517,7 +517,7 @@ pub fn make_test_name(config: &Config, testpaths: &TestPaths) -> test::TestName
//
// run-pass/foo/bar/baz.rs
let path =
PathBuf::from(config.mode.to_string())
PathBuf::from(config.src_base.file_name().unwrap())
.join(&testpaths.relative_dir)
.join(&testpaths.file.file_name().unwrap());
test::DynTestName(format!("[{}] {}", config.mode, path.display()))


@ -2174,8 +2174,6 @@ actual:\n\
}
fn run_ui_test(&self) {
println!("ui: {}", self.testpaths.file.display());
let proc_res = self.compile_test();
let expected_stderr_path = self.expected_output_path("stderr");

@ -1 +1 @@
Subproject commit 52d48656f93eeeb2c568e6c1048e64168e5b209f
Subproject commit 7221e38023c41ff2532ebbf54a7da296fd488b50

src/tools/rustfmt Submodule

@ -0,0 +1 @@
Subproject commit a1fd68da464fc51585f351c81fc2b867211c197e


@ -64,6 +64,7 @@ fn filter_dirs(path: &Path) -> bool {
"src/tools/rls",
"src/tools/clippy",
"src/tools/rust-installer",
"src/tools/rustfmt",
];
skip.iter().any(|p| path.ends_with(p))
}