diff --git a/.gitmodules b/.gitmodules index 6244b3c095186..fbebccf408fb6 100644 --- a/.gitmodules +++ b/.gitmodules @@ -33,3 +33,6 @@ [submodule "src/libcompiler_builtins"] path = src/libcompiler_builtins url = https://github.com/rust-lang-nursery/compiler-builtins +[submodule "src/tools/clippy"] + path = src/tools/clippy + url = https://github.com/rust-lang-nursery/rust-clippy.git diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c424ca7ab009e..7441d51055ffb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -232,7 +232,34 @@ Some common invocations of `x.py` are: guidelines as of yet, but basic rules like 4 spaces for indentation and no more than 99 characters in a single line should be kept in mind when writing code. -- `rustup toolchain link <name> build/<host-triple>/<stage>` - Use the custom compiler build via [rustup](https://github.com/rust-lang-nursery/rustup.rs#working-with-custom-toolchains-and-local-builds). + +### Using your local build + +If you use Rustup to manage your Rust install, it has a feature called ["custom +toolchains"][toolchain-link] that you can use to access your newly-built compiler +without having to install it to your system or user PATH. If you've run `python +x.py build`, then you can add your custom rustc to a new toolchain like this: + +[toolchain-link]: https://github.com/rust-lang-nursery/rustup.rs#working-with-custom-toolchains-and-local-builds + +``` +rustup toolchain link <name> build/<host-triple>/stage2 +``` + +Where `<host-triple>` is the build triple for the host (the triple of your +computer, by default), and `<name>` is the name for your custom toolchain. (If you +added `--stage 1` to your build command, the compiler will be in the `stage1` +folder instead.) You'll only need to do this once - it will automatically point +to the latest build you've done. + +Once this is set up, you can use your custom toolchain just like any other. For +example, if you've named your toolchain `local`, running `cargo +local build` will +compile a project with your custom rustc, setting `rustup override set local` will +override the toolchain for your current directory, and `cargo +local doc` will use +your custom rustc and rustdoc to generate docs. (If you do this with a `--stage 1` +build, you'll need to build rustdoc specially, since it's not normally built in +stage 1. `python x.py build --stage 1 src/libstd src/tools/rustdoc` will build +rustdoc and libstd, which will allow rustdoc to be run with that toolchain.) ## Pull Requests @@ -298,6 +325,32 @@ Speaking of tests, Rust has a comprehensive test suite. More information about it can be found [here](https://github.com/rust-lang/rust-wiki-backup/blob/master/Note-testsuite.md). +### External Dependencies + +Currently building Rust will also build the following external projects: + +* [clippy](https://github.com/rust-lang-nursery/rust-clippy) + +If your changes break one of these projects, you need to fix it by opening +a pull request against the broken project. When you have opened a pull request, +you can point the submodule at your pull request by calling + +``` +git fetch origin pull/$id_of_your_pr/head:my_pr +git checkout my_pr +``` + +within the submodule's directory. Don't forget to also add your changes with + +``` +git add path/to/submodule +``` + +outside the submodule. + +It can also be more convenient during development to set `submodules = false` +in the `config.toml` to prevent `x.py` from resetting to the original branch. + ## Writing Documentation Documentation improvements are very welcome.
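To make the toolchain-linking instructions in the CONTRIBUTING.md hunk above concrete, here is a minimal sketch of the full workflow. The toolchain name `local` and the host triple `x86_64-unknown-linux-gnu` are illustrative assumptions, not values taken from this diff; substitute the triple your own build produced under `build/`.

```
# Link the stage2 output of `python x.py build` as a rustup toolchain named "local".
# Replace the triple with your host's triple (the directory name under build/).
rustup toolchain link local build/x86_64-unknown-linux-gnu/stage2

# Confirm the linked toolchain resolves to the locally built compiler.
rustup run local rustc --version

# Build any project with the custom toolchain, as described above.
cargo +local build
```

The same `+local` syntax applies to `cargo doc`, subject to the stage-1 rustdoc caveat noted in the hunk.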
The source of `doc.rust-lang.org` diff --git a/RELEASES.md index c3a7367a2ee54..5815cb0f97260 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -3,7 +3,7 @@ Version 1.20.0 (2017-08-31) Language -------- -- [Associated constants in traits is now stabilised.][42809] +- [Associated constants are now stabilised.][42809] - [A lot of macro bugs are now fixed.][42913] Compiler -------- @@ -77,7 +77,7 @@ Stabilized APIs - [`slice::sort_unstable_by_key`] - [`slice::sort_unstable_by`] - [`slice::sort_unstable`] -- [`ste::from_boxed_utf8_unchecked`] +- [`str::from_boxed_utf8_unchecked`] - [`str::as_bytes_mut`] - [`str::as_bytes_mut`] - [`str::from_utf8_mut`] diff --git a/config.toml.example b/config.toml.example index fd1f03b9d0e24..3f5101ebf342f 100644 --- a/config.toml.example +++ b/config.toml.example @@ -73,6 +73,10 @@ # controlled by rustbuild's -j parameter. #link-jobs = 0 +# When invoking `llvm-config`, this configures whether the `--shared` argument is +# passed to prefer linking to shared libraries. +#link-shared = false + # ============================================================================= # General build configuration options # ============================================================================= @@ -166,6 +170,15 @@ # to +10 on Unix platforms, and by using a "low priority" job object on Windows. #low-priority = false +# Arguments passed to the `./configure` script, used during distcheck. You +# probably won't fill this in yourself; it is filled in by the `./configure` +# script. +#configure-args = [] + +# Indicates that a local rebuild is occurring instead of a full bootstrap, +# essentially skipping stage0 as the local compiler is recompiling itself again. +#local-rebuild = false + # ============================================================================= # General install configuration options # ============================================================================= @@ -195,6 +208,13 @@ # ============================================================================= [rust] +# Indicates that the build should be optimized for debugging Rust. Note that +# this is typically not what you want as it takes an incredibly large amount of +# time to have a debug-mode rustc compile any code (notably libstd). If this +# value is set to `true`, it will affect a number of the configuration options below +# as well, if they are left unconfigured. +#debug = false + # Whether or not to optimize the compiler and standard library # Note: the slowness of the non optimized compiler compiling itself usually # outweighs the time gains in not doing optimizations, therefore a @@ -249,6 +269,10 @@ # desired in distributions, for example. #rpath = true +# Suppresses extraneous output from tests to ensure the output of the test +# harness is relatively clean. +#quiet-tests = false + # Flag indicating whether tests are compiled with optimizations (the -O flag) or # with debuginfo (the -g flag) #optimize-tests = true @@ -259,7 +283,13 @@ #codegen-tests = true # Flag indicating whether git info will be retrieved from .git automatically. -#ignore-git = false +# Having the git information can cause a lot of rebuilds during development. +# Note: If this attribute is not explicitly set (e.g. if left commented out) it +# will default to true if channel = "dev", but will default to false otherwise. +#ignore-git = true + +# Whether or not to create a source tarball when building the distribution tarballs.
+#dist-src = false # ============================================================================= # Options for specific targets @@ -304,6 +334,10 @@ # linked binaries #musl-root = "..." +# Used in testing for configuring where the QEMU images are located, you +# probably don't want to use this. +#qemu-rootfs = "..." + # ============================================================================= # Distribution options # diff --git a/configure b/configure index 664b473b2c9d0..eeb8d081d3454 100755 --- a/configure +++ b/configure @@ -1,779 +1,17 @@ #!/bin/sh -# /bin/sh on Solaris is not a POSIX compatible shell, but /usr/bin/bash is. -if [ `uname -s` = 'SunOS' -a "${POSIX_SHELL}" != "true" ]; then - POSIX_SHELL="true" - export POSIX_SHELL - exec /usr/bin/env bash $0 "$@" -fi -unset POSIX_SHELL # clear it so if we invoke other scripts, they run as bash as well +script="$(dirname $0)"/src/bootstrap/configure.py -msg() { - echo "configure: $*" -} - -step_msg() { - msg - msg "$1" - msg -} - -warn() { - echo "configure: WARNING: $1" -} - -err() { - echo "configure: error: $1" - exit 1 -} - -run() { - msg "$@" - "$@" -} - -need_ok() { - if [ $? -ne 0 ] - then - err "$1" - fi -} - -need_cmd() { - if command -v $1 >/dev/null 2>&1 - then msg "found program '$1'" - else err "program '$1' is missing, please install it" - fi -} - -make_dir() { - if [ ! -d $1 ] - then - run mkdir -p $1 - fi -} - -copy_if_changed() { - if cmp -s $1 $2 - then - msg "leaving $2 unchanged" - else - run cp -f $1 $2 - chmod u-w $2 # make copied artifact read-only - fi -} - -move_if_changed() { - if cmp -s $1 $2 - then - msg "leaving $2 unchanged" - else - run mv -f $1 $2 - chmod u-w $2 # make moved artifact read-only - fi -} - -putvar() { - local T - eval T=\$$1 - eval TLEN=\${#$1} - if [ $TLEN -gt 35 ] - then - printf "configure: %-20s := %.35s ...\n" $1 "$T" - else - printf "configure: %-20s := %s %s\n" $1 "$T" "$2" - fi - printf "%-20s := %s\n" $1 "$T" >>config.tmp -} - -putpathvar() { - local T - eval T=\$$1 - eval TLEN=\${#$1} - if [ $TLEN -gt 35 ] - then - printf "configure: %-20s := %.35s ...\n" $1 "$T" - else - printf "configure: %-20s := %s %s\n" $1 "$T" "$2" - fi - if [ -z "$T" ] - then - printf "%-20s := \n" $1 >>config.tmp - else - printf "%-20s := \"%s\"\n" $1 "$T" >>config.tmp - fi -} - -probe() { - local V=$1 +try() { + cmd=$1 shift - local P - local T - for P - do - T=$(command -v $P 2>&1) - if [ $? -eq 0 ] - then - VER0=$($P --version 2>/dev/null \ - | grep -o '[vV]\?[0-9][0-9.][a-z0-9.-]*' | head -1 ) - if [ $? 
-eq 0 -a "x${VER0}" != "x" ] - then - VER="($VER0)" - else - VER="" - fi - break - else - VER="" - T="" - fi - done - eval $V=\$T - putpathvar $V "$VER" -} - -probe_need() { - probe $* - local V=$1 - shift - eval VV=\$$V - if [ -z "$VV" ] - then - err "$V needed, but unable to find any of: $*" - fi -} - -validate_opt () { - for arg in $CFG_CONFIGURE_ARGS - do - isArgValid=0 - for option in $BOOL_OPTIONS - do - if test --disable-$option = $arg - then - isArgValid=1 - fi - if test --enable-$option = $arg - then - isArgValid=1 - fi - done - for option in $VAL_OPTIONS - do - if echo "$arg" | grep -q -- "--$option=" - then - isArgValid=1 - fi - done - if [ "$arg" = "--help" ] - then - echo - echo "No more help available for Configure options," - echo "check the Wiki or join our IRC channel" - break - else - if test $isArgValid -eq 0 - then - err "Option '$arg' is not recognized" - fi - fi - done -} - -# `valopt OPTION_NAME DEFAULT DOC` extracts a string-valued option -# from command line, using provided default value for the option if -# not present, and saves it to the generated config.mk. -# -# `valopt_nosave` is much the same, except that it does not save the -# result to config.mk (instead the script should use `putvar` itself -# later on to save it). `valopt_core` is the core upon which the -# other two are built. - -valopt_core() { - VAL_OPTIONS="$VAL_OPTIONS $2" - - local SAVE=$1 - local OP=$2 - local DEFAULT=$3 - shift - shift - shift - local DOC="$*" - if [ $HELP -eq 0 ] - then - local UOP=$(echo $OP | tr '[:lower:]' '[:upper:]' | tr '\-' '\_') - local V="CFG_${UOP}" - local V_PROVIDED="${V}_PROVIDED" - eval $V="$DEFAULT" - for arg in $CFG_CONFIGURE_ARGS - do - if echo "$arg" | grep -q -- "--$OP=" - then - val=$(echo "$arg" | cut -f2 -d=) - eval $V=$val - eval $V_PROVIDED=1 - fi - done - if [ "$SAVE" = "save" ] - then - putvar $V - fi - else - if [ -z "$DEFAULT" ] - then - DEFAULT="" - fi - OP="${OP}=[${DEFAULT}]" - printf " --%-30s %s\n" "$OP" "$DOC" - fi -} - -valopt_nosave() { - valopt_core nosave "$@" -} - -valopt() { - valopt_core save "$@" -} - -# `opt OPTION_NAME DEFAULT DOC` extracts a boolean-valued option from -# command line, using the provided default value (0/1) for the option -# if not present, and saves it to the generated config.mk. -# -# `opt_nosave` is much the same, except that it does not save the -# result to config.mk (instead the script should use `putvar` itself -# later on to save it). `opt_core` is the core upon which the other -# two are built. 
- -opt_core() { - BOOL_OPTIONS="$BOOL_OPTIONS $2" - - local SAVE=$1 - local OP=$2 - local DEFAULT=$3 - shift - shift - shift - local DOC="$*" - local FLAG="" - - if [ $DEFAULT -eq 0 ] - then - FLAG="enable" - DEFAULT_FLAG="disable" - else - FLAG="disable" - DEFAULT_FLAG="enable" - DOC="don't $DOC" - fi - - if [ $HELP -eq 0 ] - then - for arg in $CFG_CONFIGURE_ARGS - do - if [ "$arg" = "--${FLAG}-${OP}" ] - then - OP=$(echo $OP | tr 'a-z-' 'A-Z_') - FLAG=$(echo $FLAG | tr 'a-z' 'A-Z') - local V="CFG_${FLAG}_${OP}" - local V_PROVIDED="CFG_${FLAG}_${OP}_PROVIDED" - eval $V=1 - eval $V_PROVIDED=1 - if [ "$SAVE" = "save" ] - then - putvar $V - fi - elif [ "$arg" = "--${DEFAULT_FLAG}-${OP}" ] - then - OP=$(echo $OP | tr 'a-z-' 'A-Z_') - DEFAULT_FLAG=$(echo $DEFAULT_FLAG | tr 'a-z' 'A-Z') - local V_PROVIDED="CFG_${DEFAULT_FLAG}_${OP}_PROVIDED" - eval $V_PROVIDED=1 - fi - done - else - if [ -n "$META" ] - then - OP="$OP=<$META>" - fi - printf " --%-30s %s\n" "$FLAG-$OP" "$DOC" - fi -} - -opt_nosave() { - opt_core nosave "$@" -} - -opt() { - opt_core save "$@" -} - -envopt() { - local NAME=$1 - local V="CFG_${NAME}" - eval VV=\$$V - - # If configure didn't set a value already, then check environment. - # - # (It is recommended that the configure script always check the - # environment before setting any values to envopt variables; see - # e.g. how CFG_CC is handled, where it first checks `-z "$CC"`, - # and issues msg if it ends up employing that provided value.) - if [ -z "$VV" ] - then - eval $V=\$$NAME - eval VV=\$$V - fi - - # If script or environment provided a value, save it. - if [ -n "$VV" ] - then - putvar $V - fi -} - -enable_if_not_disabled() { - local OP=$1 - local UOP=$(echo $OP | tr '[:lower:]' '[:upper:]' | tr '\-' '\_') - local ENAB_V="CFG_ENABLE_$UOP" - local EXPLICITLY_DISABLED="CFG_DISABLE_${UOP}_PROVIDED" - eval VV=\$$EXPLICITLY_DISABLED - if [ -z "$VV" ]; then - eval $ENAB_V=1 + T=$($cmd --version 2>/dev/null) + if [ $? -eq 0 ]; then + exec $cmd "$script" "$@" fi } -to_gnu_triple() { - case $1 in - i686-pc-windows-gnu) echo i686-w64-mingw32 ;; - x86_64-pc-windows-gnu) echo x86_64-w64-mingw32 ;; - *) echo $1 ;; - esac -} - -# Prints the absolute path of a directory to stdout -abs_path() { - local _path="$1" - # Unset CDPATH because it causes havok: it makes the destination unpredictable - # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null - # for good measure. 
- (unset CDPATH && cd "$_path" > /dev/null && pwd) -} - -HELP=0 -for arg; do - case "$arg" in - --help) HELP=1;; - esac -done - -msg "looking for configure programs" -need_cmd cmp -need_cmd mkdir -need_cmd printf -need_cmd cut -need_cmd head -need_cmd grep -need_cmd xargs -need_cmd cp -need_cmd find -need_cmd uname -need_cmd date -need_cmd tr -need_cmd sed -need_cmd file -need_cmd make - -CFG_SRC_DIR="$(abs_path $(dirname $0))/" -CFG_SRC_DIR_RELATIVE="$(dirname $0)/" -CFG_BUILD_DIR="$(pwd)/" -CFG_SELF="$0" -CFG_CONFIGURE_ARGS="$@" - - -case "${CFG_SRC_DIR}" in - *\ * ) - err "The path to the rust source directory contains spaces, which is not supported" - ;; - *) - ;; -esac - - -OPTIONS="" -if [ "$HELP" -eq 1 ] -then - echo - echo "Usage: $CFG_SELF [options]" - echo - echo "Options:" - echo -else - msg "recreating config.tmp" - echo '' >config.tmp - - step_msg "processing $CFG_SELF args" -fi - -BOOL_OPTIONS="" -VAL_OPTIONS="" - -opt debug 0 "debug mode; disables optimization unless \`--enable-optimize\` given" -opt valgrind 0 "run tests with valgrind (memcheck by default)" -opt helgrind 0 "run tests with helgrind instead of memcheck" -opt valgrind-rpass 1 "run rpass-valgrind tests with valgrind" -opt docs 1 "build standard library documentation" -opt compiler-docs 0 "build compiler documentation" -opt optimize-tests 1 "build tests with optimizations" -opt debuginfo-tests 0 "build tests with debugger metadata" -opt quiet-tests 0 "enable quieter output when running tests" -opt libcpp 1 "build llvm with libc++ instead of libstdc++ when using clang" -opt llvm-assertions 0 "build LLVM with assertions" -opt debug-assertions 0 "build with debugging assertions" -opt fast-make 0 "use .gitmodules as timestamp for submodule deps" -opt ccache 0 "invoke gcc/clang via ccache to reuse object files between builds" -opt sccache 0 "invoke gcc/clang via sccache to reuse object files between builds" -opt local-rust 0 "use an installed rustc rather than downloading a snapshot" -opt local-rebuild 0 "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version" -opt llvm-static-stdcpp 0 "statically link to libstdc++ for LLVM" -opt llvm-link-shared 0 "prefer shared linking to LLVM (llvm-config --link-shared)" -opt rpath 1 "build rpaths into rustc itself" -opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0" -# This is used by the automation to produce single-target nightlies -opt dist-host-only 0 "only install bins for the host architecture" -opt inject-std-version 1 "inject the current compiler version of libstd into programs" -opt llvm-version-check 1 "check if the LLVM version is supported, build anyway" -opt codegen-tests 1 "run the src/test/codegen tests" -opt option-checking 1 "complain about unrecognized options in this configure script" -opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)" -opt locked-deps 0 "force Cargo.lock to be up to date" -opt vendor 0 "enable usage of vendored Rust crates" -opt sanitizers 0 "build the sanitizer runtimes (asan, lsan, msan, tsan)" -opt dist-src 1 "when building tarballs enables building a source tarball" -opt cargo-openssl-static 0 "static openssl in cargo" -opt profiler 0 "build the profiler runtime" - -# Optimization and debugging options. These may be overridden by the release channel, etc. 
-opt_nosave optimize 1 "build optimized rust code" -opt_nosave optimize-cxx 1 "build optimized C++ code" -opt_nosave optimize-llvm 1 "build optimized LLVM" -opt_nosave llvm-assertions 0 "build LLVM with assertions" -opt_nosave debug-assertions 0 "build with debugging assertions" -opt_nosave llvm-release-debuginfo 0 "build LLVM with debugger metadata" -opt_nosave debuginfo 0 "build with debugger metadata" -opt_nosave debuginfo-lines 0 "build with line number debugger metadata" -opt_nosave debuginfo-only-std 0 "build only libstd with debugging information" -opt_nosave debug-jemalloc 0 "build jemalloc with --enable-debug --enable-fill" - -valopt localstatedir "/var/lib" "local state directory" -valopt sysconfdir "/etc" "install system configuration files" - -valopt datadir "${CFG_PREFIX}/share" "install data" -valopt infodir "${CFG_PREFIX}/share/info" "install additional info" -valopt llvm-root "" "set LLVM root" -valopt python "" "set path to python" -valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located" -valopt build "" "GNUs ./configure syntax LLVM build triple" -valopt android-cross-path "" "Android NDK standalone path (deprecated)" -valopt i686-linux-android-ndk "" "i686-linux-android NDK standalone path" -valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path" -valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone path" -valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path" -valopt x86_64-linux-android-ndk "" "x86_64-linux-android NDK standalone path" -valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!" -valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)" -valopt musl-root-x86_64 "" "x86_64-unknown-linux-musl install directory" -valopt musl-root-i686 "" "i686-unknown-linux-musl install directory" -valopt musl-root-arm "" "arm-unknown-linux-musleabi install directory" -valopt musl-root-armhf "" "arm-unknown-linux-musleabihf install directory" -valopt musl-root-armv7 "" "armv7-unknown-linux-musleabihf install directory" -valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag" -valopt qemu-armhf-rootfs "" "rootfs in qemu testing, you probably don't want to use this" -valopt qemu-aarch64-rootfs "" "rootfs in qemu testing, you probably don't want to use this" -valopt experimental-targets "" "experimental LLVM targets to build" - -if [ -e ${CFG_SRC_DIR}.git ] -then - valopt release-channel "dev" "the name of the release channel to build" -else - # If we have no git directory then we are probably a tarball distribution - # and should default to stable channel - Issue 28322 - probe CFG_GIT git - msg "git: no git directory. Changing default release channel to stable" - valopt release-channel "stable" "the name of the release channel to build" -fi - -# Used on systems where "cc" and "ar" are unavailable -valopt default-linker "cc" "the default linker" -valopt default-ar "ar" "the default ar" - -# Many of these are saved below during the "writing configuration" step -# (others are conditionally saved). 
-opt_nosave manage-submodules 1 "let the build manage the git submodules" -opt_nosave clang 0 "prefer clang to gcc for building the runtime" -opt_nosave jemalloc 1 "build liballoc with jemalloc" -opt full-bootstrap 0 "build three compilers instead of two" -opt extended 0 "build an extended rust tool set" - -valopt_nosave prefix "/usr/local" "set installation prefix" -valopt_nosave local-rust-root "/usr/local" "set prefix for local rust binary" -valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples" -valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples" -valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" -valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH" -valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries" - -# On Windows this determines root of the subtree for target libraries. -# Host runtime libs always go to 'bin'. -valopt libdir "${CFG_PREFIX}/lib" "install libraries" - -case "$CFG_LIBDIR" in - "$CFG_PREFIX"/*) CAT_INC=2;; - "$CFG_PREFIX"*) CAT_INC=1;; - *) - err "libdir must begin with the prefix. Use --prefix to set it accordingly.";; -esac - -CFG_LIBDIR_RELATIVE=`echo ${CFG_LIBDIR} | cut -c$((${#CFG_PREFIX}+${CAT_INC}))-` - -if [ $HELP -eq 1 ] -then - echo - exit 0 -fi - -# Validate Options -if [ -z "$CFG_DISABLE_OPTION_CHECKING" ] -then - step_msg "validating $CFG_SELF args" - validate_opt -fi - -# Validate the release channel, and configure options -case "$CFG_RELEASE_CHANNEL" in - nightly ) - msg "overriding settings for $CFG_RELEASE_CHANNEL" - enable_if_not_disabled llvm-assertions - # FIXME(stage0) re-enable this on the next stage0 now that #35566 is - # fixed - case "$CFG_BUILD" in - *-pc-windows-gnu) - ;; - *) - enable_if_not_disabled debuginfo-lines - enable_if_not_disabled debuginfo-only-std - ;; - esac - - ;; - beta | stable) - msg "overriding settings for $CFG_RELEASE_CHANNEL" - case "$CFG_BUILD" in - *-pc-windows-gnu) - ;; - *) - enable_if_not_disabled debuginfo-lines - enable_if_not_disabled debuginfo-only-std - ;; - esac - ;; - dev) - ;; - *) - err "release channel must be 'dev', 'nightly', 'beta' or 'stable'" - ;; -esac - -# Adjust perf and debug options for debug mode -if [ -n "$CFG_ENABLE_DEBUG" ]; then - msg "debug mode enabled, setting performance options" - if [ -z "$CFG_ENABLE_OPTIMIZE_PROVIDED" ]; then - msg "optimization not explicitly enabled, disabling optimization" - CFG_DISABLE_OPTIMIZE=1 - CFG_DISABLE_OPTIMIZE_CXX=1 - fi - - # Set following variables to 1 unless setting already provided - enable_if_not_disabled debug-assertions - enable_if_not_disabled debug-jemalloc - enable_if_not_disabled debuginfo - enable_if_not_disabled llvm-assertions -fi - -# OK, now write the debugging options -if [ -n "$CFG_DISABLE_OPTIMIZE" ]; then putvar CFG_DISABLE_OPTIMIZE; fi -if [ -n "$CFG_DISABLE_OPTIMIZE_CXX" ]; then putvar CFG_DISABLE_OPTIMIZE_CXX; fi -if [ -n "$CFG_DISABLE_OPTIMIZE_LLVM" ]; then putvar CFG_DISABLE_OPTIMIZE_LLVM; fi -if [ -n "$CFG_ENABLE_LLVM_ASSERTIONS" ]; then putvar CFG_ENABLE_LLVM_ASSERTIONS; fi -if [ -n "$CFG_ENABLE_DEBUG_ASSERTIONS" ]; then putvar CFG_ENABLE_DEBUG_ASSERTIONS; fi -if [ -n "$CFG_ENABLE_LLVM_RELEASE_DEBUGINFO" ]; then putvar CFG_ENABLE_LLVM_RELEASE_DEBUGINFO; fi -if [ -n "$CFG_ENABLE_DEBUGINFO" ]; then putvar CFG_ENABLE_DEBUGINFO; fi -if [ -n "$CFG_ENABLE_DEBUGINFO_LINES" ]; then putvar CFG_ENABLE_DEBUGINFO_LINES; fi -if [ -n "$CFG_ENABLE_DEBUGINFO_ONLY_STD" ]; then putvar CFG_ENABLE_DEBUGINFO_ONLY_STD; fi -if [ -n 
"$CFG_ENABLE_DEBUG_JEMALLOC" ]; then putvar CFG_ENABLE_DEBUG_JEMALLOC; fi - -step_msg "looking for build programs" - -probe_need CFG_CURL curl -if [ -z "$CFG_PYTHON_PROVIDED" ]; then - probe_need CFG_PYTHON python2.7 python2 python -fi - -python_version=$($CFG_PYTHON -V 2>&1) -if [ $(echo $python_version | grep -c '^Python 2\.7') -ne 1 ]; then - err "Found $python_version, but Python 2.7 is required" -fi - -# the valgrind rpass tests will fail if you don't have a valgrind, but they're -# only disabled if you opt out. -if [ -z "$CFG_VALGRIND" ] -then - # If the user has explicitly asked for valgrind tests, then fail - if [ -n "$CFG_ENABLE_VALGRIND" ] && [ -n "$CFG_ENABLE_VALGRIND_PROVIDED" ] - then - err "No valgrind present, but valgrind tests explicitly requested" - else - CFG_DISABLE_VALGRIND_RPASS=1 - putvar CFG_DISABLE_VALGRIND_RPASS - fi -fi - -# Do some sanity checks if running on buildbot -# (these env vars are set by rust-buildbot) -if [ -n "$RUST_DIST_SERVER" -a -n "$ALLOW_NONZERO_RLIMIT_CORE" ]; then - # Frequently the llvm submodule directory is broken by the build - # being killed - llvm_lock="${CFG_SRC_DIR}/.git/modules/src/llvm/index.lock" - if [ -e "$llvm_lock" ]; then - step_msg "removing $llvm_lock" - rm -f "$llvm_lock" - fi -fi - -BIN_SUF= -if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] || [ "$CFG_OSTYPE" = "pc-windows-msvc" ] -then - BIN_SUF=.exe -fi - -# --enable-local-rebuild implies --enable-local-rust too -if [ -n "$CFG_ENABLE_LOCAL_REBUILD" ] -then - if [ -z "$CFG_ENABLE_LOCAL_RUST" ] - then - CFG_ENABLE_LOCAL_RUST=1 - putvar CFG_ENABLE_LOCAL_RUST - fi -fi - -if [ -n "$CFG_ENABLE_LOCAL_RUST" ] -then - system_rustc=$(which rustc) - if [ -f ${CFG_LOCAL_RUST_ROOT}/bin/rustc${BIN_SUF} ] - then - : # everything already configured - elif [ -n "$system_rustc" ] - then - # we assume that rustc is in a /bin directory - CFG_LOCAL_RUST_ROOT=${system_rustc%/bin/rustc} - else - err "no local rust to use" - fi - - CMD="${CFG_LOCAL_RUST_ROOT}/bin/rustc${BIN_SUF}" - LRV=`LD_LIBRARY_PATH=${CFG_LOCAL_RUST_ROOT}/lib $CMD --version` - if [ $? -ne 0 ] - then - step_msg "failure while running $CMD --version" - exit 1 - fi - step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV" - putvar CFG_LOCAL_RUST_ROOT -fi - -# Same with jemalloc. save the setting here. -if [ -n "$CFG_DISABLE_JEMALLOC" ] -then - putvar CFG_DISABLE_JEMALLOC -fi - -# All safeguards based on $CFG_ENABLE_CLANG should occur before this -# point in the script; after this point, script logic should inspect -# $CFG_USING_CLANG rather than $CFG_ENABLE_CLANG. 
- -# Set CFG_{CC,CXX,CPP,CFLAGS,CXXFLAGS,LDFLAGS} -envopt CC -envopt CXX -envopt CPP -envopt CFLAGS -envopt CXXFLAGS -envopt LDFLAGS - -# a little post-processing of various config values -CFG_PREFIX=${CFG_PREFIX%/} -CFG_MANDIR=${CFG_MANDIR%/} -CFG_DOCDIR=${CFG_DOCDIR%/} -CFG_BINDIR=${CFG_BINDIR%/} -CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')" -CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')" - -# copy build-triples to host-triples so that builds are a subset of hosts -V_TEMP="" -for i in $CFG_BUILD $CFG_HOST; -do - echo "$V_TEMP" | grep -qF $i || V_TEMP="$V_TEMP${V_TEMP:+ }$i" -done -CFG_HOST=$V_TEMP - -# copy host-triples to target-triples so that hosts are a subset of targets -V_TEMP="" -for i in $CFG_HOST $CFG_TARGET; -do - echo "$V_TEMP" | grep -qF $i || V_TEMP="$V_TEMP${V_TEMP:+ }$i" -done -CFG_TARGET=$V_TEMP - -step_msg "writing configuration" - -putvar CFG_SRC_DIR -putvar CFG_SRC_DIR_RELATIVE -putvar CFG_BUILD_DIR -putvar CFG_OSTYPE -putvar CFG_CPUTYPE -putvar CFG_CONFIGURE_ARGS -putvar CFG_PREFIX -putvar CFG_HOST -putvar CFG_TARGET -putvar CFG_LIBDIR_RELATIVE -putvar CFG_DISABLE_MANAGE_SUBMODULES -putvar CFG_AARCH64_LINUX_ANDROID_NDK -putvar CFG_ARM_LINUX_ANDROIDEABI_NDK -putvar CFG_ARMV7_LINUX_ANDROIDEABI_NDK -putvar CFG_I686_LINUX_ANDROID_NDK -putvar CFG_X86_64_LINUX_ANDROID_NDK -putvar CFG_NACL_CROSS_PATH -putvar CFG_MANDIR -putvar CFG_DOCDIR -putvar CFG_BINDIR -putvar CFG_USING_LIBCPP - -msg -copy_if_changed ${CFG_SRC_DIR}src/bootstrap/mk/Makefile.in ./Makefile -move_if_changed config.tmp config.mk -rm -f config.tmp -touch config.stamp - -if [ -z "$CFG_ENABLE_DEBUG" ]; then - step_msg "configured in release mode. for development consider --enable-debug" -else - step_msg "complete" -fi - -if [ "$CFG_SRC_DIR" = `pwd` ]; then - X_PY=x.py -else - X_PY=${CFG_SRC_DIR_RELATIVE}x.py -fi - -msg "run \`python ${X_PY} --help\`" -msg +try python2.7 "$@" +try python27 "$@" +try python2 "$@" +exec python $script "$@" diff --git a/src/Cargo.lock b/src/Cargo.lock index 123c884585c19..b5e292159638f 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -80,7 +80,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -93,7 +93,7 @@ dependencies = [ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -104,7 +104,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -140,7 +140,7 @@ dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 
(registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -175,20 +175,14 @@ dependencies = [ [[package]] name = "cargo" -version = "0.22.0" -source = "git+https://github.com/rust-lang/cargo#bcf3997b1fa177afc5b6c632a6fbbf6cc75df427" -replace = "cargo 0.22.0" - -[[package]] -name = "cargo" -version = "0.22.0" +version = "0.23.0" dependencies = [ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "cargotest 0.1.0", "core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "crates-io 0.11.0", + "crates-io 0.12.0", "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -197,7 +191,7 @@ dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", "fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -206,8 +200,8 @@ dependencies = [ "ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -223,7 +217,7 @@ dependencies = [ "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -233,10 +227,10 @@ dependencies = [ name = "cargotest" version = "0.1.0" dependencies = [ - "cargo 0.22.0", + "cargo 0.23.0", "filetime 0.1.10 
(registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -303,7 +297,7 @@ dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -312,6 +306,14 @@ dependencies = [ name = "completion" version = "0.1.0" +[[package]] +name = "conv" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "core" version = "0.0.0" @@ -325,7 +327,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -333,12 +335,12 @@ name = "core-foundation-sys" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crates-io" -version = "0.11.0" +version = "0.12.0" dependencies = [ "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.11.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -353,13 +355,37 @@ name = "crossbeam" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "cssparser" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cssparser-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "procedural-masquerade 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cssparser-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "procedural-masquerade 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "curl" version = "0.4.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -372,7 +398,7 @@ version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -380,6 +406,11 @@ dependencies = [ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "custom_derive" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "dbghelp-sys" version = "0.2.0" @@ -389,6 +420,14 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "debug_unreachable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "derive-new" version = "0.3.0" @@ -417,7 +456,7 @@ dependencies = [ [[package]] name = "dtoa" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -474,7 +513,7 @@ name = "filetime" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -494,7 +533,7 @@ name = "flate2" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -518,13 +557,22 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "futf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "futures" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -539,12 +587,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
[[package]] name = "git2" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -556,7 +604,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -634,6 +682,26 @@ dependencies = [ name = "hover" version = "0.1.0" +[[package]] +name = "html-diff" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kuchiki 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "html5ever" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "markup5ever 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "idna" version = "0.1.4" @@ -688,7 +756,7 @@ dependencies = [ [[package]] name = "itoa" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -696,8 +764,8 @@ name = "jobserver" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -705,7 +773,7 @@ name = "jsonrpc-core" version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -721,6 +789,17 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "kuchiki" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cssparser 0.13.7 (registry+https://github.com/rust-lang/crates.io-index)", + "html5ever 0.18.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "selectors 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "languageserver-types" version = "0.12.0" @@ -748,18 +827,18 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.29" +version = "0.2.30" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libgit2-sys" -version = "0.6.12" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", "curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", @@ -772,7 +851,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -784,7 +863,7 @@ version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -805,7 +884,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "magenta" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "magenta-sys" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "markup5ever" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "phf_codegen 0.7.21 
(registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -835,7 +949,7 @@ name = "memchr" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -843,7 +957,7 @@ name = "memchr" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -852,7 +966,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -877,7 +991,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -902,7 +1016,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -953,7 +1067,7 @@ name = "num_cpus" version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -973,7 +1087,7 @@ dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -988,7 +1102,7 @@ version = "0.9.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 
(registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1029,19 +1143,64 @@ name = "pest" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "phf" +version = "0.7.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "phf_codegen" +version = "0.7.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "phf_generator" +version = "0.7.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "phf_shared" +version = "0.7.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "pkg-config" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "precomputed-hash" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "proc_macro" version = "0.0.0" dependencies = [ + "rustc_errors 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", ] +[[package]] +name = "procedural-masquerade" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "profiler_builtins" version = "0.0.0" @@ -1106,10 +1265,11 @@ dependencies = [ [[package]] name = "rand" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", + "magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1168,9 +1328,9 @@ version = "0.1.0" [[package]] name = "rls" -version = "0.1.0" +version = "0.122.0" dependencies = [ - "cargo 0.22.0 (git+https://github.com/rust-lang/cargo)", + "cargo 0.23.0", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 7.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "languageserver-types 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1179,15 +1339,14 @@ dependencies = [ "racer 2.0.10 (registry+https://github.com/rust-lang/crates.io-index)", "rls-analysis 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", "rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-rustc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rustfmt-nightly 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 
(registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1213,6 +1372,11 @@ dependencies = [ "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rls-rustc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rls-span" version = "0.4.0" @@ -1634,6 +1798,7 @@ dependencies = [ "build_helper 0.1.0", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", + "html-diff 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1647,14 +1812,14 @@ dependencies = [ [[package]] name = "rustfmt-nightly" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1686,6 +1851,21 @@ name = "scopeguard" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "selectors" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cssparser 0.13.7 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "precomputed-hash 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "semver" version = "0.7.0" @@ -1737,8 +1917,8 @@ name = "serde_json" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1752,6 +1932,16 @@ name = "shell-escape" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "siphasher" +version = 
"0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "smallvec" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "socket2" version = "0.2.2" @@ -1759,7 +1949,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1801,6 +1991,36 @@ dependencies = [ "core 0.0.0", ] +[[package]] +name = "string_cache" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "precomputed-hash 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "string_cache_codegen" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "string_cache_shared" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "strings" version = "0.1.0" @@ -1877,7 +2097,7 @@ name = "syntex_errors" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1899,7 +2119,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1914,7 +2134,7 @@ version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1923,7 +2143,17 @@ name = "tempdir" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tendril" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "utf-8 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1945,13 +2175,13 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "termcolor" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "wincolor 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1980,7 +2210,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2058,6 +2288,14 @@ name = "unicode-xid" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "unreachable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "unreachable" version = "1.0.0" @@ -2101,6 +2339,14 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "utf-8" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "utf8-ranges" version = "0.1.3" @@ -2169,7 +2415,7 @@ name = "xattr" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2198,20 +2444,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4" "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" "checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32" -"checksum cargo 0.22.0 (git+https://github.com/rust-lang/cargo)" = "" 
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" "checksum clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2267a8fdd4dce6956ba6649e130f62fb279026e5e84b92aa939ac8f85ce3f9f0" "checksum cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ebbb35d3dc9cd09497168f33de1acb79b265d350ab0ac34133b98f8509af1f" +"checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299" "checksum core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5909502e547762013619f4c4e01cc7393c20fe2d52d7fa471c1210adb2320dc7" "checksum core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bc9fb3d6cb663e6fd7cf1c63f9b144ee2b1e4a78595a0451dd34bff85b9a3387" "checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" +"checksum cssparser 0.13.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef6124306e5ebc5ab11891d063aeafdd0cdc308079b708c8b566125f3680292b" +"checksum cssparser-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "079adec4af52bb5275eadd004292028c79eb3c5f5b4ee8086a36d4197032f6df" "checksum curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7034c534a1d7d22f7971d6088aa9d281d219ef724026c3428092500f41ae9c2c" "checksum curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d5481162dc4f424d088581db2f979fa7d4c238fe9794595de61d8d7522e277de" +"checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9" "checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" +"checksum debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3" "checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e" "checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472" "checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a" -"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90" +"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" "checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180" "checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" @@ -2222,10 +2472,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344" "checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d" "checksum fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab76cfd2aaa59b7bf6688ad9ba15bbae64bff97f04ea02144cfd3443e5c2866" -"checksum futures 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4b63a4792d4f8f686defe3b39b92127fea6344de5d38202b2ee5a11bbbf29d6a" +"checksum futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "51f93f3de6ba1794dcd5810b3546d004600a59a98266487c8407bc4b24e398f3" +"checksum futures 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a82bdc62350ca9d7974c760e9665102fc9d740992a528c2254aa930e53b783c4" "checksum gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)" = "120d07f202dcc3f72859422563522b66fe6463a4c513df062874daad05f85f0a" "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" -"checksum git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aa01936ac96555c083c0e8553f672616274408d9d3fc5b8696603fbf63ff43ee" +"checksum git2 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0c1c0203d653f4140241da0c1375a404f0a397249ec818cd2076c6280c50f6fa" "checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum globset 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "feeb1b6840809ef5efcf7a4a990bc4e1b7ee3df8cf9e2379a75aeb2ba42ac9c3" @@ -2233,20 +2484,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum handlebars 0.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fbba80e74e9591a5f6a4ffff6b7f9d645759a896e431cfbdc853e9184370294a" "checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" "checksum home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f25ae61099d8f3fee8b483df0bd4ecccf4b2731897aad40d50eca1b641fe6db" +"checksum html-diff 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5298d63081a642508fce965740ddb03a386c5d81bf1fef0579a815cf49cb8c68" +"checksum html5ever 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a49d5001dd1bddf042ea41ed4e0a671d50b1bf187e66b349d7ec613bdce4ad90" "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" "checksum ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b3fcaf2365eb14b28ec7603c98c06cc531f19de9eb283d89a3dff8417c8c99f5" -"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" +"checksum itoa 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ac17257442c2ed77dbc9fd555cf83c58b0c7f7d0e8f2ae08c0ac05c72842e1f6" "checksum jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "443ae8bc0af6c106e6e8b77e04684faecc1a5ce94e058f4c2b0a037b0ea1b133" "checksum jsonrpc-core 7.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"903e5eee845f3d83c1436d12848d97b1247cf850ff06a8e1db2f1ce3543af2cf" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum kuchiki 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ef2ea4f2f7883cd7c6772b06c14abca01a2cc1f75c426cebffcf6b3b925ef9fc" "checksum languageserver-types 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d52e477b23bf52cd3ca0f9fc6c5d14be954eec97e3b9cdfbd962d911bd533caf" "checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf" -"checksum libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "8a014d9226c2cc402676fbe9ea2e15dd5222cd1dd57f576b5b283178c944a264" -"checksum libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "df18a822100352d9863b302faf6f8f25c0e77f0e60feb40e5dbe1238b7f13b1d" +"checksum libc 0.2.30 (registry+https://github.com/rust-lang/crates.io-index)" = "2370ca07ec338939e356443dac2296f581453c35fe1e3a3ed06023c49435f915" +"checksum libgit2-sys 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)" = "c00f6e5bc3fb2b5f87e75e8d0fd4ae6720d55f3ee23d389b7c6cae30f8db8db1" "checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75" "checksum libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd64ef8ee652185674455c1d450b83cbc8ad895625d543b5324d923f82e4d8" "checksum log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "880f77541efa6e5cc74e76910c9884d9859683118839d6a1dc3b11e63512565b" "checksum lzma-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "66b2e318eb97ab84f05725471f90c52a09c964053a5899a13fd0165acc26d00b" +"checksum mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +"checksum magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf0336886480e671965f794bc9b6fce88503563013d1bfb7a502c81fe3ac527" +"checksum magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40d014c7011ac470ae28e2f76a02bfea4a8480f73e701353b49ad7a8d75f4699" +"checksum markup5ever 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ff834ac7123c6a37826747e5ca09db41fd7a83126792021c2e636ad174bb77d3" "checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" "checksum mdbook 0.0.22 (registry+https://github.com/rust-lang/crates.io-index)" = "22911d86cde6f80fa9f0fb2a68bbbde85d97af4fe0ce267141c83a4187d28700" "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" @@ -2269,28 +2527,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" +"checksum phf 0.7.21 
(registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" +"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" +"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" +"checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2" "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" +"checksum precomputed-hash 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf1fc3616b3ef726a847f2cd2388c646ef6a1f1ba4835c2629004da48184150" +"checksum procedural-masquerade 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c93cdc1fb30af9ddf3debc4afbdb0f35126cbd99daa229dd76cdd5349b41d989" "checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478" "checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973" "checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469" "checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" "checksum racer 2.0.10 (registry+https://github.com/rust-lang/crates.io-index)" = "f120c7510ef7aff254aeb06067fb6fac573ec96a1660e194787cf9dced412bf0" -"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" +"checksum rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "eb250fd207a4729c976794d03db689c9be1d634ab5a1c9da9492a13d8fecbcdf" "checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" "checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" "checksum rls-analysis 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d2cb40c0371765897ae428b5706bb17135705ad4f6d1b8b6afbaabcf8c9b5cff" "checksum rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "11d339f1888e33e74d8032de0f83c40b2bdaaaf04a8cfc03b32186c3481fb534" +"checksum rls-rustc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5fa757c9d547d460427ceff01875f9cac5f5acd8fc6543946e9b0335ba29d537" "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a" "checksum rls-vfs 0.4.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "ffd34691a510938bb67fe0444fb363103c73ffb31c121d1e16bc92d8945ea8ff" "checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" -"checksum rustfmt-nightly 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6eea0d0590ae793fc4d281df56e01dc7531575c8ed9a72fadf5fdc7305a0d32f" +"checksum rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7d6dbb39239e54df780a850721fba87b3fdb2e645b39041742ec111369cec6af" "checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d" "checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57" +"checksum selectors 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3c89b1c6a3c029c82263f7dd2d44d0005ee7374eb09e254ab59dede4353a8c0" "checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "f7726f29ddf9731b17ff113c461e362c381d9d69433f79de4f3dd572488823e9" @@ -2299,8 +2565,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46" "checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b" "checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8" +"checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537" +"checksum smallvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4f8266519bc1d17d0b5b16f6c21295625d562841c708f6376f49028a43e9c11e" "checksum socket2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4daf80fcf54186fac4fe049e0b39d36a5cfde69a11a06413e61e77f553cccf9a" "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" +"checksum string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "413fc7852aeeb5472f1986ef755f561ddf0c789d3d796e65f0b6fe293ecd4ef8" +"checksum string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7" +"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc" "checksum strings 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "da75d8bf2c4d210d63dd09581a041b036001f9f6e03d9b151dbff810fb7ba26a" "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" "checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" @@ -2311,9 +2582,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde" "checksum tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "281285b717926caa919ad905ef89c63d75805c7d89437fb873100925a53f2b1b" "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" +"checksum tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1b72f8e2f5b73b65c315b1a70c730f24b9d7a25f39e98de8acbe2bb795caea" "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209" -"checksum termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5193a56b8d82014662c4b933dea6bec851daf018a2b01722e007daaf5f9dca" +"checksum termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9065bced9c3e43453aa3d56f1e98590b8455b341d2fa191a1090c0dd0b242c75" "checksum textwrap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f728584ea33b0ad19318e20557cb0a39097751dbb07171419673502f848c7af6" "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" "checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" @@ -2327,10 +2599,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" "checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb" "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" +"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27" "checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" "checksum userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d28ea36bbd9192d75bd9fa9b39f96ddb986eaee824adae5d53b6e51919b2f3" +"checksum utf-8 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "b6f923c601c7ac48ef1d66f7d5b5b2d9a7ba9c51333ab75a3ddf8d0309185a56" "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b" diff --git a/src/Cargo.toml b/src/Cargo.toml index ffec3eb075500..35ba022c4899a 100644 --- a/src/Cargo.toml +++ b/src/Cargo.toml @@ -56,5 +56,5 @@ debug-assertions = false debug = false debug-assertions = false -[replace] -"https://github.com/rust-lang/cargo#0.22.0" = { path = "tools/cargo" } +[patch.'https://github.com/rust-lang/cargo'] +cargo = { path = "tools/cargo" } diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 2e844ceb17831..e543b8c070bcc 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -76,10 +76,9 @@ The script accepts commands, flags, and arguments to determine what to do: There are currently two methods for configuring the rustbuild build system. First, rustbuild offers a TOML-based configuration system with a `config.toml` -file in the same location as `config.mk`. An example of this configuration can -be found at `config.toml.example`, and the configuration file can also be passed -as `--config path/to/config.toml` if the build system is being invoked manually -(via the python script). +file. An example of this configuration can be found at `config.toml.example`, +and the configuration file can also be passed as `--config path/to/config.toml` +if the build system is being invoked manually (via the python script). Next, the `./configure` options serialized in `config.mk` will be parsed and read. That is, if any `./configure` options are passed, they'll be diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 9369a55ccb97b..4ef6d70e820ea 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -167,6 +167,141 @@ def format_build_time(duration): return str(datetime.timedelta(seconds=int(duration))) +def default_build_triple(): + """Build triple as in LLVM""" + default_encoding = sys.getdefaultencoding() + try: + ostype = subprocess.check_output( + ['uname', '-s']).strip().decode(default_encoding) + cputype = subprocess.check_output( + ['uname', '-m']).strip().decode(default_encoding) + except (subprocess.CalledProcessError, OSError): + if sys.platform == 'win32': + return 'x86_64-pc-windows-msvc' + err = "uname not found" + sys.exit(err) + + # The goal here is to come up with the same triple as LLVM would, + # at least for the subset of platforms we're willing to target. 
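The `default_build_triple()` function introduced in the bootstrap.py hunk above moves the build-triple detection out of `RustBuild.build_triple()` into a module-level helper: it maps `uname -s` and `uname -m` output through an OS table and a CPU table, then applies platform-specific special cases (Android, Solaris, MSYS/MinGW/Cygwin, MIPS endianness). A minimal sketch of how the two mappings compose for the simple cases; the reduced tables and the `sketch_triple` name below are illustrative only and are not part of the patch:

```
# Illustrative sketch (reduced mappings, not part of bootstrap.py):
# default_build_triple() composes "<cpu>-<os>" from `uname` output via two
# lookup tables, then handles the platform-specific special cases.
OSTYPE_MAP = {'Darwin': 'apple-darwin', 'Linux': 'unknown-linux-gnu'}
CPUTYPE_MAP = {'x86_64': 'x86_64', 'amd64': 'x86_64', 'arm64': 'aarch64'}

def sketch_triple(uname_s, uname_m):
    """Compose a triple the way default_build_triple() does for simple cases."""
    return "{}-{}".format(CPUTYPE_MAP[uname_m], OSTYPE_MAP[uname_s])

assert sketch_triple('Darwin', 'x86_64') == 'x86_64-apple-darwin'
assert sketch_triple('Linux', 'amd64') == 'x86_64-unknown-linux-gnu'
```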
+ ostype_mapper = { + 'Bitrig': 'unknown-bitrig', + 'Darwin': 'apple-darwin', + 'DragonFly': 'unknown-dragonfly', + 'FreeBSD': 'unknown-freebsd', + 'Haiku': 'unknown-haiku', + 'NetBSD': 'unknown-netbsd', + 'OpenBSD': 'unknown-openbsd' + } + + # Consider the direct transformation first and then the special cases + if ostype in ostype_mapper: + ostype = ostype_mapper[ostype] + elif ostype == 'Linux': + os_from_sp = subprocess.check_output( + ['uname', '-o']).strip().decode(default_encoding) + if os_from_sp == 'Android': + ostype = 'linux-android' + else: + ostype = 'unknown-linux-gnu' + elif ostype == 'SunOS': + ostype = 'sun-solaris' + # On Solaris, uname -m will return a machine classification instead + # of a cpu type, so uname -p is recommended instead. However, the + # output from that option is too generic for our purposes (it will + # always emit 'i386' on x86/amd64 systems). As such, isainfo -k + # must be used instead. + try: + cputype = subprocess.check_output( + ['isainfo', '-k']).strip().decode(default_encoding) + except (subprocess.CalledProcessError, OSError): + err = "isainfo not found" + sys.exit(err) + elif ostype.startswith('MINGW'): + # msys' `uname` does not print gcc configuration, but prints msys + # configuration. so we cannot believe `uname -m`: + # msys1 is always i686 and msys2 is always x86_64. + # instead, msys defines $MSYSTEM which is MINGW32 on i686 and + # MINGW64 on x86_64. + ostype = 'pc-windows-gnu' + cputype = 'i686' + if os.environ.get('MSYSTEM') == 'MINGW64': + cputype = 'x86_64' + elif ostype.startswith('MSYS'): + ostype = 'pc-windows-gnu' + elif ostype.startswith('CYGWIN_NT'): + cputype = 'i686' + if ostype.endswith('WOW64'): + cputype = 'x86_64' + ostype = 'pc-windows-gnu' + else: + err = "unknown OS type: {}".format(ostype) + sys.exit(err) + + cputype_mapper = { + 'BePC': 'i686', + 'aarch64': 'aarch64', + 'amd64': 'x86_64', + 'arm64': 'aarch64', + 'i386': 'i686', + 'i486': 'i686', + 'i686': 'i686', + 'i786': 'i686', + 'powerpc': 'powerpc', + 'powerpc64': 'powerpc64', + 'powerpc64le': 'powerpc64le', + 'ppc': 'powerpc', + 'ppc64': 'powerpc64', + 'ppc64le': 'powerpc64le', + 's390x': 's390x', + 'x64': 'x86_64', + 'x86': 'i686', + 'x86-64': 'x86_64', + 'x86_64': 'x86_64' + } + + # Consider the direct transformation first and then the special cases + if cputype in cputype_mapper: + cputype = cputype_mapper[cputype] + elif cputype in {'xscale', 'arm'}: + cputype = 'arm' + if ostype == 'linux-android': + ostype = 'linux-androideabi' + elif cputype == 'armv6l': + cputype = 'arm' + if ostype == 'linux-android': + ostype = 'linux-androideabi' + else: + ostype += 'eabihf' + elif cputype in {'armv7l', 'armv8l'}: + cputype = 'armv7' + if ostype == 'linux-android': + ostype = 'linux-androideabi' + else: + ostype += 'eabihf' + elif cputype == 'mips': + if sys.byteorder == 'big': + cputype = 'mips' + elif sys.byteorder == 'little': + cputype = 'mipsel' + else: + raise ValueError("unknown byteorder: {}".format(sys.byteorder)) + elif cputype == 'mips64': + if sys.byteorder == 'big': + cputype = 'mips64' + elif sys.byteorder == 'little': + cputype = 'mips64el' + else: + raise ValueError('unknown byteorder: {}'.format(sys.byteorder)) + # only the n64 ABI is supported, indicate it + ostype += 'abi64' + elif cputype == 'sparcv9': + pass + else: + err = "unknown cpu type: {}".format(cputype) + sys.exit(err) + + return "{}-{}".format(cputype, ostype) + class RustBuild(object): """Provide all the methods required to build Rust""" def __init__(self): @@ -177,7 +312,6 @@ def 
__init__(self): self.build = '' self.build_dir = os.path.join(os.getcwd(), "build") self.clean = False - self.config_mk = '' self.config_toml = '' self.printed = False self.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) @@ -374,26 +508,6 @@ def get_toml(self, key): return self.get_string(value) or value.strip() return None - def get_mk(self, key): - """Returns the value of the given key in config.mk, otherwise returns None - - >>> rb = RustBuild() - >>> rb.config_mk = 'key := value\\n' - >>> rb.get_mk('key') - 'value' - - If the key does not exists, the result is None: - - >>> rb.get_mk('does_not_exists') == None - True - """ - for line in iter(self.config_mk.splitlines()): - if line.startswith(key + ' '): - var = line[line.find(':=') + 2:].strip() - if var != '': - return var - return None - def cargo(self): """Return config path for cargo""" return self.program_config('cargo') @@ -407,15 +521,9 @@ def program_config(self, program): >>> rb = RustBuild() >>> rb.config_toml = 'rustc = "rustc"\\n' - >>> rb.config_mk = 'CFG_LOCAL_RUST_ROOT := /tmp/rust\\n' >>> rb.program_config('rustc') 'rustc' - >>> cargo_path = rb.program_config('cargo') - >>> cargo_path.rstrip(".exe") == os.path.join("/tmp/rust", - ... "bin", "cargo") - True >>> rb.config_toml = '' - >>> rb.config_mk = '' >>> cargo_path = rb.program_config('cargo') >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(), ... "bin", "cargo") @@ -424,10 +532,6 @@ def program_config(self, program): config = self.get_toml(program) if config: return config - config = self.get_mk('CFG_LOCAL_RUST_ROOT') - if config: - return os.path.join(config, "bin", "{}{}".format( - program, self.exe_suffix())) return os.path.join(self.bin_root(), "bin", "{}{}".format( program, self.exe_suffix())) @@ -439,10 +543,14 @@ def get_string(line): 'devel' """ start = line.find('"') - if start == -1: - return None - end = start + 1 + line[start + 1:].find('"') - return line[start + 1:end] + if start != -1: + end = start + 1 + line[start + 1:].find('"') + return line[start + 1:end] + start = line.find('\'') + if start != -1: + end = start + 1 + line[start + 1:].find('\'') + return line[start + 1:end] + return None @staticmethod def exe_suffix(): @@ -521,154 +629,12 @@ def build_triple(self): config = self.get_toml('build') if config: return config - config = self.get_mk('CFG_BUILD') - if config: - return config - try: - ostype = subprocess.check_output( - ['uname', '-s']).strip().decode(default_encoding) - cputype = subprocess.check_output( - ['uname', '-m']).strip().decode(default_encoding) - except (subprocess.CalledProcessError, OSError): - if sys.platform == 'win32': - return 'x86_64-pc-windows-msvc' - err = "uname not found" - if self.verbose: - raise Exception(err) - sys.exit(err) - - # The goal here is to come up with the same triple as LLVM would, - # at least for the subset of platforms we're willing to target. 
- ostype_mapper = { - 'Bitrig': 'unknown-bitrig', - 'Darwin': 'apple-darwin', - 'DragonFly': 'unknown-dragonfly', - 'FreeBSD': 'unknown-freebsd', - 'Haiku': 'unknown-haiku', - 'NetBSD': 'unknown-netbsd', - 'OpenBSD': 'unknown-openbsd' - } - - # Consider the direct transformation first and then the special cases - if ostype in ostype_mapper: - ostype = ostype_mapper[ostype] - elif ostype == 'Linux': - os_from_sp = subprocess.check_output( - ['uname', '-o']).strip().decode(default_encoding) - if os_from_sp == 'Android': - ostype = 'linux-android' - else: - ostype = 'unknown-linux-gnu' - elif ostype == 'SunOS': - ostype = 'sun-solaris' - # On Solaris, uname -m will return a machine classification instead - # of a cpu type, so uname -p is recommended instead. However, the - # output from that option is too generic for our purposes (it will - # always emit 'i386' on x86/amd64 systems). As such, isainfo -k - # must be used instead. - try: - cputype = subprocess.check_output( - ['isainfo', '-k']).strip().decode(default_encoding) - except (subprocess.CalledProcessError, OSError): - err = "isainfo not found" - if self.verbose: - raise Exception(err) - sys.exit(err) - elif ostype.startswith('MINGW'): - # msys' `uname` does not print gcc configuration, but prints msys - # configuration. so we cannot believe `uname -m`: - # msys1 is always i686 and msys2 is always x86_64. - # instead, msys defines $MSYSTEM which is MINGW32 on i686 and - # MINGW64 on x86_64. - ostype = 'pc-windows-gnu' - cputype = 'i686' - if os.environ.get('MSYSTEM') == 'MINGW64': - cputype = 'x86_64' - elif ostype.startswith('MSYS'): - ostype = 'pc-windows-gnu' - elif ostype.startswith('CYGWIN_NT'): - cputype = 'i686' - if ostype.endswith('WOW64'): - cputype = 'x86_64' - ostype = 'pc-windows-gnu' - else: - err = "unknown OS type: {}".format(ostype) - if self.verbose: - raise ValueError(err) - sys.exit(err) - - cputype_mapper = { - 'BePC': 'i686', - 'aarch64': 'aarch64', - 'amd64': 'x86_64', - 'arm64': 'aarch64', - 'i386': 'i686', - 'i486': 'i686', - 'i686': 'i686', - 'i786': 'i686', - 'powerpc': 'powerpc', - 'powerpc64': 'powerpc64', - 'powerpc64le': 'powerpc64le', - 'ppc': 'powerpc', - 'ppc64': 'powerpc64', - 'ppc64le': 'powerpc64le', - 's390x': 's390x', - 'x64': 'x86_64', - 'x86': 'i686', - 'x86-64': 'x86_64', - 'x86_64': 'x86_64' - } - - # Consider the direct transformation first and then the special cases - if cputype in cputype_mapper: - cputype = cputype_mapper[cputype] - elif cputype in {'xscale', 'arm'}: - cputype = 'arm' - if ostype == 'linux-android': - ostype = 'linux-androideabi' - elif cputype == 'armv6l': - cputype = 'arm' - if ostype == 'linux-android': - ostype = 'linux-androideabi' - else: - ostype += 'eabihf' - elif cputype in {'armv7l', 'armv8l'}: - cputype = 'armv7' - if ostype == 'linux-android': - ostype = 'linux-androideabi' - else: - ostype += 'eabihf' - elif cputype == 'mips': - if sys.byteorder == 'big': - cputype = 'mips' - elif sys.byteorder == 'little': - cputype = 'mipsel' - else: - raise ValueError("unknown byteorder: {}".format(sys.byteorder)) - elif cputype == 'mips64': - if sys.byteorder == 'big': - cputype = 'mips64' - elif sys.byteorder == 'little': - cputype = 'mips64el' - else: - raise ValueError('unknown byteorder: {}'.format(sys.byteorder)) - # only the n64 ABI is supported, indicate it - ostype += 'abi64' - elif cputype == 'sparcv9': - pass - else: - err = "unknown cpu type: {}".format(cputype) - if self.verbose: - raise ValueError(err) - sys.exit(err) - - return "{}-{}".format(cputype, 
ostype) + return default_build_triple() def update_submodules(self): """Update submodules""" if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \ - self.get_toml('submodules') == "false" or \ - self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1": + self.get_toml('submodules') == "false": return print('Updating submodules') default_encoding = sys.getdefaultencoding() @@ -680,11 +646,9 @@ def update_submodules(self): ).decode(default_encoding).splitlines()] submodules = [module for module in submodules if not ((module.endswith("llvm") and - (self.get_toml('llvm-config') or - self.get_mk('CFG_LLVM_ROOT'))) or + self.get_toml('llvm-config')) or (module.endswith("jemalloc") and - (self.get_toml('jemalloc') or - self.get_mk('CFG_JEMALLOC_ROOT'))))] + self.get_toml('jemalloc')))] run(["git", "submodule", "update", "--init", "--recursive"] + submodules, cwd=self.rust_root, verbose=self.verbose) @@ -721,21 +685,15 @@ def bootstrap(): build.config_toml = config.read() except: pass - try: - build.config_mk = open('config.mk').read() - except: - pass if '\nverbose = 2' in build.config_toml: build.verbose = 2 elif '\nverbose = 1' in build.config_toml: build.verbose = 1 - build.use_vendored_sources = '\nvendor = true' in build.config_toml or \ - 'CFG_ENABLE_VENDOR' in build.config_mk + build.use_vendored_sources = '\nvendor = true' in build.config_toml - build.use_locked_deps = '\nlocked-deps = true' in build.config_toml or \ - 'CFG_ENABLE_LOCKED_DEPS' in build.config_mk + build.use_locked_deps = '\nlocked-deps = true' in build.config_toml if 'SUDO_USER' in os.environ and not build.use_vendored_sources: if os.environ.get('USER') != os.environ['SUDO_USER']: diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py index a65a3a4042eca..32ea4b4abe638 100644 --- a/src/bootstrap/bootstrap_test.py +++ b/src/bootstrap/bootstrap_test.py @@ -15,6 +15,7 @@ import unittest import tempfile import hashlib +import sys from shutil import rmtree @@ -110,5 +111,6 @@ def test_same_dates(self): TEST_LOADER.loadTestsFromTestCase(VerifyTestCase), TEST_LOADER.loadTestsFromTestCase(ProgramOutOfDate)]) - RUNNER = unittest.TextTestRunner(verbosity=2) - RUNNER.run(SUITE) + RUNNER = unittest.TextTestRunner(stream=sys.stdout, verbosity=2) + result = RUNNER.run(SUITE) + sys.exit(0 if result.wasSuccessful() else 1) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 298f6a004a20a..722b3d16e9d19 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -248,7 +248,7 @@ impl<'a> Builder<'a> { compile::StartupObjects, tool::BuildManifest, tool::Rustbook, tool::ErrorIndex, tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest, tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient, - tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, + tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, native::Llvm), Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest, check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Linkcheck, diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index 9c1ae83d38281..6ed504dfe74a3 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -24,7 +24,7 @@ use Build; use config::Config; // The version number -pub const CFG_RELEASE_NUM: &str = "1.21.0"; +pub const CFG_RELEASE_NUM: &str = "1.22.0"; // An optional number to put after the label, e.g. 
'.2' -> '-beta.2' // Be sure to make this starts with a dot to conform to semver pre-release diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index f43035fbfe8a1..372e0906cc61e 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -10,12 +10,12 @@ //! Serialized configuration of a build. //! -//! This module implements parsing `config.mk` and `config.toml` configuration -//! files to tweak how the build runs. +//! This module implements parsing `config.toml` configuration files to tweak +//! how the build runs. use std::collections::HashMap; use std::env; -use std::fs::{self, File}; +use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::process; @@ -23,7 +23,7 @@ use std::cmp; use num_cpus; use toml; -use util::{exe, push_exe_path}; +use util::exe; use cache::{INTERNER, Interned}; use flags::Flags; pub use flags::Subcommand; @@ -124,14 +124,12 @@ pub struct Config { pub nodejs: Option, pub gdb: Option, pub python: Option, - pub configure_args: Vec, pub openssl_static: bool, - + pub configure_args: Vec, // These are either the stage0 downloaded binaries or the locally installed ones. pub initial_cargo: PathBuf, pub initial_rustc: PathBuf, - } /// Per-target configuration stored in the global configuration structure. @@ -190,6 +188,8 @@ struct Build { sanitizers: Option, profiler: Option, openssl_static: Option, + configure_args: Option>, + local_rebuild: Option, } /// TOML representation of various global install decisions. @@ -219,6 +219,7 @@ struct Llvm { targets: Option, experimental_targets: Option, link_jobs: Option, + link_shared: Option, } #[derive(Deserialize, Default, Clone)] @@ -265,6 +266,9 @@ struct Rust { debuginfo_tests: Option, codegen_tests: Option, ignore_git: Option, + debug: Option, + dist_src: Option, + quiet_tests: Option, } /// TOML representation of how each build target is configured. 
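Among the hunks above, the `bootstrap_test.py` change makes the bootstrap unit-test harness report its outcome through the process exit code (and write to stdout) instead of always exiting 0, so callers such as CI can detect a failing suite. A small self-contained sketch of that pattern; the `_Example` test case below is hypothetical and not part of the patch:

```
import sys
import unittest

class _Example(unittest.TestCase):
    """Hypothetical test case standing in for the real bootstrap test suite."""
    def test_passes(self):
        self.assertTrue(True)

if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(_Example)
    runner = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
    result = runner.run(suite)
    # Exit nonzero if anything failed so the caller can detect the failure.
    sys.exit(0 if result.wasSuccessful() else 1)
```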
@@ -374,6 +378,8 @@ impl Config { set(&mut config.sanitizers, build.sanitizers); set(&mut config.profiler, build.profiler); set(&mut config.openssl_static, build.openssl_static); + set(&mut config.configure_args, build.configure_args); + set(&mut config.local_rebuild, build.local_rebuild); config.verbose = cmp::max(config.verbose, flags.verbose); if let Some(ref install) = toml.install { @@ -385,6 +391,18 @@ impl Config { config.mandir = install.mandir.clone().map(PathBuf::from); } + // Store off these values as options because if they're not provided + // we'll infer default values for them later + let mut llvm_assertions = None; + let mut debuginfo_lines = None; + let mut debuginfo_only_std = None; + let mut debug = None; + let mut debug_jemalloc = None; + let mut debuginfo = None; + let mut debug_assertions = None; + let mut optimize = None; + let mut ignore_git = None; + if let Some(ref llvm) = toml.llvm { match llvm.ccache { Some(StringOrBool::String(ref s)) => { @@ -397,31 +415,35 @@ impl Config { } set(&mut config.ninja, llvm.ninja); set(&mut config.llvm_enabled, llvm.enabled); - set(&mut config.llvm_assertions, llvm.assertions); + llvm_assertions = llvm.assertions; set(&mut config.llvm_optimize, llvm.optimize); set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); set(&mut config.llvm_version_check, llvm.version_check); set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); + set(&mut config.llvm_link_shared, llvm.link_shared); config.llvm_targets = llvm.targets.clone(); config.llvm_experimental_targets = llvm.experimental_targets.clone(); config.llvm_link_jobs = llvm.link_jobs; } if let Some(ref rust) = toml.rust { - set(&mut config.rust_debug_assertions, rust.debug_assertions); - set(&mut config.rust_debuginfo, rust.debuginfo); - set(&mut config.rust_debuginfo_lines, rust.debuginfo_lines); - set(&mut config.rust_debuginfo_only_std, rust.debuginfo_only_std); - set(&mut config.rust_optimize, rust.optimize); + debug = rust.debug; + debug_assertions = rust.debug_assertions; + debuginfo = rust.debuginfo; + debuginfo_lines = rust.debuginfo_lines; + debuginfo_only_std = rust.debuginfo_only_std; + optimize = rust.optimize; + ignore_git = rust.ignore_git; + debug_jemalloc = rust.debug_jemalloc; set(&mut config.rust_optimize_tests, rust.optimize_tests); set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests); set(&mut config.codegen_tests, rust.codegen_tests); set(&mut config.rust_rpath, rust.rpath); - set(&mut config.debug_jemalloc, rust.debug_jemalloc); set(&mut config.use_jemalloc, rust.use_jemalloc); set(&mut config.backtrace, rust.backtrace); set(&mut config.channel, rust.channel.clone()); - set(&mut config.ignore_git, rust.ignore_git); + set(&mut config.rust_dist_src, rust.dist_src); + set(&mut config.quiet_tests, rust.quiet_tests); config.rustc_default_linker = rust.default_linker.clone(); config.rustc_default_ar = rust.default_ar.clone(); config.musl_root = rust.musl_root.clone().map(PathBuf::from); @@ -476,224 +498,29 @@ impl Config { None => stage0_root.join(exe("cargo", &config.build)), }; - // compat with `./configure` while we're still using that - if fs::metadata("config.mk").is_ok() { - config.update_with_config_mk(); - } + // Now that we've reached the end of our configuration, infer the + // default values for all options that we haven't otherwise stored yet. - config - } + let default = config.channel == "nightly"; + config.llvm_assertions = llvm_assertions.unwrap_or(default); - /// "Temporary" routine to parse `config.mk` into this configuration. 
- /// - /// While we still have `./configure` this implements the ability to decode - /// that configuration into this. This isn't exactly a full-blown makefile - /// parser, but hey it gets the job done! - fn update_with_config_mk(&mut self) { - let mut config = String::new(); - File::open("config.mk").unwrap().read_to_string(&mut config).unwrap(); - for line in config.lines() { - let mut parts = line.splitn(2, ":=").map(|s| s.trim()); - let key = parts.next().unwrap(); - let value = match parts.next() { - Some(n) if n.starts_with('\"') => &n[1..n.len() - 1], - Some(n) => n, - None => continue - }; - - macro_rules! check { - ($(($name:expr, $val:expr),)*) => { - if value == "1" { - $( - if key == concat!("CFG_ENABLE_", $name) { - $val = true; - continue - } - if key == concat!("CFG_DISABLE_", $name) { - $val = false; - continue - } - )* - } - } - } + let default = match &config.channel[..] { + "stable" | "beta" | "nightly" => true, + _ => false, + }; + config.rust_debuginfo_lines = debuginfo_lines.unwrap_or(default); + config.rust_debuginfo_only_std = debuginfo_only_std.unwrap_or(default); - check! { - ("MANAGE_SUBMODULES", self.submodules), - ("COMPILER_DOCS", self.compiler_docs), - ("DOCS", self.docs), - ("LLVM_ASSERTIONS", self.llvm_assertions), - ("LLVM_RELEASE_DEBUGINFO", self.llvm_release_debuginfo), - ("OPTIMIZE_LLVM", self.llvm_optimize), - ("LLVM_VERSION_CHECK", self.llvm_version_check), - ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp), - ("LLVM_LINK_SHARED", self.llvm_link_shared), - ("OPTIMIZE", self.rust_optimize), - ("DEBUG_ASSERTIONS", self.rust_debug_assertions), - ("DEBUGINFO", self.rust_debuginfo), - ("DEBUGINFO_LINES", self.rust_debuginfo_lines), - ("DEBUGINFO_ONLY_STD", self.rust_debuginfo_only_std), - ("JEMALLOC", self.use_jemalloc), - ("DEBUG_JEMALLOC", self.debug_jemalloc), - ("RPATH", self.rust_rpath), - ("OPTIMIZE_TESTS", self.rust_optimize_tests), - ("DEBUGINFO_TESTS", self.rust_debuginfo_tests), - ("QUIET_TESTS", self.quiet_tests), - ("LOCAL_REBUILD", self.local_rebuild), - ("NINJA", self.ninja), - ("CODEGEN_TESTS", self.codegen_tests), - ("LOCKED_DEPS", self.locked_deps), - ("VENDOR", self.vendor), - ("FULL_BOOTSTRAP", self.full_bootstrap), - ("EXTENDED", self.extended), - ("SANITIZERS", self.sanitizers), - ("PROFILER", self.profiler), - ("DIST_SRC", self.rust_dist_src), - ("CARGO_OPENSSL_STATIC", self.openssl_static), - } + let default = debug == Some(true); + config.debug_jemalloc = debug_jemalloc.unwrap_or(default); + config.rust_debuginfo = debuginfo.unwrap_or(default); + config.rust_debug_assertions = debug_assertions.unwrap_or(default); + config.rust_optimize = optimize.unwrap_or(!default); - match key { - "CFG_BUILD" if value.len() > 0 => self.build = INTERNER.intern_str(value), - "CFG_HOST" if value.len() > 0 => { - self.hosts.extend(value.split(" ").map(|s| INTERNER.intern_str(s))); + let default = config.channel == "dev"; + config.ignore_git = ignore_git.unwrap_or(default); - } - "CFG_TARGET" if value.len() > 0 => { - self.targets.extend(value.split(" ").map(|s| INTERNER.intern_str(s))); - } - "CFG_EXPERIMENTAL_TARGETS" if value.len() > 0 => { - self.llvm_experimental_targets = Some(value.to_string()); - } - "CFG_MUSL_ROOT" if value.len() > 0 => { - self.musl_root = Some(parse_configure_path(value)); - } - "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => { - let target = INTERNER.intern_str("x86_64-unknown-linux-musl"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.musl_root = Some(parse_configure_path(value)); - } 
- "CFG_MUSL_ROOT_I686" if value.len() > 0 => { - let target = INTERNER.intern_str("i686-unknown-linux-musl"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.musl_root = Some(parse_configure_path(value)); - } - "CFG_MUSL_ROOT_ARM" if value.len() > 0 => { - let target = INTERNER.intern_str("arm-unknown-linux-musleabi"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.musl_root = Some(parse_configure_path(value)); - } - "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => { - let target = INTERNER.intern_str("arm-unknown-linux-musleabihf"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.musl_root = Some(parse_configure_path(value)); - } - "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => { - let target = INTERNER.intern_str("armv7-unknown-linux-musleabihf"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.musl_root = Some(parse_configure_path(value)); - } - "CFG_DEFAULT_AR" if value.len() > 0 => { - self.rustc_default_ar = Some(value.to_string()); - } - "CFG_DEFAULT_LINKER" if value.len() > 0 => { - self.rustc_default_linker = Some(value.to_string()); - } - "CFG_GDB" if value.len() > 0 => { - self.gdb = Some(parse_configure_path(value)); - } - "CFG_RELEASE_CHANNEL" => { - self.channel = value.to_string(); - } - "CFG_PREFIX" => { - self.prefix = Some(PathBuf::from(value)); - } - "CFG_SYSCONFDIR" => { - self.sysconfdir = Some(PathBuf::from(value)); - } - "CFG_DOCDIR" => { - self.docdir = Some(PathBuf::from(value)); - } - "CFG_BINDIR" => { - self.bindir = Some(PathBuf::from(value)); - } - "CFG_LIBDIR" => { - self.libdir = Some(PathBuf::from(value)); - } - "CFG_LIBDIR_RELATIVE" => { - self.libdir_relative = Some(PathBuf::from(value)); - } - "CFG_MANDIR" => { - self.mandir = Some(PathBuf::from(value)); - } - "CFG_LLVM_ROOT" if value.len() > 0 => { - let target = self.target_config.entry(self.build.clone()) - .or_insert(Target::default()); - let root = parse_configure_path(value); - target.llvm_config = Some(push_exe_path(root, &["bin", "llvm-config"])); - } - "CFG_JEMALLOC_ROOT" if value.len() > 0 => { - let target = self.target_config.entry(self.build.clone()) - .or_insert(Target::default()); - target.jemalloc = Some(parse_configure_path(value).join("libjemalloc_pic.a")); - } - "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => { - let target = INTERNER.intern_str("arm-linux-androideabi"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.ndk = Some(parse_configure_path(value)); - } - "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => { - let target = INTERNER.intern_str("armv7-linux-androideabi"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.ndk = Some(parse_configure_path(value)); - } - "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = INTERNER.intern_str("i686-linux-android"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.ndk = Some(parse_configure_path(value)); - } - "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = INTERNER.intern_str("aarch64-linux-android"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.ndk = Some(parse_configure_path(value)); - } - "CFG_X86_64_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = INTERNER.intern_str("x86_64-linux-android"); - let target = self.target_config.entry(target).or_insert(Target::default()); - 
target.ndk = Some(parse_configure_path(value)); - } - "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => { - let path = parse_configure_path(value); - self.initial_rustc = push_exe_path(path.clone(), &["bin", "rustc"]); - self.initial_cargo = push_exe_path(path, &["bin", "cargo"]); - } - "CFG_PYTHON" if value.len() > 0 => { - let path = parse_configure_path(value); - self.python = Some(path); - } - "CFG_ENABLE_CCACHE" if value == "1" => { - self.ccache = Some(exe("ccache", &self.build)); - } - "CFG_ENABLE_SCCACHE" if value == "1" => { - self.ccache = Some(exe("sccache", &self.build)); - } - "CFG_CONFIGURE_ARGS" if value.len() > 0 => { - self.configure_args = value.split_whitespace() - .map(|s| s.to_string()) - .collect(); - } - "CFG_QEMU_ARMHF_ROOTFS" if value.len() > 0 => { - let target = INTERNER.intern_str("arm-unknown-linux-gnueabihf"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.qemu_rootfs = Some(parse_configure_path(value)); - } - "CFG_QEMU_AARCH64_ROOTFS" if value.len() > 0 => { - let target = INTERNER.intern_str("aarch64-unknown-linux-gnu"); - let target = self.target_config.entry(target).or_insert(Target::default()); - target.qemu_rootfs = Some(parse_configure_path(value)); - } - _ => {} - } - } + config } pub fn verbose(&self) -> bool { @@ -705,30 +532,6 @@ impl Config { } } -#[cfg(not(windows))] -fn parse_configure_path(path: &str) -> PathBuf { - path.into() -} - -#[cfg(windows)] -fn parse_configure_path(path: &str) -> PathBuf { - // on windows, configure produces unix style paths e.g. /c/some/path but we - // only want real windows paths - - use std::process::Command; - use build_helper; - - // '/' is invalid in windows paths, so we can detect unix paths by the presence of it - if !path.contains('/') { - return path.into(); - } - - let win_path = build_helper::output(Command::new("cygpath").arg("-w").arg(path)); - let win_path = win_path.trim(); - - win_path.into() -} - fn set(field: &mut T, val: Option) { if let Some(v) = val { *field = v; diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py new file mode 100755 index 0000000000000..fa8b761336068 --- /dev/null +++ b/src/bootstrap/configure.py @@ -0,0 +1,414 @@ +#!/usr/bin/env python +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. 
+ +# ignore-tidy-linelength + +import sys +import os +rust_dir = os.path.dirname(os.path.abspath(__file__)) +rust_dir = os.path.dirname(rust_dir) +rust_dir = os.path.dirname(rust_dir) +sys.path.append(os.path.join(rust_dir, "src", "bootstrap")) +import bootstrap + +class Option: + def __init__(self, name, rustbuild, desc, value): + self.name = name + self.rustbuild = rustbuild + self.desc = desc + self.value = value + +options = [] + +def o(*args): + options.append(Option(*args, value=False)) + +def v(*args): + options.append(Option(*args, value=True)) + +o("debug", "rust.debug", "debug mode; disables optimization unless `--enable-optimize` given") +o("docs", "build.docs", "build standard library documentation") +o("compiler-docs", "build.compiler-docs", "build compiler documentation") +o("optimize-tests", "rust.optimize-tests", "build tests with optimizations") +o("debuginfo-tests", "rust.debuginfo-tests", "build tests with debugger metadata") +o("quiet-tests", "rust.quiet-tests", "enable quieter output when running tests") +o("ccache", "llvm.ccache", "invoke gcc/clang via ccache to reuse object files between builds") +o("sccache", None, "invoke gcc/clang via sccache to reuse object files between builds") +o("local-rust", None, "use an installed rustc rather than downloading a snapshot") +v("local-rust-root", None, "set prefix for local rust binary") +o("local-rebuild", "build.local-rebuild", "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version") +o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ for LLVM") +o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)") +o("rpath", "rust.rpath", "build rpaths into rustc itself") +o("llvm-version-check", "llvm.version-check", "check if the LLVM version is supported, build anyway") +o("codegen-tests", "rust.codegen-tests", "run the src/test/codegen tests") +o("option-checking", None, "complain about unrecognized options in this configure script") +o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)") +o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date") +o("vendor", "build.vendor", "enable usage of vendored Rust crates") +o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, lsan, msan, tsan)") +o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball") +o("cargo-openssl-static", "build.openssl-static", "static openssl in cargo") +o("profiler", "build.profiler", "build the profiler runtime") + +# Optimization and debugging options. These may be overridden by the release +# channel, etc. 
+o("optimize", "rust.optimize", "build optimized rust code") +o("optimize-llvm", "llvm.optimize", "build optimized LLVM") +o("llvm-assertions", "llvm.assertions", "build LLVM with assertions") +o("debug-assertions", "rust.debug-assertions", "build with debugging assertions") +o("llvm-release-debuginfo", "llvm.release-debuginfo", "build LLVM with debugger metadata") +o("debuginfo", "rust.debuginfo", "build with debugger metadata") +o("debuginfo-lines", "rust.debuginfo-lines", "build with line number debugger metadata") +o("debuginfo-only-std", "rust.debuginfo-only-std", "build only libstd with debugging information") +o("debug-jemalloc", "rust.debug-jemalloc", "build jemalloc with --enable-debug --enable-fill") + +v("prefix", "install.prefix", "set installation prefix") +v("localstatedir", "install.localstatedir", "local state directory") +v("datadir", "install.datadir", "install data") +v("sysconfdir", "install.sysconfdir", "install system configuration files") +v("infodir", "install.infodir", "install additional info") +v("libdir", "install.libdir", "install libraries") +v("mandir", "install.mandir", "install man pages in PATH") +v("docdir", "install.docdir", "install documentation in PATH") +v("bindir", "install.bindir", "install binaries") + +v("llvm-root", None, "set LLVM root") +v("python", "build.python", "set path to python") +v("jemalloc-root", None, "set directory where libjemalloc_pic.a is located") +v("android-cross-path", "target.arm-linux-androideabi.android-ndk", + "Android NDK standalone path (deprecated)") +v("i686-linux-android-ndk", "target.i686-linux-android.android-ndk", + "i686-linux-android NDK standalone path") +v("arm-linux-androideabi-ndk", "target.arm-linux-androideabi.android-ndk", + "arm-linux-androideabi NDK standalone path") +v("armv7-linux-androideabi-ndk", "target.armv7-linux-androideabi.android-ndk", + "armv7-linux-androideabi NDK standalone path") +v("aarch64-linux-android-ndk", "target.aarch64-linux-android.android-ndk", + "aarch64-linux-android NDK standalone path") +v("x86_64-linux-android-ndk", "target.x86_64-linux-android.android-ndk", + "x86_64-linux-android NDK standalone path") +v("musl-root", "target.x86_64-unknown-linux-musl.musl-root", + "MUSL root installation directory (deprecated)") +v("musl-root-x86_64", "target.x86_64-unknown-linux-musl.musl-root", + "x86_64-unknown-linux-musl install directory") +v("musl-root-i686", "target.i686-unknown-linux-musl.musl-root", + "i686-unknown-linux-musl install directory") +v("musl-root-arm", "target.arm-unknown-linux-musleabi.musl-root", + "arm-unknown-linux-musleabi install directory") +v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root", + "arm-unknown-linux-musleabihf install directory") +v("musl-root-armv7", "target.armv7-unknown-linux-musleabihf.musl-root", + "armv7-unknown-linux-musleabihf install directory") +v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs", + "rootfs in qemu testing, you probably don't want to use this") +v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs", + "rootfs in qemu testing, you probably don't want to use this") +v("experimental-targets", "llvm.experimental-targets", + "experimental LLVM targets to build") +v("release-channel", "rust.channel", "the name of the release channel to build") + +# Used on systems where "cc" and "ar" are unavailable +v("default-linker", "rust.default-linker", "the default linker") +v("default-ar", "rust.default-ar", "the default ar") + +# Many of these are saved below during the "writing 
configuration" step +# (others are conditionally saved). +o("manage-submodules", "build.submodules", "let the build manage the git submodules") +o("jemalloc", "rust.use-jemalloc", "build liballoc with jemalloc") +o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two") +o("extended", "build.extended", "build an extended rust tool set") + +v("build", "build.build", "GNUs ./configure syntax LLVM build triple") +v("host", None, "GNUs ./configure syntax LLVM host triples") +v("target", None, "GNUs ./configure syntax LLVM target triples") + +v("set", None, "set arbitrary key/value pairs in TOML configuration") + +def p(msg): + print("configure: " + msg) + +def err(msg): + print("configure: error: " + msg) + sys.exit(1) + +if '--help' in sys.argv or '-h' in sys.argv: + print('Usage: ./configure [options]') + print('') + print('Options') + for option in options: + if 'android' in option.name: + # no one needs to know about these obscure options + continue + if option.value: + print('\t{:30} {}'.format('--{}=VAL'.format(option.name), option.desc)) + else: + print('\t{:30} {}'.format('--enable-{}'.format(option.name), option.desc)) + print('') + print('This configure script is a thin configuration shim over the true') + print('configuration system, `config.toml`. You can explore the comments') + print('in `config.toml.example` next to this configure script to see') + print('more information about what each option is. Additionally you can') + print('pass `--set` as an argument to set arbitrary key/value pairs') + print('in the TOML configuration if desired') + print('') + print('Also note that all options which take `--enable` can similarly') + print('be passed with `--disable-foo` to forcibly disable the option') + sys.exit(0) + +# Parse all command line arguments into one of these three lists, handling +# boolean and value-based options separately +unknown_args = [] +need_value_args = [] +known_args = {} + +p("processing command line") +i = 1 +while i < len(sys.argv): + arg = sys.argv[i] + i += 1 + if not arg.startswith('--'): + unknown_args.append(arg) + continue + + found = False + for option in options: + value = None + if option.value: + keyval = arg[2:].split('=', 1) + key = keyval[0] + if option.name != key: + continue + + if len(keyval) > 1: + value = keyval[1] + elif i < len(sys.argv): + value = sys.argv[i] + i += 1 + else: + need_value_args.append(arg) + continue + else: + if arg[2:] == 'enable-' + option.name: + value = True + elif arg[2:] == 'disable-' + option.name: + value = False + else: + continue + + found = True + if not option.name in known_args: + known_args[option.name] = [] + known_args[option.name].append((option, value)) + break + + if not found: + unknown_args.append(arg) +p("") + +if 'option-checking' not in known_args or known_args['option-checking'][1]: + if len(unknown_args) > 0: + err("Option '" + unknown_args[0] + "' is not recognized") + if len(need_value_args) > 0: + err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0])) + +# Parse all known arguments into a configuration structure that reflects the +# TOML we're going to write out +config = {} + +def build(): + if 'build' in known_args: + return known_args['build'][0][1] + return bootstrap.default_build_triple() + +def set(key, value): + s = "{:20} := {}".format(key, value) + if len(s) < 70: + p(s) + else: + p(s[:70] + " ...") + + arr = config + parts = key.split('.') + for i, part in enumerate(parts): + if i == len(parts) - 1: + arr[part] = value + else: + if not part in 
arr: + arr[part] = {} + arr = arr[part] + +for key in known_args: + # The `set` option is special and can be passed a bunch of times + if key == 'set': + for option, value in known_args[key]: + keyval = value.split('=', 1) + if len(keyval) == 1 or keyval[1] == "true": + value = True + elif keyval[1] == "false": + value = False + else: + value = keyval[1] + set(keyval[0], value) + continue + + # Ensure each option is only passed once + arr = known_args[key] + if len(arr) > 1: + err("Option '{}' provided more than once".format(key)) + option, value = arr[0] + + # If we have a clear avenue to set our value in rustbuild, do so + if option.rustbuild is not None: + set(option.rustbuild, value) + continue + + # Otherwise we're a "special" option and need some extra handling, so do + # that here. + if option.name == 'sccache': + set('llvm.ccache', 'sccache') + elif option.name == 'local-rust': + for path in os.environ['PATH'].split(os.pathsep): + if os.path.exists(path + '/rustc'): + set('build.rustc', path + '/rustc') + break + for path in os.environ['PATH'].split(os.pathsep): + if os.path.exists(path + '/cargo'): + set('build.cargo', path + '/cargo') + break + elif option.name == 'local-rust-root': + set('build.rustc', value + '/bin/rustc') + set('build.cargo', value + '/bin/cargo') + elif option.name == 'llvm-root': + set('target.{}.llvm-config'.format(build()), value + '/bin/llvm-config') + elif option.name == 'jemalloc-root': + set('target.{}.jemalloc'.format(build()), value + '/libjemalloc_pic.a') + elif option.name == 'host': + set('build.host', value.split(',')) + elif option.name == 'target': + set('build.target', value.split(',')) + elif option.name == 'option-checking': + # this was handled above + pass + else: + raise RuntimeError("unhandled option {}".format(option.name)) + +set('build.configure-args', sys.argv[1:]) + +# "Parse" the `config.toml.example` file into the various sections, and we'll +# use this as a template of a `config.toml` to write out which preserves +# all the various comments and whatnot. +# +# Note that the `target` section is handled separately as we'll duplicate it +# per configure dtarget, so there's a bit of special handling for that here. +sections = {} +cur_section = None +sections[None] = [] +section_order = [None] +targets = {} + +for line in open(rust_dir + '/config.toml.example').read().split("\n"): + if line.startswith('['): + cur_section = line[1:-1] + if cur_section.startswith('target'): + cur_section = 'target' + elif '.' in cur_section: + raise RuntimeError("don't know how to deal with section: {}".format(cur_section)) + sections[cur_section] = [line] + section_order.append(cur_section) + else: + sections[cur_section].append(line) + +# Fill out the `targets` array by giving all configured targets a copy of the +# `target` section we just loaded from the example config +configured_targets = [build()] +if 'build' in config: + if 'host' in config['build']: + configured_targets += config['build']['host'] + if 'target' in config['build']: + configured_targets += config['build']['target'] +if 'target' in config: + for target in config['target']: + configured_targets.append(target) +for target in configured_targets: + targets[target] = sections['target'][:] + targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", target) + +# Here we walk through the constructed configuration we have from the parsed +# command line arguemnts. 
We then apply each piece of configuration by +# basically just doing a `sed` to change the various configuration line to what +# we've got configure. +def to_toml(value): + if isinstance(value, bool): + if value: + return "true" + else: + return "false" + elif isinstance(value, list): + return '[' + ', '.join(map(to_toml, value)) + ']' + elif isinstance(value, str): + return "'" + value + "'" + else: + raise 'no toml' + +def configure_section(lines, config): + for key in config: + value = config[key] + found = False + for i, line in enumerate(lines): + if not line.startswith('#' + key + ' = '): + continue + found = True + lines[i] = "{} = {}".format(key, to_toml(value)) + break + if not found: + raise RuntimeError("failed to find config line for {}".format(key)) + +for section_key in config: + section_config = config[section_key] + if not section_key in sections: + raise RuntimeError("config key {} not in sections".format(key)) + + if section_key == 'target': + for target in section_config: + configure_section(targets[target], section_config[target]) + else: + configure_section(sections[section_key], section_config) + +# Now that we've built up our `config.toml`, write it all out in the same +# order that we read it in. +p("") +p("writing `config.toml` in current directory") +with open('config.toml', 'w') as f: + for section in section_order: + if section == 'target': + for target in targets: + for line in targets[target]: + f.write(line + "\n") + else: + for line in sections[section]: + f.write(line + "\n") + +with open('Makefile', 'w') as f: + contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in') + contents = open(contents).read() + contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/') + contents = contents.replace("$(CFG_PYTHON)", sys.executable) + f.write(contents) + +# Finally, clean up with a bit of a help message +relpath = os.path.dirname(__file__) +if relpath == '': + relpath = '.' 
+ +p("") +p("run `python {}/x.py --help`".format(relpath)) +p("") diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index bfcfb5f9a37f8..2e243b022c314 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -724,6 +724,9 @@ impl Step for Src { let dst_src = dst.join("rust"); t!(fs::create_dir_all(&dst_src)); + let src_files = [ + "src/Cargo.lock", + ]; // This is the reduced set of paths which will become the rust-src component // (essentially libstd and all of its path dependencies) let std_src_dirs = [ @@ -754,11 +757,14 @@ impl Step for Src { "src/libprofiler_builtins", ]; let std_src_dirs_exclude = [ - "src/compiler-rt/test", + "src/libcompiler_builtins/compiler-rt/test", "src/jemalloc/test/unit", ]; copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src); + for file in src_files.iter() { + copy(&build.src.join(file), &dst_src.join(file)); + } // Create source tarball in rust-installer format let mut cmd = rust_installer(builder); @@ -822,6 +828,7 @@ impl Step for PlainSourceTarball { "RELEASES.md", "configure", "x.py", + "config.toml.example", ]; let src_dirs = [ "man", @@ -1081,8 +1088,14 @@ impl Step for Rls { .arg("--output-dir").arg(&distdir(build)) .arg("--non-installed-overlay").arg(&overlay) .arg(format!("--package-name={}-{}", name, target)) - .arg("--component-name=rls") .arg("--legacy-manifest-dirs=rustlib,cargo"); + + if build.config.channel == "nightly" { + cmd.arg("--component-name=rls"); + } else { + cmd.arg("--component-name=rls-preview"); + } + build.run(&mut cmd); distdir(build).join(format!("{}-{}.tar.gz", name, target)) } diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 86f5346bea1fb..b9a52a66793df 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -669,11 +669,6 @@ impl Step for ErrorIndex { let build = builder.build; let target = self.target; - builder.ensure(compile::Rustc { - compiler: builder.compiler(0, build.build), - target, - }); - println!("Documenting error index ({})", target); let out = build.doc_out(target); t!(fs::create_dir_all(&out)); diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index a84d43d3deede..7546d7fd4f07a 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -136,7 +136,7 @@ To learn more about a subcommand, run `./x.py -h`"); None => { // No subcommand -- show the general usage and subcommand help println!("{}\n", subcommand_help); - process::exit(0); + process::exit(1); } }; diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 55358f2ffcb73..f21b382619d0a 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -123,7 +123,6 @@ extern crate build_helper; extern crate serde_derive; #[macro_use] extern crate lazy_static; -extern crate serde; extern crate serde_json; extern crate cmake; extern crate filetime; @@ -718,7 +717,7 @@ impl Build { fn force_use_stage1(&self, compiler: Compiler, target: Interned) -> bool { !self.config.full_bootstrap && compiler.stage >= 2 && - self.hosts.iter().any(|h| *h == target) + (self.hosts.iter().any(|h| *h == target) || target == self.build) } /// Returns the directory that OpenSSL artifacts are compiled into if diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index 9410927824cc1..67495b891f80d 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -8,8 +8,6 @@ # option. This file may not be copied, modified, or distributed # except according to those terms. 
-include config.mk - ifdef VERBOSE Q := BOOTSTRAP_ARGS := -v diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 0a307e72bf61d..8173903c03440 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -407,6 +407,7 @@ impl Step for Openssl { "i686-unknown-freebsd" => "BSD-x86-elf", "i686-unknown-linux-gnu" => "linux-elf", "i686-unknown-linux-musl" => "linux-elf", + "i686-unknown-netbsd" => "BSD-x86-elf", "mips-unknown-linux-gnu" => "linux-mips32", "mips64-unknown-linux-gnuabi64" => "linux64-mips64", "mips64el-unknown-linux-gnuabi64" => "linux64-mips64", diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index d798e8de3dffa..b129aa113a030 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -120,6 +120,7 @@ fn prepare_tool_cargo( } cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel); + cargo.env("CFG_VERSION", build.rust_version()); let info = GitInfo::new(&build.config, &dir); if let Some(sha) = info.sha() { @@ -198,7 +199,7 @@ tool!( Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::Libstd; CargoTest, "src/tools/cargotest", "cargotest", Mode::Libstd; Compiletest, "src/tools/compiletest", "compiletest", Mode::Libtest; - BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Librustc; + BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Libstd; RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::Libstd; RustInstaller, "src/tools/rust-installer", "rust-installer", Mode::Libstd; ); @@ -340,6 +341,44 @@ impl Step for Cargo { } } +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Clippy { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for Clippy { + type Output = PathBuf; + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/clippy") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Clippy { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + // Clippy depends on procedural macros (serde), which requires a full host + // compiler to be available, so we need to depend on that. 
+ builder.ensure(compile::Rustc { + compiler: self.compiler, + target: builder.build.build, + }); + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "clippy", + mode: Mode::Librustc, + }) + } +} + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Rls { pub compiler: Compiler, diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 092fb04637ba7..234d937823fb5 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -279,7 +279,7 @@ pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> { ptr::null_mut()); let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; - let mut db = data.as_mut_ptr() + let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER; let buf = &mut (*db).ReparseTarget as *mut _; let mut i = 0; diff --git a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile index 786f59eb9f761..f16dd9809981e 100644 --- a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile +++ b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile @@ -18,6 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu +ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --set rust.ignore-git=false ENV SCRIPT python2.7 ../x.py test distcheck ENV DIST_SRC 1 diff --git a/src/doc/unstable-book/src/language-features/generators.md b/src/doc/unstable-book/src/language-features/generators.md new file mode 100644 index 0000000000000..7a559a7bec866 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/generators.md @@ -0,0 +1,245 @@ +# `generators` + +The tracking issue for this feature is: [#43122] + +[#43122]: https://github.com/rust-lang/rust/issues/43122 + +------------------------ + +The `generators` feature gate in Rust allows you to define generator or +coroutine literals. A generator is a "resumable function" that syntactically +resembles a closure but compiles to much different semantics in the compiler +itself. The primary feature of a generator is that it can be suspended during +execution to be resumed at a later date. Generators use the `yield` keyword to +"return", and then the caller can `resume` a generator to resume execution just +after the `yield` keyword. + +Generators are an extra-unstable feature in the compiler right now. Added in +[RFC 2033] they're mostly intended right now as a information/constraint +gathering phase. The intent is that experimentation can happen on the nightly +compiler before actual stabilization. A further RFC will be required to +stabilize generators/coroutines and will likely contain at least a few small +tweaks to the overall design. + +[RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 + +A syntactical example of a generator is: + +```rust +#![feature(generators, generator_trait)] + +use std::ops::{Generator, GeneratorState}; + +fn main() { + let mut generator = || { + yield 1; + return "foo" + }; + + match generator.resume() { + GeneratorState::Yielded(1) => {} + _ => panic!("unexpected value from resume"), + } + match generator.resume() { + GeneratorState::Complete("foo") => {} + _ => panic!("unexpected value from resume"), + } +} +``` + +Generators are closure-like literals which can contain a `yield` statement. The +`yield` statement takes an optional expression of a value to yield out of the +generator. All generator literals implement the `Generator` trait in the +`std::ops` module. 
The `Generator` trait has one main method, `resume`, which +resumes execution of the generator at the previous suspension point. + +An example of the control flow of generators is that the following example +prints all numbers in order: + +```rust +#![feature(generators, generator_trait)] + +use std::ops::Generator; + +fn main() { + let mut generator = || { + println!("2"); + yield; + println!("4"); + }; + + println!("1"); + generator.resume(); + println!("3"); + generator.resume(); + println!("5"); +} +``` + +At this time the main intended use case of generators is an implementation +primitive for async/await syntax, but generators will likely be extended to +ergonomic implementations of iterators and other primitives in the future. +Feedback on the design and usage is always appreciated! + +### The `Generator` trait + +The `Generator` trait in `std::ops` currently looks like: + +``` +# #![feature(generator_trait)] +# use std::ops::GeneratorState; + +pub trait Generator { + type Yield; + type Return; + fn resume(&mut self) -> GeneratorState; +} +``` + +The `Generator::Yield` type is the type of values that can be yielded with the +`yield` statement. The `Generator::Return` type is the returned type of the +generator. This is typically the last expression in a generator's definition or +any value passed to `return` in a generator. The `resume` function is the entry +point for executing the `Generator` itself. + +The return value of `resume`, `GeneratorState`, looks like: + +``` +pub enum GeneratorState { + Yielded(Y), + Complete(R), +} +``` + +The `Yielded` variant indicates that the generator can later be resumed. This +corresponds to a `yield` point in a generator. The `Complete` variant indicates +that the generator is complete and cannot be resumed again. Calling `resume` +after a generator has returned `Complete` will likely result in a panic of the +program. + +### Closure-like semantics + +The closure-like syntax for generators alludes to the fact that they also have +closure-like semantics. Namely: + +* When created, a generator executes no code. A closure literal does not + actually execute any of the closure's code on construction, and similarly a + generator literal does not execute any code inside the generator when + constructed. + +* Generators can capture outer variables by reference or by move, and this can + be tweaked with the `move` keyword at the beginning of the closure. Like + closures all generators will have an implicit environment which is inferred by + the compiler. Outer variables can be moved into a generator for use as the + generator progresses. + +* Generator literals produce a value with a unique type which implements the + `std::ops::Generator` trait. This allows actual execution of the generator + through the `Generator::resume` method as well as also naming it in return + types and such. + +* Traits like `Send` and `Sync` are automatically implemented for a `Generator` + depending on the captured variables of the environment. Unlike closures though + generators also depend on variables live across suspension points. This means + that although the ambient environment may be `Send` or `Sync`, the generator + itself may not be due to internal variables live across `yield` points being + not-`Send` or not-`Sync`. Note, though, that generators, like closures, do + not implement traits like `Copy` or `Clone` automatically. + +* Whenever a generator is dropped it will drop all captured environment + variables. 
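To make the capture rules above concrete, here is a small sketch (using the same unstable `resume` API shown earlier) that moves a `String` into a generator and keeps it live across the `yield` point, so it becomes part of the generator's saved state:

```rust
#![feature(generators, generator_trait)]

use std::ops::{Generator, GeneratorState};

fn main() {
    let message = String::from("captured");

    // `move` transfers ownership of `message` into the generator's
    // environment, just as it would for a closure. Because `message` is
    // still used after the `yield`, it is saved across the suspension point
    // as part of the generator's state.
    let mut generator = move || {
        yield message.len();
        return message
    };

    match generator.resume() {
        GeneratorState::Yielded(8) => {}
        _ => panic!("unexpected value from resume"),
    }
    match generator.resume() {
        GeneratorState::Complete(ref s) if s == "captured" => {}
        _ => panic!("unexpected value from resume"),
    }
}
```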
+ +Note that unlike closures generators at this time cannot take any arguments. +That is, generators must always look like `|| { ... }`. This restriction may be +lifted at a future date, the design is ongoing! + +### Generators as state machines + +In the compiler generators are currently compiled as state machines. Each +`yield` expression will correspond to a different state that stores all live +variables over that suspension point. Resumption of a generator will dispatch on +the current state and then execute internally until a `yield` is reached, at +which point all state is saved off in the generator and a value is returned. + +Let's take a look at an example to see what's going on here: + +```rust +#![feature(generators, generator_trait)] + +use std::ops::Generator; + +fn main() { + let ret = "foo"; + let mut generator = move || { + yield 1; + return ret + }; + + generator.resume(); + generator.resume(); +} +``` + +This generator literal will compile down to something similar to: + +```rust +#![feature(generators, generator_trait)] + +use std::ops::{Generator, GeneratorState}; + +fn main() { + let ret = "foo"; + let mut generator = { + enum __Generator { + Start(&'static str), + Yield1(&'static str), + Done, + } + + impl Generator for __Generator { + type Yield = i32; + type Return = &'static str; + + fn resume(&mut self) -> GeneratorState { + use std::mem; + match mem::replace(self, __Generator::Done) { + __Generator::Start(s) => { + *self = __Generator::Yield1(s); + GeneratorState::Yielded(1) + } + + __Generator::Yield1(s) => { + *self = __Generator::Done; + GeneratorState::Complete(s) + } + + __Generator::Done => { + panic!("generator resumed after completion") + } + } + } + } + + __Generator::Start(ret) + }; + + generator.resume(); + generator.resume(); +} +``` + +Notably here we can see that the compiler is generating a fresh type, +`__Generator` in this case. This type has a number of states (represented here +as an `enum`) corresponding to each of the conceptual states of the generator. +At the beginning we're closing over our outer variable `foo` and then that +variable is also live over the `yield` point, so it's stored in both states. + +When the generator starts it'll immediately yield 1, but it saves off its state +just before it does so indicating that it has reached the yield point. Upon +resuming again we'll execute the `return ret` which returns the `Complete` +state. + +Here we can also note that the `Done` state, if resumed, panics immediately as +it's invalid to resume a completed generator. It's also worth noting that this +is just a rough desugaring, not a normative specification for what the compiler +does. diff --git a/src/doc/unstable-book/src/language-features/match-beginning-vert.md b/src/doc/unstable-book/src/language-features/match-beginning-vert.md new file mode 100644 index 0000000000000..f0a51af7fd1c8 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/match-beginning-vert.md @@ -0,0 +1,23 @@ +# `match_beginning_vert` + +The tracking issue for this feature is [#44101]. 
+ +With this feature enabled, you are allowed to add a '|' to the beginning of a +match arm: + +```rust +#![feature(match_beginning_vert)] + +enum Foo { A, B, C } + +fn main() { + let x = Foo::A; + match x { + | Foo::A + | Foo::B => println!("AB"), + | Foo::C => println!("C"), + } +} +``` + +[#44101]: https://github.com/rust-lang/rust/issues/44101 \ No newline at end of file diff --git a/src/doc/unstable-book/src/library-features/splice.md b/src/doc/unstable-book/src/library-features/splice.md index ca7f78a8f79e5..dae4475257a02 100644 --- a/src/doc/unstable-book/src/library-features/splice.md +++ b/src/doc/unstable-book/src/library-features/splice.md @@ -18,7 +18,6 @@ let mut s = String::from("α is alpha, β is beta"); let beta_offset = s.find('β').unwrap_or(s.len()); // Replace the range up until the β from the string -let t: String = s.splice(..beta_offset, "Α is capital alpha; ").collect(); -assert_eq!(t, "α is alpha, "); +s.splice(..beta_offset, "Α is capital alpha; "); assert_eq!(s, "Α is capital alpha; β is beta"); ``` \ No newline at end of file diff --git a/src/liballoc/allocator.rs b/src/liballoc/allocator.rs index f14f27023249e..5a9cd82b9d119 100644 --- a/src/liballoc/allocator.rs +++ b/src/liballoc/allocator.rs @@ -464,6 +464,29 @@ impl fmt::Display for CannotReallocInPlace { /// * if a layout `k` fits a memory block (denoted by `ptr`) /// currently allocated via an allocator `a`, then it is legal to /// use that layout to deallocate it, i.e. `a.dealloc(ptr, k);`. +/// +/// # Unsafety +/// +/// The `Alloc` trait is an `unsafe` trait for a number of reasons, and +/// implementors must ensure that they adhere to these contracts: +/// +/// * Pointers returned from allocation functions must point to valid memory and +/// retain their validity until at least the instance of `Alloc` is dropped +/// itself. +/// +/// * It's undefined behavior if global allocators unwind. This restriction may +/// be lifted in the future, but currently a panic from any of these +/// functions may lead to memory unsafety. Note that as of the time of this +/// writing allocators *not* intending to be global allocators can still panic +/// in their implementation without violating memory safety. +/// +/// * `Layout` queries and calculations in general must be correct. Callers of +/// this trait are allowed to rely on the contracts defined on each method, +/// and implementors must ensure such contracts remain true. +/// +/// Note that this list may get tweaked over time as clarifications are made in +/// the future. Additionally global allocators may gain unique requirements for +/// how to safely implement one in the future as well. 
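As an illustration of the contracts listed above, here is a minimal, hypothetical sketch of an implementor that simply forwards to an inner allocator. It assumes the unstable `allocator_api` items of this era (`std::heap::{Alloc, AllocErr, Layout}`) and implements only the two required methods; treat the paths and signatures as assumptions, not a normative example.

```rust
#![feature(allocator_api)]
#![allow(dead_code)]

use std::heap::{Alloc, AllocErr, Layout};

/// Hypothetical adaptor that forwards every request to an inner allocator
/// while counting live allocations. Forwarding keeps the contracts above
/// intact: every pointer handed out comes from `inner`, so it stays valid
/// exactly as long as `inner` keeps it valid.
struct Counting<A> {
    inner: A,
    live: usize,
}

unsafe impl<A: Alloc> Alloc for Counting<A> {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        let ptr = self.inner.alloc(layout)?;
        self.live += 1;
        Ok(ptr)
    }

    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        self.inner.dealloc(ptr, layout);
        self.live -= 1;
    }
}

fn main() {}
```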
pub unsafe trait Alloc { // (Note: existing allocators have unspecified but well-defined diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 82aac4dbf6334..d9f4a2217db4e 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -66,7 +66,7 @@ use core::hash::{self, Hash}; use core::iter::FusedIterator; use core::marker::{self, Unsize}; use core::mem; -use core::ops::{CoerceUnsized, Deref, DerefMut}; +use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState}; use core::ops::{BoxPlace, Boxed, InPlace, Place, Placer}; use core::ptr::{self, Unique}; use core::convert::From; @@ -784,3 +784,14 @@ impl AsMut for Box { &mut **self } } + +#[unstable(feature = "generator_trait", issue = "43122")] +impl Generator for Box + where T: Generator + ?Sized +{ + type Yield = T::Yield; + type Return = T::Return; + fn resume(&mut self) -> GeneratorState { + (**self).resume() + } +} diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index 820f2d958d9a8..b2bd9d7d8fafa 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -27,24 +27,32 @@ pub mod __core { extern "Rust" { #[allocator] + #[rustc_allocator_nounwind] fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8; #[cold] + #[rustc_allocator_nounwind] fn __rust_oom(err: *const u8) -> !; + #[rustc_allocator_nounwind] fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize); + #[rustc_allocator_nounwind] fn __rust_usable_size(layout: *const u8, min: *mut usize, max: *mut usize); + #[rustc_allocator_nounwind] fn __rust_realloc(ptr: *mut u8, old_size: usize, old_align: usize, new_size: usize, new_align: usize, err: *mut u8) -> *mut u8; + #[rustc_allocator_nounwind] fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8; + #[rustc_allocator_nounwind] fn __rust_alloc_excess(size: usize, align: usize, excess: *mut usize, err: *mut u8) -> *mut u8; + #[rustc_allocator_nounwind] fn __rust_realloc_excess(ptr: *mut u8, old_size: usize, old_align: usize, @@ -52,11 +60,13 @@ extern "Rust" { new_align: usize, excess: *mut usize, err: *mut u8) -> *mut u8; + #[rustc_allocator_nounwind] fn __rust_grow_in_place(ptr: *mut u8, old_size: usize, old_align: usize, new_size: usize, new_align: usize) -> u8; + #[rustc_allocator_nounwind] fn __rust_shrink_in_place(ptr: *mut u8, old_size: usize, old_align: usize, diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 2d41ed648102b..dc64a787ae9ae 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -80,6 +80,7 @@ #![cfg_attr(not(test), feature(core_float))] #![cfg_attr(not(test), feature(exact_size_is_empty))] #![cfg_attr(not(test), feature(slice_rotate))] +#![cfg_attr(not(test), feature(generator_trait))] #![cfg_attr(test, feature(rand, test))] #![feature(allow_internal_unstable)] #![feature(box_patterns)] @@ -105,6 +106,7 @@ #![feature(pattern)] #![feature(placement_in_syntax)] #![feature(placement_new_protocol)] +#![feature(rustc_attrs)] #![feature(shared)] #![feature(slice_get_slice)] #![feature(slice_patterns)] diff --git a/src/liballoc/macros.rs b/src/liballoc/macros.rs index 763f04fcd0dcd..43ebaa4fbdb3f 100644 --- a/src/liballoc/macros.rs +++ b/src/liballoc/macros.rs @@ -8,12 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -/// Creates a `Vec` containing the arguments. +/// Creates a [`Vec`] containing the arguments. /// /// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. 
/// There are two forms of this macro: /// -/// - Create a `Vec` containing a given list of elements: +/// - Create a [`Vec`] containing a given list of elements: /// /// ``` /// let v = vec![1, 2, 3]; @@ -22,7 +22,7 @@ /// assert_eq!(v[2], 3); /// ``` /// -/// - Create a `Vec` from a given element and size: +/// - Create a [`Vec`] from a given element and size: /// /// ``` /// let v = vec![1; 3]; @@ -30,14 +30,17 @@ /// ``` /// /// Note that unlike array expressions this syntax supports all elements -/// which implement `Clone` and the number of elements doesn't have to be +/// which implement [`Clone`] and the number of elements doesn't have to be /// a constant. /// -/// This will use `clone()` to duplicate an expression, so one should be careful +/// This will use `clone` to duplicate an expression, so one should be careful /// using this with types having a nonstandard `Clone` implementation. For /// example, `vec![Rc::new(1); 5]` will create a vector of five references /// to the same boxed integer value, not five references pointing to independently /// boxed integers. +/// +/// [`Vec`]: ../std/vec/struct.Vec.html +/// [`Clone`]: ../std/clone/trait.Clone.html #[cfg(not(test))] #[macro_export] #[stable(feature = "rust1", since = "1.0.0")] @@ -67,10 +70,22 @@ macro_rules! vec { ($($x:expr,)*) => (vec![$($x),*]) } -/// Use the syntax described in `std::fmt` to create a value of type `String`. -/// See [`std::fmt`][fmt] for more information. +/// Creates a `String` using interpolation of runtime expressions. +/// +/// The first argument `format!` recieves is a format string. This must be a string +/// literal. The power of the formatting string is in the `{}`s contained. +/// +/// Additional parameters passed to `format!` replace the `{}`s within the +/// formatting string in the order given unless named or positional parameters +/// are used, see [`std::fmt`][fmt] for more information. +/// +/// A common use for `format!` is concatenation and interpolation of strings. +/// The same convention is used with [`print!`] and [`write!`] macros, +/// depending on the intended destination of the string. /// /// [fmt]: ../std/fmt/index.html +/// [`print!`]: ../std/macro.print.html +/// [`write!`]: ../std/macro.write.html /// /// # Panics /// diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index b1919c7c968c9..ddb23b2ef37bf 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -1392,11 +1392,11 @@ impl String { } /// Creates a splicing iterator that removes the specified range in the string, - /// replaces with the given string, and yields the removed chars. - /// The given string doesn’t need to be the same length as the range. + /// and replaces it with the given string. + /// The given string doesn't need to be the same length as the range. /// - /// Note: The element range is removed when the [`Splice`] is dropped, - /// even if the iterator is not consumed until the end. + /// Note: Unlike [`Vec::splice`], the replacement happens eagerly, and this + /// method does not return the removed chars. /// /// # Panics /// @@ -1404,7 +1404,7 @@ impl String { /// boundary, or if they're out of bounds. 
/// /// [`char`]: ../../std/primitive.char.html - /// [`Splice`]: ../../std/string/struct.Splice.html + /// [`Vec::splice`]: ../../std/vec/struct.Vec.html#method.splice /// /// # Examples /// @@ -1416,45 +1416,32 @@ impl String { /// let beta_offset = s.find('β').unwrap_or(s.len()); /// /// // Replace the range up until the β from the string - /// let t: String = s.splice(..beta_offset, "Α is capital alpha; ").collect(); - /// assert_eq!(t, "α is alpha, "); + /// s.splice(..beta_offset, "Α is capital alpha; "); /// assert_eq!(s, "Α is capital alpha; β is beta"); /// ``` #[unstable(feature = "splice", reason = "recently added", issue = "32310")] - pub fn splice<'a, 'b, R>(&'a mut self, range: R, replace_with: &'b str) -> Splice<'a, 'b> + pub fn splice(&mut self, range: R, replace_with: &str) where R: RangeArgument { // Memory safety // // The String version of Splice does not have the memory safety issues // of the vector version. The data is just plain bytes. - // Because the range removal happens in Drop, if the Splice iterator is leaked, - // the removal will not happen. - let len = self.len(); - let start = match range.start() { - Included(&n) => n, - Excluded(&n) => n + 1, - Unbounded => 0, + + match range.start() { + Included(&n) => assert!(self.is_char_boundary(n)), + Excluded(&n) => assert!(self.is_char_boundary(n + 1)), + Unbounded => {}, }; - let end = match range.end() { - Included(&n) => n + 1, - Excluded(&n) => n, - Unbounded => len, + match range.end() { + Included(&n) => assert!(self.is_char_boundary(n + 1)), + Excluded(&n) => assert!(self.is_char_boundary(n)), + Unbounded => {}, }; - // Take out two simultaneous borrows. The &mut String won't be accessed - // until iteration is over, in Drop. - let self_ptr = self as *mut _; - // slicing does the appropriate bounds checks - let chars_iter = self[start..end].chars(); - - Splice { - start, - end, - iter: chars_iter, - string: self_ptr, - replace_with, - } + unsafe { + self.as_mut_vec() + }.splice(range, replace_with.bytes()); } /// Converts this `String` into a [`Box`]`<`[`str`]`>`. @@ -2241,61 +2228,3 @@ impl<'a> DoubleEndedIterator for Drain<'a> { #[unstable(feature = "fused", issue = "35602")] impl<'a> FusedIterator for Drain<'a> {} - -/// A splicing iterator for `String`. -/// -/// This struct is created by the [`splice()`] method on [`String`]. See its -/// documentation for more. 
-/// -/// [`splice()`]: struct.String.html#method.splice -/// [`String`]: struct.String.html -#[derive(Debug)] -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -pub struct Splice<'a, 'b> { - /// Will be used as &'a mut String in the destructor - string: *mut String, - /// Start of part to remove - start: usize, - /// End of part to remove - end: usize, - /// Current remaining range to remove - iter: Chars<'a>, - replace_with: &'b str, -} - -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -unsafe impl<'a, 'b> Sync for Splice<'a, 'b> {} -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -unsafe impl<'a, 'b> Send for Splice<'a, 'b> {} - -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -impl<'a, 'b> Drop for Splice<'a, 'b> { - fn drop(&mut self) { - unsafe { - let vec = (*self.string).as_mut_vec(); - vec.splice(self.start..self.end, self.replace_with.bytes()); - } - } -} - -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -impl<'a, 'b> Iterator for Splice<'a, 'b> { - type Item = char; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[unstable(feature = "splice", reason = "recently added", issue = "32310")] -impl<'a, 'b> DoubleEndedIterator for Splice<'a, 'b> { - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back() - } -} diff --git a/src/liballoc/tests/string.rs b/src/liballoc/tests/string.rs index f5c124c6b4458..6aba18ddf49ff 100644 --- a/src/liballoc/tests/string.rs +++ b/src/liballoc/tests/string.rs @@ -442,9 +442,8 @@ fn test_drain() { #[test] fn test_splice() { let mut s = "Hello, world!".to_owned(); - let t: String = s.splice(7..12, "世界").collect(); + s.splice(7..12, "世界"); assert_eq!(s, "Hello, 世界!"); - assert_eq!(t, "world"); } #[test] @@ -457,12 +456,10 @@ fn test_splice_char_boundary() { #[test] fn test_splice_inclusive_range() { let mut v = String::from("12345"); - let t: String = v.splice(2...3, "789").collect(); + v.splice(2...3, "789"); assert_eq!(v, "127895"); - assert_eq!(t, "34"); - let t2: String = v.splice(1...2, "A").collect(); + v.splice(1...2, "A"); assert_eq!(v, "1A895"); - assert_eq!(t2, "27"); } #[test] @@ -482,24 +479,15 @@ fn test_splice_inclusive_out_of_bounds() { #[test] fn test_splice_empty() { let mut s = String::from("12345"); - let t: String = s.splice(1..2, "").collect(); + s.splice(1..2, ""); assert_eq!(s, "1345"); - assert_eq!(t, "2"); } #[test] fn test_splice_unbounded() { let mut s = String::from("12345"); - let t: String = s.splice(.., "").collect(); + s.splice(.., ""); assert_eq!(s, ""); - assert_eq!(t, "12345"); -} - -#[test] -fn test_splice_forget() { - let mut s = String::from("12345"); - ::std::mem::forget(s.splice(2..4, "789")); - assert_eq!(s, "12345"); } #[test] diff --git a/src/liballoc/vec_deque.rs b/src/liballoc/vec_deque.rs index bf9069200297d..00def2a1eac49 100644 --- a/src/liballoc/vec_deque.rs +++ b/src/liballoc/vec_deque.rs @@ -459,10 +459,12 @@ impl VecDeque { /// /// `i` and `j` may be equal. /// - /// Fails if there is no element with either index. - /// /// Element at index 0 is the front of the queue. /// + /// # Panics + /// + /// Panics if either index is out of bounds. 
+ /// /// # Examples /// /// ``` diff --git a/src/liballoc_jemalloc/build.rs b/src/liballoc_jemalloc/build.rs index 41193f6a41f89..c9bea2ecf5e9f 100644 --- a/src/liballoc_jemalloc/build.rs +++ b/src/liballoc_jemalloc/build.rs @@ -111,9 +111,11 @@ fn main() { cmd.arg("--with-jemalloc-prefix=je_"); } - if cfg!(feature = "debug-jemalloc") { - cmd.arg("--enable-debug"); - } + // FIXME: building with jemalloc assertions is currently broken. + // See . + //if cfg!(feature = "debug") { + // cmd.arg("--enable-debug"); + //} cmd.arg(format!("--host={}", build_helper::gnu_target(&target))); cmd.arg(format!("--build={}", build_helper::gnu_target(&host))); diff --git a/src/libcore/array.rs b/src/libcore/array.rs index 4b7706bac300f..6a7926fecde38 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -123,14 +123,6 @@ macro_rules! array_impls { } } - #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(stage0)] - impl Clone for [T; $N] { - fn clone(&self) -> [T; $N] { - *self - } - } - #[stable(feature = "rust1", since = "1.0.0")] impl Hash for [T; $N] { fn hash(&self, state: &mut H) { diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index 2dc51718b97b2..826420a0c001c 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -88,7 +88,7 @@ /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] -#[cfg_attr(not(stage0), lang = "clone")] +#[lang = "clone"] pub trait Clone : Sized { /// Returns a copy of the value. /// @@ -130,45 +130,3 @@ pub struct AssertParamIsClone { _field: ::marker::PhantomData reason = "deriving hack, should not be public", issue = "0")] pub struct AssertParamIsCopy { _field: ::marker::PhantomData } - -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -impl<'a, T: ?Sized> Clone for &'a T { - /// Returns a shallow copy of the reference. - #[inline] - fn clone(&self) -> &'a T { *self } -} - -macro_rules! clone_impl { - ($t:ty) => { - #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(stage0)] - impl Clone for $t { - /// Returns a deep copy of the value. - #[inline] - fn clone(&self) -> $t { *self } - } - } -} - -clone_impl! { isize } -clone_impl! { i8 } -clone_impl! { i16 } -clone_impl! { i32 } -clone_impl! { i64 } -clone_impl! { i128 } - -clone_impl! { usize } -clone_impl! { u8 } -clone_impl! { u16 } -clone_impl! { u32 } -clone_impl! { u64 } -clone_impl! { u128 } - -clone_impl! { f32 } -clone_impl! { f64 } - -clone_impl! { ! } -clone_impl! { () } -clone_impl! { bool } -clone_impl! { char } diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs index 4ca303dee43f2..8ea388fddf884 100644 --- a/src/libcore/fmt/num.rs +++ b/src/libcore/fmt/num.rs @@ -242,7 +242,7 @@ macro_rules! impl_Display { // decode last 1 or 2 chars if n < 10 { curr -= 1; - *buf_ptr.offset(curr) = (n as u8) + 48; + *buf_ptr.offset(curr) = (n as u8) + b'0'; } else { let d1 = n << 1; curr -= 2; diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index ad776c8605ac8..607f6f3701799 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1343,4 +1343,81 @@ extern "rust-intrinsic" { /// on MSVC it's `*mut [usize; 2]`. For more information see the compiler's /// source as well as std's catch implementation. pub fn try(f: fn(*mut u8), data: *mut u8, local_ptr: *mut u8) -> i32; + + /// Computes the byte offset that needs to be applied to `ptr` in order to + /// make it aligned to `align`. + /// If it is not possible to align `ptr`, the implementation returns + /// `usize::max_value()`. 
+ /// + /// There are no guarantees whatsover that offsetting the pointer will not + /// overflow or go beyond the allocation that `ptr` points into. + /// It is up to the caller to ensure that the returned offset is correct + /// in all terms other than alignment. + /// + /// # Examples + /// + /// Accessing adjacent `u8` as `u16` + /// + /// ``` + /// # #![feature(core_intrinsics)] + /// # fn foo(n: usize) { + /// # use std::intrinsics::align_offset; + /// # use std::mem::align_of; + /// # unsafe { + /// let x = [5u8, 6u8, 7u8, 8u8, 9u8]; + /// let ptr = &x[n] as *const u8; + /// let offset = align_offset(ptr as *const (), align_of::()); + /// if offset < x.len() - n - 1 { + /// let u16_ptr = ptr.offset(offset as isize) as *const u16; + /// assert_ne!(*u16_ptr, 500); + /// } else { + /// // while the pointer can be aligned via `offset`, it would point + /// // outside the allocation + /// } + /// # } } + /// ``` + #[cfg(not(stage0))] + pub fn align_offset(ptr: *const (), align: usize) -> usize; +} + +#[cfg(stage0)] +/// Computes the byte offset that needs to be applied to `ptr` in order to +/// make it aligned to `align`. +/// If it is not possible to align `ptr`, the implementation returns +/// `usize::max_value()`. +/// +/// There are no guarantees whatsover that offsetting the pointer will not +/// overflow or go beyond the allocation that `ptr` points into. +/// It is up to the caller to ensure that the returned offset is correct +/// in all terms other than alignment. +/// +/// # Examples +/// +/// Accessing adjacent `u8` as `u16` +/// +/// ``` +/// # #![feature(core_intrinsics)] +/// # fn foo(n: usize) { +/// # use std::intrinsics::align_offset; +/// # use std::mem::align_of; +/// # unsafe { +/// let x = [5u8, 6u8, 7u8, 8u8, 9u8]; +/// let ptr = &x[n] as *const u8; +/// let offset = align_offset(ptr as *const (), align_of::()); +/// if offset < x.len() - n - 1 { +/// let u16_ptr = ptr.offset(offset as isize) as *const u16; +/// assert_ne!(*u16_ptr, 500); +/// } else { +/// // while the pointer can be aligned via `offset`, it would point +/// // outside the allocation +/// } +/// # } } +/// ``` +pub unsafe fn align_offset(ptr: *const (), align: usize) -> usize { + let offset = ptr as usize % align; + if offset == 0 { + 0 + } else { + align - offset + } } diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index 684b81a27f82e..ce183389a8083 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -8,16 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#[macro_export] -// This stability attribute is totally useless. -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -macro_rules! __rust_unstable_column { - () => { - column!() - } -} - /// Entry point of thread panic, for details, see std::macros #[macro_export] #[allow_internal_unstable] @@ -62,11 +52,13 @@ macro_rules! panic { /// # Custom Messages /// /// This macro has a second form, where a custom panic message can -/// be provided with or without arguments for formatting. +/// be provided with or without arguments for formatting. See [`std::fmt`] +/// for syntax for this form. /// /// [`panic!`]: macro.panic.html /// [`debug_assert!`]: macro.debug_assert.html -/// [testing]: ../book/first-edition/testing.html +/// [testing]: ../book/second-edition/ch11-01-writing-tests.html#checking-results-with-the-assert-macro +/// [`std::fmt`]: ../std/fmt/index.html /// /// # Examples /// @@ -252,13 +244,15 @@ macro_rules! 
debug_assert { /// On panic, this macro will print the values of the expressions with their /// debug representations. /// -/// Unlike `assert_eq!`, `debug_assert_eq!` statements are only enabled in non +/// Unlike [`assert_eq!`], `debug_assert_eq!` statements are only enabled in non /// optimized builds by default. An optimized build will omit all /// `debug_assert_eq!` statements unless `-C debug-assertions` is passed to the /// compiler. This makes `debug_assert_eq!` useful for checks that are too /// expensive to be present in a release build but may be helpful during /// development. /// +/// [`assert_eq!`]: ../std/macro.assert_eq.html +/// /// # Examples /// /// ``` @@ -277,13 +271,15 @@ macro_rules! debug_assert_eq { /// On panic, this macro will print the values of the expressions with their /// debug representations. /// -/// Unlike `assert_ne!`, `debug_assert_ne!` statements are only enabled in non +/// Unlike [`assert_ne!`], `debug_assert_ne!` statements are only enabled in non /// optimized builds by default. An optimized build will omit all /// `debug_assert_ne!` statements unless `-C debug-assertions` is passed to the /// compiler. This makes `debug_assert_ne!` useful for checks that are too /// expensive to be present in a release build but may be helpful during /// development. /// +/// [`assert_ne!`]: ../std/macro.assert_ne.html +/// /// # Examples /// /// ``` @@ -300,10 +296,9 @@ macro_rules! debug_assert_ne { /// Helper macro for reducing boilerplate code for matching `Result` together /// with converting downstream errors. /// -/// Prefer using `?` syntax to `try!`. `?` is built in to the language and is -/// more succinct than `try!`. It is the standard method for error propagation. +/// The `?` operator was added to replace `try!` and should be used instead. /// -/// `try!` matches the given `Result`. In case of the `Ok` variant, the +/// `try!` matches the given [`Result`]. In case of the `Ok` variant, the /// expression has the value of the wrapped value. /// /// In case of the `Err` variant, it retrieves the inner error. `try!` then @@ -312,7 +307,9 @@ macro_rules! debug_assert_ne { /// error is then immediately returned. /// /// Because of the early return, `try!` can only be used in functions that -/// return `Result`. +/// return [`Result`]. +/// +/// [`Result`]: ../std/result/enum.Result.html /// /// # Examples /// @@ -331,12 +328,19 @@ macro_rules! debug_assert_ne { /// } /// } /// +/// // The prefered method of quick returning Errors +/// fn write_to_file_question() -> Result<(), MyError> { +/// let mut file = File::create("my_best_friends.txt")?; +/// Ok(()) +/// } +/// +/// // The previous method of quick returning Errors /// fn write_to_file_using_try() -> Result<(), MyError> { /// let mut file = try!(File::create("my_best_friends.txt")); /// try!(file.write_all(b"This is a list of my best friends.")); -/// println!("I wrote to the file"); /// Ok(()) /// } +/// /// // This is equivalent to: /// fn write_to_file_using_match() -> Result<(), MyError> { /// let mut file = try!(File::create("my_best_friends.txt")); @@ -344,7 +348,6 @@ macro_rules! debug_assert_ne { /// Ok(v) => v, /// Err(e) => return Err(From::from(e)), /// } -/// println!("I wrote to the file"); /// Ok(()) /// } /// ``` @@ -365,7 +368,7 @@ macro_rules! try { /// formatted according to the specified format string and the result will be passed to the writer. 
/// The writer may be any value with a `write_fmt` method; generally this comes from an /// implementation of either the [`std::fmt::Write`] or the [`std::io::Write`] trait. The macro -/// returns whatever the 'write_fmt' method returns; commonly a [`std::fmt::Result`], or an +/// returns whatever the `write_fmt` method returns; commonly a [`std::fmt::Result`], or an /// [`io::Result`]. /// /// See [`std::fmt`] for more information on the format string syntax. /// @@ -470,10 +473,20 @@ macro_rules! writeln { /// * Loops that dynamically terminate. /// * Iterators that dynamically terminate. /// +/// If the determination that the code is unreachable proves incorrect, the +/// program immediately terminates with a [`panic!`]. The function [`unreachable`], +/// which belongs to the [`std::intrinsics`] module, informs the compiler to +/// optimize the code out of the release version entirely. +/// +/// [`panic!`]: ../std/macro.panic.html +/// [`unreachable`]: ../std/intrinsics/fn.unreachable.html +/// [`std::intrinsics`]: ../std/intrinsics/index.html +/// /// # Panics /// -/// This will always [panic!](macro.panic.html) +/// This will always [`panic!`] /// +/// [`panic!`]: ../std/macro.panic.html /// # Examples /// /// Match arms: @@ -516,13 +529,18 @@ macro_rules! unreachable { }); } -/// A standardized placeholder for marking unfinished code. It panics with the -/// message `"not yet implemented"` when executed. +/// A standardized placeholder for marking unfinished code. +/// +/// It panics with the message `"not yet implemented"` when executed. /// /// This can be useful if you are prototyping and are just looking to have your /// code typecheck, or if you're implementing a trait that requires multiple /// methods, and you're only planning on using one of them. /// +/// # Panics +/// +/// This macro always panics. +/// /// # Examples /// /// Here's an example of some in-progress code. We have a trait `Foo`: diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index 4b866cab1eae2..a57d835528980 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -188,26 +188,6 @@ pub fn forget<T>(t: T) { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -pub fn size_of<T>() -> usize { - unsafe { intrinsics::size_of::<T>() } -} - -/// Returns the size of a type in bytes. -/// -/// More specifically, this is the offset in bytes between successive -/// items of the same type, including alignment padding. -/// -/// # Examples -/// -/// ``` -/// use std::mem; -/// -/// assert_eq!(4, mem::size_of::<i32>()); -/// ``` -#[inline] -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(not(stage0))] pub const fn size_of<T>() -> usize { unsafe { intrinsics::size_of::<T>() } } @@ -299,29 +279,6 @@ pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -pub fn align_of<T>() -> usize { - unsafe { intrinsics::min_align_of::<T>() } -} - -/// Returns the [ABI]-required minimum alignment of a type. -/// -/// Every reference to a value of the type `T` must be a multiple of this number. -/// -/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
-/// -/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface -/// -/// # Examples -/// -/// ``` -/// use std::mem; -/// -/// assert_eq!(4, mem::align_of::()); -/// ``` -#[inline] -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(not(stage0))] pub const fn align_of() -> usize { unsafe { intrinsics::min_align_of::() } } @@ -754,39 +711,39 @@ pub unsafe fn transmute_copy(src: &T) -> U { /// Opaque type representing the discriminant of an enum. /// /// See the `discriminant` function in this module for more information. -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] pub struct Discriminant(u64, PhantomData<*const T>); // N.B. These trait implementations cannot be derived because we don't want any bounds on T. -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl Copy for Discriminant {} -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl clone::Clone for Discriminant { fn clone(&self) -> Self { *self } } -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl cmp::PartialEq for Discriminant { fn eq(&self, rhs: &Self) -> bool { self.0 == rhs.0 } } -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl cmp::Eq for Discriminant {} -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl hash::Hash for Discriminant { fn hash(&self, state: &mut H) { self.0.hash(state); } } -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] impl fmt::Debug for Discriminant { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_tuple("Discriminant") @@ -811,7 +768,6 @@ impl fmt::Debug for Discriminant { /// the actual data: /// /// ``` -/// #![feature(discriminant_value)] /// use std::mem; /// /// enum Foo { A(&'static str), B(i32), C(i32) } @@ -820,7 +776,7 @@ impl fmt::Debug for Discriminant { /// assert!(mem::discriminant(&Foo::B(1)) == mem::discriminant(&Foo::B(2))); /// assert!(mem::discriminant(&Foo::B(3)) != mem::discriminant(&Foo::C(3))); /// ``` -#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +#[stable(feature = "discriminant_value", since = "1.22.0")] pub fn discriminant(v: &T) -> Discriminant { unsafe { Discriminant(intrinsics::discriminant_value(v), PhantomData) diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs index 5068375368466..43d38926c9718 100644 --- a/src/libcore/num/f32.rs +++ b/src/libcore/num/f32.rs @@ -10,8 +10,6 @@ //! 
Operations and constants for 32-bits floats (`f32` type) -#![cfg_attr(stage0, allow(overflowing_literals))] - #![stable(feature = "rust1", since = "1.0.0")] use intrinsics; diff --git a/src/libcore/ops/generator.rs b/src/libcore/ops/generator.rs new file mode 100644 index 0000000000000..798c182bc6e38 --- /dev/null +++ b/src/libcore/ops/generator.rs @@ -0,0 +1,131 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/// The result of a generator resumption. +/// +/// This enum is returned from the `Generator::resume` method and indicates the +/// possible return values of a generator. Currently this corresponds to either +/// a suspension point (`Yielded`) or a termination point (`Complete`). +#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] +#[cfg_attr(not(stage0), lang = "generator_state")] +#[unstable(feature = "generator_trait", issue = "43122")] +pub enum GeneratorState { + /// The generator suspended with a value. + /// + /// This state indicates that a generator has been suspended, and typically + /// corresponds to a `yield` statement. The value provided in this variant + /// corresponds to the expression passed to `yield` and allows generators to + /// provide a value each time they yield. + Yielded(Y), + + /// The generator completed with a return value. + /// + /// This state indicates that a generator has finished execution with the + /// provided value. Once a generator has returned `Complete` it is + /// considered a programmer error to call `resume` again. + Complete(R), +} + +/// The trait implemented by builtin generator types. +/// +/// Generators, also commonly referred to as coroutines, are currently an +/// experimental language feature in Rust. Added in [RFC 2033] generators are +/// currently intended to primarily provide a building block for async/await +/// syntax but will likely extend to also providing an ergonomic definition for +/// iterators and other primitives. +/// +/// The syntax and semantics for generators is unstable and will require a +/// further RFC for stabilization. At this time, though, the syntax is +/// closure-like: +/// +/// ```rust +/// #![feature(generators, generator_trait)] +/// +/// use std::ops::{Generator, GeneratorState}; +/// +/// fn main() { +/// let mut generator = || { +/// yield 1; +/// return "foo" +/// }; +/// +/// match generator.resume() { +/// GeneratorState::Yielded(1) => {} +/// _ => panic!("unexpected return from resume"), +/// } +/// match generator.resume() { +/// GeneratorState::Complete("foo") => {} +/// _ => panic!("unexpected return from resume"), +/// } +/// } +/// ``` +/// +/// More documentation of generators can be found in the unstable book. +/// +/// [RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 +#[cfg_attr(not(stage0), lang = "generator")] +#[unstable(feature = "generator_trait", issue = "43122")] +#[fundamental] +pub trait Generator { + /// The type of value this generator yields. + /// + /// This associated type corresponds to the `yield` expression and the + /// values which are allowed to be returned each time a generator yields. + /// For example an iterator-as-a-generator would likely have this type as + /// `T`, the type being iterated over. 
+ type Yield; + + /// The type of value this generator returns. + /// + /// This corresponds to the type returned from a generator either with a + /// `return` statement or implicitly as the last expression of a generator + /// literal. For example futures would use this as `Result` as it + /// represents a completed future. + type Return; + + /// Resumes the execution of this generator. + /// + /// This function will resume execution of the generator or start execution + /// if it hasn't already. This call will return back into the generator's + /// last suspension point, resuming execution from the latest `yield`. The + /// generator will continue executing until it either yields or returns, at + /// which point this function will return. + /// + /// # Return value + /// + /// The `GeneratorState` enum returned from this function indicates what + /// state the generator is in upon returning. If the `Yielded` variant is + /// returned then the generator has reached a suspension point and a value + /// has been yielded out. Generators in this state are available for + /// resumption at a later point. + /// + /// If `Complete` is returned then the generator has completely finished + /// with the value provided. It is invalid for the generator to be resumed + /// again. + /// + /// # Panics + /// + /// This function may panic if it is called after the `Complete` variant has + /// been returned previously. While generator literals in the language are + /// guaranteed to panic on resuming after `Complete`, this is not guaranteed + /// for all implementations of the `Generator` trait. + fn resume(&mut self) -> GeneratorState; +} + +#[unstable(feature = "generator_trait", issue = "43122")] +impl<'a, T> Generator for &'a mut T + where T: Generator + ?Sized +{ + type Yield = T::Yield; + type Return = T::Return; + fn resume(&mut self) -> GeneratorState { + (**self).resume() + } +} diff --git a/src/libcore/ops/mod.rs b/src/libcore/ops/mod.rs index b5e6912b10d6e..8975b680ca7fa 100644 --- a/src/libcore/ops/mod.rs +++ b/src/libcore/ops/mod.rs @@ -159,6 +159,7 @@ mod bit; mod deref; mod drop; mod function; +mod generator; mod index; mod place; mod range; @@ -196,6 +197,9 @@ pub use self::range::{RangeInclusive, RangeToInclusive}; #[unstable(feature = "try_trait", issue = "42327")] pub use self::try::Try; +#[unstable(feature = "generator_trait", issue = "43122")] +pub use self::generator::{Generator, GeneratorState}; + #[unstable(feature = "placement_new_protocol", issue = "27779")] pub use self::place::{Place, Placer, InPlace, Boxed, BoxPlace}; diff --git a/src/libcore/ops/try.rs b/src/libcore/ops/try.rs index 78326c3e6391a..e788b66a1ec82 100644 --- a/src/libcore/ops/try.rs +++ b/src/libcore/ops/try.rs @@ -15,8 +15,24 @@ /// extracting those success or failure values from an existing instance and /// creating a new instance from a success or failure value. 
#[unstable(feature = "try_trait", issue = "42327")] -#[rustc_on_unimplemented = "the `?` operator can only be used in a function that returns `Result` \ - (or another type that implements `{Try}`)"] +#[cfg_attr(stage0, + rustc_on_unimplemented = "the `?` operator can only be used in a \ + function that returns `Result` \ + (or another type that implements `{Try}`)")] +#[cfg_attr(not(stage0), + rustc_on_unimplemented( + on(all( + any(from_method="from_error", from_method="from_ok"), + from_desugaring="?"), + message="the `?` operator can only be used in a \ + function that returns `Result` \ + (or another type that implements `{Try}`)", + label="cannot use the `?` operator in a function that returns `{Self}`"), + on(all(from_method="into_result", from_desugaring="?"), + message="the `?` operator can only be applied to values \ + that implement `{Try}`", + label="the `?` operator cannot be applied to type `{Self}`") +))] pub trait Try { /// The type of this value when viewed as successful. #[unstable(feature = "try_trait", issue = "42327")] diff --git a/src/libcore/option.rs b/src/libcore/option.rs index aecf2ee9325ee..138e04c7737e0 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -774,6 +774,26 @@ impl<'a, T: Clone> Option<&'a T> { } } +impl<'a, T: Clone> Option<&'a mut T> { + /// Maps an `Option<&mut T>` to an `Option` by cloning the contents of the + /// option. + /// + /// # Examples + /// + /// ``` + /// #![feature(option_ref_mut_cloned)] + /// let mut x = 12; + /// let opt_x = Some(&mut x); + /// assert_eq!(opt_x, Some(&mut 12)); + /// let cloned = opt_x.cloned(); + /// assert_eq!(cloned, Some(12)); + /// ``` + #[unstable(feature = "option_ref_mut_cloned", issue = "43738")] + pub fn cloned(self) -> Option { + self.map(|t| t.clone()) + } +} + impl Option { /// Returns the contained value or a default /// diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 63e9373e93606..2e42e0dfd550d 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -875,36 +875,9 @@ pub fn eq(a: *const T, b: *const T) -> bool { a == b } -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -impl Clone for *const T { - #[inline] - fn clone(&self) -> *const T { - *self - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -impl Clone for *mut T { - #[inline] - fn clone(&self) -> *mut T { - *self - } -} - // Impls for function pointers macro_rules! fnptr_impls_safety_abi { ($FnTy: ty, $($Arg: ident),*) => { - #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(stage0)] - impl Clone for $FnTy { - #[inline] - fn clone(&self) -> Self { - *self - } - } - #[stable(feature = "fnptr_impls", since = "1.4.0")] impl PartialEq for $FnTy { #[inline] diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index a5f6e49a53b4f..62e84c9ebd017 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -23,6 +23,7 @@ use fmt; use iter::{Map, Cloned, FusedIterator}; use slice::{self, SliceIndex}; use mem; +use intrinsics::align_offset; pub mod pattern; @@ -1468,7 +1469,10 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { // When the pointer is aligned, read 2 words of data per iteration // until we find a word containing a non-ascii byte. 
let ptr = v.as_ptr(); - let align = (ptr as usize + index) & (usize_bytes - 1); + let align = unsafe { + // the offset is safe, because `index` is guaranteed inbounds + align_offset(ptr.offset(index as isize) as *const (), usize_bytes) + }; if align == 0 { while index < blocks_end { unsafe { diff --git a/src/libcore/tuple.rs b/src/libcore/tuple.rs index 555843dba418e..4c5370194fecb 100644 --- a/src/libcore/tuple.rs +++ b/src/libcore/tuple.rs @@ -21,14 +21,6 @@ macro_rules! tuple_impls { } )+) => { $( - #[stable(feature = "rust1", since = "1.0.0")] - #[cfg(stage0)] - impl<$($T:Clone),+> Clone for ($($T,)+) { - fn clone(&self) -> ($($T,)+) { - ($(self.$idx.clone(),)+) - } - } - #[stable(feature = "rust1", since = "1.0.0")] impl<$($T:PartialEq),+> PartialEq for ($($T,)+) where last_type!($($T,)+): ?Sized { #[inline] diff --git a/src/liblibc b/src/liblibc index 2a5b50b7f7f53..04a5e75c99dc9 160000 --- a/src/liblibc +++ b/src/liblibc @@ -1 +1 @@ -Subproject commit 2a5b50b7f7f539a0fd201331d6c1e0534aa332f5 +Subproject commit 04a5e75c99dc92afab490c38fcbbeac9b4bc8104 diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml index 1b5141773a967..cfd83e348a8e2 100644 --- a/src/libproc_macro/Cargo.toml +++ b/src/libproc_macro/Cargo.toml @@ -10,3 +10,4 @@ crate-type = ["dylib"] [dependencies] syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } diff --git a/src/libproc_macro/diagnostic.rs b/src/libproc_macro/diagnostic.rs new file mode 100644 index 0000000000000..c39aec896e6b4 --- /dev/null +++ b/src/libproc_macro/diagnostic.rs @@ -0,0 +1,134 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use Span; + +use rustc_errors as rustc; + +/// An enum representing a diagnostic level. +#[unstable(feature = "proc_macro", issue = "38356")] +#[derive(Copy, Clone, Debug)] +pub enum Level { + /// An error. + Error, + /// A warning. + Warning, + /// A note. + Note, + /// A help message. + Help, + #[doc(hidden)] + __Nonexhaustive, +} + +/// A structure representing a diagnostic message and associated children +/// messages. +#[unstable(feature = "proc_macro", issue = "38356")] +#[derive(Clone, Debug)] +pub struct Diagnostic { + level: Level, + message: String, + span: Option, + children: Vec +} + +macro_rules! diagnostic_child_methods { + ($spanned:ident, $regular:ident, $level:expr) => ( + /// Add a new child diagnostic message to `self` with the level + /// identified by this methods name with the given `span` and `message`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn $spanned>(mut self, span: Span, message: T) -> Diagnostic { + self.children.push(Diagnostic::spanned(span, $level, message)); + self + } + + /// Add a new child diagnostic message to `self` with the level + /// identified by this method's name with the given `message`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn $regular>(mut self, message: T) -> Diagnostic { + self.children.push(Diagnostic::new($level, message)); + self + } + ) +} + +impl Diagnostic { + /// Create a new diagnostic with the given `level` and `message`. 
+ #[unstable(feature = "proc_macro", issue = "38356")] + pub fn new>(level: Level, message: T) -> Diagnostic { + Diagnostic { + level: level, + message: message.into(), + span: None, + children: vec![] + } + } + + /// Create a new diagnostic with the given `level` and `message` pointing to + /// the given `span`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn spanned>(span: Span, level: Level, message: T) -> Diagnostic { + Diagnostic { + level: level, + message: message.into(), + span: Some(span), + children: vec![] + } + } + + diagnostic_child_methods!(span_error, error, Level::Error); + diagnostic_child_methods!(span_warning, warning, Level::Warning); + diagnostic_child_methods!(span_note, note, Level::Note); + diagnostic_child_methods!(span_help, help, Level::Help); + + /// Returns the diagnostic `level` for `self`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn level(&self) -> Level { + self.level + } + + /// Emit the diagnostic. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn emit(self) { + ::__internal::with_sess(move |(sess, _)| { + let handler = &sess.span_diagnostic; + let level = __internal::level_to_internal_level(self.level); + let mut diag = rustc::DiagnosticBuilder::new(handler, level, &*self.message); + + if let Some(span) = self.span { + diag.set_span(span.0); + } + + for child in self.children { + let span = child.span.map(|s| s.0); + let level = __internal::level_to_internal_level(child.level); + diag.sub(level, &*child.message, span); + } + + diag.emit(); + }); + } +} + +#[unstable(feature = "proc_macro_internals", issue = "27812")] +#[doc(hidden)] +pub mod __internal { + use super::{Level, rustc}; + + pub fn level_to_internal_level(level: Level) -> rustc::Level { + match level { + Level::Error => rustc::Level::Error, + Level::Warning => rustc::Level::Warning, + Level::Note => rustc::Level::Note, + Level::Help => rustc::Level::Help, + Level::__Nonexhaustive => unreachable!("Level::__Nonexhaustive") + } + } +} diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 3f425c24a9143..e403e2a00c9e3 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -42,6 +42,12 @@ #[macro_use] extern crate syntax; extern crate syntax_pos; +extern crate rustc_errors; + +mod diagnostic; + +#[unstable(feature = "proc_macro", issue = "38356")] +pub use diagnostic::{Diagnostic, Level}; use std::{ascii, fmt, iter}; use std::str::FromStr; @@ -89,10 +95,7 @@ impl FromStr for TokenStream { // notify the expansion info that it is unhygienic let mark = Mark::fresh(mark); mark.set_expn_info(expn_info); - let span = syntax_pos::Span { - ctxt: SyntaxContext::empty().apply_mark(mark), - ..call_site - }; + let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)); let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span)); Ok(__internal::token_stream_wrap(stream)) }) @@ -177,10 +180,10 @@ pub struct Span(syntax_pos::Span); #[unstable(feature = "proc_macro", issue = "38356")] impl Default for Span { fn default() -> Span { - ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span { - ctxt: SyntaxContext::empty().apply_mark(mark), - ..mark.expn_info().unwrap().call_site - })) + ::__internal::with_sess(|(_, mark)| { + let call_site = mark.expn_info().unwrap().call_site; + Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark))) + }) } } @@ -191,12 +194,28 @@ pub fn quote_span(span: Span) -> TokenStream { TokenStream(quote::Quote::quote(&span.0)) } +macro_rules! 
diagnostic_method { + ($name:ident, $level:expr) => ( + /// Create a new `Diagnostic` with the given `message` at the span + /// `self`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn $name>(self, message: T) -> Diagnostic { + Diagnostic::spanned(self, $level, message) + } + ) +} + impl Span { /// The span of the invocation of the current procedural macro. #[unstable(feature = "proc_macro", issue = "38356")] pub fn call_site() -> Span { ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site)) } + + diagnostic_method!(error, Level::Error); + diagnostic_method!(warning, Level::Warning); + diagnostic_method!(note, Level::Note); + diagnostic_method!(help, Level::Help); } /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`). @@ -570,7 +589,7 @@ impl TokenTree { }).into(); }, TokenNode::Term(symbol) => { - let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt }; + let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() }; let token = if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) }; return TokenTree::Token(self.span.0, token).into(); diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index fa6b78045ffad..ff2c36416bfd2 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -10,9 +10,8 @@ use rustc_data_structures::graph; use cfg::*; -use middle::region::CodeExtent; +use middle::region; use ty::{self, TyCtxt}; -use syntax::ast; use syntax::ptr::P; use hir::{self, PatKind}; @@ -30,13 +29,13 @@ struct CFGBuilder<'a, 'tcx: 'a> { #[derive(Copy, Clone)] struct BlockScope { - block_expr_id: ast::NodeId, // id of breakable block expr node + block_expr_id: hir::ItemLocalId, // id of breakable block expr node break_index: CFGIndex, // where to go on `break` } #[derive(Copy, Clone)] struct LoopScope { - loop_id: ast::NodeId, // id of loop/while node + loop_id: hir::ItemLocalId, // id of loop/while node continue_index: CFGIndex, // where to go on a `loop` break_index: CFGIndex, // where to go on a `break` } @@ -70,6 +69,7 @@ pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cfg_builder.add_contained_edge(body_exit, fn_exit); let CFGBuilder { graph, .. 
} = cfg_builder; CFG { + owner_def_id, graph, entry, exit: fn_exit, @@ -79,10 +79,10 @@ pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex { if blk.targeted_by_break { - let expr_exit = self.add_ast_node(blk.id, &[]); + let expr_exit = self.add_ast_node(blk.hir_id.local_id, &[]); self.breakable_block_scopes.push(BlockScope { - block_expr_id: blk.id, + block_expr_id: blk.hir_id.local_id, break_index: expr_exit, }); @@ -104,21 +104,22 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let expr_exit = self.opt_expr(&blk.expr, stmts_exit); - self.add_ast_node(blk.id, &[expr_exit]) + self.add_ast_node(blk.hir_id.local_id, &[expr_exit]) } } fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex { + let hir_id = self.tcx.hir.node_to_hir_id(stmt.node.id()); match stmt.node { - hir::StmtDecl(ref decl, id) => { + hir::StmtDecl(ref decl, _) => { let exit = self.decl(&decl, pred); - self.add_ast_node(id, &[exit]) + self.add_ast_node(hir_id.local_id, &[exit]) } - hir::StmtExpr(ref expr, id) | - hir::StmtSemi(ref expr, id) => { + hir::StmtExpr(ref expr, _) | + hir::StmtSemi(ref expr, _) => { let exit = self.expr(&expr, pred); - self.add_ast_node(id, &[exit]) + self.add_ast_node(hir_id.local_id, &[exit]) } } } @@ -140,31 +141,31 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { PatKind::Path(_) | PatKind::Lit(..) | PatKind::Range(..) | - PatKind::Wild => self.add_ast_node(pat.id, &[pred]), + PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]), PatKind::Box(ref subpat) | PatKind::Ref(ref subpat, _) | PatKind::Binding(.., Some(ref subpat)) => { let subpat_exit = self.pat(&subpat, pred); - self.add_ast_node(pat.id, &[subpat_exit]) + self.add_ast_node(pat.hir_id.local_id, &[subpat_exit]) } PatKind::TupleStruct(_, ref subpats, _) | PatKind::Tuple(ref subpats, _) => { let pats_exit = self.pats_all(subpats.iter(), pred); - self.add_ast_node(pat.id, &[pats_exit]) + self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } PatKind::Struct(_, ref subpats, _) => { let pats_exit = self.pats_all(subpats.iter().map(|f| &f.node.pat), pred); - self.add_ast_node(pat.id, &[pats_exit]) + self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } PatKind::Slice(ref pre, ref vec, ref post) => { let pre_exit = self.pats_all(pre.iter(), pred); let vec_exit = self.pats_all(vec.iter(), pre_exit); let post_exit = self.pats_all(post.iter(), vec_exit); - self.add_ast_node(pat.id, &[post_exit]) + self.add_ast_node(pat.hir_id.local_id, &[post_exit]) } } } @@ -180,7 +181,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { match expr.node { hir::ExprBlock(ref blk) => { let blk_exit = self.block(&blk, pred); - self.add_ast_node(expr.id, &[blk_exit]) + self.add_ast_node(expr.hir_id.local_id, &[blk_exit]) } hir::ExprIf(ref cond, ref then, None) => { @@ -200,7 +201,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // let cond_exit = self.expr(&cond, pred); // 1 let then_exit = self.expr(&then, cond_exit); // 2 - self.add_ast_node(expr.id, &[cond_exit, then_exit]) // 3,4 + self.add_ast_node(expr.hir_id.local_id, &[cond_exit, then_exit]) // 3,4 } hir::ExprIf(ref cond, ref then, Some(ref otherwise)) => { @@ -221,7 +222,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let cond_exit = self.expr(&cond, pred); // 1 let then_exit = self.expr(&then, cond_exit); // 2 let else_exit = self.expr(&otherwise, cond_exit); // 3 - self.add_ast_node(expr.id, &[then_exit, else_exit]) // 4, 5 + self.add_ast_node(expr.hir_id.local_id, &[then_exit, else_exit]) // 4, 5 } 
hir::ExprWhile(ref cond, ref body, _) => { @@ -245,12 +246,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let loopback = self.add_dummy_node(&[pred]); // 1 // Create expr_exit without pred (cond_exit) - let expr_exit = self.add_ast_node(expr.id, &[]); // 3 + let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 3 // The LoopScope needs to be on the loop_scopes stack while evaluating the // condition and the body of the loop (both can break out of the loop) self.loop_scopes.push(LoopScope { - loop_id: expr.id, + loop_id: expr.hir_id.local_id, continue_index: loopback, break_index: expr_exit }); @@ -282,9 +283,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // may cause additional edges. let loopback = self.add_dummy_node(&[pred]); // 1 - let expr_exit = self.add_ast_node(expr.id, &[]); // 2 + let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2 self.loop_scopes.push(LoopScope { - loop_id: expr.id, + loop_id: expr.hir_id.local_id, continue_index: loopback, break_index: expr_exit, }); @@ -295,7 +296,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } hir::ExprMatch(ref discr, ref arms, _) => { - self.match_(expr.id, &discr, &arms, pred) + self.match_(expr.hir_id.local_id, &discr, &arms, pred) } hir::ExprBinary(op, ref l, ref r) if op.node.is_lazy() => { @@ -315,30 +316,30 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // let l_exit = self.expr(&l, pred); // 1 let r_exit = self.expr(&r, l_exit); // 2 - self.add_ast_node(expr.id, &[l_exit, r_exit]) // 3,4 + self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4 } hir::ExprRet(ref v) => { let v_exit = self.opt_expr(v, pred); - let b = self.add_ast_node(expr.id, &[v_exit]); + let b = self.add_ast_node(expr.hir_id.local_id, &[v_exit]); self.add_returning_edge(expr, b); self.add_unreachable_node() } hir::ExprBreak(destination, ref opt_expr) => { let v = self.opt_expr(opt_expr, pred); - let (scope_id, break_dest) = + let (target_scope, break_dest) = self.find_scope_edge(expr, destination, ScopeCfKind::Break); - let b = self.add_ast_node(expr.id, &[v]); - self.add_exiting_edge(expr, b, scope_id, break_dest); + let b = self.add_ast_node(expr.hir_id.local_id, &[v]); + self.add_exiting_edge(expr, b, target_scope, break_dest); self.add_unreachable_node() } hir::ExprAgain(destination) => { - let (scope_id, cont_dest) = + let (target_scope, cont_dest) = self.find_scope_edge(expr, destination, ScopeCfKind::Continue); - let a = self.add_ast_node(expr.id, &[pred]); - self.add_exiting_edge(expr, a, scope_id, cont_dest); + let a = self.add_ast_node(expr.hir_id.local_id, &[pred]); + self.add_exiting_edge(expr, a, target_scope, cont_dest); self.add_unreachable_node() } @@ -389,6 +390,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { hir::ExprUnary(_, ref e) | hir::ExprField(ref e, _) | hir::ExprTupField(ref e, _) | + hir::ExprYield(ref e) | hir::ExprRepeat(ref e, _) => { self.straightline(expr, pred, Some(&**e).into_iter()) } @@ -396,7 +398,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { hir::ExprInlineAsm(_, ref outputs, ref inputs) => { let post_outputs = self.exprs(outputs.iter().map(|e| &*e), pred); let post_inputs = self.exprs(inputs.iter().map(|e| &*e), post_outputs); - self.add_ast_node(expr.id, &[post_inputs]) + self.add_ast_node(expr.hir_id.local_id, &[post_inputs]) } hir::ExprClosure(..) | @@ -443,10 +445,10 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { //! 
Handles case of an expression that evaluates `subexprs` in order let subexprs_exit = self.exprs(subexprs, pred); - self.add_ast_node(expr.id, &[subexprs_exit]) + self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit]) } - fn match_(&mut self, id: ast::NodeId, discr: &hir::Expr, + fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr, arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex { // The CFG for match expression is quite complex, so no ASCII // art for it (yet). @@ -551,8 +553,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_node(CFGNodeData::Dummy, preds) } - fn add_ast_node(&mut self, id: ast::NodeId, preds: &[CFGIndex]) -> CFGIndex { - assert!(id != ast::DUMMY_NODE_ID); + fn add_ast_node(&mut self, id: hir::ItemLocalId, preds: &[CFGIndex]) -> CFGIndex { self.add_node(CFGNodeData::AST(id), preds) } @@ -578,15 +579,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn add_exiting_edge(&mut self, from_expr: &hir::Expr, from_index: CFGIndex, - scope_id: ast::NodeId, + target_scope: region::Scope, to_index: CFGIndex) { let mut data = CFGEdgeData { exiting_scopes: vec![] }; - let mut scope = CodeExtent::Misc(from_expr.id); - let target_scope = CodeExtent::Misc(scope_id); - let region_maps = self.tcx.region_maps(self.owner_def_id); + let mut scope = region::Scope::Node(from_expr.hir_id.local_id); + let region_scope_tree = self.tcx.region_scope_tree(self.owner_def_id); while scope != target_scope { - data.exiting_scopes.push(scope.node_id()); - scope = region_maps.encl_scope(scope); + data.exiting_scopes.push(scope.item_local_id()); + scope = region_scope_tree.encl_scope(scope); } self.graph.add_edge(from_index, to_index, data); } @@ -606,13 +606,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn find_scope_edge(&self, expr: &hir::Expr, destination: hir::Destination, - scope_cf_kind: ScopeCfKind) -> (ast::NodeId, CFGIndex) { + scope_cf_kind: ScopeCfKind) -> (region::Scope, CFGIndex) { match destination.target_id { hir::ScopeTarget::Block(block_expr_id) => { for b in &self.breakable_block_scopes { - if b.block_expr_id == block_expr_id { - return (block_expr_id, match scope_cf_kind { + if b.block_expr_id == self.tcx.hir.node_to_hir_id(block_expr_id).local_id { + let scope_id = self.tcx.hir.node_to_hir_id(block_expr_id).local_id; + return (region::Scope::Node(scope_id), match scope_cf_kind { ScopeCfKind::Break => b.break_index, ScopeCfKind::Continue => bug!("can't continue to block"), }); @@ -622,8 +623,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(loop_id)) => { for l in &self.loop_scopes { - if l.loop_id == loop_id { - return (loop_id, match scope_cf_kind { + if l.loop_id == self.tcx.hir.node_to_hir_id(loop_id).local_id { + let scope_id = self.tcx.hir.node_to_hir_id(loop_id).local_id; + return (region::Scope::Node(scope_id), match scope_cf_kind { ScopeCfKind::Break => l.break_index, ScopeCfKind::Continue => l.continue_index, }); diff --git a/src/librustc/cfg/graphviz.rs b/src/librustc/cfg/graphviz.rs index 944b77dbf01fd..9241240caf043 100644 --- a/src/librustc/cfg/graphviz.rs +++ b/src/librustc/cfg/graphviz.rs @@ -15,40 +15,48 @@ use graphviz as dot; use graphviz::IntoCow; -use syntax::ast; - -use hir::map as hir_map; use cfg; +use hir; +use ty::TyCtxt; pub type Node<'a> = (cfg::CFGIndex, &'a cfg::CFGNode); pub type Edge<'a> = &'a cfg::CFGEdge; -pub struct LabelledCFG<'a, 'hir: 'a> { - pub hir_map: &'a hir_map::Map<'hir>, +pub struct LabelledCFG<'a, 'tcx: 'a> { + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub cfg: &'a cfg::CFG, pub name: String, /// 
`labelled_edges` controls whether we emit labels on the edges pub labelled_edges: bool, } -fn replace_newline_with_backslash_l(s: String) -> String { - // Replacing newlines with \\l causes each line to be left-aligned, - // improving presentation of (long) pretty-printed expressions. - if s.contains("\n") { - let mut s = s.replace("\n", "\\l"); - // Apparently left-alignment applies to the line that precedes - // \l, not the line that follows; so, add \l at end of string - // if not already present, ensuring last line gets left-aligned - // as well. - let mut last_two: Vec<_> = - s.chars().rev().take(2).collect(); - last_two.reverse(); - if last_two != ['\\', 'l'] { - s.push_str("\\l"); +impl<'a, 'tcx> LabelledCFG<'a, 'tcx> { + fn local_id_to_string(&self, local_id: hir::ItemLocalId) -> String { + assert!(self.cfg.owner_def_id.is_local()); + let node_id = self.tcx.hir.hir_to_node_id(hir::HirId { + owner: self.tcx.hir.def_index_to_hir_id(self.cfg.owner_def_id.index).owner, + local_id + }); + let s = self.tcx.hir.node_to_string(node_id); + + // Replacing newlines with \\l causes each line to be left-aligned, + // improving presentation of (long) pretty-printed expressions. + if s.contains("\n") { + let mut s = s.replace("\n", "\\l"); + // Apparently left-alignment applies to the line that precedes + // \l, not the line that follows; so, add \l at end of string + // if not already present, ensuring last line gets left-aligned + // as well. + let mut last_two: Vec<_> = + s.chars().rev().take(2).collect(); + last_two.reverse(); + if last_two != ['\\', 'l'] { + s.push_str("\\l"); + } + s + } else { + s } - s - } else { - s } } @@ -66,12 +74,10 @@ impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> { dot::LabelText::LabelStr("entry".into_cow()) } else if i == self.cfg.exit { dot::LabelText::LabelStr("exit".into_cow()) - } else if n.data.id() == ast::DUMMY_NODE_ID { + } else if n.data.id() == hir::DUMMY_ITEM_LOCAL_ID { dot::LabelText::LabelStr("(dummy_node)".into_cow()) } else { - let s = self.hir_map.node_to_string(n.data.id()); - // left-aligns the lines - let s = replace_newline_with_backslash_l(s); + let s = self.local_id_to_string(n.data.id()); dot::LabelText::EscStr(s.into_cow()) } } @@ -82,15 +88,13 @@ impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> { return dot::LabelText::EscStr(label.into_cow()); } let mut put_one = false; - for (i, &node_id) in e.data.exiting_scopes.iter().enumerate() { + for (i, &id) in e.data.exiting_scopes.iter().enumerate() { if put_one { label.push_str(",\\l"); } else { put_one = true; } - let s = self.hir_map.node_to_string(node_id); - // left-aligns the lines - let s = replace_newline_with_backslash_l(s); + let s = self.local_id_to_string(id); label.push_str(&format!("exiting scope_{} {}", i, &s[..])); diff --git a/src/librustc/cfg/mod.rs b/src/librustc/cfg/mod.rs index 1473dbb1676f3..b379d3956e944 100644 --- a/src/librustc/cfg/mod.rs +++ b/src/librustc/cfg/mod.rs @@ -13,13 +13,14 @@ use rustc_data_structures::graph; use ty::TyCtxt; -use syntax::ast; use hir; +use hir::def_id::DefId; mod construct; pub mod graphviz; pub struct CFG { + pub owner_def_id: DefId, pub graph: CFGGraph, pub entry: CFGIndex, pub exit: CFGIndex, @@ -27,7 +28,7 @@ pub struct CFG { #[derive(Copy, Clone, Debug, PartialEq)] pub enum CFGNodeData { - AST(ast::NodeId), + AST(hir::ItemLocalId), Entry, Exit, Dummy, @@ -35,18 +36,18 @@ pub enum CFGNodeData { } impl CFGNodeData { - pub fn id(&self) -> ast::NodeId { + pub fn id(&self) -> hir::ItemLocalId { if let 
CFGNodeData::AST(id) = *self { id } else { - ast::DUMMY_NODE_ID + hir::DUMMY_ITEM_LOCAL_ID } } } #[derive(Debug)] pub struct CFGEdgeData { - pub exiting_scopes: Vec + pub exiting_scopes: Vec } pub type CFGIndex = graph::NodeIndex; @@ -63,7 +64,7 @@ impl CFG { construct::construct(tcx, body) } - pub fn node_is_reachable(&self, id: ast::NodeId) -> bool { + pub fn node_is_reachable(&self, id: hir::ItemLocalId) -> bool { self.graph.depth_traverse(self.entry, graph::OUTGOING) .any(|idx| self.graph.node_data(idx).id() == id) } diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 01fff60528394..c438944cf0131 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -62,6 +62,7 @@ use hir::def_id::{CrateNum, DefId}; use hir::map::DefPathHash; +use hir::HirId; use ich::Fingerprint; use ty::{TyCtxt, Instance, InstanceDef}; @@ -394,7 +395,7 @@ define_dep_nodes!( <'tcx> [] WorkProduct(WorkProductId), // Represents different phases in the compiler. - [] RegionMaps(DefId), + [] RegionScopeTree(DefId), [] Coherence, [] CoherenceInherentImplOverlapCheck, [] Resolve, @@ -434,6 +435,7 @@ define_dep_nodes!( <'tcx> [] ImplPolarity(DefId), [] ClosureKind(DefId), [] FnSignature(DefId), + [] GenSignature(DefId), [] CoerceUnsizedInfo(DefId), [] ItemVarianceConstraints(DefId), @@ -526,6 +528,9 @@ define_dep_nodes!( <'tcx> [] HasGlobalAllocator(DefId), [] ExternCrate(DefId), [] LintLevels, + [] Specializes { impl1: DefId, impl2: DefId }, + [] InScopeTraits(HirId), + [] ModuleExports(HirId), ); trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug { diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 412759e11423e..be7bb4d811413 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -688,8 +688,8 @@ See also https://doc.rust-lang.org/book/first-edition/no-stdlib.html "##, E0214: r##" -A generic type was described using parentheses rather than angle brackets. For -example: +A generic type was described using parentheses rather than angle brackets. +For example: ```compile_fail,E0214 fn main() { @@ -702,6 +702,93 @@ Parentheses are currently only used with generic types when defining parameters for `Fn`-family traits. "##, +E0230: r##" +The `#[rustc_on_unimplemented]` attribute lets you specify a custom error +message for when a particular trait isn't implemented on a type placed in a +position that needs that trait. For example, when the following code is +compiled: + +```compile_fail +#![feature(on_unimplemented)] + +fn foo>(x: T){} + +#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] +trait Index { /* ... */ } + +foo(true); // `bool` does not implement `Index` +``` + +There will be an error about `bool` not implementing `Index`, followed by a +note saying "the type `bool` cannot be indexed by `u8`". + +As you can see, you can specify type parameters in curly braces for +substitution with the actual types (using the regular format string syntax) in +a given situation. Furthermore, `{Self}` will substitute to the type (in this +case, `bool`) that we tried to use. + +This error appears when the curly braces contain an identifier which doesn't +match with any of the type parameters or the string `Self`. This might happen +if you misspelled a type parameter, or if you intended to use literal curly +braces. If it is the latter, escape the curly braces with a second curly brace +of the same type; e.g. a literal `{` is `{{`. 
+"##, + +E0231: r##" +The `#[rustc_on_unimplemented]` attribute lets you specify a custom error +message for when a particular trait isn't implemented on a type placed in a +position that needs that trait. For example, when the following code is +compiled: + +```compile_fail +#![feature(on_unimplemented)] + +fn foo>(x: T){} + +#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] +trait Index { /* ... */ } + +foo(true); // `bool` does not implement `Index` +``` + +there will be an error about `bool` not implementing `Index`, followed by a +note saying "the type `bool` cannot be indexed by `u8`". + +As you can see, you can specify type parameters in curly braces for +substitution with the actual types (using the regular format string syntax) in +a given situation. Furthermore, `{Self}` will substitute to the type (in this +case, `bool`) that we tried to use. + +This error appears when the curly braces do not contain an identifier. Please +add one of the same name as a type parameter. If you intended to use literal +braces, use `{{` and `}}` to escape them. +"##, + +E0232: r##" +The `#[rustc_on_unimplemented]` attribute lets you specify a custom error +message for when a particular trait isn't implemented on a type placed in a +position that needs that trait. For example, when the following code is +compiled: + +```compile_fail +#![feature(on_unimplemented)] + +fn foo>(x: T){} + +#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] +trait Index { /* ... */ } + +foo(true); // `bool` does not implement `Index` +``` + +there will be an error about `bool` not implementing `Index`, followed by a +note saying "the type `bool` cannot be indexed by `u8`". + +For this to work, some note must be specified. An empty attribute will not do +anything, please remove the attribute or add some helpful note for users of the +trait. +"##, + E0261: r##" When using a lifetime like `'a` in a type, it must be declared before being used. @@ -917,92 +1004,6 @@ for v in &vs { ``` "##, -E0272: r##" -The `#[rustc_on_unimplemented]` attribute lets you specify a custom error -message for when a particular trait isn't implemented on a type placed in a -position that needs that trait. For example, when the following code is -compiled: - -```compile_fail -#![feature(on_unimplemented)] - -fn foo>(x: T){} - -#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] -trait Index { /* ... */ } - -foo(true); // `bool` does not implement `Index` -``` - -There will be an error about `bool` not implementing `Index`, followed by a -note saying "the type `bool` cannot be indexed by `u8`". - -As you can see, you can specify type parameters in curly braces for -substitution with the actual types (using the regular format string syntax) in -a given situation. Furthermore, `{Self}` will substitute to the type (in this -case, `bool`) that we tried to use. - -This error appears when the curly braces contain an identifier which doesn't -match with any of the type parameters or the string `Self`. This might happen -if you misspelled a type parameter, or if you intended to use literal curly -braces. If it is the latter, escape the curly braces with a second curly brace -of the same type; e.g. a literal `{` is `{{`. -"##, - -E0273: r##" -The `#[rustc_on_unimplemented]` attribute lets you specify a custom error -message for when a particular trait isn't implemented on a type placed in a -position that needs that trait. 
For example, when the following code is -compiled: - -```compile_fail -#![feature(on_unimplemented)] - -fn foo>(x: T){} - -#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] -trait Index { /* ... */ } - -foo(true); // `bool` does not implement `Index` -``` - -there will be an error about `bool` not implementing `Index`, followed by a -note saying "the type `bool` cannot be indexed by `u8`". - -As you can see, you can specify type parameters in curly braces for -substitution with the actual types (using the regular format string syntax) in -a given situation. Furthermore, `{Self}` will substitute to the type (in this -case, `bool`) that we tried to use. - -This error appears when the curly braces do not contain an identifier. Please -add one of the same name as a type parameter. If you intended to use literal -braces, use `{{` and `}}` to escape them. -"##, - -E0274: r##" -The `#[rustc_on_unimplemented]` attribute lets you specify a custom error -message for when a particular trait isn't implemented on a type placed in a -position that needs that trait. For example, when the following code is -compiled: - -```compile_fail -#![feature(on_unimplemented)] - -fn foo>(x: T){} - -#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Idx}`"] -trait Index { /* ... */ } - -foo(true); // `bool` does not implement `Index` -``` - -there will be an error about `bool` not implementing `Index`, followed by a -note saying "the type `bool` cannot be indexed by `u8`". - -For this to work, some note must be specified. An empty attribute will not do -anything, please remove the attribute or add some helpful note for users of the -trait. -"##, E0275: r##" This error occurs when there was a recursive trait requirement that overflowed @@ -2011,6 +2012,9 @@ register_diagnostics! { // E0102, // replaced with E0282 // E0134, // E0135, +// E0272, // on_unimplemented #0 +// E0273, // on_unimplemented #1 +// E0274, // on_unimplemented #2 E0278, // requirement is not satisfied E0279, // requirement is not satisfied E0280, // requirement is not satisfied @@ -2045,4 +2049,5 @@ register_diagnostics! 
{ E0495, // cannot infer an appropriate lifetime due to conflicting requirements E0566, // conflicting representation hints E0623, // lifetime mismatch where both parameters are anonymous regions + E0628, // generators cannot have explicit arguments } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index d52cc26eaebc6..880605ee377f7 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -979,7 +979,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } - ExprClosure(_, ref function_declaration, body, _fn_decl_span) => { + ExprClosure(_, ref function_declaration, body, _fn_decl_span, _gen) => { visitor.visit_fn(FnKind::Closure(&expression.attrs), function_declaration, body, @@ -1043,6 +1043,9 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(input) } } + ExprYield(ref subexpression) => { + visitor.visit_expr(subexpression); + } } } diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index ac26cbc833dea..0f69c06c417a1 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -93,6 +93,8 @@ pub struct LoweringContext<'a> { trait_impls: BTreeMap>, trait_default_impl: BTreeMap, + is_generator: bool, + catch_scopes: Vec, loop_scopes: Vec, is_in_loop_condition: bool, @@ -145,6 +147,7 @@ pub fn lower_crate(sess: &Session, current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)], item_local_id_counters: NodeMap(), node_id_to_hir_id: IndexVec::new(), + is_generator: false, }.lower_crate(krate) } @@ -393,6 +396,7 @@ impl<'a> LoweringContext<'a> { arguments: decl.map_or(hir_vec![], |decl| { decl.inputs.iter().map(|x| self.lower_arg(x)).collect() }), + is_generator: self.is_generator, value, }; let id = body.id(); @@ -421,8 +425,7 @@ impl<'a> LoweringContext<'a> { Symbol::gensym(s) } - fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, mut span: Span) - -> Span + fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span { let mark = Mark::fresh(Mark::root()); mark.set_expn_info(codemap::ExpnInfo { @@ -434,8 +437,7 @@ impl<'a> LoweringContext<'a> { allow_internal_unsafe: false, }, }); - span.ctxt = SyntaxContext::empty().apply_mark(mark); - span + span.with_ctxt(SyntaxContext::empty().apply_mark(mark)) } fn with_catch_scope(&mut self, catch_id: NodeId, f: F) -> T @@ -453,6 +455,16 @@ impl<'a> LoweringContext<'a> { result } + fn lower_body(&mut self, decl: Option<&FnDecl>, f: F) -> hir::BodyId + where F: FnOnce(&mut LoweringContext) -> hir::Expr + { + let prev = mem::replace(&mut self.is_generator, false); + let result = f(self); + let r = self.record_body(result, decl); + self.is_generator = prev; + return r + } + fn with_loop_scope(&mut self, loop_id: NodeId, f: F) -> T where F: FnOnce(&mut LoweringContext) -> T { @@ -599,7 +611,7 @@ impl<'a> LoweringContext<'a> { TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)), TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), TyKind::Rptr(ref region, ref mt) => { - let span = Span { hi: t.span.lo, ..t.span }; + let span = t.span.with_hi(t.span.lo()); let lifetime = match *region { Some(ref lt) => self.lower_lifetime(lt), None => self.elided_lifetime(span) @@ -637,13 +649,12 @@ impl<'a> LoweringContext<'a> { }))) } TyKind::Array(ref ty, ref length) => { - let length = self.lower_expr(length); - hir::TyArray(self.lower_ty(ty), - self.record_body(length, None)) + let length 
= self.lower_body(None, |this| this.lower_expr(length)); + hir::TyArray(self.lower_ty(ty), length) } TyKind::Typeof(ref expr) => { - let expr = self.lower_expr(expr); - hir::TyTypeof(self.record_body(expr, None)) + let expr = self.lower_body(None, |this| this.lower_expr(expr)); + hir::TyTypeof(expr) } TyKind::TraitObject(ref bounds) => { let mut lifetime_bound = None; @@ -700,8 +711,7 @@ impl<'a> LoweringContext<'a> { attrs: self.lower_attrs(&v.node.attrs), data: self.lower_variant_data(&v.node.data), disr_expr: v.node.disr_expr.as_ref().map(|e| { - let e = self.lower_expr(e); - self.record_body(e, None) + self.lower_body(None, |this| this.lower_expr(e)) }), }, span: v.span, @@ -1225,7 +1235,7 @@ impl<'a> LoweringContext<'a> { name: self.lower_ident(match f.ident { Some(ident) => ident, // FIXME(jseyfried) positional field hygiene - None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt }, + None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt() }, }), vis: self.lower_visibility(&f.vis, None), ty: self.lower_ty(&f.ty), @@ -1368,21 +1378,21 @@ impl<'a> LoweringContext<'a> { hir::ItemUse(path, kind) } ItemKind::Static(ref t, m, ref e) => { - let value = self.lower_expr(e); + let value = self.lower_body(None, |this| this.lower_expr(e)); hir::ItemStatic(self.lower_ty(t), self.lower_mutability(m), - self.record_body(value, None)) + value) } ItemKind::Const(ref t, ref e) => { - let value = self.lower_expr(e); - hir::ItemConst(self.lower_ty(t), - self.record_body(value, None)) + let value = self.lower_body(None, |this| this.lower_expr(e)); + hir::ItemConst(self.lower_ty(t), value) } ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => { self.with_new_scopes(|this| { - let body = this.lower_block(body, false); - let body = this.expr_block(body, ThinVec::new()); - let body_id = this.record_body(body, Some(decl)); + let body_id = this.lower_body(Some(decl), |this| { + let body = this.lower_block(body, false); + this.expr_block(body, ThinVec::new()) + }); hir::ItemFn(this.lower_fn_decl(decl), this.lower_unsafety(unsafety), this.lower_constness(constness), @@ -1478,8 +1488,7 @@ impl<'a> LoweringContext<'a> { TraitItemKind::Const(ref ty, ref default) => { hir::TraitItemKind::Const(this.lower_ty(ty), default.as_ref().map(|x| { - let value = this.lower_expr(x); - this.record_body(value, None) + this.lower_body(None, |this| this.lower_expr(x)) })) } TraitItemKind::Method(ref sig, None) => { @@ -1488,9 +1497,10 @@ impl<'a> LoweringContext<'a> { hir::TraitMethod::Required(names)) } TraitItemKind::Method(ref sig, Some(ref body)) => { - let body = this.lower_block(body, false); - let expr = this.expr_block(body, ThinVec::new()); - let body_id = this.record_body(expr, Some(&sig.decl)); + let body_id = this.lower_body(Some(&sig.decl), |this| { + let body = this.lower_block(body, false); + this.expr_block(body, ThinVec::new()) + }); hir::TraitItemKind::Method(this.lower_method_sig(sig), hir::TraitMethod::Provided(body_id)) } @@ -1542,14 +1552,14 @@ impl<'a> LoweringContext<'a> { defaultness: this.lower_defaultness(i.defaultness, true /* [1] */), node: match i.node { ImplItemKind::Const(ref ty, ref expr) => { - let value = this.lower_expr(expr); - let body_id = this.record_body(value, None); + let body_id = this.lower_body(None, |this| this.lower_expr(expr)); hir::ImplItemKind::Const(this.lower_ty(ty), body_id) } ImplItemKind::Method(ref sig, ref body) => { - let body = this.lower_block(body, false); - let expr = this.expr_block(body, 
ThinVec::new()); - let body_id = this.record_body(expr, Some(&sig.decl)); + let body_id = this.lower_body(Some(&sig.decl), |this| { + let body = this.lower_block(body, false); + this.expr_block(body, ThinVec::new()) + }); hir::ImplItemKind::Method(this.lower_method_sig(sig), body_id) } ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(this.lower_ty(ty)), @@ -1928,8 +1938,8 @@ impl<'a> LoweringContext<'a> { } ExprKind::Repeat(ref expr, ref count) => { let expr = P(self.lower_expr(expr)); - let count = self.lower_expr(count); - hir::ExprRepeat(expr, self.record_body(count, None)) + let count = self.lower_body(None, |this| this.lower_expr(count)); + hir::ExprRepeat(expr, count) } ExprKind::Tup(ref elts) => { hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect()) @@ -2027,11 +2037,22 @@ impl<'a> LoweringContext<'a> { ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => { self.with_new_scopes(|this| { this.with_parent_def(e.id, |this| { - let expr = this.lower_expr(body); + let mut is_generator = false; + let body_id = this.lower_body(Some(decl), |this| { + let e = this.lower_expr(body); + is_generator = this.is_generator; + e + }); + if is_generator && !decl.inputs.is_empty() { + span_err!(this.sess, fn_decl_span, E0628, + "generators cannot have explicit arguments"); + this.sess.abort_if_errors(); + } hir::ExprClosure(this.lower_capture_clause(capture_clause), this.lower_fn_decl(decl), - this.record_body(expr, Some(decl)), - fn_decl_span) + body_id, + fn_decl_span, + is_generator) }) }) } @@ -2172,6 +2193,14 @@ impl<'a> LoweringContext<'a> { return ex; } + ExprKind::Yield(ref opt_expr) => { + self.is_generator = true; + let expr = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| { + self.expr(e.span, hir::ExprTup(hir_vec![]), ThinVec::new()) + }); + hir::ExprYield(P(expr)) + } + // Desugar ExprIfLet // From: `if let = []` ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => { diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index 1b7eb1585671e..d2888dcf6aaa4 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -264,7 +264,7 @@ impl<'a> FnLikeNode<'a> { } }, map::NodeExpr(e) => match e.node { - ast::ExprClosure(_, ref decl, block, _fn_decl_span) => + ast::ExprClosure(_, ref decl, block, _fn_decl_span, _gen) => closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)), _ => bug!("expr FnLikeNode that is not fn-like"), }, diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 63a5b70cb81a9..e54df2d50d8eb 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -26,7 +26,7 @@ use syntax_pos::Span; use hir::*; use hir::print::Nested; -use util::nodemap::DefIdMap; +use util::nodemap::{DefIdMap, FxHashMap}; use arena::TypedArena; use std::cell::RefCell; @@ -151,7 +151,9 @@ impl<'hir> MapEntry<'hir> { EntryTyParam(_, _, n) => NodeTyParam(n), EntryVisibility(_, _, n) => NodeVisibility(n), EntryLocal(_, _, n) => NodeLocal(n), - _ => return None + + NotPresent | + RootCrate(_) => return None }) } @@ -184,7 +186,7 @@ impl<'hir> MapEntry<'hir> { EntryExpr(_, _, expr) => { match expr.node { - ExprClosure(.., body, _) => Some(body), + ExprClosure(.., body, _, _) => Some(body), _ => None, } } @@ -249,6 +251,9 @@ pub struct Map<'hir> { /// Bodies inlined from other crates are cached here. inlined_bodies: RefCell>, + + /// The reverse mapping of `node_to_hir_id`. 
+ hir_to_node_id: FxHashMap, } impl<'hir> Map<'hir> { @@ -337,6 +342,11 @@ impl<'hir> Map<'hir> { self.definitions.as_local_node_id(def_id) } + #[inline] + pub fn hir_to_node_id(&self, hir_id: HirId) -> NodeId { + self.hir_to_node_id[&hir_id] + } + #[inline] pub fn node_to_hir_id(&self, node_id: NodeId) -> HirId { self.definitions.node_to_hir_id(node_id) @@ -1019,10 +1029,15 @@ pub fn map_crate<'hir>(forest: &'hir mut Forest, entries, vector_length, (entries as f64 / vector_length as f64) * 100.); } + // Build the reverse mapping of `node_to_hir_id`. + let hir_to_node_id = definitions.node_to_hir_id.iter_enumerated() + .map(|(node_id, &hir_id)| (hir_id, node_id)).collect(); + let map = Map { forest, dep_graph: forest.dep_graph.clone(), map, + hir_to_node_id, definitions, inlined_bodies: RefCell::new(DefIdMap()), }; diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index df67f8416212c..46b8cb0a2e2c8 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -929,7 +929,8 @@ pub struct BodyId { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Body { pub arguments: HirVec, - pub value: Expr + pub value: Expr, + pub is_generator: bool, } impl Body { @@ -1007,7 +1008,10 @@ pub enum Expr_ { /// A closure (for example, `move |a, b, c| {a + b + c}`). /// /// The final span is the span of the argument block `|...|` - ExprClosure(CaptureClause, P, BodyId, Span), + /// + /// This may also be a generator literal, indicated by the final boolean, + /// in that case there is an GeneratorClause. + ExprClosure(CaptureClause, P, BodyId, Span, bool), /// A block (`{ ... }`) ExprBlock(P), @@ -1052,6 +1056,9 @@ pub enum Expr_ { /// For example, `[1; 5]`. The first expression is the element /// to be repeated; the second is the number of times to repeat it. ExprRepeat(P, BodyId), + + /// A suspension point for generators. This is `yield ` in Rust. + ExprYield(P), } /// Optionally `Self`-qualified value/type path or associated extension. 
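For readers who have not followed the generators work, a sketch of the surface syntax that the new `ExprKind::Closure`/`ExprKind::Yield` lowering above accepts may help. This is illustrative only and assumes a nightly toolchain with the unstable `generators` feature gate introduced alongside this change; the gate itself is not shown in this excerpt.

```rust
#![feature(generators)]

fn main() {
    // A closure body containing `yield` is lowered with `is_generator == true`;
    // every `yield <expr>` becomes a `hir::ExprYield`, and a bare `yield` is
    // lowered as if it were `yield ()` (see the `unwrap_or_else` in the hunk above).
    let _gen = || {
        yield 1;
        yield 2;
        return "done";
    };

    // A generator literal that declares explicit arguments is rejected by the
    // new E0628 check:
    //
    //     let _bad = |x: i32| { yield x; };
    //     // error[E0628]: generators cannot have explicit arguments
}
```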
diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 876875bce4a5e..dce824bd513a7 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -262,7 +262,7 @@ impl<'a> State<'a> { indented: usize, close_box: bool) -> io::Result<()> { - self.maybe_print_comment(span.hi)?; + self.maybe_print_comment(span.hi())?; self.break_offset_if_not_bol(1, -(indented as isize))?; self.s.word("}")?; if close_box { @@ -324,12 +324,12 @@ impl<'a> State<'a> { let len = elts.len(); let mut i = 0; for elt in elts { - self.maybe_print_comment(get_span(elt).hi)?; + self.maybe_print_comment(get_span(elt).hi())?; op(self, elt)?; i += 1; if i < len { self.s.word(",")?; - self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi))?; + self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()))?; self.space_if_not_bol()?; } } @@ -368,7 +368,7 @@ impl<'a> State<'a> { } pub fn print_type(&mut self, ty: &hir::Ty) -> io::Result<()> { - self.maybe_print_comment(ty.span.lo)?; + self.maybe_print_comment(ty.span.lo())?; self.ibox(0)?; match ty.node { hir::TySlice(ref ty) => { @@ -458,7 +458,7 @@ impl<'a> State<'a> { pub fn print_foreign_item(&mut self, item: &hir::ForeignItem) -> io::Result<()> { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(item.span.lo)?; + self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; match item.node { hir::ForeignItemFn(ref decl, ref arg_names, ref generics) => { @@ -531,7 +531,7 @@ impl<'a> State<'a> { /// Pretty-print an item pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(item.span.lo)?; + self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; self.ann.pre(self, NodeItem(item))?; match item.node { @@ -797,7 +797,7 @@ impl<'a> State<'a> { self.bopen()?; for v in variants { self.space_if_not_bol()?; - self.maybe_print_comment(v.span.lo)?; + self.maybe_print_comment(v.span.lo())?; self.print_outer_attributes(&v.node.attrs)?; self.ibox(indent_unit)?; self.print_variant(v)?; @@ -842,7 +842,7 @@ impl<'a> State<'a> { if struct_def.is_tuple() { self.popen()?; self.commasep(Inconsistent, struct_def.fields(), |s, field| { - s.maybe_print_comment(field.span.lo)?; + s.maybe_print_comment(field.span.lo())?; s.print_outer_attributes(&field.attrs)?; s.print_visibility(&field.vis)?; s.print_type(&field.ty) @@ -863,7 +863,7 @@ impl<'a> State<'a> { for field in struct_def.fields() { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(field.span.lo)?; + self.maybe_print_comment(field.span.lo())?; self.print_outer_attributes(&field.attrs)?; self.print_visibility(&field.vis)?; self.print_name(field.name)?; @@ -908,7 +908,7 @@ impl<'a> State<'a> { pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ti.id))?; self.hardbreak_if_not_bol()?; - self.maybe_print_comment(ti.span.lo)?; + self.maybe_print_comment(ti.span.lo())?; self.print_outer_attributes(&ti.attrs)?; match ti.node { hir::TraitItemKind::Const(ref ty, default) => { @@ -938,7 +938,7 @@ impl<'a> State<'a> { pub fn print_impl_item(&mut self, ii: &hir::ImplItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ii.id))?; self.hardbreak_if_not_bol()?; - self.maybe_print_comment(ii.span.lo)?; + self.maybe_print_comment(ii.span.lo())?; self.print_outer_attributes(&ii.attrs)?; self.print_defaultness(ii.defaultness)?; @@ -962,7 +962,7 @@ impl<'a> State<'a> { } pub fn print_stmt(&mut self, 
st: &hir::Stmt) -> io::Result<()> { - self.maybe_print_comment(st.span.lo)?; + self.maybe_print_comment(st.span.lo())?; match st.node { hir::StmtDecl(ref decl, _) => { self.print_decl(&decl)?; @@ -1017,7 +1017,7 @@ impl<'a> State<'a> { hir::PopUnsafeBlock(..) => self.word_space("pop_unsafe")?, hir::DefaultBlock => (), } - self.maybe_print_comment(blk.span.lo)?; + self.maybe_print_comment(blk.span.lo())?; self.ann.pre(self, NodeBlock(blk))?; self.bopen()?; @@ -1030,7 +1030,7 @@ impl<'a> State<'a> { Some(ref expr) => { self.space_if_not_bol()?; self.print_expr(&expr)?; - self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?; + self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?; } _ => (), } @@ -1228,7 +1228,7 @@ impl<'a> State<'a> { } pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> { - self.maybe_print_comment(expr.span.lo)?; + self.maybe_print_comment(expr.span.lo())?; self.print_outer_attributes(&expr.attrs)?; self.ibox(indent_unit)?; self.ann.pre(self, NodeExpr(expr))?; @@ -1312,7 +1312,7 @@ impl<'a> State<'a> { } self.bclose_(expr.span, indent_unit)?; } - hir::ExprClosure(capture_clause, ref decl, body, _fn_decl_span) => { + hir::ExprClosure(capture_clause, ref decl, body, _fn_decl_span, _gen) => { self.print_capture_clause(capture_clause)?; self.print_closure_args(&decl, body)?; @@ -1461,6 +1461,10 @@ impl<'a> State<'a> { self.pclose()?; } + hir::ExprYield(ref expr) => { + self.s.word("yield")?; + self.print_expr(&expr)?; + } } self.ann.post(self, NodeExpr(expr))?; self.end() @@ -1476,7 +1480,7 @@ impl<'a> State<'a> { } pub fn print_decl(&mut self, decl: &hir::Decl) -> io::Result<()> { - self.maybe_print_comment(decl.span.lo)?; + self.maybe_print_comment(decl.span.lo())?; match decl.node { hir::DeclLocal(ref loc) => { self.space_if_not_bol()?; @@ -1519,7 +1523,7 @@ impl<'a> State<'a> { path: &hir::Path, colons_before_params: bool) -> io::Result<()> { - self.maybe_print_comment(path.span.lo)?; + self.maybe_print_comment(path.span.lo())?; for (i, segment) in path.segments.iter().enumerate() { if i > 0 { @@ -1637,7 +1641,7 @@ impl<'a> State<'a> { } pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> { - self.maybe_print_comment(pat.span.lo)?; + self.maybe_print_comment(pat.span.lo())?; self.ann.pre(self, NodePat(pat))?; // Pat isn't normalized, but the beauty of it // is that it doesn't matter @@ -1893,7 +1897,7 @@ impl<'a> State<'a> { match decl.output { hir::Return(ref ty) => { self.print_type(&ty)?; - self.maybe_print_comment(ty.span.lo) + self.maybe_print_comment(ty.span.lo()) } hir::DefaultReturn(..) 
=> unreachable!(), } @@ -2070,7 +2074,7 @@ impl<'a> State<'a> { self.end()?; match decl.output { - hir::Return(ref output) => self.maybe_print_comment(output.span.lo), + hir::Return(ref output) => self.maybe_print_comment(output.span.lo()), _ => Ok(()), } } @@ -2120,13 +2124,13 @@ impl<'a> State<'a> { if (*cmnt).style != comments::Trailing { return Ok(()); } - let span_line = cm.lookup_char_pos(span.hi); + let span_line = cm.lookup_char_pos(span.hi()); let comment_line = cm.lookup_char_pos((*cmnt).pos); let mut next = (*cmnt).pos + BytePos(1); if let Some(p) = next_pos { next = p; } - if span.hi < (*cmnt).pos && (*cmnt).pos < next && + if span.hi() < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line { self.print_comment(cmnt)?; } diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 218483232d673..9c841022fcb82 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -205,13 +205,15 @@ impl<'a, 'gcx, 'tcx> HashStable> for ast::N // corresponding entry in the `trait_map` we need to hash that. // Make sure we don't ignore too much by checking that there is // no entry in a debug_assert!(). - debug_assert!(hcx.tcx.trait_map.get(self).is_none()); + let hir_id = hcx.tcx.hir.node_to_hir_id(*self); + debug_assert!(hcx.tcx.in_scope_traits(hir_id).is_none()); } NodeIdHashingMode::HashDefPath => { hcx.tcx.hir.definitions().node_to_hir_id(*self).hash_stable(hcx, hasher); } NodeIdHashingMode::HashTraitsInScope => { - if let Some(traits) = hcx.tcx.trait_map.get(self) { + let hir_id = hcx.tcx.hir.node_to_hir_id(*self); + if let Some(traits) = hcx.tcx.in_scope_traits(hir_id) { // The ordering of the candidates is not fixed. So we hash // the def-ids and then sort them and hash the collection. let mut candidates: AccumulateVec<[_; 8]> = @@ -253,17 +255,17 @@ impl<'a, 'gcx, 'tcx> HashStable> for Span { // If this is not an empty or invalid span, we want to hash the last // position that belongs to it, as opposed to hashing the first // position past it. - let span_hi = if self.hi > self.lo { + let span_hi = if self.hi() > self.lo() { // We might end up in the middle of a multibyte character here, // but that's OK, since we are not trying to decode anything at // this position. - self.hi - ::syntax_pos::BytePos(1) + self.hi() - ::syntax_pos::BytePos(1) } else { - self.hi + self.hi() }; { - let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo); + let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo()); let loc1 = loc1.as_ref() .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize())) .unwrap_or(("???", 0, 0)); @@ -296,7 +298,7 @@ impl<'a, 'gcx, 'tcx> HashStable> for Span { } } - if self.ctxt == SyntaxContext::empty() { + if self.ctxt() == SyntaxContext::empty() { 0u8.hash_stable(hcx, hasher); } else { 1u8.hash_stable(hcx, hasher); diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index a79133187895a..411f5e26e4d05 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -569,6 +569,7 @@ impl<'a, 'gcx, 'tcx> HashStable> for hir::E hir::ExprBreak(..) | hir::ExprAgain(..) | hir::ExprRet(..) | + hir::ExprYield(..) | hir::ExprInlineAsm(..) | hir::ExprRepeat(..) | hir::ExprTup(..) 
=> { @@ -633,7 +634,7 @@ impl_stable_hash_for!(enum hir::Expr_ { ExprWhile(cond, body, label), ExprLoop(body, label, loop_src), ExprMatch(matchee, arms, match_src), - ExprClosure(capture_clause, decl, body_id, span), + ExprClosure(capture_clause, decl, body_id, span, gen), ExprBlock(blk), ExprAssign(lhs, rhs), ExprAssignOp(op, lhs, rhs), @@ -647,7 +648,8 @@ impl_stable_hash_for!(enum hir::Expr_ { ExprRet(val), ExprInlineAsm(asm, inputs, outputs), ExprStruct(path, fields, base), - ExprRepeat(val, times) + ExprRepeat(val, times), + ExprYield(val) }); impl_stable_hash_for!(enum hir::LocalSource { @@ -1024,7 +1026,8 @@ impl_stable_hash_for!(struct hir::Arg { impl_stable_hash_for!(struct hir::Body { arguments, - value + value, + is_generator }); impl<'a, 'gcx, 'tcx> HashStable> for hir::BodyId { diff --git a/src/librustc/ich/impls_mir.rs b/src/librustc/ich/impls_mir.rs index faf579186e5fc..9a061da177eba 100644 --- a/src/librustc/ich/impls_mir.rs +++ b/src/librustc/ich/impls_mir.rs @@ -17,7 +17,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::mem; - +impl_stable_hash_for!(struct mir::GeneratorLayout<'tcx> { fields }); impl_stable_hash_for!(struct mir::SourceInfo { span, scope }); impl_stable_hash_for!(enum mir::Mutability { Mut, Not }); impl_stable_hash_for!(enum mir::BorrowKind { Shared, Unique, Mut }); @@ -27,6 +27,7 @@ impl_stable_hash_for!(struct mir::LocalDecl<'tcx> { ty, name, source_info, + internal, is_user_variable }); impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref }); @@ -54,9 +55,11 @@ for mir::Terminator<'gcx> { mir::TerminatorKind::SwitchInt { .. } | mir::TerminatorKind::Resume | mir::TerminatorKind::Return | + mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Unreachable | mir::TerminatorKind::Drop { .. } | mir::TerminatorKind::DropAndReplace { .. } | + mir::TerminatorKind::Yield { .. } | mir::TerminatorKind::Call { .. 
} => false, }; @@ -146,6 +149,7 @@ for mir::TerminatorKind<'gcx> { } mir::TerminatorKind::Resume | mir::TerminatorKind::Return | + mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Unreachable => {} mir::TerminatorKind::Drop { ref location, target, unwind } => { location.hash_stable(hcx, hasher); @@ -161,6 +165,13 @@ for mir::TerminatorKind<'gcx> { target.hash_stable(hcx, hasher); unwind.hash_stable(hcx, hasher); } + mir::TerminatorKind::Yield { ref value, + resume, + drop } => { + value.hash_stable(hcx, hasher); + resume.hash_stable(hcx, hasher); + drop.hash_stable(hcx, hasher); + } mir::TerminatorKind::Call { ref func, ref args, ref destination, @@ -200,6 +211,8 @@ for mir::AssertMessage<'gcx> { mir::AssertMessage::Math(ref const_math_err) => { const_math_err.hash_stable(hcx, hasher); } + mir::AssertMessage::GeneratorResumedAfterReturn => (), + mir::AssertMessage::GeneratorResumedAfterPanic => (), } } } @@ -226,8 +239,8 @@ for mir::StatementKind<'gcx> { mir::StatementKind::StorageDead(ref lvalue) => { lvalue.hash_stable(hcx, hasher); } - mir::StatementKind::EndRegion(ref extent) => { - extent.hash_stable(hcx, hasher); + mir::StatementKind::EndRegion(ref region_scope) => { + region_scope.hash_stable(hcx, hasher); } mir::StatementKind::Validate(ref op, ref lvalues) => { op.hash_stable(hcx, hasher); @@ -258,7 +271,7 @@ impl<'a, 'gcx, 'tcx, T> HashStable> } } -impl_stable_hash_for!(enum mir::ValidationOp { Acquire, Release, Suspend(extent) }); +impl_stable_hash_for!(enum mir::ValidationOp { Acquire, Release, Suspend(region_scope) }); impl<'a, 'gcx, 'tcx> HashStable> for mir::Lvalue<'gcx> { fn hash_stable(&self, @@ -433,6 +446,11 @@ for mir::AggregateKind<'gcx> { def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } + mir::AggregateKind::Generator(def_id, ref substs, ref interior) => { + def_id.hash_stable(hcx, hasher); + substs.hash_stable(hcx, hasher); + interior.hash_stable(hcx, hasher); + } } } } diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index 5f51579945e33..077905b3ac0ae 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -17,6 +17,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, use std::hash as std_hash; use std::mem; use syntax_pos::symbol::InternedString; +use middle::region; use ty; impl<'a, 'gcx, 'tcx, T> HashStable> @@ -65,8 +66,8 @@ for ty::RegionKind { index.hash_stable(hcx, hasher); name.hash_stable(hcx, hasher); } - ty::ReScope(code_extent) => { - code_extent.hash_stable(hcx, hasher); + ty::ReScope(scope) => { + scope.hash_stable(hcx, hasher); } ty::ReFree(ref free_region) => { free_region.hash_stable(hcx, hasher); @@ -147,6 +148,11 @@ for ty::UpvarCapture<'gcx> { } } +impl_stable_hash_for!(struct ty::GenSig<'tcx> { + yield_ty, + return_ty +}); + impl_stable_hash_for!(struct ty::FnSig<'tcx> { inputs_and_output, variadic, @@ -321,6 +327,8 @@ for ::middle::const_val::ConstVal<'gcx> { impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs }); +impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness }); + impl_stable_hash_for!(struct ty::GenericPredicates<'tcx> { parent, predicates @@ -443,24 +451,22 @@ impl_stable_hash_for!(enum ty::cast::CastKind { }); impl<'a, 'gcx, 'tcx> HashStable> -for ::middle::region::CodeExtent +for region::Scope { fn hash_stable(&self, hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>, hasher: &mut StableHasher) { - use middle::region::CodeExtent; - mem::discriminant(self).hash_stable(hcx, hasher); match *self { - 
CodeExtent::Misc(node_id) | - CodeExtent::DestructionScope(node_id) => { + region::Scope::Node(node_id) | + region::Scope::Destruction(node_id) => { node_id.hash_stable(hcx, hasher); } - CodeExtent::CallSiteScope(body_id) | - CodeExtent::ParameterScope(body_id) => { + region::Scope::CallSite(body_id) | + region::Scope::Arguments(body_id) => { body_id.hash_stable(hcx, hasher); } - CodeExtent::Remainder(block_remainder) => { + region::Scope::Remainder(block_remainder) => { block_remainder.hash_stable(hcx, hasher); } } @@ -546,6 +552,12 @@ for ty::TypeVariants<'gcx> def_id.hash_stable(hcx, hasher); closure_substs.hash_stable(hcx, hasher); } + TyGenerator(def_id, closure_substs, interior) + => { + def_id.hash_stable(hcx, hasher); + closure_substs.hash_stable(hcx, hasher); + interior.hash_stable(hcx, hasher); + } TyTuple(inner_tys, from_diverging_type_var) => { inner_tys.hash_stable(hcx, hasher); from_diverging_type_var.hash_stable(hcx, hasher); @@ -625,6 +637,7 @@ impl_stable_hash_for!(enum ty::fast_reject::SimplifiedType { TupleSimplifiedType(size), TraitSimplifiedType(def_id), ClosureSimplifiedType(def_id), + GeneratorSimplifiedType(def_id), AnonSimplifiedType(def_id), FunctionSimplifiedType(params), ParameterSimplifiedType diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index edf9ca89b3394..476bf94714268 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -83,6 +83,7 @@ mod anon_anon_conflict; impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn note_and_explain_region(self, + region_scope_tree: ®ion::ScopeTree, err: &mut DiagnosticBuilder, prefix: &str, region: ty::Region<'tcx>, @@ -118,7 +119,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, heading: &str, span: Span) -> (String, Option) { - let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo); + let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo()); (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1), Some(span)) } @@ -130,14 +131,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { format!("{}unknown scope: {:?}{}. Please report a bug.", prefix, scope, suffix) }; - let span = match scope.span(&self.hir) { - Some(s) => s, - None => { - err.note(&unknown_scope()); - return; - } - }; - let tag = match self.hir.find(scope.node_id()) { + let span = scope.span(self, region_scope_tree); + let tag = match self.hir.find(scope.node_id(self, region_scope_tree)) { Some(hir_map::NodeBlock(_)) => "block", Some(hir_map::NodeExpr(expr)) => match expr.node { hir::ExprCall(..) => "call", @@ -158,18 +153,18 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } }; let scope_decorated_tag = match scope { - region::CodeExtent::Misc(_) => tag, - region::CodeExtent::CallSiteScope(_) => { + region::Scope::Node(_) => tag, + region::Scope::CallSite(_) => { "scope of call-site for function" } - region::CodeExtent::ParameterScope(_) => { + region::Scope::Arguments(_) => { "scope of function body" } - region::CodeExtent::DestructionScope(_) => { + region::Scope::Destruction(_) => { new_string = format!("destruction scope surrounding {}", tag); &new_string[..] } - region::CodeExtent::Remainder(r) => { + region::Scope::Remainder(r) => { new_string = format!("block suffix following statement {}", r.first_statement_index); &new_string[..] 
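The stable-hashing and diagnostics hunks above reflect a rename in `middle::region`: `RegionMaps` becomes `region::ScopeTree` and the `CodeExtent` enum becomes `region::Scope`. The correspondence below is reconstructed purely from the match arms visible in this diff; the stand-in payload type is a placeholder, since the real definitions live in `middle/region.rs`, which is outside this excerpt.

```rust
// CodeExtent::Misc(id)               ->  Scope::Node(id)
// CodeExtent::DestructionScope(id)   ->  Scope::Destruction(id)
// CodeExtent::CallSiteScope(body)    ->  Scope::CallSite(body)
// CodeExtent::ParameterScope(body)   ->  Scope::Arguments(body)
// CodeExtent::Remainder(remainder)   ->  Scope::Remainder(remainder)

/// Simplified stand-in for the renamed enum; the real variants carry node ids,
/// body ids and a block-remainder index rather than a bare `u32`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum Scope {
    Node(u32),
    Destruction(u32),
    CallSite(u32),
    Arguments(u32),
    Remainder(u32),
}
```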
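Much of the mechanical churn in `hir/print.rs`, `ich/hcx.rs`, and `explain_span` above comes from `Span`'s bounds moving behind accessor methods: `span.lo`, `span.hi`, and `span.ctxt` become `span.lo()`, `span.hi()`, and `span.ctxt()`. A hypothetical helper (not part of the patch), written against the in-tree `syntax_pos` crate, shows the new call shape:

```rust
use syntax_pos::{BytePos, Span};

/// Byte width of a span. Before this change the bounds were public fields and
/// this would have read roughly `span.hi - span.lo`; now they go through accessors.
fn span_width(span: Span) -> BytePos {
    span.hi() - span.lo()
}
```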
@@ -260,8 +255,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - - pub fn report_region_errors(&self, errors: &Vec>) { + pub fn report_region_errors(&self, + region_scope_tree: ®ion::ScopeTree, + errors: &Vec>) { debug!("report_region_errors(): {} errors to start", errors.len()); // try to pre-process the errors, which will group some of them @@ -285,16 +281,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // the error. If all of these fails, we fall back to a rather // general bit of code that displays the error information ConcreteFailure(origin, sub, sup) => { - - self.report_concrete_failure(origin, sub, sup).emit(); + self.report_concrete_failure(region_scope_tree, origin, sub, sup).emit(); } GenericBoundFailure(kind, param_ty, sub) => { - self.report_generic_bound_failure(kind, param_ty, sub); + self.report_generic_bound_failure(region_scope_tree, kind, param_ty, sub); } SubSupConflict(var_origin, sub_origin, sub_r, sup_origin, sup_r) => { - self.report_sub_sup_conflict(var_origin, + self.report_sub_sup_conflict(region_scope_tree, + var_origin, sub_origin, sub_r, sup_origin, @@ -773,6 +769,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } fn report_generic_bound_failure(&self, + region_scope_tree: ®ion::ScopeTree, origin: SubregionOrigin<'tcx>, bound_kind: GenericKind<'tcx>, sub: Region<'tcx>) @@ -840,6 +837,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err.help(&format!("consider adding an explicit lifetime bound for `{}`", bound_kind)); self.tcx.note_and_explain_region( + region_scope_tree, &mut err, &format!("{} must be valid for ", labeled_user_string), sub, @@ -853,6 +851,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } fn report_sub_sup_conflict(&self, + region_scope_tree: ®ion::ScopeTree, var_origin: RegionVariableOrigin, sub_origin: SubregionOrigin<'tcx>, sub_region: Region<'tcx>, @@ -860,14 +859,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { sup_region: Region<'tcx>) { let mut err = self.report_inference_failure(var_origin); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "first, the lifetime cannot outlive ", sup_region, "..."); self.note_region_origin(&mut err, &sup_origin); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "but, the lifetime must be valid for ", sub_region, "..."); diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs index 87047d0df144c..68e8ccbc3d886 100644 --- a/src/librustc/infer/error_reporting/note.rs +++ b/src/librustc/infer/error_reporting/note.rs @@ -9,6 +9,7 @@ // except according to those terms. 
use infer::{self, InferCtxt, SubregionOrigin}; +use middle::region; use ty::{self, Region}; use ty::error::TypeError; use errors::DiagnosticBuilder; @@ -144,6 +145,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } pub(super) fn report_concrete_failure(&self, + region_scope_tree: ®ion::ScopeTree, origin: SubregionOrigin<'tcx>, sub: Region<'tcx>, sup: Region<'tcx>) @@ -151,7 +153,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { match origin { infer::Subtype(trace) => { let terr = TypeError::RegionsDoesNotOutlive(sup, sub); - self.report_and_explain_type_error(trace, &terr) + let mut err = self.report_and_explain_type_error(trace, &terr); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "", sup, "..."); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "...does not necessarily outlive ", sub, ""); + err } infer::Reborrow(span) => { let mut err = struct_span_err!(self.tcx.sess, @@ -159,11 +165,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0312, "lifetime of reference outlives lifetime of \ borrowed content..."); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "...the reference is valid for ", sub, "..."); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "...but the borrowed content is only valid for ", sup, ""); @@ -177,27 +183,27 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { of captured variable `{}`...", self.tcx .local_var_name_str_def_index(upvar_id.var_id)); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "...the borrowed pointer is valid for ", sub, "..."); - self.tcx - .note_and_explain_region( - &mut err, - &format!("...but `{}` is only valid for ", - self.tcx.local_var_name_str_def_index(upvar_id.var_id)), - sup, - ""); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + &format!("...but `{}` is only valid for ", + self.tcx.local_var_name_str_def_index(upvar_id.var_id)), + sup, + ""); err } infer::InfStackClosure(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0314, "closure outlives stack frame"); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "...the closure must be valid for ", sub, "..."); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "...but the closure's stack frame is only valid \ for ", sup, @@ -209,8 +215,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0315, "cannot invoke closure outside of its lifetime"); - self.tcx - .note_and_explain_region(&mut err, "the closure is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the closure is only valid for ", sup, ""); err } infer::DerefPointer(span) => { @@ -218,8 +224,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0473, "dereference of reference outside its lifetime"); - self.tcx - .note_and_explain_region(&mut err, "the reference is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the reference is only valid for ", sup, ""); err } infer::FreeVariable(span, id) => { @@ -229,9 +235,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "captured variable `{}` does not outlive the \ enclosing closure", self.tcx.local_var_name_str(id)); - self.tcx - .note_and_explain_region(&mut err, "captured variable is valid for ", sup, 
""); - self.tcx.note_and_explain_region(&mut err, "closure is valid for ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "captured variable is valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "closure is valid for ", sub, ""); err } infer::IndexSlice(span) => { @@ -239,7 +246,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0475, "index of slice outside its lifetime"); - self.tcx.note_and_explain_region(&mut err, "the slice is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the slice is only valid for ", sup, ""); err } infer::RelateObjectBound(span) => { @@ -248,8 +256,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0476, "lifetime of the source pointer does not outlive \ lifetime bound of the object type"); - self.tcx.note_and_explain_region(&mut err, "object type is valid for ", sub, ""); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "object type is valid for ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "source pointer is only valid for ", sup, ""); @@ -264,10 +273,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.ty_to_string(ty)); match *sub { ty::ReStatic => { - self.tcx.note_and_explain_region(&mut err, "type must satisfy ", sub, "") + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "type must satisfy ", sub, "") } _ => { - self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, "") + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "type must outlive ", sub, "") } } err @@ -275,11 +286,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { infer::RelateRegionParamBound(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied"); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "lifetime parameter instantiated with ", sup, ""); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "but lifetime parameter must outlive ", sub, ""); @@ -292,7 +303,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "the type `{}` (provided as the value of a type \ parameter) is not valid at this point", self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "type must outlive ", sub, ""); err } infer::CallRcvr(span) => { @@ -301,8 +313,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0480, "lifetime of method receiver does not outlive the \ method call"); - self.tcx - .note_and_explain_region(&mut err, "the receiver is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the receiver is only valid for ", sup, ""); err } infer::CallArg(span) => { @@ -311,7 +323,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0481, "lifetime of function argument does not outlive \ the function call"); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "the function argument is only valid for ", sup, ""); @@ -323,7 +335,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0482, "lifetime of return value does not outlive the \ function call"); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "the return 
value is only valid for ", sup, ""); @@ -335,8 +347,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0483, "lifetime of operand does not outlive the \ operation"); - self.tcx - .note_and_explain_region(&mut err, "the operand is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the operand is only valid for ", sup, ""); err } infer::AddrOf(span) => { @@ -344,8 +356,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0484, "reference is not valid at the time of borrow"); - self.tcx - .note_and_explain_region(&mut err, "the borrow is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the borrow is only valid for ", sup, ""); err } infer::AutoBorrow(span) => { @@ -354,7 +366,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0485, "automatically reference is not valid at the time \ of borrow"); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "the automatic borrow is only valid for ", sup, ""); @@ -367,7 +379,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "type of expression contains references that are \ not valid during the expression: `{}`", self.ty_to_string(t)); - self.tcx.note_and_explain_region(&mut err, "type is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "type is only valid for ", sup, ""); err } infer::SafeDestructor(span) => { @@ -377,8 +390,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "unsafe use of destructor: destructor might be \ called while references are dead"); // FIXME (22171): terms "super/subregion" are suboptimal - self.tcx.note_and_explain_region(&mut err, "superregion: ", sup, ""); - self.tcx.note_and_explain_region(&mut err, "subregion: ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "superregion: ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "subregion: ", sub, ""); err } infer::BindingTypeIsNotValidAtDecl(span) => { @@ -387,8 +402,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0488, "lifetime of variable does not enclose its \ declaration"); - self.tcx - .note_and_explain_region(&mut err, "the variable is only valid for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the variable is only valid for ", sup, ""); err } infer::ParameterInScope(_, span) => { @@ -396,8 +411,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0489, "type/lifetime parameter not in scope here"); - self.tcx - .note_and_explain_region(&mut err, "the parameter is only valid for ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the parameter is only valid for ", sub, ""); err } infer::DataBorrowed(ty, span) => { @@ -406,8 +421,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { E0490, "a value of type `{}` is borrowed for too long", self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, "the type is valid for ", sub, ""); - self.tcx.note_and_explain_region(&mut err, "but the borrow lasts for ", sup, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the type is valid for ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "but the borrow lasts for ", sup, ""); err } infer::ReferenceOutlivesReferent(ty, span) => { @@ -417,8 +434,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "in type `{}`, reference has a longer lifetime \ than the data it references", 
self.ty_to_string(ty)); - self.tcx.note_and_explain_region(&mut err, "the pointer is valid for ", sub, ""); - self.tcx.note_and_explain_region(&mut err, + self.tcx.note_and_explain_region(region_scope_tree, &mut err, + "the pointer is valid for ", sub, ""); + self.tcx.note_and_explain_region(region_scope_tree, &mut err, "but the referenced data is only valid for ", sup, ""); diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index 41858088f7e70..c274f8bda9fb0 100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -19,10 +19,21 @@ //! fact an unbound type variable, we want the match to be regarded as ambiguous, because depending //! on what type that type variable is ultimately assigned, the match may or may not succeed. //! +//! To handle closures, freshened types also have to contain the signature and kind of any +//! closure in the local inference context, as otherwise the cache key might be invalidated. +//! The way this is done is somewhat hacky - the closure signature is appended to the substs, +//! as well as the closure kind "encoded" as a type. Also, special handling is needed when +//! the closure signature contains a reference to the original closure. +//! //! Note that you should be careful not to allow the output of freshening to leak to the user in //! error messages or in any other form. Freshening is only really useful as an internal detail. //! -//! __An important detail concerning regions.__ The freshener also replaces *all* regions with +//! Because of the manipulation required to handle closures, doing arbitrary operations on +//! freshened types is not recommended. However, in addition to doing equality/hash +//! comparisons (for caching), it is possible to do a `ty::_match` operation between +//! 2 freshened types - this works even with the closure encoding. +//! +//! __An important detail concerning regions.__ The freshener also replaces *all* free regions with //! 'erased. The reason behind this is that, in general, we do not take region relationships into //! account when making type-overloaded decisions. This is important because of the design of the //! 
region inferencer, which is not based on unification but rather on accumulating and then @@ -32,7 +43,10 @@ use ty::{self, Ty, TyCtxt, TypeFoldable}; use ty::fold::TypeFolder; +use ty::subst::Substs; use util::nodemap::FxHashMap; +use hir::def_id::DefId; + use std::collections::hash_map::Entry; use super::InferCtxt; @@ -42,6 +56,7 @@ pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, freshen_count: u32, freshen_map: FxHashMap>, + closure_set: Vec, } impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { @@ -51,6 +66,7 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { infcx, freshen_count: 0, freshen_map: FxHashMap(), + closure_set: vec![], } } @@ -76,6 +92,88 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { } } } + + fn next_fresh(&mut self, + freshener: F) + -> Ty<'tcx> + where F: FnOnce(u32) -> ty::InferTy, + { + let index = self.freshen_count; + self.freshen_count += 1; + self.infcx.tcx.mk_infer(freshener(index)) + } + + fn freshen_closure_like(&mut self, + def_id: DefId, + substs: ty::ClosureSubsts<'tcx>, + t: Ty<'tcx>, + markers: M, + combine: C) + -> Ty<'tcx> + where M: FnOnce(&mut Self) -> (Ty<'tcx>, Ty<'tcx>), + C: FnOnce(&'tcx Substs<'tcx>) -> Ty<'tcx> + { + let tcx = self.infcx.tcx; + + let closure_in_progress = self.infcx.in_progress_tables.map_or(false, |tables| { + tcx.hir.as_local_node_id(def_id).map_or(false, |closure_id| { + tables.borrow().local_id_root == + Some(DefId::local(tcx.hir.node_to_hir_id(closure_id).owner)) + }) + }); + + if !closure_in_progress { + // If this closure belongs to another infcx, its kind etc. were + // fully inferred and its signature/kind are exactly what's listed + // in its infcx. So we don't need to add the markers for them. + return t.super_fold_with(self); + } + + // We are encoding a closure in progress. Because we want our freshening + // key to contain all inference information needed to make sense of our + // value, we need to encode the closure signature and kind. The way + // we do that is to add them as 2 variables to the closure substs, + // basically because it's there (and nobody cares about adding extra stuff + // to substs). + // + // This means the "freshened" closure substs ends up looking like + // fresh_substs = [PARENT_SUBSTS* ; UPVARS* ; SIG_MARKER ; KIND_MARKER] + let (marker_1, marker_2) = if self.closure_set.contains(&def_id) { + // We found the closure def-id within its own signature. Just + // leave a new freshened type - any matching operations would + // have found and compared the exterior closure already to + // get here. + // + // In that case, we already know what the signature would + // be - the parent closure on the stack already contains a + // "copy" of the signature, so there is no reason to encode + // it again for injectivity. Just use a fresh type variable + // to make everything comparable. 
+ // + // For example (closure kinds omitted for clarity) + // t=[closure FOO sig=[closure BAR sig=[closure FOO ..]]] + // Would get encoded to + // t=[closure FOO sig=[closure BAR sig=[closure FOO sig=$0]]] + // + // and we can decode by having + // $0=[closure BAR {sig doesn't exist in decode}] + // and get + // t=[closure FOO] + // sig[FOO] = [closure BAR] + // sig[BAR] = [closure FOO] + (self.next_fresh(ty::FreshTy), self.next_fresh(ty::FreshTy)) + } else { + self.closure_set.push(def_id); + let markers = markers(self); + self.closure_set.pop(); + markers + }; + + combine(tcx.mk_substs( + substs.substs.iter().map(|k| k.fold_with(self)).chain( + [marker_1, marker_2].iter().cloned().map(From::from) + ))) + } } impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { @@ -105,7 +203,8 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - if !t.needs_infer() && !t.has_erasable_regions() { + if !t.needs_infer() && !t.has_erasable_regions() && + !(t.has_closure_types() && self.infcx.in_progress_tables.is_some()) { return t; } @@ -150,6 +249,51 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { t } + ty::TyClosure(def_id, substs) => { + self.freshen_closure_like( + def_id, substs, t, + |this| { + // HACK: use a "random" integer type to mark the kind. Because + // different closure kinds shouldn't get unified during + // selection, the "subtyping" relationship (where any kind is + // better than no kind) shouldn't matter here, just that the + // types are different. + let closure_kind = this.infcx.closure_kind(def_id); + let closure_kind_marker = match closure_kind { + None => tcx.types.i8, + Some(ty::ClosureKind::Fn) => tcx.types.i16, + Some(ty::ClosureKind::FnMut) => tcx.types.i32, + Some(ty::ClosureKind::FnOnce) => tcx.types.i64, + }; + + let closure_sig = this.infcx.fn_sig(def_id); + (tcx.mk_fn_ptr(closure_sig.fold_with(this)), + closure_kind_marker) + }, + |substs| tcx.mk_closure(def_id, substs) + ) + } + + ty::TyGenerator(def_id, substs, interior) => { + self.freshen_closure_like( + def_id, substs, t, + |this| { + let gen_sig = this.infcx.generator_sig(def_id).unwrap(); + // FIXME: want to revise this strategy when generator + // signatures can actually contain LBRs. + let sig = this.tcx().no_late_bound_regions(&gen_sig) + .unwrap_or_else(|| { + bug!("late-bound regions in signature of {:?}", + def_id) + }); + (sig.yield_ty, sig.return_ty).fold_with(this) + }, + |substs| { + tcx.mk_generator(def_id, ty::ClosureSubsts { substs }, interior) + } + ) + } + ty::TyBool | ty::TyChar | ty::TyInt(..) | @@ -165,7 +309,6 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyDynamic(..) | - ty::TyClosure(..) | ty::TyNever | ty::TyTuple(..) | ty::TyProjection(..) 
| diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 6c9b9d853f403..e85e8e2bdb8c9 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -20,7 +20,7 @@ pub use self::region_inference::{GenericKind, VerifyBound}; use hir::def_id::DefId; use middle::free_region::{FreeRegionMap, RegionRelations}; -use middle::region::RegionMaps; +use middle::region; use middle::lang_items; use mir::tcx::LvalueTy; use ty::subst::{Kind, Subst, Substs}; @@ -1070,7 +1070,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pub fn resolve_regions_and_report_errors(&self, region_context: DefId, - region_map: &RegionMaps, + region_map: ®ion::ScopeTree, free_regions: &FreeRegionMap<'tcx>) { let region_rels = RegionRelations::new(self.tcx, region_context, @@ -1084,7 +1084,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // this infcx was in use. This is totally hokey but // otherwise we have a hard time separating legit region // errors from silly ones. - self.report_region_errors(&errors); // see error_reporting module + self.report_region_errors(region_map, &errors); // see error_reporting module } } @@ -1363,6 +1363,19 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.tcx.fn_sig(def_id) } + + pub fn generator_sig(&self, def_id: DefId) -> Option> { + if let Some(tables) = self.in_progress_tables { + if let Some(id) = self.tcx.hir.as_local_node_id(def_id) { + let hir_id = self.tcx.hir.node_to_hir_id(id); + if let Some(&ty) = tables.borrow().generator_sigs().get(hir_id) { + return ty.map(|t| ty::Binder(t)); + } + } + } + + self.tcx.generator_sig(def_id) + } } impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { diff --git a/src/librustc/infer/region_inference/graphviz.rs b/src/librustc/infer/region_inference/graphviz.rs index 81a8984e7530e..5cf6aa350bdd5 100644 --- a/src/librustc/infer/region_inference/graphviz.rs +++ b/src/librustc/infer/region_inference/graphviz.rs @@ -21,7 +21,7 @@ use graphviz as dot; use hir::def_id::DefIndex; use ty; use middle::free_region::RegionRelations; -use middle::region::CodeExtent; +use middle::region; use super::Constraint; use infer::SubregionOrigin; use infer::region_inference::RegionVarBindings; @@ -136,7 +136,7 @@ enum Node { #[derive(Clone, PartialEq, Eq, Debug, Copy)] enum Edge<'tcx> { Constraint(Constraint<'tcx>), - EnclScope(CodeExtent, CodeExtent), + EnclScope(region::Scope, region::Scope), } impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> { @@ -159,7 +159,7 @@ impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> { add_node(n2); } - region_rels.region_maps.each_encl_scope(|sub, sup| { + region_rels.region_scope_tree.each_encl_scope(|sub, sup| { add_node(Node::Region(ty::ReScope(sub))); add_node(Node::Region(ty::ReScope(sup))); }); @@ -245,7 +245,9 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { fn edges(&self) -> dot::Edges> { debug!("constraint graph has {} edges", self.map.len()); let mut v: Vec<_> = self.map.keys().map(|e| Edge::Constraint(*e)).collect(); - self.region_rels.region_maps.each_encl_scope(|sub, sup| v.push(Edge::EnclScope(sub, sup))); + self.region_rels.region_scope_tree.each_encl_scope(|sub, sup| { + v.push(Edge::EnclScope(sub, sup)) + }); debug!("region graph has {} edges", v.len()); Cow::Owned(v) } diff --git a/src/librustc/infer/region_inference/mod.rs b/src/librustc/infer/region_inference/mod.rs index 5588b6d9add16..8351be490767a 100644 --- a/src/librustc/infer/region_inference/mod.rs +++ b/src/librustc/infer/region_inference/mod.rs @@ -935,14 +935,14 @@ impl<'a, 'gcx, 'tcx> 
RegionVarBindings<'a, 'gcx, 'tcx> { // reasonably compare free regions and scopes: let fr_scope = match (a, b) { (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => { - region_rels.region_maps.early_free_extent(self.tcx, br) + region_rels.region_scope_tree.early_free_scope(self.tcx, br) } (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => { - region_rels.region_maps.free_extent(self.tcx, fr) + region_rels.region_scope_tree.free_scope(self.tcx, fr) } _ => bug!() }; - let r_id = region_rels.region_maps.nearest_common_ancestor(fr_scope, s_id); + let r_id = region_rels.region_scope_tree.nearest_common_ancestor(fr_scope, s_id); if r_id == fr_scope { // if the free region's scope `fr.scope` is bigger than // the scope region `s_id`, then the LUB is the free @@ -963,7 +963,7 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> { // The region corresponding to an outer block is a // subtype of the region corresponding to an inner // block. - let lub = region_rels.region_maps.nearest_common_ancestor(a_id, b_id); + let lub = region_rels.region_scope_tree.nearest_common_ancestor(a_id, b_id); self.tcx.mk_region(ReScope(lub)) } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 152b2e2aa5ebc..82f01c36fee7f 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -24,7 +24,6 @@ #![feature(conservative_impl_trait)] #![feature(const_fn)] #![feature(core_intrinsics)] -#![feature(discriminant_value)] #![feature(i128_type)] #![cfg_attr(windows, feature(libc))] #![feature(never_type)] @@ -34,7 +33,6 @@ #![feature(slice_patterns)] #![feature(specialization)] #![feature(unboxed_closures)] -#![feature(discriminant_value)] #![feature(trace_macros)] #![feature(test)] diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 811bf9776101d..21852468146f4 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -216,6 +216,12 @@ declare_lint! { "detects use of deprecated items" } +declare_lint! { + pub UNUSED_UNSAFE, + Warn, + "unnecessary use of an `unsafe` block" +} + /// Does nothing as a lint pass, but registers some `Lint`s /// which are used by other parts of the compiler. 
#[derive(Copy, Clone)] @@ -256,7 +262,8 @@ impl LintPass for HardwiredLints { MISSING_FRAGMENT_SPECIFIER, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, LATE_BOUND_LIFETIME_ARGUMENTS, - DEPRECATED + DEPRECATED, + UNUSED_UNSAFE ) } } diff --git a/src/librustc/lint/levels.rs b/src/librustc/lint/levels.rs index ab086e5b8e90e..c5863b5618feb 100644 --- a/src/librustc/lint/levels.rs +++ b/src/librustc/lint/levels.rs @@ -247,13 +247,27 @@ impl<'a> LintLevelsBuilder<'a> { self.cur, Some(&specs)); let msg = format!("unknown lint: `{}`", name); - lint::struct_lint_level(self.sess, + let mut db = lint::struct_lint_level(self.sess, lint, level, src, Some(li.span.into()), - &msg) - .emit(); + &msg); + if name.as_str().chars().any(|c| c.is_uppercase()) { + let name_lower = name.as_str().to_lowercase(); + if let CheckLintNameResult::NoLint = + store.check_lint_name(&name_lower) { + db.emit(); + } else { + db.span_suggestion( + li.span, + "lowercase the lint name", + name_lower + ).emit(); + } + } else { + db.emit(); + } } } } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index d394c0f0c8734..e88678dea1d74 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -20,12 +20,11 @@ use ty::TyCtxt; use std::io; use std::mem; use std::usize; -use syntax::ast; use syntax::print::pprust::PrintState; use rustc_data_structures::graph::OUTGOING; -use util::nodemap::NodeMap; +use util::nodemap::FxHashMap; use hir; use hir::intravisit::{self, IdRange}; use hir::print as pprust; @@ -56,7 +55,7 @@ pub struct DataFlowContext<'a, 'tcx: 'a, O> { // mapping from node to cfg node index // FIXME (#6298): Shouldn't this go with CFG? - nodeid_to_index: NodeMap>, + local_id_to_index: FxHashMap>, // Bit sets per cfg node. The following three fields (`gens`, `kills`, // and `on_entry`) all have the same structure. 
For each id in @@ -97,15 +96,16 @@ struct PropagationContext<'a, 'b: 'a, 'tcx: 'b, O: 'a> { changed: bool } -fn get_cfg_indices<'a>(id: ast::NodeId, index: &'a NodeMap>) -> &'a [CFGIndex] { - let opt_indices = index.get(&id); - opt_indices.map(|v| &v[..]).unwrap_or(&[]) +fn get_cfg_indices<'a>(id: hir::ItemLocalId, + index: &'a FxHashMap>) + -> &'a [CFGIndex] { + index.get(&id).map_or(&[], |v| &v[..]) } impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { - fn has_bitset_for_nodeid(&self, n: ast::NodeId) -> bool { - assert!(n != ast::DUMMY_NODE_ID); - self.nodeid_to_index.contains_key(&n) + fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool { + assert!(n != hir::DUMMY_ITEM_LOCAL_ID); + self.local_id_to_index.contains_key(&n) } } @@ -117,19 +117,20 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O ps: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> { let id = match node { - pprust::NodeName(_) => ast::CRATE_NODE_ID, - pprust::NodeExpr(expr) => expr.id, - pprust::NodeBlock(blk) => blk.id, - pprust::NodeItem(_) | pprust::NodeSubItem(_) => ast::CRATE_NODE_ID, - pprust::NodePat(pat) => pat.id + pprust::NodeName(_) => return Ok(()), + pprust::NodeExpr(expr) => expr.hir_id.local_id, + pprust::NodeBlock(blk) => blk.hir_id.local_id, + pprust::NodeItem(_) | + pprust::NodeSubItem(_) => return Ok(()), + pprust::NodePat(pat) => pat.hir_id.local_id }; - if !self.has_bitset_for_nodeid(id) { + if !self.has_bitset_for_local_id(id) { return Ok(()); } assert!(self.bits_per_id > 0); - let indices = get_cfg_indices(id, &self.nodeid_to_index); + let indices = get_cfg_indices(id, &self.local_id_to_index); for &cfgidx in indices { let (start, end) = self.compute_id_range(cfgidx); let on_entry = &self.on_entry[start.. end]; @@ -157,7 +158,7 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O }; ps.synth_comment( - format!("id {}: {}{}{}{}", id, entry_str, + format!("id {}: {}{}{}{}", id.as_usize(), entry_str, gens_str, action_kills_str, scope_kills_str))?; ps.s.space()?; } @@ -165,9 +166,10 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O } } -fn build_nodeid_to_index(body: Option<&hir::Body>, - cfg: &cfg::CFG) -> NodeMap> { - let mut index = NodeMap(); +fn build_local_id_to_index(body: Option<&hir::Body>, + cfg: &cfg::CFG) + -> FxHashMap> { + let mut index = FxHashMap(); // FIXME (#6298): Would it be better to fold formals from decl // into cfg itself? i.e. introduce a fn-based flow-graph in @@ -188,14 +190,14 @@ fn build_nodeid_to_index(body: Option<&hir::Body>, /// Add mappings from the ast nodes for the formal bindings to /// the entry-node in the graph. 
- fn add_entries_from_fn_body(index: &mut NodeMap>, + fn add_entries_from_fn_body(index: &mut FxHashMap>, body: &hir::Body, entry: CFGIndex) { use hir::intravisit::Visitor; struct Formals<'a> { entry: CFGIndex, - index: &'a mut NodeMap>, + index: &'a mut FxHashMap>, } let mut formals = Formals { entry: entry, index: index }; for arg in &body.arguments { @@ -207,7 +209,7 @@ fn build_nodeid_to_index(body: Option<&hir::Body>, } fn visit_pat(&mut self, p: &hir::Pat) { - self.index.entry(p.id).or_insert(vec![]).push(self.entry); + self.index.entry(p.hir_id.local_id).or_insert(vec![]).push(self.entry); intravisit::walk_pat(self, p) } } @@ -259,13 +261,13 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let kills2 = zeroes; let on_entry = vec![entry; num_nodes * words_per_id]; - let nodeid_to_index = build_nodeid_to_index(body, cfg); + let local_id_to_index = build_local_id_to_index(body, cfg); DataFlowContext { tcx, analysis_name, words_per_id, - nodeid_to_index, + local_id_to_index, bits_per_id, oper, gens, @@ -275,14 +277,14 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } } - pub fn add_gen(&mut self, id: ast::NodeId, bit: usize) { + pub fn add_gen(&mut self, id: hir::ItemLocalId, bit: usize) { //! Indicates that `id` generates `bit` - debug!("{} add_gen(id={}, bit={})", + debug!("{} add_gen(id={:?}, bit={})", self.analysis_name, id, bit); - assert!(self.nodeid_to_index.contains_key(&id)); + assert!(self.local_id_to_index.contains_key(&id)); assert!(self.bits_per_id > 0); - let indices = get_cfg_indices(id, &self.nodeid_to_index); + let indices = get_cfg_indices(id, &self.local_id_to_index); for &cfgidx in indices { let (start, end) = self.compute_id_range(cfgidx); let gens = &mut self.gens[start.. end]; @@ -290,14 +292,14 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } } - pub fn add_kill(&mut self, kind: KillFrom, id: ast::NodeId, bit: usize) { + pub fn add_kill(&mut self, kind: KillFrom, id: hir::ItemLocalId, bit: usize) { //! Indicates that `id` kills `bit` - debug!("{} add_kill(id={}, bit={})", + debug!("{} add_kill(id={:?}, bit={})", self.analysis_name, id, bit); - assert!(self.nodeid_to_index.contains_key(&id)); + assert!(self.local_id_to_index.contains_key(&id)); assert!(self.bits_per_id > 0); - let indices = get_cfg_indices(id, &self.nodeid_to_index); + let indices = get_cfg_indices(id, &self.local_id_to_index); for &cfgidx in indices { let (start, end) = self.compute_id_range(cfgidx); let kills = match kind { @@ -341,15 +343,15 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } - pub fn each_bit_on_entry(&self, id: ast::NodeId, mut f: F) -> bool where + pub fn each_bit_on_entry(&self, id: hir::ItemLocalId, mut f: F) -> bool where F: FnMut(usize) -> bool, { //! Iterates through each bit that is set on entry to `id`. //! Only useful after `propagate()` has been called. - if !self.has_bitset_for_nodeid(id) { + if !self.has_bitset_for_local_id(id) { return true; } - let indices = get_cfg_indices(id, &self.nodeid_to_index); + let indices = get_cfg_indices(id, &self.local_id_to_index); for &cfgidx in indices { if !self.each_bit_for_node(EntryOrExit::Entry, cfgidx, |i| f(i)) { return false; @@ -387,11 +389,11 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { self.each_bit(slice, f) } - pub fn each_gen_bit(&self, id: ast::NodeId, mut f: F) -> bool where + pub fn each_gen_bit(&self, id: hir::ItemLocalId, mut f: F) -> bool where F: FnMut(usize) -> bool, { //! 
Iterates through each bit in the gen set for `id`. - if !self.has_bitset_for_nodeid(id) { + if !self.has_bitset_for_local_id(id) { return true; } @@ -401,11 +403,11 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { return true; } - let indices = get_cfg_indices(id, &self.nodeid_to_index); + let indices = get_cfg_indices(id, &self.local_id_to_index); for &cfgidx in indices { let (start, end) = self.compute_id_range(cfgidx); let gens = &self.gens[start.. end]; - debug!("{} each_gen_bit(id={}, gens={})", + debug!("{} each_gen_bit(id={:?}, gens={})", self.analysis_name, id, bits_to_string(gens)); if !self.each_bit(gens, |i| f(i)) { return false; @@ -472,17 +474,17 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut orig_kills = self.scope_kills[start.. end].to_vec(); let mut changed = false; - for &node_id in &edge.data.exiting_scopes { - let opt_cfg_idx = self.nodeid_to_index.get(&node_id); + for &id in &edge.data.exiting_scopes { + let opt_cfg_idx = self.local_id_to_index.get(&id); match opt_cfg_idx { Some(indices) => { for &cfg_idx in indices { let (start, end) = self.compute_id_range(cfg_idx); let kills = &self.scope_kills[start.. end]; if bitwise(&mut orig_kills, kills, &Union) { - debug!("scope exits: scope id={} \ + debug!("scope exits: scope id={:?} \ (node={:?} of {:?}) added killset: {}", - node_id, cfg_idx, indices, + id, cfg_idx, indices, bits_to_string(kills)); changed = true; } @@ -490,8 +492,8 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { } None => { debug!("{} add_kills_from_flow_exits flow_exit={:?} \ - no cfg_idx for exiting_scope={}", - self.analysis_name, flow_exit, node_id); + no cfg_idx for exiting_scope={:?}", + self.analysis_name, flow_exit, id); } } } @@ -559,7 +561,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> { // Iterate over nodes in reverse postorder for &node_index in nodes_po.iter().rev() { let node = cfg.graph.node(node_index); - debug!("DataFlowContext::walk_cfg idx={:?} id={} begin in_out={}", + debug!("DataFlowContext::walk_cfg idx={:?} id={:?} begin in_out={}", node_index, node.data.id(), bits_to_string(in_out)); let (start, end) = self.dfcx.compute_id_range(node_index); diff --git a/src/librustc/middle/effect.rs b/src/librustc/middle/effect.rs index 98934d6070328..7290353e48b0c 100644 --- a/src/librustc/middle/effect.rs +++ b/src/librustc/middle/effect.rs @@ -14,12 +14,14 @@ use self::RootUnsafeContext::*; use ty::{self, TyCtxt}; use lint; +use lint::builtin::UNUSED_UNSAFE; -use syntax::ast; -use syntax_pos::Span; -use hir::{self, PatKind}; use hir::def::Def; use hir::intravisit::{self, FnKind, Visitor, NestedVisitorMap}; +use hir::{self, PatKind}; +use syntax::ast; +use syntax_pos::Span; +use util::nodemap::FxHashSet; #[derive(Copy, Clone)] struct UnsafeContext { @@ -47,6 +49,7 @@ struct EffectCheckVisitor<'a, 'tcx: 'a> { /// Whether we're in an unsafe context. unsafe_context: UnsafeContext, + used_unsafe: FxHashSet, } impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> { @@ -73,7 +76,7 @@ impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> { UnsafeBlock(block_id) => { // OK, but record this. 
debug!("effect: recording unsafe block as used: {}", block_id); - self.tcx.used_unsafe.borrow_mut().insert(block_id); + self.used_unsafe.insert(block_id); } UnsafeFn => {} } @@ -159,7 +162,48 @@ impl<'a, 'tcx> Visitor<'tcx> for EffectCheckVisitor<'a, 'tcx> { intravisit::walk_block(self, block); - self.unsafe_context = old_unsafe_context + self.unsafe_context = old_unsafe_context; + + // Don't warn about generated blocks, that'll just pollute the output. + match block.rules { + hir::UnsafeBlock(hir::UserProvided) => {} + _ => return, + } + if self.used_unsafe.contains(&block.id) { + return + } + + /// Return the NodeId for an enclosing scope that is also `unsafe` + fn is_enclosed(tcx: TyCtxt, + used_unsafe: &FxHashSet, + id: ast::NodeId) -> Option<(String, ast::NodeId)> { + let parent_id = tcx.hir.get_parent_node(id); + if parent_id != id { + if used_unsafe.contains(&parent_id) { + Some(("block".to_string(), parent_id)) + } else if let Some(hir::map::NodeItem(&hir::Item { + node: hir::ItemFn(_, hir::Unsafety::Unsafe, _, _, _, _), + .. + })) = tcx.hir.find(parent_id) { + Some(("fn".to_string(), parent_id)) + } else { + is_enclosed(tcx, used_unsafe, parent_id) + } + } else { + None + } + } + + let mut db = self.tcx.struct_span_lint_node(UNUSED_UNSAFE, + block.id, + block.span, + "unnecessary `unsafe` block"); + db.span_label(block.span, "unnecessary `unsafe` block"); + if let Some((kind, id)) = is_enclosed(self.tcx, &self.used_unsafe, block.id) { + db.span_note(self.tcx.hir.span(id), + &format!("because it's nested under this `unsafe` {}", kind)); + } + db.emit(); } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { @@ -265,6 +309,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tables: &ty::TypeckTables::empty(None), body_id: hir::BodyId { node_id: ast::CRATE_NODE_ID }, unsafe_context: UnsafeContext::new(SafeContext), + used_unsafe: FxHashSet(), }; tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 324f9a6e9061f..374b02125a1d0 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -23,7 +23,7 @@ use hir::def::Def; use hir::def_id::{DefId}; use infer::InferCtxt; use middle::mem_categorization as mc; -use middle::region::RegionMaps; +use middle::region; use ty::{self, TyCtxt, adjustment}; use hir::{self, PatKind}; @@ -265,12 +265,12 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> { pub fn new(delegate: &'a mut (Delegate<'tcx>+'a), tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>) -> Self { ExprUseVisitor { - mc: mc::MemCategorizationContext::new(tcx, region_maps, tables), + mc: mc::MemCategorizationContext::new(tcx, region_scope_tree, tables), delegate, param_env, } @@ -281,12 +281,12 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { pub fn with_infer(delegate: &'a mut (Delegate<'tcx>+'a), infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>) -> Self { ExprUseVisitor { - mc: mc::MemCategorizationContext::with_infer(infcx, region_maps, tables), + mc: mc::MemCategorizationContext::with_infer(infcx, region_scope_tree, tables), delegate, param_env, } @@ -298,7 +298,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { for arg in &body.arguments { let arg_ty = 
return_if_err!(self.mc.node_ty(arg.pat.hir_id)); - let fn_body_scope_r = self.tcx().node_scope_region(body.value.id); + let fn_body_scope_r = + self.tcx().mk_region(ty::ReScope(region::Scope::Node(body.value.hir_id.local_id))); let arg_cmt = self.mc.cat_rvalue( arg.id, arg.pat.span, @@ -517,13 +518,17 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.consume_expr(&base); } - hir::ExprClosure(.., fn_decl_span) => { + hir::ExprClosure(.., fn_decl_span, _) => { self.walk_captures(expr, fn_decl_span) } hir::ExprBox(ref base) => { self.consume_expr(&base); } + + hir::ExprYield(ref value) => { + self.consume_expr(&value); + } } } @@ -538,16 +543,17 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { ty::TyError => { } _ => { let def_id = self.mc.tables.type_dependent_defs()[call.hir_id].def_id(); + let call_scope = region::Scope::Node(call.hir_id.local_id); match OverloadedCallType::from_method_id(self.tcx(), def_id) { FnMutOverloadedCall => { - let call_scope_r = self.tcx().node_scope_region(call.id); + let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); self.borrow_expr(callee, call_scope_r, ty::MutBorrow, ClosureInvocation); } FnOverloadedCall => { - let call_scope_r = self.tcx().node_scope_region(call.id); + let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); self.borrow_expr(callee, call_scope_r, ty::ImmBorrow, @@ -745,7 +751,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { // Converting from a &T to *T (or &mut T to *mut T) is // treated as borrowing it for the enclosing temporary // scope. - let r = self.tcx().node_scope_region(expr.id); + let r = self.tcx().mk_region(ty::ReScope( + region::Scope::Node(expr.hir_id.local_id))); self.delegate.borrow(expr.id, expr.span, diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs index de738fba30e92..d4cee25bb8fa2 100644 --- a/src/librustc/middle/free_region.rs +++ b/src/librustc/middle/free_region.rs @@ -16,11 +16,11 @@ //! region outlives another and so forth. use hir::def_id::DefId; -use middle::region::RegionMaps; +use middle::region; use ty::{self, Lift, TyCtxt, Region}; use rustc_data_structures::transitive_relation::TransitiveRelation; -/// Combines a `RegionMaps` (which governs relationships between +/// Combines a `region::ScopeTree` (which governs relationships between /// scopes) and a `FreeRegionMap` (which governs relationships between /// free regions) to yield a complete relation between concrete /// regions. 
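The `free_region.rs` changes here are mostly a rename of `RegionMaps` to `region::ScopeTree`, but the doc comment above is a good place to spell out what the combined relation answers. The sketch below is not the compiler's API; it uses a hypothetical `Relations` type in which a plain `HashMap` parent table stands in for the scope tree and a list of declared outlives facts stands in for the free-region map:

```
use std::collections::HashMap;

/// A toy region: either a lexical scope (identified by a small integer)
/// or a named free region such as `'a`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Region {
    Scope(u32),
    Free(&'static str),
}

struct Relations {
    /// child scope -> enclosing scope (the "scope tree" part)
    parent: HashMap<u32, u32>,
    /// (longer, shorter) pairs of free regions (the "free region map" part)
    outlives: Vec<(&'static str, &'static str)>,
}

impl Relations {
    /// Walk parent links to decide whether `sub` is nested inside `sup`.
    fn is_subscope_of(&self, mut sub: u32, sup: u32) -> bool {
        while sub != sup {
            match self.parent.get(&sub) {
                Some(&p) => sub = p,
                None => return false,
            }
        }
        true
    }

    /// Is `sub` contained in `sup`? Scope/scope queries consult the scope
    /// tree, free/free queries consult the declared facts, and a local scope
    /// is (simplistically) assumed to sit inside any free region of the fn.
    fn is_subregion_of(&self, sub: Region, sup: Region) -> bool {
        match (sub, sup) {
            (Region::Scope(a), Region::Scope(b)) => self.is_subscope_of(a, b),
            (Region::Scope(_), Region::Free(_)) => true,
            (Region::Free(a), Region::Free(b)) => a == b || self.outlives.contains(&(b, a)),
            (Region::Free(_), Region::Scope(_)) => false,
        }
    }
}

fn main() {
    let rel = Relations {
        parent: [(2, 1), (3, 2)].iter().cloned().collect(),
        outlives: vec![("'a", "'b")], // 'a outlives 'b
    };
    assert!(rel.is_subregion_of(Region::Scope(3), Region::Scope(1)));
    assert!(rel.is_subregion_of(Region::Free("'b"), Region::Free("'a")));
    assert!(!rel.is_subregion_of(Region::Free("'a"), Region::Free("'b")));
    println!("all relations check out");
}
```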
@@ -34,7 +34,7 @@ pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> { pub context: DefId, /// region maps for the given context - pub region_maps: &'a RegionMaps, + pub region_scope_tree: &'a region::ScopeTree, /// free-region relationships pub free_regions: &'a FreeRegionMap<'tcx>, @@ -44,13 +44,13 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { pub fn new( tcx: TyCtxt<'a, 'gcx, 'tcx>, context: DefId, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, free_regions: &'a FreeRegionMap<'tcx>, ) -> Self { Self { tcx, context, - region_maps, + region_scope_tree, free_regions, } } @@ -68,16 +68,16 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { true, (&ty::ReScope(sub_scope), &ty::ReScope(super_scope)) => - self.region_maps.is_subscope_of(sub_scope, super_scope), + self.region_scope_tree.is_subscope_of(sub_scope, super_scope), (&ty::ReScope(sub_scope), &ty::ReEarlyBound(ref br)) => { - let fr_scope = self.region_maps.early_free_extent(self.tcx, br); - self.region_maps.is_subscope_of(sub_scope, fr_scope) + let fr_scope = self.region_scope_tree.early_free_scope(self.tcx, br); + self.region_scope_tree.is_subscope_of(sub_scope, fr_scope) } (&ty::ReScope(sub_scope), &ty::ReFree(ref fr)) => { - let fr_scope = self.region_maps.free_extent(self.tcx, fr); - self.region_maps.is_subscope_of(sub_scope, fr_scope) + let fr_scope = self.region_scope_tree.free_scope(self.tcx, fr); + self.region_scope_tree.is_subscope_of(sub_scope, fr_scope) } (&ty::ReEarlyBound(_), &ty::ReEarlyBound(_)) | diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 9ba4252b52e57..ae3e3a30f371e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -316,6 +316,9 @@ language_item_table! { FnMutTraitLangItem, "fn_mut", fn_mut_trait; FnOnceTraitLangItem, "fn_once", fn_once_trait; + GeneratorStateLangItem, "generator_state", gen_state; + GeneratorTraitLangItem, "generator", gen_trait; + EqTraitLangItem, "eq", eq_trait; OrdTraitLangItem, "ord", ord_trait; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 8e5f748c78e29..6910a21ca5526 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -460,7 +460,7 @@ fn visit_expr<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, expr: &'tcx Expr) { hir::ExprAgain(_) | hir::ExprLit(_) | hir::ExprRet(..) | hir::ExprBlock(..) | hir::ExprAssign(..) | hir::ExprAssignOp(..) | hir::ExprStruct(..) | hir::ExprRepeat(..) | - hir::ExprInlineAsm(..) | hir::ExprBox(..) | + hir::ExprInlineAsm(..) | hir::ExprBox(..) | hir::ExprYield(..) | hir::ExprType(..) 
| hir::ExprPath(hir::QPath::TypeRelative(..)) => { intravisit::walk_expr(ir, expr); } @@ -881,7 +881,6 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { match expr.node { // Interesting cases with control flow or which gen/kill - hir::ExprPath(hir::QPath::Resolved(_, ref path)) => { self.access_path(expr.id, path, succ, ACC_READ | ACC_USE) } @@ -894,7 +893,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.propagate_through_expr(&e, succ) } - hir::ExprClosure(.., blk_id, _) => { + hir::ExprClosure(.., blk_id, _, _) => { debug!("{} is an ExprClosure", self.ir.tcx.hir.node_to_pretty_string(expr.id)); /* @@ -1116,6 +1115,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { hir::ExprCast(ref e, _) | hir::ExprType(ref e, _) | hir::ExprUnary(_, ref e) | + hir::ExprYield(ref e) | hir::ExprRepeat(ref e, _) => { self.propagate_through_expr(&e, succ) } @@ -1224,18 +1224,23 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } + fn access_var(&mut self, id: NodeId, nid: NodeId, succ: LiveNode, acc: u32, span: Span) + -> LiveNode { + let ln = self.live_node(id, span); + if acc != 0 { + self.init_from_succ(ln, succ); + let var = self.variable(nid, span); + self.acc(ln, var, acc); + } + ln + } + fn access_path(&mut self, id: NodeId, path: &hir::Path, succ: LiveNode, acc: u32) -> LiveNode { match path.def { Def::Local(def_id) => { let nid = self.ir.tcx.hir.as_local_node_id(def_id).unwrap(); - let ln = self.live_node(id, path.span); - if acc != 0 { - self.init_from_succ(ln, succ); - let var = self.variable(nid, path.span); - self.acc(ln, var, acc); - } - ln + self.access_var(id, nid, succ, acc, path.span) } _ => succ } @@ -1398,7 +1403,7 @@ fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { hir::ExprBreak(..) | hir::ExprAgain(..) | hir::ExprLit(_) | hir::ExprBlock(..) | hir::ExprAddrOf(..) | hir::ExprStruct(..) | hir::ExprRepeat(..) | - hir::ExprClosure(..) | hir::ExprPath(_) | + hir::ExprClosure(..) | hir::ExprPath(_) | hir::ExprYield(..) | hir::ExprBox(..) | hir::ExprType(..) 
=> { intravisit::walk_expr(this, expr); } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 8cd023b8e638b..0b0fbad9fc39c 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -69,7 +69,7 @@ pub use self::Note::*; use self::Aliasability::*; -use middle::region::RegionMaps; +use middle::region; use hir::def_id::{DefId, DefIndex}; use hir::map as hir_map; use infer::InferCtxt; @@ -283,7 +283,7 @@ impl ast_node for hir::Pat { #[derive(Clone)] pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'gcx, 'tcx>, - pub region_maps: &'a RegionMaps, + pub region_scope_tree: &'a region::ScopeTree, pub tables: &'a ty::TypeckTables<'tcx>, infcx: Option<&'a InferCtxt<'a, 'gcx, 'tcx>>, } @@ -391,21 +391,21 @@ impl MutabilityCategory { impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx, 'tcx> { pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>) -> MemCategorizationContext<'a, 'tcx, 'tcx> { - MemCategorizationContext { tcx, region_maps, tables, infcx: None } + MemCategorizationContext { tcx, region_scope_tree, tables, infcx: None } } } impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { pub fn with_infer(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, tables: &'a ty::TypeckTables<'tcx>) -> MemCategorizationContext<'a, 'gcx, 'tcx> { MemCategorizationContext { tcx: infcx.tcx, - region_maps, + region_scope_tree, tables, infcx: Some(infcx), } @@ -625,7 +625,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { hir::ExprAddrOf(..) | hir::ExprCall(..) | hir::ExprAssign(..) | hir::ExprAssignOp(..) | hir::ExprClosure(..) | hir::ExprRet(..) | - hir::ExprUnary(..) | + hir::ExprUnary(..) | hir::ExprYield(..) | hir::ExprMethodCall(..) | hir::ExprCast(..) | hir::ExprArray(..) | hir::ExprTup(..) | hir::ExprIf(..) | hir::ExprBinary(..) | hir::ExprWhile(..) | @@ -725,9 +725,14 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // FnMut | copied -> &'env mut | upvar -> &'env mut -> &'up bk // FnOnce | copied | upvar -> &'up bk - let kind = match self.tables.closure_kinds().get(fn_hir_id) { - Some(&(kind, _)) => kind, - None => span_bug!(span, "missing closure kind") + let kind = match self.node_ty(fn_hir_id)?.sty { + ty::TyGenerator(..) => ty::ClosureKind::FnOnce, + _ => { + match self.tables.closure_kinds().get(fn_hir_id) { + Some(&(kind, _)) => kind, + None => span_bug!(span, "missing closure kind"), + } + } }; let closure_expr_def_index = self.tcx.hir.local_def_id(fn_node_id).index; @@ -856,9 +861,8 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { /// Returns the lifetime of a temporary created by expr with id `id`. /// This could be `'static` if `id` is part of a constant expression. 
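The comment above notes that a temporary's lifetime can be `'static` when it comes from a constant expression (the `promotable` check appears in the next hunk); in surface Rust this is the familiar rvalue-promotion rule. A short runnable illustration in ordinary Rust, independent of the compiler internals being changed here:

```
fn main() {
    // `5` is a constant expression, so the temporary created for `&5` is
    // promoted to `'static` instead of being given a local temporary scope.
    let x: &'static i32 = &5;

    // By contrast, a value with a destructor (or produced by an arbitrary
    // function call) cannot be promoted, so the borrow below would be
    // rejected if uncommented:
    // let s: &'static String = &String::new();

    println!("{}", x);
}
```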
- pub fn temporary_scope(&self, id: ast::NodeId) -> ty::Region<'tcx> - { - let scope = self.region_maps.temporary_scope(id); + pub fn temporary_scope(&self, id: hir::ItemLocalId) -> ty::Region<'tcx> { + let scope = self.region_scope_tree.temporary_scope(id); self.tcx.mk_region(match scope { Some(scope) => ty::ReScope(scope), None => ty::ReStatic @@ -885,7 +889,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let re = if promotable { self.tcx.types.re_static } else { - self.temporary_scope(id) + self.temporary_scope(self.tcx.hir.node_to_hir_id(id).local_id) }; let ret = self.cat_rvalue(id, span, re, expr_ty); debug!("cat_rvalue_node ret {:?}", ret); diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 666f71cca06bb..3efc696f2a50a 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -296,7 +296,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { hir::ImplItemKind::Type(_) => {} } } - hir_map::NodeExpr(&hir::Expr { node: hir::ExprClosure(.., body, _), .. }) => { + hir_map::NodeExpr(&hir::Expr { node: hir::ExprClosure(.., body, _, _), .. }) => { self.visit_nested_body(body); } // Nothing to recurse on for these diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 45a3080ed91ff..ae9866edc53b2 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -8,23 +8,21 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! This file actually contains two passes related to regions. The first -//! pass builds up the `scope_map`, which describes the parent links in -//! the region hierarchy. The second pass infers which types must be -//! region parameterized. +//! This file builds up the `ScopeTree`, which describes +//! the parent links in the region hierarchy. //! //! Most of the documentation on regions can be found in //! `middle/infer/region_inference/README.md` -use hir::map as hir_map; -use util::nodemap::{FxHashMap, NodeMap, NodeSet}; +use util::nodemap::{FxHashMap, FxHashSet}; use ty; +use std::collections::hash_map::Entry; use std::mem; use std::rc::Rc; use syntax::codemap; use syntax::ast; -use syntax_pos::Span; +use syntax_pos::{Span, DUMMY_SP}; use ty::TyCtxt; use ty::maps::Providers; @@ -34,24 +32,24 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local}; use mir::transform::MirSource; -/// CodeExtent represents a statically-describable extent that can be +/// Scope represents a statically-describable scope that can be /// used to bound the lifetime/region for values. /// -/// `Misc(node_id)`: Any AST node that has any extent at all has the -/// `Misc(node_id)` extent. Other variants represent special cases not +/// `Node(node_id)`: Any AST node that has any scope at all has the +/// `Node(node_id)` scope. Other variants represent special cases not /// immediately derivable from the abstract syntax tree structure. /// -/// `DestructionScope(node_id)` represents the extent of destructors +/// `DestructionScope(node_id)` represents the scope of destructors /// implicitly-attached to `node_id` that run immediately after the /// expression for `node_id` itself. Not every AST node carries a /// `DestructionScope`, but those that are `terminating_scopes` do; -/// see discussion with `RegionMaps`. +/// see discussion with `ScopeTree`. 
/// /// `Remainder(BlockRemainder { block, statement_index })` represents -/// the extent of user code running immediately after the initializer +/// the scope of user code running immediately after the initializer /// expression for the indexed statement, until the end of the block. /// -/// So: the following code can be broken down into the extents beneath: +/// So: the following code can be broken down into the scopes beneath: /// ``` /// let a = f().g( 'b: { let x = d(); let y = d(); x.h(y) } ) ; /// ``` @@ -69,21 +67,21 @@ use mir::transform::MirSource; /// +--+ (M2.) /// +-----------------------------------------------------------+ (M1.) /// -/// (M1.): Misc extent of the whole `let a = ...;` statement. -/// (M2.): Misc extent of the `f()` expression. -/// (M3.): Misc extent of the `f().g(..)` expression. -/// (M4.): Misc extent of the block labeled `'b:`. -/// (M5.): Misc extent of the `let x = d();` statement +/// (M1.): Node scope of the whole `let a = ...;` statement. +/// (M2.): Node scope of the `f()` expression. +/// (M3.): Node scope of the `f().g(..)` expression. +/// (M4.): Node scope of the block labeled `'b:`. +/// (M5.): Node scope of the `let x = d();` statement /// (D6.): DestructionScope for temporaries created during M5. -/// (R7.): Remainder extent for block `'b:`, stmt 0 (let x = ...). -/// (M8.): Misc Extent of the `let y = d();` statement. +/// (R7.): Remainder scope for block `'b:`, stmt 0 (let x = ...). +/// (M8.): Node scope of the `let y = d();` statement. /// (D9.): DestructionScope for temporaries created during M8. -/// (R10.): Remainder extent for block `'b:`, stmt 1 (let y = ...). +/// (R10.): Remainder scope for block `'b:`, stmt 1 (let y = ...). /// (D11.): DestructionScope for temporaries and bindings from block `'b:`. /// (D12.): DestructionScope for temporaries created during M1 (e.g. f()). /// /// Note that while the above picture shows the destruction scopes -/// as following their corresponding misc extents, in the internal +/// as following their corresponding node scopes, in the internal /// data structures of the compiler the destruction scopes are /// represented as enclosing parents. This is sound because we use the /// enclosing parent relationship just to ensure that referenced @@ -96,21 +94,21 @@ use mir::transform::MirSource; /// actually attach a more meaningful ordering to scopes than the one /// generated via deriving here. #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, RustcEncodable, RustcDecodable)] -pub enum CodeExtent { - Misc(ast::NodeId), +pub enum Scope { + Node(hir::ItemLocalId), - // extent of the call-site for a function or closure (outlives - // the parameters as well as the body). - CallSiteScope(hir::BodyId), + // Scope of the call-site for a function or closure + // (outlives the arguments as well as the body). + CallSite(hir::ItemLocalId), - // extent of parameters passed to a function or closure (they - // outlive its body) - ParameterScope(hir::BodyId), + // Scope of arguments passed to a function or closure + // (they outlive its body). + Arguments(hir::ItemLocalId), - // extent of destructors for temporaries of node-id - DestructionScope(ast::NodeId), + // Scope of destructors for temporaries of node-id. + Destruction(hir::ItemLocalId), - // extent of code following a `let id = expr;` binding in a block + // Scope following a `let id = expr;` binding in a block. 
Remainder(BlockRemainder) } @@ -125,95 +123,106 @@ pub enum CodeExtent { /// * the subscope with `first_statement_index == 0` is scope of both /// `a` and `b`; it does not include EXPR_1, but does include /// everything after that first `let`. (If you want a scope that -/// includes EXPR_1 as well, then do not use `CodeExtent::Remainder`, -/// but instead another `CodeExtent` that encompasses the whole block, -/// e.g. `CodeExtent::Misc`. +/// includes EXPR_1 as well, then do not use `Scope::Remainder`, +/// but instead another `Scope` that encompasses the whole block, +/// e.g. `Scope::Node`. /// /// * the subscope with `first_statement_index == 1` is scope of `c`, /// and thus does not include EXPR_2, but covers the `...`. #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, RustcDecodable, Debug, Copy)] pub struct BlockRemainder { - pub block: ast::NodeId, + pub block: hir::ItemLocalId, pub first_statement_index: u32, } -impl CodeExtent { - /// Returns a node id associated with this scope. +impl Scope { + /// Returns a item-local id associated with this scope. /// /// NB: likely to be replaced as API is refined; e.g. pnkfelix /// anticipates `fn entry_node_id` and `fn each_exit_node_id`. - pub fn node_id(&self) -> ast::NodeId { + pub fn item_local_id(&self) -> hir::ItemLocalId { match *self { - CodeExtent::Misc(node_id) => node_id, + Scope::Node(id) => id, // These cases all return rough approximations to the - // precise extent denoted by `self`. - CodeExtent::Remainder(br) => br.block, - CodeExtent::DestructionScope(node_id) => node_id, - CodeExtent::CallSiteScope(body_id) | - CodeExtent::ParameterScope(body_id) => body_id.node_id, + // precise scope denoted by `self`. + Scope::Remainder(br) => br.block, + Scope::Destruction(id) | + Scope::CallSite(id) | + Scope::Arguments(id) => id, } } - /// Returns the span of this CodeExtent. Note that in general the + pub fn node_id(&self, tcx: TyCtxt, scope_tree: &ScopeTree) -> ast::NodeId { + match scope_tree.root_body { + Some(hir_id) => { + tcx.hir.hir_to_node_id(hir::HirId { + owner: hir_id.owner, + local_id: self.item_local_id() + }) + } + None => ast::DUMMY_NODE_ID + } + } + + /// Returns the span of this Scope. Note that in general the /// returned span may not correspond to the span of any node id in /// the AST. - pub fn span(&self, hir_map: &hir_map::Map) -> Option { - match hir_map.find(self.node_id()) { - Some(hir_map::NodeBlock(ref blk)) => { - match *self { - CodeExtent::CallSiteScope(_) | - CodeExtent::ParameterScope(_) | - CodeExtent::Misc(_) | - CodeExtent::DestructionScope(_) => Some(blk.span), - - CodeExtent::Remainder(r) => { - assert_eq!(r.block, blk.id); - // Want span for extent starting after the - // indexed statement and ending at end of - // `blk`; reuse span of `blk` and shift `lo` - // forward to end of indexed statement. 
- // - // (This is the special case aluded to in the - // doc-comment for this method) - let stmt_span = blk.stmts[r.first_statement_index as usize].span; - Some(Span { lo: stmt_span.hi, hi: blk.span.hi, ctxt: stmt_span.ctxt }) - } + pub fn span(&self, tcx: TyCtxt, scope_tree: &ScopeTree) -> Span { + let node_id = self.node_id(tcx, scope_tree); + if node_id == ast::DUMMY_NODE_ID { + return DUMMY_SP; + } + let span = tcx.hir.span(node_id); + if let Scope::Remainder(r) = *self { + if let hir::map::NodeBlock(ref blk) = tcx.hir.get(node_id) { + // Want span for scope starting after the + // indexed statement and ending at end of + // `blk`; reuse span of `blk` and shift `lo` + // forward to end of indexed statement. + // + // (This is the special case aluded to in the + // doc-comment for this method) + + let stmt_span = blk.stmts[r.first_statement_index as usize].span; + + // To avoid issues with macro-generated spans, the span + // of the statement must be nested in that of the block. + if span.lo() <= stmt_span.lo() && stmt_span.lo() <= span.hi() { + return Span::new(stmt_span.lo(), span.hi(), span.ctxt()); } } - Some(hir_map::NodeExpr(ref expr)) => Some(expr.span), - Some(hir_map::NodeStmt(ref stmt)) => Some(stmt.span), - Some(hir_map::NodeItem(ref item)) => Some(item.span), - Some(_) | None => None, } + span } } -/// The region maps encode information about region relationships. -pub struct RegionMaps { +/// The region scope tree encodes information about region relationships. +#[derive(Default)] +pub struct ScopeTree { /// If not empty, this body is the root of this region hierarchy. - root_body: Option, + root_body: Option, /// The parent of the root body owner, if the latter is an /// an associated const or method, as impls/traits can also /// have lifetime parameters free in this body. root_parent: Option, - /// `scope_map` maps from a scope id to the enclosing scope id; + /// `parent_map` maps from a scope id to the enclosing scope id; /// this is usually corresponding to the lexical nesting, though /// in the case of closures the parent scope is the innermost /// conditional expression or repeating block. (Note that the /// enclosing scope id for the block associated with a closure is /// the closure itself.) - scope_map: FxHashMap, + parent_map: FxHashMap, /// `var_map` maps from a variable or binding id to the block in /// which that variable is declared. - var_map: NodeMap, + var_map: FxHashMap, /// maps from a node-id to the associated destruction scope (if any) - destruction_scopes: NodeMap, + destruction_scopes: FxHashMap, /// `rvalue_scopes` includes entries for those expressions whose cleanup scope is /// larger than the default. The map goes from the expression id @@ -221,7 +230,9 @@ pub struct RegionMaps { /// table, the appropriate cleanup scope is the innermost /// enclosing statement, conditional expression, or repeating /// block (see `terminating_scopes`). - rvalue_scopes: NodeMap, + /// In constants, None is used to indicate that certain expressions + /// escape into 'static and should have no local cleanup scope. + rvalue_scopes: FxHashMap>, /// Encodes the hierarchy of fn bodies. Every fn body (including /// closures) forms its own distinct region hierarchy, rooted in @@ -233,7 +244,11 @@ pub struct RegionMaps { /// closure defined by that fn. See the "Modeling closures" /// section of the README in infer::region_inference for /// more details. 
- fn_tree: NodeMap, + closure_tree: FxHashMap, + + /// If there are any `yield` nested within a scope, this map + /// stores the `Span` of the first one. + yield_in_scope: FxHashMap, } #[derive(Debug, Copy, Clone)] @@ -244,20 +259,20 @@ pub struct Context { /// arranged into a tree. See the "Modeling closures" section of /// the README in infer::region_inference for more /// details. - root_id: Option, + root_id: Option, /// the scope that contains any new variables declared - var_parent: Option, + var_parent: Option, /// region parent of expressions etc - parent: Option, + parent: Option, } struct RegionResolutionVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - // Generated maps: - region_maps: RegionMaps, + // Generated scope tree: + scope_tree: ScopeTree, cx: Context, @@ -281,124 +296,117 @@ struct RegionResolutionVisitor<'a, 'tcx: 'a> { /// arbitrary amounts of stack space. Terminating scopes end /// up being contained in a DestructionScope that contains the /// destructor's execution. - terminating_scopes: NodeSet, + terminating_scopes: FxHashSet, } -impl<'tcx> RegionMaps { - pub fn new() -> Self { - RegionMaps { - root_body: None, - root_parent: None, - scope_map: FxHashMap(), - destruction_scopes: FxHashMap(), - var_map: NodeMap(), - rvalue_scopes: NodeMap(), - fn_tree: NodeMap(), - } - } - - pub fn record_code_extent(&mut self, - child: CodeExtent, - parent: Option) { +impl<'tcx> ScopeTree { + pub fn record_scope_parent(&mut self, child: Scope, parent: Option) { debug!("{:?}.parent = {:?}", child, parent); if let Some(p) = parent { - let prev = self.scope_map.insert(child, p); + let prev = self.parent_map.insert(child, p); assert!(prev.is_none()); } // record the destruction scopes for later so we can query them - if let CodeExtent::DestructionScope(n) = child { + if let Scope::Destruction(n) = child { self.destruction_scopes.insert(n, child); } } - pub fn each_encl_scope(&self, mut e:E) where E: FnMut(CodeExtent, CodeExtent) { - for (&child, &parent) in &self.scope_map { + pub fn each_encl_scope(&self, mut e:E) where E: FnMut(Scope, Scope) { + for (&child, &parent) in &self.parent_map { e(child, parent) } } - pub fn each_var_scope(&self, mut e:E) where E: FnMut(&ast::NodeId, CodeExtent) { + pub fn each_var_scope(&self, mut e:E) where E: FnMut(&hir::ItemLocalId, Scope) { for (child, &parent) in self.var_map.iter() { e(child, parent) } } - pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option { + pub fn opt_destruction_scope(&self, n: hir::ItemLocalId) -> Option { self.destruction_scopes.get(&n).cloned() } - /// Records that `sub_fn` is defined within `sup_fn`. These ids + /// Records that `sub_closure` is defined within `sup_closure`. These ids /// should be the id of the block that is the fn body, which is /// also the root of the region hierarchy for that fn. 
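Most of `ScopeTree` (above and below) is bookkeeping over a single `parent_map` from each scope to its enclosing scope. As a simplified stand-in, not the real `ScopeTree` API, the sketch below keeps the invariant the new code asserts (a child is recorded at most once) and shows how a query like `temporary_scope` further below amounts to walking parent links until a destruction scope is reached:

```
use std::collections::HashMap;

/// A cut-down scope identifier; the real `Scope` carries `hir::ItemLocalId`s.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Scope {
    Node(u32),
    Destruction(u32),
}

#[derive(Default)]
struct ScopeTree {
    parent_map: HashMap<Scope, Scope>,
}

impl ScopeTree {
    /// Record `parent` as the enclosing scope of `child`, at most once.
    fn record_scope_parent(&mut self, child: Scope, parent: Scope) {
        let prev = self.parent_map.insert(child, parent);
        assert!(prev.is_none(), "{:?} already had a parent", child);
    }

    /// Walk parent links from `Node(expr)` and return the scope that sits
    /// directly inside the nearest enclosing destruction scope, mirroring
    /// the shape of `temporary_scope`: that is where temporaries created by
    /// `expr` are cleaned up.
    fn temporary_scope(&self, expr: u32) -> Option<Scope> {
        let mut id = Scope::Node(expr);
        while let Some(&p) = self.parent_map.get(&id) {
            if let Scope::Destruction(..) = p {
                return Some(id);
            }
            id = p;
        }
        None
    }
}

fn main() {
    let mut tree = ScopeTree::default();
    // A statement's destruction scope encloses the statement's node scope,
    // which in turn encloses the scopes of its subexpressions.
    tree.record_scope_parent(Scope::Node(1), Scope::Destruction(1));
    tree.record_scope_parent(Scope::Node(2), Scope::Node(1));
    tree.record_scope_parent(Scope::Node(3), Scope::Node(2));

    assert_eq!(tree.temporary_scope(3), Some(Scope::Node(1)));
    println!("temporaries of node 3 are cleaned up with scope {:?}",
             tree.temporary_scope(3));
}
```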
- fn record_fn_parent(&mut self, sub_fn: ast::NodeId, sup_fn: ast::NodeId) { - debug!("record_fn_parent(sub_fn={:?}, sup_fn={:?})", sub_fn, sup_fn); - assert!(sub_fn != sup_fn); - let previous = self.fn_tree.insert(sub_fn, sup_fn); + fn record_closure_parent(&mut self, + sub_closure: hir::ItemLocalId, + sup_closure: hir::ItemLocalId) { + debug!("record_closure_parent(sub_closure={:?}, sup_closure={:?})", + sub_closure, sup_closure); + assert!(sub_closure != sup_closure); + let previous = self.closure_tree.insert(sub_closure, sup_closure); assert!(previous.is_none()); } - fn fn_is_enclosed_by(&self, mut sub_fn: ast::NodeId, sup_fn: ast::NodeId) -> bool { + fn closure_is_enclosed_by(&self, + mut sub_closure: hir::ItemLocalId, + sup_closure: hir::ItemLocalId) -> bool { loop { - if sub_fn == sup_fn { return true; } - match self.fn_tree.get(&sub_fn) { - Some(&s) => { sub_fn = s; } + if sub_closure == sup_closure { return true; } + match self.closure_tree.get(&sub_closure) { + Some(&s) => { sub_closure = s; } None => { return false; } } } } - fn record_var_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) { + fn record_var_scope(&mut self, var: hir::ItemLocalId, lifetime: Scope) { debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime); - assert!(var != lifetime.node_id()); + assert!(var != lifetime.item_local_id()); self.var_map.insert(var, lifetime); } - fn record_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) { + fn record_rvalue_scope(&mut self, var: hir::ItemLocalId, lifetime: Option) { debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime); - assert!(var != lifetime.node_id()); + if let Some(lifetime) = lifetime { + assert!(var != lifetime.item_local_id()); + } self.rvalue_scopes.insert(var, lifetime); } - pub fn opt_encl_scope(&self, id: CodeExtent) -> Option { + pub fn opt_encl_scope(&self, id: Scope) -> Option { //! Returns the narrowest scope that encloses `id`, if any. - self.scope_map.get(&id).cloned() + self.parent_map.get(&id).cloned() } #[allow(dead_code)] // used in cfg - pub fn encl_scope(&self, id: CodeExtent) -> CodeExtent { + pub fn encl_scope(&self, id: Scope) -> Scope { //! Returns the narrowest scope that encloses `id`, if any. self.opt_encl_scope(id).unwrap() } /// Returns the lifetime of the local variable `var_id` - pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent { + pub fn var_scope(&self, var_id: hir::ItemLocalId) -> Scope { match self.var_map.get(&var_id) { Some(&r) => r, None => { bug!("no enclosing scope for id {:?}", var_id); } } } - pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option { + pub fn temporary_scope(&self, expr_id: hir::ItemLocalId) -> Option { //! Returns the scope when temp created by expr_id will be cleaned up // check for a designated rvalue scope if let Some(&s) = self.rvalue_scopes.get(&expr_id) { debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s); - return Some(s); + return s; } // else, locate the innermost terminating scope // if there's one. Static items, for instance, won't // have an enclosing scope, hence no scope will be // returned. - let mut id = CodeExtent::Misc(expr_id); + let mut id = Scope::Node(expr_id); - while let Some(&p) = self.scope_map.get(&id) { + while let Some(&p) = self.parent_map.get(&id) { match p { - CodeExtent::DestructionScope(..) => { + Scope::Destruction(..) 
=> { debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id); return Some(id); @@ -411,7 +419,7 @@ impl<'tcx> RegionMaps { return None; } - pub fn var_region(&self, id: ast::NodeId) -> ty::RegionKind { + pub fn var_region(&self, id: hir::ItemLocalId) -> ty::RegionKind { //! Returns the lifetime of the variable `id`. let scope = ty::ReScope(self.var_scope(id)); @@ -419,7 +427,7 @@ impl<'tcx> RegionMaps { scope } - pub fn scopes_intersect(&self, scope1: CodeExtent, scope2: CodeExtent) + pub fn scopes_intersect(&self, scope1: Scope, scope2: Scope) -> bool { self.is_subscope_of(scope1, scope2) || self.is_subscope_of(scope2, scope1) @@ -428,8 +436,8 @@ impl<'tcx> RegionMaps { /// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false /// otherwise. pub fn is_subscope_of(&self, - subscope: CodeExtent, - superscope: CodeExtent) + subscope: Scope, + superscope: Scope) -> bool { let mut s = subscope; debug!("is_subscope_of({:?}, {:?})", subscope, superscope); @@ -453,22 +461,22 @@ impl<'tcx> RegionMaps { /// Finds the nearest common ancestor (if any) of two scopes. That is, finds the smallest /// scope which is greater than or equal to both `scope_a` and `scope_b`. pub fn nearest_common_ancestor(&self, - scope_a: CodeExtent, - scope_b: CodeExtent) - -> CodeExtent { + scope_a: Scope, + scope_b: Scope) + -> Scope { if scope_a == scope_b { return scope_a; } // [1] The initial values for `a_buf` and `b_buf` are not used. // The `ancestors_of` function will return some prefix that // is re-initialized with new values (or else fallback to a // heap-allocated vector). - let mut a_buf: [CodeExtent; 32] = [scope_a /* [1] */; 32]; - let mut a_vec: Vec = vec![]; - let mut b_buf: [CodeExtent; 32] = [scope_b /* [1] */; 32]; - let mut b_vec: Vec = vec![]; - let scope_map = &self.scope_map; - let a_ancestors = ancestors_of(scope_map, scope_a, &mut a_buf, &mut a_vec); - let b_ancestors = ancestors_of(scope_map, scope_b, &mut b_buf, &mut b_vec); + let mut a_buf: [Scope; 32] = [scope_a /* [1] */; 32]; + let mut a_vec: Vec = vec![]; + let mut b_buf: [Scope; 32] = [scope_b /* [1] */; 32]; + let mut b_vec: Vec = vec![]; + let parent_map = &self.parent_map; + let a_ancestors = ancestors_of(parent_map, scope_a, &mut a_buf, &mut a_vec); + let b_ancestors = ancestors_of(parent_map, scope_b, &mut b_buf, &mut b_vec); let mut a_index = a_ancestors.len() - 1; let mut b_index = b_ancestors.len() - 1; @@ -489,12 +497,12 @@ impl<'tcx> RegionMaps { let a_root_scope = a_ancestors[a_index]; let b_root_scope = a_ancestors[a_index]; return match (a_root_scope, b_root_scope) { - (CodeExtent::DestructionScope(a_root_id), - CodeExtent::DestructionScope(b_root_id)) => { - if self.fn_is_enclosed_by(a_root_id, b_root_id) { + (Scope::Destruction(a_root_id), + Scope::Destruction(b_root_id)) => { + if self.closure_is_enclosed_by(a_root_id, b_root_id) { // `a` is enclosed by `b`, hence `b` is the ancestor of everything in `a` scope_b - } else if self.fn_is_enclosed_by(b_root_id, a_root_id) { + } else if self.closure_is_enclosed_by(b_root_id, a_root_id) { // `b` is enclosed by `a`, hence `a` is the ancestor of everything in `b` scope_a } else { @@ -503,7 +511,7 @@ impl<'tcx> RegionMaps { } } _ => { - // root ids are always Misc right now + // root ids are always Node right now bug!() } }; @@ -521,18 +529,18 @@ impl<'tcx> RegionMaps { } } - fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap, - scope: CodeExtent, - buf: &'a mut [CodeExtent; 32], - vec: &'a mut Vec) - -> &'a [CodeExtent] { + fn 
ancestors_of<'a, 'tcx>(parent_map: &FxHashMap, + scope: Scope, + buf: &'a mut [Scope; 32], + vec: &'a mut Vec) + -> &'a [Scope] { // debug!("ancestors_of(scope={:?})", scope); let mut scope = scope; let mut i = 0; while i < 32 { buf[i] = scope; - match scope_map.get(&scope) { + match parent_map.get(&scope) { Some(&superscope) => scope = superscope, _ => return &buf[..i+1] } @@ -543,7 +551,7 @@ impl<'tcx> RegionMaps { vec.extend_from_slice(buf); loop { vec.push(scope); - match scope_map.get(&scope) { + match parent_map.get(&scope) { Some(&superscope) => scope = superscope, _ => return &*vec } @@ -551,34 +559,38 @@ impl<'tcx> RegionMaps { } } - /// Assuming that the provided region was defined within this `RegionMaps`, - /// returns the outermost `CodeExtent` that the region outlives. - pub fn early_free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + /// Assuming that the provided region was defined within this `ScopeTree`, + /// returns the outermost `Scope` that the region outlives. + pub fn early_free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, br: &ty::EarlyBoundRegion) - -> CodeExtent { + -> Scope { let param_owner = tcx.parent_def_id(br.def_id).unwrap(); let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap(); - let body_id = tcx.hir.maybe_body_owned_by(param_owner_id).unwrap_or_else(|| { + let scope = tcx.hir.maybe_body_owned_by(param_owner_id).map(|body_id| { + tcx.hir.body(body_id).value.hir_id.local_id + }).unwrap_or_else(|| { // The lifetime was defined on node that doesn't own a body, // which in practice can only mean a trait or an impl, that // is the parent of a method, and that is enforced below. assert_eq!(Some(param_owner_id), self.root_parent, - "free_extent: {:?} not recognized by the region maps for {:?}", + "free_scope: {:?} not recognized by the \ + region scope tree for {:?} / {:?}", param_owner, - self.root_body.map(|body| tcx.hir.body_owner_def_id(body))); + self.root_parent.map(|id| tcx.hir.local_def_id(id)), + self.root_body.map(|hir_id| DefId::local(hir_id.owner))); // The trait/impl lifetime is in scope for the method's body. - self.root_body.unwrap() + self.root_body.unwrap().local_id }); - CodeExtent::CallSiteScope(body_id) + Scope::CallSite(scope) } - /// Assuming that the provided region was defined within this `RegionMaps`, - /// returns the outermost `CodeExtent` that the region outlives. - pub fn free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion) - -> CodeExtent { + /// Assuming that the provided region was defined within this `ScopeTree`, + /// returns the outermost `Scope` that the region outlives. + pub fn free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion) + -> Scope { let param_owner = match fr.bound_region { ty::BoundRegion::BrNamed(def_id, _) => { tcx.parent_def_id(def_id).unwrap() @@ -591,13 +603,20 @@ impl<'tcx> RegionMaps { assert_eq!(param_owner, fr.scope); let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap(); - CodeExtent::CallSiteScope(tcx.hir.body_owned_by(param_owner_id)) + let body_id = tcx.hir.body_owned_by(param_owner_id); + Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id) + } + + /// Checks whether the given scope contains a `yield`. If so, + /// returns `Some(span)` with the span of a yield we found. 
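`yield_in_scope`, documented just above, is the query side; the recording side appears further down in `resolve_expr`, where a `yield` marks its own scope and every enclosing scope up to (but not across) the generator's `CallSite`. A minimal sketch of that propagation over a plain parent map, using toy types rather than the compiler's (spans are just numbers here):

```
use std::collections::HashMap;
use std::collections::hash_map::Entry;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Scope {
    Node(u32),
    CallSite(u32),
}

/// Walk from the scope containing a `yield` up through its ancestors,
/// recording the yield's "span" for each one. Stop before crossing a
/// `CallSite` so yields never leak into an enclosing body, and stop early
/// if an earlier yield already claimed the scope.
fn record_yield(
    parent_map: &HashMap<Scope, Scope>,
    yield_in_scope: &mut HashMap<Scope, u32>,
    yield_scope: Scope,
    span: u32,
) {
    let mut scope = yield_scope;
    loop {
        match yield_in_scope.entry(scope) {
            Entry::Occupied(_) => break,          // a previous yield got here first
            Entry::Vacant(entry) => { entry.insert(span); }
        }
        match parent_map.get(&scope) {
            Some(&Scope::CallSite(_)) => break,   // don't cross into the parent body
            Some(&parent) => scope = parent,
            None => break,
        }
    }
}

fn main() {
    // CallSite(0) encloses Node(1), which encloses Node(2) and Node(3).
    let parent_map: HashMap<Scope, Scope> = [
        (Scope::Node(1), Scope::CallSite(0)),
        (Scope::Node(2), Scope::Node(1)),
        (Scope::Node(3), Scope::Node(1)),
    ].iter().cloned().collect();

    let mut yield_in_scope = HashMap::new();
    record_yield(&parent_map, &mut yield_in_scope, Scope::Node(2), 10);
    record_yield(&parent_map, &mut yield_in_scope, Scope::Node(3), 20);

    // Node(1) keeps the first yield it saw.
    assert_eq!(yield_in_scope[&Scope::Node(1)], 10);
    assert_eq!(yield_in_scope[&Scope::Node(3)], 20);
    println!("{:?}", yield_in_scope);
}
```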
+ pub fn yield_in_scope(&self, scope: Scope) -> Option { + self.yield_in_scope.get(&scope).cloned() } } /// Records the lifetime of a local variable as `cx.var_parent` fn record_var_lifetime(visitor: &mut RegionResolutionVisitor, - var_id: ast::NodeId, + var_id: hir::ItemLocalId, _sp: Span) { match visitor.cx.var_parent { None => { @@ -606,7 +625,7 @@ fn record_var_lifetime(visitor: &mut RegionResolutionVisitor, // extern fn isalnum(c: c_int) -> c_int } Some(parent_scope) => - visitor.region_maps.record_var_scope(var_id, parent_scope), + visitor.scope_tree.record_var_scope(var_id, parent_scope), } } @@ -640,7 +659,7 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // `other_argument()` has run and also the call to `quux(..)` // itself has returned. - visitor.enter_node_extent_with_dtor(blk.id); + visitor.enter_node_scope_with_dtor(blk.hir_id.local_id); visitor.cx.var_parent = visitor.cx.parent; { @@ -657,9 +676,9 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // has the previous subscope in the block as a parent, // except for the first such subscope, which has the // block itself as a parent. - visitor.enter_code_extent( - CodeExtent::Remainder(BlockRemainder { - block: blk.id, + visitor.enter_scope( + Scope::Remainder(BlockRemainder { + block: blk.hir_id.local_id, first_statement_index: i as u32 }) ); @@ -674,39 +693,39 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: } fn resolve_arm<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, arm: &'tcx hir::Arm) { - visitor.terminating_scopes.insert(arm.body.id); + visitor.terminating_scopes.insert(arm.body.hir_id.local_id); if let Some(ref expr) = arm.guard { - visitor.terminating_scopes.insert(expr.id); + visitor.terminating_scopes.insert(expr.hir_id.local_id); } intravisit::walk_arm(visitor, arm); } fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) { - visitor.record_code_extent(CodeExtent::Misc(pat.id)); + visitor.record_child_scope(Scope::Node(pat.hir_id.local_id)); // If this is a binding then record the lifetime of that binding. if let PatKind::Binding(..) = pat.node { - record_var_lifetime(visitor, pat.id, pat.span); + record_var_lifetime(visitor, pat.hir_id.local_id, pat.span); } intravisit::walk_pat(visitor, pat); } fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: &'tcx hir::Stmt) { - let stmt_id = stmt.node.id(); + let stmt_id = visitor.tcx.hir.node_to_hir_id(stmt.node.id()).local_id; debug!("resolve_stmt(stmt.id={:?})", stmt_id); // Every statement will clean up the temporaries created during // execution of that statement. Therefore each statement has an - // associated destruction scope that represents the extent of the - // statement plus its destructors, and thus the extent for which + // associated destruction scope that represents the scope of the + // statement plus its destructors, and thus the scope for which // regions referenced by the destructors need to survive. 
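The comment above describes why every statement gets its own destruction scope: temporaries the statement creates are cleaned up when the statement finishes. That is observable in ordinary Rust, independent of the compiler internals being patched here; the throwaway `Noisy` type below exists only for the demonstration:

```
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn make(tag: &'static str) -> Noisy {
    Noisy(tag)
}

fn main() {
    // The temporary returned by `make` lives only for this statement; its
    // destructor runs as part of the statement's destruction scope, before
    // the next statement executes.
    let len = make("statement temporary").0.len();
    println!("len = {}", len);

    // A named binding, by contrast, lives until the end of the enclosing
    // block and is dropped when `main`'s block scope ends.
    let keep = make("named binding");
    println!("still holding {}", keep.0);
}
```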
visitor.terminating_scopes.insert(stmt_id); let prev_parent = visitor.cx.parent; - visitor.enter_node_extent_with_dtor(stmt_id); + visitor.enter_node_scope_with_dtor(stmt_id); intravisit::walk_stmt(visitor, stmt); @@ -717,11 +736,11 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: debug!("resolve_expr(expr.id={:?})", expr.id); let prev_cx = visitor.cx; - visitor.enter_node_extent_with_dtor(expr.id); + visitor.enter_node_scope_with_dtor(expr.hir_id.local_id); { let terminating_scopes = &mut visitor.terminating_scopes; - let mut terminating = |id: ast::NodeId| { + let mut terminating = |id: hir::ItemLocalId| { terminating_scopes.insert(id); }; match expr.node { @@ -733,27 +752,27 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: hir::ExprBinary(codemap::Spanned { node: hir::BiOr, .. }, _, ref r) => { // For shortcircuiting operators, mark the RHS as a terminating // scope since it only executes conditionally. - terminating(r.id); + terminating(r.hir_id.local_id); } hir::ExprIf(ref expr, ref then, Some(ref otherwise)) => { - terminating(expr.id); - terminating(then.id); - terminating(otherwise.id); + terminating(expr.hir_id.local_id); + terminating(then.hir_id.local_id); + terminating(otherwise.hir_id.local_id); } hir::ExprIf(ref expr, ref then, None) => { - terminating(expr.id); - terminating(then.id); + terminating(expr.hir_id.local_id); + terminating(then.hir_id.local_id); } hir::ExprLoop(ref body, _, _) => { - terminating(body.id); + terminating(body.hir_id.local_id); } hir::ExprWhile(ref expr, ref body, _) => { - terminating(expr.id); - terminating(body.id); + terminating(expr.hir_id.local_id); + terminating(body.hir_id.local_id); } hir::ExprMatch(..) => { @@ -782,6 +801,29 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: // record_superlifetime(new_cx, expr.callee_id); } + hir::ExprYield(..) => { + // Mark this expr's scope and all parent scopes as containing `yield`. + let mut scope = Scope::Node(expr.hir_id.local_id); + loop { + match visitor.scope_tree.yield_in_scope.entry(scope) { + // Another `yield` has already been found. + Entry::Occupied(_) => break, + + Entry::Vacant(entry) => { + entry.insert(expr.span); + } + } + + // Keep traversing up while we can. + match visitor.scope_tree.parent_map.get(&scope) { + // Don't cross from closure bodies to their parent. + Some(&Scope::CallSite(_)) => break, + Some(&superscope) => scope = superscope, + None => break + } + } + } + _ => {} } } @@ -789,7 +831,7 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: match expr.node { // Manually recurse over closures, because they are the only // case of nested bodies that share the parent environment. - hir::ExprClosure(.., body, _) => { + hir::ExprClosure(.., body, _, _) => { let body = visitor.tcx.hir.body(body); visitor.visit_body(body); } @@ -801,16 +843,11 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: } fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, - local: &'tcx hir::Local) { - debug!("resolve_local(local.id={:?},local.init={:?})", - local.id,local.init.is_some()); + pat: Option<&'tcx hir::Pat>, + init: Option<&'tcx hir::Expr>) { + debug!("resolve_local(pat={:?}, init={:?})", pat, init); - // For convenience in trans, associate with the local-id the var - // scope that will be used for any bindings declared in this - // pattern. 
let blk_scope = visitor.cx.var_parent; - let blk_scope = blk_scope.expect("locals must be within a block"); - visitor.region_maps.record_var_scope(local.id, blk_scope); // As an exception to the normal rules governing temporary // lifetimes, initializers in a let have a temporary lifetime @@ -870,15 +907,22 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, // // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST. - if let Some(ref expr) = local.init { + if let Some(expr) = init { record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope); - if is_binding_pat(&local.pat) { - record_rvalue_scope(visitor, &expr, blk_scope); + if let Some(pat) = pat { + if is_binding_pat(pat) { + record_rvalue_scope(visitor, &expr, blk_scope); + } } } - intravisit::walk_local(visitor, local); + if let Some(pat) = pat { + visitor.visit_pat(pat); + } + if let Some(expr) = init { + visitor.visit_expr(expr); + } /// True if `pat` match the `P&` nonterminal: /// @@ -952,7 +996,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>( visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &hir::Expr, - blk_id: CodeExtent) + blk_id: Option) { match expr.node { hir::ExprAddrOf(_, ref subexpr) => { @@ -1002,7 +1046,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, /// Note: ET is intended to match "rvalues or lvalues based on rvalues". fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &hir::Expr, - blk_scope: CodeExtent) { + blk_scope: Option) { let mut expr = expr; loop { // Note: give all the expressions matching `ET` with the @@ -1010,7 +1054,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, // because in trans if we must compile e.g. `*rvalue()` // into a temporary, we request the temporary scope of the // outer expression. - visitor.region_maps.record_rvalue_scope(expr.id, blk_scope); + visitor.scope_tree.record_rvalue_scope(expr.hir_id.local_id, blk_scope); match expr.node { hir::ExprAddrOf(_, ref subexpr) | @@ -1030,27 +1074,27 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, impl<'a, 'tcx> RegionResolutionVisitor<'a, 'tcx> { /// Records the current parent (if any) as the parent of `child_scope`. - fn record_code_extent(&mut self, child_scope: CodeExtent) { + fn record_child_scope(&mut self, child_scope: Scope) { let parent = self.cx.parent; - self.region_maps.record_code_extent(child_scope, parent); + self.scope_tree.record_scope_parent(child_scope, parent); } /// Records the current parent (if any) as the parent of `child_scope`, /// and sets `child_scope` as the new current parent. - fn enter_code_extent(&mut self, child_scope: CodeExtent) { - self.record_code_extent(child_scope); + fn enter_scope(&mut self, child_scope: Scope) { + self.record_child_scope(child_scope); self.cx.parent = Some(child_scope); } - fn enter_node_extent_with_dtor(&mut self, id: ast::NodeId) { + fn enter_node_scope_with_dtor(&mut self, id: hir::ItemLocalId) { // If node was previously marked as a terminating scope during the // recursive visit of its parent node in the AST, then we need to - // account for the destruction scope representing the extent of + // account for the destruction scope representing the scope of // the destructors that run immediately after it completes. 
if self.terminating_scopes.contains(&id) { - self.enter_code_extent(CodeExtent::DestructionScope(id)); + self.enter_scope(Scope::Destruction(id)); } - self.enter_code_extent(CodeExtent::Misc(id)); + self.enter_scope(Scope::Node(id)); } } @@ -1074,21 +1118,16 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { self.cx.parent); let outer_cx = self.cx; - let outer_ts = mem::replace(&mut self.terminating_scopes, NodeSet()); - - // Only functions have an outer terminating (drop) scope, - // while temporaries in constant initializers are 'static. - if let MirSource::Fn(_) = MirSource::from_node(self.tcx, owner_id) { - self.terminating_scopes.insert(body_id.node_id); - } + let outer_ts = mem::replace(&mut self.terminating_scopes, FxHashSet()); + self.terminating_scopes.insert(body.value.hir_id.local_id); if let Some(root_id) = self.cx.root_id { - self.region_maps.record_fn_parent(body_id.node_id, root_id); + self.scope_tree.record_closure_parent(body.value.hir_id.local_id, root_id); } - self.cx.root_id = Some(body_id.node_id); + self.cx.root_id = Some(body.value.hir_id.local_id); - self.enter_code_extent(CodeExtent::CallSiteScope(body_id)); - self.enter_code_extent(CodeExtent::ParameterScope(body_id)); + self.enter_scope(Scope::CallSite(body.value.hir_id.local_id)); + self.enter_scope(Scope::Arguments(body.value.hir_id.local_id)); // The arguments and `self` are parented to the fn. self.cx.var_parent = self.cx.parent.take(); @@ -1098,7 +1137,30 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { // The body of the every fn is a root scope. self.cx.parent = self.cx.var_parent; - self.visit_expr(&body.value); + if let MirSource::Fn(_) = MirSource::from_node(self.tcx, owner_id) { + self.visit_expr(&body.value); + } else { + // Only functions have an outer terminating (drop) scope, while + // temporaries in constant initializers may be 'static, but only + // according to rvalue lifetime semantics, using the same + // syntactical rules used for let initializers. + // + // E.g. in `let x = &f();`, the temporary holding the result from + // the `f()` call lives for the entirety of the surrounding block. + // + // Similarly, `const X: ... = &f();` would have the result of `f()` + // live for `'static`, implying (if Drop restrictions on constants + // ever get lifted) that the value *could* have a destructor, but + // it'd get leaked instead of the destructor running during the + // evaluation of `X` (if at all allowed by CTFE). + // + // However, `const Y: ... = g(&f());`, like `let y = g(&f());`, + // would *not* let the `f()` temporary escape into an outer scope + // (i.e. `'static`), which means that after `g` returns, it drops, + // and all the associated destruction scope rules apply. + self.cx.var_parent = None; + resolve_local(self, None, Some(&body.value)); + } // Restore context we had at the start. 
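The block of comments a few lines above contrasts `let x = &f();` with `let y = g(&f());`. The difference is observable with a destructor; here is a small runnable program in plain Rust (the `Noisy`, `f`, and `g` definitions are made up for the example) showing the two temporary lifetimes:

```
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn f(tag: &'static str) -> Noisy {
    Noisy(tag)
}

fn g(n: &Noisy) -> usize {
    n.0.len()
}

fn main() {
    // `&f(..)` directly in a `let` initializer: the temporary is given the
    // enclosing block as its cleanup scope, so `x` stays valid and the value
    // is only dropped when `main` ends.
    let x = &f("borrowed in let");

    // Here the borrow only needs to last for the call to `g`, so the
    // temporary's scope is the statement itself and it is dropped right away.
    let y = g(&f("argument temporary"));

    println!("x = {}, y = {}", x.0, y);
}
```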
self.cx = outer_cx; @@ -1118,32 +1180,33 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { resolve_expr(self, ex); } fn visit_local(&mut self, l: &'tcx Local) { - resolve_local(self, l); + resolve_local(self, Some(&l.pat), l.init.as_ref().map(|e| &**e)); } } -fn region_maps<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc +fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> Rc { let closure_base_def_id = tcx.closure_base_def_id(def_id); if closure_base_def_id != def_id { - return tcx.region_maps(closure_base_def_id); + return tcx.region_scope_tree(closure_base_def_id); } let id = tcx.hir.as_local_node_id(def_id).unwrap(); - let maps = if let Some(body) = tcx.hir.maybe_body_owned_by(id) { + let scope_tree = if let Some(body_id) = tcx.hir.maybe_body_owned_by(id) { let mut visitor = RegionResolutionVisitor { tcx, - region_maps: RegionMaps::new(), + scope_tree: ScopeTree::default(), cx: Context { root_id: None, parent: None, var_parent: None, }, - terminating_scopes: NodeSet(), + terminating_scopes: FxHashSet(), }; - visitor.region_maps.root_body = Some(body); + let body = tcx.hir.body(body_id); + visitor.scope_tree.root_body = Some(body.value.hir_id); // If the item is an associated const or a method, // record its impl/trait parent, as it can also have @@ -1151,24 +1214,24 @@ fn region_maps<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) match tcx.hir.get(id) { hir::map::NodeImplItem(_) | hir::map::NodeTraitItem(_) => { - visitor.region_maps.root_parent = Some(tcx.hir.get_parent(id)); + visitor.scope_tree.root_parent = Some(tcx.hir.get_parent(id)); } _ => {} } - visitor.visit_body(tcx.hir.body(body)); + visitor.visit_body(body); - visitor.region_maps + visitor.scope_tree } else { - RegionMaps::new() + ScopeTree::default() }; - Rc::new(maps) + Rc::new(scope_tree) } pub fn provide(providers: &mut Providers) { *providers = Providers { - region_maps, + region_scope_tree, ..*providers }; } diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 74ce68b351df9..cf3d7c3642a5f 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -12,7 +12,7 @@ use graphviz::IntoCow; use middle::const_val::ConstVal; -use middle::region::CodeExtent; +use middle::region; use rustc_const_math::{ConstUsize, ConstInt, ConstMathErr}; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_data_structures::control_flow_graph::dominators::{Dominators, dominators}; @@ -21,7 +21,7 @@ use rustc_data_structures::control_flow_graph::ControlFlowGraph; use hir::def::CtorKind; use hir::def_id::DefId; use ty::subst::{Subst, Substs}; -use ty::{self, AdtDef, ClosureSubsts, Region, Ty}; +use ty::{self, AdtDef, ClosureSubsts, Region, Ty, GeneratorInterior}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use util::ppaux; use rustc_back::slice; @@ -104,6 +104,15 @@ pub struct Mir<'tcx> { /// Return type of the function. pub return_ty: Ty<'tcx>, + /// Yield type of the function, if it is a generator. + pub yield_ty: Option>, + + /// Generator drop glue + pub generator_drop: Option>>, + + /// The layout of a generator. Produced by the state transformation. + pub generator_layout: Option>, + /// Declarations of locals. 
/// /// The first local is the return value pointer, followed by `arg_count` @@ -144,6 +153,7 @@ impl<'tcx> Mir<'tcx> { visibility_scopes: IndexVec, promoted: IndexVec>, return_ty: Ty<'tcx>, + yield_ty: Option>, local_decls: IndexVec>, arg_count: usize, upvar_decls: Vec, @@ -159,6 +169,9 @@ impl<'tcx> Mir<'tcx> { visibility_scopes, promoted, return_ty, + yield_ty, + generator_drop: None, + generator_layout: None, local_decls, arg_count, upvar_decls, @@ -270,6 +283,9 @@ impl_stable_hash_for!(struct Mir<'tcx> { visibility_scopes, promoted, return_ty, + yield_ty, + generator_drop, + generator_layout, local_decls, arg_count, upvar_decls, @@ -395,6 +411,22 @@ pub struct LocalDecl<'tcx> { /// True if this corresponds to a user-declared local variable. pub is_user_variable: bool, + /// True if this is an internal local + /// + /// These locals are not based on types in the source code and are only used + /// for drop flags at the moment. + /// + /// The generator transformation will sanity check the locals which are live + /// across a suspension point against the type components of the generator + /// which type checking knows are live across a suspension point. We need to + /// flag drop flags to avoid triggering this check as they are introduced + /// after typeck. + /// + /// This should be sound because the drop flags are fully algebraic, and + /// therefore don't affect the OIBIT or outlives properties of the + /// generator. + pub internal: bool, + /// Type of this local. pub ty: Ty<'tcx>, @@ -420,6 +452,23 @@ impl<'tcx> LocalDecl<'tcx> { span, scope: ARGUMENT_VISIBILITY_SCOPE }, + internal: false, + is_user_variable: false + } + } + + /// Create a new `LocalDecl` for a internal temporary. + #[inline] + pub fn new_internal(ty: Ty<'tcx>, span: Span) -> Self { + LocalDecl { + mutability: Mutability::Mut, + ty, + name: None, + source_info: SourceInfo { + span, + scope: ARGUMENT_VISIBILITY_SCOPE + }, + internal: true, is_user_variable: false } } @@ -436,6 +485,7 @@ impl<'tcx> LocalDecl<'tcx> { span, scope: ARGUMENT_VISIBILITY_SCOPE }, + internal: false, name: None, // FIXME maybe we do want some name here? is_user_variable: false } @@ -567,7 +617,20 @@ pub enum TerminatorKind<'tcx> { msg: AssertMessage<'tcx>, target: BasicBlock, cleanup: Option - } + }, + + /// A suspend point + Yield { + /// The value to return + value: Operand<'tcx>, + /// Where to resume to + resume: BasicBlock, + /// Cleanup to be done if the generator is dropped at this suspend point + drop: Option, + }, + + /// Indicates the end of the dropping of a generator + GeneratorDrop, } impl<'tcx> Terminator<'tcx> { @@ -597,7 +660,7 @@ impl<'tcx> TerminatorKind<'tcx> { match *self { Goto { target: ref b } => slice::ref_slice(b).into_cow(), SwitchInt { targets: ref b, .. } => b[..].into_cow(), - Resume => (&[]).into_cow(), + Resume | GeneratorDrop => (&[]).into_cow(), Return => (&[]).into_cow(), Unreachable => (&[]).into_cow(), Call { destination: Some((_, t)), cleanup: Some(c), .. } => vec![t, c].into_cow(), @@ -605,6 +668,8 @@ impl<'tcx> TerminatorKind<'tcx> { slice::ref_slice(t).into_cow(), Call { destination: None, cleanup: Some(ref c), .. } => slice::ref_slice(c).into_cow(), Call { destination: None, cleanup: None, .. } => (&[]).into_cow(), + Yield { resume: t, drop: Some(c), .. } => vec![t, c].into_cow(), + Yield { resume: ref t, drop: None, .. } => slice::ref_slice(t).into_cow(), DropAndReplace { target, unwind: Some(unwind), .. } | Drop { target, unwind: Some(unwind), .. 
} => { vec![target, unwind].into_cow() @@ -625,13 +690,15 @@ impl<'tcx> TerminatorKind<'tcx> { match *self { Goto { target: ref mut b } => vec![b], SwitchInt { targets: ref mut b, .. } => b.iter_mut().collect(), - Resume => Vec::new(), + Resume | GeneratorDrop => Vec::new(), Return => Vec::new(), Unreachable => Vec::new(), Call { destination: Some((_, ref mut t)), cleanup: Some(ref mut c), .. } => vec![t, c], Call { destination: Some((_, ref mut t)), cleanup: None, .. } => vec![t], Call { destination: None, cleanup: Some(ref mut c), .. } => vec![c], Call { destination: None, cleanup: None, .. } => vec![], + Yield { resume: ref mut t, drop: Some(ref mut c), .. } => vec![t, c], + Yield { resume: ref mut t, drop: None, .. } => vec![t], DropAndReplace { ref mut target, unwind: Some(ref mut unwind), .. } | Drop { ref mut target, unwind: Some(ref mut unwind), .. } => vec![target, unwind], DropAndReplace { ref mut target, unwind: None, .. } | @@ -664,6 +731,14 @@ impl<'tcx> BasicBlockData<'tcx> { pub fn terminator_mut(&mut self) -> &mut Terminator<'tcx> { self.terminator.as_mut().expect("invalid terminator state") } + + pub fn retain_statements(&mut self, mut f: F) where F: FnMut(&mut Statement) -> bool { + for s in &mut self.statements { + if !f(s) { + s.kind = StatementKind::Nop; + } + } + } } impl<'tcx> Debug for TerminatorKind<'tcx> { @@ -703,7 +778,9 @@ impl<'tcx> TerminatorKind<'tcx> { Goto { .. } => write!(fmt, "goto"), SwitchInt { discr: ref lv, .. } => write!(fmt, "switchInt({:?})", lv), Return => write!(fmt, "return"), + GeneratorDrop => write!(fmt, "generator_drop"), Resume => write!(fmt, "resume"), + Yield { ref value, .. } => write!(fmt, "_1 = suspend({:?})", value), Unreachable => write!(fmt, "unreachable"), Drop { ref location, .. } => write!(fmt, "drop({:?})", location), DropAndReplace { ref location, ref value, .. } => @@ -737,6 +814,12 @@ impl<'tcx> TerminatorKind<'tcx> { AssertMessage::Math(ref err) => { write!(fmt, "{:?}", err.description())?; } + AssertMessage::GeneratorResumedAfterReturn => { + write!(fmt, "{:?}", "generator resumed after completion")?; + } + AssertMessage::GeneratorResumedAfterPanic => { + write!(fmt, "{:?}", "generator resumed after panicking")?; + } } write!(fmt, ")") @@ -748,7 +831,7 @@ impl<'tcx> TerminatorKind<'tcx> { pub fn fmt_successor_labels(&self) -> Vec> { use self::TerminatorKind::*; match *self { - Return | Resume | Unreachable => vec![], + Return | Resume | Unreachable | GeneratorDrop => vec![], Goto { .. } => vec!["".into()], SwitchInt { ref values, .. } => { values.iter() @@ -765,6 +848,9 @@ impl<'tcx> TerminatorKind<'tcx> { Call { destination: Some(_), cleanup: None, .. } => vec!["return".into_cow()], Call { destination: None, cleanup: Some(_), .. } => vec!["unwind".into_cow()], Call { destination: None, cleanup: None, .. } => vec![], + Yield { drop: Some(_), .. } => + vec!["resume".into_cow(), "drop".into_cow()], + Yield { drop: None, .. } => vec!["resume".into_cow()], DropAndReplace { unwind: None, .. } | Drop { unwind: None, .. } => vec!["return".into_cow()], DropAndReplace { unwind: Some(_), .. } | @@ -784,7 +870,9 @@ pub enum AssertMessage<'tcx> { len: Operand<'tcx>, index: Operand<'tcx> }, - Math(ConstMathErr) + Math(ConstMathErr), + GeneratorResumedAfterReturn, + GeneratorResumedAfterPanic, } /////////////////////////////////////////////////////////////////////////// @@ -813,10 +901,10 @@ pub enum StatementKind<'tcx> { SetDiscriminant { lvalue: Lvalue<'tcx>, variant_index: usize }, /// Start a live range for the storage of the local. 
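
The hunks above add two generator-specific terminators to MIR: `Yield`, which suspends the generator with a `value` operand, a `resume` successor, and an optional `drop` edge, and `GeneratorDrop`, which ends the drop glue of a suspended generator (the new `AssertMessage` variants cover resuming after completion or after a panic). As a minimal sketch of the surface form this lowering supports, assuming the unstable `generators` feature gate used by this work (the exact gate name and syntax may differ at this snapshot):

```
#![feature(generators)]

fn main() {
    // Each `yield` lowers to a MIR `Yield` terminator: the yielded operand is
    // `value`, execution continues at `resume`, and the optional `drop` edge
    // is taken if the suspended generator is dropped instead of resumed.
    let g = || {
        yield 1;
        yield 2;
        "done"
    };

    // Dropping the still-suspended generator exercises the separate drop glue
    // whose MIR ends in the new `GeneratorDrop` terminator.
    drop(g);
}
```
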
- StorageLive(Lvalue<'tcx>), + StorageLive(Local), /// End the current live range for the storage of the local. - StorageDead(Lvalue<'tcx>), + StorageDead(Local), /// Execute a piece of inline Assembly. InlineAsm { @@ -830,9 +918,9 @@ pub enum StatementKind<'tcx> { /// See for more details. Validate(ValidationOp, Vec>>), - /// Mark one terminating point of an extent (i.e. static region). + /// Mark one terminating point of a region scope (i.e. static region). /// (The starting point(s) arise implicitly from borrows.) - EndRegion(CodeExtent), + EndRegion(region::Scope), /// No-op. Useful for deleting instructions without affecting statement indices. Nop, @@ -851,7 +939,7 @@ pub enum ValidationOp { Release, /// Recursive traverse the *mutable* part of the type and relinquish all exclusive /// access *until* the given region ends. Then, access will be recovered. - Suspend(CodeExtent), + Suspend(region::Scope), } impl Debug for ValidationOp { @@ -871,7 +959,7 @@ impl Debug for ValidationOp { pub struct ValidationOperand<'tcx, T> { pub lval: T, pub ty: Ty<'tcx>, - pub re: Option, + pub re: Option, pub mutbl: hir::Mutability, } @@ -989,12 +1077,12 @@ pub enum ProjectionElem<'tcx, V, T> { } /// Alias for projections as they appear in lvalues, where the base is an lvalue -/// and the index is an operand. -pub type LvalueProjection<'tcx> = Projection<'tcx, Lvalue<'tcx>, Operand<'tcx>, Ty<'tcx>>; +/// and the index is a local. +pub type LvalueProjection<'tcx> = Projection<'tcx, Lvalue<'tcx>, Local, Ty<'tcx>>; /// Alias for projections as they appear in lvalues, where the base is an lvalue -/// and the index is an operand. -pub type LvalueElem<'tcx> = ProjectionElem<'tcx, Operand<'tcx>, Ty<'tcx>>; +/// and the index is a local. +pub type LvalueElem<'tcx> = ProjectionElem<'tcx, Local, Ty<'tcx>>; newtype_index!(Field, "field"); @@ -1011,7 +1099,7 @@ impl<'tcx> Lvalue<'tcx> { self.elem(ProjectionElem::Downcast(adt_def, variant_index)) } - pub fn index(self, index: Operand<'tcx>) -> Lvalue<'tcx> { + pub fn index(self, index: Local) -> Lvalue<'tcx> { self.elem(ProjectionElem::Index(index)) } @@ -1178,6 +1266,7 @@ pub enum AggregateKind<'tcx> { /// number and is present only for union expressions. 
Adt(&'tcx AdtDef, usize, &'tcx Substs<'tcx>, Option), Closure(DefId, ClosureSubsts<'tcx>), + Generator(DefId, ClosureSubsts<'tcx>, GeneratorInterior<'tcx>), } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)] @@ -1339,6 +1428,31 @@ impl<'tcx> Debug for Rvalue<'tcx> { write!(fmt, "[closure]") } }), + + AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| { + if let Some(node_id) = tcx.hir.as_local_node_id(def_id) { + let name = format!("[generator@{:?}]", tcx.hir.span(node_id)); + let mut struct_fmt = fmt.debug_struct(&name); + + tcx.with_freevars(node_id, |freevars| { + for (freevar, lv) in freevars.iter().zip(lvs) { + let def_id = freevar.def.def_id(); + let var_id = tcx.hir.as_local_node_id(def_id).unwrap(); + let var_name = tcx.local_var_name_str(var_id); + struct_fmt.field(&var_name, lv); + } + struct_fmt.field("$state", &lvs[freevars.len()]); + for i in (freevars.len() + 1)..lvs.len() { + struct_fmt.field(&format!("${}", i - freevars.len() - 1), + &lvs[i]); + } + }); + + struct_fmt.finish() + } else { + write!(fmt, "[generator]") + } + }), } } } @@ -1483,6 +1597,11 @@ impl Location { } } +/// The layout of generator state +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] +pub struct GeneratorLayout<'tcx> { + pub fields: Vec>, +} /* * TypeFoldable implementations for MIR types @@ -1495,6 +1614,9 @@ impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> { visibility_scopes: self.visibility_scopes.clone(), promoted: self.promoted.fold_with(folder), return_ty: self.return_ty.fold_with(folder), + yield_ty: self.yield_ty.fold_with(folder), + generator_drop: self.generator_drop.fold_with(folder), + generator_layout: self.generator_layout.fold_with(folder), local_decls: self.local_decls.fold_with(folder), arg_count: self.arg_count, upvar_decls: self.upvar_decls.clone(), @@ -1506,12 +1628,27 @@ impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> { fn super_visit_with>(&self, visitor: &mut V) -> bool { self.basic_blocks.visit_with(visitor) || + self.generator_drop.visit_with(visitor) || + self.generator_layout.visit_with(visitor) || + self.yield_ty.visit_with(visitor) || self.promoted.visit_with(visitor) || self.return_ty.visit_with(visitor) || self.local_decls.visit_with(visitor) } } +impl<'tcx> TypeFoldable<'tcx> for GeneratorLayout<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + GeneratorLayout { + fields: self.fields.fold_with(folder), + } + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.fields.visit_with(visitor) + } +} + impl<'tcx> TypeFoldable<'tcx> for LocalDecl<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { LocalDecl { @@ -1564,19 +1701,19 @@ impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> { lvalue: lvalue.fold_with(folder), variant_index, }, - StorageLive(ref lval) => StorageLive(lval.fold_with(folder)), - StorageDead(ref lval) => StorageDead(lval.fold_with(folder)), + StorageLive(ref local) => StorageLive(local.fold_with(folder)), + StorageDead(ref local) => StorageDead(local.fold_with(folder)), InlineAsm { ref asm, ref outputs, ref inputs } => InlineAsm { asm: asm.clone(), outputs: outputs.fold_with(folder), inputs: inputs.fold_with(folder) }, - // Note for future: If we want to expose the extents + // Note for future: If we want to expose the region scopes // during the fold, we need to either generalize EndRegion // to carry `[ty::Region]`, or extend the `TypeFolder` - // trait with a `fn fold_extent`. 
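
`AggregateKind::Generator` above carries the captured upvars plus the interior witness, and the new `GeneratorLayout` records the types of locals that the state transformation must keep alive across suspension points (the `Debug` impl prints them after the upvars as `$state`, `$0`, `$1`, …). A small hedged illustration of both kinds of saved state, again assuming the unstable `generators` gate; the variable names are purely illustrative:

```
#![feature(generators)]

fn main() {
    let prefix = String::from("item"); // captured upvar: part of the generator aggregate
    let g = move || {
        // `count` is still needed after the first `yield`, so it is one of the
        // locals the state transformation records in the generator's layout.
        let mut count = 0;
        count += 1;
        yield format!("{} {}", prefix, count);
        count += 1;
        yield format!("{} {}", prefix, count);
    };
    drop(g);
}
```
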
- EndRegion(ref extent) => EndRegion(extent.clone()), + // trait with a `fn fold_scope`. + EndRegion(ref region_scope) => EndRegion(region_scope.clone()), Validate(ref op, ref lvals) => Validate(op.clone(), @@ -1595,17 +1732,17 @@ impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> { match self.kind { Assign(ref lval, ref rval) => { lval.visit_with(visitor) || rval.visit_with(visitor) } - SetDiscriminant { ref lvalue, .. } | - StorageLive(ref lvalue) | - StorageDead(ref lvalue) => lvalue.visit_with(visitor), + SetDiscriminant { ref lvalue, .. } => lvalue.visit_with(visitor), + StorageLive(ref local) | + StorageDead(ref local) => local.visit_with(visitor), InlineAsm { ref outputs, ref inputs, .. } => outputs.visit_with(visitor) || inputs.visit_with(visitor), - // Note for future: If we want to expose the extents + // Note for future: If we want to expose the region scopes // during the visit, we need to either generalize EndRegion // to carry `[ty::Region]`, or extend the `TypeVisitor` - // trait with a `fn visit_extent`. - EndRegion(ref _extent) => false, + // trait with a `fn visit_scope`. + EndRegion(ref _scope) => false, Validate(ref _op, ref lvalues) => lvalues.iter().any(|ty_and_lvalue| ty_and_lvalue.visit_with(visitor)), @@ -1638,6 +1775,11 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { target, unwind, }, + Yield { ref value, resume, drop } => Yield { + value: value.fold_with(folder), + resume: resume, + drop: drop, + }, Call { ref func, ref args, ref destination, cleanup } => { let dest = destination.as_ref().map(|&(ref loc, dest)| { (loc.fold_with(folder), dest) @@ -1667,6 +1809,7 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { cleanup, } }, + GeneratorDrop => GeneratorDrop, Resume => Resume, Return => Return, Unreachable => Unreachable, @@ -1686,6 +1829,8 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { Drop { ref location, ..} => location.visit_with(visitor), DropAndReplace { ref location, ref value, ..} => location.visit_with(visitor) || value.visit_with(visitor), + Yield { ref value, ..} => + value.visit_with(visitor), Call { ref func, ref args, ref destination, .. } => { let dest = if let Some((ref loc, _)) = *destination { loc.visit_with(visitor) @@ -1706,6 +1851,7 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { Goto { .. 
} | Resume | Return | + GeneratorDrop | Unreachable => false } } @@ -1751,7 +1897,11 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> { AggregateKind::Adt(def, v, substs, n) => AggregateKind::Adt(def, v, substs.fold_with(folder), n), AggregateKind::Closure(id, substs) => - AggregateKind::Closure(id, substs.fold_with(folder)) + AggregateKind::Closure(id, substs.fold_with(folder)), + AggregateKind::Generator(id, substs, interior) => + AggregateKind::Generator(id, + substs.fold_with(folder), + interior.fold_with(folder)), }; Aggregate(kind, fields.fold_with(folder)) } @@ -1777,7 +1927,9 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> { AggregateKind::Array(ty) => ty.visit_with(visitor), AggregateKind::Tuple => false, AggregateKind::Adt(_, _, substs, _) => substs.visit_with(visitor), - AggregateKind::Closure(_, substs) => substs.visit_with(visitor) + AggregateKind::Closure(_, substs) => substs.visit_with(visitor), + AggregateKind::Generator(_, substs, interior) => substs.visit_with(visitor) || + interior.visit_with(visitor), }) || fields.visit_with(visitor) } } diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index a801804d502a1..eb403442f4639 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -207,6 +207,9 @@ impl<'tcx> Rvalue<'tcx> { AggregateKind::Closure(did, substs) => { tcx.mk_closure_from_closure_substs(did, substs) } + AggregateKind::Generator(did, substs, interior) => { + tcx.mk_generator(did, substs, interior) + } } } } diff --git a/src/librustc/mir/transform.rs b/src/librustc/mir/transform.rs index aa91123ef6952..f29405e665051 100644 --- a/src/librustc/mir/transform.rs +++ b/src/librustc/mir/transform.rs @@ -33,7 +33,10 @@ pub enum MirSource { Static(NodeId, hir::Mutability), /// Promoted rvalues within a function. - Promoted(NodeId, Promoted) + Promoted(NodeId, Promoted), + + /// Drop glue for a generator. + GeneratorDrop(NodeId), } impl<'a, 'tcx> MirSource { @@ -70,6 +73,7 @@ impl<'a, 'tcx> MirSource { match *self { MirSource::Fn(id) | MirSource::Const(id) | + MirSource::GeneratorDrop(id) | MirSource::Static(id, _) | MirSource::Promoted(id, _) => id } diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index a6d115bf03912..f26505c6d0237 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -11,7 +11,7 @@ use middle::const_val::ConstVal; use hir::def_id::DefId; use ty::subst::Substs; -use ty::{ClosureSubsts, Region, Ty}; +use ty::{ClosureSubsts, Region, Ty, GeneratorInterior}; use mir::*; use rustc_const_math::ConstUsize; use syntax_pos::Span; @@ -226,6 +226,12 @@ macro_rules! make_mir_visitor { self.super_closure_substs(substs); } + fn visit_generator_interior(&mut self, + interior: & $($mutability)* GeneratorInterior<'tcx>, + _: Location) { + self.super_generator_interior(interior); + } + fn visit_const_val(&mut self, const_val: & $($mutability)* ConstVal, _: Location) { @@ -249,6 +255,12 @@ macro_rules! make_mir_visitor { self.super_local_decl(local_decl); } + fn visit_local(&mut self, + _local: & $($mutability)* Local, + _context: LvalueContext<'tcx>, + _location: Location) { + } + fn visit_visibility_scope(&mut self, scope: & $($mutability)* VisibilityScope) { self.super_visibility_scope(scope); @@ -348,11 +360,11 @@ macro_rules! make_mir_visitor { StatementKind::SetDiscriminant{ ref $($mutability)* lvalue, .. 
} => { self.visit_lvalue(lvalue, LvalueContext::Store, location); } - StatementKind::StorageLive(ref $($mutability)* lvalue) => { - self.visit_lvalue(lvalue, LvalueContext::StorageLive, location); + StatementKind::StorageLive(ref $($mutability)* local) => { + self.visit_local(local, LvalueContext::StorageLive, location); } - StatementKind::StorageDead(ref $($mutability)* lvalue) => { - self.visit_lvalue(lvalue, LvalueContext::StorageDead, location); + StatementKind::StorageDead(ref $($mutability)* local) => { + self.visit_local(local, LvalueContext::StorageDead, location); } StatementKind::InlineAsm { ref $($mutability)* outputs, ref $($mutability)* inputs, @@ -415,6 +427,7 @@ macro_rules! make_mir_visitor { TerminatorKind::Resume | TerminatorKind::Return | + TerminatorKind::GeneratorDrop | TerminatorKind::Unreachable => { } @@ -461,6 +474,15 @@ macro_rules! make_mir_visitor { self.visit_branch(block, target); cleanup.map(|t| self.visit_branch(block, t)); } + + TerminatorKind::Yield { ref $($mutability)* value, + resume, + drop } => { + self.visit_operand(value, source_location); + self.visit_branch(block, resume); + drop.map(|t| self.visit_branch(block, t)); + } + } } @@ -475,7 +497,9 @@ macro_rules! make_mir_visitor { self.visit_operand(len, location); self.visit_operand(index, location); } - AssertMessage::Math(_) => {} + AssertMessage::Math(_) => {}, + AssertMessage::GeneratorResumedAfterReturn => {}, + AssertMessage::GeneratorResumedAfterPanic => {}, } } @@ -553,6 +577,13 @@ macro_rules! make_mir_visitor { self.visit_def_id(def_id, location); self.visit_closure_substs(closure_substs, location); } + AggregateKind::Generator(ref $($mutability)* def_id, + ref $($mutability)* closure_substs, + ref $($mutability)* interior) => { + self.visit_def_id(def_id, location); + self.visit_closure_substs(closure_substs, location); + self.visit_generator_interior(interior, location); + } } for operand in operands { @@ -580,7 +611,8 @@ macro_rules! make_mir_visitor { context: LvalueContext<'tcx>, location: Location) { match *lvalue { - Lvalue::Local(_) => { + Lvalue::Local(ref $($mutability)* local) => { + self.visit_local(local, context, location); } Lvalue::Static(ref $($mutability)* static_) => { self.visit_static(static_, context, location); @@ -632,8 +664,8 @@ macro_rules! make_mir_visitor { ProjectionElem::Field(_field, ref $($mutability)* ty) => { self.visit_ty(ty, Lookup::Loc(location)); } - ProjectionElem::Index(ref $($mutability)* operand) => { - self.visit_operand(operand, location); + ProjectionElem::Index(ref $($mutability)* local) => { + self.visit_local(local, LvalueContext::Consume, location); } ProjectionElem::ConstantIndex { offset: _, min_length: _, @@ -651,6 +683,7 @@ macro_rules! make_mir_visitor { ref $($mutability)* ty, name: _, ref $($mutability)* source_info, + internal: _, is_user_variable: _, } = *local_decl; @@ -719,6 +752,10 @@ macro_rules! 
make_mir_visitor { fn super_substs(&mut self, _substs: & $($mutability)* &'tcx Substs<'tcx>) { } + fn super_generator_interior(&mut self, + _interior: & $($mutability)* GeneratorInterior<'tcx>) { + } + fn super_closure_substs(&mut self, _substs: & $($mutability)* ClosureSubsts<'tcx>) { } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 99fe8e60ae52b..b538c6a6eca7b 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -340,6 +340,7 @@ pub enum PrintRequest { RelocationModels, CodeModels, TargetSpec, + NativeStaticLibs, } pub enum Input { @@ -1296,7 +1297,7 @@ pub fn rustc_short_optgroups() -> Vec { print on stdout", "[crate-name|file-names|sysroot|cfg|target-list|\ target-cpus|target-features|relocation-models|\ - code-models|target-spec-json]"), + code-models|target-spec-json|native-static-deps]"), opt::flagmulti_s("g", "", "Equivalent to -C debuginfo=2"), opt::flagmulti_s("O", "", "Equivalent to -C opt-level=2"), opt::opt_s("o", "", "Write output to ", "FILENAME"), @@ -1642,6 +1643,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) "target-features" => PrintRequest::TargetFeatures, "relocation-models" => PrintRequest::RelocationModels, "code-models" => PrintRequest::CodeModels, + "native-static-libs" => PrintRequest::NativeStaticLibs, "target-spec-json" => { if nightly_options::is_unstable_enabled(matches) { PrintRequest::TargetSpec diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 34df447a11e15..431bd8ee88f70 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -301,7 +301,7 @@ fn ty_is_local_constructor(ty: Ty, infer_is_local: InferIsLocal)-> bool { true } - ty::TyClosure(..) | ty::TyAnon(..) => { + ty::TyClosure(..) | ty::TyGenerator(..) | ty::TyAnon(..) => { bug!("ty_is_local invoked on unexpected type: {:?}", ty) } } diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 3e64c8c7dea99..ce6da55fec36b 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -15,6 +15,8 @@ use super::{ Obligation, ObligationCause, ObligationCauseCode, + OnUnimplementedDirective, + OnUnimplementedNote, OutputTypeParameterMismatch, TraitNotObjectSafe, PredicateObligation, @@ -25,7 +27,6 @@ use super::{ }; use errors::DiagnosticBuilder; -use fmt_macros::{Parser, Piece, Position}; use hir; use hir::def_id::DefId; use infer::{self, InferCtxt}; @@ -251,6 +252,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { AdtKind::Union => Some(16), AdtKind::Enum => Some(17), }, + ty::TyGenerator(..) => Some(18), ty::TyInfer(..) | ty::TyError => None } } @@ -315,77 +317,56 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - fn on_unimplemented_note(&self, - trait_ref: ty::PolyTraitRef<'tcx>, - obligation: &PredicateObligation<'tcx>) -> Option { + fn on_unimplemented_note( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + obligation: &PredicateObligation<'tcx>) -> + OnUnimplementedNote + { let def_id = self.impl_similar_to(trait_ref, obligation) .unwrap_or(trait_ref.def_id()); - let trait_ref = trait_ref.skip_binder(); + let trait_ref = *trait_ref.skip_binder(); + + let desugaring; + let method; + let mut flags = vec![]; + let direct = match obligation.cause.code { + ObligationCauseCode::BuiltinDerivedObligation(..) | + ObligationCauseCode::ImplDerivedObligation(..) 
=> false, + _ => true + }; + if direct { + // this is a "direct", user-specified, rather than derived, + // obligation. + flags.push(("direct", None)); + } - let span = obligation.cause.span; - let mut report = None; - if let Some(item) = self.tcx - .get_attrs(def_id) - .into_iter() - .filter(|a| a.check_name("rustc_on_unimplemented")) - .next() - { - let name = self.tcx.item_name(def_id).as_str(); - let err_sp = item.span.substitute_dummy(span); - let trait_str = self.tcx.item_path_str(trait_ref.def_id); - if let Some(istring) = item.value_str() { - let istring = &*istring.as_str(); - let generics = self.tcx.generics_of(trait_ref.def_id); - let generic_map = generics.types.iter().map(|param| { - (param.name.as_str().to_string(), - trait_ref.substs.type_for_def(param).to_string()) - }).collect::>(); - let parser = Parser::new(istring); - let mut errored = false; - let err: String = parser.filter_map(|p| { - match p { - Piece::String(s) => Some(s), - Piece::NextArgument(a) => match a.position { - Position::ArgumentNamed(s) => match generic_map.get(s) { - Some(val) => Some(val), - None if s == name => { - Some(&trait_str) - } - None => { - span_err!(self.tcx.sess, err_sp, E0272, - "the #[rustc_on_unimplemented] attribute on trait \ - definition for {} refers to non-existent type \ - parameter {}", - trait_str, s); - errored = true; - None - } - }, - _ => { - span_err!(self.tcx.sess, err_sp, E0273, - "the #[rustc_on_unimplemented] attribute on trait \ - definition for {} must have named format arguments, eg \ - `#[rustc_on_unimplemented = \"foo {{T}}\"]`", - trait_str); - errored = true; - None - } - } - } - }).collect(); - // Report only if the format string checks out - if !errored { - report = Some(err); - } - } else { - span_err!(self.tcx.sess, err_sp, E0274, - "the #[rustc_on_unimplemented] attribute on \ - trait definition for {} must have a value, \ - eg `#[rustc_on_unimplemented = \"foo\"]`", - trait_str); + if let ObligationCauseCode::ItemObligation(item) = obligation.cause.code { + // FIXME: maybe also have some way of handling methods + // from other traits? That would require name resolution, + // which we might want to be some sort of hygienic. + // + // Currently I'm leaving it for what I need for `try`. 
+ if self.tcx.trait_of_item(item) == Some(trait_ref.def_id) { + method = self.tcx.item_name(item).as_str(); + flags.push(("from_method", None)); + flags.push(("from_method", Some(&*method))); } } - report + + if let Some(k) = obligation.cause.span.compiler_desugaring_kind() { + desugaring = k.as_symbol().as_str(); + flags.push(("from_desugaring", None)); + flags.push(("from_desugaring", Some(&*desugaring))); + } + + if let Ok(Some(command)) = OnUnimplementedDirective::of_item( + self.tcx, trait_ref.def_id, def_id + ) { + command.evaluate(self.tcx, trait_ref, &flags) + } else { + OnUnimplementedNote::empty() + } } fn find_similar_impl_candidates(&self, @@ -576,17 +557,23 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let (post_message, pre_message) = self.get_parent_trait_ref(&obligation.cause.code) .map(|t| (format!(" in `{}`", t), format!("within `{}`, ", t))) - .unwrap_or((String::new(), String::new())); + .unwrap_or((String::new(), String::new())); + + let OnUnimplementedNote { message, label } + = self.on_unimplemented_note(trait_ref, obligation); + let have_alt_message = message.is_some() || label.is_some(); + let mut err = struct_span_err!( self.tcx.sess, span, E0277, - "the trait bound `{}` is not satisfied{}", - trait_ref.to_predicate(), - post_message); + "{}", + message.unwrap_or_else(|| { + format!("the trait bound `{}` is not satisfied{}", + trait_ref.to_predicate(), post_message) + })); - let unimplemented_note = self.on_unimplemented_note(trait_ref, obligation); - if let Some(ref s) = unimplemented_note { + if let Some(ref s) = label { // If it has a custom "#[rustc_on_unimplemented]" // error message, let's display it as the label! err.span_label(span, s.as_str()); @@ -614,7 +601,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // which is somewhat confusing. err.help(&format!("consider adding a `where {}` bound", trait_ref.to_predicate())); - } else if unimplemented_note.is_none() { + } else if !have_alt_message { // Can't show anything else useful, try to find similar impls. let impl_candidates = self.find_similar_impl_candidates(trait_ref); self.report_similar_impl_candidates(impl_candidates, &mut err); diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 78e47693caaf1..fbc393cbd96f2 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -251,6 +251,9 @@ impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> { }); debug!("select: outcome={:?}", outcome); + // FIXME: if we kept the original cache key, we could mark projection + // obligations as complete for the projection cache here. 
+ errors.extend( outcome.errors.into_iter() .map(|e| to_fulfillment_error(e))); diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 5bae82c1a332f..33dcf3c76e6cf 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -17,7 +17,7 @@ pub use self::ObligationCauseCode::*; use hir; use hir::def_id::DefId; -use middle::region::RegionMaps; +use middle::region; use middle::free_region::FreeRegionMap; use ty::subst::Substs; use ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable, ToPredicate}; @@ -37,8 +37,9 @@ pub use self::project::{normalize, normalize_projection_type, Normalized}; pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal}; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; +pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; -pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs}; +pub use self::specialize::{OverlapError, specialization_graph, translate_substs}; pub use self::specialize::{SpecializesCache, find_associated_item}; pub use self::util::elaborate_predicates; pub use self::util::supertraits; @@ -52,6 +53,7 @@ mod error_reporting; mod fulfill; mod project; mod object_safety; +mod on_unimplemented; mod select; mod specialize; mod structural_impls; @@ -310,6 +312,9 @@ pub enum Vtable<'tcx, N> { /// Same as above, but for a fn pointer type with the given signature. VtableFnPointer(VtableFnPointerData<'tcx, N>), + + /// Vtable automatically generated for a generator + VtableGenerator(VtableGeneratorData<'tcx, N>), } /// Identifies a particular impl in the source, along with a set of @@ -329,6 +334,15 @@ pub struct VtableImplData<'tcx, N> { pub nested: Vec } +#[derive(Clone, PartialEq, Eq)] +pub struct VtableGeneratorData<'tcx, N> { + pub closure_def_id: DefId, + pub substs: ty::ClosureSubsts<'tcx>, + /// Nested obligations. This can be non-empty if the generator + /// signature contains associated types. 
+ pub nested: Vec +} + #[derive(Clone, PartialEq, Eq)] pub struct VtableClosureData<'tcx, N> { pub closure_def_id: DefId, @@ -520,9 +534,9 @@ pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, debug!("normalize_param_env_or_error: normalized predicates={:?}", predicates); - let region_maps = RegionMaps::new(); + let region_scope_tree = region::ScopeTree::default(); let free_regions = FreeRegionMap::new(); - infcx.resolve_regions_and_report_errors(region_context, ®ion_maps, &free_regions); + infcx.resolve_regions_and_report_errors(region_context, ®ion_scope_tree, &free_regions); let predicates = match infcx.fully_resolve(&predicates) { Ok(predicates) => predicates, Err(fixup_err) => { @@ -743,6 +757,7 @@ impl<'tcx, N> Vtable<'tcx, N> { VtableBuiltin(i) => i.nested, VtableDefaultImpl(d) => d.nested, VtableClosure(c) => c.nested, + VtableGenerator(c) => c.nested, VtableObject(d) => d.nested, VtableFnPointer(d) => d.nested, } @@ -754,6 +769,7 @@ impl<'tcx, N> Vtable<'tcx, N> { &mut VtableParam(ref mut n) => n, &mut VtableBuiltin(ref mut i) => &mut i.nested, &mut VtableDefaultImpl(ref mut d) => &mut d.nested, + &mut VtableGenerator(ref mut c) => &mut c.nested, &mut VtableClosure(ref mut c) => &mut c.nested, &mut VtableObject(ref mut d) => &mut d.nested, &mut VtableFnPointer(ref mut d) => &mut d.nested, @@ -784,6 +800,11 @@ impl<'tcx, N> Vtable<'tcx, N> { fn_ty: p.fn_ty, nested: p.nested.into_iter().map(f).collect(), }), + VtableGenerator(c) => VtableGenerator(VtableGeneratorData { + closure_def_id: c.closure_def_id, + substs: c.substs, + nested: c.nested.into_iter().map(f).collect(), + }), VtableClosure(c) => VtableClosure(VtableClosureData { closure_def_id: c.closure_def_id, substs: c.substs, @@ -812,6 +833,7 @@ pub fn provide(providers: &mut ty::maps::Providers) { *providers = ty::maps::Providers { is_object_safe: object_safety::is_object_safe_provider, specialization_graph_of: specialize::specialization_graph_provider, + specializes: specialize::specializes, ..*providers }; } @@ -820,6 +842,7 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) { *providers = ty::maps::Providers { is_object_safe: object_safety::is_object_safe_provider, specialization_graph_of: specialize::specialization_graph_provider, + specializes: specialize::specializes, ..*providers }; } diff --git a/src/librustc/traits/on_unimplemented.rs b/src/librustc/traits/on_unimplemented.rs new file mode 100644 index 0000000000000..7dd3fc70b1e3f --- /dev/null +++ b/src/librustc/traits/on_unimplemented.rs @@ -0,0 +1,307 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use fmt_macros::{Parser, Piece, Position}; + +use hir::def_id::DefId; +use ty::{self, TyCtxt}; +use util::common::ErrorReported; +use util::nodemap::FxHashMap; + +use syntax::ast::{MetaItem, NestedMetaItem}; +use syntax::attr; +use syntax_pos::Span; +use syntax_pos::symbol::InternedString; + +#[derive(Clone, Debug)] +pub struct OnUnimplementedFormatString(InternedString); + +#[derive(Debug)] +pub struct OnUnimplementedDirective { + pub condition: Option, + pub subcommands: Vec, + pub message: Option, + pub label: Option, +} + +pub struct OnUnimplementedNote { + pub message: Option, + pub label: Option, +} + +impl OnUnimplementedNote { + pub fn empty() -> Self { + OnUnimplementedNote { message: None, label: None } + } +} + +fn parse_error(tcx: TyCtxt, span: Span, + message: &str, + label: &str, + note: Option<&str>) + -> ErrorReported +{ + let mut diag = struct_span_err!( + tcx.sess, span, E0232, "{}", message); + diag.span_label(span, label); + if let Some(note) = note { + diag.note(note); + } + diag.emit(); + ErrorReported +} + +impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { + pub fn parse(tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + items: &[NestedMetaItem], + span: Span, + is_root: bool) + -> Result + { + let mut errored = false; + let mut item_iter = items.iter(); + + let condition = if is_root { + None + } else { + let cond = item_iter.next().ok_or_else(|| { + parse_error(tcx, span, + "empty `on`-clause in `#[rustc_on_unimplemented]`", + "empty on-clause here", + None) + })?.meta_item().ok_or_else(|| { + parse_error(tcx, span, + "invalid `on`-clause in `#[rustc_on_unimplemented]`", + "invalid on-clause here", + None) + })?; + attr::eval_condition(cond, &tcx.sess.parse_sess, &mut |_| true); + Some(cond.clone()) + }; + + let mut message = None; + let mut label = None; + let mut subcommands = vec![]; + for item in item_iter { + if item.check_name("message") && message.is_none() { + if let Some(message_) = item.value_str() { + message = Some(OnUnimplementedFormatString::try_parse( + tcx, trait_def_id, message_.as_str(), span)?); + continue; + } + } else if item.check_name("label") && label.is_none() { + if let Some(label_) = item.value_str() { + label = Some(OnUnimplementedFormatString::try_parse( + tcx, trait_def_id, label_.as_str(), span)?); + continue; + } + } else if item.check_name("on") && is_root && + message.is_none() && label.is_none() + { + if let Some(items) = item.meta_item_list() { + if let Ok(subcommand) = + Self::parse(tcx, trait_def_id, &items, item.span, false) + { + subcommands.push(subcommand); + } else { + errored = true; + } + continue + } + } + + // nothing found + parse_error(tcx, item.span, + "this attribute must have a valid value", + "expected value here", + Some(r#"eg `#[rustc_on_unimplemented = "foo"]`"#)); + } + + if errored { + Err(ErrorReported) + } else { + Ok(OnUnimplementedDirective { condition, message, label, subcommands }) + } + } + + + pub fn of_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + impl_def_id: DefId) + -> Result, ErrorReported> + { + let attrs = tcx.get_attrs(impl_def_id); + + let attr = if let Some(item) = + attrs.into_iter().find(|a| a.check_name("rustc_on_unimplemented")) + { + item + } else { + return Ok(None); + }; + + let result = if let Some(items) = attr.meta_item_list() { + Self::parse(tcx, trait_def_id, &items, attr.span, true).map(Some) + } else if let Some(value) = attr.value_str() { + Ok(Some(OnUnimplementedDirective { + condition: None, + message: None, + subcommands: vec![], + label: 
Some(OnUnimplementedFormatString::try_parse( + tcx, trait_def_id, value.as_str(), attr.span)?) + })) + } else { + return Err(parse_error(tcx, attr.span, + "`#[rustc_on_unimplemented]` requires a value", + "value required here", + Some(r#"eg `#[rustc_on_unimplemented = "foo"]`"#))); + }; + debug!("of_item({:?}/{:?}) = {:?}", trait_def_id, impl_def_id, result); + result + } + + pub fn evaluate(&self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, + options: &[(&str, Option<&str>)]) + -> OnUnimplementedNote + { + let mut message = None; + let mut label = None; + info!("evaluate({:?}, trait_ref={:?}, options={:?})", + self, trait_ref, options); + + for command in self.subcommands.iter().chain(Some(self)).rev() { + if let Some(ref condition) = command.condition { + if !attr::eval_condition(condition, &tcx.sess.parse_sess, &mut |c| { + options.contains(&(&c.name().as_str(), + match c.value_str().map(|s| s.as_str()) { + Some(ref s) => Some(s), + None => None + })) + }) { + debug!("evaluate: skipping {:?} due to condition", command); + continue + } + } + debug!("evaluate: {:?} succeeded", command); + if let Some(ref message_) = command.message { + message = Some(message_.clone()); + } + + if let Some(ref label_) = command.label { + label = Some(label_.clone()); + } + } + + OnUnimplementedNote { + label: label.map(|l| l.format(tcx, trait_ref)), + message: message.map(|m| m.format(tcx, trait_ref)) + } + } +} + +impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString { + pub fn try_parse(tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + from: InternedString, + err_sp: Span) + -> Result + { + let result = OnUnimplementedFormatString(from); + result.verify(tcx, trait_def_id, err_sp)?; + Ok(result) + } + + fn verify(&self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + span: Span) + -> Result<(), ErrorReported> + { + let name = tcx.item_name(trait_def_id).as_str(); + let generics = tcx.generics_of(trait_def_id); + let parser = Parser::new(&self.0); + let types = &generics.types; + let mut result = Ok(()); + for token in parser { + match token { + Piece::String(_) => (), // Normal string, no need to check it + Piece::NextArgument(a) => match a.position { + // `{Self}` is allowed + Position::ArgumentNamed(s) if s == "Self" => (), + // `{ThisTraitsName}` is allowed + Position::ArgumentNamed(s) if s == name => (), + // So is `{A}` if A is a type parameter + Position::ArgumentNamed(s) => match types.iter().find(|t| { + t.name == s + }) { + Some(_) => (), + None => { + span_err!(tcx.sess, span, E0230, + "there is no type parameter \ + {} on trait {}", + s, name); + result = Err(ErrorReported); + } + }, + // `{:1}` and `{}` are not to be used + Position::ArgumentIs(_) => { + span_err!(tcx.sess, span, E0231, + "only named substitution \ + parameters are allowed"); + result = Err(ErrorReported); + } + } + } + } + + result + } + + pub fn format(&self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>) + -> String + { + let name = tcx.item_name(trait_ref.def_id).as_str(); + let trait_str = tcx.item_path_str(trait_ref.def_id); + let generics = tcx.generics_of(trait_ref.def_id); + let generic_map = generics.types.iter().map(|param| { + (param.name.as_str().to_string(), + trait_ref.substs.type_for_def(param).to_string()) + }).collect::>(); + + let parser = Parser::new(&self.0); + parser.map(|p| { + match p { + Piece::String(s) => s, + Piece::NextArgument(a) => match a.position { + Position::ArgumentNamed(s) => match generic_map.get(s) { + Some(val) => val, + None if s == name => { 
+ &trait_str + } + None => { + bug!("broken on_unimplemented {:?} for {:?}: \ + no argument matching {:?}", + self.0, trait_ref, s) + } + }, + _ => { + bug!("broken on_unimplemented {:?} - bad \ + format arg", self.0) + } + } + } + }).collect() + } +} diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index e70258007e463..512cfee12b05f 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -19,6 +19,7 @@ use super::PredicateObligation; use super::SelectionContext; use super::SelectionError; use super::VtableClosureData; +use super::VtableGeneratorData; use super::VtableFnPointerData; use super::VtableImplData; use super::util; @@ -121,11 +122,13 @@ struct ProjectionTyCandidateSet<'tcx> { /// /// for<...> ::U == V /// -/// If successful, this may result in additional obligations. +/// If successful, this may result in additional obligations. Also returns +/// the projection cache key used to track these additional obligations. pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &PolyProjectionObligation<'tcx>) - -> Result>>, MismatchedProjectionTypes<'tcx>> + -> Result>>, + MismatchedProjectionTypes<'tcx>> { debug!("poly_project_and_unify_type(obligation={:?})", obligation); @@ -161,7 +164,8 @@ pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( fn project_and_unify_type<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionObligation<'tcx>) - -> Result>>, MismatchedProjectionTypes<'tcx>> + -> Result>>, + MismatchedProjectionTypes<'tcx>> { debug!("project_and_unify_type(obligation={:?})", obligation); @@ -396,6 +400,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( let infcx = selcx.infcx(); let projection_ty = infcx.resolve_type_vars_if_possible(&projection_ty); + let cache_key = ProjectionCacheKey { ty: projection_ty }; debug!("opt_normalize_projection_type(\ projection_ty={:?}, \ @@ -411,7 +416,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // bounds. It might be the case that we want two distinct caches, // or else another kind of cache entry. 
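
The new `on_unimplemented` module above replaces the old single-string `#[rustc_on_unimplemented = "…"]` handling with a structured directive: an optional `on(…)` condition evaluated against flags such as `direct`, `from_method`, and `from_desugaring`, plus `message` and `label` format strings that may reference `{Self}`, the trait's own name, and its type parameters (anything else is rejected by `verify`). A hedged sketch of the syntax the parser accepts; the trait, the feature gate, and the chosen condition are illustrative, not taken from this patch:

```
#![feature(on_unimplemented)]

#[rustc_on_unimplemented(
    on(direct, label = "`{Self}` cannot be frobbed here"),
    message = "the trait `Frob<{T}>` is not implemented for `{Self}`",
    label = "no implementation of `Frob<{T}>` for `{Self}`"
)]
trait Frob<T> {
    fn frob(&self, arg: T);
}

fn main() {}
```

As the `error_reporting` changes earlier in the patch show, a matching `message` replaces the main E0277 text and a matching `label` becomes the span label on the offending expression.
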
- match infcx.projection_cache.borrow_mut().try_start(projection_ty) { + match infcx.projection_cache.borrow_mut().try_start(cache_key) { Ok(()) => { } Err(ProjectionCacheEntry::Ambiguous) => { // If we found ambiguity the last time, that generally @@ -522,7 +527,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( obligations, } }; - infcx.projection_cache.borrow_mut().complete(projection_ty, &result); + infcx.projection_cache.borrow_mut().insert_ty(cache_key, &result); Some(result) } Ok(ProjectedTy::NoProgress(projected_ty)) => { @@ -533,14 +538,14 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( value: projected_ty, obligations: vec![] }; - infcx.projection_cache.borrow_mut().complete(projection_ty, &result); + infcx.projection_cache.borrow_mut().insert_ty(cache_key, &result); Some(result) } Err(ProjectionTyError::TooManyCandidates) => { debug!("opt_normalize_projection_type: \ too many candidates"); infcx.projection_cache.borrow_mut() - .ambiguous(projection_ty); + .ambiguous(cache_key); None } Err(ProjectionTyError::TraitSelectionError(_)) => { @@ -551,7 +556,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // reported later infcx.projection_cache.borrow_mut() - .error(projection_ty); + .error(cache_key); Some(normalize_to_error(selcx, param_env, projection_ty, cause, depth)) } } @@ -882,6 +887,7 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( match vtable { super::VtableClosure(_) | + super::VtableGenerator(_) | super::VtableFnPointer(_) | super::VtableObject(_) => { debug!("assemble_candidates_from_impls: vtable={:?}", @@ -1041,6 +1047,8 @@ fn confirm_select_candidate<'cx, 'gcx, 'tcx>( match vtable { super::VtableImpl(data) => confirm_impl_candidate(selcx, obligation, data), + super::VtableGenerator(data) => + confirm_generator_candidate(selcx, obligation, data), super::VtableClosure(data) => confirm_closure_candidate(selcx, obligation, data), super::VtableFnPointer(data) => @@ -1123,6 +1131,60 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>( confirm_param_env_candidate(selcx, obligation, env_predicate) } +fn confirm_generator_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + obligation: &ProjectionTyObligation<'tcx>, + vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>) + -> Progress<'tcx> +{ + let gen_sig = selcx.infcx().generator_sig(vtable.closure_def_id).unwrap() + .subst(selcx.tcx(), vtable.substs.substs); + let Normalized { + value: gen_sig, + obligations + } = normalize_with_depth(selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth+1, + &gen_sig); + + debug!("confirm_generator_candidate: obligation={:?},gen_sig={:?},obligations={:?}", + obligation, + gen_sig, + obligations); + + let tcx = selcx.tcx(); + + let gen_def_id = tcx.lang_items.gen_trait().unwrap(); + + // Note: we unwrap the binder here but re-create it below (1) + let ty::Binder((trait_ref, yield_ty, return_ty)) = + tcx.generator_trait_ref_and_outputs(gen_def_id, + obligation.predicate.self_ty(), + gen_sig); + + let name = tcx.associated_item(obligation.predicate.item_def_id).name; + let ty = if name == Symbol::intern("Return") { + return_ty + } else if name == Symbol::intern("Yield") { + yield_ty + } else { + bug!() + }; + + let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here + projection_ty: ty::ProjectionTy { + substs: trait_ref.substs, + item_def_id: obligation.predicate.item_def_id, + }, + ty: ty + }); + + confirm_param_env_candidate(selcx, obligation, predicate) + 
.with_addl_obligations(vtable.nested) + .with_addl_obligations(obligations) +} + fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, @@ -1323,8 +1385,62 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( // # Cache +/// The projection cache. Unlike the standard caches, this can +/// include infcx-dependent type variables - therefore, we have to roll +/// the cache back each time we roll a snapshot back, to avoid assumptions +/// on yet-unresolved inference variables. Types with skolemized regions +/// also have to be removed when the respective snapshot ends. +/// +/// Because of that, projection cache entries can be "stranded" and left +/// inaccessible when type variables inside the key are resolved. We make no +/// attempt to recover or remove "stranded" entries, but rather let them be +/// (for the lifetime of the infcx). +/// +/// Entries in the projection cache might contain inference variables +/// that will be resolved by obligations on the projection cache entry - e.g. +/// when a type parameter in the associated type is constrained through +/// an "RFC 447" projection on the impl. +/// +/// When working with a fulfillment context, the derived obligations of each +/// projection cache entry will be registered on the fulfillcx, so any users +/// that can wait for a fulfillcx fixed point need not care about this. However, +/// users that don't wait for a fixed point (e.g. trait evaluation) have to +/// resolve the obligations themselves to make sure the projected result is +/// ok and avoid issues like #43132. +/// +/// If that is done, after evaluation the obligations, it is a good idea to +/// call `ProjectionCache::complete` to make sure the obligations won't be +/// re-evaluated and avoid an exponential worst-case. +/// +/// FIXME: we probably also want some sort of cross-infcx cache here to +/// reduce the amount of duplication. Let's see what we get with the Chalk +/// reforms. pub struct ProjectionCache<'tcx> { - map: SnapshotMap, ProjectionCacheEntry<'tcx>>, + map: SnapshotMap, ProjectionCacheEntry<'tcx>>, +} + +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct ProjectionCacheKey<'tcx> { + ty: ty::ProjectionTy<'tcx> +} + +impl<'cx, 'gcx, 'tcx> ProjectionCacheKey<'tcx> { + pub fn from_poly_projection_predicate(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + predicate: &ty::PolyProjectionPredicate<'tcx>) + -> Option + { + let infcx = selcx.infcx(); + // We don't do cross-snapshot caching of obligations with escaping regions, + // so there's no cache key to use + infcx.tcx.no_late_bound_regions(&predicate) + .map(|predicate| ProjectionCacheKey { + // We don't attempt to match up with a specific type-variable state + // from a specific call to `opt_normalize_projection_type` - if + // there's no precise match, the original cache entry is "stranded" + // anyway. 
+ ty: infcx.resolve_type_vars_if_possible(&predicate.projection_ty) + }) + } } #[derive(Clone, Debug)] @@ -1337,7 +1453,7 @@ enum ProjectionCacheEntry<'tcx> { // NB: intentionally not Clone pub struct ProjectionCacheSnapshot { - snapshot: Snapshot + snapshot: Snapshot, } impl<'tcx> ProjectionCache<'tcx> { @@ -1356,7 +1472,7 @@ impl<'tcx> ProjectionCache<'tcx> { } pub fn rollback_skolemized(&mut self, snapshot: &ProjectionCacheSnapshot) { - self.map.partial_rollback(&snapshot.snapshot, &|k| k.has_re_skol()); + self.map.partial_rollback(&snapshot.snapshot, &|k| k.ty.has_re_skol()); } pub fn commit(&mut self, snapshot: ProjectionCacheSnapshot) { @@ -1366,7 +1482,7 @@ impl<'tcx> ProjectionCache<'tcx> { /// Try to start normalize `key`; returns an error if /// normalization already occurred (this error corresponds to a /// cache hit, so it's actually a good thing). - fn try_start(&mut self, key: ty::ProjectionTy<'tcx>) + fn try_start(&mut self, key: ProjectionCacheKey<'tcx>) -> Result<(), ProjectionCacheEntry<'tcx>> { if let Some(entry) = self.map.get(&key) { return Err(entry.clone()); @@ -1377,25 +1493,51 @@ impl<'tcx> ProjectionCache<'tcx> { } /// Indicates that `key` was normalized to `value`. - fn complete(&mut self, key: ty::ProjectionTy<'tcx>, value: &NormalizedTy<'tcx>) { - debug!("ProjectionCacheEntry::complete: adding cache entry: key={:?}, value={:?}", + fn insert_ty(&mut self, key: ProjectionCacheKey<'tcx>, value: &NormalizedTy<'tcx>) { + debug!("ProjectionCacheEntry::insert_ty: adding cache entry: key={:?}, value={:?}", key, value); let fresh_key = self.map.insert(key, ProjectionCacheEntry::NormalizedTy(value.clone())); assert!(!fresh_key, "never started projecting `{:?}`", key); } + /// Mark the relevant projection cache key as having its derived obligations + /// complete, so they won't have to be re-computed (this is OK to do in a + /// snapshot - if the snapshot is rolled back, the obligations will be + /// marked as incomplete again). + pub fn complete(&mut self, key: ProjectionCacheKey<'tcx>) { + let ty = match self.map.get(&key) { + Some(&ProjectionCacheEntry::NormalizedTy(ref ty)) => { + debug!("ProjectionCacheEntry::complete({:?}) - completing {:?}", + key, ty); + ty.value + } + ref value => { + // Type inference could "strand behind" old cache entries. Leave + // them alone for now. + debug!("ProjectionCacheEntry::complete({:?}) - ignoring {:?}", + key, value); + return + } + }; + + self.map.insert(key, ProjectionCacheEntry::NormalizedTy(Normalized { + value: ty, + obligations: vec![] + })); + } + /// Indicates that trying to normalize `key` resulted in /// ambiguity. No point in trying it again then until we gain more /// type information (in which case, the "fully resolved" key will /// be different). - fn ambiguous(&mut self, key: ty::ProjectionTy<'tcx>) { + fn ambiguous(&mut self, key: ProjectionCacheKey<'tcx>) { let fresh = self.map.insert(key, ProjectionCacheEntry::Ambiguous); assert!(!fresh, "never started projecting `{:?}`", key); } /// Indicates that trying to normalize `key` resulted in /// error. 
- fn error(&mut self, key: ty::ProjectionTy<'tcx>) { + fn error(&mut self, key: ProjectionCacheKey<'tcx>) { let fresh = self.map.insert(key, ProjectionCacheEntry::Error); assert!(!fresh, "never started projecting `{:?}`", key); } diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 46bdb1344b2fe..726e5d83428ca 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -16,7 +16,7 @@ use self::EvaluationResult::*; use super::coherence; use super::DerivedObligationCause; use super::project; -use super::project::{normalize_with_depth, Normalized}; +use super::project::{normalize_with_depth, Normalized, ProjectionCacheKey}; use super::{PredicateObligation, TraitObligation, ObligationCause}; use super::{ObligationCauseCode, BuiltinDerivedObligation, ImplDerivedObligation}; use super::{SelectionError, Unimplemented, OutputTypeParameterMismatch}; @@ -24,9 +24,9 @@ use super::{ObjectCastObligation, Obligation}; use super::TraitNotObjectSafe; use super::Selection; use super::SelectionResult; -use super::{VtableBuiltin, VtableImpl, VtableParam, VtableClosure, +use super::{VtableBuiltin, VtableImpl, VtableParam, VtableClosure, VtableGenerator, VtableFnPointer, VtableObject, VtableDefaultImpl}; -use super::{VtableImplData, VtableObjectData, VtableBuiltinData, +use super::{VtableImplData, VtableObjectData, VtableBuiltinData, VtableGeneratorData, VtableClosureData, VtableDefaultImplData, VtableFnPointerData}; use super::util; @@ -36,13 +36,13 @@ use infer; use infer::{InferCtxt, InferOk, TypeFreshener}; use ty::subst::{Kind, Subst, Substs}; use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable}; -use traits; use ty::fast_reject; use ty::relate::TypeRelation; use middle::lang_items; use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::snapshot_vec::{SnapshotVecDelegate, SnapshotVec}; +use std::iter; use std::cell::RefCell; use std::cmp; use std::fmt; @@ -193,9 +193,12 @@ enum SelectionCandidate<'tcx> { ProjectionCandidate, /// Implementation of a `Fn`-family trait by one of the anonymous types - /// generated for a `||` expression. The ty::ClosureKind informs the - /// confirmation step what ClosureKind obligation to emit. - ClosureCandidate(/* closure */ DefId, ty::ClosureSubsts<'tcx>, ty::ClosureKind), + /// generated for a `||` expression. + ClosureCandidate, + + /// Implementation of a `Generator` trait by one of the anonymous types + /// generated for a generator. 
+ GeneratorCandidate, /// Implementation of a `Fn`-family trait by one of the anonymous /// types generated for a fn pointer type (e.g., `fn(int)->int`) @@ -224,15 +227,12 @@ impl<'a, 'tcx> ty::Lift<'tcx> for SelectionCandidate<'a> { ObjectCandidate => ObjectCandidate, BuiltinObjectCandidate => BuiltinObjectCandidate, BuiltinUnsizeCandidate => BuiltinUnsizeCandidate, + ClosureCandidate => ClosureCandidate, + GeneratorCandidate => GeneratorCandidate, ParamCandidate(ref trait_ref) => { return tcx.lift(trait_ref).map(ParamCandidate); } - ClosureCandidate(def_id, ref substs, kind) => { - return tcx.lift(substs).map(|substs| { - ClosureCandidate(def_id, substs, kind) - }); - } }) } } @@ -655,8 +655,14 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let project_obligation = obligation.with(data.clone()); match project::poly_project_and_unify_type(self, &project_obligation) { Ok(Some(subobligations)) => { - self.evaluate_predicates_recursively(previous_stack, - subobligations.iter()) + let result = self.evaluate_predicates_recursively(previous_stack, + subobligations.iter()); + if let Some(key) = + ProjectionCacheKey::from_poly_projection_predicate(self, data) + { + self.infcx.projection_cache.borrow_mut().complete(key); + } + result } Ok(None) => { EvaluatedToAmbig @@ -888,14 +894,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { dep_node: DepNodeIndex, result: EvaluationResult) { - // Avoid caching results that depend on more than just the trait-ref: - // The stack can create recursion, and closure signatures - // being yet uninferred can create "spurious" EvaluatedToAmbig - // and EvaluatedToOk. - if result.is_stack_dependent() || - ((result == EvaluatedToAmbig || result == EvaluatedToOk) - && trait_ref.has_closure_types()) - { + // Avoid caching results that depend on more than just the trait-ref + // - the stack can create recursion. + if result.is_stack_dependent() { return; } @@ -955,15 +956,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { this.candidate_from_obligation_no_cache(stack) }); - if self.should_update_candidate_cache(&cache_fresh_trait_pred, &candidate) { - debug!("CACHE MISS: SELECT({:?})={:?}", - cache_fresh_trait_pred, candidate); - self.insert_candidate_cache(stack.obligation.param_env, - cache_fresh_trait_pred, - dep_node, - candidate.clone()); - } - + debug!("CACHE MISS: SELECT({:?})={:?}", + cache_fresh_trait_pred, candidate); + self.insert_candidate_cache(stack.obligation.param_env, + cache_fresh_trait_pred, + dep_node, + candidate.clone()); candidate } @@ -1203,45 +1201,6 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { .insert(trait_ref, WithDepNode::new(dep_node, candidate)); } - fn should_update_candidate_cache(&mut self, - cache_fresh_trait_pred: &ty::PolyTraitPredicate<'tcx>, - candidate: &SelectionResult<'tcx, SelectionCandidate<'tcx>>) - -> bool - { - // In general, it's a good idea to cache results, even - // ambiguous ones, to save us some trouble later. But we have - // to be careful not to cache results that could be - // invalidated later by advances in inference. Normally, this - // is not an issue, because any inference variables whose - // types are not yet bound are "freshened" in the cache key, - // which means that if we later get the same request once that - // type variable IS bound, we'll have a different cache key. - // For example, if we have `Vec<_#0t> : Foo`, and `_#0t` is - // not yet known, we may cache the result as `None`. 
But if - // later `_#0t` is bound to `Bar`, then when we freshen we'll - // have `Vec : Foo` as the cache key. - // - // HOWEVER, it CAN happen that we get an ambiguity result in - // one particular case around closures where the cache key - // would not change. That is when the precise types of the - // upvars that a closure references have not yet been figured - // out (i.e., because it is not yet known if they are captured - // by ref, and if by ref, what kind of ref). In these cases, - // when matching a builtin bound, we will yield back an - // ambiguous result. But the *cache key* is just the closure type, - // it doesn't capture the state of the upvar computation. - // - // To avoid this trap, just don't cache ambiguous results if - // the self-type contains no inference byproducts (that really - // shouldn't happen in other circumstances anyway, given - // coherence). - - match *candidate { - Ok(Some(_)) | Err(_) => true, - Ok(None) => cache_fresh_trait_pred.has_infer_types() - } - } - fn assemble_candidates<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Result, SelectionError<'tcx>> @@ -1296,14 +1255,15 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } else if self.tcx().lang_items.unsize_trait() == Some(def_id) { self.assemble_candidates_for_unsizing(obligation, &mut candidates); } else { - if self.tcx().lang_items.clone_trait() == Some(def_id) { - // Same builtin conditions as `Copy`, i.e. every type which has builtin support - // for `Copy` also has builtin support for `Clone`, + tuples and arrays of `Clone` - // types have builtin support for `Clone`. - let clone_conditions = self.copy_conditions(obligation); - self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates)?; - } - + if self.tcx().lang_items.clone_trait() == Some(def_id) { + // Same builtin conditions as `Copy`, i.e. every type which has builtin support + // for `Copy` also has builtin support for `Clone`, + tuples and arrays of `Clone` + // types have builtin support for `Clone`. + let clone_conditions = self.copy_conditions(obligation); + self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates)?; + } + + self.assemble_generator_candidates(obligation, &mut candidates)?; self.assemble_closure_candidates(obligation, &mut candidates)?; self.assemble_fn_pointer_candidates(obligation, &mut candidates)?; self.assemble_candidates_from_impls(obligation, &mut candidates)?; @@ -1488,6 +1448,37 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { }) } + fn assemble_generator_candidates(&mut self, + obligation: &TraitObligation<'tcx>, + candidates: &mut SelectionCandidateSet<'tcx>) + -> Result<(),SelectionError<'tcx>> + { + if self.tcx().lang_items.gen_trait() != Some(obligation.predicate.def_id()) { + return Ok(()); + } + + // ok to skip binder because the substs on generator types never + // touch bound regions, they just capture the in-scope + // type/region parameters + let self_ty = *obligation.self_ty().skip_binder(); + match self_ty.sty { + ty::TyGenerator(..) => { + debug!("assemble_generator_candidates: self_ty={:?} obligation={:?}", + self_ty, + obligation); + + candidates.vec.push(GeneratorCandidate); + Ok(()) + } + ty::TyInfer(ty::TyVar(_)) => { + debug!("assemble_generator_candidates: ambiguous self-type"); + candidates.ambiguous = true; + return Ok(()); + } + _ => { return Ok(()); } + } + } + /// Check for the artificial impl that the compiler will create for an obligation like `X : /// FnMut<..>` where `X` is a closure type. 
/// @@ -1507,36 +1498,31 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // ok to skip binder because the substs on closure types never // touch bound regions, they just capture the in-scope // type/region parameters - let self_ty = *obligation.self_ty().skip_binder(); - let (closure_def_id, substs) = match self_ty.sty { - ty::TyClosure(id, substs) => (id, substs), + match obligation.self_ty().skip_binder().sty { + ty::TyClosure(closure_def_id, _) => { + debug!("assemble_unboxed_candidates: kind={:?} obligation={:?}", + kind, obligation); + match self.infcx.closure_kind(closure_def_id) { + Some(closure_kind) => { + debug!("assemble_unboxed_candidates: closure_kind = {:?}", closure_kind); + if closure_kind.extends(kind) { + candidates.vec.push(ClosureCandidate); + } + } + None => { + debug!("assemble_unboxed_candidates: closure_kind not yet known"); + candidates.vec.push(ClosureCandidate); + } + }; + Ok(()) + } ty::TyInfer(ty::TyVar(_)) => { debug!("assemble_unboxed_closure_candidates: ambiguous self-type"); candidates.ambiguous = true; return Ok(()); } _ => { return Ok(()); } - }; - - debug!("assemble_unboxed_candidates: self_ty={:?} kind={:?} obligation={:?}", - self_ty, - kind, - obligation); - - match self.infcx.closure_kind(closure_def_id) { - Some(closure_kind) => { - debug!("assemble_unboxed_candidates: closure_kind = {:?}", closure_kind); - if closure_kind.extends(kind) { - candidates.vec.push(ClosureCandidate(closure_def_id, substs, kind)); - } - } - None => { - debug!("assemble_unboxed_candidates: closure_kind not yet known"); - candidates.vec.push(ClosureCandidate(closure_def_id, substs, kind)); - } } - - Ok(()) } /// Implement one of the `Fn()` family for a fn pointer. @@ -1853,7 +1839,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { when there are other valid candidates"); } ImplCandidate(..) | - ClosureCandidate(..) | + ClosureCandidate | + GeneratorCandidate | FnPointerCandidate | BuiltinObjectCandidate | BuiltinUnsizeCandidate | @@ -1877,7 +1864,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { if other.evaluation == EvaluatedToOk { if let ImplCandidate(victim_def) = victim.candidate { let tcx = self.tcx().global_tcx(); - return traits::specializes(tcx, other_def, victim_def) || + return tcx.specializes((other_def, victim_def)) || tcx.impls_are_allowed_to_overlap(other_def, victim_def); } } @@ -1934,7 +1921,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) | ty::TyUint(_) | ty::TyInt(_) | ty::TyBool | ty::TyFloat(_) | ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyRawPtr(..) | - ty::TyChar | ty::TyRef(..) | + ty::TyChar | ty::TyRef(..) | ty::TyGenerator(..) | ty::TyArray(..) | ty::TyClosure(..) | ty::TyNever | ty::TyError => { // safe for everything @@ -1986,7 +1973,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } ty::TyDynamic(..) | ty::TyStr | ty::TySlice(..) | - ty::TyClosure(..) | + ty::TyClosure(..) | ty::TyGenerator(..) 
| ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => { Never } @@ -2087,6 +2074,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { substs.upvar_tys(def_id, self.tcx()).collect() } + ty::TyGenerator(def_id, ref substs, interior) => { + let witness = iter::once(interior.witness); + substs.upvar_tys(def_id, self.tcx()).chain(witness).collect() + } + // for `PhantomData`, we pass `T` ty::TyAdt(def, substs) if def.is_phantom_data() => { substs.types().collect() @@ -2190,12 +2182,16 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { Ok(VtableImpl(self.confirm_impl_candidate(obligation, impl_def_id))) } - ClosureCandidate(closure_def_id, substs, kind) => { - let vtable_closure = - self.confirm_closure_candidate(obligation, closure_def_id, substs, kind)?; + ClosureCandidate => { + let vtable_closure = self.confirm_closure_candidate(obligation)?; Ok(VtableClosure(vtable_closure)) } + GeneratorCandidate => { + let vtable_generator = self.confirm_generator_candidate(obligation)?; + Ok(VtableGenerator(vtable_generator)) + } + BuiltinObjectCandidate => { // This indicates something like `(Trait+Send) : // Send`. In this case, we know that this holds @@ -2528,23 +2524,84 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { Ok(VtableFnPointerData { fn_ty: self_ty, nested: obligations }) } - fn confirm_closure_candidate(&mut self, - obligation: &TraitObligation<'tcx>, - closure_def_id: DefId, - substs: ty::ClosureSubsts<'tcx>, - kind: ty::ClosureKind) - -> Result>, + fn confirm_generator_candidate(&mut self, + obligation: &TraitObligation<'tcx>) + -> Result>, SelectionError<'tcx>> { - debug!("confirm_closure_candidate({:?},{:?},{:?})", + // ok to skip binder because the substs on generator types never + // touch bound regions, they just capture the in-scope + // type/region parameters + let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); + let (closure_def_id, substs) = match self_ty.sty { + ty::TyGenerator(id, substs, _) => (id, substs), + _ => bug!("closure candidate for non-closure {:?}", obligation) + }; + + debug!("confirm_generator_candidate({:?},{:?},{:?})", obligation, closure_def_id, substs); + let trait_ref = + self.generator_trait_ref_unnormalized(obligation, closure_def_id, substs); + let Normalized { + value: trait_ref, + obligations + } = normalize_with_depth(self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth+1, + &trait_ref); + + debug!("confirm_generator_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})", + closure_def_id, + trait_ref, + obligations); + + self.confirm_poly_trait_refs(obligation.cause.clone(), + obligation.param_env, + obligation.predicate.to_poly_trait_ref(), + trait_ref)?; + + Ok(VtableGeneratorData { + closure_def_id: closure_def_id, + substs: substs.clone(), + nested: obligations + }) + } + + fn confirm_closure_candidate(&mut self, + obligation: &TraitObligation<'tcx>) + -> Result>, + SelectionError<'tcx>> + { + debug!("confirm_closure_candidate({:?})", obligation); + + let kind = match self.tcx().lang_items.fn_trait_kind(obligation.predicate.0.def_id()) { + Some(k) => k, + None => bug!("closure candidate for non-fn trait {:?}", obligation) + }; + + // ok to skip binder because the substs on closure types never + // touch bound regions, they just capture the in-scope + // type/region parameters + let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); + let (closure_def_id, substs) = match self_ty.sty { + ty::TyClosure(id, 
substs) => (id, substs), + _ => bug!("closure candidate for non-closure {:?}", obligation) + }; + + let trait_ref = + self.closure_trait_ref_unnormalized(obligation, closure_def_id, substs); let Normalized { value: trait_ref, mut obligations - } = self.closure_trait_ref(obligation, closure_def_id, substs); + } = normalize_with_depth(self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth+1, + &trait_ref); debug!("confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})", closure_def_id, @@ -3011,22 +3068,25 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ty::Binder(trait_ref) } - fn closure_trait_ref(&mut self, - obligation: &TraitObligation<'tcx>, - closure_def_id: DefId, - substs: ty::ClosureSubsts<'tcx>) - -> Normalized<'tcx, ty::PolyTraitRef<'tcx>> + fn generator_trait_ref_unnormalized(&mut self, + obligation: &TraitObligation<'tcx>, + closure_def_id: DefId, + substs: ty::ClosureSubsts<'tcx>) + -> ty::PolyTraitRef<'tcx> { - let trait_ref = self.closure_trait_ref_unnormalized( - obligation, closure_def_id, substs); + let gen_sig = self.infcx.generator_sig(closure_def_id).unwrap() + .subst(self.tcx(), substs.substs); + let ty::Binder((trait_ref, ..)) = + self.tcx().generator_trait_ref_and_outputs(obligation.predicate.def_id(), + obligation.predicate.0.self_ty(), // (1) + gen_sig); + // (1) Feels icky to skip the binder here, but OTOH we know + // that the self-type is an generator type and hence is + // in fact unparameterized (or at least does not reference any + // regions bound in the obligation). Still probably some + // refactoring could make this nicer. - // A closure signature can contain associated types which - // must be normalized. - normalize_with_depth(self, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth+1, - &trait_ref) + ty::Binder(trait_ref) } /// Returns the obligations that are implied by instantiating an diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 7c916e162a4ff..2dd6ca4b5a928 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -150,15 +150,12 @@ pub fn find_associated_item<'a, 'tcx>( /// Specialization is determined by the sets of types to which the impls apply; /// impl1 specializes impl2 if it applies to a subset of the types impl2 applies /// to. -pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - impl1_def_id: DefId, - impl2_def_id: DefId) -> bool { +pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + (impl1_def_id, impl2_def_id): (DefId, DefId)) + -> bool +{ debug!("specializes({:?}, {:?})", impl1_def_id, impl2_def_id); - if let Some(r) = tcx.specializes_cache.borrow().check(impl1_def_id, impl2_def_id) { - return r; - } - // The feature gate should prevent introducing new specializations, but not // taking advantage of upstream ones. if !tcx.sess.features.borrow().specialization && @@ -188,7 +185,7 @@ pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap(); // Create a infcx, taking the predicates of impl1 as assumptions: - let result = tcx.infer_ctxt().enter(|infcx| { + tcx.infer_ctxt().enter(|infcx| { // Normalize the trait reference. The WF rules ought to ensure // that this always succeeds. let impl1_trait_ref = @@ -204,10 +201,7 @@ pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Attempt to prove that impl2 applies, given all of the above. 
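Dropping the hand-rolled `SpecializesCache` here goes hand in hand with turning `specializes` into a query keyed on a `(DefId, DefId)` pair (see the new `specializes_node` entry in `ty/maps.rs` further down), so memoization now lives in the query machinery rather than in an explicit `RefCell` cache inside the function. A rough sketch of that shape follows, with invented stand-ins: a `HashMap` plays the role of the query cache, and the provider's decision rule is a placeholder so the sketch runs.

```rust
// A rough sketch of memoizing `specializes` by key, with toy stand-ins:
// a HashMap plays the role of the query cache, and the "provider" rule is invented.

use std::collections::HashMap;

type DefId = u32;

struct Tcx {
    specializes_cache: HashMap<(DefId, DefId), bool>,
}

impl Tcx {
    fn new() -> Self {
        Tcx { specializes_cache: HashMap::new() }
    }

    // Query entry point: look the pair up, or compute and record it.
    fn specializes(&mut self, key: (DefId, DefId)) -> bool {
        if let Some(&cached) = self.specializes_cache.get(&key) {
            return cached;
        }
        let result = specializes_provider(key);
        self.specializes_cache.insert(key, result);
        result
    }
}

// The provider: corresponds to the old function body, minus the explicit
// cache check and insert. The rule below is a placeholder so the sketch runs.
fn specializes_provider((impl1, impl2): (DefId, DefId)) -> bool {
    impl1 > impl2
}

fn main() {
    let mut tcx = Tcx::new();
    assert!(tcx.specializes((2, 1)));  // computed once
    assert!(tcx.specializes((2, 1)));  // served from the cache on the repeat call
    assert!(!tcx.specializes((1, 2))); // the reverse pair is a different key
}
```

Callers change accordingly, from `specializes(tcx, a, b)` to `tcx.specializes((a, b))`, as in the selection and specialization-graph hunks.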
fulfill_implication(&infcx, penv, impl1_trait_ref, impl2_def_id).is_ok() - }); - - tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result); - result + }) } /// Attempt to fulfill all obligations of `target_impl` after unification with diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 8b31cb599e45d..5242accceabb3 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use super::{OverlapError, specializes}; +use super::OverlapError; use hir::def_id::DefId; use traits; @@ -118,8 +118,8 @@ impl<'a, 'gcx, 'tcx> Children { return Ok((false, false)); } - let le = specializes(tcx, impl_def_id, possible_sibling); - let ge = specializes(tcx, possible_sibling, impl_def_id); + let le = tcx.specializes((impl_def_id, possible_sibling)); + let ge = tcx.specializes((possible_sibling, impl_def_id)); if le == ge { // overlap, but no specialization; error out diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs index d913c76ec3c09..674da297cd959 100644 --- a/src/librustc/traits/structural_impls.rs +++ b/src/librustc/traits/structural_impls.rs @@ -53,6 +53,9 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::Vtable<'tcx, N> { super::VtableClosure(ref d) => write!(f, "{:?}", d), + super::VtableGenerator(ref d) => + write!(f, "{:?}", d), + super::VtableFnPointer(ref d) => write!(f, "VtableFnPointer({:?})", d), @@ -77,6 +80,15 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableImplData<'tcx, N> { } } +impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableGeneratorData<'tcx, N> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "VtableGenerator(closure_def_id={:?}, substs={:?}, nested={:?})", + self.closure_def_id, + self.substs, + self.nested) + } +} + impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableClosureData<'tcx, N> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "VtableClosure(closure_def_id={:?}, substs={:?}, nested={:?})", @@ -278,6 +290,19 @@ impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> { }) } traits::VtableDefaultImpl(t) => Some(traits::VtableDefaultImpl(t)), + traits::VtableGenerator(traits::VtableGeneratorData { + closure_def_id, + substs, + nested + }) => { + tcx.lift(&substs).map(|substs| { + traits::VtableGenerator(traits::VtableGeneratorData { + closure_def_id: closure_def_id, + substs: substs, + nested: nested + }) + }) + } traits::VtableClosure(traits::VtableClosureData { closure_def_id, substs, @@ -351,6 +376,20 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableImplData< } } +impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableGeneratorData<'tcx, N> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + traits::VtableGeneratorData { + closure_def_id: self.closure_def_id, + substs: self.substs.fold_with(folder), + nested: self.nested.fold_with(folder), + } + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.substs.visit_with(visitor) || self.nested.visit_with(visitor) + } +} + impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableClosureData<'tcx, N> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableClosureData { @@ -422,6 +461,9 
@@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> match *self { traits::VtableImpl(ref v) => traits::VtableImpl(v.fold_with(folder)), traits::VtableDefaultImpl(ref t) => traits::VtableDefaultImpl(t.fold_with(folder)), + traits::VtableGenerator(ref d) => { + traits::VtableGenerator(d.fold_with(folder)) + } traits::VtableClosure(ref d) => { traits::VtableClosure(d.fold_with(folder)) } @@ -438,6 +480,7 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> match *self { traits::VtableImpl(ref v) => v.visit_with(visitor), traits::VtableDefaultImpl(ref t) => t.visit_with(visitor), + traits::VtableGenerator(ref d) => d.visit_with(visitor), traits::VtableClosure(ref d) => d.visit_with(visitor), traits::VtableFnPointer(ref d) => d.visit_with(visitor), traits::VtableParam(ref n) => n.visit_with(visitor), diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index dae0c896909c8..28abd1577dade 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -513,6 +513,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ty::Binder((trait_ref, sig.skip_binder().output())) } + pub fn generator_trait_ref_and_outputs(self, + fn_trait_def_id: DefId, + self_ty: Ty<'tcx>, + sig: ty::PolyGenSig<'tcx>) + -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>, Ty<'tcx>)> + { + let trait_ref = ty::TraitRef { + def_id: fn_trait_def_id, + substs: self.mk_substs_trait(self_ty, &[]), + }; + ty::Binder((trait_ref, sig.skip_binder().yield_ty, sig.skip_binder().return_ty)) + } + pub fn impl_is_default(self, node_item_def_id: DefId) -> bool { match self.hir.as_local_node_id(node_item_def_id) { Some(node_id) => { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 6ce2232eb3e5d..f475baf19949f 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -14,8 +14,8 @@ use dep_graph::DepGraph; use errors::DiagnosticBuilder; use session::Session; use middle; -use hir::{TraitMap}; -use hir::def::{Def, ExportMap}; +use hir::{TraitCandidate, HirId}; +use hir::def::{Def, Export}; use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use hir::map as hir_map; use hir::map::DefPathHash; @@ -32,7 +32,7 @@ use ty::ReprOptions; use traits; use ty::{self, Ty, TypeAndMut}; use ty::{TyS, TypeVariants, Slice}; -use ty::{AdtKind, AdtDef, ClosureSubsts, Region}; +use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region}; use hir::FreevarMap; use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate}; use ty::RegionKind; @@ -340,6 +340,10 @@ pub struct TypeckTables<'tcx> { /// that caused the closure to be this kind. closure_kinds: ItemLocalMap<(ty::ClosureKind, Option<(Span, ast::Name)>)>, + generator_sigs: ItemLocalMap>>, + + generator_interiors: ItemLocalMap>, + /// For each fn, records the "liberated" types of its arguments /// and return type. 
Liberated means that all bound regions /// (including late-bound regions) are replaced with free @@ -381,6 +385,8 @@ impl<'tcx> TypeckTables<'tcx> { adjustments: ItemLocalMap(), pat_binding_modes: ItemLocalMap(), upvar_capture_map: FxHashMap(), + generator_sigs: ItemLocalMap(), + generator_interiors: ItemLocalMap(), closure_tys: ItemLocalMap(), closure_kinds: ItemLocalMap(), liberated_fn_sigs: ItemLocalMap(), @@ -634,6 +640,42 @@ impl<'tcx> TypeckTables<'tcx> { data: &mut self.cast_kinds } } + + pub fn generator_sigs(&self) + -> LocalTableInContext>> + { + LocalTableInContext { + local_id_root: self.local_id_root, + data: &self.generator_sigs, + } + } + + pub fn generator_sigs_mut(&mut self) + -> LocalTableInContextMut>> + { + LocalTableInContextMut { + local_id_root: self.local_id_root, + data: &mut self.generator_sigs, + } + } + + pub fn generator_interiors(&self) + -> LocalTableInContext> + { + LocalTableInContext { + local_id_root: self.local_id_root, + data: &self.generator_interiors, + } + } + + pub fn generator_interiors_mut(&mut self) + -> LocalTableInContextMut> + { + LocalTableInContextMut { + local_id_root: self.local_id_root, + data: &mut self.generator_interiors, + } + } } impl<'a, 'gcx, 'tcx> HashStable> for TypeckTables<'gcx> { @@ -658,6 +700,8 @@ impl<'a, 'gcx, 'tcx> HashStable> for Typeck ref used_trait_imports, tainted_by_errors, ref free_region_map, + ref generator_sigs, + ref generator_interiors, } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { @@ -691,6 +735,8 @@ impl<'a, 'gcx, 'tcx> HashStable> for Typeck ich::hash_stable_itemlocalmap(hcx, hasher, liberated_fn_sigs); ich::hash_stable_itemlocalmap(hcx, hasher, fru_field_types); ich::hash_stable_itemlocalmap(hcx, hasher, cast_kinds); + ich::hash_stable_itemlocalmap(hcx, hasher, generator_sigs); + ich::hash_stable_itemlocalmap(hcx, hasher, generator_interiors); ich::hash_stable_hashset(hcx, hasher, used_trait_imports, |hcx, def_id| { hcx.def_path_hash(*def_id) @@ -762,8 +808,6 @@ pub struct GlobalCtxt<'tcx> { pub sess: &'tcx Session, - pub specializes_cache: RefCell, - pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>, pub dep_graph: DepGraph, @@ -773,10 +817,10 @@ pub struct GlobalCtxt<'tcx> { /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. - pub trait_map: TraitMap, + trait_map: FxHashMap>>, /// Export map produced by name resolution. - pub export_map: ExportMap, + export_map: FxHashMap>>, pub named_region_map: resolve_lifetime::NamedRegionMap, @@ -809,10 +853,6 @@ pub struct GlobalCtxt<'tcx> { pub lang_items: middle::lang_items::LanguageItems, - /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not - /// present in this set can be warned about. - pub used_unsafe: RefCell, - /// Set of nodes which mark locals as mutable which end up getting used at /// some point. Local variable definitions not in this set can be warned /// about. 
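The new `generator_sigs` and `generator_interiors` side tables follow the established `TypeckTables` pattern: the `ItemLocalMap` stays private, and callers go through a paired read view (`LocalTableInContext`) and write view (`LocalTableInContextMut`), each of which also carries the `local_id_root` in the real code. Below is a small self-contained sketch of that accessor-pair shape; the view types and the `String` payload are invented stand-ins, and the `local_id_root` bookkeeping is omitted.

```rust
// A self-contained sketch of the table-plus-accessor-pair pattern.
// `TableView`, `TableViewMut` and the String payload are invented stand-ins for
// LocalTableInContext, LocalTableInContextMut and the stored generator signature.

use std::collections::HashMap;

type ItemLocalId = u32;

struct TableView<'a, V> {
    data: &'a HashMap<ItemLocalId, V>,
}

struct TableViewMut<'a, V> {
    data: &'a mut HashMap<ItemLocalId, V>,
}

impl<'a, V> TableView<'a, V> {
    fn get(&self, id: ItemLocalId) -> Option<&V> {
        self.data.get(&id)
    }
}

impl<'a, V> TableViewMut<'a, V> {
    fn insert(&mut self, id: ItemLocalId, value: V) {
        self.data.insert(id, value);
    }
}

#[derive(Default)]
struct TypeckTables {
    // The map itself stays private; all access goes through the paired views.
    generator_sigs: HashMap<ItemLocalId, String>,
}

impl TypeckTables {
    fn generator_sigs<'a>(&'a self) -> TableView<'a, String> {
        TableView { data: &self.generator_sigs }
    }

    fn generator_sigs_mut<'a>(&'a mut self) -> TableViewMut<'a, String> {
        TableViewMut { data: &mut self.generator_sigs }
    }
}

fn main() {
    let mut tables = TypeckTables::default();
    tables.generator_sigs_mut().insert(0, "yields i32, returns ()".to_string());
    assert!(tables.generator_sigs().get(0).is_some());
}
```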
@@ -1026,14 +1066,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { tls::enter_global(GlobalCtxt { sess: s, trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()), - specializes_cache: RefCell::new(traits::SpecializesCache::new()), global_arenas: arenas, global_interners: interners, dep_graph: dep_graph.clone(), types: common_types, named_region_map, - trait_map: resolutions.trait_map, - export_map: resolutions.export_map, + trait_map: resolutions.trait_map.into_iter().map(|(k, v)| { + (hir.node_to_hir_id(k), Rc::new(v)) + }).collect(), + export_map: resolutions.export_map.into_iter().map(|(k, v)| { + (hir.node_to_hir_id(k), Rc::new(v)) + }).collect(), hir, def_path_hash_to_def_id, maps: maps::Maps::new(providers), @@ -1045,7 +1088,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { normalized_cache: RefCell::new(FxHashMap()), inhabitedness_cache: RefCell::new(FxHashMap()), lang_items, - used_unsafe: RefCell::new(NodeSet()), used_mut_nodes: RefCell::new(NodeSet()), stability: RefCell::new(stability), selection_cache: traits::SelectionCache::new(), @@ -1364,7 +1406,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { pub fn print_debug_stats(self) { sty_debug_print!( self, - TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, + TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, TyGenerator, TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon); println!("Substs interner: #{}", self.interners.substs.borrow().len()); @@ -1719,6 +1761,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.mk_ty(TyClosure(closure_id, closure_substs)) } + pub fn mk_generator(self, + id: DefId, + closure_substs: ClosureSubsts<'tcx>, + interior: GeneratorInterior<'tcx>) + -> Ty<'tcx> { + self.mk_ty(TyGenerator(id, closure_substs, interior)) + } + pub fn mk_var(self, v: TyVid) -> Ty<'tcx> { self.mk_infer(TyVar(v)) } @@ -1943,3 +1993,20 @@ impl InternIteratorElement for Result { Ok(f(&iter.collect::, _>>()?)) } } + +fn in_scope_traits<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: HirId) + -> Option>> +{ + tcx.gcx.trait_map.get(&id).cloned() +} + +fn module_exports<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: HirId) + -> Option>> +{ + tcx.gcx.export_map.get(&id).cloned() +} + +pub fn provide(providers: &mut ty::maps::Providers) { + providers.in_scope_traits = in_scope_traits; + providers.module_exports = module_exports; +} diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 86a4f66918965..49d7f40000f07 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -36,11 +36,11 @@ pub enum TypeError<'tcx> { TupleSize(ExpectedFound), FixedArraySize(ExpectedFound), ArgCount, + RegionsDoesNotOutlive(Region<'tcx>, Region<'tcx>), - RegionsNotSame(Region<'tcx>, Region<'tcx>), - RegionsNoOverlap(Region<'tcx>, Region<'tcx>), RegionsInsufficientlyPolymorphic(BoundRegion, Region<'tcx>), RegionsOverlyPolymorphic(BoundRegion, Region<'tcx>), + Sorts(ExpectedFound>), IntMismatch(ExpectedFound), FloatMismatch(ExpectedFound), @@ -110,12 +110,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { RegionsDoesNotOutlive(..) => { write!(f, "lifetime mismatch") } - RegionsNotSame(..) => { - write!(f, "lifetimes are not the same") - } - RegionsNoOverlap(..) => { - write!(f, "lifetimes do not intersect") - } RegionsInsufficientlyPolymorphic(br, _) => { write!(f, "expected bound lifetime parameter{}{}, found concrete lifetime", @@ -213,6 +207,7 @@ impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> { |p| format!("trait {}", tcx.item_path_str(p.def_id()))) } ty::TyClosure(..) 
=> "closure".to_string(), + ty::TyGenerator(..) => "generator".to_string(), ty::TyTuple(..) => "tuple".to_string(), ty::TyInfer(ty::TyVar(_)) => "inferred type".to_string(), ty::TyInfer(ty::IntVar(_)) => "integral variable".to_string(), @@ -242,33 +237,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { use self::TypeError::*; match err.clone() { - RegionsDoesNotOutlive(subregion, superregion) => { - self.note_and_explain_region(db, "", subregion, "..."); - self.note_and_explain_region(db, "...does not necessarily outlive ", - superregion, ""); - } - RegionsNotSame(region1, region2) => { - self.note_and_explain_region(db, "", region1, "..."); - self.note_and_explain_region(db, "...is not the same lifetime as ", - region2, ""); - } - RegionsNoOverlap(region1, region2) => { - self.note_and_explain_region(db, "", region1, "..."); - self.note_and_explain_region(db, "...does not overlap ", - region2, ""); - } - RegionsInsufficientlyPolymorphic(_, conc_region) => { - self.note_and_explain_region(db, "concrete lifetime that was found is ", - conc_region, ""); - } - RegionsOverlyPolymorphic(_, &ty::ReVar(_)) => { - // don't bother to print out the message below for - // inference variables, it's not very illuminating. - } - RegionsOverlyPolymorphic(_, conc_region) => { - self.note_and_explain_region(db, "expected concrete lifetime is ", - conc_region, ""); - } Sorts(values) => { let expected_str = values.expected.sort_string(self); let found_str = values.found.sort_string(self); diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs index 68f85ba7d33e2..353a1cd5355b9 100644 --- a/src/librustc/ty/fast_reject.rs +++ b/src/librustc/ty/fast_reject.rs @@ -30,6 +30,7 @@ pub enum SimplifiedType { TupleSimplifiedType(usize), TraitSimplifiedType(DefId), ClosureSimplifiedType(DefId), + GeneratorSimplifiedType(DefId), AnonSimplifiedType(DefId), FunctionSimplifiedType(usize), ParameterSimplifiedType, @@ -72,6 +73,9 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, ty::TyClosure(def_id, _) => { Some(ClosureSimplifiedType(def_id)) } + ty::TyGenerator(def_id, _, _) => { + Some(GeneratorSimplifiedType(def_id)) + } ty::TyNever => Some(NeverSimplifiedType), ty::TyTuple(ref tys, _) => { Some(TupleSimplifiedType(tys.len())) diff --git a/src/librustc/ty/flags.rs b/src/librustc/ty/flags.rs index 31ed61a919e7c..27b8d245396c9 100644 --- a/src/librustc/ty/flags.rs +++ b/src/librustc/ty/flags.rs @@ -85,6 +85,13 @@ impl FlagComputation { } } + &ty::TyGenerator(_, ref substs, ref interior) => { + self.add_flags(TypeFlags::HAS_TY_CLOSURE); + self.add_flags(TypeFlags::HAS_LOCAL_NAMES); + self.add_substs(&substs.substs); + self.add_ty(interior.witness); + } + &ty::TyClosure(_, ref substs) => { self.add_flags(TypeFlags::HAS_TY_CLOSURE); self.add_flags(TypeFlags::HAS_LOCAL_NAMES); diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 5caf513981280..eadf80871fc95 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -345,6 +345,7 @@ pub fn characteristic_def_id_of_type(ty: Ty) -> Option { ty::TyFnDef(def_id, _) | ty::TyClosure(def_id, _) => Some(def_id), + ty::TyGenerator(def_id, _, _) => Some(def_id), ty::TyBool | ty::TyChar | diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 4ee9b2e65a782..cf21a66d51538 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -1226,7 +1226,17 @@ impl<'a, 'tcx> Layout { Univariant { variant: unit, non_zero: false } } - // Tuples and closures. 
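Generators get the same univariant layout treatment as closures and tuples, but the field list comes from the new `ClosureSubsts::field_tys`, which (per the `ty/sty.rs` hunk later in this patch) chains the upvar types, a `u32` state discriminant, and the types saved across suspension points. A back-of-the-envelope sketch of that field enumeration, with invented sizes and no alignment rules:

```rust
// Toy enumeration of a generator frame's fields: upvars, then a u32 state
// discriminant, then the locals saved across suspension points. Sizes are
// illustrative only; the real layout code handles alignment and optimization.

fn generator_field_sizes(upvar_sizes: &[usize], state_sizes: &[usize]) -> Vec<usize> {
    upvar_sizes
        .iter()
        .copied()
        .chain(std::iter::once(4)) // the u32 discriminant
        .chain(state_sizes.iter().copied())
        .collect()
}

fn main() {
    // e.g. one captured i32, plus an i32 and a bool that live across a yield
    let fields = generator_field_sizes(&[4], &[4, 1]);
    assert_eq!(fields, vec![4, 4, 4, 1]);
    // naive, unaligned size of the resulting univariant "struct"
    assert_eq!(fields.iter().sum::<usize>(), 13);
}
```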
+ // Tuples, generators and closures. + ty::TyGenerator(def_id, ref substs, _) => { + let tys = substs.field_tys(def_id, tcx); + let st = Struct::new(dl, + &tys.map(|ty| ty.layout(tcx, param_env)) + .collect::, _>>()?, + &ReprOptions::default(), + StructKind::AlwaysSizedUnivariant, ty)?; + Univariant { variant: st, non_zero: false } + } + ty::TyClosure(def_id, ref substs) => { let tys = substs.upvar_tys(def_id, tcx); let st = Struct::new(dl, @@ -2240,11 +2250,15 @@ impl<'a, 'tcx> TyLayout<'tcx> { ty::TySlice(element) => element, ty::TyStr => tcx.types.u8, - // Tuples and closures. + // Tuples, generators and closures. ty::TyClosure(def_id, ref substs) => { substs.upvar_tys(def_id, tcx).nth(i).unwrap() } + ty::TyGenerator(def_id, ref substs, _) => { + substs.field_tys(def_id, tcx).nth(i).unwrap() + } + ty::TyTuple(tys, _) => tys[i], // SIMD vector types. diff --git a/src/librustc/ty/maps.rs b/src/librustc/ty/maps.rs index a73202ced61e6..5a372dbf89fbf 100644 --- a/src/librustc/ty/maps.rs +++ b/src/librustc/ty/maps.rs @@ -11,13 +11,13 @@ use dep_graph::{DepConstructor, DepNode, DepNodeIndex}; use errors::{Diagnostic, DiagnosticBuilder}; use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; -use hir::def::Def; -use hir; +use hir::def::{Def, Export}; +use hir::{self, TraitCandidate, HirId}; use lint; use middle::const_val; use middle::cstore::{ExternCrate, LinkagePreference}; use middle::privacy::AccessLevels; -use middle::region::RegionMaps; +use middle::region; use mir; use mir::transform::{MirSuite, MirPassIndex}; use session::CompileResult; @@ -31,6 +31,7 @@ use ty::fast_reject::SimplifiedType; use util::nodemap::{DefIdSet, NodeSet}; use util::common::{profq_msg, ProfileQueriesMsg}; +use rustc_data_structures::indexed_set::IdxSetBuf; use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::fx::FxHashMap; use std::cell::{RefCell, RefMut, Cell}; @@ -80,6 +81,15 @@ impl Key for CrateNum { } } +impl Key for HirId { + fn map_crate(&self) -> CrateNum { + LOCAL_CRATE + } + fn default_span(&self, _tcx: TyCtxt) -> Span { + DUMMY_SP + } +} + impl Key for DefId { fn map_crate(&self) -> CrateNum { self.krate @@ -540,6 +550,24 @@ impl<'tcx> QueryDescription for queries::lint_levels<'tcx> { } } +impl<'tcx> QueryDescription for queries::specializes<'tcx> { + fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String { + format!("computing whether impls specialize one another") + } +} + +impl<'tcx> QueryDescription for queries::in_scope_traits<'tcx> { + fn describe(_tcx: TyCtxt, _: HirId) -> String { + format!("fetching the traits in scope at a particular ast node") + } +} + +impl<'tcx> QueryDescription for queries::module_exports<'tcx> { + fn describe(_tcx: TyCtxt, _: HirId) -> String { + format!("fetching the exported items for a module") + } +} + // If enabled, send a message to the profile-queries thread macro_rules! profq_msg { ($tcx:expr, $msg:expr) => { @@ -566,7 +594,7 @@ macro_rules! profq_key { macro_rules! define_maps { (<$tcx:tt> $($(#[$attr:meta])* - [$($modifiers:tt)*] $name:ident: $node:ident($K:ty) -> $V:ty,)*) => { + [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*) => { define_map_struct! { tcx: $tcx, input: ($(([$($modifiers)*] [$($attr)*] [$name]))*) @@ -597,8 +625,15 @@ macro_rules! 
define_maps { impl<$tcx> Query<$tcx> { pub fn describe(&self, tcx: TyCtxt) -> String { - match *self { - $(Query::$name(key) => queries::$name::describe(tcx, key)),* + let (r, name) = match *self { + $(Query::$name(key) => { + (queries::$name::describe(tcx, key), stringify!($name)) + })* + }; + if tcx.sess.verbose() { + format!("{} [{}]", r, name) + } else { + r } } } @@ -920,12 +955,12 @@ macro_rules! define_provider_struct { // the driver creates (using several `rustc_*` crates). define_maps! { <'tcx> /// Records the type of every item. - [] type_of: TypeOfItem(DefId) -> Ty<'tcx>, + [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>, /// Maps from the def-id of an item (trait/struct/enum/fn) to its /// associated generics and predicates. - [] generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics, - [] predicates_of: PredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>, + [] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics, + [] fn predicates_of: PredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>, /// Maps from the def-id of a trait to the list of /// super-predicates. This is a subset of the full list of @@ -933,141 +968,145 @@ define_maps! { <'tcx> /// evaluate them even during type conversion, often before the /// full predicates are available (note that supertraits have /// additional acyclicity requirements). - [] super_predicates_of: SuperPredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>, + [] fn super_predicates_of: SuperPredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>, /// To avoid cycles within the predicates of a single item we compute /// per-type-parameter predicates for resolving `T::AssocTy`. - [] type_param_predicates: type_param_predicates((DefId, DefId)) + [] fn type_param_predicates: type_param_predicates((DefId, DefId)) -> ty::GenericPredicates<'tcx>, - [] trait_def: TraitDefOfItem(DefId) -> &'tcx ty::TraitDef, - [] adt_def: AdtDefOfItem(DefId) -> &'tcx ty::AdtDef, - [] adt_destructor: AdtDestructor(DefId) -> Option, - [] adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>], - [] adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>, + [] fn trait_def: TraitDefOfItem(DefId) -> &'tcx ty::TraitDef, + [] fn adt_def: AdtDefOfItem(DefId) -> &'tcx ty::AdtDef, + [] fn adt_destructor: AdtDestructor(DefId) -> Option, + [] fn adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>], + [] fn adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>, /// True if this is a const fn - [] is_const_fn: IsConstFn(DefId) -> bool, + [] fn is_const_fn: IsConstFn(DefId) -> bool, /// True if this is a foreign item (i.e., linked via `extern { ... }`). - [] is_foreign_item: IsForeignItem(DefId) -> bool, + [] fn is_foreign_item: IsForeignItem(DefId) -> bool, /// True if this is a default impl (aka impl Foo for ..) - [] is_default_impl: IsDefaultImpl(DefId) -> bool, + [] fn is_default_impl: IsDefaultImpl(DefId) -> bool, /// Get a map with the variance of every item; use `item_variance` /// instead. - [] crate_variances: crate_variances(CrateNum) -> Rc, + [] fn crate_variances: crate_variances(CrateNum) -> Rc, /// Maps from def-id of a type or region parameter to its /// (inferred) variance. 
- [] variances_of: ItemVariances(DefId) -> Rc>, + [] fn variances_of: ItemVariances(DefId) -> Rc>, /// Maps from an impl/trait def-id to a list of the def-ids of its items - [] associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc>, + [] fn associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc>, /// Maps from a trait item to the trait item "descriptor" - [] associated_item: AssociatedItems(DefId) -> ty::AssociatedItem, + [] fn associated_item: AssociatedItems(DefId) -> ty::AssociatedItem, - [] impl_trait_ref: ImplTraitRef(DefId) -> Option>, - [] impl_polarity: ImplPolarity(DefId) -> hir::ImplPolarity, + [] fn impl_trait_ref: ImplTraitRef(DefId) -> Option>, + [] fn impl_polarity: ImplPolarity(DefId) -> hir::ImplPolarity, /// Maps a DefId of a type to a list of its inherent impls. /// Contains implementations of methods that are inherent to a type. /// Methods in these implementations don't need to be exported. - [] inherent_impls: InherentImpls(DefId) -> Rc>, + [] fn inherent_impls: InherentImpls(DefId) -> Rc>, /// Set of all the def-ids in this crate that have MIR associated with /// them. This includes all the body owners, but also things like struct /// constructors. - [] mir_keys: mir_keys(CrateNum) -> Rc, + [] fn mir_keys: mir_keys(CrateNum) -> Rc, /// Maps DefId's that have an associated Mir to the result /// of the MIR qualify_consts pass. The actual meaning of /// the value isn't known except to the pass itself. - [] mir_const_qualif: MirConstQualif(DefId) -> u8, + [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Rc>), /// Fetch the MIR for a given def-id up till the point where it is /// ready for const evaluation. /// /// See the README for the `mir` module for details. - [] mir_const: MirConst(DefId) -> &'tcx Steal>, + [] fn mir_const: MirConst(DefId) -> &'tcx Steal>, - [] mir_validated: MirValidated(DefId) -> &'tcx Steal>, + [] fn mir_validated: MirValidated(DefId) -> &'tcx Steal>, /// MIR after our optimization passes have run. This is MIR that is ready /// for trans. This is also the only query that can fetch non-local MIR, at present. - [] optimized_mir: MirOptimized(DefId) -> &'tcx mir::Mir<'tcx>, + [] fn optimized_mir: MirOptimized(DefId) -> &'tcx mir::Mir<'tcx>, /// Type of each closure. The def ID is the ID of the /// expression defining the closure. - [] closure_kind: ClosureKind(DefId) -> ty::ClosureKind, + [] fn closure_kind: ClosureKind(DefId) -> ty::ClosureKind, /// The signature of functions and closures. - [] fn_sig: FnSignature(DefId) -> ty::PolyFnSig<'tcx>, + [] fn fn_sig: FnSignature(DefId) -> ty::PolyFnSig<'tcx>, + + /// Records the signature of each generator. The def ID is the ID of the + /// expression defining the closure. + [] fn generator_sig: GenSignature(DefId) -> Option>, /// Caches CoerceUnsized kinds for impls on custom types. 
- [] coerce_unsized_info: CoerceUnsizedInfo(DefId) + [] fn coerce_unsized_info: CoerceUnsizedInfo(DefId) -> ty::adjustment::CoerceUnsizedInfo, - [] typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult, + [] fn typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult, - [] typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>, + [] fn typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>, - [] has_typeck_tables: HasTypeckTables(DefId) -> bool, + [] fn has_typeck_tables: HasTypeckTables(DefId) -> bool, - [] coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (), + [] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (), - [] borrowck: BorrowCheck(DefId) -> (), + [] fn borrowck: BorrowCheck(DefId) -> (), // FIXME: shouldn't this return a `Result<(), BorrowckErrors>` instead? - [] mir_borrowck: MirBorrowCheck(DefId) -> (), + [] fn mir_borrowck: MirBorrowCheck(DefId) -> (), /// Gets a complete map from all types to their inherent impls. /// Not meant to be used directly outside of coherence. /// (Defined only for LOCAL_CRATE) - [] crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls, + [] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls, /// Checks all types in the krate for overlap in their inherent impls. Reports errors. /// Not meant to be used directly outside of coherence. /// (Defined only for LOCAL_CRATE) - [] crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (), + [] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (), /// Results of evaluating const items or constants embedded in /// other items (such as enum variant explicit discriminants). - [] const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) + [] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) -> const_val::EvalResult<'tcx>, /// Performs the privacy check and computes "access levels". - [] privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc, + [] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc, - [] reachable_set: reachability_dep_node(CrateNum) -> Rc, + [] fn reachable_set: reachability_dep_node(CrateNum) -> Rc, - /// Per-function `RegionMaps`. The `DefId` should be the owner-def-id for the fn body; - /// in the case of closures or "inline" expressions, this will be redirected to the enclosing - /// fn item. - [] region_maps: RegionMaps(DefId) -> Rc, + /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body; + /// in the case of closures, this will be redirected to the enclosing function. 
+ [] fn region_scope_tree: RegionScopeTree(DefId) -> Rc, - [] mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>, + [] fn mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>, - [] def_symbol_name: SymbolName(DefId) -> ty::SymbolName, - [] symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName, + [] fn def_symbol_name: SymbolName(DefId) -> ty::SymbolName, + [] fn symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName, - [] describe_def: DescribeDef(DefId) -> Option, - [] def_span: DefSpan(DefId) -> Span, - [] stability: Stability(DefId) -> Option, - [] deprecation: Deprecation(DefId) -> Option, - [] item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>, - [] fn_arg_names: FnArgNames(DefId) -> Vec, - [] impl_parent: ImplParent(DefId) -> Option, - [] trait_of_item: TraitOfItem(DefId) -> Option, - [] is_exported_symbol: IsExportedSymbol(DefId) -> bool, - [] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc>, - [] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool, - [] is_mir_available: IsMirAvailable(DefId) -> bool, + [] fn describe_def: DescribeDef(DefId) -> Option, + [] fn def_span: DefSpan(DefId) -> Span, + [] fn stability: Stability(DefId) -> Option, + [] fn deprecation: Deprecation(DefId) -> Option, + [] fn item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>, + [] fn fn_arg_names: FnArgNames(DefId) -> Vec, + [] fn impl_parent: ImplParent(DefId) -> Option, + [] fn trait_of_item: TraitOfItem(DefId) -> Option, + [] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool, + [] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) + -> Rc>, + [] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool, + [] fn is_mir_available: IsMirAvailable(DefId) -> bool, - [] trait_impls_of: TraitImpls(DefId) -> Rc, - [] specialization_graph_of: SpecializationGraph(DefId) -> Rc, - [] is_object_safe: ObjectSafety(DefId) -> bool, + [] fn trait_impls_of: TraitImpls(DefId) -> Rc, + [] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc, + [] fn is_object_safe: ObjectSafety(DefId) -> bool, // Get the ParameterEnvironment for a given item; this environment // will be in "user-facing" mode, meaning that it is suitabe for @@ -1075,28 +1114,32 @@ define_maps! { <'tcx> // associated types. This is almost always what you want, // unless you are doing MIR optimizations, in which case you // might want to use `reveal_all()` method to change modes. - [] param_env: ParamEnv(DefId) -> ty::ParamEnv<'tcx>, + [] fn param_env: ParamEnv(DefId) -> ty::ParamEnv<'tcx>, // Trait selection queries. These are best used by invoking `ty.moves_by_default()`, // `ty.is_copy()`, etc, since that will prune the environment where possible. 
- [] is_copy_raw: is_copy_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, - [] is_sized_raw: is_sized_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, - [] is_freeze_raw: is_freeze_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, - [] needs_drop_raw: needs_drop_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, - [] layout_raw: layout_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) + [] fn is_copy_raw: is_copy_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, + [] fn is_sized_raw: is_sized_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, + [] fn is_freeze_raw: is_freeze_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, + [] fn needs_drop_raw: needs_drop_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool, + [] fn layout_raw: layout_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> Result<&'tcx Layout, LayoutError<'tcx>>, - [] dylib_dependency_formats: DylibDepFormats(DefId) + [] fn dylib_dependency_formats: DylibDepFormats(DefId) -> Rc>, - [] is_allocator: IsAllocator(DefId) -> bool, - [] is_panic_runtime: IsPanicRuntime(DefId) -> bool, - [] is_compiler_builtins: IsCompilerBuiltins(DefId) -> bool, - [] has_global_allocator: HasGlobalAllocator(DefId) -> bool, + [] fn is_allocator: IsAllocator(DefId) -> bool, + [] fn is_panic_runtime: IsPanicRuntime(DefId) -> bool, + [] fn is_compiler_builtins: IsCompilerBuiltins(DefId) -> bool, + [] fn has_global_allocator: HasGlobalAllocator(DefId) -> bool, + + [] fn extern_crate: ExternCrate(DefId) -> Rc>, - [] extern_crate: ExternCrate(DefId) -> Rc>, + [] fn lint_levels: lint_levels(CrateNum) -> Rc, - [] lint_levels: lint_levels(CrateNum) -> Rc, + [] fn specializes: specializes_node((DefId, DefId)) -> bool, + [] fn in_scope_traits: InScopeTraits(HirId) -> Option>>, + [] fn module_exports: ModuleExports(HirId) -> Option>>, } fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> { @@ -1172,3 +1215,7 @@ fn layout_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<' fn lint_levels<'tcx>(_: CrateNum) -> DepConstructor<'tcx> { DepConstructor::LintLevels } + +fn specializes_node<'tcx>((a, b): (DefId, DefId)) -> DepConstructor<'tcx> { + DepConstructor::Specializes { impl1: a, impl2: b } +} diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 6597dccf25816..1851e1b8d34bb 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -23,8 +23,8 @@ use middle::const_val::ConstVal; use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; use middle::privacy::AccessLevels; use middle::resolve_lifetime::ObjectLifetimeDefault; -use middle::region::CodeExtent; use mir::Mir; +use mir::GeneratorLayout; use traits; use ty; use ty::subst::{Subst, Substs}; @@ -59,9 +59,9 @@ use rustc_data_structures::transitive_relation::TransitiveRelation; use hir; pub use self::sty::{Binder, DebruijnIndex}; -pub use self::sty::{FnSig, PolyFnSig}; +pub use self::sty::{FnSig, GenSig, PolyFnSig, PolyGenSig}; pub use self::sty::{InferTy, ParamTy, ProjectionTy, ExistentialPredicate}; -pub use self::sty::{ClosureSubsts, TypeAndMut}; +pub use self::sty::{ClosureSubsts, GeneratorInterior, TypeAndMut}; pub use self::sty::{TraitRef, TypeVariants, PolyTraitRef}; pub use self::sty::{ExistentialTraitRef, PolyExistentialTraitRef}; pub use self::sty::{ExistentialProjection, PolyExistentialProjection}; @@ -409,6 +409,8 @@ bitflags! 
{ const HAS_FREE_REGIONS = 1 << 6, const HAS_TY_ERR = 1 << 7, const HAS_PROJECTION = 1 << 8, + + // FIXME: Rename this to the actual property since it's used for generators too const HAS_TY_CLOSURE = 1 << 9, // true if there are "names" of types and regions and so forth @@ -1706,7 +1708,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { let result = match ty.sty { TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | TyRawPtr(..) | TyRef(..) | TyFnDef(..) | TyFnPtr(_) | - TyArray(..) | TyClosure(..) | TyNever => { + TyArray(..) | TyClosure(..) | TyGenerator(..) | TyNever => { vec![] } @@ -2039,6 +2041,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { hir::ExprBox(..) | hir::ExprAddrOf(..) | hir::ExprBinary(..) | + hir::ExprYield(..) | hir::ExprCast(..) => { false } @@ -2271,6 +2274,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.trait_def(trait_def_id).has_default_impl } + pub fn generator_layout(self, def_id: DefId) -> &'tcx GeneratorLayout<'tcx> { + self.optimized_mir(def_id).generator_layout.as_ref().unwrap() + } + /// Given the def_id of an impl, return the def_id of the trait it implements. /// If it implements no trait, return `None`. pub fn trait_id_of_impl(self, def_id: DefId) -> Option { @@ -2301,10 +2308,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn node_scope_region(self, id: NodeId) -> Region<'tcx> { - self.mk_region(ty::ReScope(CodeExtent::Misc(id))) - } - /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` /// with the name of the crate containing the impl. pub fn span_of_impl(self, impl_did: DefId) -> Result { @@ -2509,6 +2512,7 @@ fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pub fn provide(providers: &mut ty::maps::Providers) { util::provide(providers); + context::provide(providers); *providers = ty::maps::Providers { associated_item, associated_item_def_ids, diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs index ab1b1b3857d00..657ed4077911c 100644 --- a/src/librustc/ty/outlives.rs +++ b/src/librustc/ty/outlives.rs @@ -115,6 +115,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } + ty::TyGenerator(def_id, ref substs, ref interior) => { + // Same as the closure case + for upvar_ty in substs.upvar_tys(def_id, *self) { + self.compute_components(upvar_ty, out); + } + + // But generators can have additional interior types + self.compute_components(interior.witness, out); + } + // OutlivesTypeParameterEnv -- the actual checking that `X:'a` // is implied by the environment is done in regionck. ty::TyParam(p) => { diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index c035817d66db0..0ff3199689c19 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -389,6 +389,18 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, Ok(tcx.mk_dynamic(relation.relate(a_obj, b_obj)?, region_bound)) } + (&ty::TyGenerator(a_id, a_substs, a_interior), + &ty::TyGenerator(b_id, b_substs, b_interior)) + if a_id == b_id => + { + // All TyGenerator types with the same id represent + // the (anonymous) type of the same generator expression. So + // all of their regions should be equated. 
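The `TyGenerator` arm being added to `super_relate_tys` treats a generator like a nominal type tied to its defining expression: two generator types can only relate when their def-ids match, after which the substs and the `GeneratorInterior` are related component-wise (the interior's `Relate` impl further down just relates the witness type). A toy sketch of that shape, with invented types and `relate` reduced to plain equality per component:

```rust
// A toy sketch of the component-wise relation: invented types, with "relate"
// reduced to plain equality per component.

type DefId = u32;

#[derive(Clone, Debug, PartialEq)]
struct GeneratorTy {
    def_id: DefId,
    upvar_tys: Vec<&'static str>, // stands in for the substs
    witness: &'static str,        // stands in for GeneratorInterior::witness
}

fn relate_generators(a: &GeneratorTy, b: &GeneratorTy) -> Result<GeneratorTy, String> {
    // Generator types from different generator expressions never unify.
    if a.def_id != b.def_id {
        return Err(format!("expected generator {}, found generator {}", a.def_id, b.def_id));
    }
    // Relate the captured components; here "relate" is just "must be equal".
    if a.upvar_tys != b.upvar_tys || a.witness != b.witness {
        return Err("generator substs or interior mismatch".to_string());
    }
    Ok(a.clone())
}

fn main() {
    let g = GeneratorTy { def_id: 3, upvar_tys: vec!["i32"], witness: "(i32, bool)" };
    assert!(relate_generators(&g, &g.clone()).is_ok());

    let other = GeneratorTy { def_id: 4, ..g.clone() };
    assert!(relate_generators(&g, &other).is_err());
}
```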
+ let substs = relation.relate(&a_substs, &b_substs)?; + let interior = relation.relate(&a_interior, &b_interior)?; + Ok(tcx.mk_generator(a_id, substs, interior)) + } + (&ty::TyClosure(a_id, a_substs), &ty::TyClosure(b_id, b_substs)) if a_id == b_id => @@ -512,6 +524,18 @@ impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { } } +impl<'tcx> Relate<'tcx> for ty::GeneratorInterior<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::GeneratorInterior<'tcx>, + b: &ty::GeneratorInterior<'tcx>) + -> RelateResult<'tcx, ty::GeneratorInterior<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + { + let interior = relation.relate(&a.witness, &b.witness)?; + Ok(ty::GeneratorInterior::new(interior)) + } +} + impl<'tcx> Relate<'tcx> for &'tcx Substs<'tcx> { fn relate<'a, 'gcx, R>(relation: &mut R, a: &&'tcx Substs<'tcx>, diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index e41eb079b3782..44b505e19658f 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -29,6 +29,15 @@ impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) { } } +impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>, C: Lift<'tcx>> Lift<'tcx> for (A, B, C) { + type Lifted = (A::Lifted, B::Lifted, C::Lifted); + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.0).and_then(|a| { + tcx.lift(&self.1).and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c))) + }) + } +} + impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option { type Lifted = Option; fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { @@ -220,6 +229,15 @@ impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> { } } +impl<'a, 'tcx> Lift<'tcx> for ty::GeneratorInterior<'a> { + type Lifted = ty::GeneratorInterior<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.witness).map(|witness| { + ty::GeneratorInterior { witness } + }) + } +} + impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> { type Lifted = ty::adjustment::Adjustment<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { @@ -283,6 +301,19 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> { } } +impl<'a, 'tcx> Lift<'tcx> for ty::GenSig<'a> { + type Lifted = ty::GenSig<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&(self.yield_ty, self.return_ty)) + .map(|(yield_ty, return_ty)| { + ty::GenSig { + yield_ty, + return_ty, + } + }) + } +} + impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> { type Lifted = ty::FnSig<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { @@ -340,12 +371,6 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { RegionsDoesNotOutlive(a, b) => { return tcx.lift(&(a, b)).map(|(a, b)| RegionsDoesNotOutlive(a, b)) } - RegionsNotSame(a, b) => { - return tcx.lift(&(a, b)).map(|(a, b)| RegionsNotSame(a, b)) - } - RegionsNoOverlap(a, b) => { - return tcx.lift(&(a, b)).map(|(a, b)| RegionsNoOverlap(a, b)) - } RegionsInsufficientlyPolymorphic(a, b) => { return tcx.lift(&b).map(|b| RegionsInsufficientlyPolymorphic(a, b)) } @@ -396,7 +421,7 @@ macro_rules! CopyImpls { } } -CopyImpls! { (), hir::Unsafety, abi::Abi, hir::def_id::DefId } +CopyImpls! 
{ (), hir::Unsafety, abi::Abi, hir::def_id::DefId, ::mir::Local } impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> (T, U) { @@ -539,6 +564,9 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::TyRef(ref r, tm) => { ty::TyRef(r.fold_with(folder), tm.fold_with(folder)) } + ty::TyGenerator(did, substs, interior) => { + ty::TyGenerator(did, substs.fold_with(folder), interior.fold_with(folder)) + } ty::TyClosure(did, substs) => ty::TyClosure(did, substs.fold_with(folder)), ty::TyProjection(ref data) => ty::TyProjection(data.fold_with(folder)), ty::TyAnon(did, substs) => ty::TyAnon(did, substs.fold_with(folder)), @@ -570,6 +598,9 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::TyFnDef(_, substs) => substs.visit_with(visitor), ty::TyFnPtr(ref f) => f.visit_with(visitor), ty::TyRef(r, ref tm) => r.visit_with(visitor) || tm.visit_with(visitor), + ty::TyGenerator(_did, ref substs, ref interior) => { + substs.visit_with(visitor) || interior.visit_with(visitor) + } ty::TyClosure(_did, ref substs) => substs.visit_with(visitor), ty::TyProjection(ref data) => data.visit_with(visitor), ty::TyAnon(_, ref substs) => substs.visit_with(visitor), @@ -594,6 +625,20 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TypeAndMut<'tcx> { } } +impl<'tcx> TypeFoldable<'tcx> for ty::GenSig<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + ty::GenSig { + yield_ty: self.yield_ty.fold_with(folder), + return_ty: self.return_ty.fold_with(folder), + } + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.yield_ty.visit_with(visitor) || + self.return_ty.visit_with(visitor) + } +} + impl<'tcx> TypeFoldable<'tcx> for ty::FnSig<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { let inputs_and_output = self.inputs_and_output.fold_with(folder); @@ -684,6 +729,16 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureSubsts<'tcx> { } } +impl<'tcx> TypeFoldable<'tcx> for ty::GeneratorInterior<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + ty::GeneratorInterior::new(self.witness.fold_with(folder)) + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.witness.visit_with(visitor) + } +} + impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::Adjustment<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::adjustment::Adjustment { @@ -996,12 +1051,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> { RegionsDoesNotOutlive(a, b) => { RegionsDoesNotOutlive(a.fold_with(folder), b.fold_with(folder)) }, - RegionsNotSame(a, b) => { - RegionsNotSame(a.fold_with(folder), b.fold_with(folder)) - }, - RegionsNoOverlap(a, b) => { - RegionsNoOverlap(a.fold_with(folder), b.fold_with(folder)) - }, RegionsInsufficientlyPolymorphic(a, b) => { RegionsInsufficientlyPolymorphic(a, b.fold_with(folder)) }, @@ -1027,9 +1076,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> { match *self { UnsafetyMismatch(x) => x.visit_with(visitor), AbiMismatch(x) => x.visit_with(visitor), - RegionsDoesNotOutlive(a, b) | - RegionsNotSame(a, b) | - RegionsNoOverlap(a, b) => { + RegionsDoesNotOutlive(a, b) => { a.visit_with(visitor) || b.visit_with(visitor) }, RegionsInsufficientlyPolymorphic(_, b) | diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index 8d6b7b7ac9fdd..fc244cabcd11c 100644 --- 
a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -135,6 +135,10 @@ pub enum TypeVariants<'tcx> { /// `|a| a`. TyClosure(DefId, ClosureSubsts<'tcx>), + /// The anonymous type of a generator. Used to represent the type of + /// `|a| yield a`. + TyGenerator(DefId, ClosureSubsts<'tcx>, GeneratorInterior<'tcx>), + /// The never type `!` TyNever, @@ -209,8 +213,8 @@ pub enum TypeVariants<'tcx> { /// as extra type parameters? The reason for this design is that the /// upvar types can reference lifetimes that are internal to the /// creating function. In my example above, for example, the lifetime -/// `'b` represents the extent of the closure itself; this is some -/// subset of `foo`, probably just the extent of the call to the to +/// `'b` represents the scope of the closure itself; this is some +/// subset of `foo`, probably just the scope of the call to the to /// `do()`. If we just had the lifetime/type parameters from the /// enclosing function, we couldn't name this lifetime `'b`. Note that /// there can also be lifetimes in the types of the upvars themselves, @@ -261,6 +265,51 @@ impl<'a, 'gcx, 'acx, 'tcx> ClosureSubsts<'tcx> { } } +impl<'a, 'gcx, 'tcx> ClosureSubsts<'tcx> { + /// This returns the types of the MIR locals which had to be stored across suspension points. + /// It is calculated in rustc_mir::transform::generator::StateTransform. + /// All the types here must be in the tuple in GeneratorInterior. + pub fn state_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> + impl Iterator> + 'a + { + let state = tcx.generator_layout(def_id).fields.iter(); + state.map(move |d| d.ty.subst(tcx, self.substs)) + } + + /// This is the types of all the fields stored in a generator. + /// It includes the upvars, state types and the state discriminant which is u32. + pub fn field_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> + impl Iterator> + 'a + { + let upvars = self.upvar_tys(def_id, tcx); + let state = self.state_tys(def_id, tcx); + upvars.chain(iter::once(tcx.types.u32)).chain(state) + } +} + +/// This describes the types that can be contained in a generator. +/// It will be a type variable initially and unified in the last stages of typeck of a body. +/// It contains a tuple of all the types that could end up on a generator frame. +/// The state transformation MIR pass may only produce layouts which mention types in this tuple. +/// Upvars are not counted here. +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +pub struct GeneratorInterior<'tcx> { + pub witness: Ty<'tcx>, +} + +impl<'tcx> GeneratorInterior<'tcx> { + pub fn new(witness: Ty<'tcx>) -> GeneratorInterior<'tcx> { + GeneratorInterior { witness } + } + + pub fn as_slice(&self) -> &'tcx Slice> { + match self.witness.sty { + ty::TyTuple(s, _) => s, + _ => bug!(), + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum ExistentialPredicate<'tcx> { /// e.g. 
Iterator @@ -579,6 +628,22 @@ impl<'a, 'tcx> ProjectionTy<'tcx> { } } +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] +pub struct GenSig<'tcx> { + pub yield_ty: Ty<'tcx>, + pub return_ty: Ty<'tcx>, +} + +pub type PolyGenSig<'tcx> = Binder>; + +impl<'tcx> PolyGenSig<'tcx> { + pub fn yield_ty(&self) -> ty::Binder> { + self.map_bound_ref(|sig| sig.yield_ty) + } + pub fn return_ty(&self) -> ty::Binder> { + self.map_bound_ref(|sig| sig.return_ty) + } +} /// Signature of a function type, which I have arbitrarily /// decided to use to refer to the input/output types. @@ -780,10 +845,10 @@ pub enum RegionKind { /// region parameters. ReFree(FreeRegion), - /// A concrete region naming some statically determined extent + /// A concrete region naming some statically determined scope /// (e.g. an expression or sequence of statements) within the /// current function. - ReScope(region::CodeExtent), + ReScope(region::Scope), /// Static data that has an "infinite" lifetime. Top in the region lattice. ReStatic, @@ -1379,7 +1444,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { TyAdt(_, substs) | TyAnon(_, substs) => { substs.regions().collect() } - TyClosure(_, ref substs) => { + TyClosure(_, ref substs) | TyGenerator(_, ref substs, _) => { substs.substs.regions().collect() } TyProjection(ref data) => { diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index bbbb8611f98a5..4e4c7b107c429 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -27,6 +27,7 @@ use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, HashStable}; use rustc_data_structures::fx::FxHashMap; use std::cmp; +use std::iter; use std::hash::Hash; use std::intrinsics; use syntax::ast::{self, Name}; @@ -573,6 +574,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }).collect() } + ty::TyGenerator(def_id, substs, interior) => { + substs.upvar_tys(def_id, self).chain(iter::once(interior.witness)).map(|ty| { + self.dtorck_constraint_for_ty(span, for_ty, depth+1, ty) + }).collect() + } + ty::TyAdt(def, substs) => { let ty::DtorckConstraint { dtorck_types, outlives @@ -694,6 +701,7 @@ impl<'a, 'gcx, 'tcx, W> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx, W> TyRawPtr(m) | TyRef(_, m) => self.hash(m.mutbl), TyClosure(def_id, _) | + TyGenerator(def_id, _, _) | TyAnon(def_id, _) | TyFnDef(def_id, _) => self.def_id(def_id), TyAdt(d, _) => self.def_id(d.did), @@ -1120,6 +1128,11 @@ fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty::TyClosure(def_id, ref substs) => substs.upvar_tys(def_id, tcx).any(needs_drop), + // Pessimistically assume that all generators will require destructors + // as we don't know if a destructor is a noop or not until after the MIR + // state transformation pass + ty::TyGenerator(..) 
=> true, + ty::TyTuple(ref tys, _) => tys.iter().cloned().any(needs_drop), // unions don't have destructors regardless of the child types diff --git a/src/librustc/ty/walk.rs b/src/librustc/ty/walk.rs index a7f0bafe9b67d..bfabacdb17214 100644 --- a/src/librustc/ty/walk.rs +++ b/src/librustc/ty/walk.rs @@ -112,6 +112,10 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) { ty::TyClosure(_, ref substs) => { stack.extend(substs.substs.types().rev()); } + ty::TyGenerator(_, ref substs, ref interior) => { + stack.extend(substs.substs.types().rev()); + stack.push(interior.witness); + } ty::TyTuple(ts, _) => { stack.extend(ts.iter().cloned().rev()); } diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 6d9e648452fd3..3516f7dfb25b0 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -281,8 +281,8 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { } } - ty::TyClosure(..) => { - // the types in a closure are always the types of + ty::TyGenerator(..) | ty::TyClosure(..) => { + // the types in a closure or generator are always the types of // local variables (or possibly references to local // variables), we'll walk those. // diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 184fd75135e47..cc581b07de5fc 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -8,16 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use hir::BodyId; use hir::def_id::DefId; use hir::map::definitions::DefPathData; -use middle::region::{CodeExtent, BlockRemainder}; +use middle::region::{self, BlockRemainder}; use ty::subst::{self, Subst}; use ty::{BrAnon, BrEnv, BrFresh, BrNamed}; use ty::{TyBool, TyChar, TyAdt}; use ty::{TyError, TyStr, TyArray, TySlice, TyFloat, TyFnDef, TyFnPtr}; use ty::{TyParam, TyRawPtr, TyRef, TyNever, TyTuple}; -use ty::{TyClosure, TyProjection, TyAnon}; +use ty::{TyClosure, TyGenerator, TyProjection, TyAnon}; use ty::{TyDynamic, TyInt, TyUint, TyInfer}; use ty::{self, Ty, TyCtxt, TypeFoldable}; @@ -525,18 +524,18 @@ impl fmt::Display for ty::RegionKind { ty::ReSkolemized(_, br) => { write!(f, "{}", br) } - ty::ReScope(code_extent) if identify_regions() => { - match code_extent { - CodeExtent::Misc(node_id) => - write!(f, "'{}mce", node_id.as_u32()), - CodeExtent::CallSiteScope(BodyId { node_id }) => - write!(f, "'{}cce", node_id.as_u32()), - CodeExtent::ParameterScope(BodyId { node_id }) => - write!(f, "'{}pce", node_id.as_u32()), - CodeExtent::DestructionScope(node_id) => - write!(f, "'{}dce", node_id.as_u32()), - CodeExtent::Remainder(BlockRemainder { block, first_statement_index }) => - write!(f, "'{}_{}rce", block, first_statement_index), + ty::ReScope(scope) if identify_regions() => { + match scope { + region::Scope::Node(id) => + write!(f, "'{}s", id.as_usize()), + region::Scope::CallSite(id) => + write!(f, "'{}cs", id.as_usize()), + region::Scope::Arguments(id) => + write!(f, "'{}as", id.as_usize()), + region::Scope::Destruction(id) => + write!(f, "'{}ds", id.as_usize()), + region::Scope::Remainder(BlockRemainder { block, first_statement_index }) => + write!(f, "'{}_{}rs", block.as_usize(), first_statement_index), } } ty::ReVar(region_vid) if identify_regions() => { @@ -715,6 +714,12 @@ impl<'tcx> fmt::Display for ty::TraitRef<'tcx> { } } +impl<'tcx> fmt::Display for ty::GeneratorInterior<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.witness.fmt(f) + } +} + impl<'tcx> fmt::Display for 
ty::TypeVariants<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -813,6 +818,41 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> { }) } TyStr => write!(f, "str"), + TyGenerator(did, substs, interior) => ty::tls::with(|tcx| { + let upvar_tys = substs.upvar_tys(did, tcx); + write!(f, "[generator")?; + + if let Some(node_id) = tcx.hir.as_local_node_id(did) { + write!(f, "@{:?}", tcx.hir.span(node_id))?; + let mut sep = " "; + tcx.with_freevars(node_id, |freevars| { + for (freevar, upvar_ty) in freevars.iter().zip(upvar_tys) { + let def_id = freevar.def.def_id(); + let node_id = tcx.hir.as_local_node_id(def_id).unwrap(); + write!(f, + "{}{}:{}", + sep, + tcx.local_var_name_str(node_id), + upvar_ty)?; + sep = ", "; + } + Ok(()) + })? + } else { + // cross-crate closure types should only be + // visible in trans bug reports, I imagine. + write!(f, "@{:?}", did)?; + let mut sep = " "; + for (index, upvar_ty) in upvar_tys.enumerate() { + write!(f, "{}{}:{}", sep, index, upvar_ty)?; + sep = ", "; + } + } + + write!(f, " {}", interior)?; + + write!(f, "]") + }), TyClosure(did, substs) => ty::tls::with(|tcx| { let upvar_tys = substs.upvar_tys(did, tcx); write!(f, "[closure")?; diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs index f99c6a29ff00a..eafb4c5c80078 100644 --- a/src/librustc_allocator/expand.rs +++ b/src/librustc_allocator/expand.rs @@ -82,10 +82,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> { allow_internal_unsafe: false, } }); - let span = Span { - ctxt: SyntaxContext::empty().apply_mark(mark), - ..item.span - }; + let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark)); let ecfg = ExpansionConfig::default(name.to_string()); let mut f = AllocFnFactory { span, diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index 4058f3198afb4..985257c28104c 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -103,7 +103,8 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { debug!("consume(consume_id={}, cmt={:?}, mode={:?})", consume_id, cmt, mode); - self.consume_common(consume_id, consume_span, cmt, mode); + let hir_id = self.tcx().hir.node_to_hir_id(consume_id); + self.consume_common(hir_id.local_id, consume_span, cmt, mode); } fn matched_pat(&mut self, @@ -120,7 +121,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { cmt, mode); - self.consume_common(consume_pat.id, consume_pat.span, cmt, mode); + self.consume_common(consume_pat.hir_id.local_id, consume_pat.span, cmt, mode); } fn borrow(&mut self, @@ -136,15 +137,16 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { borrow_id, cmt, loan_region, bk, loan_cause); + let hir_id = self.tcx().hir.node_to_hir_id(borrow_id); if let Some(lp) = opt_loan_path(&cmt) { let moved_value_use_kind = match loan_cause { euv::ClosureCapture(_) => MovedInCapture, _ => MovedInUse, }; - self.check_if_path_is_moved(borrow_id, borrow_span, moved_value_use_kind, &lp); + self.check_if_path_is_moved(hir_id.local_id, borrow_span, moved_value_use_kind, &lp); } - self.check_for_conflicting_loans(borrow_id); + self.check_for_conflicting_loans(hir_id.local_id); } fn mutate(&mut self, @@ -163,7 +165,8 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { // have to be *FULLY* initialized, but we still // must be careful lest it contains derefs of // pointers. 
- self.check_if_assigned_path_is_moved(assignee_cmt.id, + let hir_id = self.tcx().hir.node_to_hir_id(assignee_cmt.id); + self.check_if_assigned_path_is_moved(hir_id.local_id, assignment_span, MovedInUse, &lp); @@ -172,14 +175,16 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { // In a case like `path += 1`, then path must be // fully initialized, since we will read it before // we write it. - self.check_if_path_is_moved(assignee_cmt.id, + let hir_id = self.tcx().hir.node_to_hir_id(assignee_cmt.id); + self.check_if_path_is_moved(hir_id.local_id, assignment_span, MovedInUse, &lp); } } } - self.check_assignment(assignment_id, assignment_span, assignee_cmt); + self.check_assignment(self.tcx().hir.node_to_hir_id(assignment_id).local_id, + assignment_span, assignee_cmt); } fn decl_without_init(&mut self, _id: ast::NodeId, _span: Span) { } @@ -201,7 +206,7 @@ pub fn check_loans<'a, 'b, 'c, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, all_loans, param_env, }; - euv::ExprUseVisitor::new(&mut clcx, bccx.tcx, param_env, &bccx.region_maps, bccx.tables) + euv::ExprUseVisitor::new(&mut clcx, bccx.tcx, param_env, &bccx.region_scope_tree, bccx.tables) .consume_body(body); } @@ -220,7 +225,7 @@ fn compatible_borrow_kinds(borrow_kind1: ty::BorrowKind, impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.bccx.tcx } - pub fn each_issued_loan(&self, node: ast::NodeId, mut op: F) -> bool where + pub fn each_issued_loan(&self, node: hir::ItemLocalId, mut op: F) -> bool where F: FnMut(&Loan<'tcx>) -> bool, { //! Iterates over each loan that has been issued @@ -235,14 +240,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { }) } - pub fn each_in_scope_loan(&self, scope: region::CodeExtent, mut op: F) -> bool where + pub fn each_in_scope_loan(&self, scope: region::Scope, mut op: F) -> bool where F: FnMut(&Loan<'tcx>) -> bool, { //! Like `each_issued_loan()`, but only considers loans that are //! currently in scope. - self.each_issued_loan(scope.node_id(), |loan| { - if self.bccx.region_maps.is_subscope_of(scope, loan.kill_scope) { + self.each_issued_loan(scope.item_local_id(), |loan| { + if self.bccx.region_scope_tree.is_subscope_of(scope, loan.kill_scope) { op(loan) } else { true @@ -251,7 +256,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } fn each_in_scope_loan_affecting_path(&self, - scope: region::CodeExtent, + scope: region::Scope, loan_path: &LoanPath<'tcx>, mut op: F) -> bool where @@ -325,7 +330,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { return true; } - pub fn loans_generated_by(&self, node: ast::NodeId) -> Vec { + pub fn loans_generated_by(&self, node: hir::ItemLocalId) -> Vec { //! Returns a vector of the loans that are generated as //! we enter `node`. @@ -337,7 +342,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { return result; } - pub fn check_for_conflicting_loans(&self, node: ast::NodeId) { + pub fn check_for_conflicting_loans(&self, node: hir::ItemLocalId) { //! Checks to see whether any of the loans that are issued //! on entrance to `node` conflict with loans that have already been //! issued when we enter `node` (for example, we do not @@ -381,7 +386,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan); // Should only be called for loans that are in scope at the same time. 
- assert!(self.bccx.region_maps.scopes_intersect(old_loan.kill_scope, + assert!(self.bccx.region_scope_tree.scopes_intersect(old_loan.kill_scope, new_loan.kill_scope)); self.report_error_if_loan_conflicts_with_restriction( @@ -462,7 +467,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { // 3. Where does old loan expire. let previous_end_span = - self.tcx().hir.span(old_loan.kill_scope.node_id()).end_point(); + old_loan.kill_scope.span(self.tcx(), &self.bccx.region_scope_tree).end_point(); let mut err = match (new_loan.kind, old_loan.kind) { (ty::MutBorrow, ty::MutBorrow) => { @@ -590,7 +595,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } fn consume_common(&self, - id: ast::NodeId, + id: hir::ItemLocalId, span: Span, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { @@ -628,7 +633,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } fn check_for_copy_of_frozen_path(&self, - id: ast::NodeId, + id: hir::ItemLocalId, span: Span, copy_path: &LoanPath<'tcx>) { match self.analyze_restrictions_on_use(id, copy_path, ty::ImmBorrow) { @@ -649,7 +654,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } fn check_for_move_of_borrowed_path(&self, - id: ast::NodeId, + id: hir::ItemLocalId, span: Span, move_path: &LoanPath<'tcx>, move_kind: move_data::MoveKind) { @@ -699,18 +704,17 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } pub fn analyze_restrictions_on_use(&self, - expr_id: ast::NodeId, + expr_id: hir::ItemLocalId, use_path: &LoanPath<'tcx>, borrow_kind: ty::BorrowKind) -> UseError<'tcx> { - debug!("analyze_restrictions_on_use(expr_id={}, use_path={:?})", - self.tcx().hir.node_to_string(expr_id), - use_path); + debug!("analyze_restrictions_on_use(expr_id={:?}, use_path={:?})", + expr_id, use_path); let mut ret = UseOk; self.each_in_scope_loan_affecting_path( - region::CodeExtent::Misc(expr_id), use_path, |loan| { + region::Scope::Node(expr_id), use_path, |loan| { if !compatible_borrow_kinds(loan.kind, borrow_kind) { ret = UseWhileBorrowed(loan.loan_path.clone(), loan.span); false @@ -725,11 +729,11 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { /// Reports an error if `expr` (which should be a path) /// is using a moved/uninitialized value fn check_if_path_is_moved(&self, - id: ast::NodeId, + id: hir::ItemLocalId, span: Span, use_kind: MovedValueUseKind, lp: &Rc>) { - debug!("check_if_path_is_moved(id={}, use_kind={:?}, lp={:?})", + debug!("check_if_path_is_moved(id={:?}, use_kind={:?}, lp={:?})", id, use_kind, lp); // FIXME (22079): if you find yourself tempted to cut and paste @@ -772,7 +776,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { /// (*p).x = 22; // not ok, p is uninitialized, can't deref /// ``` fn check_if_assigned_path_is_moved(&self, - id: ast::NodeId, + id: hir::ItemLocalId, span: Span, use_kind: MovedValueUseKind, lp: &Rc>) @@ -822,14 +826,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { } fn check_assignment(&self, - assignment_id: ast::NodeId, + assignment_id: hir::ItemLocalId, assignment_span: Span, assignee_cmt: mc::cmt<'tcx>) { debug!("check_assignment(assignee_cmt={:?})", assignee_cmt); // Check that we don't invalidate any outstanding loans if let Some(loan_path) = opt_loan_path(&assignee_cmt) { - let scope = region::CodeExtent::Misc(assignment_id); + let scope = region::Scope::Node(assignment_id); self.each_in_scope_loan_affecting_path(scope, &loan_path, |loan| { self.report_illegal_mutation(assignment_span, &loan_path, loan); false diff --git a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs index 7878762788905..465457f5ab39a 
100644 --- a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs @@ -27,7 +27,7 @@ use rustc::hir::*; use rustc::hir::map::Node::*; struct GatherMoveInfo<'tcx> { - id: ast::NodeId, + id: hir::ItemLocalId, kind: MoveKind, cmt: mc::cmt<'tcx>, span_path_opt: Option> @@ -79,13 +79,14 @@ pub fn gather_decl<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, var_id: ast::NodeId, var_ty: Ty<'tcx>) { let loan_path = Rc::new(LoanPath::new(LpVar(var_id), var_ty)); - move_data.add_move(bccx.tcx, loan_path, var_id, Declared); + let hir_id = bccx.tcx.hir.node_to_hir_id(var_id); + move_data.add_move(bccx.tcx, loan_path, hir_id.local_id, Declared); } pub fn gather_move_from_expr<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_data: &MoveData<'tcx>, move_error_collector: &mut MoveErrorCollector<'tcx>, - move_expr_id: ast::NodeId, + move_expr_id: hir::ItemLocalId, cmt: mc::cmt<'tcx>, move_reason: euv::MoveReason) { let kind = match move_reason { @@ -118,7 +119,7 @@ pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, _ => None, }; let move_info = GatherMoveInfo { - id: move_pat.id, + id: move_pat.hir_id.local_id, kind: MovePat, cmt, span_path_opt: pat_span_path_opt, @@ -135,7 +136,7 @@ fn gather_move<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_data: &MoveData<'tcx>, move_error_collector: &mut MoveErrorCollector<'tcx>, move_info: GatherMoveInfo<'tcx>) { - debug!("gather_move(move_id={}, cmt={:?})", + debug!("gather_move(move_id={:?}, cmt={:?})", move_info.id, move_info.cmt); let potentially_illegal_move = @@ -161,10 +162,10 @@ fn gather_move<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, pub fn gather_assignment<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_data: &MoveData<'tcx>, - assignment_id: ast::NodeId, + assignment_id: hir::ItemLocalId, assignment_span: Span, assignee_loan_path: Rc>, - assignee_id: ast::NodeId, + assignee_id: hir::ItemLocalId, mode: euv::MutateMode) { move_data.add_assignment(bccx.tcx, assignee_loan_path, diff --git a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs index 22de3c759139d..66aaafb77f757 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs @@ -24,7 +24,7 @@ use syntax_pos::Span; type R = Result<(),()>; pub fn guarantee_lifetime<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - item_scope: region::CodeExtent, + item_scope: region::Scope, span: Span, cause: euv::LoanCause, cmt: mc::cmt<'tcx>, @@ -52,7 +52,7 @@ struct GuaranteeLifetimeContext<'a, 'tcx: 'a> { bccx: &'a BorrowckCtxt<'a, 'tcx>, // the scope of the function body for the enclosing item - item_scope: region::CodeExtent, + item_scope: region::Scope, span: Span, cause: euv::LoanCause, @@ -115,8 +115,9 @@ impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> { self.bccx.tcx.mk_region(ty::ReScope(self.item_scope)) } Categorization::Local(local_id) => { + let hir_id = self.bccx.tcx.hir.node_to_hir_id(local_id); self.bccx.tcx.mk_region(ty::ReScope( - self.bccx.region_maps.var_scope(local_id))) + self.bccx.region_scope_tree.var_scope(hir_id.local_id))) } Categorization::StaticItem | Categorization::Deref(_, mc::UnsafePtr(..)) => { diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 00ebf5de44af8..00edd9cb28a0d 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -43,14 +43,13 @@ 
pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, let mut glcx = GatherLoanCtxt { bccx, all_loans: Vec::new(), - item_ub: region::CodeExtent::Misc(body.node_id), - move_data: MoveData::new(), + item_ub: region::Scope::Node(bccx.tcx.hir.body(body).value.hir_id.local_id), + move_data: MoveData::default(), move_error_collector: move_error::MoveErrorCollector::new(), }; - let body = glcx.bccx.tcx.hir.body(body); - euv::ExprUseVisitor::new(&mut glcx, bccx.tcx, param_env, &bccx.region_maps, bccx.tables) - .consume_body(body); + euv::ExprUseVisitor::new(&mut glcx, bccx.tcx, param_env, &bccx.region_scope_tree, bccx.tables) + .consume_body(bccx.body); glcx.report_potential_errors(); let GatherLoanCtxt { all_loans, move_data, .. } = glcx; @@ -64,7 +63,7 @@ struct GatherLoanCtxt<'a, 'tcx: 'a> { all_loans: Vec>, /// `item_ub` is used as an upper-bound on the lifetime whenever we /// ask for the scope of an expression categorized as an upvar. - item_ub: region::CodeExtent, + item_ub: region::Scope, } impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { @@ -80,7 +79,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { euv::Move(move_reason) => { gather_moves::gather_move_from_expr( self.bccx, &self.move_data, &mut self.move_error_collector, - consume_id, cmt, move_reason); + self.bccx.tcx.hir.node_to_hir_id(consume_id).local_id, cmt, move_reason); } euv::Copy => { } } @@ -127,8 +126,8 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { bk={:?}, loan_cause={:?})", borrow_id, cmt, loan_region, bk, loan_cause); - - self.guarantee_valid(borrow_id, + let hir_id = self.bccx.tcx.hir.node_to_hir_id(borrow_id); + self.guarantee_valid(hir_id.local_id, borrow_span, cmt, bk, @@ -274,8 +273,12 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { self.mark_loan_path_as_mutated(&lp); } gather_moves::gather_assignment(self.bccx, &self.move_data, - assignment_id, assignment_span, - lp, cmt.id, mode); + self.bccx.tcx.hir.node_to_hir_id(assignment_id) + .local_id, + assignment_span, + lp, + self.bccx.tcx.hir.node_to_hir_id(cmt.id).local_id, + mode); } None => { // This can occur with e.g. `*foo() = 5`. In such @@ -289,13 +292,13 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { /// reports an error. This may entail taking out loans, which will be added to the /// `req_loan_map`. 
fn guarantee_valid(&mut self, - borrow_id: ast::NodeId, + borrow_id: hir::ItemLocalId, borrow_span: Span, cmt: mc::cmt<'tcx>, req_kind: ty::BorrowKind, loan_region: ty::Region<'tcx>, cause: euv::LoanCause) { - debug!("guarantee_valid(borrow_id={}, cmt={:?}, \ + debug!("guarantee_valid(borrow_id={:?}, cmt={:?}, \ req_mutbl={:?}, loan_region={:?})", borrow_id, cmt, @@ -348,11 +351,11 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { ty::ReScope(scope) => scope, ty::ReEarlyBound(ref br) => { - self.bccx.region_maps.early_free_extent(self.tcx(), br) + self.bccx.region_scope_tree.early_free_scope(self.tcx(), br) } ty::ReFree(ref fr) => { - self.bccx.region_maps.free_extent(self.tcx(), fr) + self.bccx.region_scope_tree.free_scope(self.tcx(), fr) } ty::ReStatic => self.item_ub, @@ -370,7 +373,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { }; debug!("loan_scope = {:?}", loan_scope); - let borrow_scope = region::CodeExtent::Misc(borrow_id); + let borrow_scope = region::Scope::Node(borrow_id); let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope); debug!("gen_scope = {:?}", gen_scope); @@ -394,7 +397,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { } }; - debug!("guarantee_valid(borrow_id={}), loan={:?}", + debug!("guarantee_valid(borrow_id={:?}), loan={:?}", borrow_id, loan); // let loan_path = loan.loan_path; @@ -470,23 +473,23 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { } pub fn compute_gen_scope(&self, - borrow_scope: region::CodeExtent, - loan_scope: region::CodeExtent) - -> region::CodeExtent { + borrow_scope: region::Scope, + loan_scope: region::Scope) + -> region::Scope { //! Determine when to introduce the loan. Typically the loan //! is introduced at the point of the borrow, but in some cases, //! notably method arguments, the loan may be introduced only //! later, once it comes into scope. - if self.bccx.region_maps.is_subscope_of(borrow_scope, loan_scope) { + if self.bccx.region_scope_tree.is_subscope_of(borrow_scope, loan_scope) { borrow_scope } else { loan_scope } } - pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent, lp: &LoanPath<'tcx>) - -> region::CodeExtent { + pub fn compute_kill_scope(&self, loan_scope: region::Scope, lp: &LoanPath<'tcx>) + -> region::Scope { //! Determine when the loan restrictions go out of scope. //! This is either when the lifetime expires or when the //! local variable which roots the loan-path goes out of scope, @@ -509,10 +512,10 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { //! do not require restrictions and hence do not cause a loan. 
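A source-level illustration of the kill-scope rule documented in `compute_kill_scope` above may help; this sketch is not part of the patch and uses only plain Rust:

```rust
// Not part of the patch: a minimal illustration of the rule documented in
// `compute_kill_scope` above. The loan rooted at `x` is killed no later than
// the end of the block that owns `x`, even though the borrowed data is last
// used earlier; with lexical (pre-NLL) borrows, both scopes are lexical.
fn kill_scope_sketch() {
    let copied;
    {
        let x = 10;
        let r = &x;   // a loan of `x` is issued here
        copied = *r;  // the loan's region covers this use...
    }                 // ...and the loan is killed here, where `x` goes out of scope
    println!("{}", copied);
}
```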
let lexical_scope = lp.kill_scope(self.bccx); - if self.bccx.region_maps.is_subscope_of(lexical_scope, loan_scope) { + if self.bccx.region_scope_tree.is_subscope_of(lexical_scope, loan_scope) { lexical_scope } else { - assert!(self.bccx.region_maps.is_subscope_of(loan_scope, lexical_scope)); + assert!(self.bccx.region_scope_tree.is_subscope_of(loan_scope, lexical_scope)); loan_scope } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 38336655f217a..98c9a4a92ef30 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -32,11 +32,10 @@ use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::middle::mem_categorization::ImmutabilityBlame; -use rustc::middle::region::{self, RegionMaps}; +use rustc::middle::region; use rustc::middle::free_region::RegionRelations; use rustc::ty::{self, TyCtxt}; use rustc::ty::maps::Providers; - use rustc_mir::util::borrowck_errors::{BorrowckErrors, Origin}; use std::fmt; @@ -99,10 +98,9 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) { let body_id = tcx.hir.body_owned_by(owner_id); let tables = tcx.typeck_tables_of(owner_def_id); - let region_maps = tcx.region_maps(owner_def_id); - let bccx = &mut BorrowckCtxt { tcx, tables, region_maps, owner_def_id }; - - let body = bccx.tcx.hir.body(body_id); + let region_scope_tree = tcx.region_scope_tree(owner_def_id); + let body = tcx.hir.body(body_id); + let bccx = &mut BorrowckCtxt { tcx, tables, region_scope_tree, owner_def_id, body }; // Eventually, borrowck will always read the MIR, but at the // moment we do not. So, for now, we always force MIR to be @@ -139,10 +137,9 @@ fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tc { // Check the body of fn items. 
let tcx = this.tcx; - let body = tcx.hir.body(body_id); let id_range = { let mut visitor = intravisit::IdRangeComputingVisitor::new(&tcx.hir); - visitor.visit_body(body); + visitor.visit_body(this.body); visitor.result() }; let (all_loans, move_data) = @@ -163,24 +160,25 @@ fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tc let mut loan_dfcx = DataFlowContext::new(this.tcx, "borrowck", - Some(body), + Some(this.body), cfg, LoanDataFlowOperator, id_range, all_loans.len()); for (loan_idx, loan) in all_loans.iter().enumerate() { - loan_dfcx.add_gen(loan.gen_scope.node_id(), loan_idx); + loan_dfcx.add_gen(loan.gen_scope.item_local_id(), loan_idx); loan_dfcx.add_kill(KillFrom::ScopeEnd, - loan.kill_scope.node_id(), loan_idx); + loan.kill_scope.item_local_id(), + loan_idx); } loan_dfcx.add_kills_from_flow_exits(cfg); - loan_dfcx.propagate(cfg, body); + loan_dfcx.propagate(cfg, this.body); let flowed_moves = move_data::FlowedMoveData::new(move_data, this, cfg, id_range, - body); + this.body); Some(AnalysisData { all_loans, loans: loan_dfcx, @@ -198,8 +196,9 @@ pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( let owner_id = tcx.hir.body_owner(body_id); let owner_def_id = tcx.hir.local_def_id(owner_id); let tables = tcx.typeck_tables_of(owner_def_id); - let region_maps = tcx.region_maps(owner_def_id); - let mut bccx = BorrowckCtxt { tcx, tables, region_maps, owner_def_id }; + let region_scope_tree = tcx.region_scope_tree(owner_def_id); + let body = tcx.hir.body(body_id); + let mut bccx = BorrowckCtxt { tcx, tables, region_scope_tree, owner_def_id, body }; let dataflow_data = build_borrowck_dataflow_data(&mut bccx, true, body_id, |_| cfg); (bccx, dataflow_data.unwrap()) @@ -215,9 +214,11 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> { // Some in `borrowck_fn` and cleared later tables: &'a ty::TypeckTables<'tcx>, - region_maps: Rc, + region_scope_tree: Rc, owner_def_id: DefId, + + body: &'tcx hir::Body, } impl<'b, 'tcx: 'b> BorrowckErrors for BorrowckCtxt<'b, 'tcx> { @@ -254,13 +255,13 @@ pub struct Loan<'tcx> { /// cases, notably method arguments, the loan may be introduced /// only later, once it comes into scope. See also /// `GatherLoanCtxt::compute_gen_scope`. - gen_scope: region::CodeExtent, + gen_scope: region::Scope, /// kill_scope indicates when the loan goes out of scope. This is /// either when the lifetime expires or when the local variable /// which roots the loan-path goes out of scope, whichever happens /// faster. See also `GatherLoanCtxt::compute_kill_scope`. 
- kill_scope: region::CodeExtent, + kill_scope: region::Scope, span: Span, cause: euv::LoanCause, } @@ -349,7 +350,7 @@ fn closure_to_block(closure_id: DefIndex, let closure_id = tcx.hir.def_index_to_node_id(closure_id); match tcx.hir.get(closure_id) { hir_map::NodeExpr(expr) => match expr.node { - hir::ExprClosure(.., body_id, _) => { + hir::ExprClosure(.., body_id, _, _) => { body_id.node_id } _ => { @@ -361,12 +362,16 @@ fn closure_to_block(closure_id: DefIndex, } impl<'a, 'tcx> LoanPath<'tcx> { - pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::CodeExtent { + pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::Scope { match self.kind { - LpVar(local_id) => bccx.region_maps.var_scope(local_id), + LpVar(local_id) => { + let hir_id = bccx.tcx.hir.node_to_hir_id(local_id); + bccx.region_scope_tree.var_scope(hir_id.local_id) + } LpUpvar(upvar_id) => { let block_id = closure_to_block(upvar_id.closure_expr_id, bccx.tcx); - region::CodeExtent::Misc(block_id) + let hir_id = bccx.tcx.hir.node_to_hir_id(block_id); + region::Scope::Node(hir_id.local_id) } LpDowncast(ref base, _) | LpExtend(ref base, ..) => base.kill_scope(bccx), @@ -530,7 +535,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { { let region_rels = RegionRelations::new(self.tcx, self.owner_def_id, - &self.region_maps, + &self.region_scope_tree, &self.tables.free_region_map); region_rels.is_subregion_of(r_sub, r_sup) } @@ -549,9 +554,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { _ => { } } - let mut db = self.bckerr_to_diag(&err); - self.note_and_explain_bckerr(&mut db, err); - db.emit(); + self.report_bckerr(&err); } pub fn report_use_of_moved_value(&self, @@ -641,19 +644,22 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { // Get type of value and span where it was previously // moved. 
+ let node_id = self.tcx.hir.hir_to_node_id(hir::HirId { + owner: self.body.value.hir_id.owner, + local_id: the_move.id + }); let (move_span, move_note) = match the_move.kind { move_data::Declared => { unreachable!(); } move_data::MoveExpr | - move_data::MovePat => - (self.tcx.hir.span(the_move.id), ""), + move_data::MovePat => (self.tcx.hir.span(node_id), ""), move_data::Captured => - (match self.tcx.hir.expect_expr(the_move.id).node { - hir::ExprClosure(.., fn_decl_span) => fn_decl_span, - ref r => bug!("Captured({}) maps to non-closure: {:?}", + (match self.tcx.hir.expect_expr(node_id).node { + hir::ExprClosure(.., fn_decl_span, _) => fn_decl_span, + ref r => bug!("Captured({:?}) maps to non-closure: {:?}", the_move.id, r), }, " (into closure)"), }; @@ -722,8 +728,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.struct_span_err_with_code(s, msg, code) } - fn bckerr_to_diag(&self, err: &BckError<'tcx>) -> DiagnosticBuilder<'a> { - let span = err.span.clone(); + pub fn span_err_with_code>(&self, s: S, msg: &str, code: &str) { + self.tcx.sess.span_err_with_code(s, msg, code); + } + + fn report_bckerr(&self, err: &BckError<'tcx>) { + let error_span = err.span.clone(); match err.code { err_mutbl => { @@ -747,12 +757,16 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } }; - match err.cause { + let mut db = match err.cause { MutabilityViolation => { - struct_span_err!(self.tcx.sess, span, E0594, "cannot assign to {}", descr) + struct_span_err!(self.tcx.sess, + error_span, + E0594, + "cannot assign to {}", + descr) } BorrowViolation(euv::ClosureCapture(_)) => { - struct_span_err!(self.tcx.sess, span, E0595, + struct_span_err!(self.tcx.sess, error_span, E0595, "closure cannot assign to {}", descr) } BorrowViolation(euv::OverloadedOperator) | @@ -762,30 +776,238 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { BorrowViolation(euv::AutoUnsafe) | BorrowViolation(euv::ForLoop) | BorrowViolation(euv::MatchDiscriminant) => { - struct_span_err!(self.tcx.sess, span, E0596, + struct_span_err!(self.tcx.sess, error_span, E0596, "cannot borrow {} as mutable", descr) } BorrowViolation(euv::ClosureInvocation) => { span_bug!(err.span, "err_mutbl with a closure invocation"); } - } + }; + + self.note_and_explain_mutbl_error(&mut db, &err, &error_span); + self.note_immutability_blame(&mut db, err.cmt.immutability_blame()); + db.emit(); } - err_out_of_scope(..) => { + err_out_of_scope(super_scope, sub_scope, cause) => { let msg = match opt_loan_path(&err.cmt) { None => "borrowed value".to_string(), Some(lp) => { format!("`{}`", self.loan_path_to_string(&lp)) } }; - struct_span_err!(self.tcx.sess, span, E0597, "{} does not live long enough", msg) + + // When you have a borrow that lives across a yield, + // that reference winds up captured in the generator + // type. Regionck then constraints it to live as long + // as the generator itself. If that borrow is borrowing + // data owned by the generator, this winds up resulting in + // an `err_out_of_scope` error: + // + // ``` + // { + // let g = || { + // let a = &3; // this borrow is forced to ... -+ + // yield (); // | + // println!("{}", a); // | + // }; // | + // } <----------------------... live until here --------+ + // ``` + // + // To detect this case, we look for cases where the + // `super_scope` (lifetime of the value) is within the + // body, but the `sub_scope` is not. 
+ debug!("err_out_of_scope: self.body.is_generator = {:?}", + self.body.is_generator); + let maybe_borrow_across_yield = if self.body.is_generator { + let body_scope = region::Scope::Node(self.body.value.hir_id.local_id); + debug!("err_out_of_scope: body_scope = {:?}", body_scope); + debug!("err_out_of_scope: super_scope = {:?}", super_scope); + debug!("err_out_of_scope: sub_scope = {:?}", sub_scope); + match (super_scope, sub_scope) { + (&ty::RegionKind::ReScope(value_scope), + &ty::RegionKind::ReScope(loan_scope)) => { + if { + // value_scope <= body_scope && + self.region_scope_tree.is_subscope_of(value_scope, body_scope) && + // body_scope <= loan_scope + self.region_scope_tree.is_subscope_of(body_scope, loan_scope) + } { + // We now know that this is a case + // that fits the bill described above: + // a borrow of something whose scope + // is within the generator, but the + // borrow is for a scope outside the + // generator. + // + // Now look within the scope of the of + // the value being borrowed (in the + // example above, that would be the + // block remainder that starts with + // `let a`) for a yield. We can cite + // that for the user. + self.region_scope_tree.yield_in_scope(value_scope) + } else { + None + } + } + _ => None, + } + } else { + None + }; + + if let Some(yield_span) = maybe_borrow_across_yield { + debug!("err_out_of_scope: opt_yield_span = {:?}", yield_span); + struct_span_err!(self.tcx.sess, + error_span, + E0626, + "borrow may still be in use when generator yields") + .span_label(yield_span, "possible yield occurs here") + .emit(); + return; + } + + let mut db = struct_span_err!(self.tcx.sess, + error_span, + E0597, + "{} does not live long enough", + msg); + + let (value_kind, value_msg) = match err.cmt.cat { + mc::Categorization::Rvalue(..) => + ("temporary value", "temporary value created here"), + _ => + ("borrowed value", "borrow occurs here") + }; + + let is_closure = match cause { + euv::ClosureCapture(s) => { + // The primary span starts out as the closure creation point. + // Change the primary span here to highlight the use of the variable + // in the closure, because it seems more natural. Highlight + // closure creation point as a secondary span. 
+ match db.span.primary_span() { + Some(primary) => { + db.span = MultiSpan::from_span(s); + db.span_label(primary, "capture occurs here"); + db.span_label(s, "does not live long enough"); + true + } + None => false + } + } + _ => { + db.span_label(error_span, "does not live long enough"); + false + } + }; + + let sub_span = self.region_end_span(sub_scope); + let super_span = self.region_end_span(super_scope); + + match (sub_span, super_span) { + (Some(s1), Some(s2)) if s1 == s2 => { + if !is_closure { + db.span = MultiSpan::from_span(s1); + db.span_label(error_span, value_msg); + let msg = match opt_loan_path(&err.cmt) { + None => value_kind.to_string(), + Some(lp) => { + format!("`{}`", self.loan_path_to_string(&lp)) + } + }; + db.span_label(s1, + format!("{} dropped here while still borrowed", msg)); + } else { + db.span_label(s1, format!("{} dropped before borrower", value_kind)); + } + db.note("values in a scope are dropped in the opposite order \ + they are created"); + } + (Some(s1), Some(s2)) if !is_closure => { + db.span = MultiSpan::from_span(s2); + db.span_label(error_span, value_msg); + let msg = match opt_loan_path(&err.cmt) { + None => value_kind.to_string(), + Some(lp) => { + format!("`{}`", self.loan_path_to_string(&lp)) + } + }; + db.span_label(s2, format!("{} dropped here while still borrowed", msg)); + db.span_label(s1, format!("{} needs to live until here", value_kind)); + } + _ => { + match sub_span { + Some(s) => { + db.span_label(s, format!("{} needs to live until here", + value_kind)); + } + None => { + self.tcx.note_and_explain_region( + &self.region_scope_tree, + &mut db, + "borrowed value must be valid for ", + sub_scope, + "..."); + } + } + match super_span { + Some(s) => { + db.span_label(s, format!("{} only lives until here", value_kind)); + } + None => { + self.tcx.note_and_explain_region( + &self.region_scope_tree, + &mut db, + "...but borrowed value is only valid for ", + super_scope, + ""); + } + } + } + } + + if let ty::ReScope(scope) = *super_scope { + let node_id = scope.node_id(self.tcx, &self.region_scope_tree); + match self.tcx.hir.find(node_id) { + Some(hir_map::NodeStmt(_)) => { + db.note("consider using a `let` binding to increase its lifetime"); + } + _ => {} + } + } + + db.emit(); } - err_borrowed_pointer_too_short(..) 
=> { + err_borrowed_pointer_too_short(loan_scope, ptr_scope) => { let descr = self.cmt_to_path_or_string(&err.cmt); - struct_span_err!(self.tcx.sess, span, E0598, - "lifetime of {} is too short to guarantee \ - its contents can be safely reborrowed", - descr) + let mut db = struct_span_err!(self.tcx.sess, error_span, E0598, + "lifetime of {} is too short to guarantee \ + its contents can be safely reborrowed", + descr); + + let descr = match opt_loan_path(&err.cmt) { + Some(lp) => { + format!("`{}`", self.loan_path_to_string(&lp)) + } + None => self.cmt_to_string(&err.cmt), + }; + self.tcx.note_and_explain_region( + &self.region_scope_tree, + &mut db, + &format!("{} would have to be valid for ", + descr), + loan_scope, + "..."); + self.tcx.note_and_explain_region( + &self.region_scope_tree, + &mut db, + &format!("...but {} is only valid for ", descr), + ptr_scope, + ""); + + db.emit(); } } } @@ -1035,146 +1257,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { fn region_end_span(&self, region: ty::Region<'tcx>) -> Option { match *region { ty::ReScope(scope) => { - match scope.span(&self.tcx.hir) { - Some(s) => { - Some(s.end_point()) - } - None => { - None - } - } + Some(scope.span(self.tcx, &self.region_scope_tree).end_point()) } _ => None } } - fn note_and_explain_bckerr(&self, db: &mut DiagnosticBuilder, err: BckError<'tcx>) { - let error_span = err.span.clone(); - match err.code { - err_mutbl => { - self.note_and_explain_mutbl_error(db, &err, &error_span); - self.note_immutability_blame(db, err.cmt.immutability_blame()); - } - err_out_of_scope(super_scope, sub_scope, cause) => { - let (value_kind, value_msg) = match err.cmt.cat { - mc::Categorization::Rvalue(..) => - ("temporary value", "temporary value created here"), - _ => - ("borrowed value", "borrow occurs here") - }; - - let is_closure = match cause { - euv::ClosureCapture(s) => { - // The primary span starts out as the closure creation point. - // Change the primary span here to highlight the use of the variable - // in the closure, because it seems more natural. Highlight - // closure creation point as a secondary span. 
- match db.span.primary_span() { - Some(primary) => { - db.span = MultiSpan::from_span(s); - db.span_label(primary, "capture occurs here"); - db.span_label(s, "does not live long enough"); - true - } - None => false - } - } - _ => { - db.span_label(error_span, "does not live long enough"); - false - } - }; - - let sub_span = self.region_end_span(sub_scope); - let super_span = self.region_end_span(super_scope); - - match (sub_span, super_span) { - (Some(s1), Some(s2)) if s1 == s2 => { - if !is_closure { - db.span = MultiSpan::from_span(s1); - db.span_label(error_span, value_msg); - let msg = match opt_loan_path(&err.cmt) { - None => value_kind.to_string(), - Some(lp) => { - format!("`{}`", self.loan_path_to_string(&lp)) - } - }; - db.span_label(s1, - format!("{} dropped here while still borrowed", msg)); - } else { - db.span_label(s1, format!("{} dropped before borrower", value_kind)); - } - db.note("values in a scope are dropped in the opposite order \ - they are created"); - } - (Some(s1), Some(s2)) if !is_closure => { - db.span = MultiSpan::from_span(s2); - db.span_label(error_span, value_msg); - let msg = match opt_loan_path(&err.cmt) { - None => value_kind.to_string(), - Some(lp) => { - format!("`{}`", self.loan_path_to_string(&lp)) - } - }; - db.span_label(s2, format!("{} dropped here while still borrowed", msg)); - db.span_label(s1, format!("{} needs to live until here", value_kind)); - } - _ => { - match sub_span { - Some(s) => { - db.span_label(s, format!("{} needs to live until here", - value_kind)); - } - None => { - self.tcx.note_and_explain_region( - db, - "borrowed value must be valid for ", - sub_scope, - "..."); - } - } - match super_span { - Some(s) => { - db.span_label(s, format!("{} only lives until here", value_kind)); - } - None => { - self.tcx.note_and_explain_region( - db, - "...but borrowed value is only valid for ", - super_scope, - ""); - } - } - } - } - - if let Some(_) = statement_scope_span(self.tcx, super_scope) { - db.note("consider using a `let` binding to increase its lifetime"); - } - } - - err_borrowed_pointer_too_short(loan_scope, ptr_scope) => { - let descr = match opt_loan_path(&err.cmt) { - Some(lp) => { - format!("`{}`", self.loan_path_to_string(&lp)) - } - None => self.cmt_to_string(&err.cmt), - }; - self.tcx.note_and_explain_region( - db, - &format!("{} would have to be valid for ", - descr), - loan_scope, - "..."); - self.tcx.note_and_explain_region( - db, - &format!("...but {} is only valid for ", descr), - ptr_scope, - ""); - } - } - } - fn note_and_explain_mutbl_error(&self, db: &mut DiagnosticBuilder, err: &BckError<'tcx>, error_span: &Span) { match err.cmt.note { @@ -1306,18 +1394,6 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } } -fn statement_scope_span(tcx: TyCtxt, region: ty::Region) -> Option { - match *region { - ty::ReScope(scope) => { - match tcx.hir.find(scope.node_id()) { - Some(hir_map::NodeStmt(stmt)) => Some(stmt.span), - _ => None - } - } - _ => None - } -} - impl BitwiseOperator for LoanDataFlowOperator { #[inline] fn join(&self, succ: usize, pred: usize) -> usize { diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index 217bd6e6ca1ca..7915eccbf7445 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -23,16 +23,16 @@ use rustc::middle::expr_use_visitor as euv; use rustc::middle::expr_use_visitor::MutateMode; use rustc::middle::mem_categorization as mc; use rustc::ty::{self, TyCtxt}; -use rustc::util::nodemap::{FxHashMap, 
NodeSet}; +use rustc::util::nodemap::{FxHashMap, FxHashSet}; use std::cell::RefCell; use std::rc::Rc; use std::usize; -use syntax::ast; use syntax_pos::Span; use rustc::hir; use rustc::hir::intravisit::IdRange; +#[derive(Default)] pub struct MoveData<'tcx> { /// Move paths. See section "Move paths" in `README.md`. pub paths: RefCell>>, @@ -54,7 +54,7 @@ pub struct MoveData<'tcx> { pub path_assignments: RefCell>, /// Assignments to a variable or path, like `x = foo`, but not `x += foo`. - pub assignee_ids: RefCell, + pub assignee_ids: RefCell>, } pub struct FlowedMoveData<'a, 'tcx: 'a> { @@ -133,7 +133,7 @@ pub struct Move { pub path: MovePathIndex, /// id of node that is doing the move. - pub id: ast::NodeId, + pub id: hir::ItemLocalId, /// Kind of move, for error messages. pub kind: MoveKind, @@ -148,13 +148,13 @@ pub struct Assignment { pub path: MovePathIndex, /// id where assignment occurs - pub id: ast::NodeId, + pub id: hir::ItemLocalId, /// span of node where assignment occurs pub span: Span, /// id for l-value expression on lhs of assignment - pub assignee_id: ast::NodeId, + pub assignee_id: hir::ItemLocalId, } #[derive(Clone, Copy)] @@ -189,17 +189,6 @@ fn loan_path_is_precise(loan_path: &LoanPath) -> bool { } impl<'a, 'tcx> MoveData<'tcx> { - pub fn new() -> MoveData<'tcx> { - MoveData { - paths: RefCell::new(Vec::new()), - path_map: RefCell::new(FxHashMap()), - moves: RefCell::new(Vec::new()), - path_assignments: RefCell::new(Vec::new()), - var_assignments: RefCell::new(Vec::new()), - assignee_ids: RefCell::new(NodeSet()), - } - } - /// return true if there are no trackable assignments or moves /// in this move data - that means that there is nothing that /// could cause a borrow error. @@ -345,7 +334,7 @@ impl<'a, 'tcx> MoveData<'tcx> { /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`. pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, orig_lp: Rc>, - id: ast::NodeId, + id: hir::ItemLocalId, kind: MoveKind) { // Moving one union field automatically moves all its fields. Also move siblings of // all parent union fields, moves do not propagate upwards automatically. @@ -373,9 +362,9 @@ impl<'a, 'tcx> MoveData<'tcx> { fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, - id: ast::NodeId, + id: hir::ItemLocalId, kind: MoveKind) { - debug!("add_move(lp={:?}, id={}, kind={:?})", + debug!("add_move(lp={:?}, id={:?}, kind={:?})", lp, id, kind); @@ -398,9 +387,9 @@ impl<'a, 'tcx> MoveData<'tcx> { /// `span`. pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, - assign_id: ast::NodeId, + assign_id: hir::ItemLocalId, span: Span, - assignee_id: ast::NodeId, + assignee_id: hir::ItemLocalId, mode: euv::MutateMode) { // Assigning to one union field automatically assigns to all its fields. if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind { @@ -429,11 +418,11 @@ impl<'a, 'tcx> MoveData<'tcx> { fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, - assign_id: ast::NodeId, + assign_id: hir::ItemLocalId, span: Span, - assignee_id: ast::NodeId, + assignee_id: hir::ItemLocalId, mode: euv::MutateMode) { - debug!("add_assignment(lp={:?}, assign_id={}, assignee_id={}", + debug!("add_assignment(lp={:?}, assign_id={:?}, assignee_id={:?}", lp, assign_id, assignee_id); let path_index = self.move_path(tcx, lp.clone()); @@ -496,7 +485,7 @@ impl<'a, 'tcx> MoveData<'tcx> { LpVar(..) | LpUpvar(..) | LpDowncast(..) 
=> { let kill_scope = path.loan_path.kill_scope(bccx); let path = *self.path_map.borrow().get(&path.loan_path).unwrap(); - self.kill_moves(path, kill_scope.node_id(), + self.kill_moves(path, kill_scope.item_local_id(), KillFrom::ScopeEnd, dfcx_moves); } LpExtend(..) => {} @@ -511,7 +500,7 @@ impl<'a, 'tcx> MoveData<'tcx> { LpVar(..) | LpUpvar(..) | LpDowncast(..) => { let kill_scope = lp.kill_scope(bccx); dfcx_assign.add_kill(KillFrom::ScopeEnd, - kill_scope.node_id(), + kill_scope.item_local_id(), assignment_index); } LpExtend(..) => { @@ -579,7 +568,7 @@ impl<'a, 'tcx> MoveData<'tcx> { fn kill_moves(&self, path: MovePathIndex, - kill_id: ast::NodeId, + kill_id: hir::ItemLocalId, kill_kind: KillFrom, dfcx_moves: &mut MoveDataFlow) { // We can only perform kills for paths that refer to a unique location, @@ -589,7 +578,7 @@ impl<'a, 'tcx> MoveData<'tcx> { let loan_path = self.path_loan_path(path); if loan_path_is_precise(&loan_path) { self.each_applicable_move(path, |move_index| { - debug!("kill_moves add_kill {:?} kill_id={} move_index={}", + debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}", kill_kind, kill_id, move_index.get()); dfcx_moves.add_kill(kill_kind, kill_id, move_index.get()); true @@ -642,7 +631,7 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { } pub fn kind_of_move_of_path(&self, - id: ast::NodeId, + id: hir::ItemLocalId, loan_path: &Rc>) -> Option { //! Returns the kind of a move of `loan_path` by `id`, if one exists. @@ -667,7 +656,7 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { /// have occurred on entry to `id` without an intervening assignment. In other words, any moves /// that would invalidate a reference to `loan_path` at location `id`. pub fn each_move_of(&self, - id: ast::NodeId, + id: hir::ItemLocalId, loan_path: &Rc>, mut f: F) -> bool where @@ -724,7 +713,7 @@ impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`. /// `loan_path` must be a single variable. pub fn each_assignment_of(&self, - id: ast::NodeId, + id: hir::ItemLocalId, loan_path: &Rc>, mut f: F) -> bool where diff --git a/src/librustc_borrowck/diagnostics.rs b/src/librustc_borrowck/diagnostics.rs index 1f1fc4cc65fbe..74133c821f0fa 100644 --- a/src/librustc_borrowck/diagnostics.rs +++ b/src/librustc_borrowck/diagnostics.rs @@ -652,6 +652,92 @@ x.x = Some(&y); ``` "##, +E0626: r##" +This error occurs because a borrow in a generator persists across a +yield point. + +```compile_fail,E0626 +# #![feature(generators, generator_trait)] +# use std::ops::Generator; +let mut b = || { + let a = &String::new(); // <-- This borrow... + yield (); // ...is still in scope here, when the yield occurs. + println!("{}", a); +}; +b.resume(); +``` + +At present, it is not permitted to have a yield that occurs while a +borrow is still in scope. To resolve this error, the borrow must +either be "contained" to a smaller scope that does not overlap the +yield or else eliminated in another way. So, for example, we might +resolve the previous example by removing the borrow and just storing +the integer by value: + +``` +# #![feature(generators, generator_trait)] +# use std::ops::Generator; +let mut b = || { + let a = 3; + yield (); + println!("{}", a); +}; +b.resume(); +``` + +This is a very simple case, of course. In more complex cases, we may +wish to have more than one reference to the value that was borrowed -- +in those cases, something like the `Rc` or `Arc` types may be useful. 
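As a minimal sketch of the `Rc` approach mentioned above (not taken from the patch itself, and assuming the same unstable `generators`/`generator_trait` features as the surrounding examples), the shared value can be cloned into an owned handle that the generator captures by `move`, so no borrow has to live across the `yield`:

```rust
#![feature(generators, generator_trait)]
// Sketch only: assumes the unstable generator API used elsewhere in this file.
use std::ops::Generator;
use std::rc::Rc;

fn main() {
    let shared = Rc::new(String::from("hello"));
    // Clone an owned handle and move it into the generator; the generator
    // then owns its copy of the `Rc`, so nothing is borrowed across the yield.
    let handle = Rc::clone(&shared);
    let mut b = move || {
        yield ();
        println!("{}", handle);
    };
    b.resume();
    // The original handle is still usable here.
    println!("{}", shared);
}
```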
+ +This error also frequently arises with iteration: + +```compile_fail,E0626 +# #![feature(generators, generator_trait)] +# use std::ops::Generator; +let mut b = || { + let v = vec![1,2,3]; + for &x in &v { // <-- borrow of `v` is still in scope... + yield x; // ...when this yield occurs. + } +}; +b.resume(); +``` + +Such cases can sometimes be resolved by iterating "by value" (or using +`into_iter()`) to avoid borrowing: + +``` +# #![feature(generators, generator_trait)] +# use std::ops::Generator; +let mut b = || { + let v = vec![1,2,3]; + for x in v { // <-- Take ownership of the values instead! + yield x; // <-- Now yield is OK. + } +}; +b.resume(); +``` + +If taking ownership is not an option, using indices can work too: + +``` +# #![feature(generators, generator_trait)] +# use std::ops::Generator; +let mut b = || { + let v = vec![1,2,3]; + let len = v.len(); // (*) + for i in 0..len { + let x = v[i]; // (*) + yield x; // <-- Now yield is OK. + } +}; +b.resume(); + +// (*) -- Unfortunately, these temporaries are currently required. +// See . +``` +"##, + } register_diagnostics! { diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index e3a2bfa392738..22867ba5b55a4 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -52,7 +52,7 @@ pub struct DataflowLabeller<'a, 'tcx: 'a> { impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String { let id = n.1.data.id(); - debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants); + debug!("dataflow_for({:?}, id={:?}) {:?}", e, id, self.variants); let mut sets = "".to_string(); let mut seen_one = false; for &variant in &self.variants { diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index 2bed1950afc28..d832ad0ab1d20 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -18,7 +18,7 @@ use rustc::middle::expr_use_visitor::{ConsumeMode, Delegate, ExprUseVisitor}; use rustc::middle::expr_use_visitor::{LoanCause, MutateMode}; use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization::{cmt}; -use rustc::middle::region::RegionMaps; +use rustc::middle::region; use rustc::session::Session; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::subst::Substs; @@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for OuterVisitor<'a, 'tcx> { MatchVisitor { tcx: self.tcx, tables: self.tcx.body_tables(b), - region_maps: &self.tcx.region_maps(def_id), + region_scope_tree: &self.tcx.region_scope_tree(def_id), param_env: self.tcx.param_env(def_id), identity_substs: Substs::identity_for_item(self.tcx, def_id), }.visit_body(self.tcx.hir.body(b)); @@ -72,7 +72,7 @@ struct MatchVisitor<'a, 'tcx: 'a> { tables: &'a ty::TypeckTables<'tcx>, param_env: ty::ParamEnv<'tcx>, identity_substs: &'tcx Substs<'tcx>, - region_maps: &'a RegionMaps, + region_scope_tree: &'a region::ScopeTree, } impl<'a, 'tcx> Visitor<'tcx> for MatchVisitor<'a, 'tcx> { @@ -526,7 +526,7 @@ fn check_for_mutation_in_guard(cx: &MatchVisitor, guard: &hir::Expr) { let mut checker = MutationChecker { cx, }; - ExprUseVisitor::new(&mut checker, cx.tcx, cx.param_env, cx.region_maps, cx.tables) + ExprUseVisitor::new(&mut checker, cx.tcx, cx.param_env, cx.region_scope_tree, cx.tables) .walk_expr(guard); } diff --git a/src/librustc_data_structures/indexed_set.rs b/src/librustc_data_structures/indexed_set.rs index 9cb6806e9ade5..47fa21e3bf0b2 100644 --- 
a/src/librustc_data_structures/indexed_set.rs +++ b/src/librustc_data_structures/indexed_set.rs @@ -9,9 +9,11 @@ // except according to those terms. use std::fmt; +use std::iter; use std::marker::PhantomData; use std::mem; use std::ops::{Deref, DerefMut, Range}; +use std::slice; use bitslice::{BitSlice, Word}; use bitslice::{bitwise, Union, Subtract}; use indexed_vec::Idx; @@ -21,6 +23,7 @@ use indexed_vec::Idx; /// /// In other words, `T` is the type used to index into the bitvector /// this type uses to represent the set of object it holds. +#[derive(Eq, PartialEq)] pub struct IdxSetBuf { _pd: PhantomData, bits: Vec, @@ -109,6 +112,13 @@ impl IdxSet { } } + /// Removes all elements + pub fn clear(&mut self) { + for b in &mut self.bits { + *b = 0; + } + } + /// Removes `elem` from the set `self`; returns true iff this changed `self`. pub fn remove(&mut self, elem: &T) -> bool { self.bits.clear_bit(elem.index()) @@ -154,6 +164,14 @@ impl IdxSet { bitwise(self.words_mut(), other.words(), &Subtract) } + pub fn iter(&self) -> Iter { + Iter { + cur: None, + iter: self.words().iter().enumerate(), + _pd: PhantomData, + } + } + /// Calls `f` on each index value held in this set, up to the /// bound `max_bits` on the size of universe of indexes. pub fn each_bit(&self, max_bits: usize, f: F) where F: FnMut(T) { @@ -218,3 +236,32 @@ fn each_bit(words: &IdxSet, max_bits: usize, mut f: F) where F: Fn } } } + +pub struct Iter<'a, T: Idx> { + cur: Option<(Word, usize)>, + iter: iter::Enumerate>, + _pd: PhantomData, +} + +impl<'a, T: Idx> Iterator for Iter<'a, T> { + type Item = T; + + fn next(&mut self) -> Option { + let word_bits = mem::size_of::() * 8; + loop { + if let Some((ref mut word, offset)) = self.cur { + let bit_pos = word.trailing_zeros() as usize; + if bit_pos != word_bits { + let bit = 1 << bit_pos; + *word ^= bit; + return Some(T::new(bit_pos + offset)) + } + } + + match self.iter.next() { + Some((i, word)) => self.cur = Some((*word, word_bits * i)), + None => return None, + } + } + } +} diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index bc9bfa4049723..1d0e88ee32855 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -38,7 +38,7 @@ impl Idx for u32 { fn index(self) -> usize { self as usize } } -#[derive(Clone)] +#[derive(Clone, PartialEq, Eq)] pub struct IndexVec { pub raw: Vec, _marker: PhantomData diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index da00ebc4b9ee9..47061883425e2 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -29,7 +29,6 @@ #![feature(unsize)] #![feature(i128_type)] #![feature(conservative_impl_trait)] -#![feature(discriminant_value)] #![feature(specialization)] #![cfg_attr(unix, feature(libc))] diff --git a/src/librustc_data_structures/stable_hasher.rs b/src/librustc_data_structures/stable_hasher.rs index 6801aa455e11e..33c4a041cff88 100644 --- a/src/librustc_data_structures/stable_hasher.rs +++ b/src/librustc_data_structures/stable_hasher.rs @@ -292,6 +292,15 @@ impl, CTX> HashStable for Vec { } } +impl, CTX> HashStable for Box { + #[inline] + fn hash_stable(&self, + ctx: &mut CTX, + hasher: &mut StableHasher) { + (**self).hash_stable(ctx, hasher); + } +} + impl, CTX> HashStable for ::std::rc::Rc { #[inline] fn hash_stable(&self, diff --git a/src/librustc_data_structures/unify/mod.rs b/src/librustc_data_structures/unify/mod.rs index 7853bf9478ab8..5411ae0257a4b 100644 
--- a/src/librustc_data_structures/unify/mod.rs +++ b/src/librustc_data_structures/unify/mod.rs @@ -119,10 +119,10 @@ impl VarValue { } } -// We can't use V:LatticeValue, much as I would like to, -// because frequently the pattern is that V=Option for some -// other type parameter U, and we have no way to say -// Option:LatticeValue. +/// We can't use V:LatticeValue, much as I would like to, +/// because frequently the pattern is that V=Option for some +/// other type parameter U, and we have no way to say +/// Option:LatticeValue. impl UnificationTable { pub fn new() -> UnificationTable { @@ -249,7 +249,7 @@ impl sv::SnapshotVecDelegate for Delegate { fn reverse(_: &mut Vec>, _: ()) {} } -// # Base union-find algorithm, where we are just making sets +/// # Base union-find algorithm, where we are just making sets impl<'tcx, K: UnifyKey> UnificationTable where K::Value: Combine @@ -281,11 +281,11 @@ impl<'tcx, K: UnifyKey> UnificationTable } } -// # Non-subtyping unification -// -// Code to handle keys which carry a value, like ints, -// floats---anything that doesn't have a subtyping relationship we -// need to worry about. +/// # Non-subtyping unification +/// +/// Code to handle keys which carry a value, like ints, +/// floats---anything that doesn't have a subtyping relationship we +/// need to worry about. impl<'tcx, K, V> UnificationTable where K: UnifyKey>, diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 96688c6ac9cdb..000d89241a427 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -1015,6 +1015,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, passes.push_pass(MIR_OPTIMIZED, mir::transform::deaggregator::Deaggregator); passes.push_pass(MIR_OPTIMIZED, mir::transform::copy_prop::CopyPropagation); passes.push_pass(MIR_OPTIMIZED, mir::transform::simplify::SimplifyLocals); + + passes.push_pass(MIR_OPTIMIZED, mir::transform::generator::StateTransform); passes.push_pass(MIR_OPTIMIZED, mir::transform::add_call_guards::CriticalCallEdges); passes.push_pass(MIR_OPTIMIZED, mir::transform::dump_mir::Marker("PreTrans")); diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 1915a1c86484a..5011b53799043 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -741,7 +741,9 @@ impl RustcDefaultCalls { odir: &Option, ofile: &Option) -> Compilation { - if sess.opts.prints.is_empty() { + // PrintRequest::NativeStaticLibs is special - printed during linking + // (empty iterator returns true) + if sess.opts.prints.iter().all(|&p| p==PrintRequest::NativeStaticLibs) { return Compilation::Continue; } @@ -851,6 +853,9 @@ impl RustcDefaultCalls { PrintRequest::TargetCPUs | PrintRequest::TargetFeatures => { rustc_trans::print(*req, sess); } + PrintRequest::NativeStaticLibs => { + println!("Native static libs can be printed only during linking"); + } } } return Compilation::Stop; diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 20f2a146b0b15..6a58b7fb75360 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -765,7 +765,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, let cfg = cfg::CFG::new(tcx, &body); let labelled_edges = mode != PpFlowGraphMode::UnlabelledEdges; let lcfg = LabelledCFG { - hir_map: &tcx.hir, + tcx, cfg: &cfg, name: format!("node_{}", code.id()), labelled_edges, diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index b187cdaa480ed..552130e8a4703 100644 --- 
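> Editor's note: the rewritten check in `RustcDefaultCalls` above relies on `Iterator::all` being vacuously true for an empty iterator, so the old `prints.is_empty()` early return is preserved while also continuing when the only request is `NativeStaticLibs` (which is printed during linking instead). A small standalone illustration with plain strings in place of the `PrintRequest` enum:

```rust
fn main() {
    // With no requests at all, `all` is vacuously true -> continue compilation,
    // exactly as the old `is_empty()` check did.
    let none: Vec<&str> = Vec::new();
    assert!(none.iter().all(|&p| p == "native-static-libs"));

    // If every request is the link-time-only one, compilation also continues...
    let only_native = vec!["native-static-libs"];
    assert!(only_native.iter().all(|&p| p == "native-static-libs"));

    // ...but any other request means we stop and print the information here.
    let mixed = vec!["native-static-libs", "target-list"];
    assert!(!mixed.iter().all(|&p| p == "native-static-libs"));
}
```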
a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -17,7 +17,7 @@ use rustc_resolve::MakeGlobMap; use rustc_trans; use rustc::middle::lang_items; use rustc::middle::free_region::FreeRegionMap; -use rustc::middle::region::{CodeExtent, RegionMaps}; +use rustc::middle::region; use rustc::middle::resolve_lifetime; use rustc::middle::stability; use rustc::ty::subst::{Kind, Subst}; @@ -45,12 +45,12 @@ use rustc::hir; struct Env<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a mut RegionMaps, + region_scope_tree: &'a mut region::ScopeTree, param_env: ty::ParamEnv<'tcx>, } struct RH<'a> { - id: ast::NodeId, + id: hir::ItemLocalId, sub: &'a [RH<'a>], } @@ -157,15 +157,15 @@ fn test_env(source_string: &str, "test_crate", |tcx| { tcx.infer_ctxt().enter(|infcx| { - let mut region_maps = RegionMaps::new(); + let mut region_scope_tree = region::ScopeTree::default(); body(Env { infcx: &infcx, - region_maps: &mut region_maps, + region_scope_tree: &mut region_scope_tree, param_env: ty::ParamEnv::empty(Reveal::UserFacing), }); let free_regions = FreeRegionMap::new(); let def_id = tcx.hir.local_def_id(ast::CRATE_NODE_ID); - infcx.resolve_regions_and_report_errors(def_id, ®ion_maps, &free_regions); + infcx.resolve_regions_and_report_errors(def_id, ®ion_scope_tree, &free_regions); assert_eq!(tcx.sess.err_count(), expected_err_count); }); }); @@ -176,9 +176,9 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { self.infcx.tcx } - pub fn create_region_hierarchy(&mut self, rh: &RH, parent: CodeExtent) { - let me = CodeExtent::Misc(rh.id); - self.region_maps.record_code_extent(me, Some(parent)); + pub fn create_region_hierarchy(&mut self, rh: &RH, parent: region::Scope) { + let me = region::Scope::Node(rh.id); + self.region_scope_tree.record_scope_parent(me, Some(parent)); for child_rh in rh.sub { self.create_region_hierarchy(child_rh, me); } @@ -188,21 +188,19 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { // creates a region hierarchy where 1 is root, 10 and 11 are // children of 1, etc - let node = ast::NodeId::from_u32; - let dscope = CodeExtent::DestructionScope(node(1)); - self.region_maps.record_code_extent(dscope, None); + let dscope = region::Scope::Destruction(hir::ItemLocalId(1)); + self.region_scope_tree.record_scope_parent(dscope, None); self.create_region_hierarchy(&RH { - id: node(1), - sub: &[RH { - id: node(10), - sub: &[], - }, - RH { - id: node(11), - sub: &[], - }], - }, - dscope); + id: hir::ItemLocalId(1), + sub: &[RH { + id: hir::ItemLocalId(10), + sub: &[], + }, + RH { + id: hir::ItemLocalId(11), + sub: &[], + }], + }, dscope); } #[allow(dead_code)] // this seems like it could be useful, even if we don't use it now @@ -335,7 +333,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { } pub fn t_rptr_scope(&self, id: u32) -> Ty<'tcx> { - let r = ty::ReScope(CodeExtent::Misc(ast::NodeId::from_u32(id))); + let r = ty::ReScope(region::Scope::Node(hir::ItemLocalId(id))); self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize) } diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index 0f063542383dc..9aae188f9ecdf 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -288,7 +288,7 @@ impl Diagnostic { /// Convenience function for internal use, clients should use one of the /// public methods above. 
- fn sub(&mut self, + pub(crate) fn sub(&mut self, level: Level, message: &str, span: MultiSpan, diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index 2c8d8b4691f0a..2cd433bfe3aee 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -110,6 +110,19 @@ impl<'a> DiagnosticBuilder<'a> { // } } + /// Convenience function for internal use, clients should use one of the + /// span_* methods instead. + pub fn sub>( + &mut self, + level: Level, + message: &str, + span: Option, + ) -> &mut Self { + let span = span.map(|s| s.into()).unwrap_or(MultiSpan::new()); + self.diagnostic.sub(level, message, span, None); + self + } + /// Delay emission of this diagnostic as a bug. /// /// This can be useful in contexts where an error indicates a bug but diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 53d90531cc946..daa132dbf6213 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -183,8 +183,8 @@ impl EmitterWriter { continue; } - let lo = cm.lookup_char_pos(span_label.span.lo); - let mut hi = cm.lookup_char_pos(span_label.span.hi); + let lo = cm.lookup_char_pos(span_label.span.lo()); + let mut hi = cm.lookup_char_pos(span_label.span.hi()); // Watch out for "empty spans". If we get a span like 6..6, we // want to just display a `^` at 6, so convert that to @@ -683,7 +683,7 @@ impl EmitterWriter { if let Some(ref cm) = self.cm { for primary_span in msp.primary_spans() { if primary_span != &DUMMY_SP { - let hi = cm.lookup_char_pos(primary_span.hi); + let hi = cm.lookup_char_pos(primary_span.hi()); if hi.line > max { max = hi.line; } @@ -691,7 +691,7 @@ impl EmitterWriter { } for span_label in msp.span_labels() { if span_label.span != DUMMY_SP { - let hi = cm.lookup_char_pos(span_label.span.hi); + let hi = cm.lookup_char_pos(span_label.span.hi()); if hi.line > max { max = hi.line; } @@ -914,7 +914,7 @@ impl EmitterWriter { let (primary_lo, cm) = if let (Some(cm), Some(ref primary_span)) = (self.cm.as_ref(), msp.primary_span().as_ref()) { if primary_span != &&DUMMY_SP { - (cm.lookup_char_pos(primary_span.lo), cm) + (cm.lookup_char_pos(primary_span.lo()), cm) } else { emit_to_destination(&buffer.render(), level, &mut self.dst)?; return Ok(()); @@ -1091,7 +1091,7 @@ impl EmitterWriter { Some(Style::HeaderMsg)); let suggestions = suggestion.splice_lines(cm.borrow()); - let span_start_pos = cm.lookup_char_pos(primary_sub.span.lo); + let span_start_pos = cm.lookup_char_pos(primary_sub.span.lo()); let line_start = span_start_pos.line; draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1); let mut row_num = 2; diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 870bb01bb9ffb..d9b0f4ac8a6c0 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -148,16 +148,12 @@ impl CodeSuggestion { // Assumption: all spans are in the same file, and all spans // are disjoint. Sort in ascending order. - primary_spans.sort_by_key(|sp| sp.0.lo); + primary_spans.sort_by_key(|sp| sp.0.lo()); // Find the bounding span. 
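> Editor's note: several hunks here and below replace direct access to `Span`'s public fields with constructor and accessor calls (`Span::new(lo, hi, ctxt)`, `.lo()`, `.hi()`, `.with_lo(..)`), so the representation can change without touching callers. A minimal sketch of that encapsulation pattern on a stand-in type (not the real `syntax_pos::Span`):

```rust
#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32, ctxt: u32 }

impl Span {
    fn new(lo: u32, hi: u32, ctxt: u32) -> Span { Span { lo, hi, ctxt } }
    fn lo(self) -> u32 { self.lo }
    fn hi(self) -> u32 { self.hi }
    fn with_lo(self, lo: u32) -> Span { Span::new(lo, self.hi, self.ctxt) }
}

fn main() {
    let sp = Span::new(10, 20, 0);
    // The empty "epilogue" span idiom used later in this diff
    // (`span.with_lo(span.hi())` in construct_fn).
    let fn_end = sp.with_lo(sp.hi());
    assert_eq!((fn_end.lo(), fn_end.hi()), (20, 20));
}
```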
- let lo = primary_spans.iter().map(|sp| sp.0.lo).min().unwrap(); - let hi = primary_spans.iter().map(|sp| sp.0.hi).min().unwrap(); - let bounding_span = Span { - lo, - hi, - ctxt: NO_EXPANSION, - }; + let lo = primary_spans.iter().map(|sp| sp.0.lo()).min().unwrap(); + let hi = primary_spans.iter().map(|sp| sp.0.hi()).min().unwrap(); + let bounding_span = Span::new(lo, hi, NO_EXPANSION); let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); @@ -171,14 +167,14 @@ impl CodeSuggestion { // // Finally push the trailing line segment of the last span let fm = &lines.file; - let mut prev_hi = cm.lookup_char_pos(bounding_span.lo); + let mut prev_hi = cm.lookup_char_pos(bounding_span.lo()); prev_hi.col = CharPos::from_usize(0); let mut prev_line = fm.get_line(lines.lines[0].line_index); let mut bufs = vec![(String::new(), false); self.substitutions()]; for (sp, substitutes) in primary_spans { - let cur_lo = cm.lookup_char_pos(sp.lo); + let cur_lo = cm.lookup_char_pos(sp.lo()); for (&mut (ref mut buf, ref mut underline), substitute) in bufs.iter_mut() .zip(substitutes) { if prev_hi.line == cur_lo.line { @@ -208,7 +204,7 @@ impl CodeSuggestion { } buf.push_str(substitute); } - prev_hi = cm.lookup_char_pos(sp.hi); + prev_hi = cm.lookup_char_pos(sp.hi()); prev_line = fm.get_line(prev_hi.line - 1); } for &mut (ref mut buf, _) in &mut bufs { @@ -306,6 +302,12 @@ impl Handler { self.continue_after_error.set(continue_after_error); } + // NOTE: DO NOT call this function from rustc, as it relies on `err_count` being non-zero + // if an error happened to avoid ICEs. This function should only be called from tools. + pub fn reset_err_count(&self) { + self.err_count.set(0); + } + pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new(self, Level::Cancelled, "") } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 3bfe2897de175..780d34d570170 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -44,7 +44,7 @@ use std::collections::HashSet; use syntax::ast; use syntax::attr; use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes}; -use syntax_pos::Span; +use syntax_pos::{Span, SyntaxContext}; use syntax::symbol::keywords; use rustc::hir::{self, PatKind}; @@ -75,9 +75,11 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for WhileTrue { if let hir::ExprWhile(ref cond, ..) = e.node { if let hir::ExprLit(ref lit) = cond.node { if let ast::LitKind::Bool(true) = lit.node { - cx.span_lint(WHILE_TRUE, - e.span, - "denote infinite loops with loop { ... }"); + if lit.span.ctxt() == SyntaxContext::empty() { + cx.span_lint(WHILE_TRUE, + e.span, + "denote infinite loops with loop { ... }"); + } } } } @@ -850,23 +852,25 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnconditionalRecursion { } visited.insert(cfg_id); - let node_id = cfg.graph.node_data(idx).id(); - // is this a recursive call? - let self_recursive = if node_id != ast::DUMMY_NODE_ID { - match method { + let local_id = cfg.graph.node_data(idx).id(); + if local_id != hir::DUMMY_ITEM_LOCAL_ID { + let node_id = cx.tcx.hir.hir_to_node_id(hir::HirId { + owner: body.value.hir_id.owner, + local_id + }); + let self_recursive = match method { Some(ref method) => expr_refers_to_this_method(cx, method, node_id), None => expr_refers_to_this_fn(cx, id, node_id), + }; + if self_recursive { + self_call_spans.push(cx.tcx.hir.span(node_id)); + // this is a self call, so we shouldn't explore past + // this node in the CFG. 
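> Editor's note: the `WhileTrue` change above makes the lint inspect the literal's `SyntaxContext`, so a `while true` that only exists after macro expansion is no longer reported. A hedged sketch of the user-visible effect (the `forever!` macro is illustrative, not from the diff):

```rust
// `forever!` is a made-up macro: its `while true` tokens carry a macro
// expansion context, so after this change the lint stays quiet for it.
macro_rules! forever {
    ($body:block) => {
        while true $body
    };
}

fn main() {
    forever!({ break; });   // expanded `while true`: lint now skipped
    while true {            // hand-written `while true`: still warns
        break;              // "denote infinite loops with loop { ... }"
    }
}
```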
+ continue; } - } else { - false - }; - if self_recursive { - self_call_spans.push(cx.tcx.hir.span(node_id)); - // this is a self call, so we shouldn't explore past - // this node in the CFG. - continue; } + // add the successors of this node to explore the graph further. for (_, edge) in cfg.graph.outgoing_edges(idx) { let target_idx = edge.target(); diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 5ef277f02ace6..fbf993f45576c 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -128,7 +128,6 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { NonSnakeCase, NonUpperCaseGlobals, NonShorthandFieldPatterns, - UnusedUnsafe, UnsafeCode, UnusedMut, UnusedAllocation, diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index cdf0996796085..ccd3194b5e388 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -543,6 +543,18 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { `u32` or `libc::wchar_t` should be used") } + ty::TyInt(ast::IntTy::I128) => { + FfiUnsafe("found Rust type `i128` in foreign module, but \ + 128-bit integers don't currently have a known \ + stable ABI") + } + + ty::TyUint(ast::UintTy::U128) => { + FfiUnsafe("found Rust type `u128` in foreign module, but \ + 128-bit integers don't currently have a known \ + stable ABI") + } + // Primitive types with a stable representation. ty::TyBool | ty::TyInt(..) | ty::TyUint(..) | ty::TyFloat(..) | ty::TyNever => FfiSafe, @@ -607,6 +619,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { ty::TyInfer(..) | ty::TyError | ty::TyClosure(..) | + ty::TyGenerator(..) | ty::TyProjection(..) | ty::TyAnon(..) | ty::TyFnDef(..) => bug!("Unexpected type in foreign function"), diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index cbc4ebe90fd09..15efc14b061f0 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -204,60 +204,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { } } -declare_lint! { - pub UNUSED_UNSAFE, - Warn, - "unnecessary use of an `unsafe` block" -} - -#[derive(Copy, Clone)] -pub struct UnusedUnsafe; - -impl LintPass for UnusedUnsafe { - fn get_lints(&self) -> LintArray { - lint_array!(UNUSED_UNSAFE) - } -} - -impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedUnsafe { - fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) { - /// Return the NodeId for an enclosing scope that is also `unsafe` - fn is_enclosed(cx: &LateContext, id: ast::NodeId) -> Option<(String, ast::NodeId)> { - let parent_id = cx.tcx.hir.get_parent_node(id); - if parent_id != id { - if cx.tcx.used_unsafe.borrow().contains(&parent_id) { - Some(("block".to_string(), parent_id)) - } else if let Some(hir::map::NodeItem(&hir::Item { - node: hir::ItemFn(_, hir::Unsafety::Unsafe, _, _, _, _), - .. - })) = cx.tcx.hir.find(parent_id) { - Some(("fn".to_string(), parent_id)) - } else { - is_enclosed(cx, parent_id) - } - } else { - None - } - } - if let hir::ExprBlock(ref blk) = e.node { - // Don't warn about generated blocks, that'll just pollute the output. 
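> Editor's note: the new `TyInt(I128)`/`TyUint(U128)` arms above make the `improper_ctypes` lint flag 128-bit integers in foreign signatures, since they have no known stable C ABI. A sketch of the kind of declaration that would now be reported (function names are hypothetical; 128-bit integers were still feature-gated on this nightly):

```rust
#![feature(i128_type)] // i128/u128 were unstable at the time of this change

extern "C" {
    // warning: found Rust type `i128` in foreign module, but 128-bit integers
    // don't currently have a known stable ABI
    fn takes_i128(x: i128) -> i128;

    // warning: found Rust type `u128` in foreign module, but 128-bit integers
    // don't currently have a known stable ABI
    fn takes_u128(x: u128) -> u128;
}

fn main() {}
```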
- if blk.rules == hir::UnsafeBlock(hir::UserProvided) && - !cx.tcx.used_unsafe.borrow().contains(&blk.id) { - - let mut db = cx.struct_span_lint(UNUSED_UNSAFE, blk.span, - "unnecessary `unsafe` block"); - - db.span_label(blk.span, "unnecessary `unsafe` block"); - if let Some((kind, id)) = is_enclosed(cx, blk.id) { - db.span_note(cx.tcx.hir.span(id), - &format!("because it's nested under this `unsafe` {}", kind)); - } - db.emit(); - } - } - } -} - declare_lint! { pub PATH_STATEMENTS, Warn, diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index ad320a7ff3d1e..537e12086141c 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -38,6 +38,7 @@ use syntax::ext::base::SyntaxExtension; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION}; +use rustc_data_structures::indexed_set::IdxSetBuf; use rustc::hir::svh::Svh; use rustc::hir; @@ -105,7 +106,10 @@ provide! { <'tcx> tcx, def_id, cdata, mir } - mir_const_qualif => { cdata.mir_const_qualif(def_id.index) } + generator_sig => { cdata.generator_sig(def_id.index, tcx) } + mir_const_qualif => { + (cdata.mir_const_qualif(def_id.index), Rc::new(IdxSetBuf::new_empty(0))) + } typeck_tables_of => { cdata.item_body_tables(def_id.index, tcx) } closure_kind => { cdata.closure_kind(def_id.index) } fn_sig => { cdata.fn_sig(def_id.index, tcx) } @@ -368,7 +372,7 @@ impl CrateStore for cstore::CStore { let source_name = format!("<{} macros>", name); let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body); - let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; + let local_span = Span::new(filemap.start_pos, filemap.end_pos, NO_EXPANSION); let body = filemap_to_stream(&sess.parse_sess, filemap, None); // Mark the attrs as used diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 73b2e972b93ca..1e2f167f19962 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -242,7 +242,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let sess = if let Some(sess) = self.sess { sess } else { - return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }); + return Ok(Span::new(lo, hi, NO_EXPANSION)); }; let (lo, hi) = if lo > hi { @@ -289,7 +289,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; - Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) + Ok(Span::new(lo, hi, NO_EXPANSION)) } } @@ -438,6 +438,7 @@ impl<'tcx> EntryKind<'tcx> { EntryKind::Impl(_) | EntryKind::DefaultImpl(_) | EntryKind::Field | + EntryKind::Generator(_) | EntryKind::Closure(_) => return None, }) } @@ -1100,6 +1101,23 @@ impl<'a, 'tcx> CrateMetadata { sig.decode((self, tcx)) } + fn get_generator_data(&self, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { + match self.entry(id).kind { + EntryKind::Generator(data) => Some(data.decode((self, tcx))), + _ => None, + } + } + + pub fn generator_sig(&self, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { + self.get_generator_data(id, tcx).map(|d| d.sig) + } + #[inline] pub fn def_key(&self, index: DefIndex) -> DefKey { self.def_path_table.def_key(index) diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 1dc5d695348c1..d36aebe38eb0b 100644 --- 
a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -548,12 +548,13 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { &hir::Visibility)>) -> Entry<'tcx> { let tcx = self.tcx; + let hir_id = tcx.hir.node_to_hir_id(id); let def_id = tcx.hir.local_def_id(id); debug!("IsolatedEncoder::encode_info_for_mod({:?})", def_id); let data = ModData { - reexports: match tcx.export_map.get(&id) { - Some(exports) if *vis == hir::Public => { + reexports: match tcx.module_exports(hir_id) { + Some(ref exports) if *vis == hir::Public => { self.lazy_seq_from_slice(exports.as_slice()) } _ => LazySeq::empty(), @@ -792,7 +793,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { let kind = match impl_item.kind { ty::AssociatedKind::Const => { EntryKind::AssociatedConst(container, - self.tcx.at(ast_item.span).mir_const_qualif(def_id)) + self.tcx.at(ast_item.span).mir_const_qualif(def_id).0) } ty::AssociatedKind::Method => { let fn_data = if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node { @@ -911,7 +912,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { hir::ItemStatic(_, hir::MutMutable, _) => EntryKind::MutStatic, hir::ItemStatic(_, hir::MutImmutable, _) => EntryKind::ImmStatic, hir::ItemConst(..) => { - EntryKind::Const(tcx.at(item.span).mir_const_qualif(def_id)) + EntryKind::Const(tcx.at(item.span).mir_const_qualif(def_id).0) } hir::ItemFn(_, _, constness, .., body) => { let data = FnData { @@ -1213,13 +1214,23 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { debug!("IsolatedEncoder::encode_info_for_closure({:?})", def_id); let tcx = self.tcx; - let data = ClosureData { - kind: tcx.closure_kind(def_id), - sig: self.lazy(&tcx.fn_sig(def_id)), + let kind = if let Some(sig) = self.tcx.generator_sig(def_id) { + let layout = self.tcx.generator_layout(def_id); + let data = GeneratorData { + sig, + layout: layout.clone(), + }; + EntryKind::Generator(self.lazy(&data)) + } else { + let data = ClosureData { + kind: tcx.closure_kind(def_id), + sig: self.lazy(&tcx.fn_sig(def_id)), + }; + EntryKind::Closure(self.lazy(&data)) }; Entry { - kind: EntryKind::Closure(self.lazy(&data)), + kind, visibility: self.lazy(&ty::Visibility::Public), span: self.lazy(&tcx.def_span(def_id)), attributes: self.encode_attributes(&tcx.get_attrs(def_id)), @@ -1245,7 +1256,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { let body = tcx.hir.body_owned_by(id); Entry { - kind: EntryKind::Const(tcx.mir_const_qualif(def_id)), + kind: EntryKind::Const(tcx.mir_const_qualif(def_id).0), visibility: self.lazy(&ty::Visibility::Public), span: self.lazy(&tcx.def_span(def_id)), attributes: LazySeq::empty(), diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index f79abecf9da4b..f4e6f57c43777 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -21,7 +21,6 @@ #![feature(quote)] #![feature(rustc_diagnostic_macros)] #![feature(specialization)] -#![feature(discriminant_value)] #![feature(rustc_private)] #[macro_use] diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index b71f4d0d98cf9..d065a0fc21c6c 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -353,6 +353,7 @@ pub enum EntryKind<'tcx> { Mod(Lazy), MacroDef(Lazy), Closure(Lazy>), + Generator(Lazy>), Trait(Lazy>), Impl(Lazy>), DefaultImpl(Lazy>), @@ -401,6 +402,9 @@ impl<'a, 'gcx, 'tcx> HashStable> for EntryK EntryKind::MacroDef(ref macro_def) => { macro_def.hash_stable(hcx, hasher); } + 
EntryKind::Generator(data) => { + data.hash_stable(hcx, hasher); + } EntryKind::Closure(closure_data) => { closure_data.hash_stable(hcx, hasher); } @@ -564,3 +568,10 @@ pub struct ClosureData<'tcx> { pub sig: Lazy>, } impl_stable_hash_for!(struct ClosureData<'tcx> { kind, sig }); + +#[derive(RustcEncodable, RustcDecodable)] +pub struct GeneratorData<'tcx> { + pub sig: ty::PolyGenSig<'tcx>, + pub layout: mir::GeneratorLayout<'tcx>, +} +impl_stable_hash_for!(struct GeneratorData<'tcx> { sig, layout }); diff --git a/src/librustc_mir/borrow_check.rs b/src/librustc_mir/borrow_check.rs index eda2bacb11fdc..063cbc7755915 100644 --- a/src/librustc_mir/borrow_check.rs +++ b/src/librustc_mir/borrow_check.rs @@ -212,11 +212,11 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> DataflowResultsConsumer<'b, 'gcx> // ignored by borrowck } - StatementKind::StorageDead(ref lvalue) => { + StatementKind::StorageDead(local) => { // causes non-drop values to be dropped. self.consume_lvalue(ContextKind::StorageDead.new(location), ConsumeKind::Consume, - (lvalue, span), + (&Lvalue::Local(local), span), flow_state) } } @@ -279,12 +279,20 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> DataflowResultsConsumer<'b, 'gcx> (index, span), flow_state); } AssertMessage::Math(_/*const_math_err*/) => {} + AssertMessage::GeneratorResumedAfterReturn => {} + AssertMessage::GeneratorResumedAfterPanic => {} } } + TerminatorKind::Yield { ref value, resume: _, drop: _} => { + self.consume_operand(ContextKind::Yield.new(loc), + Consume, (value, span), flow_state); + } + TerminatorKind::Goto { target: _ } | TerminatorKind::Resume | TerminatorKind::Return | + TerminatorKind::GeneratorDrop | TerminatorKind::Unreachable => { // no data used, thus irrelevant to borrowck } @@ -595,7 +603,7 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx> // // (Or if you prefer, all the *other* iterations over loans // only consider loans that are in scope of some given - // CodeExtent) + // region::Scope) // // The (currently skeletal) code here does not encode such a // distinction, which means it is almost certainly over @@ -702,7 +710,7 @@ mod restrictions { use rustc::hir; use rustc::ty::{self, TyCtxt}; - use rustc::mir::{Lvalue, Mir, Operand, ProjectionElem}; + use rustc::mir::{Lvalue, Mir, ProjectionElem}; pub(super) struct Restrictions<'c, 'tcx: 'c> { mir: &'c Mir<'tcx>, @@ -801,12 +809,7 @@ mod restrictions { ProjectionElem::Downcast(..) | ProjectionElem::Subslice { .. } | ProjectionElem::ConstantIndex { .. } | - ProjectionElem::Index(Operand::Constant(..)) => { - cursor = &proj.base; - continue 'cursor; - } - ProjectionElem::Index(Operand::Consume(ref index)) => { - self.lvalue_stack.push(index); // FIXME: did old borrowck do this? 
+ ProjectionElem::Index(_) => { cursor = &proj.base; continue 'cursor; } @@ -996,7 +999,7 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx> ("", format!(""), None), // (dont emit downcast info) ProjectionElem::Field(field, _ty) => ("", format!(".{}", field.index()), None), - ProjectionElem::Index(ref index) => + ProjectionElem::Index(index) => ("", format!(""), Some(index)), ProjectionElem::ConstantIndex { offset, min_length, from_end: true } => ("", format!("[{} of {}]", offset, min_length), None), @@ -1013,23 +1016,11 @@ impl<'c, 'b, 'a: 'b+'c, 'gcx, 'tcx: 'a> MirBorrowckCtxt<'c, 'b, 'a, 'gcx, 'tcx> self.append_lvalue_to_string(&proj.base, buf); if let Some(index) = index_operand { buf.push_str("["); - self.append_operand_to_string(index, buf); + self.append_lvalue_to_string(&Lvalue::Local(index), buf); buf.push_str("]"); } else { buf.push_str(&suffix); } - - } - } - } - - fn append_operand_to_string(&self, operand: &Operand, buf: &mut String) { - match *operand { - Operand::Consume(ref lvalue) => { - self.append_lvalue_to_string(lvalue, buf); - } - Operand::Constant(ref constant) => { - buf.push_str(&format!("{:?}", constant)); } } } @@ -1102,6 +1093,7 @@ enum ContextKind { CallDest, Assert, StorageDead, + Yield, } impl ContextKind { diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 4583d80b83ddc..0e412fb27ca68 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -21,10 +21,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ast_block: &'tcx hir::Block, source_info: SourceInfo) -> BlockAnd<()> { - let Block { extent, opt_destruction_extent, span, stmts, expr, targeted_by_break } = + let Block { region_scope, opt_destruction_scope, span, stmts, expr, targeted_by_break } = self.hir.mirror(ast_block); - self.in_opt_scope(opt_destruction_extent.map(|de|(de, source_info)), block, move |this| { - this.in_scope((extent, source_info), block, move |this| { + self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), block, move |this| { + this.in_scope((region_scope, source_info), block, move |this| { if targeted_by_break { // This is a `break`-able block (currently only `catch { ... }`) let exit_block = this.cfg.start_new_block(); @@ -67,15 +67,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // the let-scopes at the end. // // First we build all the statements in the block. - let mut let_extent_stack = Vec::with_capacity(8); + let mut let_scope_stack = Vec::with_capacity(8); let outer_visibility_scope = this.visibility_scope; let source_info = this.source_info(span); for stmt in stmts { - let Stmt { span, kind, opt_destruction_extent } = this.hir.mirror(stmt); + let Stmt { kind, opt_destruction_scope } = this.hir.mirror(stmt); match kind { StmtKind::Expr { scope, expr } => { unpack!(block = this.in_opt_scope( - opt_destruction_extent.map(|de|(de, source_info)), block, |this| { + opt_destruction_scope.map(|de|(de, source_info)), block, |this| { this.in_scope((scope, source_info), block, |this| { let expr = this.hir.mirror(expr); this.stmt_expr(block, expr) @@ -83,21 +83,19 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { })); } StmtKind::Let { remainder_scope, init_scope, pattern, initializer } => { - let tcx = this.hir.tcx(); - // Enter the remainder scope, i.e. the bindings' destruction scope. 
this.push_scope((remainder_scope, source_info)); - let_extent_stack.push(remainder_scope); + let_scope_stack.push(remainder_scope); // Declare the bindings, which may create a visibility scope. - let remainder_span = remainder_scope.span(&tcx.hir); - let remainder_span = remainder_span.unwrap_or(span); + let remainder_span = remainder_scope.span(this.hir.tcx(), + &this.hir.region_scope_tree); let scope = this.declare_bindings(None, remainder_span, &pattern); // Evaluate the initializer, if present. if let Some(init) = initializer { unpack!(block = this.in_opt_scope( - opt_destruction_extent.map(|de|(de, source_info)), block, move |this| { + opt_destruction_scope.map(|de|(de, source_info)), block, move |this| { this.in_scope((init_scope, source_info), block, move |this| { // FIXME #30046 ^~~~ this.expr_into_pattern(block, pattern, init) @@ -126,8 +124,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } // Finally, we pop all the let scopes before exiting out from the scope of block // itself. - for extent in let_extent_stack.into_iter().rev() { - unpack!(block = this.pop_scope((extent, source_info), block)); + for scope in let_scope_stack.into_iter().rev() { + unpack!(block = this.pop_scope((scope, source_info), block)); } // Restore the original visibility scope. this.visibility_scope = outer_visibility_scope; diff --git a/src/librustc_mir/build/cfg.rs b/src/librustc_mir/build/cfg.rs index b390e2888f26c..d77fe5170e040 100644 --- a/src/librustc_mir/build/cfg.rs +++ b/src/librustc_mir/build/cfg.rs @@ -14,7 +14,7 @@ //! Routines for manipulating the control-flow graph. use build::CFG; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::mir::*; impl<'tcx> CFG<'tcx> { @@ -47,10 +47,10 @@ impl<'tcx> CFG<'tcx> { pub fn push_end_region(&mut self, block: BasicBlock, source_info: SourceInfo, - extent: CodeExtent) { + region_scope: region::Scope) { self.push(block, Statement { source_info, - kind: StatementKind::EndRegion(extent), + kind: StatementKind::EndRegion(region_scope), }); } diff --git a/src/librustc_mir/build/expr/as_constant.rs b/src/librustc_mir/build/expr/as_constant.rs index 6d15f0a2e5d7f..a86b7f4d239a3 100644 --- a/src/librustc_mir/build/expr/as_constant.rs +++ b/src/librustc_mir/build/expr/as_constant.rs @@ -29,7 +29,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let Expr { ty, temp_lifetime: _, span, kind } = expr; match kind { - ExprKind::Scope { extent: _, value } => + ExprKind::Scope { region_scope: _, value } => this.as_constant(value), ExprKind::Literal { literal } => Constant { span: span, ty: ty, literal: literal }, diff --git a/src/librustc_mir/build/expr/as_lvalue.rs b/src/librustc_mir/build/expr/as_lvalue.rs index 04c23215463dd..9cbaff2c113b6 100644 --- a/src/librustc_mir/build/expr/as_lvalue.rs +++ b/src/librustc_mir/build/expr/as_lvalue.rs @@ -39,8 +39,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let expr_span = expr.span; let source_info = this.source_info(expr_span); match expr.kind { - ExprKind::Scope { extent, value } => { - this.in_scope((extent, source_info), block, |this| this.as_lvalue(block, value)) + ExprKind::Scope { region_scope, value } => { + this.in_scope((region_scope, source_info), block, |this| { + this.as_lvalue(block, value) + }) } ExprKind::Field { lhs, name } => { let lvalue = unpack!(block = this.as_lvalue(block, lhs)); @@ -56,10 +58,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let (usize_ty, bool_ty) = (this.hir.usize_ty(), this.hir.bool_ty()); let slice = unpack!(block = this.as_lvalue(block, 
lhs)); - // extent=None so lvalue indexes live forever. They are scalars so they + // region_scope=None so lvalue indexes live forever. They are scalars so they // do not need storage annotations, and they are often copied between // places. - let idx = unpack!(block = this.as_operand(block, None, index)); + let idx = unpack!(block = this.as_temp(block, None, index)); // bounds check: let (len, lt) = (this.temp(usize_ty.clone(), expr_span), @@ -68,12 +70,12 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { &len, Rvalue::Len(slice.clone())); this.cfg.push_assign(block, source_info, // lt = idx < len <, Rvalue::BinaryOp(BinOp::Lt, - idx.clone(), + Operand::Consume(Lvalue::Local(idx)), Operand::Consume(len.clone()))); let msg = AssertMessage::BoundsCheck { len: Operand::Consume(len), - index: idx.clone() + index: Operand::Consume(Lvalue::Local(idx)) }; let success = this.assert(block, Operand::Consume(lt), true, msg, expr_span); @@ -118,13 +120,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ExprKind::Return { .. } | ExprKind::Literal { .. } | ExprKind::InlineAsm { .. } | + ExprKind::Yield { .. } | ExprKind::Call { .. } => { // these are not lvalues, so we need to make a temporary. debug_assert!(match Category::of(&expr.kind) { Some(Category::Lvalue) => false, _ => true, }); - this.as_temp(block, expr.temp_lifetime, expr) + let temp = unpack!(block = this.as_temp(block, expr.temp_lifetime, expr)); + block.and(Lvalue::Local(temp)) } } } diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs index 4679e0bb0a5c3..0a72ce8d05e14 100644 --- a/src/librustc_mir/build/expr/as_operand.rs +++ b/src/librustc_mir/build/expr/as_operand.rs @@ -13,7 +13,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::Category; use hair::*; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { @@ -39,7 +39,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// The operand is known to be live until the end of `scope`. pub fn as_operand(&mut self, block: BasicBlock, - scope: Option, + scope: Option, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> { @@ -49,16 +49,16 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn expr_as_operand(&mut self, mut block: BasicBlock, - scope: Option, + scope: Option, expr: Expr<'tcx>) -> BlockAnd> { debug!("expr_as_operand(block={:?}, expr={:?})", block, expr); let this = self; - if let ExprKind::Scope { extent, value } = expr.kind { + if let ExprKind::Scope { region_scope, value } = expr.kind { let source_info = this.source_info(expr.span); - let extent = (extent, source_info); - return this.in_scope(extent, block, |this| { + let region_scope = (region_scope, source_info); + return this.in_scope(region_scope, block, |this| { this.as_operand(block, scope, value) }); } @@ -74,7 +74,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { Category::Rvalue(..) 
=> { let operand = unpack!(block = this.as_temp(block, scope, expr)); - block.and(Operand::Consume(operand)) + block.and(Operand::Consume(Lvalue::Local(operand))) } } } diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index c74378bdadce5..c83283ee38e1f 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -21,7 +21,7 @@ use build::expr::category::{Category, RvalueFunc}; use hair::*; use rustc_const_math::{ConstInt, ConstIsize}; use rustc::middle::const_val::ConstVal; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::ty; use rustc::mir::*; use syntax::ast; @@ -38,7 +38,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } /// Compile `expr`, yielding an rvalue. - pub fn as_rvalue(&mut self, block: BasicBlock, scope: Option, expr: M) + pub fn as_rvalue(&mut self, block: BasicBlock, scope: Option, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> { @@ -48,7 +48,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn expr_as_rvalue(&mut self, mut block: BasicBlock, - scope: Option, + scope: Option, expr: Expr<'tcx>) -> BlockAnd> { debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr); @@ -58,9 +58,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source_info = this.source_info(expr_span); match expr.kind { - ExprKind::Scope { extent, value } => { - let extent = (extent, source_info); - this.in_scope(extent, block, |this| this.as_rvalue(block, scope, value)) + ExprKind::Scope { region_scope, value } => { + let region_scope = (region_scope, source_info); + this.in_scope(region_scope, block, |this| this.as_rvalue(block, scope, value)) } ExprKind::Repeat { value, count } => { let value_operand = unpack!(block = this.as_operand(block, scope, value)); @@ -96,23 +96,23 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } ExprKind::Box { value } => { let value = this.hir.mirror(value); - let result = this.temp(expr.ty, expr_span); + let result = this.local_decls.push(LocalDecl::new_temp(expr.ty, expr_span)); + this.cfg.push(block, Statement { + source_info, + kind: StatementKind::StorageLive(result) + }); if let Some(scope) = scope { // schedule a shallow free of that memory, lest we unwind: - this.cfg.push(block, Statement { - source_info, - kind: StatementKind::StorageLive(result.clone()) - }); - this.schedule_drop(expr_span, scope, &result, value.ty); + this.schedule_drop(expr_span, scope, &Lvalue::Local(result), value.ty); } // malloc some memory of suitable type (thus far, uninitialized): let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty); - this.cfg.push_assign(block, source_info, &result, box_); + this.cfg.push_assign(block, source_info, &Lvalue::Local(result), box_); // initialize the box contents: - unpack!(block = this.into(&result.clone().deref(), block, value)); - block.and(Rvalue::Use(Operand::Consume(result))) + unpack!(block = this.into(&Lvalue::Local(result).deref(), block, value)); + block.and(Rvalue::Use(Operand::Consume(Lvalue::Local(result)))) } ExprKind::Cast { source } => { let source = this.hir.mirror(source); @@ -185,12 +185,26 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields)) } - ExprKind::Closure { closure_id, substs, upvars } => { // see (*) above - let upvars = + ExprKind::Closure { closure_id, substs, upvars, interior } => { // see (*) above + let mut operands: Vec<_> = upvars.into_iter() .map(|upvar| unpack!(block = this.as_operand(block, 
scope, upvar))) .collect(); - block.and(Rvalue::Aggregate(box AggregateKind::Closure(closure_id, substs), upvars)) + let result = if let Some(interior) = interior { + // Add the state operand since it follows the upvars in the generator + // struct. See librustc_mir/transform/generator.rs for more details. + operands.push(Operand::Constant(box Constant { + span: expr_span, + ty: this.hir.tcx().types.u32, + literal: Literal::Value { + value: ConstVal::Integral(ConstInt::U32(0)), + }, + })); + box AggregateKind::Generator(closure_id, substs, interior) + } else { + box AggregateKind::Closure(closure_id, substs) + }; + block.and(Rvalue::Aggregate(result, operands)) } ExprKind::Adt { adt_def, variant_index, substs, fields, base @@ -232,6 +246,17 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { block = unpack!(this.stmt_expr(block, expr)); block.and(this.unit_rvalue()) } + ExprKind::Yield { value } => { + let value = unpack!(block = this.as_operand(block, scope, value)); + let resume = this.cfg.start_new_block(); + let cleanup = this.generator_drop_cleanup(); + this.cfg.terminate(block, source_info, TerminatorKind::Yield { + value: value, + resume: resume, + drop: cleanup, + }); + resume.and(this.unit_rvalue()) + } ExprKind::Literal { .. } | ExprKind::Block { .. } | ExprKind::Match { .. } | diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index 4f248ddb0e2cf..7826769600bfa 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -13,7 +13,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::Category; use hair::*; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { @@ -21,9 +21,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// up rvalues so as to freeze the value that will be consumed. 
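> Editor's note: the `AggregateKind::Generator` arm above appends a `u32(0)` operand after the captured upvars; that extra field is the generator's state discriminant, which the `StateTransform` MIR pass registered earlier in `driver.rs` switches on at each resume. A hand-rolled sketch of the resulting shape (illustrative only; rustc builds this in MIR, not in source code):

```rust
enum Step {
    Yielded(i32),
    Complete(&'static str),
}

struct MyGenerator {
    state: u32, // 0 = not yet resumed, 1 = suspended at the yield, 2 = finished
}

impl MyGenerator {
    fn resume(&mut self) -> Step {
        match self.state {
            0 => { self.state = 1; Step::Yielded(1) }
            1 => { self.state = 2; Step::Complete("done") }
            // Mirrors the new GeneratorResumedAfterReturn assert message.
            _ => panic!("generator resumed after completion"),
        }
    }
}

fn main() {
    let mut g = MyGenerator { state: 0 };
    match g.resume() {
        Step::Yielded(n) => assert_eq!(n, 1),
        _ => unreachable!(),
    }
    match g.resume() {
        Step::Complete(s) => assert_eq!(s, "done"),
        _ => unreachable!(),
    }
}
```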
pub fn as_temp(&mut self, block: BasicBlock, - temp_lifetime: Option, + temp_lifetime: Option, expr: M) - -> BlockAnd> + -> BlockAnd where M: Mirror<'tcx, Output = Expr<'tcx>> { let expr = self.hir.mirror(expr); @@ -32,28 +32,28 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn expr_as_temp(&mut self, mut block: BasicBlock, - temp_lifetime: Option, + temp_lifetime: Option, expr: Expr<'tcx>) - -> BlockAnd> { + -> BlockAnd { debug!("expr_as_temp(block={:?}, temp_lifetime={:?}, expr={:?})", block, temp_lifetime, expr); let this = self; let expr_span = expr.span; let source_info = this.source_info(expr_span); - if let ExprKind::Scope { extent, value } = expr.kind { - return this.in_scope((extent, source_info), block, |this| { + if let ExprKind::Scope { region_scope, value } = expr.kind { + return this.in_scope((region_scope, source_info), block, |this| { this.as_temp(block, temp_lifetime, value) }); } - let expr_ty = expr.ty.clone(); - let temp = this.temp(expr_ty.clone(), expr_span); + let expr_ty = expr.ty; + let temp = this.local_decls.push(LocalDecl::new_temp(expr_ty, expr_span)); - if !expr_ty.is_never() && temp_lifetime.is_some() { + if !expr_ty.is_never() { this.cfg.push(block, Statement { source_info, - kind: StatementKind::StorageLive(temp.clone()) + kind: StatementKind::StorageLive(temp) }); } @@ -68,10 +68,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { Category::Lvalue => { let lvalue = unpack!(block = this.as_lvalue(block, expr)); let rvalue = Rvalue::Use(Operand::Consume(lvalue)); - this.cfg.push_assign(block, source_info, &temp, rvalue); + this.cfg.push_assign(block, source_info, &Lvalue::Local(temp), rvalue); } _ => { - unpack!(block = this.into(&temp, block, expr)); + unpack!(block = this.into(&Lvalue::Local(temp), block, expr)); } } @@ -79,7 +79,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // anything because no values with a destructor can be created in // a constant at this time, even if the type may need dropping. if let Some(temp_lifetime) = temp_lifetime { - this.schedule_drop(expr_span, temp_lifetime, &temp, expr_ty); + this.schedule_drop(expr_span, temp_lifetime, &Lvalue::Local(temp), expr_ty); } block.and(temp) diff --git a/src/librustc_mir/build/expr/category.rs b/src/librustc_mir/build/expr/category.rs index 35173bb598c7c..f05411aacab19 100644 --- a/src/librustc_mir/build/expr/category.rs +++ b/src/librustc_mir/build/expr/category.rs @@ -77,6 +77,7 @@ impl Category { ExprKind::Borrow { .. } | ExprKind::Assign { .. } | ExprKind::AssignOp { .. } | + ExprKind::Yield { .. } | ExprKind::InlineAsm { .. 
} => Some(Category::Rvalue(RvalueFunc::AsRvalue)), diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index 576b1059e5997..80a126dc42569 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -38,9 +38,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source_info = this.source_info(expr_span); match expr.kind { - ExprKind::Scope { extent, value } => { - let extent = (extent, source_info); - this.in_scope(extent, block, |this| this.into(destination, block, value)) + ExprKind::Scope { region_scope, value } => { + let region_scope = (region_scope, source_info); + this.in_scope(region_scope, block, |this| this.into(destination, block, value)) } ExprKind::Block { body: ast_block } => { this.ast_block(destination, block, ast_block, source_info) @@ -229,7 +229,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let topmost_scope = this.topmost_scope(); let ptr = unpack!(block = this.as_temp(block, Some(topmost_scope), ptr)); - this.into(&ptr.deref(), block, val) + this.into(&Lvalue::Local(ptr).deref(), block, val) } else { let args: Vec<_> = args.into_iter() @@ -284,6 +284,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Literal { .. } | + ExprKind::Yield { .. } | ExprKind::Field { .. } => { debug_assert!(match Category::of(&expr.kind).unwrap() { Category::Rvalue(RvalueFunc::Into) => false, diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs index 0da722f72a15a..84468d5d6dc18 100644 --- a/src/librustc_mir/build/expr/stmt.rs +++ b/src/librustc_mir/build/expr/stmt.rs @@ -22,9 +22,11 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // Handle a number of expressions that don't need a destination at all. This // avoids needing a mountain of temporary `()` variables. match expr.kind { - ExprKind::Scope { extent, value } => { + ExprKind::Scope { region_scope, value } => { let value = this.hir.mirror(value); - this.in_scope((extent, source_info), block, |this| this.stmt_expr(block, value)) + this.in_scope((region_scope, source_info), block, |this| { + this.stmt_expr(block, value) + }) } ExprKind::Assign { lhs, rhs } => { let lhs = this.hir.mirror(lhs); @@ -77,29 +79,29 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { block.unit() } ExprKind::Continue { label } => { - let BreakableScope { continue_block, extent, .. } = + let BreakableScope { continue_block, region_scope, .. } = *this.find_breakable_scope(expr_span, label); let continue_block = continue_block.expect( "Attempted to continue in non-continuable breakable block"); - this.exit_scope(expr_span, (extent, source_info), block, continue_block); + this.exit_scope(expr_span, (region_scope, source_info), block, continue_block); this.cfg.start_new_block().unit() } ExprKind::Break { label, value } => { - let (break_block, extent, destination) = { + let (break_block, region_scope, destination) = { let BreakableScope { break_block, - extent, + region_scope, ref break_destination, .. 
} = *this.find_breakable_scope(expr_span, label); - (break_block, extent, break_destination.clone()) + (break_block, region_scope, break_destination.clone()) }; if let Some(value) = value { unpack!(block = this.into(&destination, block, value)) } else { this.cfg.push_assign_unit(block, source_info, &destination) } - this.exit_scope(expr_span, (extent, source_info), block, break_block); + this.exit_scope(expr_span, (region_scope, source_info), block, break_block); this.cfg.start_new_block().unit() } ExprKind::Return { value } => { @@ -114,9 +116,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { block } }; - let extent = this.extent_of_return_scope(); + let region_scope = this.region_scope_of_return_scope(); let return_block = this.return_block(); - this.exit_scope(expr_span, (extent, source_info), block, return_block); + this.exit_scope(expr_span, (region_scope, source_info), block, return_block); this.cfg.start_new_block().unit() } ExprKind::InlineAsm { asm, outputs, inputs } => { diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 78805ba87ec22..ec2e487b4e74c 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -194,7 +194,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source_info = self.source_info(span); self.cfg.push(block, Statement { source_info, - kind: StatementKind::StorageLive(Lvalue::Local(local_id)) + kind: StatementKind::StorageLive(local_id) }); Lvalue::Local(local_id) } @@ -202,8 +202,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn schedule_drop_for_binding(&mut self, var: NodeId, span: Span) { let local_id = self.var_indices[&var]; let var_ty = self.local_decls[local_id].ty; - let extent = self.hir.region_maps.var_scope(var); - self.schedule_drop(span, extent, &Lvalue::Local(local_id), var_ty); + let hir_id = self.hir.tcx().hir.node_to_hir_id(var); + let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id); + self.schedule_drop(span, region_scope, &Lvalue::Local(local_id), var_ty); } pub fn visit_bindings(&mut self, pattern: &Pattern<'tcx>, f: &mut F) @@ -712,6 +713,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ty: var_ty.clone(), name: Some(name), source_info, + internal: false, is_user_variable: true, }); self.var_indices.insert(var_id, var); diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index d7a295a1c3a24..7d1aace873e82 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -14,7 +14,7 @@ use hair::cx::Cx; use hair::Pattern; use rustc::hir; use rustc::hir::def_id::DefId; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::mir::*; use rustc::mir::transform::MirSource; use rustc::mir::visit::{MutVisitor, Lookup}; @@ -71,7 +71,7 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'t // Assume that everything other than closures // is a constant "initializer" expression. match expr.node { - hir::ExprClosure(_, _, body, _) => body, + hir::ExprClosure(_, _, body, _, _) => body, _ => hir::BodyId { node_id: expr.id } } } @@ -95,13 +95,18 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'t let ty = tcx.type_of(tcx.hir.local_def_id(id)); let mut abi = fn_sig.abi; - let implicit_argument = if let ty::TyClosure(..) = ty.sty { - // HACK(eddyb) Avoid having RustCall on closures, - // as it adds unnecessary (and wrong) auto-tupling. 
- abi = Abi::Rust; - Some((closure_self_ty(tcx, id, body_id), None)) - } else { - None + let implicit_argument = match ty.sty { + ty::TyClosure(..) => { + // HACK(eddyb) Avoid having RustCall on closures, + // as it adds unnecessary (and wrong) auto-tupling. + abi = Abi::Rust; + Some((closure_self_ty(tcx, id, body_id), None)) + } + ty::TyGenerator(..) => { + let gen_ty = tcx.body_tables(body_id).node_id_to_type(fn_hir_id); + Some((gen_ty, None)) + } + _ => None, }; let body = tcx.hir.body(body_id); @@ -114,7 +119,15 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'t }); let arguments = implicit_argument.into_iter().chain(explicit_arguments); - build::construct_fn(cx, id, arguments, abi, fn_sig.output(), body) + + let (yield_ty, return_ty) = if body.is_generator { + let gen_sig = cx.tables().generator_sigs()[fn_hir_id].clone().unwrap(); + (Some(gen_sig.yield_ty), gen_sig.return_ty) + } else { + (None, fn_sig.output()) + }; + + build::construct_fn(cx, id, arguments, abi, return_ty, yield_ty, body) } else { build::construct_const(cx, body_id) }; @@ -199,7 +212,7 @@ fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, /////////////////////////////////////////////////////////////////////////// // BuildMir -- walks a crate, looking for fn items and methods to build MIR from -fn closure_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, +pub fn closure_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, closure_expr_id: ast::NodeId, body_id: hir::BodyId) -> Ty<'tcx> { @@ -328,6 +341,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, arguments: A, abi: Abi, return_ty: Ty<'gcx>, + yield_ty: Option>, body: &'gcx hir::Body) -> Mir<'tcx> where A: Iterator, Option<&'gcx hir::Pat>)> @@ -336,18 +350,21 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, let tcx = hir.tcx(); let span = tcx.hir.span(fn_id); - let mut builder = Builder::new(hir.clone(), span, arguments.len(), return_ty); + let mut builder = Builder::new(hir.clone(), + span, + arguments.len(), + return_ty); - let call_site_extent = CodeExtent::CallSiteScope(body.id()); - let arg_extent = CodeExtent::ParameterScope(body.id()); + let call_site_scope = region::Scope::CallSite(body.value.hir_id.local_id); + let arg_scope = region::Scope::Arguments(body.value.hir_id.local_id); let mut block = START_BLOCK; let source_info = builder.source_info(span); - unpack!(block = builder.in_scope((call_site_extent, source_info), block, |builder| { - unpack!(block = builder.in_scope((arg_extent, source_info), block, |builder| { - builder.args_and_body(block, &arguments, arg_extent, &body.value) + unpack!(block = builder.in_scope((call_site_scope, source_info), block, |builder| { + unpack!(block = builder.in_scope((arg_scope, source_info), block, |builder| { + builder.args_and_body(block, &arguments, arg_scope, &body.value) })); // Attribute epilogue to function's closing brace - let fn_end = Span { lo: span.hi, ..span }; + let fn_end = span.with_lo(span.hi()); let source_info = builder.source_info(fn_end); let return_block = builder.return_block(); builder.cfg.terminate(block, source_info, @@ -391,7 +408,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, }).collect() }); - let mut mir = builder.finish(upvar_decls, return_ty); + let mut mir = builder.finish(upvar_decls, return_ty, yield_ty); mir.spread_arg = spread_arg; mir } @@ -416,7 +433,7 @@ fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, // Constants can't `return` so a return block should not be created. 
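> Editor's note: `construct_fn` now threads a separate `yield_ty` alongside `return_ty` for generator bodies, taken from the generator signature. At the source level these are the two distinct types in the sketch below (same style as the E0626 examples earlier in this diff):

```rust
#![feature(generators, generator_trait)]
use std::ops::Generator;

fn main() {
    let mut g = || {
        yield 1i32;        // yield_ty: i32
        return "done";     // return_ty: &'static str
    };
    g.resume();            // runs up to the `yield`
    g.resume();            // runs to completion, producing "done"
}
```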
assert_eq!(builder.cached_return_block, None); - builder.finish(vec![], ty) + builder.finish(vec![], ty, None) } fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, @@ -427,7 +444,7 @@ fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, let mut builder = Builder::new(hir, span, 0, ty); let source_info = builder.source_info(span); builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable); - builder.finish(vec![], ty) + builder.finish(vec![], ty, None) } impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { @@ -462,7 +479,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn finish(self, upvar_decls: Vec, - return_ty: Ty<'tcx>) + return_ty: Ty<'tcx>, + yield_ty: Option>) -> Mir<'tcx> { for (index, block) in self.cfg.basic_blocks.iter().enumerate() { if block.terminator.is_none() { @@ -474,6 +492,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { self.visibility_scopes, IndexVec::new(), return_ty, + yield_ty, self.local_decls, self.arg_count, upvar_decls, @@ -484,7 +503,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn args_and_body(&mut self, mut block: BasicBlock, arguments: &[(Ty<'gcx>, Option<&'gcx hir::Pat>)], - argument_extent: CodeExtent, + argument_scope: region::Scope, ast_body: &'gcx hir::Expr) -> BlockAnd<()> { @@ -506,6 +525,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { span: pattern.map_or(self.fn_span, |pat| pat.span) }, name, + internal: false, is_user_variable: false, }); } @@ -527,7 +547,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // Make sure we drop (parts of) the argument even when not matched on. self.schedule_drop(pattern.as_ref().map_or(ast_body.span, |pat| pat.span), - argument_extent, &lvalue, ty); + argument_scope, &lvalue, ty); } diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index c36da9410f34a..ed598c876f3e9 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -12,7 +12,7 @@ Managing the scope stack. The scopes are tied to lexical scopes, so as we descend the HAIR, we push a scope on the stack, translate ite contents, and then pop it off. Every scope is named by a -`CodeExtent`. +`region::Scope`. ### SEME Regions @@ -23,7 +23,7 @@ via a `break` or `return` or just by fallthrough, that marks an exit from the scope. Each lexical scope thus corresponds to a single-entry, multiple-exit (SEME) region in the control-flow graph. -For now, we keep a mapping from each `CodeExtent` to its +For now, we keep a mapping from each `region::Scope` to its corresponding SEME region for later reference (see caveat in next paragraph). This is because region scopes are tied to them. Eventually, when we shift to non-lexical lifetimes, there should @@ -88,7 +88,7 @@ should go to. */ use build::{BlockAnd, BlockAndExtension, Builder, CFG}; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::ty::Ty; use rustc::mir::*; use rustc::mir::transform::MirSource; @@ -101,11 +101,11 @@ pub struct Scope<'tcx> { /// The visibility scope this scope was created in. visibility_scope: VisibilityScope, - /// the extent of this scope within source code. - extent: CodeExtent, + /// the region span of this scope within source code. + region_scope: region::Scope, - /// the span of that extent - extent_span: Span, + /// the span of that region_scope + region_scope_span: Span, /// Whether there's anything to do for the cleanup path, that is, /// when unwinding through this scope. 
This includes destructors,
@@ -125,7 +125,10 @@ pub struct Scope<'tcx> {
     drops: Vec<DropData<'tcx>>,
 
     /// The cache for drop chain on “normal” exit into a particular BasicBlock.
-    cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>,
+    cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>,
+
+    /// The cache for drop chain on "generator drop" exit.
+    cached_generator_drop: Option<BasicBlock>,
 }
 
 #[derive(Debug)]
@@ -140,22 +143,30 @@ struct DropData<'tcx> {
     kind: DropKind
 }
 
+#[derive(Debug, Default, Clone, Copy)]
+struct CachedBlock {
+    /// The cached block for the cleanups-on-diverge path. This block
+    /// contains code to run the current drop and all the preceding
+    /// drops (i.e. those having lower index in Drop’s Scope drop
+    /// array)
+    unwind: Option<BasicBlock>,
+
+    /// The cached block for unwinds during cleanups-on-generator-drop path
+    generator_drop: Option<BasicBlock>,
+}
+
 #[derive(Debug)]
 enum DropKind {
     Value {
-        /// The cached block for the cleanups-on-diverge path. This block
-        /// contains code to run the current drop and all the preceding
-        /// drops (i.e. those having lower index in Drop’s Scope drop
-        /// array)
-        cached_block: Option<BasicBlock>
+        cached_block: CachedBlock,
     },
     Storage
 }
 
 #[derive(Clone, Debug)]
 pub struct BreakableScope<'tcx> {
-    /// Extent of the loop
-    pub extent: CodeExtent,
+    /// Region scope of the loop
+    pub region_scope: region::Scope,
     /// Where the body of the loop begins. `None` if block
     pub continue_block: Option<BasicBlock>,
     /// Block to branch into when the loop or block terminates (either by being `break`-en out
@@ -166,6 +177,29 @@ pub struct BreakableScope<'tcx> {
     pub break_destination: Lvalue<'tcx>,
 }
 
+impl CachedBlock {
+    fn invalidate(&mut self) {
+        self.generator_drop = None;
+        self.unwind = None;
+    }
+
+    fn get(&self, generator_drop: bool) -> Option<BasicBlock> {
+        if generator_drop {
+            self.generator_drop
+        } else {
+            self.unwind
+        }
+    }
+
+    fn ref_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
+        if generator_drop {
+            &mut self.generator_drop
+        } else {
+            &mut self.unwind
+        }
+    }
+}
+
 impl DropKind {
     fn may_panic(&self) -> bool {
         match *self {
@@ -187,7 +221,7 @@ impl<'tcx> Scope<'tcx> {
         if !unwind { return; }
         for dropdata in &mut self.drops {
             if let DropKind::Value { ref mut cached_block } = dropdata.kind {
-                *cached_block = None;
+                cached_block.invalidate();
             }
         }
     }
@@ -196,10 +230,12 @@ impl<'tcx> Scope<'tcx> {
     ///
     /// Precondition: the caches must be fully filled (i.e. diverge_cleanup is called) in order for
     /// this method to work correctly.
-    fn cached_block(&self) -> Option<BasicBlock> {
+    fn cached_block(&self, generator_drop: bool) -> Option<BasicBlock> {
         let mut drops = self.drops.iter().rev().filter_map(|data| {
             match data.kind {
-                DropKind::Value { cached_block } => Some(cached_block),
+                DropKind::Value { cached_block } => {
+                    Some(cached_block.get(generator_drop))
+                }
                 DropKind::Storage => None
             }
         });
@@ -233,9 +269,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                                     f: F) -> R
         where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> R
     {
-        let extent = self.topmost_scope();
+        let region_scope = self.topmost_scope();
         let scope = BreakableScope {
-            extent,
+            region_scope,
             continue_block: loop_block,
             break_block,
             break_destination,
@@ -243,41 +279,41 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         self.breakable_scopes.push(scope);
         let res = f(self);
         let breakable_scope = self.breakable_scopes.pop().unwrap();
-        assert!(breakable_scope.extent == extent);
+        assert!(breakable_scope.region_scope == region_scope);
         res
     }
 
     pub fn in_opt_scope<F, R>(&mut self,
-                              opt_extent: Option<(CodeExtent, SourceInfo)>,
+                              opt_scope: Option<(region::Scope, SourceInfo)>,
                               mut block: BasicBlock,
                               f: F) -> BlockAnd<R>
         where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
     {
-        debug!("in_opt_scope(opt_extent={:?}, block={:?})", opt_extent, block);
-        if let Some(extent) = opt_extent { self.push_scope(extent); }
+        debug!("in_opt_scope(opt_scope={:?}, block={:?})", opt_scope, block);
+        if let Some(region_scope) = opt_scope { self.push_scope(region_scope); }
         let rv = unpack!(block = f(self));
-        if let Some(extent) = opt_extent {
-            unpack!(block = self.pop_scope(extent, block));
+        if let Some(region_scope) = opt_scope {
+            unpack!(block = self.pop_scope(region_scope, block));
         }
-        debug!("in_scope: exiting opt_extent={:?} block={:?}", opt_extent, block);
+        debug!("in_scope: exiting opt_scope={:?} block={:?}", opt_scope, block);
         block.and(rv)
     }
 
     /// Convenience wrapper that pushes a scope and then executes `f`
     /// to build its contents, popping the scope afterwards.
     pub fn in_scope<F, R>(&mut self,
-                          extent: (CodeExtent, SourceInfo),
+                          region_scope: (region::Scope, SourceInfo),
                           mut block: BasicBlock,
                           f: F) -> BlockAnd<R>
         where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
     {
-        debug!("in_scope(extent={:?}, block={:?})", extent, block);
-        self.push_scope(extent);
+        debug!("in_scope(region_scope={:?}, block={:?})", region_scope, block);
+        self.push_scope(region_scope);
         let rv = unpack!(block = f(self));
-        unpack!(block = self.pop_scope(extent, block));
-        debug!("in_scope: exiting extent={:?} block={:?}", extent, block);
+        unpack!(block = self.pop_scope(region_scope, block));
+        debug!("in_scope: exiting region_scope={:?} block={:?}", region_scope, block);
         block.and(rv)
     }
 
@@ -285,27 +321,28 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// scope and call `pop_scope` afterwards. Note that these two
     /// calls must be paired; using `in_scope` as a convenience
     /// wrapper may be preferable.
- pub fn push_scope(&mut self, extent: (CodeExtent, SourceInfo)) { - debug!("push_scope({:?})", extent); + pub fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) { + debug!("push_scope({:?})", region_scope); let vis_scope = self.visibility_scope; self.scopes.push(Scope { visibility_scope: vis_scope, - extent: extent.0, - extent_span: extent.1.span, + region_scope: region_scope.0, + region_scope_span: region_scope.1.span, needs_cleanup: false, drops: vec![], + cached_generator_drop: None, cached_exits: FxHashMap() }); } - /// Pops a scope, which should have extent `extent`, adding any - /// drops onto the end of `block` that are needed. This must - /// match 1-to-1 with `push_scope`. + /// Pops a scope, which should have region scope `region_scope`, + /// adding any drops onto the end of `block` that are needed. + /// This must match 1-to-1 with `push_scope`. pub fn pop_scope(&mut self, - extent: (CodeExtent, SourceInfo), + region_scope: (region::Scope, SourceInfo), mut block: BasicBlock) -> BlockAnd<()> { - debug!("pop_scope({:?}, {:?})", extent, block); + debug!("pop_scope({:?}, {:?})", region_scope, block); // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. let may_panic = @@ -314,32 +351,35 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { self.diverge_cleanup(); } let scope = self.scopes.pop().unwrap(); - assert_eq!(scope.extent, extent.0); + assert_eq!(scope.region_scope, region_scope.0); unpack!(block = build_scope_drops(&mut self.cfg, &scope, &self.scopes, block, - self.arg_count)); + self.arg_count, + false)); - self.cfg.push_end_region(block, extent.1, scope.extent); + self.cfg.push_end_region(block, region_scope.1, scope.region_scope); block.unit() } /// Branch out of `block` to `target`, exiting all scopes up to - /// and including `extent`. This will insert whatever drops are + /// and including `region_scope`. This will insert whatever drops are /// needed, as well as tracking this exit for the SEME region. See /// module comment for details. 
pub fn exit_scope(&mut self, span: Span, - extent: (CodeExtent, SourceInfo), + region_scope: (region::Scope, SourceInfo), mut block: BasicBlock, target: BasicBlock) { - debug!("exit_scope(extent={:?}, block={:?}, target={:?})", extent, block, target); - let scope_count = 1 + self.scopes.iter().rev().position(|scope| scope.extent == extent.0) - .unwrap_or_else(||{ - span_bug!(span, "extent {:?} does not enclose", extent) - }); + debug!("exit_scope(region_scope={:?}, block={:?}, target={:?})", + region_scope, block, target); + let scope_count = 1 + self.scopes.iter().rev() + .position(|scope| scope.region_scope == region_scope.0) + .unwrap_or_else(|| { + span_bug!(span, "region_scope {:?} does not enclose", region_scope) + }); let len = self.scopes.len(); assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes"); @@ -355,7 +395,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let mut rest = &mut self.scopes[(len - scope_count)..]; while let Some((scope, rest_)) = {rest}.split_last_mut() { rest = rest_; - block = if let Some(&e) = scope.cached_exits.get(&(target, extent.0)) { + block = if let Some(&e) = scope.cached_exits.get(&(target, region_scope.0)) { self.cfg.terminate(block, scope.source_info(span), TerminatorKind::Goto { target: e }); return; @@ -363,17 +403,18 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let b = self.cfg.start_new_block(); self.cfg.terminate(block, scope.source_info(span), TerminatorKind::Goto { target: b }); - scope.cached_exits.insert((target, extent.0), b); + scope.cached_exits.insert((target, region_scope.0), b); b }; unpack!(block = build_scope_drops(&mut self.cfg, scope, rest, block, - self.arg_count)); + self.arg_count, + false)); // End all regions for scopes out of which we are breaking. - self.cfg.push_end_region(block, extent.1, scope.extent); + self.cfg.push_end_region(block, region_scope.1, scope.region_scope); } } let scope = &self.scopes[len - scope_count]; @@ -381,6 +422,55 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { TerminatorKind::Goto { target: target }); } + /// Creates a path that performs all required cleanup for dropping a generator. + /// + /// This path terminates in GeneratorDrop. Returns the start of the path. + /// None indicates there’s no cleanup to do at this point. + pub fn generator_drop_cleanup(&mut self) -> Option { + if !self.scopes.iter().any(|scope| scope.needs_cleanup) { + return None; + } + + // Fill in the cache + self.diverge_cleanup_gen(true); + + let src_info = self.scopes[0].source_info(self.fn_span); + let mut block = self.cfg.start_new_block(); + let result = block; + let mut rest = &mut self.scopes[..]; + + while let Some((scope, rest_)) = {rest}.split_last_mut() { + rest = rest_; + if !scope.needs_cleanup { + continue; + } + block = if let Some(b) = scope.cached_generator_drop { + self.cfg.terminate(block, src_info, + TerminatorKind::Goto { target: b }); + return Some(result); + } else { + let b = self.cfg.start_new_block(); + scope.cached_generator_drop = Some(b); + self.cfg.terminate(block, src_info, + TerminatorKind::Goto { target: b }); + b + }; + unpack!(block = build_scope_drops(&mut self.cfg, + scope, + rest, + block, + self.arg_count, + true)); + + // End all regions for scopes out of which we are breaking. + self.cfg.push_end_region(block, src_info, scope.region_scope); + } + + self.cfg.terminate(block, src_info, TerminatorKind::GeneratorDrop); + + Some(result) + } + /// Creates a new visibility scope, nested in the current one. 
pub fn new_visibility_scope(&mut self, span: Span) -> VisibilityScope { let parent = self.visibility_scope; @@ -398,12 +488,12 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// resolving `break` and `continue`. pub fn find_breakable_scope(&mut self, span: Span, - label: CodeExtent) + label: region::Scope) -> &mut BreakableScope<'tcx> { // find the loop-scope with the correct id self.breakable_scopes.iter_mut() .rev() - .filter(|breakable_scope| breakable_scope.extent == label) + .filter(|breakable_scope| breakable_scope.region_scope == label) .next() .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found")) } @@ -416,23 +506,23 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } } - /// Returns the extent of the scope which should be exited by a + /// Returns the `region::Scope` of the scope which should be exited by a /// return. - pub fn extent_of_return_scope(&self) -> CodeExtent { + pub fn region_scope_of_return_scope(&self) -> region::Scope { // The outermost scope (`scopes[0]`) will be the `CallSiteScope`. // We want `scopes[1]`, which is the `ParameterScope`. assert!(self.scopes.len() >= 2); - assert!(match self.scopes[1].extent { - CodeExtent::ParameterScope(_) => true, + assert!(match self.scopes[1].region_scope { + region::Scope::Arguments(_) => true, _ => false, }); - self.scopes[1].extent + self.scopes[1].region_scope } /// Returns the topmost active scope, which is known to be alive until /// the next scope expression. - pub fn topmost_scope(&self) -> CodeExtent { - self.scopes.last().expect("topmost_scope: no scopes present").extent + pub fn topmost_scope(&self) -> region::Scope { + self.scopes.last().expect("topmost_scope: no scopes present").region_scope } /// Returns the scope that we should use as the lifetime of an @@ -457,7 +547,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// /// When building statics/constants, returns `None` since /// intermediate values do not have to be dropped in that case. - pub fn local_scope(&self) -> Option { + pub fn local_scope(&self) -> Option { match self.hir.src { MirSource::Const(_) | MirSource::Static(..) => @@ -465,7 +555,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { None, MirSource::Fn(_) => Some(self.topmost_scope()), - MirSource::Promoted(..) => + MirSource::Promoted(..) | + MirSource::GeneratorDrop(..) => bug!(), } } @@ -473,15 +564,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // Scheduling drops // ================ /// Indicates that `lvalue` should be dropped on exit from - /// `extent`. + /// `region_scope`. pub fn schedule_drop(&mut self, span: Span, - extent: CodeExtent, + region_scope: region::Scope, lvalue: &Lvalue<'tcx>, lvalue_ty: Ty<'tcx>) { let needs_drop = self.hir.needs_drop(lvalue_ty); let drop_kind = if needs_drop { - DropKind::Value { cached_block: None } + DropKind::Value { cached_block: CachedBlock::default() } } else { // Only temps and vars need their storage dead. match *lvalue { @@ -491,7 +582,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { }; for scope in self.scopes.iter_mut().rev() { - let this_scope = scope.extent == extent; + let this_scope = scope.region_scope == region_scope; // When building drops, we try to cache chains of drops in such a way so these drops // could be reused by the drops which would branch into the cached (already built) // blocks. This, however, means that whenever we add a drop into a scope which already @@ -544,10 +635,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { if let DropKind::Value { .. 
} = drop_kind { scope.needs_cleanup = true; } - let tcx = self.hir.tcx(); - let extent_span = extent.span(&tcx.hir).unwrap(); + let region_scope_span = region_scope.span(self.hir.tcx(), + &self.hir.region_scope_tree); // Attribute scope exit drops to scope's closing brace - let scope_end = Span { lo: extent_span.hi, .. extent_span}; + let scope_end = region_scope_span.with_lo(region_scope_span.hi()); scope.drops.push(DropData { span: scope_end, location: lvalue.clone(), @@ -556,7 +647,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { return; } } - span_bug!(span, "extent {:?} not in scope to drop {:?}", extent, lvalue); + span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, lvalue); } // Other @@ -567,6 +658,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// See module comment for more details. None indicates there’s no /// cleanup to do at this point. pub fn diverge_cleanup(&mut self) -> Option { + self.diverge_cleanup_gen(false) + } + + fn diverge_cleanup_gen(&mut self, generator_drop: bool) -> Option { if !self.scopes.iter().any(|scope| scope.needs_cleanup) { return None; } @@ -599,7 +694,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { }; for scope in scopes.iter_mut() { - target = build_diverge_scope(cfg, scope.extent_span, scope, target); + target = build_diverge_scope(cfg, scope.region_scope_span, + scope, target, generator_drop); } Some(target) } @@ -676,7 +772,8 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, scope: &Scope<'tcx>, earlier_scopes: &[Scope<'tcx>], mut block: BasicBlock, - arg_count: usize) + arg_count: usize, + generator_drop: bool) -> BlockAnd<()> { debug!("build_scope_drops({:?} -> {:?})", block, scope); let mut iter = scope.drops.iter().rev().peekable(); @@ -688,16 +785,20 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, // for us to diverge into in case the drop panics. let on_diverge = iter.peek().iter().filter_map(|dd| { match dd.kind { - DropKind::Value { cached_block: None } => - span_bug!(drop_data.span, "cached block not present?"), - DropKind::Value { cached_block } => cached_block, + DropKind::Value { cached_block } => { + let result = cached_block.get(generator_drop); + if result.is_none() { + span_bug!(drop_data.span, "cached block not present?") + } + result + }, DropKind::Storage => None } }).next(); // If there’s no `cached_block`s within current scope, // we must look for one in the enclosing scope. let on_diverge = on_diverge.or_else(|| { - earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next() + earlier_scopes.iter().rev().flat_map(|s| s.cached_block(generator_drop)).next() }); let next = cfg.start_new_block(); cfg.terminate(block, source_info, TerminatorKind::Drop { @@ -710,13 +811,18 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, DropKind::Storage => {} } + // We do not need to emit StorageDead for generator drops + if generator_drop { + continue + } + // Drop the storage for both value and storage drops. // Only temps and vars need their storage dead. match drop_data.location { Lvalue::Local(index) if index.index() > arg_count => { cfg.push(block, Statement { source_info, - kind: StatementKind::StorageDead(drop_data.location.clone()) + kind: StatementKind::StorageDead(index) }); } _ => continue @@ -728,7 +834,8 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, fn build_diverge_scope<'a, 'gcx, 'tcx>(cfg: &mut CFG<'tcx>, span: Span, scope: &mut Scope<'tcx>, - mut target: BasicBlock) + mut target: BasicBlock, + generator_drop: bool) -> BasicBlock { // Build up the drops in **reverse** order. 
The end result will @@ -742,7 +849,7 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(cfg: &mut CFG<'tcx>, // The code in this function reads from right to left. At each // point, we check for cached blocks representing the // remainder. If everything is cached, we'll just walk right to - // left reading the cached results but never created anything. + // left reading the cached results but never create anything. let visibility_scope = scope.visibility_scope; let source_info = |span| SourceInfo { @@ -764,7 +871,7 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(cfg: &mut CFG<'tcx>, // match the behavior of clang, but on inspection eddyb says // this is not what clang does. let cached_block = match drop_data.kind { - DropKind::Value { ref mut cached_block } => cached_block, + DropKind::Value { ref mut cached_block } => cached_block.ref_mut(generator_drop), DropKind::Storage => continue }; target = if let Some(cached_block) = *cached_block { @@ -786,7 +893,7 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(cfg: &mut CFG<'tcx>, // becomes trivial goto after pass that removes all EndRegions.) { let block = cfg.start_new_cleanup_block(); - cfg.push_end_region(block, source_info(span), scope.extent); + cfg.push_end_region(block, source_info(span), scope.region_scope); cfg.terminate(block, source_info(span), TerminatorKind::Goto { target: target }); target = block } diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index ab62342e607dd..3f815ec83e3a5 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -107,7 +107,7 @@ impl<'a, 'tcx> BitDenotation for Borrows<'a, 'tcx> { self.borrows.len() } fn start_block_effect(&self, _sets: &mut BlockSets) { - // no borrows of code extents have been taken prior to + // no borrows of code region_scopes have been taken prior to // function execution, so this method has no effect on // `_sets`. } @@ -121,9 +121,9 @@ impl<'a, 'tcx> BitDenotation for Borrows<'a, 'tcx> { panic!("could not find statement at location {:?}"); }); match stmt.kind { - mir::StatementKind::EndRegion(extent) => { - let borrow_indexes = self.region_map.get(&ReScope(extent)).unwrap_or_else(|| { - panic!("could not find BorrowIndexs for code-extent {:?}", extent); + mir::StatementKind::EndRegion(region_scope) => { + let borrow_indexes = self.region_map.get(&ReScope(region_scope)).unwrap_or_else(|| { + panic!("could not find BorrowIndexs for region scope {:?}", region_scope); }); for idx in borrow_indexes { sets.kill(&idx); } @@ -153,7 +153,7 @@ impl<'a, 'tcx> BitDenotation for Borrows<'a, 'tcx> { fn terminator_effect(&self, _sets: &mut BlockSets, _location: Location) { - // no terminators start nor end code extents. + // no terminators start nor end region scopes. } fn propagate_call_return(&self, @@ -161,7 +161,7 @@ impl<'a, 'tcx> BitDenotation for Borrows<'a, 'tcx> { _call_bb: mir::BasicBlock, _dest_bb: mir::BasicBlock, _dest_lval: &mir::Lvalue) { - // there are no effects on the extents from method calls. + // there are no effects on the region scopes from method calls. 
} } diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index 237795491b186..9f9909a8f57a9 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -653,15 +653,21 @@ impl<'a, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation match bb_data.terminator().kind { mir::TerminatorKind::Return | mir::TerminatorKind::Resume | + mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Unreachable => {} mir::TerminatorKind::Goto { ref target } | mir::TerminatorKind::Assert { ref target, cleanup: None, .. } | + mir::TerminatorKind::Yield { resume: ref target, drop: None, .. } | mir::TerminatorKind::Drop { ref target, location: _, unwind: None } | mir::TerminatorKind::DropAndReplace { ref target, value: _, location: _, unwind: None } => { self.propagate_bits_into_entry_set_for(in_out, changed, target); } + mir::TerminatorKind::Yield { resume: ref target, drop: Some(ref drop), .. } => { + self.propagate_bits_into_entry_set_for(in_out, changed, target); + self.propagate_bits_into_entry_set_for(in_out, changed, drop); + } mir::TerminatorKind::Assert { ref target, cleanup: Some(ref unwind), .. } | mir::TerminatorKind::Drop { ref target, location: _, unwind: Some(ref unwind) } | mir::TerminatorKind::DropAndReplace { diff --git a/src/librustc_mir/dataflow/move_paths/abs_domain.rs b/src/librustc_mir/dataflow/move_paths/abs_domain.rs index 173396f22457a..00825c7a880e9 100644 --- a/src/librustc_mir/dataflow/move_paths/abs_domain.rs +++ b/src/librustc_mir/dataflow/move_paths/abs_domain.rs @@ -21,8 +21,7 @@ //! `a[x]` would still overlap them both. But that is not this //! representation does today.) -use rustc::mir::LvalueElem; -use rustc::mir::{Operand, ProjectionElem}; +use rustc::mir::{Local, LvalueElem, Operand, ProjectionElem}; use rustc::ty::Ty; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -40,6 +39,10 @@ impl<'tcx> Lift for Operand<'tcx> { type Abstract = AbstractOperand; fn lift(&self) -> Self::Abstract { AbstractOperand } } +impl Lift for Local { + type Abstract = AbstractOperand; + fn lift(&self) -> Self::Abstract { AbstractOperand } +} impl<'tcx> Lift for Ty<'tcx> { type Abstract = AbstractType; fn lift(&self) -> Self::Abstract { AbstractType } diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs index c45c91011d9f4..86298c3b83e29 100644 --- a/src/librustc_mir/dataflow/move_paths/builder.rs +++ b/src/librustc_mir/dataflow/move_paths/builder.rs @@ -263,6 +263,7 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { match term.kind { TerminatorKind::Goto { target: _ } | TerminatorKind::Resume | + TerminatorKind::GeneratorDrop | TerminatorKind::Unreachable => { } TerminatorKind::Return => { @@ -274,6 +275,10 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { // branching terminators - these don't move anything } + TerminatorKind::Yield { ref value, .. 
} => { + self.gather_operand(loc, value); + } + TerminatorKind::Drop { ref location, target: _, unwind: _ } => { self.gather_move(loc, location); } diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs index 61d128fc84782..8b3031a3800a5 100644 --- a/src/librustc_mir/hair/cx/block.rs +++ b/src/librustc_mir/hair/cx/block.rs @@ -11,9 +11,8 @@ use hair::*; use hair::cx::Cx; use hair::cx::to_ref::ToRef; -use rustc::middle::region::{BlockRemainder, CodeExtent}; +use rustc::middle::region::{self, BlockRemainder}; use rustc::hir; -use syntax::ast; impl<'tcx> Mirror<'tcx> for &'tcx hir::Block { type Output = Block<'tcx>; @@ -21,12 +20,13 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Block { fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> { // We have to eagerly translate the "spine" of the statements // in order to get the lexical scoping correctly. - let stmts = mirror_stmts(cx, self.id, &*self.stmts); - let opt_destruction_extent = cx.region_maps.opt_destruction_extent(self.id); + let stmts = mirror_stmts(cx, self.hir_id.local_id, &*self.stmts); + let opt_destruction_scope = + cx.region_scope_tree.opt_destruction_scope(self.hir_id.local_id); Block { targeted_by_break: self.targeted_by_break, - extent: CodeExtent::Misc(self.id), - opt_destruction_extent, + region_scope: region::Scope::Node(self.hir_id.local_id), + opt_destruction_scope, span: self.span, stmts, expr: self.expr.to_ref(), @@ -35,31 +35,31 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Block { } fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, - block_id: ast::NodeId, + block_id: hir::ItemLocalId, stmts: &'tcx [hir::Stmt]) -> Vec> { let mut result = vec![]; for (index, stmt) in stmts.iter().enumerate() { - let opt_dxn_ext = cx.region_maps.opt_destruction_extent(stmt.node.id()); + let hir_id = cx.tcx.hir.node_to_hir_id(stmt.node.id()); + let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id); match stmt.node { - hir::StmtExpr(ref expr, id) | - hir::StmtSemi(ref expr, id) => { + hir::StmtExpr(ref expr, _) | + hir::StmtSemi(ref expr, _) => { result.push(StmtRef::Mirror(Box::new(Stmt { - span: stmt.span, kind: StmtKind::Expr { - scope: CodeExtent::Misc(id), + scope: region::Scope::Node(hir_id.local_id), expr: expr.to_ref(), }, - opt_destruction_extent: opt_dxn_ext, + opt_destruction_scope: opt_dxn_ext, }))) } - hir::StmtDecl(ref decl, id) => { + hir::StmtDecl(ref decl, _) => { match decl.node { hir::DeclItem(..) 
=> { // ignore for purposes of the MIR } hir::DeclLocal(ref local) => { - let remainder_extent = CodeExtent::Remainder(BlockRemainder { + let remainder_scope = region::Scope::Remainder(BlockRemainder { block: block_id, first_statement_index: index as u32, }); @@ -69,14 +69,13 @@ fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, cx.tables(), &local.pat); result.push(StmtRef::Mirror(Box::new(Stmt { - span: stmt.span, kind: StmtKind::Let { - remainder_scope: remainder_extent, - init_scope: CodeExtent::Misc(id), + remainder_scope: remainder_scope, + init_scope: region::Scope::Node(hir_id.local_id), pattern, initializer: local.init.to_ref(), }, - opt_destruction_extent: opt_dxn_ext, + opt_destruction_scope: opt_dxn_ext, }))); } } @@ -90,7 +89,7 @@ pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, block: &'tcx hir::Block) -> ExprRef<'tcx> { let block_ty = cx.tables().node_id_to_type(block.hir_id); - let temp_lifetime = cx.region_maps.temporary_scope(block.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(block.hir_id.local_id); let expr = Expr { ty: block_ty, temp_lifetime, diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index 944fb8e83329b..c96d42b94f975 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -25,8 +25,8 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { type Output = Expr<'tcx>; fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> { - let temp_lifetime = cx.region_maps.temporary_scope(self.id); - let expr_extent = CodeExtent::Misc(self.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(self.hir_id.local_id); + let expr_scope = region::Scope::Node(self.hir_id.local_id); debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span); @@ -46,19 +46,20 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { ty: expr.ty, span: self.span, kind: ExprKind::Scope { - extent: expr_extent, + region_scope: expr_scope, value: expr.to_ref(), }, }; // Finally, create a destruction scope, if any. - if let Some(extent) = cx.region_maps.opt_destruction_extent(self.id) { + if let Some(region_scope) = + cx.region_scope_tree.opt_destruction_scope(self.hir_id.local_id) { expr = Expr { temp_lifetime, ty: expr.ty, span: self.span, kind: ExprKind::Scope { - extent, + region_scope, value: expr.to_ref(), }, }; @@ -125,7 +126,7 @@ fn apply_adjustment<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // Convert this to a suitable `&foo` and // then an unsafe coercion. Limit the region to be just this // expression. - let region = ty::ReScope(CodeExtent::Misc(hir_expr.id)); + let region = ty::ReScope(region::Scope::Node(hir_expr.hir_id.local_id)); let region = cx.tcx.mk_region(region); expr = Expr { temp_lifetime, @@ -160,7 +161,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, expr: &'tcx hir::Expr) -> Expr<'tcx> { let expr_ty = cx.tables().expr_ty(expr); - let temp_lifetime = cx.region_maps.temporary_scope(expr.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id); let kind = match expr.node { // Here comes the interesting stuff: @@ -432,8 +433,9 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, hir::ExprClosure(..) 
=> { let closure_ty = cx.tables().expr_ty(expr); - let (def_id, substs) = match closure_ty.sty { - ty::TyClosure(def_id, substs) => (def_id, substs), + let (def_id, substs, interior) = match closure_ty.sty { + ty::TyClosure(def_id, substs) => (def_id, substs, None), + ty::TyGenerator(def_id, substs, interior) => (def_id, substs, Some(interior)), _ => { span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty); } @@ -448,6 +450,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, closure_id: def_id, substs, upvars, + interior, } } @@ -485,7 +488,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, match dest.target_id { hir::ScopeTarget::Block(target_id) | hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(target_id)) => ExprKind::Break { - label: CodeExtent::Misc(target_id), + label: region::Scope::Node(cx.tcx.hir.node_to_hir_id(target_id).local_id), value: value.to_ref(), }, hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) => @@ -496,7 +499,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, match dest.target_id { hir::ScopeTarget::Block(_) => bug!("cannot continue to blocks"), hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(loop_id)) => ExprKind::Continue { - label: CodeExtent::Misc(loop_id), + label: region::Scope::Node(cx.tcx.hir.node_to_hir_id(loop_id).local_id), }, hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) => bug!("invalid loop id for continue: {}", err) @@ -567,6 +570,8 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, } hir::ExprArray(ref fields) => ExprKind::Array { fields: fields.to_ref() }, hir::ExprTup(ref fields) => ExprKind::Tuple { fields: fields.to_ref() }, + + hir::ExprYield(ref v) => ExprKind::Yield { value: v.to_ref() }, }; Expr { @@ -581,7 +586,7 @@ fn method_callee<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, expr: &hir::Expr, custom_callee: Option<(DefId, &'tcx Substs<'tcx>)>) -> Expr<'tcx> { - let temp_lifetime = cx.region_maps.temporary_scope(expr.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id); let (def_id, substs) = custom_callee.unwrap_or_else(|| { (cx.tables().type_dependent_defs()[expr.hir_id].def_id(), cx.tables().node_substs(expr.hir_id)) @@ -672,7 +677,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, expr: &'tcx hir::Expr, def: Def) -> ExprKind<'tcx> { - let temp_lifetime = cx.region_maps.temporary_scope(expr.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id); match def { Def::Local(def_id) => { @@ -703,56 +708,65 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, }); let region = cx.tcx.mk_region(region); - let self_expr = match cx.tcx.closure_kind(closure_def_id) { - ty::ClosureKind::Fn => { - let ref_closure_ty = cx.tcx.mk_ref(region, - ty::TypeAndMut { - ty: closure_ty, - mutbl: hir::MutImmutable, - }); - Expr { - ty: closure_ty, - temp_lifetime, - span: expr.span, - kind: ExprKind::Deref { - arg: Expr { - ty: ref_closure_ty, - temp_lifetime, - span: expr.span, - kind: ExprKind::SelfRef, - } - .to_ref(), - }, + let self_expr = if let ty::TyClosure(..) 
= closure_ty.sty { + match cx.tcx.closure_kind(closure_def_id) { + ty::ClosureKind::Fn => { + let ref_closure_ty = cx.tcx.mk_ref(region, + ty::TypeAndMut { + ty: closure_ty, + mutbl: hir::MutImmutable, + }); + Expr { + ty: closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::Deref { + arg: Expr { + ty: ref_closure_ty, + temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef, + } + .to_ref(), + }, + } } - } - ty::ClosureKind::FnMut => { - let ref_closure_ty = cx.tcx.mk_ref(region, - ty::TypeAndMut { - ty: closure_ty, - mutbl: hir::MutMutable, - }); - Expr { - ty: closure_ty, - temp_lifetime, - span: expr.span, - kind: ExprKind::Deref { - arg: Expr { - ty: ref_closure_ty, - temp_lifetime, - span: expr.span, - kind: ExprKind::SelfRef, - }.to_ref(), - }, + ty::ClosureKind::FnMut => { + let ref_closure_ty = cx.tcx.mk_ref(region, + ty::TypeAndMut { + ty: closure_ty, + mutbl: hir::MutMutable, + }); + Expr { + ty: closure_ty, + temp_lifetime, + span: expr.span, + kind: ExprKind::Deref { + arg: Expr { + ty: ref_closure_ty, + temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef, + }.to_ref(), + }, + } } - } - ty::ClosureKind::FnOnce => { - Expr { - ty: closure_ty, - temp_lifetime, - span: expr.span, - kind: ExprKind::SelfRef, + ty::ClosureKind::FnOnce => { + Expr { + ty: closure_ty, + temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef, + } } } + } else { + Expr { + ty: closure_ty, + temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef, + } }; // at this point we have `self.n`, which loads up the upvar @@ -854,7 +868,7 @@ fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // construct the complete expression `foo()` for the overloaded call, // which will yield the &T type - let temp_lifetime = cx.region_maps.temporary_scope(expr.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id); let fun = method_callee(cx, expr, custom_callee); let ref_expr = Expr { temp_lifetime, @@ -883,7 +897,7 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, closure_expr_id: cx.tcx.hir.local_def_id(closure_expr.id).index, }; let upvar_capture = cx.tables().upvar_capture(upvar_id); - let temp_lifetime = cx.region_maps.temporary_scope(closure_expr.id); + let temp_lifetime = cx.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id); let var_ty = cx.tables() .node_id_to_type(cx.tcx.hir.node_to_hir_id(var_node_id)); let captured_var = Expr { diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 2f4ab36d394b0..3cabd8f18c19e 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -22,7 +22,7 @@ use rustc_const_eval::ConstContext; use rustc_data_structures::indexed_vec::Idx; use rustc::hir::def_id::DefId; use rustc::hir::map::blocks::FnLikeNode; -use rustc::middle::region::RegionMaps; +use rustc::middle::region; use rustc::infer::InferCtxt; use rustc::ty::subst::Subst; use rustc::ty::{self, Ty, TyCtxt}; @@ -42,7 +42,7 @@ pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { /// Identity `Substs` for use with const-evaluation. pub identity_substs: &'gcx Substs<'gcx>, - pub region_maps: Rc, + pub region_scope_tree: Rc, pub tables: &'a ty::TypeckTables<'gcx>, /// This is `Constness::Const` if we are compiling a `static`, @@ -61,6 +61,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { let constness = match src { MirSource::Const(_) | MirSource::Static(..) => hir::Constness::Const, + MirSource::GeneratorDrop(..) 
=> hir::Constness::NotConst, MirSource::Fn(id) => { let fn_like = FnLikeNode::from_node(infcx.tcx.hir.get(id)); fn_like.map_or(hir::Constness::NotConst, |f| f.constness()) @@ -91,7 +92,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { infcx, param_env: tcx.param_env(src_def_id), identity_substs: Substs::identity_for_item(tcx.global_tcx(), src_def_id), - region_maps: tcx.region_maps(src_def_id), + region_scope_tree: tcx.region_scope_tree(src_def_id), tables: tcx.typeck_tables_of(src_def_id), constness, src, diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 01faa61d0964a..067bd458d97dd 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -17,9 +17,9 @@ use rustc_const_math::ConstUsize; use rustc::mir::{BinOp, BorrowKind, Field, Literal, UnOp}; use rustc::hir::def_id::DefId; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::ty::subst::Substs; -use rustc::ty::{self, AdtDef, ClosureSubsts, Region, Ty}; +use rustc::ty::{self, AdtDef, ClosureSubsts, Region, Ty, GeneratorInterior}; use rustc::hir; use syntax::ast; use syntax_pos::Span; @@ -32,8 +32,8 @@ pub use rustc_const_eval::pattern::{BindingMode, Pattern, PatternKind, FieldPatt #[derive(Clone, Debug)] pub struct Block<'tcx> { pub targeted_by_break: bool, - pub extent: CodeExtent, - pub opt_destruction_extent: Option, + pub region_scope: region::Scope, + pub opt_destruction_scope: Option, pub span: Span, pub stmts: Vec>, pub expr: Option>, @@ -46,16 +46,15 @@ pub enum StmtRef<'tcx> { #[derive(Clone, Debug)] pub struct Stmt<'tcx> { - pub span: Span, pub kind: StmtKind<'tcx>, - pub opt_destruction_extent: Option, + pub opt_destruction_scope: Option, } #[derive(Clone, Debug)] pub enum StmtKind<'tcx> { Expr { /// scope for this statement; may be used as lifetime of temporaries - scope: CodeExtent, + scope: region::Scope, /// expression being evaluated in this statement expr: ExprRef<'tcx>, @@ -64,11 +63,11 @@ pub enum StmtKind<'tcx> { Let { /// scope for variables bound in this let; covers this and /// remaining statements in block - remainder_scope: CodeExtent, + remainder_scope: region::Scope, /// scope for the initialization itself; might be used as /// lifetime of temporaries - init_scope: CodeExtent, + init_scope: region::Scope, /// let = ... 
pattern: Pattern<'tcx>, @@ -99,7 +98,7 @@ pub struct Expr<'tcx> { /// lifetime of this expression if it should be spilled into a /// temporary; should be None only if in a constant context - pub temp_lifetime: Option, + pub temp_lifetime: Option, /// span of the expression in the source pub span: Span, @@ -111,7 +110,7 @@ pub struct Expr<'tcx> { #[derive(Clone, Debug)] pub enum ExprKind<'tcx> { Scope { - extent: CodeExtent, + region_scope: region::Scope, value: ExprRef<'tcx>, }, Box { @@ -208,11 +207,11 @@ pub enum ExprKind<'tcx> { arg: ExprRef<'tcx>, }, Break { - label: CodeExtent, + label: region::Scope, value: Option>, }, Continue { - label: CodeExtent, + label: region::Scope, }, Return { value: Option>, @@ -238,6 +237,7 @@ pub enum ExprKind<'tcx> { closure_id: DefId, substs: ClosureSubsts<'tcx>, upvars: Vec>, + interior: Option>, }, Literal { literal: Literal<'tcx>, @@ -247,6 +247,9 @@ pub enum ExprKind<'tcx> { outputs: Vec>, inputs: Vec> }, + Yield { + value: ExprRef<'tcx>, + }, } #[derive(Clone, Debug)] diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index 6bee1ceff89fb..d3c886dab4e86 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -140,6 +140,7 @@ fn temp_decl(mutability: Mutability, ty: Ty, span: Span) -> LocalDecl { LocalDecl { mutability, ty, name: None, source_info: SourceInfo { scope: ARGUMENT_VISIBILITY_SCOPE, span }, + internal: false, is_user_variable: false } } @@ -160,6 +161,12 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, { debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty); + // Check if this is a generator, if so, return the drop glue for it + if let Some(&ty::TyS { sty: ty::TyGenerator(gen_def_id, substs, _), .. }) = ty { + let mir = &**tcx.optimized_mir(gen_def_id).generator_drop.as_ref().unwrap(); + return mir.subst(tcx, substs.substs); + } + let substs = if let Some(ty) = ty { tcx.mk_substs(iter::once(Kind::from(ty))) } else { @@ -190,6 +197,7 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, ), IndexVec::new(), sig.output(), + None, local_decls_for_sig(&sig, span), sig.inputs().len(), vec![], @@ -225,10 +233,10 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, } pub struct DropShimElaborator<'a, 'tcx: 'a> { - mir: &'a Mir<'tcx>, - patch: MirPatch<'tcx>, - tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, + pub mir: &'a Mir<'tcx>, + pub patch: MirPatch<'tcx>, + pub tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, + pub param_env: ty::ParamEnv<'tcx>, } impl<'a, 'tcx> fmt::Debug for DropShimElaborator<'a, 'tcx> { @@ -327,6 +335,7 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { ), IndexVec::new(), self.sig.output(), + None, self.local_decls, self.sig.inputs().len(), vec![], @@ -470,9 +479,10 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { fn array_shim(&mut self, ty: ty::Ty<'tcx>, len: usize) { let tcx = self.tcx; + let span = self.span; let rcvr = Lvalue::Local(Local::new(1+0)).deref(); - let beg = self.make_lvalue(Mutability::Mut, tcx.types.usize); + let beg = self.local_decls.push(temp_decl(Mutability::Mut, tcx.types.usize, span)); let end = self.make_lvalue(Mutability::Not, tcx.types.usize); let ret = self.make_lvalue(Mutability::Mut, tcx.mk_array(ty, len)); @@ -483,7 +493,7 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { let inits = vec![ self.make_statement( StatementKind::Assign( - beg.clone(), + Lvalue::Local(beg), Rvalue::Use(Operand::Constant(self.make_usize(0))) ) ), @@ -501,19 +511,19 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { // BB #3; // } // BB #4; - 
self.loop_header(beg.clone(), end, BasicBlock::new(2), BasicBlock::new(4), false); + self.loop_header(Lvalue::Local(beg), end, BasicBlock::new(2), BasicBlock::new(4), false); // BB #2 // `let cloned = Clone::clone(rcvr[beg])`; // Goto #3 if ok, #5 if unwinding happens. - let rcvr_field = rcvr.clone().index(Operand::Consume(beg.clone())); + let rcvr_field = rcvr.clone().index(beg); let cloned = self.make_clone_call(ty, rcvr_field, BasicBlock::new(3), BasicBlock::new(5)); // BB #3 // `ret[beg] = cloned;` // `beg = beg + 1;` // `goto #1`; - let ret_field = ret.clone().index(Operand::Consume(beg.clone())); + let ret_field = ret.clone().index(beg); let statements = vec![ self.make_statement( StatementKind::Assign( @@ -523,10 +533,10 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { ), self.make_statement( StatementKind::Assign( - beg.clone(), + Lvalue::Local(beg), Rvalue::BinaryOp( BinOp::Add, - Operand::Consume(beg.clone()), + Operand::Consume(Lvalue::Local(beg)), Operand::Constant(self.make_usize(1)) ) ) @@ -549,10 +559,10 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { // `let mut beg = 0;` // goto #6; let end = beg; - let beg = self.make_lvalue(Mutability::Mut, tcx.types.usize); + let beg = self.local_decls.push(temp_decl(Mutability::Mut, tcx.types.usize, span)); let init = self.make_statement( StatementKind::Assign( - beg.clone(), + Lvalue::Local(beg), Rvalue::Use(Operand::Constant(self.make_usize(0))) ) ); @@ -563,12 +573,13 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { // BB #8; // } // BB #9; - self.loop_header(beg.clone(), end, BasicBlock::new(7), BasicBlock::new(9), true); + self.loop_header(Lvalue::Local(beg), Lvalue::Local(end), + BasicBlock::new(7), BasicBlock::new(9), true); // BB #7 (cleanup) // `drop(ret[beg])`; self.block(vec![], TerminatorKind::Drop { - location: ret.index(Operand::Consume(beg.clone())), + location: ret.index(beg), target: BasicBlock::new(8), unwind: None, }, true); @@ -578,10 +589,10 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { // `goto #6;` let statement = self.make_statement( StatementKind::Assign( - beg.clone(), + Lvalue::Local(beg), Rvalue::BinaryOp( BinOp::Add, - Operand::Consume(beg.clone()), + Operand::Consume(Lvalue::Local(beg)), Operand::Constant(self.make_usize(1)) ) ) @@ -770,6 +781,7 @@ fn build_call_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, ), IndexVec::new(), sig.output(), + None, local_decls, sig.inputs().len(), vec![], @@ -841,6 +853,7 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>, ), IndexVec::new(), sig.output(), + None, local_decls, sig.inputs().len(), vec![], diff --git a/src/librustc_mir/transform/add_validation.rs b/src/librustc_mir/transform/add_validation.rs index 52c2eaa7cb632..8fad538af97ba 100644 --- a/src/librustc_mir/transform/add_validation.rs +++ b/src/librustc_mir/transform/add_validation.rs @@ -18,7 +18,7 @@ use rustc::ty::{self, TyCtxt, RegionKind}; use rustc::hir; use rustc::mir::*; use rustc::mir::transform::{MirPass, MirSource}; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; pub struct AddValidation; @@ -27,7 +27,7 @@ fn lval_context<'a, 'tcx, D>( lval: &Lvalue<'tcx>, local_decls: &D, tcx: TyCtxt<'a, 'tcx, 'tcx> -) -> (Option, hir::Mutability) +) -> (Option, hir::Mutability) where D: HasLocalDecls<'tcx> { use rustc::mir::Lvalue::*; diff --git a/src/librustc_mir/transform/clean_end_regions.rs b/src/librustc_mir/transform/clean_end_regions.rs index f06b88551d11d..55a16b2f39161 100644 --- a/src/librustc_mir/transform/clean_end_regions.rs +++ 
b/src/librustc_mir/transform/clean_end_regions.rs @@ -21,7 +21,7 @@ use rustc_data_structures::fx::FxHashSet; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::mir::transform::{MirPass, MirSource}; use rustc::mir::{BasicBlock, Location, Mir, Rvalue, Statement, StatementKind}; use rustc::mir::visit::{MutVisitor, Visitor, Lookup}; @@ -30,11 +30,11 @@ use rustc::ty::{Ty, RegionKind, TyCtxt}; pub struct CleanEndRegions; struct GatherBorrowedRegions { - seen_regions: FxHashSet, + seen_regions: FxHashSet, } struct DeleteTrivialEndRegions<'a> { - seen_regions: &'a FxHashSet, + seen_regions: &'a FxHashSet, } impl MirPass for CleanEndRegions { @@ -84,8 +84,8 @@ impl<'a, 'tcx> MutVisitor<'tcx> for DeleteTrivialEndRegions<'a> { location: Location) { let mut delete_it = false; - if let StatementKind::EndRegion(ref extent) = statement.kind { - if !self.seen_regions.contains(extent) { + if let StatementKind::EndRegion(ref region_scope) = statement.kind { + if !self.seen_regions.contains(region_scope) { delete_it = true; } } diff --git a/src/librustc_mir/transform/copy_prop.rs b/src/librustc_mir/transform/copy_prop.rs index 59b81f7e77c44..ac8ebd306d321 100644 --- a/src/librustc_mir/transform/copy_prop.rs +++ b/src/librustc_mir/transform/copy_prop.rs @@ -60,6 +60,7 @@ impl MirPass for CopyPropagation { return } } + MirSource::GeneratorDrop(_) => (), } // We only run when the MIR optimization level is > 1. @@ -235,8 +236,7 @@ impl<'tcx> Action<'tcx> { } // Replace all uses of the destination local with the source local. - let src_lvalue = Lvalue::Local(src_local); - def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_lvalue); + def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_local); // Finally, zap the now-useless assignment instruction. debug!(" Deleting assignment"); diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs index 97391452e596d..417083c4ff801 100644 --- a/src/librustc_mir/transform/elaborate_drops.rs +++ b/src/librustc_mir/transform/elaborate_drops.rs @@ -96,42 +96,42 @@ fn find_dead_unwinds<'a, 'tcx>( MaybeInitializedLvals::new(tcx, mir, &env), |bd, p| &bd.move_data().move_paths[p]); for (bb, bb_data) in mir.basic_blocks().iter_enumerated() { - match bb_data.terminator().kind { + let location = match bb_data.terminator().kind { TerminatorKind::Drop { ref location, unwind: Some(_), .. } | - TerminatorKind::DropAndReplace { ref location, unwind: Some(_), .. } => { - let mut init_data = InitializationData { - live: flow_inits.sets().on_entry_set_for(bb.index()).to_owned(), - dead: IdxSetBuf::new_empty(env.move_data.move_paths.len()), - }; - debug!("find_dead_unwinds @ {:?}: {:?}; init_data={:?}", - bb, bb_data, init_data.live); - for stmt in 0..bb_data.statements.len() { - let loc = Location { block: bb, statement_index: stmt }; - init_data.apply_location(tcx, mir, env, loc); - } + TerminatorKind::DropAndReplace { ref location, unwind: Some(_), .. } => location, + _ => continue, + }; - let path = match env.move_data.rev_lookup.find(location) { - LookupResult::Exact(e) => e, - LookupResult::Parent(..) 
=> { - debug!("find_dead_unwinds: has parent; skipping"); - continue - } - }; + let mut init_data = InitializationData { + live: flow_inits.sets().on_entry_set_for(bb.index()).to_owned(), + dead: IdxSetBuf::new_empty(env.move_data.move_paths.len()), + }; + debug!("find_dead_unwinds @ {:?}: {:?}; init_data={:?}", + bb, bb_data, init_data.live); + for stmt in 0..bb_data.statements.len() { + let loc = Location { block: bb, statement_index: stmt }; + init_data.apply_location(tcx, mir, env, loc); + } + + let path = match env.move_data.rev_lookup.find(location) { + LookupResult::Exact(e) => e, + LookupResult::Parent(..) => { + debug!("find_dead_unwinds: has parent; skipping"); + continue + } + }; - debug!("find_dead_unwinds @ {:?}: path({:?})={:?}", bb, location, path); + debug!("find_dead_unwinds @ {:?}: path({:?})={:?}", bb, location, path); - let mut maybe_live = false; - on_all_drop_children_bits(tcx, mir, &env, path, |child| { - let (child_maybe_live, _) = init_data.state(child); - maybe_live |= child_maybe_live; - }); + let mut maybe_live = false; + on_all_drop_children_bits(tcx, mir, &env, path, |child| { + let (child_maybe_live, _) = init_data.state(child); + maybe_live |= child_maybe_live; + }); - debug!("find_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live); - if !maybe_live { - dead_unwinds.add(&bb); - } - } - _ => {} + debug!("find_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live); + if !maybe_live { + dead_unwinds.add(&bb); } } @@ -314,7 +314,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { let patch = &mut self.patch; debug!("create_drop_flag({:?})", self.mir.span); self.drop_flags.entry(index).or_insert_with(|| { - patch.new_temp(tcx.types.bool, span) + patch.new_internal(tcx.types.bool, span) }); } diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs new file mode 100644 index 0000000000000..d1e0465f5551c --- /dev/null +++ b/src/librustc_mir/transform/generator.rs @@ -0,0 +1,798 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This is the implementation of the pass which transforms generators into state machines. +//! +//! MIR generation for generators creates a function which has a self argument which +//! passes by value. This argument is effectively a generator type which only contains upvars and +//! is only used for this argument inside the MIR for the generator. +//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that +//! MIR before this pass and creates drop flags for MIR locals. +//! It will also drop the generator argument (which only consists of upvars) if any of the upvars +//! are moved out of. This pass elaborates the drops of upvars / generator argument in the case +//! that none of the upvars were moved out of. This is because we cannot have any drops of this +//! generator in the MIR, since it is used to create the drop glue for the generator. We'd get +//! infinite recursion otherwise. +//! +//! This pass creates the implementation for the Generator::resume function and the drop shim +//! for the generator based on the MIR input. It converts the generator argument from Self to +//! &mut Self adding derefs in the MIR as needed. 
It computes the final layout of the generator
+//! struct which looks like this:
+//!     First upvars are stored
+//!     It is followed by the generator state field.
+//!     Then finally the MIR locals which are live across a suspension point are stored.
+//!
+//!     struct Generator {
+//!         upvars...,
+//!         state: u32,
+//!         mir_locals...,
+//!     }
+//!
+//! This pass computes the meaning of the state field and the MIR locals which are live
+//! across a suspension point. There are however two hardcoded generator states:
+//!     0 - Generator has not been resumed yet
+//!     1 - Generator has been poisoned
+//!
+//! It also rewrites `return x` and `yield y` as setting a new generator state and returning
+//! GeneratorState::Complete(x) and GeneratorState::Yielded(y) respectively.
+//! MIR locals which are live across a suspension point are moved to the generator struct
+//! with references to them being updated with references to the generator struct.
+//!
+//! The pass creates two functions which have a switch on the generator state giving
+//! the action to take.
+//!
+//! One of them is the implementation of Generator::resume.
+//! For generators which have already returned it panics.
+//! For generators with state 0 (unresumed) it starts the execution of the generator.
+//! For generators with state 1 (poisoned) it panics.
+//! Otherwise it continues the execution from the last suspension point.
+//!
+//! The other function is the drop glue for the generator.
+//! For generators which have already returned it does nothing.
+//! For generators with state 0 (unresumed) it drops the upvars of the generator.
+//! For generators with state 1 (poisoned) it does nothing.
+//! Otherwise it drops all the values in scope at the last suspension point.
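To make the layout and `resume` switch described above concrete, here is a minimal hand-written sketch of the kind of state machine the pass produces. Everything in it is an illustrative assumption: the `SketchGenerator` struct, the locally defined `GeneratorState` enum, the state values other than 0 (unresumed) and 1 (poisoned), and the `|| { yield 1; return "done" }` example. The real pass operates on MIR, emits a `SwitchInt` over the state field, and implements the unstable generator traits rather than an ordinary method.

```rust
// Sketch of the lowering described above; names and state values other than
// 0 (unresumed) and 1 (poisoned) are illustrative assumptions only.
enum GeneratorState<Y, R> {
    Yielded(Y),
    Complete(R),
}

// Conceptual lowering of a generator like `|| { yield 1; return "done" }`:
// upvars would come first (none here), then the state field, then any locals
// live across the suspension point (none here).
struct SketchGenerator {
    state: u32, // 0 = unresumed, 1 = poisoned, 2 = suspended at the yield, 3 = returned
}

impl SketchGenerator {
    // Stand-in for the generated resume function: a switch on `state`.
    fn resume(&mut self) -> GeneratorState<i32, &'static str> {
        match self.state {
            0 => {
                // First resume: run up to the `yield` and record the suspension point.
                self.state = 2;
                GeneratorState::Yielded(1)
            }
            2 => {
                // Resume after the `yield`: run to the `return` and mark as finished.
                self.state = 3;
                GeneratorState::Complete("done")
            }
            // Poisoned or already-returned generators panic on resume.
            _ => panic!("generator resumed after completion"),
        }
    }
}

fn main() {
    let mut g = SketchGenerator { state: 0 };
    match g.resume() {
        GeneratorState::Yielded(v) => assert_eq!(v, 1),
        GeneratorState::Complete(_) => unreachable!(),
    }
    match g.resume() {
        GeneratorState::Complete(s) => assert_eq!(s, "done"),
        GeneratorState::Yielded(_) => unreachable!(),
    }
}
```

The drop glue is built along the same lines: a second switch over the state that drops the upvars in state 0, does nothing for returned or poisoned generators, and otherwise drops the values live at the recorded suspension point.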
+
+use rustc::hir;
+use rustc::hir::def_id::DefId;
+use rustc::middle::const_val::ConstVal;
+use rustc::mir::*;
+use rustc::mir::transform::{MirPass, MirSource};
+use rustc::mir::visit::{LvalueContext, MutVisitor};
+use rustc::ty::{self, TyCtxt, AdtDef, Ty, GeneratorInterior};
+use rustc::ty::subst::{Kind, Substs};
+use util::dump_mir;
+use util::liveness;
+use rustc_const_math::ConstInt;
+use rustc_data_structures::indexed_vec::Idx;
+use std::collections::HashMap;
+use std::borrow::Cow;
+use std::iter::once;
+use std::mem;
+use transform::simplify;
+use transform::no_landing_pads::no_landing_pads;
+
+pub struct StateTransform;
+
+struct RenameLocalVisitor {
+    from: Local,
+    to: Local,
+}
+
+impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor {
+    fn visit_local(&mut self,
+                   local: &mut Local,
+                   _: LvalueContext<'tcx>,
+                   _: Location) {
+        if *local == self.from {
+            *local = self.to;
+        }
+    }
+}
+
+struct DerefArgVisitor;
+
+impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor {
+    fn visit_local(&mut self,
+                   local: &mut Local,
+                   _: LvalueContext<'tcx>,
+                   _: Location) {
+        assert_ne!(*local, self_arg());
+    }
+
+    fn visit_lvalue(&mut self,
+                    lvalue: &mut Lvalue<'tcx>,
+                    context: LvalueContext<'tcx>,
+                    location: Location) {
+        if *lvalue == Lvalue::Local(self_arg()) {
+            *lvalue = Lvalue::Projection(Box::new(Projection {
+                base: lvalue.clone(),
+                elem: ProjectionElem::Deref,
+            }));
+        } else {
+            self.super_lvalue(lvalue, context, location);
+        }
+    }
+}
+
+fn self_arg() -> Local {
+    Local::new(1)
+}
+
+struct TransformVisitor<'a, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    state_adt_ref: &'tcx AdtDef,
+    state_substs: &'tcx Substs<'tcx>,
+
+    // The index of the generator state in the generator struct
+    state_field: usize,
+
+    // Mapping from Local to (type of local, generator struct index)
+    remap: HashMap<Local, (Ty<'tcx>, usize)>,
+
+    // The number of generator states. 0 is unresumed, 1 is poisoned. So this is initialized to 2
+    bb_target_count: u32,
+
+    // Map from a (which block to resume execution at, which block to use to drop the generator)
+    // to a generator state
+    bb_targets: HashMap<(BasicBlock, Option<BasicBlock>), u32>,
+
+    // The original RETURN_POINTER local
+    new_ret_local: Local,
+
+    // The block to resume execution at for a `Return` terminator
+    return_block: BasicBlock,
+}
+
+impl<'a, 'tcx> TransformVisitor<'a, 'tcx> {
+    // Make a GeneratorState rvalue
+    fn make_state(&self, idx: usize, val: Operand<'tcx>) -> Rvalue<'tcx> {
+        let adt = AggregateKind::Adt(self.state_adt_ref, idx, self.state_substs, None);
+        Rvalue::Aggregate(box adt, vec![val])
+    }
+
+    // Create a Lvalue referencing a generator struct field
+    fn make_field(&self, idx: usize, ty: Ty<'tcx>) -> Lvalue<'tcx> {
+        let base = Lvalue::Local(self_arg());
+        let field = Projection {
+            base: base,
+            elem: ProjectionElem::Field(Field::new(idx), ty),
+        };
+        Lvalue::Projection(Box::new(field))
+    }
+
+    // Create a statement which changes the generator state
+    fn set_state(&self, state_disc: u32, source_info: SourceInfo) -> Statement<'tcx> {
+        let state = self.make_field(self.state_field, self.tcx.types.u32);
+        let val = Operand::Constant(box Constant {
+            span: source_info.span,
+            ty: self.tcx.types.u32,
+            literal: Literal::Value {
+                value: ConstVal::Integral(ConstInt::U32(state_disc)),
+            },
+        });
+        Statement {
+            source_info,
+            kind: StatementKind::Assign(state, Rvalue::Use(val)),
+        }
+    }
+}
+
+impl<'a, 'tcx> MutVisitor<'tcx> for TransformVisitor<'a, 'tcx> {
+    fn visit_local(&mut self,
+                   local: &mut Local,
+                   _: LvalueContext<'tcx>,
+                   _: Location) {
+        assert_eq!(self.remap.get(local), None);
+    }
+
+    fn visit_lvalue(&mut self,
+                    lvalue: &mut Lvalue<'tcx>,
+                    context: LvalueContext<'tcx>,
+                    location: Location) {
+        if let Lvalue::Local(l) = *lvalue {
+            // Replace a Local in the remap with a generator struct access
+            if let Some(&(ty, idx)) = self.remap.get(&l) {
+                *lvalue = self.make_field(idx, ty);
+            }
+        } else {
+            self.super_lvalue(lvalue, context, location);
+        }
+    }
+
+    fn visit_basic_block_data(&mut self,
+                              block: BasicBlock,
+                              data: &mut BasicBlockData<'tcx>) {
+        // Remove StorageLive and StorageDead statements for remapped locals
+        data.retain_statements(|s| {
+            match s.kind {
+                StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => {
+                    !self.remap.contains_key(&l)
+                }
+                _ => true
+            }
+        });
+
+        let ret_val = match data.terminator().kind {
+            TerminatorKind::Return => Some((1,
+                self.return_block,
+                Operand::Consume(Lvalue::Local(self.new_ret_local)),
+                None)),
+            TerminatorKind::Yield { ref value, resume, drop } => Some((0,
+                resume,
+                value.clone(),
+                drop)),
+            _ => None
+        };
+
+        if let Some((state_idx, resume, v, drop)) = ret_val {
+            let bb_idx = {
+                let bb_targets = &mut self.bb_targets;
+                let bb_target = &mut self.bb_target_count;
+                *bb_targets.entry((resume, drop)).or_insert_with(|| {
+                    let target = *bb_target;
+                    *bb_target = target.checked_add(1).unwrap();
+                    target
+                })
+            };
+            let source_info = data.terminator().source_info;
+            data.statements.push(self.set_state(bb_idx, source_info));
+            data.statements.push(Statement {
+                source_info,
+                kind: StatementKind::Assign(Lvalue::Local(RETURN_POINTER),
+                                            self.make_state(state_idx, v)),
+            });
+            data.terminator.as_mut().unwrap().kind = TerminatorKind::Return;
+        }
+
+        self.super_basic_block_data(block, data);
+    }
+}
+
TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId, + mir: &mut Mir<'tcx>) { + let gen_ty = mir.local_decls.raw[1].ty; + + let region = ty::ReFree(ty::FreeRegion { + scope: def_id, + bound_region: ty::BoundRegion::BrEnv, + }); + + let region = tcx.mk_region(region); + + let ref_gen_ty = tcx.mk_ref(region, ty::TypeAndMut { + ty: gen_ty, + mutbl: hir::MutMutable + }); + + // Replace the by value generator argument + mir.local_decls.raw[1].ty = ref_gen_ty; + + // Add a deref to accesses of the generator state + DerefArgVisitor.visit_mir(mir); +} + +fn replace_result_variable<'tcx>(ret_ty: Ty<'tcx>, + mir: &mut Mir<'tcx>) -> Local { + let source_info = SourceInfo { + span: mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + let new_ret = LocalDecl { + mutability: Mutability::Mut, + ty: ret_ty, + name: None, + source_info, + internal: false, + is_user_variable: false, + }; + let new_ret_local = Local::new(mir.local_decls.len()); + mir.local_decls.push(new_ret); + mir.local_decls.swap(0, new_ret_local.index()); + + RenameLocalVisitor { + from: RETURN_POINTER, + to: new_ret_local, + }.visit_mir(mir); + + new_ret_local +} + +fn locals_live_across_suspend_points<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + mir: &Mir<'tcx>, + source: MirSource) -> liveness::LocalSet { + let mut set = liveness::LocalSet::new_empty(mir.local_decls.len()); + let result = liveness::liveness_of_locals(mir); + liveness::dump_mir(tcx, "generator_liveness", source, mir, &result); + + for (block, data) in mir.basic_blocks().iter_enumerated() { + if let TerminatorKind::Yield { .. } = data.terminator().kind { + set.union(&result.outs[block]); + } + } + + // The generator argument is ignored + set.remove(&self_arg()); + + set +} + +fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + source: MirSource, + interior: GeneratorInterior<'tcx>, + mir: &mut Mir<'tcx>) + -> (HashMap, usize)>, GeneratorLayout<'tcx>) +{ + // Use a liveness analysis to compute locals which are live across a suspension point + let live_locals = locals_live_across_suspend_points(tcx, mir, source); + + // Erase regions from the types passed in from typeck so we can compare them with + // MIR types + let allowed = tcx.erase_regions(&interior.as_slice()); + + for (local, decl) in mir.local_decls.iter_enumerated() { + // Ignore locals which are internal or not live + if !live_locals.contains(&local) || decl.internal { + continue; + } + + // Sanity check that typeck knows about the type of locals which are + // live across a suspension point + if !allowed.contains(&decl.ty) { + span_bug!(mir.span, + "Broken MIR: generator contains type {} in MIR, \ + but typeck only knows about {}", + decl.ty, + interior); + } + } + + let upvar_len = mir.upvar_decls.len(); + let dummy_local = LocalDecl::new_internal(tcx.mk_nil(), mir.span); + + // Gather live locals and their indices replacing values in mir.local_decls with a dummy + // to avoid changing local indices + let live_decls = live_locals.iter().map(|local| { + let var = mem::replace(&mut mir.local_decls[local], dummy_local.clone()); + (local, var) + }); + + // Create a map from local indices to generator struct indices. + // These are offset by (upvar_len + 1) because of fields which comes before locals. + // We also create a vector of the LocalDecls of these locals. 
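// As a concrete sketch (illustrative numbers only): with `upvar_len == 2` and
// live locals `_3` and `_7`, the generator struct fields come out as
//
//     field 0, 1  -> the two upvars
//     field 2     -> state: u32   (the state field sits at index `upvar_len`)
//     field 3     -> _3           (upvar_len + 1 + 0)
//     field 4     -> _7           (upvar_len + 1 + 1)
//
// so `remap` would contain `_3 => (type of _3, 3)` and `_7 => (type of _7, 4)`.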
+ let (remap, vars) = live_decls.enumerate().map(|(idx, (local, var))| { + ((local, (var.ty, upvar_len + 1 + idx)), var) + }).unzip(); + + let layout = GeneratorLayout { + fields: vars + }; + + (remap, layout) +} + +fn insert_entry_point<'tcx>(mir: &mut Mir<'tcx>, + block: BasicBlockData<'tcx>) { + mir.basic_blocks_mut().raw.insert(0, block); + + let blocks = mir.basic_blocks_mut().iter_mut(); + + for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) { + *target = BasicBlock::new(target.index() + 1); + } +} + +fn elaborate_generator_drops<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId, + mir: &mut Mir<'tcx>) { + use util::elaborate_drops::{elaborate_drop, Unwind}; + use util::patch::MirPatch; + use shim::DropShimElaborator; + + // Note that `elaborate_drops` only drops the upvars of a generator, and + // this is ok because `open_drop` can only be reached within that own + // generator's resume function. + + let param_env = tcx.param_env(def_id); + let gen = self_arg(); + + for block in mir.basic_blocks().indices() { + let (target, unwind, source_info) = match mir.basic_blocks()[block].terminator() { + &Terminator { + source_info, + kind: TerminatorKind::Drop { + location: Lvalue::Local(local), + target, + unwind + } + } if local == gen => (target, unwind, source_info), + _ => continue, + }; + let unwind = if let Some(unwind) = unwind { + Unwind::To(unwind) + } else { + Unwind::InCleanup + }; + let patch = { + let mut elaborator = DropShimElaborator { + mir: &mir, + patch: MirPatch::new(mir), + tcx, + param_env + }; + elaborate_drop( + &mut elaborator, + source_info, + &Lvalue::Local(gen), + (), + target, + unwind, + block + ); + elaborator.patch + }; + patch.apply(mir); + } +} + +fn create_generator_drop_shim<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + transform: &TransformVisitor<'a, 'tcx>, + def_id: DefId, + source: MirSource, + gen_ty: Ty<'tcx>, + mir: &Mir<'tcx>, + drop_clean: BasicBlock) -> Mir<'tcx> { + let mut mir = mir.clone(); + + let source_info = SourceInfo { + span: mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + let return_block = BasicBlock::new(mir.basic_blocks().len()); + mir.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Return, + }), + is_cleanup: false, + }); + + let mut cases: Vec<_> = transform.bb_targets.iter().filter_map(|(&(_, u), &s)| { + u.map(|d| (s, d)) + }).collect(); + + cases.insert(0, (0, drop_clean)); + + // The poisoned state 1 falls through to the default case which is just to return + + let switch = TerminatorKind::SwitchInt { + discr: Operand::Consume(transform.make_field(transform.state_field, tcx.types.u32)), + switch_ty: tcx.types.u32, + values: Cow::from(cases.iter().map(|&(i, _)| { + ConstInt::U32(i) + }).collect::>()), + targets: cases.iter().map(|&(_, d)| d).chain(once(return_block)).collect(), + }; + + insert_entry_point(&mut mir, BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: switch, + }), + is_cleanup: false, + }); + + for block in mir.basic_blocks_mut() { + let kind = &mut block.terminator_mut().kind; + if let TerminatorKind::GeneratorDrop = *kind { + *kind = TerminatorKind::Return; + } + } + + // Replace the return variable + let source_info = SourceInfo { + span: mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + mir.return_ty = tcx.mk_nil(); + mir.local_decls[RETURN_POINTER] = LocalDecl { + mutability: Mutability::Mut, + ty: tcx.mk_nil(), + name: None, + 
source_info, + internal: false, + is_user_variable: false, + }; + + make_generator_state_argument_indirect(tcx, def_id, &mut mir); + + // Change the generator argument from &mut to *mut + mir.local_decls[self_arg()] = LocalDecl { + mutability: Mutability::Mut, + ty: tcx.mk_ptr(ty::TypeAndMut { + ty: gen_ty, + mutbl: hir::Mutability::MutMutable, + }), + name: None, + source_info, + internal: false, + is_user_variable: false, + }; + + no_landing_pads(tcx, &mut mir); + + // Make sure we remove dead blocks to remove + // unrelated code from the resume part of the function + simplify::remove_dead_blocks(&mut mir); + + dump_mir(tcx, None, "generator_drop", &0, source, &mut mir); + + mir +} + +fn insert_panic_on_resume_after_return<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + mir: &mut Mir<'tcx>) { + let assert_block = BasicBlock::new(mir.basic_blocks().len()); + let term = TerminatorKind::Assert { + cond: Operand::Constant(box Constant { + span: mir.span, + ty: tcx.types.bool, + literal: Literal::Value { + value: ConstVal::Bool(false), + }, + }), + expected: true, + msg: AssertMessage::GeneratorResumedAfterReturn, + target: assert_block, + cleanup: None, + }; + + let source_info = SourceInfo { + span: mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + mir.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: term, + }), + is_cleanup: false, + }); +} + +fn create_generator_resume_function<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + mut transform: TransformVisitor<'a, 'tcx>, + def_id: DefId, + source: MirSource, + mir: &mut Mir<'tcx>) { + // Poison the generator when it unwinds + for block in mir.basic_blocks_mut() { + let source_info = block.terminator().source_info; + if let &TerminatorKind::Resume = &block.terminator().kind { + block.statements.push(transform.set_state(1, source_info)); + } + } + + let source_info = SourceInfo { + span: mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + let poisoned_block = BasicBlock::new(mir.basic_blocks().len()); + + let term = TerminatorKind::Assert { + cond: Operand::Constant(box Constant { + span: mir.span, + ty: tcx.types.bool, + literal: Literal::Value { + value: ConstVal::Bool(false), + }, + }), + expected: true, + msg: AssertMessage::GeneratorResumedAfterPanic, + target: transform.return_block, + cleanup: None, + }; + + mir.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: term, + }), + is_cleanup: false, + }); + + transform.bb_targets.insert((poisoned_block, None), 1); + + let switch = TerminatorKind::SwitchInt { + discr: Operand::Consume(transform.make_field(transform.state_field, tcx.types.u32)), + switch_ty: tcx.types.u32, + values: Cow::from(transform.bb_targets.values().map(|&i| { + ConstInt::U32(i) + }).collect::>()), + targets: transform.bb_targets.keys() + .map(|&(k, _)| k) + .chain(once(transform.return_block)) + .collect(), + }; + + insert_entry_point(mir, BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: switch, + }), + is_cleanup: false, + }); + + make_generator_state_argument_indirect(tcx, def_id, mir); + + no_landing_pads(tcx, mir); + + // Make sure we remove dead blocks to remove + // unrelated code from the drop part of the function + simplify::remove_dead_blocks(mir); + + dump_mir(tcx, None, "generator_resume", &0, source, mir); +} + +fn insert_clean_drop<'a, 'tcx>(mir: &mut Mir<'tcx>) -> BasicBlock { + let source_info = SourceInfo { + span: 
mir.span, + scope: ARGUMENT_VISIBILITY_SCOPE, + }; + + let return_block = BasicBlock::new(mir.basic_blocks().len()); + mir.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Return, + }), + is_cleanup: false, + }); + + // Create a block to destroy an unresumed generators. This can only destroy upvars. + let drop_clean = BasicBlock::new(mir.basic_blocks().len()); + let term = TerminatorKind::Drop { + location: Lvalue::Local(self_arg()), + target: return_block, + unwind: None, + }; + mir.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { + source_info, + kind: term, + }), + is_cleanup: false, + }); + + drop_clean +} + +impl MirPass for StateTransform { + fn run_pass<'a, 'tcx>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + source: MirSource, + mir: &mut Mir<'tcx>) { + let yield_ty = if let Some(yield_ty) = mir.yield_ty { + yield_ty + } else { + // This only applies to generators + return + }; + + assert!(mir.generator_drop.is_none()); + + let node_id = source.item_id(); + let def_id = tcx.hir.local_def_id(source.item_id()); + let hir_id = tcx.hir.node_to_hir_id(node_id); + + // Get the interior types which typeck computed + let interior = *tcx.typeck_tables_of(def_id).generator_interiors().get(hir_id).unwrap(); + + // The first argument is the generator type passed by value + let gen_ty = mir.local_decls.raw[1].ty; + + // Compute GeneratorState + let state_did = tcx.lang_items.gen_state().unwrap(); + let state_adt_ref = tcx.adt_def(state_did); + let state_substs = tcx.mk_substs([Kind::from(yield_ty), + Kind::from(mir.return_ty)].iter()); + let ret_ty = tcx.mk_adt(state_adt_ref, state_substs); + + // We rename RETURN_POINTER which has type mir.return_ty to new_ret_local + // RETURN_POINTER then is a fresh unused local with type ret_ty. + let new_ret_local = replace_result_variable(ret_ty, mir); + + // Extract locals which are live across suspension point into `layout` + // `remap` gives a mapping from local indices onto generator struct indices + let (remap, layout) = compute_layout(tcx, source, interior, mir); + + let state_field = mir.upvar_decls.len(); + + let mut bb_targets = HashMap::new(); + + // If we jump to the entry point, we should go to the initial 0 generator state. + // FIXME: Could this result in the need for destruction for state 0? + bb_targets.insert((BasicBlock::new(0), None), 0); + + // Run the transformation which converts Lvalues from Local to generator struct + // accesses for locals in `remap`. + // It also rewrites `return x` and `yield y` as writing a new generator state and returning + // GeneratorState::Complete(x) and GeneratorState::Yielded(y) respectively. + let mut transform = TransformVisitor { + tcx, + state_adt_ref, + state_substs, + remap, + bb_target_count: 2, + bb_targets, + new_ret_local, + state_field, + + // For returns we will resume execution at the next added basic block. + // This happens in `insert_panic_on_resume_after_return` + return_block: BasicBlock::new(mir.basic_blocks().len()), + }; + transform.visit_mir(mir); + + // Update our MIR struct to reflect the changed we've made + mir.return_ty = ret_ty; + mir.yield_ty = None; + mir.arg_count = 1; + mir.spread_arg = None; + mir.generator_layout = Some(layout); + + // Panic if we resumed after returning + insert_panic_on_resume_after_return(tcx, mir); + + // Insert `drop(generator_struct)` which is used to drop upvars for generators in + // the unresumed (0) state. 
+ // This is expanded to a drop ladder in `elaborate_generator_drops`. + let drop_clean = insert_clean_drop(mir); + + dump_mir(tcx, None, "generator_pre-elab", &0, source, mir); + + // Expand `drop(generator_struct)` to a drop ladder which destroys upvars. + // If any upvars are moved out of, drop elaboration will handle upvar destruction. + // However we need to also elaborate the code generated by `insert_clean_drop`. + elaborate_generator_drops(tcx, def_id, mir); + + dump_mir(tcx, None, "generator_post-transform", &0, source, mir); + + // Create a copy of our MIR and use it to create the drop shim for the generator + let drop_shim = create_generator_drop_shim(tcx, + &transform, + def_id, + source, + gen_ty, + &mir, + drop_clean); + + mir.generator_drop = Some(box drop_shim); + + // Create the Generator::resume function + create_generator_resume_function(tcx, transform, def_id, source, mir); + } +} diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs index 53b46dd2683fc..3f8070fb3aa31 100644 --- a/src/librustc_mir/transform/inline.rs +++ b/src/librustc_mir/transform/inline.rs @@ -180,6 +180,10 @@ impl<'a, 'tcx> Inliner<'a, 'tcx> { return false; } + // Cannot inline generators which haven't been transformed yet + if callee_mir.yield_ty.is_some() { + return false; + } let attrs = tcx.get_attrs(callsite.callee); let hint = attr::find_inline_attr(None, &attrs[..]); @@ -585,16 +589,6 @@ impl<'a, 'tcx> Integrator<'a, 'tcx> { new } - fn update_local(&self, local: Local) -> Option { - let idx = local.index(); - if idx < (self.args.len() + 1) { - return None; - } - let idx = idx - (self.args.len() + 1); - let local = Local::new(idx); - self.local_map.get(local).cloned() - } - fn arg_index(&self, arg: Local) -> Option { let idx = arg.index(); if idx > 0 && idx <= self.args.len() { @@ -606,32 +600,35 @@ impl<'a, 'tcx> Integrator<'a, 'tcx> { } impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> { + fn visit_local(&mut self, + local: &mut Local, + _ctxt: LvalueContext<'tcx>, + _location: Location) { + if *local == RETURN_POINTER { + match self.destination { + Lvalue::Local(l) => *local = l, + ref lval => bug!("Return lvalue is {:?}, not local", lval) + } + } + let idx = local.index() - 1; + if idx < self.args.len() { + match self.args[idx] { + Operand::Consume(Lvalue::Local(l)) => *local = l, + ref op => bug!("Arg operand `{:?}` is {:?}, not local", idx, op) + } + } + *local = self.local_map[Local::new(idx - self.args.len())]; + } + fn visit_lvalue(&mut self, lvalue: &mut Lvalue<'tcx>, _ctxt: LvalueContext<'tcx>, _location: Location) { - if let Lvalue::Local(ref mut local) = *lvalue { - if let Some(l) = self.update_local(*local) { - // Temp or Var; update the local reference - *local = l; - return; - } - } - if let Lvalue::Local(local) = *lvalue { - if local == RETURN_POINTER { - // Return pointer; update the lvalue itself - *lvalue = self.destination.clone(); - } else if local.index() < (self.args.len() + 1) { - // Argument, once again update the the lvalue itself - let idx = local.index() - 1; - if let Operand::Consume(ref lval) = self.args[idx] { - *lvalue = lval.clone(); - } else { - bug!("Arg operand `{:?}` is not an Lvalue use.", idx) - } - } + if let Lvalue::Local(RETURN_POINTER) = *lvalue { + // Return pointer; update the lvalue itself + *lvalue = self.destination.clone(); } else { - self.super_lvalue(lvalue, _ctxt, _location) + self.super_lvalue(lvalue, _ctxt, _location); } } @@ -657,6 +654,8 @@ impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> 
{ self.super_terminator_kind(block, kind, loc); match *kind { + TerminatorKind::GeneratorDrop | + TerminatorKind::Yield { .. } => bug!(), TerminatorKind::Goto { ref mut target} => { *target = self.update_target(*target); } diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index d8dffa036621a..e0f2a40ab0732 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -40,6 +40,7 @@ pub mod dump_mir; pub mod deaggregator; pub mod instcombine; pub mod copy_prop; +pub mod generator; pub mod inline; pub mod nll; diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs index 8595663ba18c4..fa6bb644871dc 100644 --- a/src/librustc_mir/transform/no_landing_pads.rs +++ b/src/librustc_mir/transform/no_landing_pads.rs @@ -43,6 +43,8 @@ impl<'tcx> MutVisitor<'tcx> for NoLandingPads { TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::Unreachable | + TerminatorKind::GeneratorDrop | + TerminatorKind::Yield { .. } | TerminatorKind::SwitchInt { .. } => { /* nothing to do */ }, diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index 1665cb2f15eb8..ca6eda5c2d716 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -83,52 +83,49 @@ struct TempCollector<'tcx> { } impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> { - fn visit_lvalue(&mut self, - lvalue: &Lvalue<'tcx>, - context: LvalueContext<'tcx>, - location: Location) { - self.super_lvalue(lvalue, context, location); - if let Lvalue::Local(index) = *lvalue { - // We're only interested in temporaries - if self.mir.local_kind(index) != LocalKind::Temp { - return; - } + fn visit_local(&mut self, + &index: &Local, + context: LvalueContext<'tcx>, + location: Location) { + // We're only interested in temporaries + if self.mir.local_kind(index) != LocalKind::Temp { + return; + } - // Ignore drops, if the temp gets promoted, - // then it's constant and thus drop is noop. - // Storage live ranges are also irrelevant. - if context.is_drop() || context.is_storage_marker() { - return; - } + // Ignore drops, if the temp gets promoted, + // then it's constant and thus drop is noop. + // Storage live ranges are also irrelevant. + if context.is_drop() || context.is_storage_marker() { + return; + } - let temp = &mut self.temps[index]; - if *temp == TempState::Undefined { - match context { - LvalueContext::Store | - LvalueContext::Call => { - *temp = TempState::Defined { - location, - uses: 0 - }; - return; - } - _ => { /* mark as unpromotable below */ } - } - } else if let TempState::Defined { ref mut uses, .. } = *temp { - // We always allow borrows, even mutable ones, as we need - // to promote mutable borrows of some ZSTs e.g. `&mut []`. - let allowed_use = match context { - LvalueContext::Borrow {..} => true, - _ => context.is_nonmutating_use() - }; - if allowed_use { - *uses += 1; + let temp = &mut self.temps[index]; + if *temp == TempState::Undefined { + match context { + LvalueContext::Store | + LvalueContext::Call => { + *temp = TempState::Defined { + location, + uses: 0 + }; return; } - /* mark as unpromotable below */ + _ => { /* mark as unpromotable below */ } } - *temp = TempState::Unpromotable; + } else if let TempState::Defined { ref mut uses, .. } = *temp { + // We always allow borrows, even mutable ones, as we need + // to promote mutable borrows of some ZSTs e.g. `&mut []`. 
+ let allowed_use = match context { + LvalueContext::Borrow {..} => true, + _ => context.is_nonmutating_use() + }; + if allowed_use { + *uses += 1; + return; + } + /* mark as unpromotable below */ } + *temp = TempState::Unpromotable; } fn visit_source_info(&mut self, source_info: &SourceInfo) { @@ -326,16 +323,13 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { /// Replaces all temporaries with their promoted counterparts. impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> { - fn visit_lvalue(&mut self, - lvalue: &mut Lvalue<'tcx>, - context: LvalueContext<'tcx>, - location: Location) { - if let Lvalue::Local(ref mut temp) = *lvalue { - if self.source.local_kind(*temp) == LocalKind::Temp { - *temp = self.promote_temp(*temp); - } + fn visit_local(&mut self, + local: &mut Local, + _: LvalueContext<'tcx>, + _: Location) { + if self.source.local_kind(*local) == LocalKind::Temp { + *local = self.promote_temp(*local); } - self.super_lvalue(lvalue, context, location); } } @@ -392,6 +386,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, }).into_iter().collect(), IndexVec::new(), ty, + None, initial_locals, 0, vec![], @@ -411,8 +406,8 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, block.statements.retain(|statement| { match statement.kind { StatementKind::Assign(Lvalue::Local(index), _) | - StatementKind::StorageLive(Lvalue::Local(index)) | - StatementKind::StorageDead(Lvalue::Local(index)) => { + StatementKind::StorageLive(index) | + StatementKind::StorageDead(index) => { !promoted(index) } _ => true diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index ee99bb7d9d520..415421757c5c5 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -15,6 +15,7 @@ //! diagnostics as to why a constant rvalue wasn't promoted. use rustc_data_structures::bitvec::BitVector; +use rustc_data_structures::indexed_set::IdxSetBuf; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc::hir; use rustc::hir::map as hir_map; @@ -33,6 +34,7 @@ use syntax::feature_gate::UnstableFeatures; use syntax_pos::{Span, DUMMY_SP}; use std::fmt; +use std::rc::Rc; use std::usize; use super::promote_consts::{self, Candidate, TempState}; @@ -120,13 +122,13 @@ struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { return_qualif: Option, qualif: Qualif, const_fn_arg_vars: BitVector, + local_needs_drop: IndexVec>, temp_promotion_state: IndexVec, promotion_candidates: Vec } impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, def_id: DefId, mir: &'a Mir<'tcx>, mode: Mode) @@ -141,11 +143,12 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { mir, rpo, tcx, - param_env, + param_env: tcx.param_env(def_id), temp_qualif: IndexVec::from_elem(None, &mir.local_decls), return_qualif: None, qualif: Qualif::empty(), const_fn_arg_vars: BitVector::new(mir.local_decls.len()), + local_needs_drop: IndexVec::from_elem(None, &mir.local_decls), temp_promotion_state: temps, promotion_candidates: vec![] } @@ -193,16 +196,26 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { self.add(original); } + /// Check for NEEDS_DROP (from an ADT or const fn call) and + /// error, unless we're in a function. + fn always_deny_drop(&self) { + self.deny_drop_with_feature_gate_override(false); + } + /// Check for NEEDS_DROP (from an ADT or const fn call) and /// error, unless we're in a function, or the feature-gate /// for globals with destructors is enabled. 
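/// (A sketch of the dispatch below, with no extra behaviour implied:
/// `always_deny_drop()` forwards to `deny_drop_with_feature_gate_override(false)`
/// and `deny_drop()` forwards to `deny_drop_with_feature_gate_override(true)`,
/// so only `deny_drop` can be silenced by the `drop_types_in_const` feature gate,
/// while `always_deny_drop` reports the error whenever the value needs drop and
/// we are not in `Mode::Fn`.)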
fn deny_drop(&self) { + self.deny_drop_with_feature_gate_override(true); + } + + fn deny_drop_with_feature_gate_override(&self, allow_gate: bool) { if self.mode == Mode::Fn || !self.qualif.intersects(Qualif::NEEDS_DROP) { return; } // Static and const fn's allow destructors, but they're feature-gated. - let msg = if self.mode != Mode::Const { + let msg = if allow_gate && self.mode != Mode::Const { // Feature-gate for globals with destructors is enabled. if self.tcx.sess.features.borrow().drop_types_in_const { return; @@ -223,7 +236,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { let mut err = struct_span_err!(self.tcx.sess, self.span, E0493, "{}", msg); - if self.mode != Mode::Const { + if allow_gate && self.mode != Mode::Const { help!(&mut err, "in Nightly builds, add `#![feature(drop_types_in_const)]` \ to the crate attributes to enable"); @@ -231,7 +244,8 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { self.find_drop_implementation_method_span() .map(|span| err.span_label(span, "destructor defined here")); - err.span_label(self.span, "constants cannot have destructors"); + err.span_label(self.span, + format!("{}s cannot have destructors", self.mode)); } err.emit(); @@ -314,6 +328,15 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { return; } + // When initializing a local, record whether the *value* being + // stored in it needs dropping, which it may not, even if its + // type does, e.g. `None::`. + if let Lvalue::Local(local) = *dest { + if qualif.intersects(Qualif::NEEDS_DROP) { + self.local_needs_drop[local] = Some(self.span); + } + } + match *dest { Lvalue::Local(index) if self.mir.local_kind(index) == LocalKind::Temp => { debug!("store to temp {:?}", index); @@ -346,7 +369,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } /// Qualify a whole const, static initializer or const fn. - fn qualify_const(&mut self) -> Qualif { + fn qualify_const(&mut self) -> (Qualif, Rc>) { debug!("qualifying {} {:?}", self.mode, self.def_id); let mir = self.mir; @@ -360,7 +383,6 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { let target = match mir[bb].terminator().kind { TerminatorKind::Goto { target } | - // Drops are considered noops. TerminatorKind::Drop { target, .. } | TerminatorKind::Assert { target, .. } | TerminatorKind::Call { destination: Some((_, target)), .. } => { @@ -369,12 +391,14 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { // Non-terminating calls cannot produce any value. TerminatorKind::Call { destination: None, .. } => { - return Qualif::empty(); + break; } TerminatorKind::SwitchInt {..} | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Resume | + TerminatorKind::GeneratorDrop | + TerminatorKind::Yield { .. } | TerminatorKind::Unreachable => None, TerminatorKind::Return => { @@ -449,7 +473,25 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } } } - self.qualif + + // Collect all the temps we need to promote. + let mut promoted_temps = IdxSetBuf::new_empty(self.temp_promotion_state.len()); + + for candidate in &self.promotion_candidates { + match *candidate { + Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => { + match self.mir[bb].statements[stmt_idx].kind { + StatementKind::Assign(_, Rvalue::Ref(_, _, Lvalue::Local(index))) => { + promoted_temps.add(&index); + } + _ => {} + } + } + Candidate::ShuffleIndices(_) => {} + } + } + + (self.qualif, Rc::new(promoted_temps)) } } @@ -457,33 +499,40 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { /// For functions (constant or not), it also records /// candidates for promotion in promotion_candidates. 
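/// (Summary of the `visit_local` rules below: the return pointer makes the
/// expression non-const, arguments add `Qualif::FN_ARGUMENT`, user variables add
/// `Qualif::NOT_CONST`, and temps add `Qualif::NOT_PROMOTABLE` when they are not
/// promotable and then either forward their recorded qualif or fall back to
/// non-const when none was recorded.)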
impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { + fn visit_local(&mut self, + &local: &Local, + _: LvalueContext<'tcx>, + _: Location) { + match self.mir.local_kind(local) { + LocalKind::ReturnPointer => { + self.not_const(); + } + LocalKind::Arg => { + self.add(Qualif::FN_ARGUMENT); + } + LocalKind::Var => { + self.add(Qualif::NOT_CONST); + } + LocalKind::Temp => { + if !self.temp_promotion_state[local].is_promotable() { + self.add(Qualif::NOT_PROMOTABLE); + } + + if let Some(qualif) = self.temp_qualif[local] { + self.add(qualif); + } else { + self.not_const(); + } + } + } + } + fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext<'tcx>, location: Location) { match *lvalue { - Lvalue::Local(local) => match self.mir.local_kind(local) { - LocalKind::ReturnPointer => { - self.not_const(); - } - LocalKind::Arg => { - self.add(Qualif::FN_ARGUMENT); - } - LocalKind::Var => { - self.add(Qualif::NOT_CONST); - } - LocalKind::Temp => { - if !self.temp_promotion_state[local].is_promotable() { - self.add(Qualif::NOT_PROMOTABLE); - } - - if let Some(qualif) = self.temp_qualif[local] { - self.add(qualif); - } else { - self.not_const(); - } - } - }, + Lvalue::Local(ref local) => self.visit_local(local, context, location), Lvalue::Static(ref global) => { self.add(Qualif::STATIC); @@ -493,6 +542,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { span_err!(self.tcx.sess, self.span, E0625, "thread-local statics cannot be \ accessed at compile-time"); + self.add(Qualif::NOT_CONST); return; } } @@ -558,22 +608,32 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { match *operand { - Operand::Consume(_) => { + Operand::Consume(ref lvalue) => { self.nest(|this| { this.super_operand(operand, location); this.try_consume(); }); + + // Mark the consumed locals to indicate later drops are noops. + if let Lvalue::Local(local) = *lvalue { + self.local_needs_drop[local] = None; + } } Operand::Constant(ref constant) => { - if let Literal::Item { def_id, substs } = constant.literal { - // Don't peek inside generic (associated) constants. - if substs.types().next().is_some() { + if let Literal::Item { def_id, substs: _ } = constant.literal { + // Don't peek inside trait associated constants. + if self.tcx.trait_of_item(def_id).is_some() { self.add_type(constant.ty); } else { - let bits = self.tcx.at(constant.span).mir_const_qualif(def_id); + let (bits, _) = self.tcx.at(constant.span).mir_const_qualif(def_id); let qualif = Qualif::from_bits(bits).expect("invalid mir_const_qualif"); self.add(qualif); + + // Just in case the type is more specific than + // the definition, e.g. impl associated const + // with type parameters, take it into account. + self.qualif.restrict(constant.ty, self.tcx, self.param_env); } // Let `const fn` transitively have destructors, @@ -669,13 +729,11 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { // We might have a candidate for promotion. let candidate = Candidate::Ref(location); - if self.mode == Mode::Fn || self.mode == Mode::ConstFn { - if !self.qualif.intersects(Qualif::NEVER_PROMOTE) { - // We can only promote direct borrows of temps. - if let Lvalue::Local(local) = *lvalue { - if self.mir.local_kind(local) == LocalKind::Temp { - self.promotion_candidates.push(candidate); - } + if !self.qualif.intersects(Qualif::NEVER_PROMOTE) { + // We can only promote direct borrows of temps. 
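// For instance (an illustrative case): in `let r = &(1, 2);` the tuple is first
// materialized into a temp and `r` borrows that temp directly, so the borrow can
// be recorded as a candidate here; borrows of named variables or of projections
// such as `&tuple.0` never reach this push.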
+ if let Lvalue::Local(local) = *lvalue { + if self.mir.local_kind(local) == LocalKind::Temp { + self.promotion_candidates.push(candidate); } } } @@ -864,6 +922,30 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { } self.assign(dest, location); } + } else if let TerminatorKind::Drop { location: ref lvalue, .. } = *kind { + self.super_terminator_kind(bb, kind, location); + + // Deny *any* live drops anywhere other than functions. + if self.mode != Mode::Fn { + // HACK(eddyb) Emulate a bit of dataflow analysis, + // conservatively, that drop elaboration will do. + let needs_drop = if let Lvalue::Local(local) = *lvalue { + self.local_needs_drop[local] + } else { + None + }; + + if let Some(span) = needs_drop { + let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx); + self.add_type(ty); + + // Use the original assignment span to be more precise. + let old_span = self.span; + self.span = span; + self.always_deny_drop(); + self.span = old_span; + } + } } else { // Qualify any operands inside other terminators. self.super_terminator_kind(bb, kind, location); @@ -942,7 +1024,7 @@ pub fn provide(providers: &mut Providers) { fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> u8 { + -> (u8, Rc>) { // NB: This `borrow()` is guaranteed to be valid (i.e., the value // cannot yet be stolen), because `mir_validated()`, which steals // from `mir_const(), forces this query to execute before @@ -950,13 +1032,13 @@ fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mir = &tcx.mir_const(def_id).borrow(); if mir.return_ty.references_error() { - return Qualif::NOT_CONST.bits(); + tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: Mir had errors"); + return (Qualif::NOT_CONST.bits(), Rc::new(IdxSetBuf::new_empty(0))); } - let param_env = tcx.param_env(def_id); - - let mut qualifier = Qualifier::new(tcx, param_env, def_id, mir, Mode::Const); - qualifier.qualify_const().bits() + let mut qualifier = Qualifier::new(tcx, def_id, mir, Mode::Const); + let (qualif, promoted_temps) = qualifier.qualify_const(); + (qualif.bits(), promoted_temps) } pub struct QualifyAndPromoteConstants; @@ -966,8 +1048,15 @@ impl MirPass for QualifyAndPromoteConstants { tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) { + // There's not really any point in promoting errorful MIR. + if mir.return_ty.references_error() { + tcx.sess.delay_span_bug(mir.span, "QualifyAndPromoteConstants: Mir had errors"); + return; + } + let id = src.item_id(); let def_id = tcx.hir.local_def_id(id); + let mut const_promoted_temps = None; let mode = match src { MirSource::Fn(_) => { if tcx.is_const_fn(def_id) { @@ -976,19 +1065,21 @@ impl MirPass for QualifyAndPromoteConstants { Mode::Fn } } + MirSource::Const(_) => { + const_promoted_temps = Some(tcx.mir_const_qualif(def_id).1); + Mode::Const + } MirSource::Static(_, hir::MutImmutable) => Mode::Static, MirSource::Static(_, hir::MutMutable) => Mode::StaticMut, - MirSource::Const(_) | + MirSource::GeneratorDrop(_) | MirSource::Promoted(..) => return }; - let param_env = tcx.param_env(def_id); if mode == Mode::Fn || mode == Mode::ConstFn { // This is ugly because Qualifier holds onto mir, // which can't be mutated until its scope ends. let (temps, candidates) = { - let mut qualifier = Qualifier::new(tcx, param_env, - def_id, mir, mode); + let mut qualifier = Qualifier::new(tcx, def_id, mir, mode); if mode == Mode::ConstFn { // Enforce a constant-like CFG for `const fn`. 
qualifier.qualify_const(); @@ -1004,8 +1095,37 @@ impl MirPass for QualifyAndPromoteConstants { // Do the actual promotion, now that we know what's viable. promote_consts::promote_candidates(mir, tcx, temps, candidates); } else { - let mut qualifier = Qualifier::new(tcx, param_env, def_id, mir, mode); - qualifier.qualify_const(); + let promoted_temps = if mode == Mode::Const { + // Already computed by `mir_const_qualif`. + const_promoted_temps.unwrap() + } else { + Qualifier::new(tcx, def_id, mir, mode).qualify_const().1 + }; + + // In `const` and `static` everything without `StorageDead` + // is `'static`, we don't have to create promoted MIR fragments, + // just remove `Drop` and `StorageDead` on "promoted" locals. + for block in mir.basic_blocks_mut() { + block.statements.retain(|statement| { + match statement.kind { + StatementKind::StorageDead(index) => { + !promoted_temps.contains(&index) + } + _ => true + } + }); + let terminator = block.terminator_mut(); + match terminator.kind { + TerminatorKind::Drop { location: Lvalue::Local(index), target, .. } => { + if promoted_temps.contains(&index) { + terminator.kind = TerminatorKind::Goto { + target, + }; + } + } + _ => {} + } + } } // Statics must be Sync. diff --git a/src/librustc_mir/transform/simplify.rs b/src/librustc_mir/transform/simplify.rs index 070250cda4d5d..89828cf375aa7 100644 --- a/src/librustc_mir/transform/simplify.rs +++ b/src/librustc_mir/transform/simplify.rs @@ -352,15 +352,11 @@ struct DeclMarker { } impl<'tcx> Visitor<'tcx> for DeclMarker { - fn visit_lvalue(&mut self, lval: &Lvalue<'tcx>, ctx: LvalueContext<'tcx>, loc: Location) { - if ctx == LvalueContext::StorageLive || ctx == LvalueContext::StorageDead { - // ignore these altogether, they get removed along with their otherwise unused decls. - return; + fn visit_local(&mut self, local: &Local, ctx: LvalueContext<'tcx>, _: Location) { + // ignore these altogether, they get removed along with their otherwise unused decls. + if ctx != LvalueContext::StorageLive && ctx != LvalueContext::StorageDead { + self.locals.insert(local.index()); } - if let Lvalue::Local(ref v) = *lval { - self.locals.insert(v.index()); - } - self.super_lvalue(lval, ctx, loc); } } @@ -373,22 +369,15 @@ impl<'tcx> MutVisitor<'tcx> for LocalUpdater { // Remove unnecessary StorageLive and StorageDead annotations. data.statements.retain(|stmt| { match stmt.kind { - StatementKind::StorageLive(ref lval) | StatementKind::StorageDead(ref lval) => { - match *lval { - Lvalue::Local(l) => self.map[l.index()] != !0, - _ => true - } + StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => { + self.map[l.index()] != !0 } _ => true } }); self.super_basic_block_data(block, data); } - fn visit_lvalue(&mut self, lval: &mut Lvalue<'tcx>, ctx: LvalueContext<'tcx>, loc: Location) { - match *lval { - Lvalue::Local(ref mut l) => *l = Local::new(self.map[l.index()]), - _ => (), - }; - self.super_lvalue(lval, ctx, loc); + fn visit_local(&mut self, l: &mut Local, _: LvalueContext<'tcx>, _: Location) { + *l = Local::new(self.map[l.index()]); } } diff --git a/src/librustc_mir/transform/type_check.rs b/src/librustc_mir/transform/type_check.rs index 72092042f8d39..d4da14ea96e72 100644 --- a/src/librustc_mir/transform/type_check.rs +++ b/src/librustc_mir/transform/type_check.rs @@ -34,7 +34,10 @@ fn mirbug(tcx: TyCtxt, span: Span, msg: &str) { macro_rules! 
span_mirbug { ($context:expr, $elem:expr, $($message:tt)*) => ({ mirbug($context.tcx(), $context.last_span, - &format!("broken MIR ({:?}): {}", $elem, format!($($message)*))) + &format!("broken MIR in {:?} ({:?}): {}", + $context.body_id, + $elem, + format_args!($($message)*))) }) } @@ -60,6 +63,7 @@ struct TypeVerifier<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>, last_span: Span, + body_id: ast::NodeId, errors_reported: bool } @@ -108,8 +112,9 @@ impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> { impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self { TypeVerifier { - cx, mir, + body_id: cx.body_id, + cx, last_span: mir.span, errors_reported: false } @@ -160,7 +165,7 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { base: LvalueTy<'tcx>, pi: &LvalueElem<'tcx>, lvalue: &Lvalue<'tcx>, - location: Location) + _: Location) -> LvalueTy<'tcx> { debug!("sanitize_projection: {:?} {:?} {:?}", base, pi, lvalue); let tcx = self.tcx(); @@ -176,9 +181,8 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { }) } } - ProjectionElem::Index(ref i) => { - self.visit_operand(i, location); - let index_ty = i.ty(self.mir, tcx); + ProjectionElem::Index(i) => { + let index_ty = Lvalue::Local(i).ty(self.mir, tcx).to_ty(tcx); if index_ty != tcx.types.usize { LvalueTy::Ty { ty: span_mirbug_and_err!(self, i, "index by non-usize {:?}", i) @@ -297,6 +301,19 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { }) } } + ty::TyGenerator(def_id, substs, _) => { + // Try upvars first. `field_tys` requires final optimized MIR. + if let Some(ty) = substs.upvar_tys(def_id, tcx).nth(field.index()) { + return Ok(ty); + } + + return match substs.field_tys(def_id, tcx).nth(field.index()) { + Some(ty) => Ok(ty), + None => Err(FieldAccessError::OutOfRange { + field_count: substs.field_tys(def_id, tcx).count() + 1 + }) + } + } ty::TyTuple(tys, _) => { return match tys.get(field.index()) { Some(&ty) => Ok(ty), @@ -403,15 +420,8 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { variant_index); }; } - StatementKind::StorageLive(ref lv) | - StatementKind::StorageDead(ref lv) => { - match *lv { - Lvalue::Local(_) => {} - _ => { - span_mirbug!(self, stmt, "bad lvalue: expected local"); - } - } - } + StatementKind::StorageLive(_) | + StatementKind::StorageDead(_) | StatementKind::InlineAsm { .. } | StatementKind::EndRegion(_) | StatementKind::Validate(..) | @@ -428,6 +438,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { TerminatorKind::Goto { .. } | TerminatorKind::Resume | TerminatorKind::Return | + TerminatorKind::GeneratorDrop | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } => { // no checks needed for these @@ -494,6 +505,22 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { } } } + TerminatorKind::Yield { ref value, .. } => { + let value_ty = value.ty(mir, tcx); + match mir.yield_ty { + None => span_mirbug!(self, term, "yield in non-generator"), + Some(ty) => { + if let Err(terr) = self.sub_types(value_ty, ty) { + span_mirbug!(self, + term, + "type of yield value is {:?}, but the yield type is {:?}: {:?}", + value_ty, + ty, + terr); + } + } + } + } } } @@ -620,6 +647,20 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { span_mirbug!(self, block, "return on cleanup block") } } + TerminatorKind::GeneratorDrop { .. 
} => { + if is_cleanup { + span_mirbug!(self, block, "generator_drop in cleanup block") + } + } + TerminatorKind::Yield { resume, drop, .. } => { + if is_cleanup { + span_mirbug!(self, block, "yield in cleanup block") + } + self.assert_iscleanup(mir, block, resume, is_cleanup); + if let Some(drop) = drop { + self.assert_iscleanup(mir, block, drop, is_cleanup); + } + } TerminatorKind::Unreachable => {} TerminatorKind::Drop { target, unwind, .. } | TerminatorKind::DropAndReplace { target, unwind, .. } | diff --git a/src/librustc_mir/util/def_use.rs b/src/librustc_mir/util/def_use.rs index 8263e149d84e7..bd9fb4bc3cc5f 100644 --- a/src/librustc_mir/util/def_use.rs +++ b/src/librustc_mir/util/def_use.rs @@ -10,7 +10,7 @@ //! Def-use analysis. -use rustc::mir::{Local, Location, Lvalue, Mir}; +use rustc::mir::{Local, Location, Mir}; use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor}; use rustc_data_structures::indexed_vec::IndexVec; use std::marker::PhantomData; @@ -51,7 +51,7 @@ impl<'tcx> DefUseAnalysis<'tcx> { } fn mutate_defs_and_uses(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F) - where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, + where F: for<'a> FnMut(&'a mut Local, LvalueContext<'tcx>, Location) { for lvalue_use in &self.info[local].defs_and_uses { @@ -65,8 +65,8 @@ impl<'tcx> DefUseAnalysis<'tcx> { pub fn replace_all_defs_and_uses_with(&self, local: Local, mir: &mut Mir<'tcx>, - new_lvalue: Lvalue<'tcx>) { - self.mutate_defs_and_uses(local, mir, |lvalue, _, _| *lvalue = new_lvalue.clone()) + new_local: Local) { + self.mutate_defs_and_uses(local, mir, |local, _, _| *local = new_local) } } @@ -74,30 +74,15 @@ struct DefUseFinder<'tcx> { info: IndexVec>, } -impl<'tcx> DefUseFinder<'tcx> { - fn lvalue_mut_info(&mut self, lvalue: &Lvalue<'tcx>) -> Option<&mut Info<'tcx>> { - let info = &mut self.info; - - if let Lvalue::Local(local) = *lvalue { - Some(&mut info[local]) - } else { - None - } - } -} - impl<'tcx> Visitor<'tcx> for DefUseFinder<'tcx> { - fn visit_lvalue(&mut self, - lvalue: &Lvalue<'tcx>, - context: LvalueContext<'tcx>, - location: Location) { - if let Some(ref mut info) = self.lvalue_mut_info(lvalue) { - info.defs_and_uses.push(Use { - context, - location, - }) - } - self.super_lvalue(lvalue, context, location) + fn visit_local(&mut self, + &local: &Local, + context: LvalueContext<'tcx>, + location: Location) { + self.info[local].defs_and_uses.push(Use { + context, + location, + }); } } @@ -134,7 +119,7 @@ struct MutateUseVisitor<'tcx, F> { impl<'tcx, F> MutateUseVisitor<'tcx, F> { fn new(query: Local, callback: F, _: &Mir<'tcx>) -> MutateUseVisitor<'tcx, F> - where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) { + where F: for<'a> FnMut(&'a mut Local, LvalueContext<'tcx>, Location) { MutateUseVisitor { query, callback, @@ -144,16 +129,13 @@ impl<'tcx, F> MutateUseVisitor<'tcx, F> { } impl<'tcx, F> MutVisitor<'tcx> for MutateUseVisitor<'tcx, F> - where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) { - fn visit_lvalue(&mut self, - lvalue: &mut Lvalue<'tcx>, + where F: for<'a> FnMut(&'a mut Local, LvalueContext<'tcx>, Location) { + fn visit_local(&mut self, + local: &mut Local, context: LvalueContext<'tcx>, location: Location) { - if let Lvalue::Local(local) = *lvalue { - if local == self.query { - (self.callback)(lvalue, context, location) - } + if *local == self.query { + (self.callback)(local, context, location) } - self.super_lvalue(lvalue, context, location) } } diff --git 
a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index d8a061f4b1991..f3b121f2eed57 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -565,7 +565,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> /// drop(ptr) fn drop_loop(&mut self, succ: BasicBlock, - cur: &Lvalue<'tcx>, + cur: Local, length_or_end: &Lvalue<'tcx>, ety: Ty<'tcx>, unwind: Unwind, @@ -584,20 +584,20 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> let one = self.constant_usize(1); let (ptr_next, cur_next) = if ptr_based { - (Rvalue::Use(use_(cur)), - Rvalue::BinaryOp(BinOp::Offset, use_(cur), one)) + (Rvalue::Use(use_(&Lvalue::Local(cur))), + Rvalue::BinaryOp(BinOp::Offset, use_(&Lvalue::Local(cur)), one)) } else { (Rvalue::Ref( tcx.types.re_erased, BorrowKind::Mut, - self.lvalue.clone().index(use_(cur))), - Rvalue::BinaryOp(BinOp::Add, use_(cur), one)) + self.lvalue.clone().index(cur)), + Rvalue::BinaryOp(BinOp::Add, use_(&Lvalue::Local(cur)), one)) }; let drop_block = BasicBlockData { statements: vec![ self.assign(ptr, ptr_next), - self.assign(cur, cur_next) + self.assign(&Lvalue::Local(cur), cur_next) ], is_cleanup: unwind.is_cleanup(), terminator: Some(Terminator { @@ -611,7 +611,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> let loop_block = BasicBlockData { statements: vec![ self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq, - use_(cur), + use_(&Lvalue::Local(cur)), use_(length_or_end))) ], is_cleanup: unwind.is_cleanup(), @@ -678,7 +678,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> tcx.types.usize }; - let cur = Lvalue::Local(self.new_temp(iter_ty)); + let cur = self.new_temp(iter_ty); let length = Lvalue::Local(self.new_temp(tcx.types.usize)); let length_or_end = if ptr_based { Lvalue::Local(self.new_temp(iter_ty)) @@ -688,7 +688,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> let unwind = self.unwind.map(|unwind| { self.drop_loop(unwind, - &cur, + cur, &length_or_end, ety, Unwind::InCleanup, @@ -698,12 +698,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> let succ = self.succ; // FIXME(#6393) let loop_block = self.drop_loop( succ, - &cur, + cur, &length_or_end, ety, unwind, ptr_based); + let cur = Lvalue::Local(cur); let zero = self.constant_usize(0); let mut drop_block_stmts = vec![]; drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone()))); @@ -752,7 +753,14 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> fn open_drop<'a>(&mut self) -> BasicBlock { let ty = self.lvalue_ty(self.lvalue); match ty.sty { - ty::TyClosure(def_id, substs) => { + ty::TyClosure(def_id, substs) | + // Note that `elaborate_drops` only drops the upvars of a generator, + // and this is ok because `open_drop` here can only be reached + // within that own generator's resume function. + // This should only happen for the self argument on the resume function. + // It effetively only contains upvars until the generator transformation runs. + // See librustc_mir/transform/generator.rs for more details. + ty::TyGenerator(def_id, substs, _) => { let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect(); self.open_drop_for_tuple(&tys) } diff --git a/src/librustc_mir/util/liveness.rs b/src/librustc_mir/util/liveness.rs new file mode 100644 index 0000000000000..e6d3a82ff9b53 --- /dev/null +++ b/src/librustc_mir/util/liveness.rs @@ -0,0 +1,245 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Liveness analysis which computes liveness of MIR local variables at the boundary of basic blocks +//! +//! This analysis considers references as being used only at the point of the +//! borrow. This means that this does not track uses because of references that +//! already exist: +//! +//! ```Rust +//! fn foo() { +//! x = 0; +//! // `x` is live here +//! GLOBAL = &x: *const u32; +//! // but not here, even while it can be accessed through `GLOBAL`. +//! foo(); +//! x = 1; +//! // `x` is live again here, because it is assigned to `OTHER_GLOBAL` +//! OTHER_GLOBAL = &x: *const u32; +//! // ... +//! } +//! ``` +//! +//! This means that users of this analysis still have to check whether +//! pre-existing references can be used to access the value (e.g. at movable +//! generator yield points, all pre-existing references are invalidated, so this +//! doesn't matter). + +use rustc::mir::*; +use rustc::mir::visit::{LvalueContext, Visitor}; +use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_data_structures::indexed_set::IdxSetBuf; +use util::pretty::{write_basic_block, dump_enabled, write_mir_intro}; +use rustc::mir::transform::MirSource; +use rustc::ty::item_path; +use std::path::{PathBuf, Path}; +use std::fs; +use rustc::ty::TyCtxt; +use std::io::{self, Write}; + +pub type LocalSet = IdxSetBuf; + +#[derive(Eq, PartialEq, Clone)] +struct BlockInfo { + defs: LocalSet, + uses: LocalSet, +} + +struct BlockInfoVisitor { + pre_defs: LocalSet, + defs: LocalSet, + uses: LocalSet, +} + +impl<'tcx> Visitor<'tcx> for BlockInfoVisitor { + fn visit_local(&mut self, + &local: &Local, + context: LvalueContext<'tcx>, + _: Location) { + match context { + LvalueContext::Store | + + // We let Call defined the result in both the success and unwind cases. + // This may not be right. + LvalueContext::Call | + + // Storage live and storage dead aren't proper defines, but we can ignore + // values that come before them. + LvalueContext::StorageLive | + LvalueContext::StorageDead => { + self.defs.add(&local); + } + LvalueContext::Projection(..) | + + // Borrows only consider their local used at the point of the borrow. + // This won't affect the results since we use this analysis for generators + // and we only care about the result at suspension points. Borrows cannot + // cross suspension points so this behavior is unproblematic. + LvalueContext::Borrow { .. } | + + LvalueContext::Inspect | + LvalueContext::Consume | + LvalueContext::Validate | + + // We consider drops to always be uses of locals. + // Drop eloboration should be run before this analysis otherwise + // the results might be too pessimistic. 
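// (For example: before drop elaboration a `Drop(_1)` terminator is still present
// on paths where `_1` has already been moved out of; counting it as a use would
// keep `_1` live, and therefore stored in the generator, across suspension points
// where it is actually dead.)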
+ LvalueContext::Drop => { + // Ignore uses which are already defined in this block + if !self.pre_defs.contains(&local) { + self.uses.add(&local); + } + } + } + } +} + +fn block<'tcx>(b: &BasicBlockData<'tcx>, locals: usize) -> BlockInfo { + let mut visitor = BlockInfoVisitor { + pre_defs: LocalSet::new_empty(locals), + defs: LocalSet::new_empty(locals), + uses: LocalSet::new_empty(locals), + }; + + let dummy_location = Location { block: BasicBlock::new(0), statement_index: 0 }; + + for statement in &b.statements { + visitor.visit_statement(BasicBlock::new(0), statement, dummy_location); + visitor.pre_defs.union(&visitor.defs); + } + visitor.visit_terminator(BasicBlock::new(0), b.terminator(), dummy_location); + + BlockInfo { + defs: visitor.defs, + uses: visitor.uses, + } +} + +// This gives the result of the liveness analysis at the boundary of basic blocks +pub struct LivenessResult { + pub ins: IndexVec, + pub outs: IndexVec, +} + +pub fn liveness_of_locals<'tcx>(mir: &Mir<'tcx>) -> LivenessResult { + let locals = mir.local_decls.len(); + let def_use: IndexVec<_, _> = mir.basic_blocks().iter().map(|b| { + block(b, locals) + }).collect(); + + let copy = |from: &IndexVec, to: &mut IndexVec| { + for (i, set) in to.iter_enumerated_mut() { + set.clone_from(&from[i]); + } + }; + + let mut ins: IndexVec<_, _> = mir.basic_blocks() + .indices() + .map(|_| LocalSet::new_empty(locals)).collect(); + let mut outs = ins.clone(); + + let mut ins_ = ins.clone(); + let mut outs_ = outs.clone(); + + loop { + copy(&ins, &mut ins_); + copy(&outs, &mut outs_); + + for b in mir.basic_blocks().indices().rev() { + // out = ∪ {ins of successors} + outs[b].clear(); + for &successor in mir.basic_blocks()[b].terminator().successors().into_iter() { + outs[b].union(&ins[successor]); + } + + // in = use ∪ (out - def) + ins[b].clone_from(&outs[b]); + ins[b].subtract(&def_use[b].defs); + ins[b].union(&def_use[b].uses); + } + + if ins_ == ins && outs_ == outs { + break; + } + } + + LivenessResult { + ins, + outs, + } +} + +pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + pass_name: &str, + source: MirSource, + mir: &Mir<'tcx>, + result: &LivenessResult) { + if !dump_enabled(tcx, pass_name, source) { + return; + } + let node_path = item_path::with_forced_impl_filename_line(|| { // see notes on #41697 below + tcx.item_path_str(tcx.hir.local_def_id(source.item_id())) + }); + dump_matched_mir_node(tcx, pass_name, &node_path, + source, mir, result); +} + +fn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + pass_name: &str, + node_path: &str, + source: MirSource, + mir: &Mir<'tcx>, + result: &LivenessResult) { + let mut file_path = PathBuf::new(); + if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir { + let p = Path::new(file_dir); + file_path.push(p); + }; + let file_name = format!("rustc.node{}{}-liveness.mir", + source.item_id(), pass_name); + file_path.push(&file_name); + let _ = fs::File::create(&file_path).and_then(|mut file| { + writeln!(file, "// MIR local liveness analysis for `{}`", node_path)?; + writeln!(file, "// source = {:?}", source)?; + writeln!(file, "// pass_name = {}", pass_name)?; + writeln!(file, "")?; + write_mir_fn(tcx, source, mir, &mut file, result)?; + Ok(()) + }); +} + +pub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, + mir: &Mir<'tcx>, + w: &mut Write, + result: &LivenessResult) + -> io::Result<()> { + write_mir_intro(tcx, src, mir, w)?; + for block in mir.basic_blocks().indices() { + let print = |w: &mut Write, prefix, result: 
&IndexVec| { + let live: Vec = mir.local_decls.indices() + .filter(|i| result[block].contains(i)) + .map(|i| format!("{:?}", i)) + .collect(); + writeln!(w, "{} {{{}}}", prefix, live.join(", ")) + }; + print(w, " ", &result.ins)?; + write_basic_block(tcx, block, mir, w)?; + print(w, " ", &result.outs)?; + if block.index() + 1 != mir.basic_blocks().len() { + writeln!(w, "")?; + } + } + + writeln!(w, "}}")?; + Ok(()) +} + diff --git a/src/librustc_mir/util/mod.rs b/src/librustc_mir/util/mod.rs index f0d837e1362d1..4b6da96824dcd 100644 --- a/src/librustc_mir/util/mod.rs +++ b/src/librustc_mir/util/mod.rs @@ -15,6 +15,7 @@ pub mod patch; mod graphviz; mod pretty; +pub mod liveness; pub use self::pretty::{dump_enabled, dump_mir, write_mir_pretty}; pub use self::graphviz::{write_mir_graphviz}; diff --git a/src/librustc_mir/util/patch.rs b/src/librustc_mir/util/patch.rs index 1af0b6c67f259..66607a9e0986f 100644 --- a/src/librustc_mir/util/patch.rs +++ b/src/librustc_mir/util/patch.rs @@ -101,6 +101,13 @@ impl<'tcx> MirPatch<'tcx> { Local::new(index as usize) } + pub fn new_internal(&mut self, ty: Ty<'tcx>, span: Span) -> Local { + let index = self.next_local; + self.next_local += 1; + self.new_locals.push(LocalDecl::new_internal(ty, span)); + Local::new(index as usize) + } + pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { let block = BasicBlock::new(self.patch_map.len()); debug!("MirPatch: new_block: {:?}: {:?}", block, data); diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs index 22a8c4378d4c3..0811783a9e57f 100644 --- a/src/librustc_mir/util/pretty.rs +++ b/src/librustc_mir/util/pretty.rs @@ -94,6 +94,7 @@ fn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &Mir<'tcx>) { let promotion_id = match source { MirSource::Promoted(_, id) => format!("-{:?}", id), + MirSource::GeneratorDrop(_) => format!("-drop"), _ => String::new() }; @@ -120,6 +121,9 @@ fn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, writeln!(file, "// source = {:?}", source)?; writeln!(file, "// pass_name = {}", pass_name)?; writeln!(file, "// disambiguator = {}", disambiguator)?; + if let Some(ref layout) = mir.generator_layout { + writeln!(file, "// generator_layout = {:?}", layout)?; + } writeln!(file, "")?; write_mir_fn(tcx, source, mir, &mut file)?; Ok(()) @@ -176,7 +180,7 @@ pub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } /// Write out a human-readable textual representation for the given basic block. -fn write_basic_block(tcx: TyCtxt, +pub fn write_basic_block(tcx: TyCtxt, block: BasicBlock, mir: &Mir, w: &mut Write) @@ -274,7 +278,7 @@ fn write_scope_tree(tcx: TyCtxt, /// Write out a human-readable textual representation of the MIR's `fn` type and the types of its /// local variables (both user-defined bindings and compiler temporaries). -fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, +pub fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &Mir, w: &mut Write) @@ -322,28 +326,34 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write) MirSource::Const(_) => write!(w, "const")?, MirSource::Static(_, hir::MutImmutable) => write!(w, "static")?, MirSource::Static(_, hir::MutMutable) => write!(w, "static mut")?, - MirSource::Promoted(_, i) => write!(w, "{:?} in", i)? 
+ MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?, + MirSource::GeneratorDrop(_) => write!(w, "drop_glue")?, } item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere write!(w, " {}", tcx.node_path_str(src.item_id())) })?; - if let MirSource::Fn(_) = src { - write!(w, "(")?; - - // fn argument types. - for (i, arg) in mir.args_iter().enumerate() { - if i != 0 { - write!(w, ", ")?; + match src { + MirSource::Fn(_) | MirSource::GeneratorDrop(_) => { + write!(w, "(")?; + + // fn argument types. + for (i, arg) in mir.args_iter().enumerate() { + if i != 0 { + write!(w, ", ")?; + } + write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?; } - write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?; - } - write!(w, ") -> {}", mir.return_ty) - } else { - assert_eq!(mir.arg_count, 0); - write!(w, ": {} =", mir.return_ty) + write!(w, ") -> {}", mir.return_ty) + } + MirSource::Const(..) | + MirSource::Static(..) | + MirSource::Promoted(..) => { + assert_eq!(mir.arg_count, 0); + write!(w, ": {} =", mir.return_ty) + } } } diff --git a/src/librustc_passes/consts.rs b/src/librustc_passes/consts.rs index 763f885b4d005..b4f4f56519121 100644 --- a/src/librustc_passes/consts.rs +++ b/src/librustc_passes/consts.rs @@ -87,19 +87,14 @@ impl<'a, 'gcx> CheckCrateVisitor<'a, 'gcx> { } } - // Adds the worst effect out of all the values of one type. - fn add_type(&mut self, ty: Ty<'gcx>) { - if !ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) { - self.promotable = false; - } - - if ty.needs_drop(self.tcx, self.param_env) { - self.promotable = false; - } + // Returns true iff all the values of the type are promotable. + fn type_has_only_promotable_values(&mut self, ty: Ty<'gcx>) -> bool { + ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) && + !ty.needs_drop(self.tcx, self.param_env) } fn handle_const_fn_call(&mut self, def_id: DefId, ret_ty: Ty<'gcx>) { - self.add_type(ret_ty); + self.promotable &= self.type_has_only_promotable_values(ret_ty); self.promotable &= if let Some(fn_id) = self.tcx.hir.as_local_node_id(def_id) { FnLikeNode::from_node(self.tcx.hir.get(fn_id)).map_or(false, |fn_like| { @@ -148,8 +143,8 @@ impl<'a, 'tcx> Visitor<'tcx> for CheckCrateVisitor<'a, 'tcx> { let tcx = self.tcx; let param_env = self.param_env; - let region_maps = self.tcx.region_maps(item_def_id); - euv::ExprUseVisitor::new(self, tcx, param_env, ®ion_maps, self.tables) + let region_scope_tree = self.tcx.region_scope_tree(item_def_id); + euv::ExprUseVisitor::new(self, tcx, param_env, ®ion_scope_tree, self.tables) .consume_body(body); self.visit_body(body); @@ -333,20 +328,30 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node match def { Def::VariantCtor(..) | Def::StructCtor(..) | Def::Fn(..) | Def::Method(..) => {} - Def::AssociatedConst(_) => v.add_type(node_ty), - Def::Const(did) => { - v.promotable &= if let Some(node_id) = v.tcx.hir.as_local_node_id(did) { - match v.tcx.hir.expect_item(node_id).node { - hir::ItemConst(_, body) => { + + Def::Const(did) | + Def::AssociatedConst(did) => { + let promotable = if v.tcx.trait_of_item(did).is_some() { + // Don't peek inside trait associated constants. 
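The `write_mir_sig` rework above prints a different header per MIR source: functions and generator drop glue get a parenthesized argument list and a return type, while consts, statics and promoteds get `: ty =` with an assertion that they take no arguments. A small sketch of that shape with an invented enum standing in for `MirSource`:

```rust
use std::fmt::Write;

/// Invented stand-in for the kinds of MIR bodies that get printed.
enum SourceKind {
    Fn { args: Vec<(String, String)>, ret: String },
    GeneratorDrop { args: Vec<(String, String)>, ret: String },
    Const { ty: String },
    Static { ty: String },
    Promoted { ty: String },
}

fn write_sig(name: &str, src: &SourceKind) -> String {
    let mut out = String::new();
    match src {
        // Function-like bodies: print the argument list and return type.
        SourceKind::Fn { args, ret } | SourceKind::GeneratorDrop { args, ret } => {
            write!(out, "fn {}(", name).unwrap();
            for (i, (arg, ty)) in args.iter().enumerate() {
                if i != 0 {
                    out.push_str(", ");
                }
                write!(out, "{}: {}", arg, ty).unwrap();
            }
            write!(out, ") -> {}", ret).unwrap();
        }
        // Value-like bodies: no arguments, just the type of the result.
        SourceKind::Const { ty } | SourceKind::Static { ty } | SourceKind::Promoted { ty } => {
            write!(out, "{}: {} =", name, ty).unwrap();
        }
    }
    out
}

fn main() {
    let f = SourceKind::Fn {
        args: vec![("_1".into(), "i32".into())],
        ret: "bool".into(),
    };
    assert_eq!(write_sig("foo", &f), "fn foo(_1: i32) -> bool");
    let c = SourceKind::Const { ty: "u8".into() };
    assert_eq!(write_sig("BAR", &c), "BAR: u8 =");
}
```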
+ false + } else if let Some(node_id) = v.tcx.hir.as_local_node_id(did) { + match v.tcx.hir.maybe_body_owned_by(node_id) { + Some(body) => { v.visit_nested_body(body); v.tcx.rvalue_promotable_to_static.borrow()[&body.node_id] } - _ => false + None => false } } else { v.tcx.const_is_rvalue_promotable_to_static(did) }; + + // Just in case the type is more specific than the definition, + // e.g. impl associated const with type parameters, check it. + // Also, trait associated consts are relaxed by this. + v.promotable &= promotable || v.type_has_only_promotable_values(node_ty); } + _ => { v.promotable = false; } @@ -435,6 +440,9 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node hir::ExprAgain(_) | hir::ExprRet(_) | + // Generator expressions + hir::ExprYield(_) | + // Expressions with side-effects. hir::ExprAssign(..) | hir::ExprAssignOp(..) | diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs index 1b2a07cd1a7b8..c23f28fe2205f 100644 --- a/src/librustc_passes/loops.rs +++ b/src/librustc_passes/loops.rs @@ -81,7 +81,7 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> { hir::ExprLoop(ref b, _, source) => { self.with_context(Loop(LoopKind::Loop(source)), |v| v.visit_block(&b)); } - hir::ExprClosure(.., b, _) => { + hir::ExprClosure(.., b, _, _) => { self.with_context(Closure, |v| v.visit_nested_body(b)); } hir::ExprBreak(label, ref opt_expr) => { diff --git a/src/librustc_passes/mir_stats.rs b/src/librustc_passes/mir_stats.rs index 9975082c55135..3273d66dd4f51 100644 --- a/src/librustc_passes/mir_stats.rs +++ b/src/librustc_passes/mir_stats.rs @@ -120,6 +120,8 @@ impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> { TerminatorKind::DropAndReplace { .. } => "TerminatorKind::DropAndReplace", TerminatorKind::Call { .. } => "TerminatorKind::Call", TerminatorKind::Assert { .. } => "TerminatorKind::Assert", + TerminatorKind::GeneratorDrop => "TerminatorKind::GeneratorDrop", + TerminatorKind::Yield { .. } => "TerminatorKind::Yield", }, kind); self.super_terminator_kind(block, kind, location); } @@ -131,6 +133,12 @@ impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> { self.record(match *msg { AssertMessage::BoundsCheck { .. } => "AssertMessage::BoundsCheck", AssertMessage::Math(..) => "AssertMessage::Math", + AssertMessage::GeneratorResumedAfterReturn => { + "AssertMessage::GeneratorResumedAfterReturn" + } + AssertMessage::GeneratorResumedAfterPanic => { + "AssertMessage::GeneratorResumedAfterPanic" + } }, msg); self.super_assert_message(msg, location); } @@ -158,6 +166,7 @@ impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> { AggregateKind::Tuple => "AggregateKind::Tuple", AggregateKind::Adt(..) => "AggregateKind::Adt", AggregateKind::Closure(..) => "AggregateKind::Closure", + AggregateKind::Generator(..) => "AggregateKind::Generator", }, kind); "Rvalue::Aggregate" diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 772b16bbecfba..872a29e7bc0c5 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -325,8 +325,9 @@ impl<'a, 'tcx> Visitor<'tcx> for EmbargoVisitor<'a, 'tcx> { // This code is here instead of in visit_item so that the // crate module gets processed as well. 
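The `consts.rs` change above swaps the flag-mutating `add_type` for a pure predicate (`is_freeze && !needs_drop`) that callers fold in with `&=`, and lets an associated const whose body cannot be inspected fall back to that predicate on the use-site type. A rough model of the decision, with made-up type facts standing in for the real `is_freeze`/`needs_drop` queries:

```rust
/// Made-up per-type facts standing in for the `is_freeze` / `needs_drop` queries.
#[derive(Clone, Copy)]
struct TypeFacts {
    is_freeze: bool,
    needs_drop: bool,
}

/// True iff all values of the type are promotable (no interior mutability,
/// no destructor): the pure-predicate form of the old `add_type`.
fn type_has_only_promotable_values(ty: TypeFacts) -> bool {
    ty.is_freeze && !ty.needs_drop
}

/// Promotability of a reference to a constant: prefer the verdict for the
/// const's own body; if that body can't be inspected (e.g. a trait associated
/// const), fall back to judging the concrete type at the use site.
fn const_ref_promotable(body_verdict: Option<bool>, use_site_ty: TypeFacts) -> bool {
    body_verdict.unwrap_or(false) || type_has_only_promotable_values(use_site_ty)
}

fn main() {
    let plain = TypeFacts { is_freeze: true, needs_drop: false };
    let cell_like = TypeFacts { is_freeze: false, needs_drop: false };

    let mut promotable = true;
    // Each sub-expression folds its verdict into the running flag, like `v.promotable &= ...`.
    promotable &= const_ref_promotable(Some(true), plain);
    assert!(promotable);
    promotable &= const_ref_promotable(None, cell_like); // opaque const of a non-freeze type
    assert!(!promotable);
}
```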
if self.prev_level.is_some() { - if let Some(exports) = self.tcx.export_map.get(&id) { - for export in exports { + let hir_id = self.tcx.hir.node_to_hir_id(id); + if let Some(exports) = self.tcx.module_exports(hir_id) { + for export in exports.iter() { if let Some(node_id) = self.tcx.hir.as_local_node_id(export.def.def_id()) { self.update(node_id, Some(AccessLevel::Exported)); } @@ -446,6 +447,7 @@ impl<'b, 'a, 'tcx> TypeVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'b ty::TyProjection(ref proj) => Some(proj.item_def_id), ty::TyFnDef(def_id, ..) | ty::TyClosure(def_id, ..) | + ty::TyGenerator(def_id, ..) | ty::TyAnon(def_id, _) => Some(def_id), _ => None }; @@ -477,7 +479,7 @@ struct NamePrivacyVisitor<'a, 'tcx: 'a> { impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> { // Checks that a field is accessible. fn check_field(&mut self, span: Span, def: &'tcx ty::AdtDef, field: &'tcx ty::FieldDef) { - let ident = Ident { ctxt: span.ctxt.modern(), ..keywords::Invalid.ident() }; + let ident = Ident { ctxt: span.ctxt().modern(), ..keywords::Invalid.ident() }; let def_id = self.tcx.adjust_ident(ident, def.did, self.current_item).1; if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) { struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private", diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index a83ac9bb63369..2183c9124e7f4 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -606,9 +606,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { // don't suggest placing a use before the prelude // import or other generated ones if item.span == DUMMY_SP { - let mut span = item.span; - span.hi = span.lo; - self.span = Some(span); + self.span = Some(item.span.with_hi(item.span.lo())); self.found_use = true; return; } @@ -617,9 +615,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { ItemKind::ExternCrate(_) => {} // but place them before the first other item _ => if self.span.map_or(true, |span| item.span < span ) { - let mut span = item.span; - span.hi = span.lo; - self.span = Some(span); + self.span = Some(item.span.with_hi(item.span.lo())); }, } } @@ -1732,7 +1728,7 @@ impl<'a> Resolver<'a> { fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> { let mut module = self.get_module(module.normal_ancestor_id); - while module.span.ctxt.modern() != *ctxt { + while module.span.ctxt().modern() != *ctxt { let parent = module.parent.unwrap_or_else(|| self.macro_def_scope(ctxt.remove_mark())); module = self.get_module(parent.normal_ancestor_id); } @@ -2659,8 +2655,8 @@ impl<'a> Resolver<'a> { sp = sp.next_point(); if let Ok(snippet) = cm.span_to_snippet(sp.to(sp.next_point())) { debug!("snippet {:?}", snippet); - let line_sp = cm.lookup_char_pos(sp.hi).line; - let line_base_sp = cm.lookup_char_pos(base_span.lo).line; + let line_sp = cm.lookup_char_pos(sp.hi()).line; + let line_base_sp = cm.lookup_char_pos(base_span.lo()).line; debug!("{:?} {:?}", line_sp, line_base_sp); if snippet == ":" { err.span_label(base_span, @@ -3360,7 +3356,7 @@ impl<'a> Resolver<'a> { for &(trait_name, binding) in traits.as_ref().unwrap().iter() { let module = binding.module().unwrap(); let mut ident = ident; - if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt.modern()).is_none() { + if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() { continue } if self.resolve_ident_in_module_unadjusted(module, ident, ns, false, false, module.span) @@ -3586,7 +3582,7 @@ impl<'a> 
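Many hunks in this stretch of the diff are mechanical: field accesses `span.lo`, `span.hi` and `span.ctxt` become method calls, and direct field surgery such as `span.hi = span.lo` becomes `span.with_hi(span.lo())`. A tiny sketch of that encapsulation with an invented `Span` type (the real `syntax_pos::Span` packs these fields, which is exactly what the accessors hide):

```rust
/// Invented stand-in for an interned/packed span: fields are private to the
/// type and only reachable through accessors, so the representation can change.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
    ctxt: u32,
}

impl Span {
    fn new(lo: u32, hi: u32, ctxt: u32) -> Span {
        Span { lo, hi, ctxt }
    }
    fn lo(self) -> u32 { self.lo }
    fn hi(self) -> u32 { self.hi }
    fn ctxt(self) -> u32 { self.ctxt }
    /// Returns a copy of the span with a new end point, replacing the old
    /// `span.hi = ...` field assignment.
    fn with_hi(self, hi: u32) -> Span {
        Span { hi, ..self }
    }
}

fn main() {
    let item_span = Span::new(10, 42, 0);
    // "Place a use before this item": a zero-length span at its start.
    let insertion_point = item_span.with_hi(item_span.lo());
    assert_eq!((insertion_point.lo(), insertion_point.hi()), (10, 10));
    assert_eq!(insertion_point.ctxt(), item_span.ctxt());
}
```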
Resolver<'a> { new_binding: &NameBinding, old_binding: &NameBinding) { // Error on the second of two conflicting names - if old_binding.span.lo > new_binding.span.lo { + if old_binding.span.lo() > new_binding.span.lo() { return self.report_conflict(parent, ident, ns, old_binding, new_binding); } diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index f8dc341653ece..9531c8baa0bc1 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -402,7 +402,8 @@ impl<'a> Resolver<'a> { let ast::Path { ref segments, span } = *path; let path: Vec<_> = segments.iter().map(|seg| respan(seg.span, seg.identifier)).collect(); let invocation = self.invocations[&scope]; - self.current_module = invocation.module.get(); + let module = invocation.module.get(); + self.current_module = if module.is_trait() { module.parent.unwrap() } else { module }; if path.len() > 1 { if !self.use_extern_macros && self.gated_errors.insert(span) { diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 5616971e9d1f5..71bcee56ecc51 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -237,7 +237,7 @@ impl<'a> Resolver<'a> { } let module = unwrap_or!(directive.imported_module.get(), return Err(Undetermined)); let (orig_current_module, mut ident) = (self.current_module, ident.modern()); - match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt.modern()) { + match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt().modern()) { Some(Some(def)) => self.current_module = self.macro_def_scope(def), Some(None) => {} None => continue, @@ -398,7 +398,7 @@ impl<'a> Resolver<'a> { for directive in module.glob_importers.borrow_mut().iter() { let mut ident = ident.modern(); let scope = match ident.ctxt.reverse_glob_adjust(module.expansion, - directive.span.ctxt.modern()) { + directive.span.ctxt().modern()) { Some(Some(def)) => self.macro_def_scope(def), Some(None) => directive.parent, None => continue, @@ -800,7 +800,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { }).collect::>(); for ((mut ident, ns), binding) in bindings { let scope = match ident.ctxt.reverse_glob_adjust(module.expansion, - directive.span.ctxt.modern()) { + directive.span.ctxt().modern()) { Some(Some(def)) => self.macro_def_scope(def), Some(None) => self.current_module, None => continue, diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 9b74df865d747..9ee38dd86c1b5 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -91,13 +91,13 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { use rls_span::{Row, Column}; let cm = self.tcx.sess.codemap(); - let start = cm.lookup_char_pos(span.lo); - let end = cm.lookup_char_pos(span.hi); + let start = cm.lookup_char_pos(span.lo()); + let end = cm.lookup_char_pos(span.hi()); SpanData { file_name: start.file.name.clone().into(), - byte_start: span.lo.0, - byte_end: span.hi.0, + byte_start: span.lo().0, + byte_end: span.hi().0, line_start: Row::new_one_indexed(start.line as u32), line_end: Row::new_one_indexed(end.line as u32), column_start: Column::new_one_indexed(start.col.0 as u32 + 1), @@ -117,7 +117,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { continue; } }; - let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo); + let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo()); result.push(ExternalCrateData { name: self.tcx.sess.cstore.crate_name(n).to_string(), num: 
n.as_u32(), @@ -999,7 +999,7 @@ fn escape(s: String) -> String { // Helper function to determine if a span came from a // macro expansion or syntax extension. fn generated_code(span: Span) -> bool { - span.ctxt != NO_EXPANSION || span == DUMMY_SP + span.ctxt() != NO_EXPANSION || span == DUMMY_SP } // DefId::index is a newtype and so the JSON serialisation is ugly. Therefore diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 36e4d09c963f5..b9d82b8e2512a 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -192,7 +192,7 @@ impl<'a> SpanUtils<'a> { prev = next; } if angle_count != 0 || bracket_count != 0 { - let loc = self.sess.codemap().lookup_char_pos(span.lo); + let loc = self.sess.codemap().lookup_char_pos(span.lo()); span_bug!(span, "Mis-counted brackets when breaking path? Parsing '{}' \ in {}, line {}", @@ -319,7 +319,7 @@ impl<'a> SpanUtils<'a> { }; //If the span comes from a fake filemap, filter it. - if !self.sess.codemap().lookup_char_pos(parent.lo).file.is_real_file() { + if !self.sess.codemap().lookup_char_pos(parent.lo()).file.is_real_file() { return true; } diff --git a/src/librustc_trans/adt.rs b/src/librustc_trans/adt.rs index d1c1dd7436a5b..11db23732fba3 100644 --- a/src/librustc_trans/adt.rs +++ b/src/librustc_trans/adt.rs @@ -77,6 +77,12 @@ pub fn compute_fields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, if variant_index > 0 { bug!("{} is a closure, which only has one variant", t);} substs.upvar_tys(def_id, cx.tcx()).collect() }, + ty::TyGenerator(def_id, substs, _) => { + if variant_index > 0 { bug!("{} is a generator, which only has one variant", t);} + substs.field_tys(def_id, cx.tcx()).map(|t| { + cx.tcx().normalize_associated_type(&t) + }).collect() + }, _ => bug!("{} is not a type that can have fields.", t) } } diff --git a/src/librustc_trans/attributes.rs b/src/librustc_trans/attributes.rs index 8863d4ea5ea8a..b6ca1460a7d0a 100644 --- a/src/librustc_trans/attributes.rs +++ b/src/librustc_trans/attributes.rs @@ -119,6 +119,8 @@ pub fn from_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: ValueRe llvm::AttributePlace::ReturnValue(), llfn); } else if attr.check_name("unwind") { unwind(llfn, true); + } else if attr.check_name("rustc_allocator_nounwind") { + unwind(llfn, false); } } if !target_features.is_empty() { diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 4e211d83cff3e..4b56376ad9bea 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -15,7 +15,7 @@ use super::linker::Linker; use super::rpath::RPathConfig; use super::rpath; use metadata::METADATA_FILENAME; -use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType}; +use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest}; use rustc::session::filesearch; use rustc::session::search_paths::PathKind; use rustc::session::Session; @@ -647,11 +647,20 @@ fn link_staticlib(sess: &Session, ab.build(); if !all_native_libs.is_empty() { - sess.note_without_error("link against the following native artifacts when linking against \ - this static library"); - sess.note_without_error("the order and any duplication can be significant on some \ - platforms, and so may need to be preserved"); + if sess.opts.prints.contains(&PrintRequest::NativeStaticLibs) { + print_native_static_libs(sess, &all_native_libs); + } else { + // Fallback for backwards compatibility only + 
print_native_static_libs_legacy(sess, &all_native_libs); + } } +} + +fn print_native_static_libs_legacy(sess: &Session, all_native_libs: &[NativeLibrary]) { + sess.note_without_error("link against the following native artifacts when linking against \ + this static library"); + sess.note_without_error("This list will not be printed by default. \ + Please add --print=native-static-libs if you need this information"); for lib in all_native_libs.iter().filter(|l| relevant_lib(sess, l)) { let name = match lib.kind { @@ -665,6 +674,35 @@ fn link_staticlib(sess: &Session, } } +fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLibrary]) { + let lib_args: Vec<_> = all_native_libs.iter() + .filter(|l| relevant_lib(sess, l)) + .filter_map(|lib| match lib.kind { + NativeLibraryKind::NativeStaticNobundle | + NativeLibraryKind::NativeUnknown => { + if sess.target.target.options.is_like_msvc { + Some(format!("{}.lib", lib.name)) + } else { + Some(format!("-l{}", lib.name)) + } + }, + NativeLibraryKind::NativeFramework => { + // ld-only syntax, since there are no frameworks in MSVC + Some(format!("-framework {}", lib.name)) + }, + // These are included, no need to print them + NativeLibraryKind::NativeStatic => None, + }) + .collect(); + if !lib_args.is_empty() { + sess.note_without_error("Link against the following native artifacts when linking \ + against this static library. The order and any duplication \ + can be significant on some platforms."); + // Prefix for greppability + sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" "))); + } +} + // Create a dynamic library or executable // // This will invoke the system linker/cc to create the resulting file. This diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index e4b090471d75a..a6c6b0efcfa8c 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -1371,7 +1371,7 @@ fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trans_i // Deterministically select one of the spans for error reporting let span = match (span1, span2) { (Some(span1), Some(span2)) => { - Some(if span1.lo.0 > span2.lo.0 { + Some(if span1.lo().0 > span2.lo().0 { span1 } else { span2 diff --git a/src/librustc_trans/cabi_powerpc64.rs b/src/librustc_trans/cabi_powerpc64.rs index 5c695387236fa..fb5472eb6ae1f 100644 --- a/src/librustc_trans/cabi_powerpc64.rs +++ b/src/librustc_trans/cabi_powerpc64.rs @@ -14,14 +14,26 @@ use abi::{FnType, ArgType, LayoutExt, Reg, RegKind, Uniform}; use context::CrateContext; +use rustc::ty::layout; -fn is_homogeneous_aggregate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg: &mut ArgType<'tcx>) +#[derive(Debug, Clone, Copy, PartialEq)] +enum ABI { + ELFv1, // original ABI used for powerpc64 (big-endian) + ELFv2, // newer ABI used for powerpc64le +} +use self::ABI::*; + +fn is_homogeneous_aggregate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, + arg: &mut ArgType<'tcx>, + abi: ABI) -> Option { arg.layout.homogeneous_aggregate(ccx).and_then(|unit| { let size = arg.layout.size(ccx); - // Ensure we have at most eight uniquely addressable members. - if size > unit.size.checked_mul(8, ccx).unwrap() { + // ELFv1 only passes one-member aggregates transparently. + // ELFv2 passes up to eight uniquely addressable members. 
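`print_native_static_libs` above maps each native dependency to the linker argument a downstream build needs: `foo.lib` on MSVC-like targets, `-lfoo` elsewhere, `-framework Foo` for frameworks, and nothing for static libs already bundled into the archive. A simplified standalone sketch of that mapping (the enum and flag are stand-ins for `NativeLibraryKind` and the session's target options):

```rust
/// Simplified stand-in for the compiler's NativeLibraryKind.
enum LibKind {
    Unknown,        // plain/unknown native lib
    StaticNobundle, // static lib that is *not* bundled into the rlib
    Framework,      // macOS framework (ld-only syntax)
    Static,         // bundled static lib: already in the archive, print nothing
}

fn native_static_lib_args(libs: &[(String, LibKind)], is_like_msvc: bool) -> Vec<String> {
    libs.iter()
        .filter_map(|(name, kind)| match kind {
            LibKind::Unknown | LibKind::StaticNobundle => Some(if is_like_msvc {
                format!("{}.lib", name)
            } else {
                format!("-l{}", name)
            }),
            LibKind::Framework => Some(format!("-framework {}", name)),
            LibKind::Static => None,
        })
        .collect()
}

fn main() {
    let libs = vec![
        ("z".to_string(), LibKind::Unknown),
        ("CoreFoundation".to_string(), LibKind::Framework),
        ("bundled".to_string(), LibKind::Static),
    ];
    // One prefixed, greppable line, as in the note emitted above.
    println!("native-static-libs: {}", native_static_lib_args(&libs, false).join(" "));
}
```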
+ if (abi == ELFv1 && size > unit.size) + || size > unit.size.checked_mul(8, ccx).unwrap() { return None; } @@ -42,21 +54,23 @@ fn is_homogeneous_aggregate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg: &mut Ar }) } -fn classify_ret_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ret: &mut ArgType<'tcx>) { +fn classify_ret_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ret: &mut ArgType<'tcx>, abi: ABI) { if !ret.layout.is_aggregate() { ret.extend_integer_width_to(64); return; } - // The PowerPC64 big endian ABI doesn't return aggregates in registers - if ccx.sess().target.target.target_endian == "big" { + // The ELFv1 ABI doesn't return aggregates in registers + if abi == ELFv1 { ret.make_indirect(ccx); + return; } - if let Some(uniform) = is_homogeneous_aggregate(ccx, ret) { + if let Some(uniform) = is_homogeneous_aggregate(ccx, ret, abi) { ret.cast_to(ccx, uniform); return; } + let size = ret.layout.size(ccx); let bits = size.bits(); if bits <= 128 { @@ -80,31 +94,55 @@ fn classify_ret_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ret: &mut ArgType<'tc ret.make_indirect(ccx); } -fn classify_arg_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg: &mut ArgType<'tcx>) { +fn classify_arg_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg: &mut ArgType<'tcx>, abi: ABI) { if !arg.layout.is_aggregate() { arg.extend_integer_width_to(64); return; } - if let Some(uniform) = is_homogeneous_aggregate(ccx, arg) { + if let Some(uniform) = is_homogeneous_aggregate(ccx, arg, abi) { arg.cast_to(ccx, uniform); return; } - let total = arg.layout.size(ccx); + let size = arg.layout.size(ccx); + let (unit, total) = match abi { + ELFv1 => { + // In ELFv1, aggregates smaller than a doubleword should appear in + // the least-significant bits of the parameter doubleword. The rest + // should be padded at their tail to fill out multiple doublewords. + if size.bits() <= 64 { + (Reg { kind: RegKind::Integer, size }, size) + } else { + let align = layout::Align::from_bits(64, 64).unwrap(); + (Reg::i64(), size.abi_align(align)) + } + }, + ELFv2 => { + // In ELFv2, we can just cast directly. + (Reg::i64(), size) + }, + }; + arg.cast_to(ccx, Uniform { - unit: Reg::i64(), + unit, total }); } pub fn compute_abi_info<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fty: &mut FnType<'tcx>) { + let abi = match ccx.sess().target.target.target_endian.as_str() { + "big" => ELFv1, + "little" => ELFv2, + _ => unimplemented!(), + }; + if !fty.ret.is_ignore() { - classify_ret_ty(ccx, &mut fty.ret); + classify_ret_ty(ccx, &mut fty.ret, abi); } for arg in &mut fty.args { if arg.is_ignore() { continue; } - classify_arg_ty(ccx, arg); + classify_arg_ty(ccx, arg, abi); } } diff --git a/src/librustc_trans/cabi_x86.rs b/src/librustc_trans/cabi_x86.rs index 8b024b8c97fa0..49634d6e78ce9 100644 --- a/src/librustc_trans/cabi_x86.rs +++ b/src/librustc_trans/cabi_x86.rs @@ -11,12 +11,30 @@ use abi::{ArgAttribute, FnType, LayoutExt, Reg, RegKind}; use common::CrateContext; +use rustc::ty::layout::{self, Layout, TyLayout}; + #[derive(PartialEq)] pub enum Flavor { General, Fastcall } +fn is_single_fp_element<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, + layout: TyLayout<'tcx>) -> bool { + match *layout { + Layout::Scalar { value: layout::F32, .. } | + Layout::Scalar { value: layout::F64, .. } => true, + Layout::Univariant { .. 
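The powerpc64 change keys the ABI off endianness: big-endian is ELFv1, which never returns aggregates in registers and only passes one-member homogeneous aggregates transparently, while little-endian is ELFv2, which accepts up to eight uniquely addressable members. An illustrative sketch of just that size rule, with plain integers standing in for the layout machinery:

```rust
#[derive(PartialEq, Clone, Copy)]
enum Abi {
    ElfV1, // original ABI used for powerpc64 (big-endian)
    ElfV2, // newer ABI used for powerpc64le
}

/// Whether an aggregate of `total_size` bytes, made of members of a single
/// `unit_size`-byte kind, may be passed as a homogeneous aggregate.
fn homogeneous_aggregate_ok(abi: Abi, total_size: u64, unit_size: u64) -> bool {
    match abi {
        // ELFv1 only passes one-member aggregates transparently.
        Abi::ElfV1 => total_size <= unit_size,
        // ELFv2 allows up to eight uniquely addressable members.
        Abi::ElfV2 => total_size <= unit_size * 8,
    }
}

fn main() {
    // e.g. a struct of four f32s: 16 bytes of 4-byte units.
    assert!(!homogeneous_aggregate_ok(Abi::ElfV1, 16, 4));
    assert!(homogeneous_aggregate_ok(Abi::ElfV2, 16, 4));
    // A single f64 wrapped in a struct is fine under both.
    assert!(homogeneous_aggregate_ok(Abi::ElfV1, 8, 8));
}
```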
} => { + if layout.field_count() == 1 { + is_single_fp_element(ccx, layout.field(ccx, 0)) + } else { + false + } + } + _ => false + } +} + pub fn compute_abi_info<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fty: &mut FnType<'tcx>, flavor: Flavor) { @@ -33,12 +51,23 @@ pub fn compute_abi_info<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, if t.options.is_like_osx || t.options.is_like_windows || t.options.is_like_openbsd { let size = fty.ret.layout.size(ccx); - match size.bytes() { - 1 => fty.ret.cast_to(ccx, Reg::i8()), - 2 => fty.ret.cast_to(ccx, Reg::i16()), - 4 => fty.ret.cast_to(ccx, Reg::i32()), - 8 => fty.ret.cast_to(ccx, Reg::i64()), - _ => fty.ret.make_indirect(ccx) + + // According to Clang, everyone but MSVC returns single-element + // float aggregates directly in a floating-point register. + if !t.options.is_like_msvc && is_single_fp_element(ccx, fty.ret.layout) { + match size.bytes() { + 4 => fty.ret.cast_to(ccx, Reg::f32()), + 8 => fty.ret.cast_to(ccx, Reg::f64()), + _ => fty.ret.make_indirect(ccx) + } + } else { + match size.bytes() { + 1 => fty.ret.cast_to(ccx, Reg::i8()), + 2 => fty.ret.cast_to(ccx, Reg::i16()), + 4 => fty.ret.cast_to(ccx, Reg::i32()), + 8 => fty.ret.cast_to(ccx, Reg::i64()), + _ => fty.ret.make_indirect(ccx) + } } } else { fty.ret.make_indirect(ccx); diff --git a/src/librustc_trans/collector.rs b/src/librustc_trans/collector.rs index c5de3a4ffb0fd..8b864a7fdcfbb 100644 --- a/src/librustc_trans/collector.rs +++ b/src/librustc_trans/collector.rs @@ -629,6 +629,8 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { mir::TerminatorKind::Return | mir::TerminatorKind::Unreachable | mir::TerminatorKind::Assert { .. } => {} + mir::TerminatorKind::GeneratorDrop | + mir::TerminatorKind::Yield { .. } => bug!(), } self.super_terminator_kind(block, kind, location); diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index 09aa3d2335a6a..261792735dcab 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -28,12 +28,13 @@ use type_::Type; use value::Value; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::layout::{Layout, LayoutTyper}; -use rustc::ty::subst::{Subst, Substs}; +use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::hir; use libc::{c_uint, c_char}; use std::iter; +use syntax::abi::Abi; use syntax::attr; use syntax::symbol::InternedString; use syntax_pos::Span; @@ -91,6 +92,16 @@ pub fn type_pair_fields<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) } })) } + ty::TyGenerator(def_id, substs, _) => { + let mut tys = substs.field_tys(def_id, ccx.tcx()); + tys.next().and_then(|first_ty| tys.next().and_then(|second_ty| { + if tys.next().is_some() { + None + } else { + Some([first_ty, second_ty]) + } + })) + } ty::TyTuple(tys, _) => { if tys.len() != 2 { return None; @@ -511,6 +522,28 @@ pub fn ty_fn_sig<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, sig.abi )) } + ty::TyGenerator(def_id, substs, _) => { + let tcx = ccx.tcx(); + let sig = tcx.generator_sig(def_id).unwrap().subst(tcx, substs.substs); + + let env_region = ty::ReLateBound(ty::DebruijnIndex::new(1), ty::BrEnv); + let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty); + + sig.map_bound(|sig| { + let state_did = tcx.lang_items.gen_state().unwrap(); + let state_adt_ref = tcx.adt_def(state_did); + let state_substs = tcx.mk_substs([Kind::from(sig.yield_ty), + Kind::from(sig.return_ty)].iter()); + let ret_ty = tcx.mk_adt(state_adt_ref, state_substs); + + tcx.mk_fn_sig(iter::once(env_ty), + ret_ty, + false, + hir::Unsafety::Normal, + Abi::Rust + ) + 
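On 32-bit x86 the hunk above follows Clang: on the targets that return small aggregates in registers, a non-MSVC target returns a single-element float aggregate in a floating-point register (f32 or f64 by size), everything else becomes a same-sized integer, and larger values go indirect. A compact sketch of that decision table, with a string result standing in for the real `cast_to`/`make_indirect` calls:

```rust
/// How a small aggregate return value gets classified here (illustrative only).
#[derive(Debug, PartialEq)]
enum RetAbi {
    CastTo(&'static str), // register class to cast to
    Indirect,             // returned through a hidden pointer
}

fn classify_x86_ret(size_bytes: u64, single_fp_element: bool, is_like_msvc: bool) -> RetAbi {
    if !is_like_msvc && single_fp_element {
        // Single-element float aggregates come back in an FP register.
        match size_bytes {
            4 => RetAbi::CastTo("f32"),
            8 => RetAbi::CastTo("f64"),
            _ => RetAbi::Indirect,
        }
    } else {
        // Otherwise, small aggregates come back as a same-sized integer.
        match size_bytes {
            1 => RetAbi::CastTo("i8"),
            2 => RetAbi::CastTo("i16"),
            4 => RetAbi::CastTo("i32"),
            8 => RetAbi::CastTo("i64"),
            _ => RetAbi::Indirect,
        }
    }
}

fn main() {
    // struct Wrapper(f64) on an osx-like target:
    assert_eq!(classify_x86_ret(8, true, false), RetAbi::CastTo("f64"));
    // The same struct under an MSVC-like target stays integer-classed:
    assert_eq!(classify_x86_ret(8, true, true), RetAbi::CastTo("i64"));
}
```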
}) + } _ => bug!("unexpected type {:?} to ty_fn_sig", ty) } } diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index 9aba075a20cdb..bcc6aca6149bd 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -580,6 +580,16 @@ pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, usage_site_span).finalize(cx) } + ty::TyGenerator(def_id, substs, _) => { + let upvar_tys : Vec<_> = substs.field_tys(def_id, cx.tcx()).map(|t| { + cx.tcx().normalize_associated_type(&t) + }).collect(); + prepare_tuple_metadata(cx, + t, + &upvar_tys, + unique_type_id, + usage_site_span).finalize(cx) + } ty::TyAdt(def, ..) => match def.adt_kind() { AdtKind::Struct => { prepare_struct_metadata(cx, diff --git a/src/librustc_trans/debuginfo/type_names.rs b/src/librustc_trans/debuginfo/type_names.rs index 8cb2c2809f4f3..5dd1c15fd2d6e 100644 --- a/src/librustc_trans/debuginfo/type_names.rs +++ b/src/librustc_trans/debuginfo/type_names.rs @@ -165,6 +165,9 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::TyClosure(..) => { output.push_str("closure"); } + ty::TyGenerator(..) => { + output.push_str("generator"); + } ty::TyError | ty::TyInfer(_) | ty::TyProjection(..) | diff --git a/src/librustc_trans/debuginfo/utils.rs b/src/librustc_trans/debuginfo/utils.rs index 6df509f34a472..7529139c05aac 100644 --- a/src/librustc_trans/debuginfo/utils.rs +++ b/src/librustc_trans/debuginfo/utils.rs @@ -49,7 +49,7 @@ pub fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray { /// Return syntax_pos::Loc corresponding to the beginning of the span pub fn span_start(cx: &CrateContext, span: Span) -> syntax_pos::Loc { - cx.sess().codemap().lookup_char_pos(span.lo) + cx.sess().codemap().lookup_char_pos(span.lo()) } pub fn size_and_align_of(cx: &CrateContext, llvm_type: Type) -> (u64, u32) { diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index 033ef988571dd..9a3c8a5079a2f 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -383,6 +383,18 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, _ => C_null(llret_ty) } } + + "align_offset" => { + // `ptr as usize` + let ptr_val = bcx.ptrtoint(llargs[0], bcx.ccx.int_type()); + // `ptr_val % align` + let offset = bcx.urem(ptr_val, llargs[1]); + let zero = C_null(bcx.ccx.int_type()); + // `offset == 0` + let is_zero = bcx.icmp(llvm::IntPredicate::IntEQ, offset, zero); + // `if offset == 0 { 0 } else { offset - align }` + bcx.select(is_zero, zero, bcx.sub(offset, llargs[1])) + } name if name.starts_with("simd_") => { generic_simd_intrinsic(bcx, name, callee_ty, diff --git a/src/librustc_trans/mir/analyze.rs b/src/librustc_trans/mir/analyze.rs index 598af1cda91d4..95b76d32bf848 100644 --- a/src/librustc_trans/mir/analyze.rs +++ b/src/librustc_trans/mir/analyze.rs @@ -134,60 +134,61 @@ impl<'mir, 'a, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'tcx> { location: Location) { debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context); - // Allow uses of projections of immediate pair fields. if let mir::Lvalue::Projection(ref proj) = *lvalue { - if let mir::Lvalue::Local(_) = proj.base { - let ty = proj.base.ty(self.cx.mir, self.cx.ccx.tcx()); - - let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx())); - if common::type_is_imm_pair(self.cx.ccx, ty) { + // Allow uses of projections of immediate pair fields. 
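For `TyGenerator`, `ty_fn_sig` above synthesizes the signature resume calls are translated with: a single `&mut` environment argument returning `GeneratorState<Yield, Return>` built from the generator's yield and return types. A hedged sketch of that shape in plain Rust, with a hand-rolled `GeneratorState` and a struct standing in for the generator's state machine (the real lowering goes through the `gen_state` lang item, not this type):

```rust
/// Stand-in for the GeneratorState lang item: either a yielded value or the
/// final return value.
#[derive(Debug, PartialEq)]
enum GeneratorState<Y, R> {
    Yielded(Y),
    Complete(R),
}

/// A toy state machine; `resume` has exactly the shape given to generator
/// calls above: fn(&mut Self) -> GeneratorState<Yield, Return>.
struct Counter {
    n: u32,
}

impl Counter {
    fn resume(&mut self) -> GeneratorState<u32, &'static str> {
        if self.n < 2 {
            self.n += 1;
            GeneratorState::Yielded(self.n)
        } else {
            GeneratorState::Complete("done")
        }
    }
}

fn main() {
    let mut g = Counter { n: 0 };
    assert_eq!(g.resume(), GeneratorState::Yielded(1));
    assert_eq!(g.resume(), GeneratorState::Yielded(2));
    assert_eq!(g.resume(), GeneratorState::Complete("done"));
}
```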
+ if let LvalueContext::Consume = context { + if let mir::Lvalue::Local(_) = proj.base { if let mir::ProjectionElem::Field(..) = proj.elem { - if let LvalueContext::Consume = context { + let ty = proj.base.ty(self.cx.mir, self.cx.ccx.tcx()); + + let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx())); + if common::type_is_imm_pair(self.cx.ccx, ty) { return; } } } } - } - if let mir::Lvalue::Local(index) = *lvalue { - match context { - LvalueContext::Call => { - self.mark_assigned(index); - } + // A deref projection only reads the pointer, never needs the lvalue. + if let mir::ProjectionElem::Deref = proj.elem { + return self.visit_lvalue(&proj.base, LvalueContext::Consume, location); + } + } - LvalueContext::StorageLive | - LvalueContext::StorageDead | - LvalueContext::Validate | - LvalueContext::Inspect | - LvalueContext::Consume => {} + self.super_lvalue(lvalue, context, location); + } - LvalueContext::Store | - LvalueContext::Borrow { .. } | - LvalueContext::Projection(..) => { - self.mark_as_lvalue(index); - } + fn visit_local(&mut self, + &index: &mir::Local, + context: LvalueContext<'tcx>, + _: Location) { + match context { + LvalueContext::Call => { + self.mark_assigned(index); + } - LvalueContext::Drop => { - let ty = lvalue.ty(self.cx.mir, self.cx.ccx.tcx()); - let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx())); + LvalueContext::StorageLive | + LvalueContext::StorageDead | + LvalueContext::Validate | + LvalueContext::Inspect | + LvalueContext::Consume => {} - // Only need the lvalue if we're actually dropping it. - if self.cx.ccx.shared().type_needs_drop(ty) { - self.mark_as_lvalue(index); - } - } + LvalueContext::Store | + LvalueContext::Borrow { .. } | + LvalueContext::Projection(..) => { + self.mark_as_lvalue(index); } - } - // A deref projection only reads the pointer, never needs the lvalue. - if let mir::Lvalue::Projection(ref proj) = *lvalue { - if let mir::ProjectionElem::Deref = proj.elem { - return self.visit_lvalue(&proj.base, LvalueContext::Consume, location); + LvalueContext::Drop => { + let ty = mir::Lvalue::Local(index).ty(self.cx.mir, self.cx.ccx.tcx()); + let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx())); + + // Only need the lvalue if we're actually dropping it. + if self.cx.ccx.shared().type_needs_drop(ty) { + self.mark_as_lvalue(index); + } } } - - self.super_lvalue(lvalue, context, location); } } @@ -216,8 +217,10 @@ pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec { + TerminatorKind::SwitchInt { .. } | + TerminatorKind::Yield { .. } => { /* nothing to do */ } TerminatorKind::Call { cleanup: unwind, .. } | diff --git a/src/librustc_trans/mir/block.rs b/src/librustc_trans/mir/block.rs index 28fe1044e7454..bba3b1fa5baee 100644 --- a/src/librustc_trans/mir/block.rs +++ b/src/librustc_trans/mir/block.rs @@ -330,7 +330,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { self.set_debug_loc(&bcx, terminator.source_info); // Get the location information. 
- let loc = bcx.sess().codemap().lookup_char_pos(span.lo); + let loc = bcx.sess().codemap().lookup_char_pos(span.lo()); let filename = Symbol::intern(&loc.file.name).as_str(); let filename = C_str_slice(bcx.ccx, filename); let line = C_u32(bcx.ccx, loc.line as u32); @@ -374,6 +374,27 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { vec![msg_file_line_col], Some(ErrKind::Math(err.clone()))) } + mir::AssertMessage::GeneratorResumedAfterReturn | + mir::AssertMessage::GeneratorResumedAfterPanic => { + let str = if let mir::AssertMessage::GeneratorResumedAfterReturn = *msg { + "generator resumed after completion" + } else { + "generator resumed after panicking" + }; + let msg_str = Symbol::intern(str).as_str(); + let msg_str = C_str_slice(bcx.ccx, msg_str); + let msg_file_line = C_struct(bcx.ccx, + &[msg_str, filename, line], + false); + let align = llalign_of_min(bcx.ccx, common::val_ty(msg_file_line)); + let msg_file_line = consts::addr_of(bcx.ccx, + msg_file_line, + align, + "panic_loc"); + (lang_items::PanicFnLangItem, + vec![msg_file_line], + None) + } }; // If we know we always panic, and the error message @@ -557,6 +578,8 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)), cleanup); } + mir::TerminatorKind::GeneratorDrop | + mir::TerminatorKind::Yield { .. } => bug!("generator ops in trans"), } } diff --git a/src/librustc_trans/mir/constant.rs b/src/librustc_trans/mir/constant.rs index 293e6462de279..9987c9c33102d 100644 --- a/src/librustc_trans/mir/constant.rs +++ b/src/librustc_trans/mir/constant.rs @@ -336,6 +336,9 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { mir::AssertMessage::Math(ref err) => { ErrKind::Math(err.clone()) } + mir::AssertMessage::GeneratorResumedAfterReturn | + mir::AssertMessage::GeneratorResumedAfterPanic => + span_bug!(span, "{:?} should not appear in constants?", msg), }; let err = ConstEvalErr { span: span, kind: err }; @@ -468,7 +471,8 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { }; (Base::Value(llprojected), llextra) } - mir::ProjectionElem::Index(ref index) => { + mir::ProjectionElem::Index(index) => { + let index = &mir::Operand::Consume(mir::Lvalue::Local(index)); let llindex = self.const_operand(index, span)?.llval; let iv = if let Some(iv) = common::const_to_opt_u128(llindex, false) { @@ -579,6 +583,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } mir::AggregateKind::Adt(..) | mir::AggregateKind::Closure(..) | + mir::AggregateKind::Generator(..) 
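The new generator assert messages are lowered like the existing ones, except they reuse the plain panic lang item with a fixed string ("generator resumed after completion" / "generator resumed after panicking"). A simplified sketch of the message-to-text mapping; the enum and the bounds-check wording here are stand-ins, not the exact compiler definitions:

```rust
/// Simplified stand-in for the MIR assert messages extended for generators.
enum AssertMessage {
    BoundsCheck { len: usize, index: usize },
    GeneratorResumedAfterReturn,
    GeneratorResumedAfterPanic,
}

/// The panic text each message lowers to; the generator cases use a fixed
/// string, as in the hunk above.
fn panic_text(msg: &AssertMessage) -> String {
    match msg {
        AssertMessage::BoundsCheck { len, index } => {
            format!("index out of bounds: the len is {} but the index is {}", len, index)
        }
        AssertMessage::GeneratorResumedAfterReturn => "generator resumed after completion".into(),
        AssertMessage::GeneratorResumedAfterPanic => "generator resumed after panicking".into(),
    }
}

fn main() {
    assert_eq!(
        panic_text(&AssertMessage::GeneratorResumedAfterReturn),
        "generator resumed after completion"
    );
    println!("{}", panic_text(&AssertMessage::BoundsCheck { len: 3, index: 7 }));
}
```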
| mir::AggregateKind::Tuple => { Const::new(trans_const(self.ccx, dest_ty, kind, &fields), dest_ty) } diff --git a/src/librustc_trans/mir/lvalue.rs b/src/librustc_trans/mir/lvalue.rs index 89c76ccdd27c2..8155303b0d3fc 100644 --- a/src/librustc_trans/mir/lvalue.rs +++ b/src/librustc_trans/mir/lvalue.rs @@ -333,7 +333,8 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { }; (tr_base.trans_field_ptr(bcx, field.index()), llextra) } - mir::ProjectionElem::Index(ref index) => { + mir::ProjectionElem::Index(index) => { + let index = &mir::Operand::Consume(mir::Lvalue::Local(index)); let index = self.trans_operand(bcx, index); let llindex = self.prepare_index(bcx, index.immediate()); ((tr_base.project_index(bcx, llindex), align), ptr::null_mut()) diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index a67fa070324c3..5206ad74e2054 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -129,23 +129,23 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { // In order to have a good line stepping behavior in debugger, we overwrite debug // locations of macro expansions with that of the outermost expansion site // (unless the crate is being compiled with `-Z debug-macros`). - if source_info.span.ctxt == NO_EXPANSION || + if source_info.span.ctxt() == NO_EXPANSION || self.ccx.sess().opts.debugging_opts.debug_macros { - let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo); + let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo()); (scope, source_info.span) } else { // Walk up the macro expansion chain until we reach a non-expanded span. // We also stop at the function body level because no line stepping can occur // at the level above that. let mut span = source_info.span; - while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt { - if let Some(info) = span.ctxt.outer().expn_info() { + while span.ctxt() != NO_EXPANSION && span.ctxt() != self.mir.span.ctxt() { + if let Some(info) = span.ctxt().outer().expn_info() { span = info.call_site; } else { break; } } - let scope = self.scope_metadata_for_loc(source_info.scope, span.lo); + let scope = self.scope_metadata_for_loc(source_info.scope, span.lo()); // Use span of the outermost expansion site, while keeping the original lexical scope. (scope, span) } @@ -524,15 +524,15 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, } // Or is it the closure environment? 
- let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty { - (mt.ty, true) - } else { - (arg_ty, false) + let (closure_ty, env_ref) = match arg_ty.sty { + ty::TyRef(_, mt) | ty::TyRawPtr(mt) => (mt.ty, true), + _ => (arg_ty, false) }; - let upvar_tys = if let ty::TyClosure(def_id, substs) = closure_ty.sty { - substs.upvar_tys(def_id, tcx) - } else { - bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty); + + let upvar_tys = match closure_ty.sty { + ty::TyClosure(def_id, substs) | + ty::TyGenerator(def_id, substs, _) => substs.upvar_tys(def_id, tcx), + _ => bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty) }; // Store the pointer to closure data in an alloca for debuginfo diff --git a/src/librustc_trans/mir/statement.rs b/src/librustc_trans/mir/statement.rs index 52dfc8dc4de5c..bbf661ae9a735 100644 --- a/src/librustc_trans/mir/statement.rs +++ b/src/librustc_trans/mir/statement.rs @@ -67,11 +67,11 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { variant_index as u64); bcx } - mir::StatementKind::StorageLive(ref lvalue) => { - self.trans_storage_liveness(bcx, lvalue, base::Lifetime::Start) + mir::StatementKind::StorageLive(local) => { + self.trans_storage_liveness(bcx, local, base::Lifetime::Start) } - mir::StatementKind::StorageDead(ref lvalue) => { - self.trans_storage_liveness(bcx, lvalue, base::Lifetime::End) + mir::StatementKind::StorageDead(local) => { + self.trans_storage_liveness(bcx, local, base::Lifetime::End) } mir::StatementKind::InlineAsm { ref asm, ref outputs, ref inputs } => { let outputs = outputs.iter().map(|output| { @@ -94,13 +94,11 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { fn trans_storage_liveness(&self, bcx: Builder<'a, 'tcx>, - lvalue: &mir::Lvalue<'tcx>, + index: mir::Local, intrinsic: base::Lifetime) -> Builder<'a, 'tcx> { - if let mir::Lvalue::Local(index) = *lvalue { - if let LocalRef::Lvalue(tr_lval) = self.locals[index] { - intrinsic.call(&bcx, tr_lval.llval); - } + if let LocalRef::Lvalue(tr_lval) = self.locals[index] { + intrinsic.call(&bcx, tr_lval.llval); } bcx } diff --git a/src/librustc_trans/monomorphize.rs b/src/librustc_trans/monomorphize.rs index b0d8be23b0d96..4989ca8cc938c 100644 --- a/src/librustc_trans/monomorphize.rs +++ b/src/librustc_trans/monomorphize.rs @@ -125,6 +125,12 @@ fn resolve_associated_item<'a, 'tcx>( let substs = tcx.erase_regions(&substs); ty::Instance::new(def_id, substs) } + traits::VtableGenerator(closure_data) => { + Instance { + def: ty::InstanceDef::Item(closure_data.closure_def_id), + substs: closure_data.substs.substs + } + } traits::VtableClosure(closure_data) => { let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap(); resolve_closure(scx, closure_data.closure_def_id, closure_data.substs, diff --git a/src/librustc_trans/trans_item.rs b/src/librustc_trans/trans_item.rs index 38232ed1d113a..672fa32aa8591 100644 --- a/src/librustc_trans/trans_item.rs +++ b/src/librustc_trans/trans_item.rs @@ -504,6 +504,7 @@ impl<'a, 'tcx> DefPathBasedNames<'a, 'tcx> { self.push_type_name(sig.output(), output); } }, + ty::TyGenerator(def_id, ref closure_substs, _) | ty::TyClosure(def_id, ref closure_substs) => { self.push_def_path(def_id, output); let generics = self.tcx.generics_of(self.tcx.closure_base_def_id(def_id)); diff --git a/src/librustc_trans/type_of.rs b/src/librustc_trans/type_of.rs index 9f9126ba83a8f..38c49833e0d75 100644 --- a/src/librustc_trans/type_of.rs +++ b/src/librustc_trans/type_of.rs @@ -133,6 +133,11 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: 
Ty<'tcx>) -> // fill it in *after* placing it into the type cache. adt::incomplete_type_of(cx, t, "closure") } + ty::TyGenerator(..) => { + // Only create the named struct, but don't fill it in. We + // fill it in *after* placing it into the type cache. + adt::incomplete_type_of(cx, t, "generator") + } ty::TyRef(_, ty::TypeAndMut{ty, ..}) | ty::TyRawPtr(ty::TypeAndMut{ty, ..}) => { @@ -197,7 +202,7 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> // If this was an enum or struct, fill in the type now. match t.sty { - ty::TyAdt(..) | ty::TyClosure(..) if !t.is_simd() && !t.is_box() => { + ty::TyAdt(..) | ty::TyClosure(..) | ty::TyGenerator(..) if !t.is_simd() && !t.is_box() => { adt::finish_type_of(cx, t, &mut llty); } _ => () diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 460e2858b22ec..5b9d4af08e069 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -38,6 +38,7 @@ pub fn check_legal_trait_for_method_call(tcx: TyCtxt, span: Span, trait_id: DefI enum CallStep<'tcx> { Builtin(Ty<'tcx>), DeferredClosure(ty::FnSig<'tcx>), + /// e.g. enum variant constructors Overloaded(MethodCallee<'tcx>), } diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index 61795a7e623d3..a768271f3b825 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -70,22 +70,29 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // inference phase (`upvar.rs`). let base_substs = Substs::identity_for_item(self.tcx, self.tcx.closure_base_def_id(expr_def_id)); - let closure_type = self.tcx.mk_closure(expr_def_id, - base_substs.extend_to(self.tcx, expr_def_id, + let substs = base_substs.extend_to(self.tcx, expr_def_id, |_, _| span_bug!(expr.span, "closure has region param"), |_, _| self.infcx.next_ty_var(TypeVariableOrigin::TransformedUpvar(expr.span)) - ) ); - debug!("check_closure: expr.id={:?} closure_type={:?}", expr.id, closure_type); - let fn_sig = self.liberate_late_bound_regions(expr_def_id, &sig); let fn_sig = self.inh.normalize_associated_types_in(body.value.span, body.value.id, self.param_env, &fn_sig); - check_fn(self, self.param_env, fn_sig, decl, expr.id, body); + let interior = check_fn(self, self.param_env, fn_sig, decl, expr.id, body, true).1; + + if let Some(interior) = interior { + let closure_substs = ty::ClosureSubsts { + substs: substs, + }; + return self.tcx.mk_generator(expr_def_id, closure_substs, interior); + } + + let closure_type = self.tcx.mk_closure(expr_def_id, substs); + + debug!("check_closure: expr.id={:?} closure_type={:?}", expr.id, closure_type); // Tuple up the arguments and insert the resulting function type into // the `closures` table. diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index bf134f9547d38..b21d48886120c 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -11,7 +11,7 @@ use rustc::hir::{self, ImplItemKind, TraitItemKind}; use rustc::infer::{self, InferOk}; use rustc::middle::free_region::FreeRegionMap; -use rustc::middle::region::RegionMaps; +use rustc::middle::region; use rustc::ty::{self, TyCtxt}; use rustc::traits::{self, ObligationCause, ObligationCauseCode, Reveal}; use rustc::ty::error::{ExpectedFound, TypeError}; @@ -340,10 +340,12 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // region obligations that get overlooked. 
The right // thing to do is the code below. But we keep this old // pass around temporarily. - let region_maps = RegionMaps::new(); + let region_scope_tree = region::ScopeTree::default(); let mut free_regions = FreeRegionMap::new(); free_regions.relate_free_regions_from_predicates(¶m_env.caller_bounds); - infcx.resolve_regions_and_report_errors(impl_m.def_id, ®ion_maps, &free_regions); + infcx.resolve_regions_and_report_errors(impl_m.def_id, + ®ion_scope_tree, + &free_regions); } else { let fcx = FnCtxt::new(&inh, param_env, impl_m_node_id); fcx.regionck_item(impl_m_node_id, impl_m_span, &[]); diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index 72ff9eb6f5b0d..9bee26a52c0d7 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -13,7 +13,7 @@ use check::regionck::RegionCtxt; use hir::def_id::DefId; use middle::free_region::FreeRegionMap; use rustc::infer::{self, InferOk}; -use rustc::middle::region::{self, RegionMaps}; +use rustc::middle::region; use rustc::ty::subst::{Subst, Substs}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::traits::{self, ObligationCause}; @@ -114,9 +114,9 @@ fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( return Err(ErrorReported); } - let region_maps = RegionMaps::new(); + let region_scope_tree = region::ScopeTree::default(); let free_regions = FreeRegionMap::new(); - infcx.resolve_regions_and_report_errors(drop_impl_did, ®ion_maps, &free_regions); + infcx.resolve_regions_and_report_errors(drop_impl_did, ®ion_scope_tree, &free_regions); Ok(()) }) } @@ -270,14 +270,14 @@ pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>( rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>, ty: ty::Ty<'tcx>, span: Span, - scope: region::CodeExtent) + scope: region::Scope) -> Result<(), ErrorReported> { debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}", ty, scope); - let parent_scope = match rcx.region_maps.opt_encl_scope(scope) { + let parent_scope = match rcx.region_scope_tree.opt_encl_scope(scope) { Some(parent_scope) => parent_scope, // If no enclosing scope, then it must be the root scope // which cannot be outlived. diff --git a/src/librustc_typeck/check/generator_interior.rs b/src/librustc_typeck/check/generator_interior.rs new file mode 100644 index 0000000000000..88219566792b6 --- /dev/null +++ b/src/librustc_typeck/check/generator_interior.rs @@ -0,0 +1,111 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This calculates the types which has storage which lives across a suspension point in a +//! generator from the perspective of typeck. The actual types used at runtime +//! is calculated in `rustc_mir::transform::generator` and may be a subset of the +//! types computed here. 
+ +use rustc::hir::def_id::DefId; +use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; +use rustc::hir::{self, Body, Pat, PatKind, Expr}; +use rustc::middle::region; +use rustc::ty::Ty; +use std::rc::Rc; +use super::FnCtxt; +use util::nodemap::FxHashMap; + +struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, + types: FxHashMap, usize>, + region_scope_tree: Rc, +} + +impl<'a, 'gcx, 'tcx> InteriorVisitor<'a, 'gcx, 'tcx> { + fn record(&mut self, ty: Ty<'tcx>, scope: Option, expr: Option<&'tcx Expr>) { + use syntax_pos::DUMMY_SP; + + let live_across_yield = scope.map_or(Some(DUMMY_SP), |s| { + self.region_scope_tree.yield_in_scope(s) + }); + + if let Some(span) = live_across_yield { + let ty = self.fcx.resolve_type_vars_if_possible(&ty); + + debug!("type in expr = {:?}, scope = {:?}, type = {:?}, span = {:?}", + expr, scope, ty, span); + + // Map the type to the number of types added before it + let entries = self.types.len(); + self.types.entry(&ty).or_insert(entries); + } else { + debug!("no type in expr = {:?}, span = {:?}", expr, expr.map(|e| e.span)); + } + } +} + +pub fn resolve_interior<'a, 'gcx, 'tcx>(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, + def_id: DefId, + body_id: hir::BodyId, + witness: Ty<'tcx>) { + let body = fcx.tcx.hir.body(body_id); + let mut visitor = InteriorVisitor { + fcx, + types: FxHashMap(), + region_scope_tree: fcx.tcx.region_scope_tree(def_id), + }; + intravisit::walk_body(&mut visitor, body); + + let mut types: Vec<_> = visitor.types.drain().collect(); + + // Sort types by insertion order + types.sort_by_key(|t| t.1); + + // Extract type components + let types: Vec<_> = types.into_iter().map(|t| t.0).collect(); + + let tuple = fcx.tcx.intern_tup(&types, false); + + debug!("Types in generator {:?}, span = {:?}", tuple, body.value.span); + + // Unify the tuple with the witness + match fcx.at(&fcx.misc(body.value.span), fcx.param_env).eq(witness, tuple) { + Ok(ok) => fcx.register_infer_ok_obligations(ok), + _ => bug!(), + } +} + +impl<'a, 'gcx, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'gcx, 'tcx> { + fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { + NestedVisitorMap::None + } + + fn visit_body(&mut self, _body: &'tcx Body) { + // Closures inside are not considered part of the generator interior + } + + fn visit_pat(&mut self, pat: &'tcx Pat) { + if let PatKind::Binding(..) 
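`resolve_interior` above deduplicates the recorded interior types while preserving the order in which they were first seen: each type is inserted with the map's size at insertion time, and the final list is sorted by that index before being packed into a tuple. A standalone sketch of that ordered-dedup trick over strings (the real code keys on interned `Ty` values):

```rust
use std::collections::HashMap;

/// Deduplicate while preserving first-seen order: record each item with the
/// number of entries that existed when it was inserted, then sort by that.
fn dedup_in_first_seen_order<T: std::hash::Hash + Eq + Clone>(items: &[T]) -> Vec<T> {
    let mut seen: HashMap<T, usize> = HashMap::new();
    for item in items {
        let next_index = seen.len();
        seen.entry(item.clone()).or_insert(next_index);
    }
    let mut ordered: Vec<(T, usize)> = seen.into_iter().collect();
    ordered.sort_by_key(|&(_, idx)| idx);
    ordered.into_iter().map(|(item, _)| item).collect()
}

fn main() {
    // Think of these as the types recorded while walking the generator body.
    let recorded = ["i32", "String", "i32", "Vec<u8>", "String"];
    assert_eq!(dedup_in_first_seen_order(&recorded), ["i32", "String", "Vec<u8>"]);
}
```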
= pat.node { + let scope = self.region_scope_tree.var_scope(pat.hir_id.local_id); + let ty = self.fcx.tables.borrow().pat_ty(pat); + self.record(ty, Some(scope), None); + } + + intravisit::walk_pat(self, pat); + } + + fn visit_expr(&mut self, expr: &'tcx Expr) { + let scope = self.region_scope_tree.temporary_scope(expr.hir_id.local_id); + let ty = self.fcx.tables.borrow().expr_ty_adjusted(expr); + self.record(ty, scope, Some(expr)); + + intravisit::walk_expr(self, expr); + } +} diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 96643ae72abad..6fee7e58633fc 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -313,6 +313,11 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (0, vec![tcx.mk_fn_ptr(fn_ty), mut_u8, mut_u8], tcx.types.i32) } + "align_offset" => { + let ptr_ty = tcx.mk_imm_ptr(tcx.mk_nil()); + (0, vec![ptr_ty, tcx.types.usize], tcx.types.usize) + }, + ref other => { struct_span_err!(tcx.sess, it.span, E0093, "unrecognized intrinsic function: `{}`", diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index db383b6305b4a..08ec3bf74a71d 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -232,24 +232,6 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { }) } - probe::ExtensionImplPick(impl_def_id) => { - // The method being invoked is the method as defined on the trait, - // so return the substitutions from the trait. Consider: - // - // impl Trait for Foo { ... } - // - // If we instantiate A, B, and C with $A, $B, and $C - // respectively, then we want to return the type - // parameters from the trait ([$A,$B]), not those from - // the impl ([$A,$B,$C]) not the receiver type ([$C]). - let impl_polytype = self.impl_self_ty(self.span, impl_def_id); - let impl_trait_ref = - self.instantiate_type_scheme(self.span, - impl_polytype.substs, - &self.tcx.impl_trait_ref(impl_def_id).unwrap()); - impl_trait_ref.substs - } - probe::TraitPick => { let trait_def_id = pick.item.container.id(); diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 819f48a1b57e8..31ceed5b965bf 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -52,10 +52,6 @@ pub enum MethodError<'tcx> { // Multiple methods might apply. Ambiguity(Vec), - // Using a `Fn`/`FnMut`/etc method on a raw closure type before we have inferred its kind. - ClosureAmbiguity(// DefId of fn trait - DefId), - // Found an applicable method, but it is not visible. The second argument contains a list of // not-in-scope traits which may work. PrivateMatch(Def, Vec), @@ -63,6 +59,9 @@ pub enum MethodError<'tcx> { // Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have // forgotten to import a trait. IllegalSizedBound(Vec), + + // Found a match, but the return type is wrong + BadReturnType, } // Contains a list of static methods that may apply, a list of unsatisfied trait predicates which @@ -113,9 +112,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Ok(..) 
=> true, Err(NoMatch(..)) => false, Err(Ambiguity(..)) => true, - Err(ClosureAmbiguity(..)) => true, Err(PrivateMatch(..)) => allow_private, Err(IllegalSizedBound(..)) => true, + Err(BadReturnType) => { + bug!("no return type expectations but got BadReturnType") + } + } } diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 096b778cab2d3..ba74c902f55e0 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -18,7 +18,7 @@ use hir::def_id::DefId; use hir::def::Def; use rustc::ty::subst::{Subst, Substs}; use rustc::traits::{self, ObligationCause}; -use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable}; +use rustc::ty::{self, Ty, ToPolyTraitRef, ToPredicate, TraitRef, TypeFoldable}; use rustc::infer::type_variable::TypeVariableOrigin; use rustc::util::nodemap::FxHashSet; use rustc::infer::{self, InferOk}; @@ -32,15 +32,6 @@ use std::rc::Rc; use self::CandidateKind::*; pub use self::PickKind::*; -pub enum LookingFor<'tcx> { - /// looking for methods with the given name; this is the normal case - MethodName(ast::Name), - - /// looking for methods that return a given type; this is used to - /// assemble suggestions - ReturnType(Ty<'tcx>), -} - /// Boolean flag used to indicate if this search is for a suggestion /// or not. If true, we can allow ambiguity and so forth. pub struct IsSuggestion(pub bool); @@ -49,9 +40,9 @@ struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, mode: Mode, - looking_for: LookingFor<'tcx>, + method_name: Option, + return_type: Option>, steps: Rc>>, - opt_simplified_steps: Option>, inherent_candidates: Vec>, extension_candidates: Vec>, impl_dups: FxHashSet, @@ -85,6 +76,7 @@ struct CandidateStep<'tcx> { #[derive(Debug)] struct Candidate<'tcx> { xform_self_ty: Ty<'tcx>, + xform_ret_ty: Option>, item: ty::AssociatedItem, kind: CandidateKind<'tcx>, import_id: Option, @@ -95,17 +87,19 @@ enum CandidateKind<'tcx> { InherentImplCandidate(&'tcx Substs<'tcx>, // Normalize obligations Vec>), - ExtensionImplCandidate(// Impl - DefId, - &'tcx Substs<'tcx>, - // Normalize obligations - Vec>), ObjectCandidate, - TraitCandidate, + TraitCandidate(ty::TraitRef<'tcx>), WhereClauseCandidate(// Trait ty::PolyTraitRef<'tcx>), } +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +enum ProbeResult { + NoMatch, + BadReturnType, + Match, +} + #[derive(Debug, PartialEq, Eq, Clone)] pub struct Pick<'tcx> { pub item: ty::AssociatedItem, @@ -133,8 +127,6 @@ pub struct Pick<'tcx> { #[derive(Clone, Debug, PartialEq, Eq)] pub enum PickKind<'tcx> { InherentImplPick, - ExtensionImplPick(// Impl - DefId), ObjectPick, TraitPick, WhereClausePick(// Trait @@ -183,19 +175,19 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { return_type, scope_expr_id); let method_names = - self.probe_op(span, mode, LookingFor::ReturnType(return_type), IsSuggestion(true), + self.probe_op(span, mode, None, Some(return_type), IsSuggestion(true), self_ty, scope_expr_id, ProbeScope::TraitsInScope, |probe_cx| Ok(probe_cx.candidate_method_names())) .unwrap_or(vec![]); - method_names - .iter() - .flat_map(|&method_name| { - match self.probe_for_name(span, mode, method_name, IsSuggestion(true), self_ty, - scope_expr_id, ProbeScope::TraitsInScope) { - Ok(pick) => Some(pick.item), - Err(_) => None, - } - }) + method_names + .iter() + .flat_map(|&method_name| { + self.probe_op( + span, mode, Some(method_name), Some(return_type), + IsSuggestion(true), self_ty, scope_expr_id, + 
ProbeScope::TraitsInScope, |probe_cx| probe_cx.pick() + ).ok().map(|pick| pick.item) + }) .collect() } @@ -214,7 +206,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { scope_expr_id); self.probe_op(span, mode, - LookingFor::MethodName(item_name), + Some(item_name), + None, is_suggestion, self_ty, scope_expr_id, @@ -225,7 +218,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn probe_op(&'a self, span: Span, mode: Mode, - looking_for: LookingFor<'tcx>, + method_name: Option, + return_type: Option>, is_suggestion: IsSuggestion, self_ty: Ty<'tcx>, scope_expr_id: ast::NodeId, @@ -259,24 +253,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }] }; - // Create a list of simplified self types, if we can. - let mut simplified_steps = Vec::new(); - for step in &steps { - match ty::fast_reject::simplify_type(self.tcx, step.self_ty, true) { - None => { - break; - } - Some(simplified_type) => { - simplified_steps.push(simplified_type); - } - } - } - let opt_simplified_steps = if simplified_steps.len() < steps.len() { - None // failed to convert at least one of the steps - } else { - Some(simplified_steps) - }; - debug!("ProbeContext: steps for self_ty={:?} are {:?}", self_ty, steps); @@ -285,8 +261,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // that we create during the probe process are removed later self.probe(|_| { let mut probe_cx = - ProbeContext::new(self, span, mode, looking_for, - steps, opt_simplified_steps); + ProbeContext::new(self, span, mode, method_name, return_type, steps); probe_cx.assemble_inherent_candidates(); match scope { @@ -356,20 +331,20 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, mode: Mode, - looking_for: LookingFor<'tcx>, - steps: Vec>, - opt_simplified_steps: Option>) + method_name: Option, + return_type: Option>, + steps: Vec>) -> ProbeContext<'a, 'gcx, 'tcx> { ProbeContext { fcx, span, mode, - looking_for, + method_name, + return_type, inherent_candidates: Vec::new(), extension_candidates: Vec::new(), impl_dups: FxHashSet(), steps: Rc::new(steps), - opt_simplified_steps, static_candidates: Vec::new(), private_candidate: None, unsatisfied_predicates: Vec::new(), @@ -387,33 +362,25 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { /////////////////////////////////////////////////////////////////////////// // CANDIDATE ASSEMBLY - fn push_inherent_candidate(&mut self, xform_self_ty: Ty<'tcx>, item: ty::AssociatedItem, - kind: CandidateKind<'tcx>, import_id: Option) { - let is_accessible = if let LookingFor::MethodName(name) = self.looking_for { - let def_scope = self.tcx.adjust(name, item.container.id(), self.body_id).1; - item.vis.is_accessible_from(def_scope, self.tcx) - } else { - true - }; - if is_accessible { - self.inherent_candidates.push(Candidate { xform_self_ty, item, kind, import_id }); - } else if self.private_candidate.is_none() { - self.private_candidate = Some(item.def()); - } - } - - fn push_extension_candidate(&mut self, xform_self_ty: Ty<'tcx>, item: ty::AssociatedItem, - kind: CandidateKind<'tcx>, import_id: Option) { - let is_accessible = if let LookingFor::MethodName(name) = self.looking_for { + fn push_candidate(&mut self, + candidate: Candidate<'tcx>, + is_inherent: bool) + { + let is_accessible = if let Some(name) = self.method_name { + let item = candidate.item; let def_scope = self.tcx.adjust(name, item.container.id(), self.body_id).1; item.vis.is_accessible_from(def_scope, self.tcx) } else { true }; if is_accessible { - self.extension_candidates.push(Candidate { 
xform_self_ty, item, kind, import_id }); + if is_inherent { + self.inherent_candidates.push(candidate); + } else { + self.extension_candidates.push(candidate); + } } else if self.private_candidate.is_none() { - self.private_candidate = Some(item.def()); + self.private_candidate = Some(candidate.item.def()); } } @@ -551,19 +518,22 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { let impl_ty = impl_ty.subst(self.tcx, impl_substs); // Determine the receiver type that the method itself expects. - let xform_self_ty = self.xform_self_ty(&item, impl_ty, impl_substs); + let xform_tys = self.xform_self_ty(&item, impl_ty, impl_substs); // We can't use normalize_associated_types_in as it will pollute the // fcx's fulfillment context after this probe is over. let cause = traits::ObligationCause::misc(self.span, self.body_id); let selcx = &mut traits::SelectionContext::new(self.fcx); - let traits::Normalized { value: xform_self_ty, obligations } = - traits::normalize(selcx, self.param_env, cause, &xform_self_ty); - debug!("assemble_inherent_impl_probe: xform_self_ty = {:?}", - xform_self_ty); - - self.push_inherent_candidate(xform_self_ty, item, - InherentImplCandidate(impl_substs, obligations), None); + let traits::Normalized { value: (xform_self_ty, xform_ret_ty), obligations } = + traits::normalize(selcx, self.param_env, cause, &xform_tys); + debug!("assemble_inherent_impl_probe: xform_self_ty = {:?}/{:?}", + xform_self_ty, xform_ret_ty); + + self.push_candidate(Candidate { + xform_self_ty, xform_ret_ty, item, + kind: InherentImplCandidate(impl_substs, obligations), + import_id: None + }, true); } } @@ -584,10 +554,13 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.elaborate_bounds(&[trait_ref], |this, new_trait_ref, item| { let new_trait_ref = this.erase_late_bound_regions(&new_trait_ref); - let xform_self_ty = + let (xform_self_ty, xform_ret_ty) = this.xform_self_ty(&item, new_trait_ref.self_ty(), new_trait_ref.substs); - - this.push_inherent_candidate(xform_self_ty, item, ObjectCandidate, None); + this.push_candidate(Candidate { + xform_self_ty, xform_ret_ty, item, + kind: ObjectCandidate, + import_id: None + }, true); }); } @@ -624,7 +597,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.elaborate_bounds(&bounds, |this, poly_trait_ref, item| { let trait_ref = this.erase_late_bound_regions(&poly_trait_ref); - let xform_self_ty = this.xform_self_ty(&item, trait_ref.self_ty(), trait_ref.substs); + let (xform_self_ty, xform_ret_ty) = + this.xform_self_ty(&item, trait_ref.self_ty(), trait_ref.substs); // Because this trait derives from a where-clause, it // should not contain any inference variables or other @@ -633,8 +607,11 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // `WhereClausePick`. 
assert!(!trait_ref.substs.needs_infer()); - this.push_inherent_candidate(xform_self_ty, item, - WhereClauseCandidate(poly_trait_ref), None); + this.push_candidate(Candidate { + xform_self_ty, xform_ret_ty, item, + kind: WhereClauseCandidate(poly_trait_ref), + import_id: None + }, true); }); } @@ -662,10 +639,14 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_extension_candidates_for_traits_in_scope(&mut self, expr_id: ast::NodeId) -> Result<(), MethodError<'tcx>> { + if expr_id == ast::DUMMY_NODE_ID { + return Ok(()) + } let mut duplicates = FxHashSet(); - let opt_applicable_traits = self.tcx.trait_map.get(&expr_id); + let expr_hir_id = self.tcx.hir.node_to_hir_id(expr_id); + let opt_applicable_traits = self.tcx.in_scope_traits(expr_hir_id); if let Some(applicable_traits) = opt_applicable_traits { - for trait_candidate in applicable_traits { + for trait_candidate in applicable_traits.iter() { let trait_did = trait_candidate.def_id; if duplicates.insert(trait_did) { let import_id = trait_candidate.import_id; @@ -687,17 +668,27 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { Ok(()) } - pub fn matches_return_type(&self, method: &ty::AssociatedItem, - expected: ty::Ty<'tcx>) -> bool { + pub fn matches_return_type(&self, + method: &ty::AssociatedItem, + self_ty: Option>, + expected: Ty<'tcx>) -> bool { match method.def() { Def::Method(def_id) => { let fty = self.tcx.fn_sig(def_id); self.probe(|_| { let substs = self.fresh_substs_for_item(self.span, method.def_id); - let output = fty.output().subst(self.tcx, substs); - let (output, _) = self.replace_late_bound_regions_with_fresh_var( - self.span, infer::FnCall, &output); - self.can_sub(self.param_env, output, expected).is_ok() + let fty = fty.subst(self.tcx, substs); + let (fty, _) = self.replace_late_bound_regions_with_fresh_var( + self.span, infer::FnCall, &fty); + + if let Some(self_ty) = self_ty { + if let Err(_) = self.at(&ObligationCause::dummy(), self.param_env) + .sup(fty.inputs()[0], self_ty) + { + return false + } + } + self.can_sub(self.param_env, fty.output(), expected).is_ok() }) } _ => false, @@ -710,6 +701,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { -> Result<(), MethodError<'tcx>> { debug!("assemble_extension_candidates_for_trait(trait_def_id={:?})", trait_def_id); + let trait_substs = self.fresh_item_substs(trait_def_id); + let trait_ref = ty::TraitRef::new(trait_def_id, trait_substs); for item in self.impl_or_trait_item(trait_def_id) { // Check whether `trait_def_id` defines a method with suitable name: @@ -719,282 +712,31 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { continue; } - self.assemble_builtin_candidates(import_id, trait_def_id, item.clone()); - - self.assemble_extension_candidates_for_trait_impls(import_id, trait_def_id, - item.clone()); - - self.assemble_closure_candidates(import_id, trait_def_id, item.clone())?; - - self.assemble_projection_candidates(import_id, trait_def_id, item.clone()); - - self.assemble_where_clause_candidates(import_id, trait_def_id, item.clone()); - } - - Ok(()) - } - - fn assemble_builtin_candidates(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) { - if Some(trait_def_id) == self.tcx.lang_items.clone_trait() { - self.assemble_builtin_clone_candidates(import_id, trait_def_id, item); - } - } - - fn assemble_builtin_clone_candidates(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) { - for step in Rc::clone(&self.steps).iter() { - match step.self_ty.sty { - 
ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) | - ty::TyUint(_) | ty::TyInt(_) | ty::TyBool | ty::TyFloat(_) | - ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyChar | - ty::TyRawPtr(..) | ty::TyError | ty::TyNever | - ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) | - ty::TyArray(..) | ty::TyTuple(..) => { - () - } - - _ => continue, - }; - - let substs = Substs::for_item(self.tcx, - trait_def_id, - |def, _| self.region_var_for_def(self.span, def), - |def, substs| { - if def.index == 0 { - step.self_ty - } else { - self.type_var_for_def(self.span, def, substs) - } - }); - - let xform_self_ty = self.xform_self_ty(&item, step.self_ty, substs); - self.push_inherent_candidate(xform_self_ty, item, TraitCandidate, import_id); - } - } - - fn assemble_extension_candidates_for_trait_impls(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) { - // FIXME(arielb1): can we use for_each_relevant_impl here? - self.tcx.for_each_impl(trait_def_id, |impl_def_id| { - debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={:?} \ - impl_def_id={:?}", - trait_def_id, - impl_def_id); - - if !self.impl_can_possibly_match(impl_def_id) { - return; - } - - let (_, impl_substs) = self.impl_ty_and_substs(impl_def_id); - - debug!("impl_substs={:?}", impl_substs); - - let impl_trait_ref = self.tcx.impl_trait_ref(impl_def_id) - .unwrap() // we know this is a trait impl - .subst(self.tcx, impl_substs); - - debug!("impl_trait_ref={:?}", impl_trait_ref); - - // Determine the receiver type that the method itself expects. - let xform_self_ty = - self.xform_self_ty(&item, impl_trait_ref.self_ty(), impl_trait_ref.substs); - - // Normalize the receiver. We can't use normalize_associated_types_in - // as it will pollute the fcx's fulfillment context after this probe - // is over. - let cause = traits::ObligationCause::misc(self.span, self.body_id); - let selcx = &mut traits::SelectionContext::new(self.fcx); - let traits::Normalized { value: xform_self_ty, obligations } = - traits::normalize(selcx, self.param_env, cause, &xform_self_ty); - - debug!("xform_self_ty={:?}", xform_self_ty); - - self.push_extension_candidate(xform_self_ty, item, - ExtensionImplCandidate(impl_def_id, impl_substs, obligations), import_id); - }); - } - - fn impl_can_possibly_match(&self, impl_def_id: DefId) -> bool { - let simplified_steps = match self.opt_simplified_steps { - Some(ref simplified_steps) => simplified_steps, - None => { - return true; - } - }; - - let impl_type = self.tcx.type_of(impl_def_id); - let impl_simplified_type = - match ty::fast_reject::simplify_type(self.tcx, impl_type, false) { - Some(simplified_type) => simplified_type, - None => { - return true; - } - }; - - simplified_steps.contains(&impl_simplified_type) - } - - fn assemble_closure_candidates(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) - -> Result<(), MethodError<'tcx>> { - // Check if this is one of the Fn,FnMut,FnOnce traits. - let tcx = self.tcx; - let kind = if Some(trait_def_id) == tcx.lang_items.fn_trait() { - ty::ClosureKind::Fn - } else if Some(trait_def_id) == tcx.lang_items.fn_mut_trait() { - ty::ClosureKind::FnMut - } else if Some(trait_def_id) == tcx.lang_items.fn_once_trait() { - ty::ClosureKind::FnOnce - } else { - return Ok(()); - }; - - // Check if there is an unboxed-closure self-type in the list of receivers. - // If so, add "synthetic impls". 
- let steps = self.steps.clone(); - for step in steps.iter() { - let closure_id = match step.self_ty.sty { - ty::TyClosure(def_id, _) => { - if let Some(id) = self.tcx.hir.as_local_node_id(def_id) { - self.tcx.hir.node_to_hir_id(id) - } else { - continue; - } - } - _ => continue, - }; - - let closure_kind = { - match self.tables.borrow().closure_kinds().get(closure_id) { - Some(&(k, _)) => k, - None => { - return Err(MethodError::ClosureAmbiguity(trait_def_id)); - } - } - }; - - // this closure doesn't implement the right kind of `Fn` trait - if !closure_kind.extends(kind) { - continue; - } - - // create some substitutions for the argument/return type; - // for the purposes of our method lookup, we only take - // receiver type into account, so we can just substitute - // fresh types here to use during substitution and subtyping. - let substs = Substs::for_item(self.tcx, - trait_def_id, - |def, _| self.region_var_for_def(self.span, def), - |def, substs| { - if def.index == 0 { - step.self_ty - } else { - self.type_var_for_def(self.span, def, substs) - } - }); - - let xform_self_ty = self.xform_self_ty(&item, step.self_ty, substs); - self.push_inherent_candidate(xform_self_ty, item, TraitCandidate, import_id); + let (xform_self_ty, xform_ret_ty) = + self.xform_self_ty(&item, trait_ref.self_ty(), trait_substs); + self.push_candidate(Candidate { + xform_self_ty, xform_ret_ty, item, import_id, + kind: TraitCandidate(trait_ref), + }, false); } - Ok(()) } - fn assemble_projection_candidates(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) { - debug!("assemble_projection_candidates(\ - trait_def_id={:?}, \ - item={:?})", - trait_def_id, - item); - - for step in Rc::clone(&self.steps).iter() { - debug!("assemble_projection_candidates: step={:?}", step); - - let (def_id, substs) = match step.self_ty.sty { - ty::TyProjection(ref data) => { - let trait_ref = data.trait_ref(self.tcx); - (trait_ref.def_id, trait_ref.substs) - }, - ty::TyAnon(def_id, substs) => (def_id, substs), - _ => continue, - }; - - debug!("assemble_projection_candidates: def_id={:?} substs={:?}", - def_id, - substs); - - let trait_predicates = self.tcx.predicates_of(def_id); - let bounds = trait_predicates.instantiate(self.tcx, substs); - let predicates = bounds.predicates; - debug!("assemble_projection_candidates: predicates={:?}", - predicates); - for poly_bound in traits::elaborate_predicates(self.tcx, predicates) - .filter_map(|p| p.to_opt_poly_trait_ref()) - .filter(|b| b.def_id() == trait_def_id) { - let bound = self.erase_late_bound_regions(&poly_bound); - - debug!("assemble_projection_candidates: def_id={:?} substs={:?} bound={:?}", - def_id, - substs, - bound); - - if self.can_eq(self.param_env, step.self_ty, bound.self_ty()).is_ok() { - let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs); - - debug!("assemble_projection_candidates: bound={:?} xform_self_ty={:?}", - bound, - xform_self_ty); - - self.push_extension_candidate(xform_self_ty, item, TraitCandidate, import_id); - } - } - } - } - - fn assemble_where_clause_candidates(&mut self, - import_id: Option, - trait_def_id: DefId, - item: ty::AssociatedItem) { - debug!("assemble_where_clause_candidates(trait_def_id={:?})", - trait_def_id); - - let caller_predicates = self.param_env.caller_bounds.to_vec(); - for poly_bound in traits::elaborate_predicates(self.tcx, caller_predicates) - .filter_map(|p| p.to_opt_poly_trait_ref()) - .filter(|b| b.def_id() == trait_def_id) { - let bound = 
self.erase_late_bound_regions(&poly_bound); - let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs); - - debug!("assemble_where_clause_candidates: bound={:?} xform_self_ty={:?}", - bound, - xform_self_ty); - - self.push_extension_candidate(xform_self_ty, item, - WhereClauseCandidate(poly_bound), import_id); - } - } - fn candidate_method_names(&self) -> Vec { let mut set = FxHashSet(); - let mut names: Vec<_> = - self.inherent_candidates - .iter() - .chain(&self.extension_candidates) - .map(|candidate| candidate.item.name) - .filter(|&name| set.insert(name)) - .collect(); + let mut names: Vec<_> = self.inherent_candidates + .iter() + .chain(&self.extension_candidates) + .filter(|candidate| { + if let Some(return_ty) = self.return_type { + self.matches_return_type(&candidate.item, None, return_ty) + } else { + true + } + }) + .map(|candidate| candidate.item.name) + .filter(|&name| set.insert(name)) + .collect(); // sort them by the name so we have a stable result names.sort_by_key(|n| n.as_str()); @@ -1005,10 +747,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // THE ACTUAL SEARCH fn pick(mut self) -> PickResult<'tcx> { - assert!(match self.looking_for { - LookingFor::MethodName(_) => true, - LookingFor::ReturnType(_) => false, - }); + assert!(self.method_name.is_some()); if let Some(r) = self.pick_core() { return r; @@ -1051,10 +790,6 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { assert!(others.is_empty()); vec![] } - Some(Err(MethodError::ClosureAmbiguity(..))) => { - // this error only occurs when assembling candidates - span_bug!(span, "encountered ClosureAmbiguity from pick_core"); - } _ => vec![], }; @@ -1072,21 +807,17 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { let steps = self.steps.clone(); // find the first step that works - steps.iter().filter_map(|step| self.pick_step(step)).next() - } - - fn pick_step(&mut self, step: &CandidateStep<'tcx>) -> Option> { - debug!("pick_step: step={:?}", step); - - if step.self_ty.references_error() { - return None; - } - - if let Some(result) = self.pick_by_value_method(step) { - return Some(result); - } - - self.pick_autorefd_method(step) + steps + .iter() + .filter(|step| { + debug!("pick_core: step={:?}", step); + !step.self_ty.references_error() + }).flat_map(|step| { + self.pick_by_value_method(step).or_else(|| { + self.pick_autorefd_method(step, hir::MutImmutable).or_else(|| { + self.pick_autorefd_method(step, hir::MutMutable) + })})}) + .next() } fn pick_by_value_method(&mut self, step: &CandidateStep<'tcx>) -> Option> { @@ -1117,36 +848,30 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { }) } - fn pick_autorefd_method(&mut self, step: &CandidateStep<'tcx>) -> Option> { + fn pick_autorefd_method(&mut self, step: &CandidateStep<'tcx>, mutbl: hir::Mutability) + -> Option> { let tcx = self.tcx; // In general, during probing we erase regions. See // `impl_self_ty()` for an explanation. 
let region = tcx.types.re_erased; - // Search through mutabilities in order to find one where pick works: - [hir::MutImmutable, hir::MutMutable] - .iter() - .filter_map(|&m| { - let autoref_ty = tcx.mk_ref(region, - ty::TypeAndMut { - ty: step.self_ty, - mutbl: m, - }); - self.pick_method(autoref_ty).map(|r| { - r.map(|mut pick| { - pick.autoderefs = step.autoderefs; - pick.autoref = Some(m); - pick.unsize = if step.unsize { - Some(step.self_ty) - } else { - None - }; - pick - }) - }) + let autoref_ty = tcx.mk_ref(region, + ty::TypeAndMut { + ty: step.self_ty, mutbl + }); + self.pick_method(autoref_ty).map(|r| { + r.map(|mut pick| { + pick.autoderefs = step.autoderefs; + pick.autoref = Some(mutbl); + pick.unsize = if step.unsize { + Some(step.self_ty) + } else { + None + }; + pick }) - .nth(0) + }) } fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option> { @@ -1177,7 +902,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { possibly_unsatisfied_predicates: &mut Vec>) -> Option> { let mut applicable_candidates: Vec<_> = probes.iter() - .filter(|&probe| self.consider_probe(self_ty, probe, possibly_unsatisfied_predicates)) + .map(|probe| { + (probe, self.consider_probe(self_ty, probe, possibly_unsatisfied_predicates)) + }) + .filter(|&(_, status)| status != ProbeResult::NoMatch) .collect(); debug!("applicable_candidates: {:?}", applicable_candidates); @@ -1192,18 +920,60 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } if applicable_candidates.len() > 1 { - let sources = probes.iter().map(|p| p.to_source()).collect(); + let sources = probes.iter() + .map(|p| self.candidate_source(p, self_ty)) + .collect(); return Some(Err(MethodError::Ambiguity(sources))); } - applicable_candidates.pop().map(|probe| Ok(probe.to_unadjusted_pick())) + applicable_candidates.pop().map(|(probe, status)| { + if status == ProbeResult::Match { + Ok(probe.to_unadjusted_pick()) + } else { + Err(MethodError::BadReturnType) + } + }) + } + + fn select_trait_candidate(&self, trait_ref: ty::TraitRef<'tcx>) + -> traits::SelectionResult<'tcx, traits::Selection<'tcx>> + { + let cause = traits::ObligationCause::misc(self.span, self.body_id); + let predicate = + trait_ref.to_poly_trait_ref().to_poly_trait_predicate(); + let obligation = traits::Obligation::new(cause, self.param_env, predicate); + traits::SelectionContext::new(self).select(&obligation) + } + + fn candidate_source(&self, candidate: &Candidate<'tcx>, self_ty: Ty<'tcx>) + -> CandidateSource + { + match candidate.kind { + InherentImplCandidate(..) => ImplSource(candidate.item.container.id()), + ObjectCandidate | + WhereClauseCandidate(_) => TraitSource(candidate.item.container.id()), + TraitCandidate(trait_ref) => self.probe(|_| { + let _ = self.at(&ObligationCause::dummy(), self.param_env) + .sup(candidate.xform_self_ty, self_ty); + match self.select_trait_candidate(trait_ref) { + Ok(Some(traits::Vtable::VtableImpl(ref impl_data))) => { + // If only a single impl matches, make the error message point + // to that impl. 
+ ImplSource(impl_data.impl_def_id) + } + _ => { + TraitSource(candidate.item.container.id()) + } + } + }) + } } fn consider_probe(&self, self_ty: Ty<'tcx>, probe: &Candidate<'tcx>, possibly_unsatisfied_predicates: &mut Vec>) - -> bool { + -> ProbeResult { debug!("consider_probe: self_ty={:?} probe={:?}", self_ty, probe); self.probe(|_| { @@ -1213,60 +983,102 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { Ok(InferOk { obligations, value: () }) => obligations, Err(_) => { debug!("--> cannot relate self-types"); - return false; + return ProbeResult::NoMatch; } }; + let mut result = ProbeResult::Match; + let selcx = &mut traits::SelectionContext::new(self); + let cause = traits::ObligationCause::misc(self.span, self.body_id); + // If so, impls may carry other conditions (e.g., where // clauses) that must be considered. Make sure that those // match as well (or at least may match, sometimes we // don't have enough information to fully evaluate). - let (impl_def_id, substs, ref_obligations) = match probe.kind { + let candidate_obligations : Vec<_> = match probe.kind { InherentImplCandidate(ref substs, ref ref_obligations) => { - (probe.item.container.id(), substs, ref_obligations) - } - - ExtensionImplCandidate(impl_def_id, ref substs, ref ref_obligations) => { - (impl_def_id, substs, ref_obligations) + // Check whether the impl imposes obligations we have to worry about. + let impl_def_id = probe.item.container.id(); + let impl_bounds = self.tcx.predicates_of(impl_def_id); + let impl_bounds = impl_bounds.instantiate(self.tcx, substs); + let traits::Normalized { value: impl_bounds, obligations: norm_obligations } = + traits::normalize(selcx, self.param_env, cause.clone(), &impl_bounds); + + // Convert the bounds into obligations. + let impl_obligations = traits::predicates_for_generics( + cause.clone(), self.param_env, &impl_bounds); + + debug!("impl_obligations={:?}", impl_obligations); + impl_obligations.into_iter() + .chain(norm_obligations.into_iter()) + .chain(ref_obligations.iter().cloned()) + .collect() } ObjectCandidate | - TraitCandidate | WhereClauseCandidate(..) => { // These have no additional conditions to check. - return true; + vec![] } - }; - - let selcx = &mut traits::SelectionContext::new(self); - let cause = traits::ObligationCause::misc(self.span, self.body_id); - // Check whether the impl imposes obligations we have to worry about. - let impl_bounds = self.tcx.predicates_of(impl_def_id); - let impl_bounds = impl_bounds.instantiate(self.tcx, substs); - let traits::Normalized { value: impl_bounds, obligations: norm_obligations } = - traits::normalize(selcx, self.param_env, cause.clone(), &impl_bounds); + TraitCandidate(trait_ref) => { + let predicate = trait_ref.to_predicate(); + let obligation = + traits::Obligation::new(cause.clone(), self.param_env, predicate); + if !selcx.evaluate_obligation(&obligation) { + if self.probe(|_| self.select_trait_candidate(trait_ref).is_err()) { + // This candidate's primary obligation doesn't even + // select - don't bother registering anything in + // `potentially_unsatisfied_predicates`. + return ProbeResult::NoMatch; + } else { + // Some nested subobligation of this predicate + // failed. + // + // FIXME: try to find the exact nested subobligation + // and point at it rather than reporting the entire + // trait-ref? + result = ProbeResult::NoMatch; + let trait_ref = self.resolve_type_vars_if_possible(&trait_ref); + possibly_unsatisfied_predicates.push(trait_ref); + } + } + vec![] + } + }; - // Convert the bounds into obligations. 
- let obligations = traits::predicates_for_generics(cause.clone(), - self.param_env, - &impl_bounds); - debug!("impl_obligations={:?}", obligations); + debug!("consider_probe - candidate_obligations={:?} sub_obligations={:?}", + candidate_obligations, sub_obligations); // Evaluate those obligations to see if they might possibly hold. - let mut all_true = true; - for o in obligations.iter() - .chain(sub_obligations.iter()) - .chain(norm_obligations.iter()) - .chain(ref_obligations.iter()) { - if !selcx.evaluate_obligation(o) { - all_true = false; + for o in candidate_obligations.into_iter().chain(sub_obligations) { + let o = self.resolve_type_vars_if_possible(&o); + if !selcx.evaluate_obligation(&o) { + result = ProbeResult::NoMatch; if let &ty::Predicate::Trait(ref pred) = &o.predicate { possibly_unsatisfied_predicates.push(pred.0.trait_ref); } } } - all_true + + if let ProbeResult::Match = result { + if let (Some(return_ty), Some(xform_ret_ty)) = + (self.return_type, probe.xform_ret_ty) + { + let xform_ret_ty = self.resolve_type_vars_if_possible(&xform_ret_ty); + debug!("comparing return_ty {:?} with xform ret ty {:?}", + return_ty, + probe.xform_ret_ty); + if self.at(&ObligationCause::dummy(), self.param_env) + .sup(return_ty, xform_ret_ty) + .is_err() + { + return ProbeResult::BadReturnType; + } + } + } + + result }) } @@ -1287,22 +1099,25 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { /// /// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we /// use, so it's ok to just commit to "using the method from the trait Foo". - fn collapse_candidates_to_trait_pick(&self, probes: &[&Candidate<'tcx>]) -> Option> { + fn collapse_candidates_to_trait_pick(&self, probes: &[(&Candidate<'tcx>, ProbeResult)]) + -> Option> + { // Do all probes correspond to the same trait? - let container = probes[0].item.container; + let container = probes[0].0.item.container; match container { ty::TraitContainer(_) => {} ty::ImplContainer(_) => return None, } - if probes[1..].iter().any(|p| p.item.container != container) { + if probes[1..].iter().any(|&(p, _)| p.item.container != container) { return None; } + // FIXME: check the return type here somehow. // If so, just use this trait and call it a day. 
Some(Pick { - item: probes[0].item.clone(), + item: probes[0].0.item.clone(), kind: TraitPick, - import_id: probes[0].import_id, + import_id: probes[0].0.import_id, autoderefs: 0, autoref: None, unsize: None, @@ -1340,23 +1155,23 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { item: &ty::AssociatedItem, impl_ty: Ty<'tcx>, substs: &Substs<'tcx>) - -> Ty<'tcx> { + -> (Ty<'tcx>, Option>) { if item.kind == ty::AssociatedKind::Method && self.mode == Mode::MethodCall { - self.xform_method_self_ty(item.def_id, impl_ty, substs) + let sig = self.xform_method_sig(item.def_id, substs); + (sig.inputs()[0], Some(sig.output())) } else { - impl_ty + (impl_ty, None) } } - fn xform_method_self_ty(&self, - method: DefId, - impl_ty: Ty<'tcx>, - substs: &Substs<'tcx>) - -> Ty<'tcx> { - let self_ty = self.tcx.fn_sig(method).input(0); - debug!("xform_self_ty(impl_ty={:?}, self_ty={:?}, substs={:?})", - impl_ty, - self_ty, + fn xform_method_sig(&self, + method: DefId, + substs: &Substs<'tcx>) + -> ty::FnSig<'tcx> + { + let fn_sig = self.tcx.fn_sig(method); + debug!("xform_self_ty(fn_sig={:?}, substs={:?})", + fn_sig, substs); assert!(!substs.has_escaping_regions()); @@ -1372,10 +1187,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // Erase any late-bound regions from the method and substitute // in the values from the substitution. - let xform_self_ty = self.erase_late_bound_regions(&self_ty); + let xform_fn_sig = self.erase_late_bound_regions(&fn_sig); if generics.types.is_empty() && generics.regions.is_empty() { - xform_self_ty.subst(self.tcx, substs) + xform_fn_sig.subst(self.tcx, substs) } else { let substs = Substs::for_item(self.tcx, method, |def, _| { let i = def.index as usize; @@ -1394,22 +1209,22 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.type_var_for_def(self.span, def, cur_substs) } }); - xform_self_ty.subst(self.tcx, substs) + xform_fn_sig.subst(self.tcx, substs) } } /// Get the type of an impl and generate substitutions with placeholders. fn impl_ty_and_substs(&self, impl_def_id: DefId) -> (Ty<'tcx>, &'tcx Substs<'tcx>) { - let impl_ty = self.tcx.type_of(impl_def_id); - - let substs = Substs::for_item(self.tcx, - impl_def_id, - |_, _| self.tcx.types.re_erased, - |_, _| self.next_ty_var( - TypeVariableOrigin::SubstitutionPlaceholder( - self.tcx.def_span(impl_def_id)))); + (self.tcx.type_of(impl_def_id), self.fresh_item_substs(impl_def_id)) + } - (impl_ty, substs) + fn fresh_item_substs(&self, def_id: DefId) -> &'tcx Substs<'tcx> { + Substs::for_item(self.tcx, + def_id, + |_, _| self.tcx.types.re_erased, + |_, _| self.next_ty_var( + TypeVariableOrigin::SubstitutionPlaceholder( + self.tcx.def_span(def_id)))) } /// Replace late-bound-regions bound by `value` with `'static` using @@ -1438,17 +1253,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { /// Find the method with the appropriate name (or return type, as the case may be). 
fn impl_or_trait_item(&self, def_id: DefId) -> Vec { - match self.looking_for { - LookingFor::MethodName(name) => { - self.fcx.associated_item(def_id, name).map_or(Vec::new(), |x| vec![x]) - } - LookingFor::ReturnType(return_ty) => { - self.tcx - .associated_items(def_id) - .map(|did| self.tcx.associated_item(did.def_id)) - .filter(|m| self.matches_return_type(m, return_ty)) - .collect() - } + if let Some(name) = self.method_name { + self.fcx.associated_item(def_id, name).map_or(Vec::new(), |x| vec![x]) + } else { + self.tcx.associated_items(def_id).collect() } } } @@ -1459,9 +1267,8 @@ impl<'tcx> Candidate<'tcx> { item: self.item.clone(), kind: match self.kind { InherentImplCandidate(..) => InherentImplPick, - ExtensionImplCandidate(def_id, ..) => ExtensionImplPick(def_id), ObjectCandidate => ObjectPick, - TraitCandidate => TraitPick, + TraitCandidate(_) => TraitPick, WhereClauseCandidate(ref trait_ref) => { // Only trait derived from where-clauses should // appear here, so they should not contain any @@ -1479,14 +1286,4 @@ impl<'tcx> Candidate<'tcx> { unsize: None, } } - - fn to_source(&self) -> CandidateSource { - match self.kind { - InherentImplCandidate(..) => ImplSource(self.item.container.id()), - ExtensionImplCandidate(def_id, ..) => ImplSource(def_id), - ObjectCandidate | - TraitCandidate | - WhereClauseCandidate(_) => TraitSource(self.item.container.id()), - } - } } diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index c8b828f3a434d..f2d7842e473f6 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -296,22 +296,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { err.emit(); } - MethodError::ClosureAmbiguity(trait_def_id) => { - let msg = format!("the `{}` method from the `{}` trait cannot be explicitly \ - invoked on this closure as we have not yet inferred what \ - kind of closure it is", - item_name, - self.tcx.item_path_str(trait_def_id)); - let msg = if let Some(callee) = rcvr_expr { - format!("{}; use overloaded call notation instead (e.g., `{}()`)", - msg, - self.tcx.hir.node_to_pretty_string(callee.id)) - } else { - msg - }; - self.sess().span_err(span, &msg); - } - MethodError::PrivateMatch(def, out_of_scope_traits) => { let mut err = struct_span_err!(self.tcx.sess, span, E0624, "{} `{}` is private", def.kind_name(), item_name); @@ -337,6 +321,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } err.emit(); } + + MethodError::BadReturnType => { + bug!("no return type expectations but got BadReturnType") + } } } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 955171203e13a..3f210ea1737b7 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -85,13 +85,12 @@ use self::method::MethodCallee; use self::TupleArgumentsFlag::*; use astconv::AstConv; -use fmt_macros::{Parser, Piece, Position}; use hir::def::{Def, CtorKind}; use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc_back::slice::ref_slice; use rustc::infer::{self, InferCtxt, InferOk, RegionVariableOrigin}; use rustc::infer::type_variable::{TypeVariableOrigin}; -use rustc::middle::region::CodeExtent; +use rustc::middle::region; use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::traits::{self, FulfillmentContext, ObligationCause, ObligationCauseCode}; use rustc::ty::{ParamTy, LvaluePreference, NoPreference, PreferMutLvalue}; @@ -146,6 +145,7 @@ mod cast; mod closure; mod callee; mod compare_method; +mod generator_interior; mod 
intrinsic; mod op; @@ -205,6 +205,8 @@ pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { deferred_cast_checks: RefCell>>, + deferred_generator_interiors: RefCell)>>, + // Anonymized types found in explicit return types and their // associated fresh inference variable. Writeback resolves these // variables to get the concrete type, which can be used to @@ -503,6 +505,8 @@ pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { ret_coercion: Option>>, + yield_ty: Option>, + ps: RefCell, /// Whether the last checked node generates a divergence (e.g., @@ -601,8 +605,9 @@ impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { let tcx = infcx.tcx; let item_id = tcx.hir.as_local_node_id(def_id); let body_id = item_id.and_then(|id| tcx.hir.maybe_body_owned_by(id)); - let implicit_region_bound = body_id.map(|body| { - tcx.mk_region(ty::ReScope(CodeExtent::CallSiteScope(body))) + let implicit_region_bound = body_id.map(|body_id| { + let body = tcx.hir.body(body_id); + tcx.mk_region(ty::ReScope(region::Scope::CallSite(body.value.hir_id.local_id))) }); Inherited { @@ -614,6 +619,7 @@ impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { locals: RefCell::new(NodeMap()), deferred_call_resolutions: RefCell::new(DefIdMap()), deferred_cast_checks: RefCell::new(Vec::new()), + deferred_generator_interiors: RefCell::new(Vec::new()), anon_types: RefCell::new(NodeMap()), implicit_region_bound, body_id, @@ -734,11 +740,20 @@ pub fn provide(providers: &mut Providers) { typeck_tables_of, has_typeck_tables, closure_kind, + generator_sig, adt_destructor, ..*providers }; } +fn generator_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId) + -> Option> { + let node_id = tcx.hir.as_local_node_id(def_id).unwrap(); + let hir_id = tcx.hir.node_to_hir_id(node_id); + tcx.typeck_tables_of(def_id).generator_sigs()[hir_id].map(|s| ty::Binder(s)) +} + fn closure_kind<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureKind { @@ -865,7 +880,7 @@ fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env, &fn_sig); - check_fn(&inh, param_env, fn_sig, decl, id, body) + check_fn(&inh, param_env, fn_sig, decl, id, body, false).0 } else { let fcx = FnCtxt::new(&inh, param_env, body.value.id); let expected_type = tcx.type_of(def_id); @@ -887,6 +902,7 @@ fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fcx.closure_analyze(body); fcx.select_obligations_where_possible(); fcx.check_casts(); + fcx.resolve_generator_interiors(def_id); fcx.select_all_obligations_or_error(); if fn_decl.is_some() { @@ -986,8 +1002,9 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>, fn_sig: ty::FnSig<'tcx>, decl: &'gcx hir::FnDecl, fn_id: ast::NodeId, - body: &'gcx hir::Body) - -> FnCtxt<'a, 'gcx, 'tcx> + body: &'gcx hir::Body, + can_be_generator: bool) + -> (FnCtxt<'a, 'gcx, 'tcx>, Option>) { let mut fn_sig = fn_sig.clone(); @@ -1010,6 +1027,12 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>, fn_sig.abi ); + let span = body.value.span; + + if body.is_generator && can_be_generator { + fcx.yield_ty = Some(fcx.next_ty_var(TypeVariableOrigin::TypeInference(span))); + } + GatherLocalsVisitor { fcx: &fcx, }.visit_body(body); // Add formal parameters. 
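
The surrounding hunks wire generator support into type checking: `check_fn` now takes a `can_be_generator` flag, gives the `FnCtxt` a fresh `yield_ty` inference variable when the body is a generator, and records a `GenSig` (yield type plus return type) together with a `GeneratorInterior` witness for later resolution. For orientation, here is a minimal sketch of the surface feature this machinery type-checks. It is illustrative only and not part of the patch: it assumes the unstable `generators`/`generator_trait` nightly features roughly as they existed after this change landed (the feature and the `resume` signature have since changed), so the exact API may not match a current toolchain.

```rust
#![feature(generators, generator_trait)]

use std::ops::{Generator, GeneratorState};
use std::pin::Pin;

fn main() {
    // A generator literal: `yield 1` fixes the yield type to an integer and
    // `return "done"` fixes the return type to `&str` -- the two halves of
    // the `GenSig` recorded by `check_fn`.
    let mut gen = || {
        yield 1;
        return "done"
    };

    // Each resume runs the body up to the next `yield`, or to completion.
    match Pin::new(&mut gen).resume(()) {
        GeneratorState::Yielded(n) => assert_eq!(n, 1),
        _ => panic!("expected a yield"),
    }
    match Pin::new(&mut gen).resume(()) {
        GeneratorState::Complete(s) => assert_eq!(s, "done"),
        _ => panic!("expected completion"),
    }
}
```

The `ExprYield` handling added further down is the other half of this: a `yield` written outside of a generator literal is rejected with the new E0627 error rather than being silently accepted.
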
@@ -1029,6 +1052,24 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>, } let fn_hir_id = fcx.tcx.hir.node_to_hir_id(fn_id); + let gen_ty = if can_be_generator && body.is_generator { + let gen_sig = ty::GenSig { + yield_ty: fcx.yield_ty.unwrap(), + return_ty: ret_ty, + }; + inherited.tables.borrow_mut().generator_sigs_mut().insert(fn_hir_id, Some(gen_sig)); + + let witness = fcx.next_ty_var(TypeVariableOrigin::MiscVariable(span)); + fcx.deferred_generator_interiors.borrow_mut().push((body.id(), witness)); + let interior = ty::GeneratorInterior::new(witness); + + inherited.tables.borrow_mut().generator_interiors_mut().insert(fn_hir_id, interior); + + Some(interior) + } else { + inherited.tables.borrow_mut().generator_sigs_mut().insert(fn_hir_id, None); + None + }; inherited.tables.borrow_mut().liberated_fn_sigs_mut().insert(fn_hir_id, fn_sig); fcx.check_return_expr(&body.value); @@ -1060,11 +1101,11 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>, let mut actual_return_ty = coercion.complete(&fcx); if actual_return_ty.is_never() { actual_return_ty = fcx.next_diverging_ty_var( - TypeVariableOrigin::DivergingFn(body.value.span)); + TypeVariableOrigin::DivergingFn(span)); } - fcx.demand_suptype(body.value.span, ret_ty, actual_return_ty); + fcx.demand_suptype(span, ret_ty, actual_return_ty); - fcx + (fcx, gen_ty) } fn check_struct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -1173,55 +1214,11 @@ pub fn check_item_type<'a,'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, it: &'tcx hir::Item } fn check_on_unimplemented<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - def_id: DefId, + trait_def_id: DefId, item: &hir::Item) { - let generics = tcx.generics_of(def_id); - if let Some(ref attr) = item.attrs.iter().find(|a| { - a.check_name("rustc_on_unimplemented") - }) { - if let Some(istring) = attr.value_str() { - let istring = istring.as_str(); - let name = tcx.item_name(def_id).as_str(); - let parser = Parser::new(&istring); - let types = &generics.types; - for token in parser { - match token { - Piece::String(_) => (), // Normal string, no need to check it - Piece::NextArgument(a) => match a.position { - // `{Self}` is allowed - Position::ArgumentNamed(s) if s == "Self" => (), - // `{ThisTraitsName}` is allowed - Position::ArgumentNamed(s) if s == name => (), - // So is `{A}` if A is a type parameter - Position::ArgumentNamed(s) => match types.iter().find(|t| { - t.name == s - }) { - Some(_) => (), - None => { - span_err!(tcx.sess, attr.span, E0230, - "there is no type parameter \ - {} on trait {}", - s, name); - } - }, - // `{:1}` and `{}` are not to be used - Position::ArgumentIs(_) => { - span_err!(tcx.sess, attr.span, E0231, - "only named substitution \ - parameters are allowed"); - } - } - } - } - } else { - struct_span_err!( - tcx.sess, attr.span, E0232, - "this attribute must have a value") - .span_label(attr.span, "attribute requires a value") - .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`")) - .emit(); - } - } + let item_def_id = tcx.hir.local_def_id(item.id); + // an error would be reported if this fails. 
+ let _ = traits::OnUnimplementedDirective::of_item(tcx, trait_def_id, item_def_id); } fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -1700,6 +1697,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { param_env, err_count_on_creation: inh.tcx.sess.err_count(), ret_coercion: None, + yield_ty: None, ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, ast::CRATE_NODE_ID)), diverges: Cell::new(Diverges::Maybe), @@ -2089,6 +2087,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } + fn resolve_generator_interiors(&self, def_id: DefId) { + let mut deferred_generator_interiors = self.deferred_generator_interiors.borrow_mut(); + for (body_id, witness) in deferred_generator_interiors.drain(..) { + generator_interior::resolve_interior(self, def_id, body_id, witness); + } + } + /// Apply "fallbacks" to some types /// unconstrained types get replaced with ! or () (depending on whether /// feature(never_type) is enabled), unconstrained ints with i32, and @@ -2423,7 +2428,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn parameter_count_error<'tcx>(sess: &Session, sp: Span, expected_count: usize, arg_count: usize, error_code: &str, variadic: bool, - def_span: Option) { + def_span: Option, sugg_unit: bool) { let mut err = sess.struct_span_err_with_code(sp, &format!("this function takes {}{} parameter{} but {} parameter{} supplied", if variadic {"at least "} else {""}, @@ -2433,13 +2438,23 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if arg_count == 1 {" was"} else {"s were"}), error_code); - err.span_label(sp, format!("expected {}{} parameter{}", - if variadic {"at least "} else {""}, - expected_count, - if expected_count == 1 {""} else {"s"})); if let Some(def_s) = def_span { err.span_label(def_s, "defined here"); } + if sugg_unit { + let sugg_span = sp.end_point(); + // remove closing `)` from the span + let sugg_span = sugg_span.with_hi(sugg_span.lo()); + err.span_suggestion( + sugg_span, + "expected the unit value `()`. You can create one with a pair of parenthesis", + String::from("()")); + } else { + err.span_label(sp, format!("expected {}{} parameter{}", + if variadic {"at least "} else {""}, + expected_count, + if expected_count == 1 {""} else {"s"})); + } err.emit(); } @@ -2448,7 +2463,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match tuple_type.sty { ty::TyTuple(arg_types, _) if arg_types.len() != args.len() => { parameter_count_error(tcx.sess, sp_args, arg_types.len(), args.len(), - "E0057", false, def_span); + "E0057", false, def_span, false); expected_arg_tys = &[]; self.err_args(args.len()) } @@ -2477,13 +2492,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn_inputs.to_vec() } else { parameter_count_error(tcx.sess, sp_args, expected_arg_count, - supplied_arg_count, "E0060", true, def_span); + supplied_arg_count, "E0060", true, def_span, false); expected_arg_tys = &[]; self.err_args(supplied_arg_count) } } else { + // is the missing argument of type `()`? 
+ let sugg_unit = if expected_arg_tys.len() == 1 && supplied_arg_count == 0 { + self.resolve_type_vars_if_possible(&expected_arg_tys[0]).is_nil() + } else if fn_inputs.len() == 1 && supplied_arg_count == 0 { + self.resolve_type_vars_if_possible(&fn_inputs[0]).is_nil() + } else { + false + }; parameter_count_error(tcx.sess, sp_args, expected_arg_count, - supplied_arg_count, "E0061", false, def_span); + supplied_arg_count, "E0061", false, def_span, sugg_unit); expected_arg_tys = &[]; self.err_args(supplied_arg_count) }; @@ -2751,6 +2774,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { formal_ret: Ty<'tcx>, formal_args: &[Ty<'tcx>]) -> Vec> { + let formal_ret = self.resolve_type_vars_with_obligations(formal_ret); let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| { self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || { // Attempt to apply a subtyping relationship between the formal @@ -3070,7 +3094,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { debug!("tuple struct named {:?}", base_t); let ident = ast::Ident { name: Symbol::intern(&idx.node.to_string()), - ctxt: idx.span.ctxt.modern(), + ctxt: idx.span.ctxt().modern(), }; let (ident, def_scope) = self.tcx.adjust_ident(ident, base_def.did, self.body_id); @@ -3114,8 +3138,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if tuple_like { type_error_struct!(self.tcx().sess, expr.span, expr_t, E0612, - "attempted out-of-bounds tuple index `{}` on type `{}`", - idx.node, expr_t).emit(); + "attempted out-of-bounds tuple index `{}` on type `{}`", + idx.node, expr_t).emit(); } else { self.no_such_field_err(expr.span, idx.node, expr_t).emit(); } @@ -3193,7 +3217,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let adt_ty_hint = self.expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty]) - .get(0).cloned().unwrap_or(adt_ty); + .get(0).cloned().unwrap_or(adt_ty); // re-link the regions that EIfEO can erase. self.demand_eqtype(span, adt_ty_hint, adt_ty); @@ -3231,10 +3255,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { error_happened = true; if let Some(_) = variant.find_field_named(field.name.node) { let mut err = struct_span_err!(self.tcx.sess, - field.name.span, - E0062, - "field `{}` specified more than once", - field.name.node); + field.name.span, + E0062, + "field `{}` specified more than once", + field.name.node); err.span_label(field.name.span, "used more than once"); @@ -3287,10 +3311,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { remaining_fields_names, truncated_fields_error, adt_ty) - .span_label(span, format!("missing {}{}", - remaining_fields_names, - truncated_fields_error)) - .emit(); + .span_label(span, format!("missing {}{}", + remaining_fields_names, + truncated_fields_error)) + .emit(); } } @@ -3725,13 +3749,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Only check this if not in an `if` condition, as the // mistyped comparison help is more appropriate. 
if !self.tcx.expr_is_lval(&lhs) { - struct_span_err!( - self.tcx.sess, expr.span, E0070, - "invalid left-hand side expression") - .span_label( - expr.span, - "left-hand of expression not valid") - .emit(); + struct_span_err!(self.tcx.sess, expr.span, E0070, + "invalid left-hand side expression") + .span_label(expr.span, "left-hand of expression not valid") + .emit(); } } } @@ -3806,7 +3827,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { hir::ExprMatch(ref discrim, ref arms, match_src) => { self.check_match(expr, &discrim, arms, expected, match_src) } - hir::ExprClosure(capture, ref decl, body_id, _) => { + hir::ExprClosure(capture, ref decl, body_id, _, _) => { self.check_expr_closure(expr, capture, &decl, body_id, expected) } hir::ExprBlock(ref body) => { @@ -3914,6 +3935,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } hir::ExprTup(ref elts) => { let flds = expected.only_has_type(self).and_then(|ty| { + let ty = self.resolve_type_vars_with_obligations(ty); match ty.sty { ty::TyTuple(ref flds, _) => Some(&flds[..]), _ => None @@ -3997,6 +4019,18 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } } + hir::ExprYield(ref value) => { + match self.yield_ty { + Some(ty) => { + self.check_expr_coercable_to_type(&value, ty); + } + None => { + struct_span_err!(self.tcx.sess, expr.span, E0627, + "yield statement outside of generator literal").emit(); + } + } + tcx.mk_nil() + } } } @@ -4448,11 +4482,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { return; } let original_span = original_sp(last_stmt.span, blk.span); - let span_semi = Span { - lo: original_span.hi - BytePos(1), - hi: original_span.hi, - ctxt: original_span.ctxt, - }; + let span_semi = original_span.with_lo(original_span.hi() - BytePos(1)); err.span_suggestion(span_semi, "consider removing this semicolon", "".to_string()); } diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index fdbdf925e4cfe..d475c37ed8c94 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -87,7 +87,7 @@ use check::FnCtxt; use middle::free_region::FreeRegionMap; use middle::mem_categorization as mc; use middle::mem_categorization::Categorization; -use middle::region::{CodeExtent, RegionMaps}; +use middle::region; use rustc::hir::def_id::DefId; use rustc::ty::subst::Substs; use rustc::traits; @@ -179,7 +179,7 @@ pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { region_bound_pairs: Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>, - pub region_maps: Rc, + pub region_scope_tree: Rc, free_region_map: FreeRegionMap<'tcx>, @@ -187,7 +187,7 @@ pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { body_id: ast::NodeId, // call_site scope of innermost fn - call_site_scope: Option, + call_site_scope: Option, // id of innermost fn or loop repeating_scope: ast::NodeId, @@ -230,10 +230,10 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { RepeatingScope(initial_repeating_scope): RepeatingScope, initial_body_id: ast::NodeId, Subject(subject): Subject) -> RegionCtxt<'a, 'gcx, 'tcx> { - let region_maps = fcx.tcx.region_maps(subject); + let region_scope_tree = fcx.tcx.region_scope_tree(subject); RegionCtxt { fcx, - region_maps, + region_scope_tree, repeating_scope: initial_repeating_scope, body_id: initial_body_id, call_site_scope: None, @@ -243,8 +243,8 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { } } - fn set_call_site_scope(&mut self, call_site_scope: Option) - -> Option { + fn set_call_site_scope(&mut self, call_site_scope: Option) + -> Option { mem::replace(&mut 
self.call_site_scope, call_site_scope) } @@ -305,7 +305,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { let body_id = body.id(); - let call_site = CodeExtent::CallSiteScope(body_id); + let call_site = region::Scope::CallSite(body.value.hir_id.local_id); let old_call_site_scope = self.set_call_site_scope(Some(call_site)); let fn_sig = { @@ -330,7 +330,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { let old_body_id = self.set_body_id(body_id.node_id); self.relate_free_regions(&fn_sig_tys[..], body_id.node_id, span); - self.link_fn_args(CodeExtent::Misc(body_id.node_id), &body.arguments); + self.link_fn_args(region::Scope::Node(body.value.hir_id.local_id), &body.arguments); self.visit_body(body); self.visit_region_obligations(body_id.node_id); @@ -580,7 +580,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { fn resolve_regions_and_report_errors(&self) { self.fcx.resolve_regions_and_report_errors(self.subject_def_id, - &self.region_maps, + &self.region_scope_tree, &self.free_region_map); } @@ -610,11 +610,11 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { // that the lifetime of any regions that appear in a // variable's type enclose at least the variable's scope. - let var_scope = self.region_maps.var_scope(id); + let hir_id = self.tcx.hir.node_to_hir_id(id); + let var_scope = self.region_scope_tree.var_scope(hir_id.local_id); let var_region = self.tcx.mk_region(ty::ReScope(var_scope)); let origin = infer::BindingTypeIsNotValidAtDecl(span); - let hir_id = self.tcx.hir.node_to_hir_id(id); self.type_of_node_must_outlive(origin, hir_id, var_region); let typ = self.resolve_node_type(hir_id); @@ -668,7 +668,8 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for RegionCtxt<'a, 'gcx, 'tcx> { // scope of that expression. This also guarantees basic WF. let expr_ty = self.resolve_node_type(expr.hir_id); // the region corresponding to this expression - let expr_region = self.tcx.node_scope_region(expr.id); + let expr_region = self.tcx.mk_region(ty::ReScope( + region::Scope::Node(expr.hir_id.local_id))); self.type_must_outlive(infer::ExprTypeIsNotInScope(expr_ty, expr.span), expr_ty, expr_region); @@ -825,7 +826,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for RegionCtxt<'a, 'gcx, 'tcx> { intravisit::walk_expr(self, expr); } - hir::ExprClosure(.., body_id, _) => { + hir::ExprClosure(.., body_id, _, _) => { self.check_expr_fn_block(expr, body_id); } @@ -950,7 +951,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { // call occurs. // // FIXME(#6268) to support nested method calls, should be callee_id - let callee_scope = CodeExtent::Misc(call_expr.id); + let callee_scope = region::Scope::Node(call_expr.hir_id.local_id); let callee_region = self.tcx.mk_region(ty::ReScope(callee_scope)); debug!("callee_region={:?}", callee_region); @@ -979,7 +980,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { where F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'gcx, 'tcx>) -> R { f(mc::MemCategorizationContext::with_infer(&self.infcx, - &self.region_maps, + &self.region_scope_tree, &self.tables.borrow())) } @@ -1002,7 +1003,8 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { // expression. 
self.check_safety_of_rvalue_destructor_if_necessary(cmt.clone(), expr.span); - let expr_region = self.tcx.node_scope_region(expr.id); + let expr_region = self.tcx.mk_region(ty::ReScope( + region::Scope::Node(expr.hir_id.local_id))); for adjustment in adjustments { debug!("constrain_adjustments: adjustment={:?}, cmt={:?}", adjustment, cmt); @@ -1095,7 +1097,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { debug!("constrain_index(index_expr=?, indexed_ty={}", self.ty_to_string(indexed_ty)); - let r_index_expr = ty::ReScope(CodeExtent::Misc(index_expr.id)); + let r_index_expr = ty::ReScope(region::Scope::Node(index_expr.hir_id.local_id)); if let ty::TyRef(r_ptr, mt) = indexed_ty.sty { match mt.ty.sty { ty::TySlice(_) | ty::TyStr => { @@ -1176,7 +1178,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { /// Computes the guarantors for any ref bindings in a match and /// then ensures that the lifetime of the resulting pointer is /// linked to the lifetime of its guarantor (if any). - fn link_fn_args(&self, body_scope: CodeExtent, args: &[hir::Arg]) { + fn link_fn_args(&self, body_scope: region::Scope, args: &[hir::Arg]) { debug!("regionck::link_fn_args(body_scope={:?})", body_scope); for arg in args { let arg_ty = self.node_ty(arg.hir_id); @@ -1232,7 +1234,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { } adjustment::AutoBorrow::RawPtr(m) => { - let r = self.tcx.node_scope_region(expr.id); + let r = self.tcx.mk_region(ty::ReScope(region::Scope::Node(expr.hir_id.local_id))); self.link_region(expr.span, r, ty::BorrowKind::from_mutbl(m), expr_cmt); } } diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index 6db5c5b1cb0af..82d1210f42b2e 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -76,10 +76,14 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for InferBorrowKindVisitor<'a, 'gcx, 'tcx> { fn visit_expr(&mut self, expr: &'gcx hir::Expr) { match expr.node { - hir::ExprClosure(cc, _, body_id, _) => { + hir::ExprClosure(cc, _, body_id, _, is_generator) => { let body = self.fcx.tcx.hir.body(body_id); self.visit_body(body); - self.fcx.analyze_closure((expr.id, expr.hir_id), expr.span, body, cc); + self.fcx.analyze_closure((expr.id, expr.hir_id), + expr.span, + body, + cc, + is_generator); } _ => { } @@ -94,22 +98,27 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (closure_node_id, closure_hir_id): (ast::NodeId, hir::HirId), span: Span, body: &hir::Body, - capture_clause: hir::CaptureClause) { + capture_clause: hir::CaptureClause, + gen: bool) { /*! * Analysis starting point. 
*/ debug!("analyze_closure(id={:?}, body.id={:?})", closure_node_id, body.id()); - let infer_kind = match self.tables - .borrow_mut() - .closure_kinds_mut() - .entry(closure_hir_id) { - Entry::Occupied(_) => false, - Entry::Vacant(entry) => { - debug!("check_closure: adding closure {:?} as Fn", closure_node_id); - entry.insert((ty::ClosureKind::Fn, None)); - true + let infer_kind = if gen { + false + } else { + match self.tables + .borrow_mut() + .closure_kinds_mut() + .entry(closure_hir_id) { + Entry::Occupied(_) => false, + Entry::Vacant(entry) => { + debug!("check_closure: adding closure {:?} as Fn", closure_node_id); + entry.insert((ty::ClosureKind::Fn, None)); + true + } } }; @@ -143,7 +152,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { { let body_owner_def_id = self.tcx.hir.body_owner_def_id(body.id()); - let region_maps = &self.tcx.region_maps(body_owner_def_id); + let region_scope_tree = &self.tcx.region_scope_tree(body_owner_def_id); let mut delegate = InferBorrowKind { fcx: self, adjust_closure_kinds: FxHashMap(), @@ -152,7 +161,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { euv::ExprUseVisitor::with_infer(&mut delegate, &self.infcx, self.param_env, - region_maps, + region_scope_tree, &self.tables.borrow()) .consume_body(body); @@ -184,7 +193,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Extract the type variables UV0...UVn. let (def_id, closure_substs) = match self.node_ty(closure_hir_id).sty { - ty::TyClosure(def_id, substs) => (def_id, substs), + ty::TyClosure(def_id, substs) | + ty::TyGenerator(def_id, substs, _) => (def_id, substs), ref t => { span_bug!( span, diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 36c72fc4b19d6..7810d9049e10e 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -45,6 +45,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { wbcx.visit_anon_types(); wbcx.visit_cast_types(); wbcx.visit_free_region_map(); + wbcx.visit_generator_sigs(); + wbcx.visit_generator_interiors(); let used_trait_imports = mem::replace(&mut self.tables.borrow_mut().used_trait_imports, DefIdSet()); @@ -164,7 +166,7 @@ impl<'cx, 'gcx, 'tcx> Visitor<'gcx> for WritebackCx<'cx, 'gcx, 'tcx> { self.visit_node_id(e.span, e.hir_id); - if let hir::ExprClosure(_, _, body, _) = e.node { + if let hir::ExprClosure(_, _, body, _, _) = e.node { let body = self.fcx.tcx.hir.body(body); for arg in &body.arguments { self.visit_node_id(e.span, arg.hir_id); @@ -357,6 +359,33 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> { } } + fn visit_generator_interiors(&mut self) { + let common_local_id_root = self.fcx.tables.borrow().local_id_root.unwrap(); + for (&id, interior) in self.fcx.tables.borrow().generator_interiors().iter() { + let hir_id = hir::HirId { + owner: common_local_id_root.index, + local_id: id, + }; + let interior = self.resolve(interior, &hir_id); + self.tables.generator_interiors_mut().insert(hir_id, interior); + } + } + + fn visit_generator_sigs(&mut self) { + let common_local_id_root = self.fcx.tables.borrow().local_id_root.unwrap(); + for (&id, gen_sig) in self.fcx.tables.borrow().generator_sigs().iter() { + let hir_id = hir::HirId { + owner: common_local_id_root.index, + local_id: id, + }; + let gen_sig = gen_sig.map(|s| ty::GenSig { + yield_ty: self.resolve(&s.yield_ty, &hir_id), + return_ty: self.resolve(&s.return_ty, &hir_id), + }); + self.tables.generator_sigs_mut().insert(hir_id, gen_sig); + } + } + fn visit_liberated_fn_sigs(&mut self) { let fcx_tables = 
self.fcx.tables.borrow(); debug_assert_eq!(fcx_tables.local_id_root, self.tables.local_id_root); diff --git a/src/librustc_typeck/coherence/builtin.rs b/src/librustc_typeck/coherence/builtin.rs index 9305eff143652..b421fdfe8d813 100644 --- a/src/librustc_typeck/coherence/builtin.rs +++ b/src/librustc_typeck/coherence/builtin.rs @@ -12,7 +12,7 @@ //! up data structures required by type-checking/translation. use rustc::middle::free_region::FreeRegionMap; -use rustc::middle::region::RegionMaps; +use rustc::middle::region; use rustc::middle::lang_items::UnsizeTraitLangItem; use rustc::traits::{self, ObligationCause}; @@ -390,10 +390,10 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } // Finally, resolve all regions. - let region_maps = RegionMaps::new(); + let region_scope_tree = region::ScopeTree::default(); let mut free_regions = FreeRegionMap::new(); free_regions.relate_free_regions_from_predicates(¶m_env.caller_bounds); - infcx.resolve_regions_and_report_errors(impl_did, ®ion_maps, &free_regions); + infcx.resolve_regions_and_report_errors(impl_did, ®ion_scope_tree, &free_regions); CoerceUnsizedInfo { custom_kind: kind diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index ea86c570c8296..c177623af48fb 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -1153,7 +1153,12 @@ fn type_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, NodeField(field) => icx.to_ty(&field.ty), - NodeExpr(&hir::Expr { node: hir::ExprClosure(..), .. }) => { + NodeExpr(&hir::Expr { node: hir::ExprClosure(.., is_generator), .. }) => { + if is_generator { + let hir_id = tcx.hir.node_to_hir_id(node_id); + return tcx.typeck_tables_of(def_id).node_id_to_type(hir_id); + } + tcx.mk_closure(def_id, Substs::for_item( tcx, def_id, |def, _| { diff --git a/src/librustc_typeck/diagnostics.rs b/src/librustc_typeck/diagnostics.rs index cb430efd9508f..6bbe2233ff1fa 100644 --- a/src/librustc_typeck/diagnostics.rs +++ b/src/librustc_typeck/diagnostics.rs @@ -2505,50 +2505,6 @@ fn baz(x: &::A) where I: Foo {} ``` "##, -E0230: r##" -The trait has more type parameters specified than appear in its definition. - -Erroneous example code: - -```compile_fail,E0230 -#![feature(on_unimplemented)] -#[rustc_on_unimplemented = "Trait error on `{Self}` with `<{A},{B},{C}>`"] -// error: there is no type parameter C on trait TraitWithThreeParams -trait TraitWithThreeParams -{} -``` - -Include the correct number of type parameters and the compilation should -proceed: - -``` -#![feature(on_unimplemented)] -#[rustc_on_unimplemented = "Trait error on `{Self}` with `<{A},{B},{C}>`"] -trait TraitWithThreeParams // ok! -{} -``` -"##, - -E0232: r##" -The attribute must have a value. Erroneous code example: - -```compile_fail,E0232 -#![feature(on_unimplemented)] - -#[rustc_on_unimplemented] // error: this attribute must have a value -trait Bar {} -``` - -Please supply the missing value of the attribute. Example: - -``` -#![feature(on_unimplemented)] - -#[rustc_on_unimplemented = "foo"] // ok! -trait Bar {} -``` -"##, - E0243: r##" This error indicates that not enough type parameters were found in a type or trait. @@ -4690,7 +4646,6 @@ register_diagnostics! 
{ E0224, // at least one non-builtin train is required for an object type E0227, // ambiguous lifetime bound, explicit lifetime bound required E0228, // explicit lifetime bound required - E0231, // only named substitution parameters are allowed // E0233, // E0234, // E0235, // structure constructor specifies a structure of type but @@ -4721,4 +4676,5 @@ register_diagnostics! { E0588, // packed struct cannot transitively contain a `[repr(align)]` struct E0592, // duplicate definitions with name `{}` // E0613, // Removed (merged with E0609) + E0627, // yield statement outside of generator literal } diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 86feea13b1723..7a6ee73b9b9e9 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -84,7 +84,6 @@ This API is completely unstable and subject to change. extern crate syntax_pos; extern crate arena; -extern crate fmt_macros; #[macro_use] extern crate rustc; extern crate rustc_platform_intrinsics as intrinsics; extern crate rustc_back; diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index 40474a7933f80..70b989b6ab21c 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -294,6 +294,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } ty::TyFnDef(..) | + ty::TyGenerator(..) | ty::TyClosure(..) => { bug!("Unexpected closure type in variance computation"); } diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index f9400e68a16c5..00050bc357827 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -12,6 +12,7 @@ path = "lib.rs" env_logger = { version = "0.4", default-features = false } log = "0.3" pulldown-cmark = { version = "0.0.14", default-features = false } +html-diff = "0.0.4" [build-dependencies] build_helper = { path = "../build_helper" } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 7d6ad5286d111..aab44ddce0e6a 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -457,8 +457,8 @@ impl Clean for doctree::Module { // the outer `mod` item for the source code. let whence = { let cm = cx.sess().codemap(); - let outer = cm.lookup_char_pos(self.where_outer.lo); - let inner = cm.lookup_char_pos(self.where_inner.lo); + let outer = cm.lookup_char_pos(self.where_outer.lo()); + let inner = cm.lookup_char_pos(self.where_inner.lo()); if outer.file.start_pos == inner.file.start_pos { // mod foo { ... } self.where_outer @@ -1984,7 +1984,7 @@ impl<'tcx> Clean for ty::Ty<'tcx> { }).collect()) } - ty::TyClosure(..) => Tuple(vec![]), // FIXME(pcwalton) + ty::TyClosure(..) | ty::TyGenerator(..) => Tuple(vec![]), // FIXME(pcwalton) ty::TyInfer(..) => panic!("TyInfer"), ty::TyError => panic!("TyError"), @@ -2251,8 +2251,8 @@ impl Clean for syntax_pos::Span { let cm = cx.sess().codemap(); let filename = cm.span_to_filename(*self); - let lo = cm.lookup_char_pos(self.lo); - let hi = cm.lookup_char_pos(self.hi); + let lo = cm.lookup_char_pos(self.lo()); + let hi = cm.lookup_char_pos(self.hi()); Span { filename: filename.to_string(), loline: lo.line, diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 988890ffedcdd..10a3878073e97 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -228,7 +228,7 @@ impl<'a> fmt::Display for WhereClause<'a> { } if end_newline { - //add a space so stripping
<br> tags and breaking spaces still renders properly + // add a space so stripping <br>
tags and breaking spaces still renders properly if f.alternate() { clause.push(' '); } else { diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index eb27fa3abfa19..e6b236deac4ee 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -172,6 +172,21 @@ impl<'a> Classifier<'a> { } } + /// Gets the next token out of the lexer, emitting fatal errors if lexing fails. + fn try_next_token(&mut self) -> io::Result { + match self.lexer.try_next_token() { + Ok(tas) => Ok(tas), + Err(_) => { + self.lexer.emit_fatal_errors(); + self.lexer.sess.span_diagnostic + .struct_warn("Backing out of syntax highlighting") + .note("You probably did not intend to render this as a rust code-block") + .emit(); + Err(io::Error::new(io::ErrorKind::Other, "")) + } + } + } + /// Exhausts the `lexer` writing the output into `out`. /// /// The general structure for this method is to iterate over each token, @@ -183,18 +198,7 @@ impl<'a> Classifier<'a> { out: &mut W) -> io::Result<()> { loop { - let next = match self.lexer.try_next_token() { - Ok(tas) => tas, - Err(_) => { - self.lexer.emit_fatal_errors(); - self.lexer.sess.span_diagnostic - .struct_warn("Backing out of syntax highlighting") - .note("You probably did not intend to render this as a rust code-block") - .emit(); - return Err(io::Error::new(io::ErrorKind::Other, "")); - } - }; - + let next = self.try_next_token()?; if next.tok == token::Eof { break; } @@ -255,13 +259,37 @@ impl<'a> Classifier<'a> { } } - // This is the start of an attribute. We're going to want to + // This might be the start of an attribute. We're going to want to // continue highlighting it as an attribute until the ending ']' is // seen, so skip out early. Down below we terminate the attribute // span when we see the ']'. token::Pound => { - self.in_attribute = true; - out.enter_span(Class::Attribute)?; + // We can't be sure that our # begins an attribute (it could + // just be appearing in a macro) until we read either `#![` or + // `#[` from the input stream. + // + // We don't want to start highlighting as an attribute until + // we're confident there is going to be a ] coming up, as + // otherwise # tokens in macros highlight the rest of the input + // as an attribute. + + // Case 1: #![inner_attribute] + if self.lexer.peek().tok == token::Not { + self.try_next_token()?; // NOTE: consumes `!` token! 
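+ // Descriptive note (editorial): only a `[` following the consumed `!` confirms an inner attribute (`#![...]`); either way the `#` and `!` themselves are emitted as plain text below.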
+ if self.lexer.peek().tok == token::OpenDelim(token::Bracket) { + self.in_attribute = true; + out.enter_span(Class::Attribute)?; + } + out.string("#", Class::None, None)?; + out.string("!", Class::None, None)?; + return Ok(()); + } + + // Case 2: #[outer_attribute] + if self.lexer.peek().tok == token::OpenDelim(token::Bracket) { + self.in_attribute = true; + out.enter_span(Class::Attribute)?; + } out.string("#", Class::None, None)?; return Ok(()); } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 5457f69cb6dab..cc84e340c74f8 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -63,7 +63,7 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc::session::config::nightly_options::is_nightly_build; use rustc_data_structures::flock; -use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability}; +use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability, Span}; use doctree; use fold::DocFolder; use html::escape::Escape; @@ -75,6 +75,8 @@ use html::item_type::ItemType; use html::markdown::{self, Markdown, MarkdownHtml, MarkdownSummaryLine, RenderType}; use html::{highlight, layout}; +use html_diff; + /// A pair of name and its optional document. pub type NameDoc = (String, Option); @@ -122,6 +124,9 @@ pub struct SharedContext { /// The given user css file which allow to customize the generated /// documentation theme. pub css_file_extension: Option, + /// Warnings for the user if rendering would differ using different markdown + /// parsers. + pub markdown_warnings: RefCell)>>, } /// Indicates where an external crate can be found. @@ -455,6 +460,7 @@ pub fn run(mut krate: clean::Crate, krate: krate.name.clone(), }, css_file_extension: css_file_extension.clone(), + markdown_warnings: RefCell::new(vec![]), }; // If user passed in `--playground-url` arg, we fill in crate name here @@ -577,8 +583,102 @@ pub fn run(mut krate: clean::Crate, write_shared(&cx, &krate, &*cache, index)?; + let scx = cx.shared.clone(); + // And finally render the whole crate's documentation - cx.krate(krate) + let result = cx.krate(krate); + + let markdown_warnings = scx.markdown_warnings.borrow(); + if !markdown_warnings.is_empty() { + println!("WARNING: documentation for this crate may be rendered \ + differently using the new Pulldown renderer."); + println!(" See https://github.com/rust-lang/rust/issues/44229 for details."); + for &(ref span, ref text, ref diffs) in &*markdown_warnings { + println!("WARNING: rendering difference in `{}`", concise_str(text)); + println!(" --> {}:{}:{}", span.filename, span.loline, span.locol); + for d in diffs { + render_difference(d); + } + } + } + + result +} + +// A short, single-line view of `s`. +fn concise_str(s: &str) -> String { + if s.contains('\n') { + return format!("{}...", s.lines().next().expect("Impossible! We just found a newline")); + } + if s.len() > 70 { + return format!("{} ... {}", &s[..50], &s[s.len()-20..]); + } + s.to_owned() +} + +// Returns short versions of s1 and s2, starting from where the strings differ. 
+fn concise_compared_strs(s1: &str, s2: &str) -> (String, String) { + let s1 = s1.trim(); + let s2 = s2.trim(); + if !s1.contains('\n') && !s2.contains('\n') && s1.len() <= 70 && s2.len() <= 70 { + return (s1.to_owned(), s2.to_owned()); + } + + let mut start_byte = 0; + for (c1, c2) in s1.chars().zip(s2.chars()) { + if c1 != c2 { + break; + } + + start_byte += c1.len_utf8(); + } + + if start_byte == 0 { + return (concise_str(s1), concise_str(s2)); + } + + let s1 = &s1[start_byte..]; + let s2 = &s2[start_byte..]; + (format!("...{}", concise_str(s1)), format!("...{}", concise_str(s2))) +} + +fn render_difference(diff: &html_diff::Difference) { + match *diff { + html_diff::Difference::NodeType { ref elem, ref opposite_elem } => { + println!(" {} Types differ: expected: `{}`, found: `{}`", + elem.path, elem.element_name, opposite_elem.element_name); + } + html_diff::Difference::NodeName { ref elem, ref opposite_elem } => { + println!(" {} Tags differ: expected: `{}`, found: `{}`", + elem.path, elem.element_name, opposite_elem.element_name); + } + html_diff::Difference::NodeAttributes { ref elem, + ref elem_attributes, + ref opposite_elem_attributes, + .. } => { + println!(" {} Attributes differ in `{}`: expected: `{:?}`, found: `{:?}`", + elem.path, elem.element_name, elem_attributes, opposite_elem_attributes); + } + html_diff::Difference::NodeText { ref elem, ref elem_text, ref opposite_elem_text, .. } => { + let (s1, s2) = concise_compared_strs(elem_text, opposite_elem_text); + println!(" {} Text differs:\n expected: `{}`\n found: `{}`", + elem.path, s1, s2); + } + html_diff::Difference::NotPresent { ref elem, ref opposite_elem } => { + if let Some(ref elem) = *elem { + println!(" {} One element is missing: expected: `{}`", + elem.path, elem.element_name); + } else if let Some(ref elem) = *opposite_elem { + if elem.element_name.is_empty() { + println!(" {} Unexpected element: `{}`", + elem.path, concise_str(&elem.element_content)); + } else { + println!(" {} Unexpected element `{}`: found: `{}`", + elem.path, elem.element_name, concise_str(&elem.element_content)); + } + } + } + } } /// Build the search index from the collected metadata @@ -1523,8 +1623,7 @@ impl<'a> fmt::Display for Item<'a> { } else { write!(fmt, "Module ")?; }, - clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => - write!(fmt, "Function ")?, + clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => write!(fmt, "Function ")?, clean::TraitItem(..) => write!(fmt, "Trait ")?, clean::StructItem(..) => write!(fmt, "Struct ")?, clean::UnionItem(..) => write!(fmt, "Union ")?, @@ -1532,8 +1631,7 @@ impl<'a> fmt::Display for Item<'a> { clean::TypedefItem(..) => write!(fmt, "Type Definition ")?, clean::MacroItem(..) => write!(fmt, "Macro ")?, clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?, - clean::StaticItem(..) | clean::ForeignStaticItem(..) => - write!(fmt, "Static ")?, + clean::StaticItem(..) | clean::ForeignStaticItem(..) => write!(fmt, "Static ")?, clean::ConstantItem(..) => write!(fmt, "Constant ")?, _ => { // We don't generate pages for any other type. @@ -1641,12 +1739,84 @@ fn plain_summary_line(s: Option<&str>) -> String { fn document(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Result { document_stability(w, cx, item)?; let prefix = render_assoc_const_value(item); - document_full(w, item, cx.render_type, &prefix)?; + document_full(w, item, cx, &prefix)?; Ok(()) } +/// Render md_text as markdown. 
Warns the user if there are difference in +/// rendering between Pulldown and Hoedown. +fn render_markdown(w: &mut fmt::Formatter, + md_text: &str, + span: Span, + render_type: RenderType, + prefix: &str, + scx: &SharedContext) + -> fmt::Result { + let hoedown_output = format!("{}", Markdown(md_text, RenderType::Hoedown)); + // We only emit warnings if the user has opted-in to Pulldown rendering. + let output = if render_type == RenderType::Pulldown { + let pulldown_output = format!("{}", Markdown(md_text, RenderType::Pulldown)); + let differences = html_diff::get_differences(&pulldown_output, &hoedown_output); + let differences = differences.into_iter() + .filter(|s| { + match *s { + html_diff::Difference::NodeText { ref elem_text, + ref opposite_elem_text, + .. } + if match_non_whitespace(elem_text, opposite_elem_text) => false, + _ => true, + } + }) + .collect::>(); + + if !differences.is_empty() { + scx.markdown_warnings.borrow_mut().push((span, md_text.to_owned(), differences)); + } + + pulldown_output + } else { + hoedown_output + }; + + write!(w, "
<div class='docblock'>{}{}</div>
", prefix, output) +} + +// Returns true iff s1 and s2 match, ignoring whitespace. +fn match_non_whitespace(s1: &str, s2: &str) -> bool { + let s1 = s1.trim(); + let s2 = s2.trim(); + let mut cs1 = s1.chars(); + let mut cs2 = s2.chars(); + while let Some(c1) = cs1.next() { + if c1.is_whitespace() { + continue; + } + + loop { + if let Some(c2) = cs2.next() { + if !c2.is_whitespace() { + if c1 != c2 { + return false; + } + break; + } + } else { + return false; + } + } + } + + while let Some(c2) = cs2.next() { + if !c2.is_whitespace() { + return false; + } + } + + true +} + fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLink, - render_type: RenderType, prefix: &str) -> fmt::Result { + cx: &Context, prefix: &str) -> fmt::Result { if let Some(s) = item.doc_value() { let markdown = if s.contains('\n') { format!("{} [Read more]({})", @@ -1654,7 +1824,7 @@ fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLin } else { format!("{}", &plain_summary_line(Some(s))) }; - write!(w, "
<div class='docblock'>{}{}</div>
", prefix, Markdown(&markdown, render_type))?; + render_markdown(w, &markdown, item.source.clone(), cx.render_type, prefix, &cx.shared)?; } else if !prefix.is_empty() { write!(w, "
<div class='docblock'>{}</div>
", prefix)?; } @@ -1676,9 +1846,9 @@ fn render_assoc_const_value(item: &clean::Item) -> String { } fn document_full(w: &mut fmt::Formatter, item: &clean::Item, - render_type: RenderType, prefix: &str) -> fmt::Result { + cx: &Context, prefix: &str) -> fmt::Result { if let Some(s) = item.doc_value() { - write!(w, "
<div class='docblock'>{}{}</div>
", prefix, Markdown(s, render_type))?; + render_markdown(w, s, item.source.clone(), cx.render_type, prefix, &cx.shared)?; } else if !prefix.is_empty() { write!(w, "
<div class='docblock'>{}</div>
", prefix)?; } @@ -3077,20 +3247,20 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi // because impls can't have a stability. document_stability(w, cx, it)?; if item.doc_value().is_some() { - document_full(w, item, cx.render_type, &prefix)?; + document_full(w, item, cx, &prefix)?; } else { // In case the item isn't documented, // provide short documentation from the trait. - document_short(w, it, link, cx.render_type, &prefix)?; + document_short(w, it, link, cx, &prefix)?; } } } else { document_stability(w, cx, item)?; - document_full(w, item, cx.render_type, &prefix)?; + document_full(w, item, cx, &prefix)?; } } else { document_stability(w, cx, item)?; - document_short(w, item, link, cx.render_type, &prefix)?; + document_short(w, item, link, cx, &prefix)?; } } Ok(()) @@ -3559,3 +3729,35 @@ fn test_name_sorting() { sorted.sort_by_key(|&s| name_key(s)); assert_eq!(names, sorted); } + +#[cfg(test)] +#[test] +fn test_match_non_whitespace() { + assert!(match_non_whitespace("", "")); + assert!(match_non_whitespace(" ", "")); + assert!(match_non_whitespace("", " ")); + + assert!(match_non_whitespace("a", "a")); + assert!(match_non_whitespace(" a ", "a")); + assert!(match_non_whitespace("a", " a")); + assert!(match_non_whitespace("abc", "abc")); + assert!(match_non_whitespace("abc", " abc ")); + assert!(match_non_whitespace("abc ", "abc")); + assert!(match_non_whitespace("abc xyz", "abc xyz")); + assert!(match_non_whitespace("abc xyz", "abc\nxyz")); + assert!(match_non_whitespace("abc xyz", "abcxyz")); + assert!(match_non_whitespace("abcxyz", "abc xyz")); + assert!(match_non_whitespace("abc xyz ", " abc xyz\n")); + + assert!(!match_non_whitespace("a", "b")); + assert!(!match_non_whitespace(" a ", "c")); + assert!(!match_non_whitespace("a", " aa")); + assert!(!match_non_whitespace("abc", "ac")); + assert!(!match_non_whitespace("abc", " adc ")); + assert!(!match_non_whitespace("abc ", "abca")); + assert!(!match_non_whitespace("abc xyz", "abc xy")); + assert!(!match_non_whitespace("abc xyz", "bc\nxyz")); + assert!(!match_non_whitespace("abc xyz", "abc.xyz")); + assert!(!match_non_whitespace("abcxyz", "abc.xyz")); + assert!(!match_non_whitespace("abc xyz ", " abc xyz w")); +} diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index 4a3286b421ae9..ca55d0e5d2a8e 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -329,6 +329,10 @@ h4 > code, h3 > code, .invisible > code { display: inline-block; } +.in-band > code { + display: inline-block; +} + #main { position: relative; } #main > .since { top: inherit; @@ -447,7 +451,8 @@ a { } .in-band:hover > .anchor { - display: initial; + display: inline-block; + position: absolute; } .anchor { display: none; @@ -653,11 +658,13 @@ h3 > .collapse-toggle, h4 > .collapse-toggle { .toggle-wrapper { position: relative; + margin-top: 5px; } .toggle-wrapper.collapsed { - height: 1em; + height: 25px; transition: height .2s; + margin-bottom: .6em; } .collapse-toggle > .inner { @@ -699,14 +706,16 @@ span.since { margin-top: 5px; } -.variant + .toggle-wrapper > a { - margin-top: 5px; -} - .sub-variant, .sub-variant > h3 { margin-top: 0 !important; } +.toggle-label { + display: inline-block; + margin-left: 4px; + margin-top: 3px; +} + .enum > .toggle-wrapper + .docblock, .struct > .toggle-wrapper + .docblock { margin-left: 30px; margin-bottom: 20px; diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 61a8165d26af1..d04b6d3417a5a 100644 --- 
a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -28,6 +28,7 @@ extern crate arena; extern crate getopts; extern crate env_logger; +extern crate html_diff; extern crate libc; extern crate rustc; extern crate rustc_data_structures; diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 47e8d63f80aa0..5d74cbdf56a19 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -532,7 +532,7 @@ impl Collector { pub fn get_line(&self) -> usize { if let Some(ref codemap) = self.codemap { - let line = self.position.lo.to_usize(); + let line = self.position.lo().to_usize(); let line = codemap.lookup_char_pos(BytePos(line as u32)).line; if line > 0 { line - 1 } else { line } } else { diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index e3426fba1bca1..1f33cd7765164 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -199,8 +199,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { self.visit_item(item, None, &mut om); } self.inside_public_path = orig_inside_public_path; - if let Some(exports) = self.cx.tcx.export_map.get(&id) { - for export in exports { + let hir_id = self.cx.tcx.hir.node_to_hir_id(id); + if let Some(exports) = self.cx.tcx.module_exports(hir_id) { + for export in exports.iter() { if let Def::Macro(def_id, ..) = export.def { if def_id.krate == LOCAL_CRATE || self.reexported_macros.contains(&def_id) { continue // These are `krate.exported_macros`, handled in `self.visit()`. diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index b57067e35e9d9..33bf0d68126d4 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -322,14 +322,12 @@ // if the user has disabled jemalloc in `./configure`). // `force_alloc_system` is *only* intended as a workaround for local rebuilds // with a rustc without jemalloc. -// The not(stage0+msvc) gates will only last until the next stage0 bump -#![cfg_attr(all( - not(all(stage0, target_env = "msvc")), - any(stage0, feature = "force_alloc_system")), - feature(global_allocator))] -#[cfg(all( - not(all(stage0, target_env = "msvc")), - any(stage0, feature = "force_alloc_system")))] +// FIXME(#44236) shouldn't need MSVC logic +#![cfg_attr(all(not(target_env = "msvc"), + any(stage0, feature = "force_alloc_system")), + feature(global_allocator))] +#[cfg(all(not(target_env = "msvc"), + any(stage0, feature = "force_alloc_system")))] #[global_allocator] static ALLOC: alloc_system::System = alloc_system::System; diff --git a/src/libstd/macros.rs b/src/libstd/macros.rs index c426bf8086eef..72d561fae3bd9 100644 --- a/src/libstd/macros.rs +++ b/src/libstd/macros.rs @@ -14,25 +14,35 @@ //! library. Each macro is available for use when linking against the standard //! library. -#[macro_export] -// This stability attribute is totally useless. -#[stable(feature = "rust1", since = "1.0.0")] -#[cfg(stage0)] -macro_rules! __rust_unstable_column { - () => { - column!() - } -} - /// The entry point for panic of Rust threads. /// +/// This allows a program to to terminate immediately and provide feedback +/// to the caller of the program. `panic!` should be used when a program reaches +/// an unrecoverable problem. +/// +/// This macro is the perfect way to assert conditions in example code and in +/// tests. `panic!` is closely tied with the `unwrap` method of both [`Option`] +/// and [`Result`][runwrap] enums. Both implementations call `panic!` when they are set +/// to None or Err variants. +/// /// This macro is used to inject panic into a Rust thread, causing the thread to /// panic entirely. 
Each thread's panic can be reaped as the `Box` type, /// and the single-argument form of the `panic!` macro will be the value which /// is transmitted. /// +/// [`Result`] enum is often a better solution for recovering from errors than +/// using the `panic!` macro. This macro should be used to avoid proceeding using +/// incorrect values, such as from external sources. Detailed information about +/// error handling is found in the [book]. +/// /// The multi-argument form of this macro panics with a string and has the -/// `format!` syntax for building a string. +/// [`format!`] syntax for building a string. +/// +/// [runwrap]: ../std/result/enum.Result.html#method.unwrap +/// [`Option`]: ../std/option/enum.Option.html#method.unwrap +/// [`Result`]: ../std/result/enum.Result.html +/// [`format!`]: ../std/macro.format.html +/// [book]: ../book/second-edition/ch09-01-unrecoverable-errors-with-panic.html /// /// # Current implementation /// @@ -78,15 +88,19 @@ macro_rules! panic { /// Macro for printing to the standard output. /// -/// Equivalent to the `println!` macro except that a newline is not printed at +/// Equivalent to the [`println!`] macro except that a newline is not printed at /// the end of the message. /// /// Note that stdout is frequently line-buffered by default so it may be -/// necessary to use `io::stdout().flush()` to ensure the output is emitted +/// necessary to use [`io::stdout().flush()`][flush] to ensure the output is emitted /// immediately. /// /// Use `print!` only for the primary output of your program. Use -/// `eprint!` instead to print error and progress messages. +/// [`eprint!`] instead to print error and progress messages. +/// +/// [`println!`]: ../std/macro.println.html +/// [flush]: ../std/io/trait.Write.html#tymethod.flush +/// [`eprint!`]: ../std/macro.eprint.html /// /// # Panics /// @@ -118,16 +132,20 @@ macro_rules! print { ($($arg:tt)*) => ($crate::io::_print(format_args!($($arg)*))); } -/// Macro for printing to the standard output, with a newline. On all -/// platforms, the newline is the LINE FEED character (`\n`/`U+000A`) alone +/// Macro for printing to the standard output, with a newline. +/// +/// On all platforms, the newline is the LINE FEED character (`\n`/`U+000A`) alone /// (no additional CARRIAGE RETURN (`\r`/`U+000D`). /// -/// Use the `format!` syntax to write data to the standard output. -/// See `std::fmt` for more information. +/// Use the [`format!`] syntax to write data to the standard output. +/// See [`std::fmt`] for more information. /// /// Use `println!` only for the primary output of your program. Use -/// `eprintln!` instead to print error and progress messages. +/// [`eprintln!`] instead to print error and progress messages. /// +/// [`format!`]: ../std/macro.format.html +/// [`std::fmt`]: ../std/fmt/index.html +/// [`eprintln!`]: ../std/macro.eprint.html /// # Panics /// /// Panics if writing to `io::stdout` fails. @@ -149,16 +167,25 @@ macro_rules! println { /// Macro for printing to the standard error. /// -/// Equivalent to the `print!` macro, except that output goes to -/// `io::stderr` instead of `io::stdout`. See `print!` for +/// Equivalent to the [`print!`] macro, except that output goes to +/// [`io::stderr`] instead of `io::stdout`. See [`print!`] for /// example usage. /// /// Use `eprint!` only for error and progress messages. Use `print!` /// instead for the primary output of your program. 
/// +/// [`io::stderr`]: ../std/io/struct.Stderr.html +/// [`print!`]: ../std/macro.print.html +/// /// # Panics /// /// Panics if writing to `io::stderr` fails. +/// +/// # Examples +/// +/// ``` +/// eprint!("Error: Could not complete task"); +/// ``` #[macro_export] #[stable(feature = "eprint", since = "1.19.0")] #[allow_internal_unstable] @@ -168,16 +195,25 @@ macro_rules! eprint { /// Macro for printing to the standard error, with a newline. /// -/// Equivalent to the `println!` macro, except that output goes to -/// `io::stderr` instead of `io::stdout`. See `println!` for +/// Equivalent to the [`println!`] macro, except that output goes to +/// [`io::stderr`] instead of `io::stdout`. See [`println!`] for /// example usage. /// /// Use `eprintln!` only for error and progress messages. Use `println!` /// instead for the primary output of your program. /// +/// [`io::stderr`]: ../std/io/struct.Stderr.html +/// [`println!`]: ../std/macro.println.html +/// /// # Panics /// /// Panics if writing to `io::stderr` fails. +/// +/// # Examples +/// +/// ``` +/// eprintln!("Error: Could not complete task"); +/// ``` #[macro_export] #[stable(feature = "eprint", since = "1.19.0")] macro_rules! eprintln { @@ -267,13 +303,23 @@ pub mod builtin { /// The core macro for formatted string creation & output. /// + /// This macro functions by taking a formatting string literal containing + /// `{}` for each additional argument passed. `format_args!` prepares the + /// additional parameters to ensure the output can be interpreted as a string + /// and canonicalizes the arguments into a single type. Any value that implements + /// the [`Display`] trait can be passed to `format_args!`, as can any + /// [`Debug`] implementation be passed to a `{:?}` within the formatting string. + /// /// This macro produces a value of type [`fmt::Arguments`]. This value can be - /// passed to the functions in [`std::fmt`] for performing useful functions. + /// passed to the macros within [`std::fmt`] for performing useful redirection. /// All other formatting macros ([`format!`], [`write!`], [`println!`], etc) are - /// proxied through this one. + /// proxied through this one. `format_args!`, unlike its derived macros, avoids + /// heap allocations. /// /// For more information, see the documentation in [`std::fmt`]. /// + /// [`Display`]: ../std/fmt/trait.Display.html + /// [`Debug`]: ../std/fmt/trait.Debug.html /// [`fmt::Arguments`]: ../std/fmt/struct.Arguments.html /// [`std::fmt`]: ../std/fmt/index.html /// [`format!`]: ../std/macro.format.html @@ -301,9 +347,11 @@ pub mod builtin { /// compile time, yielding an expression of type `&'static str`. /// /// If the environment variable is not defined, then a compilation error - /// will be emitted. To not emit a compile error, use the `option_env!` + /// will be emitted. To not emit a compile error, use the [`option_env!`] /// macro instead. /// + /// [`option_env!`]: ../std/macro.option_env.html + /// /// # Examples /// /// ``` @@ -319,11 +367,14 @@ pub mod builtin { /// If the named environment variable is present at compile time, this will /// expand into an expression of type `Option<&'static str>` whose value is /// `Some` of the value of the environment variable. If the environment - /// variable is not present, then this will expand to `None`. + /// variable is not present, then this will expand to `None`. See + /// [`Option`][option] for more information on this type. 
/// /// A compile time error is never emitted when using this macro regardless /// of whether the environment variable is present or not. /// + /// [option]: ../std/option/enum.Option.html + /// /// # Examples /// /// ``` @@ -385,10 +436,16 @@ pub mod builtin { /// A macro which expands to the line number on which it was invoked. /// + /// With [`column!`] and [`file!`], these macros provide debugging information for + /// developers about the location within the source. + /// /// The expanded expression has type `u32`, and the returned line is not /// the invocation of the `line!()` macro itself, but rather the first macro /// invocation leading up to the invocation of the `line!()` macro. /// + /// [`column!`]: macro.column.html + /// [`file!`]: macro.file.html + /// /// # Examples /// /// ``` @@ -401,9 +458,15 @@ pub mod builtin { /// A macro which expands to the column number on which it was invoked. /// + /// With [`line!`] and [`file!`], these macros provide debugging information for + /// developers about the location within the source. + /// /// The expanded expression has type `u32`, and the returned column is not - /// the invocation of the `column!()` macro itself, but rather the first macro - /// invocation leading up to the invocation of the `column!()` macro. + /// the invocation of the `column!` macro itself, but rather the first macro + /// invocation leading up to the invocation of the `column!` macro. + /// + /// [`line!`]: macro.line.html + /// [`file!`]: macro.file.html /// /// # Examples /// @@ -417,11 +480,18 @@ pub mod builtin { /// A macro which expands to the file name from which it was invoked. /// + /// With [`line!`] and [`column!`], these macros provide debugging information for + /// developers about the location within the source. + /// + /// /// The expanded expression has type `&'static str`, and the returned file - /// is not the invocation of the `file!()` macro itself, but rather the - /// first macro invocation leading up to the invocation of the `file!()` + /// is not the invocation of the `file!` macro itself, but rather the + /// first macro invocation leading up to the invocation of the `file!` /// macro. 
/// + /// [`line!`]: macro.line.html + /// [`column!`]: macro.column.html + /// /// # Examples /// /// ``` diff --git a/src/libstd/net/addr.rs b/src/libstd/net/addr.rs index 36c06dc0b58d0..e1d7a2531b6c9 100644 --- a/src/libstd/net/addr.rs +++ b/src/libstd/net/addr.rs @@ -705,30 +705,74 @@ impl hash::Hash for SocketAddrV6 { /// /// # Examples /// +/// Creating a [`SocketAddr`] iterator that yields one item: +/// +/// ``` +/// use std::net::{ToSocketAddrs, SocketAddr}; +/// +/// let addr = SocketAddr::from(([127, 0, 0, 1], 443)); +/// let mut addrs_iter = addr.to_socket_addrs().unwrap(); +/// +/// assert_eq!(Some(addr), addrs_iter.next()); +/// assert!(addrs_iter.next().is_none()); +/// ``` +/// +/// Creating a [`SocketAddr`] iterator from a hostname: +/// /// ```no_run -/// use std::net::{SocketAddrV4, TcpStream, UdpSocket, TcpListener, Ipv4Addr}; -/// -/// fn main() { -/// let ip = Ipv4Addr::new(127, 0, 0, 1); -/// let port = 12345; -/// -/// // The following lines are equivalent modulo possible "localhost" name -/// // resolution differences -/// let tcp_s = TcpStream::connect(SocketAddrV4::new(ip, port)); -/// let tcp_s = TcpStream::connect((ip, port)); -/// let tcp_s = TcpStream::connect(("127.0.0.1", port)); -/// let tcp_s = TcpStream::connect(("localhost", port)); -/// let tcp_s = TcpStream::connect("127.0.0.1:12345"); -/// let tcp_s = TcpStream::connect("localhost:12345"); -/// -/// // TcpListener::bind(), UdpSocket::bind() and UdpSocket::send_to() -/// // behave similarly -/// let tcp_l = TcpListener::bind("localhost:12345"); -/// -/// let mut udp_s = UdpSocket::bind(("127.0.0.1", port)).unwrap(); -/// udp_s.send_to(&[7], (ip, 23451)).unwrap(); -/// } +/// use std::net::{SocketAddr, ToSocketAddrs}; +/// +/// // assuming 'localhost' resolves to 127.0.0.1 +/// let mut addrs_iter = "localhost:443".to_socket_addrs().unwrap(); +/// assert_eq!(addrs_iter.next(), Some(SocketAddr::from(([127, 0, 0, 1], 443)))); +/// assert!(addrs_iter.next().is_none()); +/// +/// // assuming 'foo' does not resolve +/// assert!("foo:443".to_socket_addrs().is_err()); /// ``` +/// +/// Creating a [`SocketAddr`] iterator that yields multiple items: +/// +/// ``` +/// use std::net::{SocketAddr, ToSocketAddrs}; +/// +/// let addr1 = SocketAddr::from(([0, 0, 0, 0], 80)); +/// let addr2 = SocketAddr::from(([127, 0, 0, 1], 443)); +/// let addrs = vec![addr1, addr2]; +/// +/// let mut addrs_iter = (&addrs[..]).to_socket_addrs().unwrap(); +/// +/// assert_eq!(Some(addr1), addrs_iter.next()); +/// assert_eq!(Some(addr2), addrs_iter.next()); +/// assert!(addrs_iter.next().is_none()); +/// ``` +/// +/// Attempting to create a [`SocketAddr`] iterator from an improperly formatted +/// socket address `&str` (missing the port): +/// +/// ``` +/// use std::io; +/// use std::net::ToSocketAddrs; +/// +/// let err = "127.0.0.1".to_socket_addrs().unwrap_err(); +/// assert_eq!(err.kind(), io::ErrorKind::InvalidInput); +/// ``` +/// +/// [`TcpStream::connect`] is an example of an function that utilizes +/// `ToSocketsAddr` as a trait bound on its parameter in order to accept +/// different types: +/// +/// ```no_run +/// use std::net::{TcpStream, Ipv4Addr}; +/// +/// let stream = TcpStream::connect(("127.0.0.1", 443)); +/// // or +/// let stream = TcpStream::connect("127.0.0.1:443"); +/// // or +/// let stream = TcpStream::connect((Ipv4Addr::new(127, 0, 0, 1), 443)); +/// ``` +/// +/// [`TcpStream::connect`]: ../../std/net/struct.TcpStream.html#method.connect #[stable(feature = "rust1", since = "1.0.0")] pub trait ToSocketAddrs { 
/// Returned iterator over socket addresses which this type may correspond diff --git a/src/libstd/net/tcp.rs b/src/libstd/net/tcp.rs index 2eabb46441b32..5467eff202b02 100644 --- a/src/libstd/net/tcp.rs +++ b/src/libstd/net/tcp.rs @@ -111,15 +111,18 @@ impl TcpStream { /// `addr` is an address of the remote host. Anything which implements /// [`ToSocketAddrs`] trait can be supplied for the address; see this trait /// documentation for concrete examples. - /// In case [`ToSocketAddrs::to_socket_addrs()`] returns more than one entry, - /// then the first valid and reachable address is used. + /// + /// If `addr` yields multiple addresses, `connect` will be attempted with + /// each of the addresses until a connection is successful. If none of + /// the addresses result in a successful connection, the error returned from + /// the last connection attempt (the last address) is returned. /// /// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html - /// [`ToSocketAddrs::to_socket_addrs()`]: - /// ../../std/net/trait.ToSocketAddrs.html#tymethod.to_socket_addrs /// /// # Examples /// + /// Open a TCP connection to `127.0.0.1:8080`: + /// /// ```no_run /// use std::net::TcpStream; /// @@ -129,6 +132,23 @@ impl TcpStream { /// println!("Couldn't connect to server..."); /// } /// ``` + /// + /// Open a TCP connection to `127.0.0.1:8080`. If the connection fails, open + /// a TCP connection to `127.0.0.1:8081`: + /// + /// ```no_run + /// use std::net::{SocketAddr, TcpStream}; + /// + /// let addrs = [ + /// SocketAddr::from(([127, 0, 0, 1], 8080)), + /// SocketAddr::from(([127, 0, 0, 1], 8081)), + /// ]; + /// if let Ok(stream) = TcpStream::connect(&addrs[..]) { + /// println!("Connected to the server!"); + /// } else { + /// println!("Couldn't connect to server..."); + /// } + /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn connect(addr: A) -> io::Result { super::each_addr(addr, net_imp::TcpStream::connect).map(TcpStream) @@ -557,16 +577,36 @@ impl TcpListener { /// The address type can be any implementor of [`ToSocketAddrs`] trait. See /// its documentation for concrete examples. /// + /// If `addr` yields multiple addresses, `bind` will be attempted with + /// each of the addresses until one succeeds and returns the listener. If + /// none of the addresses succeed in creating a listener, the error returned + /// from the last attempt (the last address) is returned. + /// /// [`local_addr`]: #method.local_addr /// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html /// /// # Examples /// + /// Create a TCP listener bound to `127.0.0.1:80`: + /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// ``` + /// + /// Create a TCP listener bound to `127.0.0.1:80`. If that fails, create a + /// TCP listener bound to `127.0.0.1:443`: + /// + /// ```no_run + /// use std::net::{SocketAddr, TcpListener}; + /// + /// let addrs = [ + /// SocketAddr::from(([127, 0, 0, 1], 80)), + /// SocketAddr::from(([127, 0, 0, 1], 443)), + /// ]; + /// let listener = TcpListener::bind(&addrs[..]).unwrap(); + /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn bind(addr: A) -> io::Result { super::each_addr(addr, net_imp::TcpListener::bind).map(TcpListener) diff --git a/src/libstd/net/udp.rs b/src/libstd/net/udp.rs index 9aff989788536..35001833383c0 100644 --- a/src/libstd/net/udp.rs +++ b/src/libstd/net/udp.rs @@ -69,14 +69,34 @@ impl UdpSocket { /// The address type can be any implementor of [`ToSocketAddrs`] trait. 
See /// its documentation for concrete examples. /// + /// If `addr` yields multiple addresses, `bind` will be attempted with + /// each of the addresses until one succeeds and returns the socket. If none + /// of the addresses succeed in creating a socket, the error returned from + /// the last attempt (the last address) is returned. + /// /// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html /// /// # Examples /// + /// Create a UDP socket bound to `127.0.0.1:3400`: + /// /// ```no_run /// use std::net::UdpSocket; /// - /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address"); + /// let socket = UdpSocket::bind("127.0.0.1:3400").expect("couldn't bind to address"); + /// ``` + /// + /// Create a UDP socket bound to `127.0.0.1:3400`. If the socket cannot be + /// bound to that address, create a UDP socket bound to `127.0.0.1:3401`: + /// + /// ```no_run + /// use std::net::{SocketAddr, UdpSocket}; + /// + /// let addrs = [ + /// SocketAddr::from(([127, 0, 0, 1], 3400)), + /// SocketAddr::from(([127, 0, 0, 1], 3401)), + /// ]; + /// let socket = UdpSocket::bind(&addrs[..]).expect("couldn't bind to address"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn bind(addr: A) -> io::Result { @@ -130,6 +150,9 @@ impl UdpSocket { /// Address type can be any implementor of [`ToSocketAddrs`] trait. See its /// documentation for concrete examples. /// + /// It is possible for `addr` to yield multiple addresses, but `send_to` + /// will only send data to the first address yielded by `addr`. + /// /// This will return an error when the IP version of the local socket /// does not match that returned from [`ToSocketAddrs`]. /// @@ -562,14 +585,37 @@ impl UdpSocket { /// `recv` syscalls to be used to send data and also applies filters to only /// receive data from the specified address. /// + /// If `addr` yields multiple addresses, `connect` will be attempted with + /// each of the addresses until a connection is successful. If none of + /// the addresses are able to be connected, the error returned from the + /// last connection attempt (the last address) is returned. + /// /// # Examples /// + /// Create a UDP socket bound to `127.0.0.1:3400` and connect the socket to + /// `127.0.0.1:8080`: + /// /// ```no_run /// use std::net::UdpSocket; /// - /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address"); + /// let socket = UdpSocket::bind("127.0.0.1:3400").expect("couldn't bind to address"); /// socket.connect("127.0.0.1:8080").expect("connect function failed"); /// ``` + /// + /// Create a UDP socket bound to `127.0.0.1:3400` and connect the socket to + /// `127.0.0.1:8080`. 
If that connection fails, then the UDP socket will + /// connect to `127.0.0.1:8081`: + /// + /// ```no_run + /// use std::net::{SocketAddr, UdpSocket}; + /// + /// let socket = UdpSocket::bind("127.0.0.1:3400").expect("couldn't bind to address"); + /// let connect_addrs = [ + /// SocketAddr::from(([127, 0, 0, 1], 8080)), + /// SocketAddr::from(([127, 0, 0, 1], 8081)), + /// ]; + /// socket.connect(&connect_addrs[..]).expect("connect function failed"); + /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn connect(&self, addr: A) -> io::Result<()> { super::each_addr(addr, |addr| self.0.connect(addr)) diff --git a/src/libstd/os/raw.rs b/src/libstd/os/raw.rs index c34491941d690..fe0427d4e5f9c 100644 --- a/src/libstd/os/raw.rs +++ b/src/libstd/os/raw.rs @@ -14,8 +14,7 @@ use fmt; -#[cfg(any(target_os = "emscripten", - all(target_os = "linux", any(target_arch = "aarch64", +#[cfg(any(all(target_os = "linux", any(target_arch = "aarch64", target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", @@ -24,8 +23,7 @@ use fmt; target_arch = "arm")), all(target_os = "fuchsia", target_arch = "aarch64")))] #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8; -#[cfg(not(any(target_os = "emscripten", - all(target_os = "linux", any(target_arch = "aarch64", +#[cfg(not(any(all(target_os = "linux", any(target_arch = "aarch64", target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", diff --git a/src/libstd/panicking.rs b/src/libstd/panicking.rs index 739dc4163feb3..80ce15944a5c3 100644 --- a/src/libstd/panicking.rs +++ b/src/libstd/panicking.rs @@ -522,40 +522,6 @@ pub fn begin_panic_fmt(msg: &fmt::Arguments, begin_panic(s, file_line_col) } -// FIXME: In PR #42938, we have added the column as info passed to the panic -// handling code. For this, we want to break the ABI of begin_panic. -// This is not possible to do directly, as the stage0 compiler is hardcoded -// to emit a call to begin_panic in src/libsyntax/ext/build.rs, only -// with the file and line number being passed, but not the colum number. -// By changing the compiler source, we can only affect behaviour of higher -// stages. We need to perform the switch over two stage0 replacements, using -// a temporary function begin_panic_new while performing the switch: -// 0. Before the current switch, we told stage1 onward to emit a call -// to begin_panic_new. -// 1. Right now, stage0 calls begin_panic_new with the new ABI, -// begin_panic stops being used. We have changed begin_panic to -// the new ABI, and started to emit calls to begin_panic in higher -// stages again, this time with the new ABI. -// 2. After the second SNAP, stage0 calls begin_panic with the new ABI, -// and we can remove the temporary begin_panic_new function. - -/// This is the entry point of panicking for panic!() and assert!(). -#[cfg(stage0)] -#[unstable(feature = "libstd_sys_internals", - reason = "used by the panic! macro", - issue = "0")] -#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible -pub fn begin_panic_new(msg: M, file_line_col: &(&'static str, u32, u32)) -> ! { - // Note that this should be the only allocation performed in this code path. - // Currently this means that panic!() on OOM will invoke this code path, - // but then again we're not really ready for panic on OOM anyway. If - // we do start doing this, then we should propagate this allocation to - // be performed in the parent of this thread instead of the thread that's - // panicking. 
- - rust_panic_with_hook(Box::new(msg), file_line_col) -} - /// This is the entry point of panicking for panic!() and assert!(). #[unstable(feature = "libstd_sys_internals", reason = "used by the panic! macro", diff --git a/src/libstd/rt.rs b/src/libstd/rt.rs index 2aa23ea043b5b..06fd838ea06d9 100644 --- a/src/libstd/rt.rs +++ b/src/libstd/rt.rs @@ -25,8 +25,6 @@ // Reexport some of our utilities which are expected by other crates. -#[cfg(stage0)] -pub use panicking::begin_panic_new; pub use panicking::{begin_panic, begin_panic_fmt, update_panic_count}; #[cfg(not(test))] diff --git a/src/libstd/sys/unix/fd.rs b/src/libstd/sys/unix/fd.rs index 138087f165142..f50b093acc848 100644 --- a/src/libstd/sys/unix/fd.rs +++ b/src/libstd/sys/unix/fd.rs @@ -71,13 +71,21 @@ impl FileDesc { #[cfg(target_os = "android")] use super::android::cvt_pread64; - #[cfg(not(target_os = "android"))] + #[cfg(target_os = "emscripten")] unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64) -> io::Result { - #[cfg(any(target_os = "linux", target_os = "emscripten"))] use libc::pread64; - #[cfg(not(any(target_os = "linux", target_os = "emscripten")))] + cvt(pread64(fd, buf, count, offset as i32)) + } + + #[cfg(not(any(target_os = "android", target_os = "emscripten")))] + unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64) + -> io::Result + { + #[cfg(target_os = "linux")] + use libc::pread64; + #[cfg(not(target_os = "linux"))] use libc::pread as pread64; cvt(pread64(fd, buf, count, offset)) } @@ -104,13 +112,21 @@ impl FileDesc { #[cfg(target_os = "android")] use super::android::cvt_pwrite64; - #[cfg(not(target_os = "android"))] + #[cfg(target_os = "emscripten")] + unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64) + -> io::Result + { + use libc::pwrite64; + cvt(pwrite64(fd, buf, count, offset as i32)) + } + + #[cfg(not(any(target_os = "android", target_os = "emscripten")))] unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64) -> io::Result { - #[cfg(any(target_os = "linux", target_os = "emscripten"))] + #[cfg(target_os = "linux")] use libc::pwrite64; - #[cfg(not(any(target_os = "linux", target_os = "emscripten")))] + #[cfg(not(target_os = "linux"))] use libc::pwrite as pwrite64; cvt(pwrite64(fd, buf, count, offset)) } diff --git a/src/libstd/sys/unix/fs.rs b/src/libstd/sys/unix/fs.rs index cb0f687e0721c..f94af4913324f 100644 --- a/src/libstd/sys/unix/fs.rs +++ b/src/libstd/sys/unix/fs.rs @@ -514,6 +514,8 @@ impl File { SeekFrom::End(off) => (libc::SEEK_END, off), SeekFrom::Current(off) => (libc::SEEK_CUR, off), }; + #[cfg(target_os = "emscripten")] + let pos = pos as i32; let n = cvt(unsafe { lseek64(self.0.raw(), pos, whence) })?; Ok(n as u64) } diff --git a/src/libstd/sys/unix/process/process_unix.rs b/src/libstd/sys/unix/process/process_unix.rs index edd322ca6fa07..ae24021fb6c3a 100644 --- a/src/libstd/sys/unix/process/process_unix.rs +++ b/src/libstd/sys/unix/process/process_unix.rs @@ -10,7 +10,6 @@ use io::{self, Error, ErrorKind}; use libc::{self, c_int, gid_t, pid_t, uid_t}; -use mem; use ptr; use sys::cvt; @@ -184,7 +183,9 @@ impl Command { } // NaCl has no signal support. - if cfg!(not(any(target_os = "nacl", target_os = "emscripten"))) { + #[cfg(not(any(target_os = "nacl", target_os = "emscripten")))] + { + use mem; // Reset signal handling so the child process starts in a // standardized state. libstd ignores SIGPIPE, and signal-handling // libraries often set a mask. 
Child processes inherit ignored diff --git a/src/libstd/thread/local.rs b/src/libstd/thread/local.rs index 7a9b642350fa6..02347bf4906a6 100644 --- a/src/libstd/thread/local.rs +++ b/src/libstd/thread/local.rs @@ -157,7 +157,7 @@ macro_rules! thread_local { issue = "0")] #[macro_export] #[allow_internal_unstable] -#[cfg_attr(not(stage0), allow_internal_unsafe)] +#[allow_internal_unsafe] macro_rules! __thread_local_inner { ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => { $(#[$attr])* $vis static $name: $crate::thread::LocalKey<$t> = { @@ -394,9 +394,6 @@ pub mod fast { } } - #[cfg(stage0)] - unsafe impl ::marker::Sync for Key { } - impl Key { pub const fn new() -> Key { Key { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index fa4df2196a3c5..720f6cd32bdf8 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -134,7 +134,7 @@ impl PathSegment { } pub fn crate_root(span: Span) -> Self { PathSegment { - identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() }, + identifier: Ident { ctxt: span.ctxt(), ..keywords::CrateRoot.ident() }, span, parameters: None, } @@ -810,6 +810,7 @@ pub struct Arm { pub pats: Vec>, pub guard: Option>, pub body: P, + pub beginning_vert: Option, // For RFC 1925 feature gate } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] @@ -1016,6 +1017,9 @@ pub enum ExprKind { /// `expr?` Try(P), + + /// A `yield`, with an optional value to be yielded + Yield(Option>), } /// The explicit Self type in a "qualified path". The actual diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index ca87c807103fd..adbbc1b0ac588 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -490,6 +490,10 @@ pub fn contains_name(attrs: &[Attribute], name: &str) -> bool { }) } +pub fn find_by_name<'a>(attrs: &'a [Attribute], name: &str) -> Option<&'a Attribute> { + attrs.iter().find(|attr| attr.check_name(name)) +} + pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option { attrs.iter() .find(|at| at.check_name(name)) @@ -581,6 +585,20 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool { + eval_condition(cfg, sess, &mut |cfg| { + if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) { + gated_cfg.check_and_emit(sess, feats); + } + sess.config.contains(&(cfg.name(), cfg.value_str())) + }) +} + +/// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to +/// evaluate individual items. +pub fn eval_condition(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F) + -> bool + where F: FnMut(&ast::MetaItem) -> bool +{ match cfg.node { ast::MetaItemKind::List(ref mis) => { for mi in mis.iter() { @@ -594,10 +612,10 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat // that they won't fail with the loop above. 
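// Descriptive note (editorial): `any`, `all` and `not` recurse through `eval_condition`, so the caller-supplied `eval` closure is only ever applied to leaf `cfg` items (words and name/value pairs).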
match &*cfg.name.as_str() { "any" => mis.iter().any(|mi| { - cfg_matches(mi.meta_item().unwrap(), sess, features) + eval_condition(mi.meta_item().unwrap(), sess, eval) }), "all" => mis.iter().all(|mi| { - cfg_matches(mi.meta_item().unwrap(), sess, features) + eval_condition(mi.meta_item().unwrap(), sess, eval) }), "not" => { if mis.len() != 1 { @@ -605,7 +623,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat return false; } - !cfg_matches(mis[0].meta_item().unwrap(), sess, features) + !eval_condition(mis[0].meta_item().unwrap(), sess, eval) }, p => { span_err!(sess.span_diagnostic, cfg.span, E0537, "invalid predicate `{}`", p); @@ -614,10 +632,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat } }, ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => { - if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) { - gated_cfg.check_and_emit(sess, feats); - } - sess.config.contains(&(cfg.name(), cfg.value_str())) + eval(cfg) } } } @@ -1055,7 +1070,7 @@ impl MetaItem { fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, { - let (mut span, name) = match tokens.next() { + let (span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 { token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name), @@ -1064,17 +1079,17 @@ impl MetaItem { }, _ => return None, }; - let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi); + let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi()); let node = match MetaItemKind::from_tokens(tokens) { Some(node) => node, _ => return None, }; - span.hi = match node { - MetaItemKind::NameValue(ref lit) => lit.span.hi, - MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi), - _ => span.hi, + let hi = match node { + MetaItemKind::NameValue(ref lit) => lit.span.hi(), + MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi()), + _ => span.hi(), }; - Some(MetaItem { name: name, span: span, node: node }) + Some(MetaItem { name, node, span: span.with_hi(hi) }) } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 30ae7df93532e..cd4a6f921fe6f 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -34,8 +34,8 @@ use errors::CodeMapper; /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. 
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site); - let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site); + let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site); + let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, @@ -232,7 +232,7 @@ impl CodeMap { } pub fn mk_substr_filename(&self, sp: Span) -> String { - let pos = self.lookup_char_pos(sp.lo); + let pos = self.lookup_char_pos(sp.lo()); (format!("<{}:{}:{}>", pos.file.name, pos.line, @@ -299,18 +299,16 @@ impl CodeMap { /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { - use std::cmp; - // make sure we're at the same expansion id - if sp_lhs.ctxt != sp_rhs.ctxt { + if sp_lhs.ctxt() != sp_rhs.ctxt() { return None; } - let lhs_end = match self.lookup_line(sp_lhs.hi) { + let lhs_end = match self.lookup_line(sp_lhs.hi()) { Ok(x) => x, Err(_) => return None }; - let rhs_begin = match self.lookup_line(sp_rhs.lo) { + let rhs_begin = match self.lookup_line(sp_rhs.lo()) { Ok(x) => x, Err(_) => return None }; @@ -321,12 +319,8 @@ impl CodeMap { } // ensure these follow the expected order and we don't overlap - if (sp_lhs.lo <= sp_rhs.lo) && (sp_lhs.hi <= sp_rhs.lo) { - Some(Span { - lo: cmp::min(sp_lhs.lo, sp_rhs.lo), - hi: cmp::max(sp_lhs.hi, sp_rhs.hi), - ctxt: sp_lhs.ctxt, - }) + if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) { + Some(sp_lhs.to(sp_rhs)) } else { None } @@ -337,8 +331,8 @@ impl CodeMap { return "no-location".to_string(); } - let lo = self.lookup_char_pos_adj(sp.lo); - let hi = self.lookup_char_pos_adj(sp.hi); + let lo = self.lookup_char_pos_adj(sp.lo()); + let hi = self.lookup_char_pos_adj(sp.hi()); return (format!("{}:{}:{}: {}:{}", lo.filename, lo.line, @@ -348,19 +342,19 @@ impl CodeMap { } pub fn span_to_filename(&self, sp: Span) -> FileName { - self.lookup_char_pos(sp.lo).file.name.to_string() + self.lookup_char_pos(sp.lo()).file.name.to_string() } pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { debug!("span_to_lines(sp={:?})", sp); - if sp.lo > sp.hi { + if sp.lo() > sp.hi() { return Err(SpanLinesError::IllFormedSpan(sp)); } - let lo = self.lookup_char_pos(sp.lo); + let lo = self.lookup_char_pos(sp.lo()); debug!("span_to_lines: lo={:?}", lo); - let hi = self.lookup_char_pos(sp.hi); + let hi = self.lookup_char_pos(sp.hi()); debug!("span_to_lines: hi={:?}", hi); if lo.file.start_pos != hi.file.start_pos { @@ -400,12 +394,12 @@ impl CodeMap { } pub fn span_to_snippet(&self, sp: Span) -> Result { - if sp.lo > sp.hi { + if sp.lo() > sp.hi() { return Err(SpanSnippetError::IllFormedSpan(sp)); } - let local_begin = self.lookup_byte_offset(sp.lo); - let local_end = self.lookup_byte_offset(sp.hi); + let local_begin = self.lookup_byte_offset(sp.lo()); + let local_end = self.lookup_byte_offset(sp.hi()); if local_begin.fm.start_pos != local_end.fm.start_pos { return Err(SpanSnippetError::DistinctSources(DistinctSources { @@ -450,7 +444,7 @@ impl CodeMap { Ok(snippet) => { let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right(); if !snippet.is_empty() && !snippet.contains('\n') { - Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp } + sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)) 
} else { sp } @@ -752,7 +746,7 @@ mod tests { fn t7() { // Test span_to_lines for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let file_lines = cm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, "blork.rs"); @@ -768,7 +762,7 @@ mod tests { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION } + Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) } /// Test span_to_snippet and span_to_lines for a span converting 3 @@ -798,7 +792,7 @@ mod tests { fn t8() { // Test span_to_snippet for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let snippet = cm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); @@ -808,7 +802,7 @@ mod tests { fn t9() { // Test span_to_str for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; + let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let sstr = cm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); @@ -859,11 +853,11 @@ mod tests { let lo = hi + offset; hi = lo + substring.len(); if i == n { - let span = Span { - lo: BytePos(lo as u32 + file.start_pos.0), - hi: BytePos(hi as u32 + file.start_pos.0), - ctxt: NO_EXPANSION, - }; + let span = Span::new( + BytePos(lo as u32 + file.start_pos.0), + BytePos(hi as u32 + file.start_pos.0), + NO_EXPANSION, + ); assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); return span; diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 14f1f8fbf8cb0..0909eec62691b 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -14,7 +14,6 @@ use {fold, attr}; use ast; use codemap::Spanned; use parse::{token, ParseSess}; -use syntax_pos::Span; use ptr::P; use util::small_vector::SmallVector; @@ -89,10 +88,10 @@ impl<'a> StripUnconfigured<'a> { parser.expect(&token::OpenDelim(token::Paren))?; let cfg = parser.parse_meta_item()?; parser.expect(&token::Comma)?; - let lo = parser.span.lo; + let lo = parser.span.lo(); let (path, tokens) = parser.parse_path_and_tokens()?; parser.expect(&token::CloseDelim(token::Paren))?; - Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span })) + Ok((cfg, path, tokens, parser.prev_span.with_lo(lo))) }) { Ok(result) => result, Err(mut e) => { diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs index 5bbd18bd9ee2e..daa7112235f47 100644 --- a/src/libsyntax/diagnostics/metadata.rs +++ b/src/libsyntax/diagnostics/metadata.rs @@ -47,7 +47,7 @@ pub struct ErrorLocation { impl ErrorLocation { /// Create an error location from a span. 
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation { - let loc = ecx.codemap().lookup_char_pos_adj(sp.lo); + let loc = ecx.codemap().lookup_char_pos_adj(sp.lo()); ErrorLocation { filename: loc.filename, line: loc.line diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index e57d9c6fe896a..cac2ff975d64b 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -731,7 +731,7 @@ impl<'a> ExtCtxt<'a> { // Stop going up the backtrace once include! is encountered return None; } - ctxt = info.call_site.ctxt; + ctxt = info.call_site.ctxt(); last_macro = Some(info.call_site); Some(()) }).is_none() { @@ -837,7 +837,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &st -> Option> { // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. let expr = expr.map(|mut expr| { - expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark); + expr.span = expr.span.with_ctxt(expr.span.ctxt().apply_mark(cx.current_expansion.mark)); expr }); diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index cbdd00135930e..48d789372a07b 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_fail(&self, span: Span, msg: Symbol) -> P { - let loc = self.codemap().lookup_char_pos(span.lo); + let loc = self.codemap().lookup_char_pos(span.lo()); let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name)); let expr_line = self.expr_u32(span, loc.line as u32); let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1); @@ -878,7 +878,8 @@ impl<'a> AstBuilder for ExtCtxt<'a> { attrs: vec![], pats, guard: None, - body: expr + body: expr, + beginning_vert: None, } } diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 38715f7275de5..2e70962cad6f8 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -68,7 +68,7 @@ pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path] }, }); - let span = Span { ctxt: cx.backtrace(), ..span }; + let span = span.with_ctxt(cx.backtrace()); item.map_attrs(|mut attrs| { if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) { let meta = cx.meta_word(span, Symbol::intern("structural_match")); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d1172b1b2ce94..2f7d5685b6efe 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -598,7 +598,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { ProcMacroDerive(ref ext, _) => { invoc.expansion_data.mark.set_expn_info(expn_info); - let span = Span { ctxt: self.cx.backtrace(), ..span }; + let span = span.with_ctxt(self.cx.backtrace()); let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this name: keywords::Invalid.name(), span: DUMMY_SP, @@ -609,7 +609,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { BuiltinDerive(func) => { expn_info.callee.allow_internal_unstable = true; invoc.expansion_data.mark.set_expn_info(expn_info); - let span = Span { ctxt: self.cx.backtrace(), ..span }; + let span = span.with_ctxt(self.cx.backtrace()); let mut items = Vec::new(); func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); kind.expect_from_annotatables(items) @@ -684,8 +684,8 @@ impl<'a> Parser<'a> { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); - let mut def_site_span = self.span; - def_site_span.ctxt = 
SyntaxContext::empty(); // Avoid emitting backtrace info twice. + // Avoid emitting backtrace info twice. + let def_site_span = self.span.with_ctxt(SyntaxContext::empty()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", @@ -1069,9 +1069,8 @@ impl Folder for Marker { ident } - fn new_span(&mut self, mut span: Span) -> Span { - span.ctxt = span.ctxt.apply_mark(self.0); - span + fn new_span(&mut self, span: Span) -> Span { + span.with_ctxt(span.ctxt().apply_mark(self.0)) } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 95fe41be12254..18a262d139a27 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -36,7 +36,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "line!"); let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo); + let loc = cx.codemap().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32)) } @@ -47,7 +47,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "column!"); let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo); + let loc = cx.codemap().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32)) } @@ -70,7 +70,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) base::check_zero_tts(cx, sp, tts, "file!"); let topmost = cx.expansion_cause().unwrap_or(sp); - let loc = cx.codemap().lookup_char_pos(topmost.lo); + let loc = cx.codemap().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name))) } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 405d06dafbfda..2167b64e6103d 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -329,7 +329,8 @@ fn inner_parse_loop(sess: &ParseSess, // Only touch the binders we have actually bound for idx in item.match_lo..item.match_hi { let sub = item.matches[idx].clone(); - new_pos.push_match(idx, MatchedSeq(sub, Span { lo: item.sp_lo, ..span })); + let span = span.with_lo(item.sp_lo); + new_pos.push_match(idx, MatchedSeq(sub, span)); } new_pos.match_cur = item.match_hi; @@ -379,7 +380,7 @@ fn inner_parse_loop(sess: &ParseSess, match_cur: item.match_cur, match_hi: item.match_cur + seq.num_captures, up: Some(item), - sp_lo: sp.lo, + sp_lo: sp.lo(), top_elts: Tt(TokenTree::Sequence(sp, seq)), })); } @@ -424,7 +425,7 @@ pub fn parse(sess: &ParseSess, recurse_into_modules: bool) -> NamedParseResult { let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true); - let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo)); + let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo())); let mut next_items = Vec::new(); // or proceed normally loop { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 983b19c5bf073..6d58af497f091 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -130,7 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, tts = 
tts.map_enumerated(|i, tt| { let mut tt = tt.clone(); let mut sp = rhs_spans[i]; - sp.ctxt = tt.span().ctxt; + sp = sp.with_ctxt(tt.span().ctxt()); tt.set_span(sp); tt }); @@ -161,7 +161,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, macro_ident: name }) } - Failure(sp, tok) => if sp.lo >= best_fail_spot.lo { + Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() { best_fail_spot = sp; best_fail_tok = Some(tok); }, diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 012d4a54b36f7..0e21e3f6b0010 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -37,7 +37,7 @@ impl Delimited { let open_span = if span == DUMMY_SP { DUMMY_SP } else { - Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } + span.with_lo(span.lo() + BytePos(self.delim.len() as u32)) }; TokenTree::Token(open_span, self.open_token()) } @@ -46,7 +46,7 @@ impl Delimited { let close_span = if span == DUMMY_SP { DUMMY_SP } else { - Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } + span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; TokenTree::Token(close_span, self.close_token()) } @@ -152,7 +152,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { Some(kind) => { - let span = Span { lo: start_sp.lo, ..end_sp }; + let span = end_sp.with_lo(start_sp.lo()); result.push(TokenTree::MetaVarDecl(span, ident, kind)); continue } @@ -198,7 +198,7 @@ fn parse_tree(tree: tokenstream::TokenTree, } Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { let ident = token.ident().unwrap(); - let span = Span { lo: span.lo, ..ident_span }; + let span = ident_span.with_lo(span.lo()); if ident.name == keywords::Crate.name() { let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident }; TokenTree::Token(span, token::Ident(ident)) diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index fe3dd83f9d5c0..d51b0d0ae3e93 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -155,7 +155,7 @@ pub fn transcribe(cx: &ExtCtxt, if let NtTT(ref tt) = **nt { result.push(tt.clone().into()); } else { - sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)); let token = TokenTree::Token(sp, Token::interpolated((**nt).clone())); result.push(token.into()); } @@ -166,13 +166,13 @@ pub fn transcribe(cx: &ExtCtxt, } else { let ident = Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident }; - sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)); result.push(TokenTree::Token(sp, token::Dollar).into()); result.push(TokenTree::Token(sp, token::Ident(ident)).into()); } } quoted::TokenTree::Delimited(mut span, delimited) => { - span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark); + span = span.with_ctxt(span.ctxt().apply_mark(cx.current_expansion.mark)); stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); result_stack.push(mem::replace(&mut result, Vec::new())); } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 09574d5ba129e..54d41a030fd77 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -366,6 +366,10 @@ declare_features! 
( // Allows unsized tuple coercion. (active, unsized_tuple_coercion, "1.20.0", Some(42877)), + // Generators + (active, generators, "1.21.0", None), + + // global allocators and their internals (active, global_allocator, "1.20.0", None), (active, allocator_internals, "1.20.0", None), @@ -375,6 +379,9 @@ declare_features! ( // allow `#[must_use]` on functions (RFC 1940) (active, fn_must_use, "1.21.0", Some(43302)), + + // allow '|' at beginning of match arms (RFC 1925) + (active, match_beginning_vert, "1.21.0", Some(44101)), ); declare_features! ( @@ -1248,8 +1255,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_item(&mut self, i: &'a ast::Item) { match i.node { ast::ItemKind::ExternCrate(_) => { - if attr::contains_name(&i.attrs[..], "macro_reexport") { - gate_feature_post!(&self, macro_reexport, i.span, + if let Some(attr) = attr::find_by_name(&i.attrs[..], "macro_reexport") { + gate_feature_post!(&self, macro_reexport, attr.span, "macros reexports are experimental \ and possibly buggy"); } @@ -1276,36 +1283,32 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { function may change over time, for now \ a top-level `fn main()` is required"); } - if attr::contains_name(&i.attrs[..], "must_use") { - gate_feature_post!(&self, fn_must_use, i.span, + if let Some(attr) = attr::find_by_name(&i.attrs[..], "must_use") { + gate_feature_post!(&self, fn_must_use, attr.span, "`#[must_use]` on functions is experimental", GateStrength::Soft); } } ast::ItemKind::Struct(..) => { - if attr::contains_name(&i.attrs[..], "simd") { - gate_feature_post!(&self, simd, i.span, + if let Some(attr) = attr::find_by_name(&i.attrs[..], "simd") { + gate_feature_post!(&self, simd, attr.span, "SIMD types are experimental and possibly buggy"); - self.context.parse_sess.span_diagnostic.span_warn(i.span, + self.context.parse_sess.span_diagnostic.span_warn(attr.span, "the `#[simd]` attribute \ is deprecated, use \ `#[repr(simd)]` instead"); } - for attr in &i.attrs { - if attr.path == "repr" { - for item in attr.meta_item_list().unwrap_or_else(Vec::new) { - if item.check_name("simd") { - gate_feature_post!(&self, repr_simd, i.span, - "SIMD types are experimental \ - and possibly buggy"); - - } - if item.check_name("align") { - gate_feature_post!(&self, repr_align, i.span, - "the struct `#[repr(align(u16))]` attribute \ - is experimental"); - } + if let Some(attr) = attr::find_by_name(&i.attrs[..], "repr") { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { + if item.check_name("simd") { + gate_feature_post!(&self, repr_simd, attr.span, + "SIMD types are experimental and possibly buggy"); + } + if item.check_name("align") { + gate_feature_post!(&self, repr_align, attr.span, + "the struct `#[repr(align(u16))]` attribute \ + is experimental"); } } } @@ -1334,8 +1337,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { for impl_item in impl_items { if let ast::ImplItemKind::Method(..) = impl_item.node { - if attr::contains_name(&impl_item.attrs[..], "must_use") { - gate_feature_post!(&self, fn_must_use, impl_item.span, + if let Some(attr) = attr::find_by_name(&impl_item.attrs[..], "must_use") { + gate_feature_post!(&self, fn_must_use, attr.span, "`#[must_use]` on methods is experimental", GateStrength::Soft); } @@ -1410,6 +1413,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ast::ExprKind::InPlace(..) => { gate_feature_post!(&self, placement_in_syntax, e.span, EXPLAIN_PLACEMENT_IN); } + ast::ExprKind::Yield(..) 
=> { + gate_feature_post!(&self, generators, + e.span, + "yield syntax is experimental"); + } ast::ExprKind::Lit(ref lit) => { if let ast::LitKind::Int(_, ref ty) = lit.node { match *ty { @@ -1430,6 +1438,15 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { visit::walk_expr(self, e); } + fn visit_arm(&mut self, arm: &'a ast::Arm) { + if let Some(span) = arm.beginning_vert { + gate_feature_post!(&self, match_beginning_vert, + span, + "Use of a '|' at the beginning of a match arm is experimental") + } + visit::walk_arm(self, arm) + } + fn visit_pat(&mut self, pattern: &'a ast::Pat) { match pattern.node { PatKind::Slice(_, Some(_), ref last) if !last.is_empty() => { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 6fd0a2eab4235..03c47b71d02d7 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -344,12 +344,14 @@ pub fn fold_thin_attrs(attrs: ThinVec, fld: &mut T) -> Thi fold_attrs(attrs.into(), fld).into() } -pub fn noop_fold_arm(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm { +pub fn noop_fold_arm(Arm {attrs, pats, guard, body, beginning_vert}: Arm, + fld: &mut T) -> Arm { Arm { attrs: fold_attrs(attrs, fld), pats: pats.move_map(|x| fld.fold_pat(x)), guard: guard.map(|x| fld.fold_expr(x)), body: fld.fold_expr(body), + beginning_vert, } } @@ -1311,6 +1313,7 @@ pub fn noop_fold_expr(Expr {id, node, span, attrs}: Expr, folder: &mu attrs: fold_attrs(attrs.into(), folder).into(), }; } + ExprKind::Yield(ex) => ExprKind::Yield(ex.map(|x| folder.fold_expr(x))), ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)), ExprKind::Catch(body) => ExprKind::Catch(folder.fold_block(body)), }, diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 37a59411c1618..db49ab1034358 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -230,8 +230,8 @@ impl DiagnosticSpan { mut backtrace: vec::IntoIter, je: &JsonEmitter) -> DiagnosticSpan { - let start = je.cm.lookup_char_pos(span.lo); - let end = je.cm.lookup_char_pos(span.hi); + let start = je.cm.lookup_char_pos(span.lo()); + let end = je.cm.lookup_char_pos(span.hi()); let backtrace_step = backtrace.next().map(|bt| { let call_site = Self::from_span_full(bt.call_site, @@ -256,8 +256,8 @@ impl DiagnosticSpan { }); DiagnosticSpan { file_name: start.file.name.clone(), - byte_start: span.lo.0 - start.file.start_pos.0, - byte_end: span.hi.0 - start.file.start_pos.0, + byte_start: span.lo().0 - start.file.start_pos.0, + byte_end: span.hi().0 - start.file.start_pos.0, line_start: start.line, line_end: end.line, column_start: start.col.0 + 1, diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index f65fffebe337a..fb558d1a58f85 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -386,7 +386,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R debug!("tok lit: {}", s); literals.push(Literal { lit: s.to_string(), - pos: sp.lo, + pos: sp.lo(), }); }) } else { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 527d2e413969e..f26a046090545 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -71,7 +71,7 @@ pub struct StringReader<'a> { impl<'a> StringReader<'a> { fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { - unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION}) + unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION)) } fn next_token(&mut self) -> TokenAndSpan where Self: Sized { @@ 
-190,20 +190,20 @@ impl<'a> StringReader<'a> { } pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self { - let begin = sess.codemap().lookup_byte_offset(span.lo); - let end = sess.codemap().lookup_byte_offset(span.hi); + let begin = sess.codemap().lookup_byte_offset(span.lo()); + let end = sess.codemap().lookup_byte_offset(span.hi()); // Make the range zero-length if the span is invalid. - if span.lo > span.hi || begin.fm.start_pos != end.fm.start_pos { - span.hi = span.lo; + if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos { + span = span.with_hi(span.lo()); } let mut sr = StringReader::new_raw_internal(sess, begin.fm); // Seek the lexer to the right byte range. sr.save_new_lines_and_multibyte = false; - sr.next_pos = span.lo; - sr.terminator = Some(span.hi); + sr.next_pos = span.lo(); + sr.terminator = Some(span.hi()); sr.bump(); @@ -1745,11 +1745,7 @@ mod tests { let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan { tok: token::Ident(id), - sp: Span { - lo: BytePos(21), - hi: BytePos(23), - ctxt: NO_EXPANSION, - }, + sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), }; assert_eq!(tok1, tok2); assert_eq!(string_reader.next_token().tok, token::Whitespace); @@ -1759,11 +1755,7 @@ mod tests { let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { tok: token::Ident(Ident::from_str("main")), - sp: Span { - lo: BytePos(24), - hi: BytePos(28), - ctxt: NO_EXPANSION, - }, + sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), }; assert_eq!(tok3, tok4); // the lparen is already read: @@ -1921,7 +1913,7 @@ mod tests { let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7))); + assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(Symbol::intern("/// test"))); diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index ad389ab510aaf..a2c81e2475428 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -11,7 +11,6 @@ use print::pprust::token_to_string; use parse::lexer::StringReader; use parse::{token, PResult}; -use syntax_pos::Span; use tokenstream::{Delimited, TokenStream, TokenTree}; impl<'a> StringReader<'a> { @@ -20,7 +19,7 @@ impl<'a> StringReader<'a> { let mut tts = Vec::new(); while self.token != token::Eof { let tree = self.parse_token_tree()?; - let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token); tts.push(if is_joint { tree.joint() } else { tree.into() }); } Ok(TokenStream::concat(tts)) @@ -40,7 +39,7 @@ impl<'a> StringReader<'a> { return TokenStream::concat(tts); } }; - let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token); tts.push(if is_joint { tree.joint() } else { tree.into() }); } } @@ -69,7 +68,7 @@ impl<'a> StringReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let span = Span { hi: self.span.hi, ..pre_span }; + let span = pre_span.with_hi(self.span.hi()); match self.token { // Correct delimiter. 
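
Most of the mechanical churn in the hunks above and below follows one pattern: direct access to the `Span` fields (`sp.lo`, `sp.hi`, `sp.ctxt`) and struct-literal construction (`Span { lo, hi, ctxt }`) are replaced with the getter/builder API that this patch adds to `libsyntax_pos` further down (`lo()`, `hi()`, `ctxt()`, `with_lo`, `with_hi`, `with_ctxt`, and `Span::new`). The sketch below is illustrative only — it uses simplified stand-in types rather than the real `syntax_pos` definitions (the `SyntaxContext` component is omitted) — to show the shape of the new calls.

```
// Illustrative stand-ins for syntax_pos::{BytePos, Span}; not the real types.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BytePos(pub u32);

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Span {
    lo: BytePos,
    hi: BytePos,
}

impl Span {
    // The real constructor also takes a `SyntaxContext`; omitted here.
    pub fn new(lo: BytePos, hi: BytePos) -> Span {
        // Swap reversed bounds, as the constructor introduced by this patch does.
        if lo.0 <= hi.0 { Span { lo, hi } } else { Span { lo: hi, hi: lo } }
    }
    pub fn lo(self) -> BytePos { self.lo }
    pub fn hi(self) -> BytePos { self.hi }
    pub fn with_lo(self, lo: BytePos) -> Span { Span::new(lo, self.hi()) }
    pub fn with_hi(self, hi: BytePos) -> Span { Span::new(self.lo(), hi) }
}

fn main() {
    // Old style: `Span { hi: BytePos(15), ..sp }`
    // New style: `sp.with_hi(BytePos(15))`
    let sp = Span::new(BytePos(10), BytePos(20));
    assert_eq!(sp.with_hi(BytePos(15)).hi(), BytePos(15));
    assert_eq!(sp.with_lo(BytePos(12)).lo(), BytePos(12));
}
```
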
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index c36fdef2d4c1d..39b5482a066d4 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -340,7 +340,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>, .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { - let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION }; + let span = Span::new(reader.pos, reader.next_pos, NO_EXPANSION); match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 67b4954a8f15b..76a7e2923fc39 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -181,7 +181,7 @@ pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc, ) -> Parser { let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { - parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; + parser.span = Span::new(end_pos, end_pos, NO_EXPANSION); } parser @@ -661,7 +661,7 @@ mod tests { // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { - Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION} + Span::new(BytePos(a), BytePos(b), NO_EXPANSION) } fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment { @@ -976,7 +976,7 @@ mod tests { for &src in &srcs { let spans = get_spans_of_pat_idents(src); - let Span{ lo, hi, .. } = spans[0]; + let (lo, hi) = (spans[0].lo(), spans[0].hi()); assert!("self" == &src[lo.to_usize()..hi.to_usize()], "\"{}\" != \"self\". src=\"{}\"", &src[lo.to_usize()..hi.to_usize()], src) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 90a635fdf44fe..1f033b25fe4f6 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -790,9 +790,8 @@ impl<'a> Parser<'a> { Ok(()) } token::AndAnd => { - let span = self.span; - let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span })) + let span = self.span.with_lo(self.span.lo() + BytePos(1)); + Ok(self.bump_with(token::BinOp(token::And), span)) } _ => self.unexpected() } @@ -824,9 +823,8 @@ impl<'a> Parser<'a> { true } token::BinOp(token::Shl) => { - let span = self.span; - let lo = span.lo + BytePos(1); - self.bump_with(token::Lt, Span { lo: lo, ..span }); + let span = self.span.with_lo(self.span.lo() + BytePos(1)); + self.bump_with(token::Lt, span); true } _ => false, @@ -852,19 +850,16 @@ impl<'a> Parser<'a> { Ok(()) } token::BinOp(token::Shr) => { - let span = self.span; - let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Gt, Span { lo: lo, ..span })) + let span = self.span.with_lo(self.span.lo() + BytePos(1)); + Ok(self.bump_with(token::Gt, span)) } token::BinOpEq(token::Shr) => { - let span = self.span; - let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Ge, Span { lo: lo, ..span })) + let span = self.span.with_lo(self.span.lo() + BytePos(1)); + Ok(self.bump_with(token::Ge, span)) } token::Ge => { - let span = self.span; - let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Eq, Span { lo: lo, ..span })) + let span = self.span.with_lo(self.span.lo() + BytePos(1)); + Ok(self.bump_with(token::Eq, span)) } _ => self.unexpected() } @@ -1094,7 +1089,7 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. 
Use this when /// consuming a part of a token. For example a single `<` from `<<`. pub fn bump_with(&mut self, next: token::Token, span: Span) { - self.prev_span = Span { hi: span.lo, ..self.span }; + self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. @@ -1356,7 +1351,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?)) } else { - Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span })) + Ok(FunctionRetTy::Default(self.span.with_hi(self.span.lo()))) } } @@ -2222,6 +2217,14 @@ impl<'a> Parser<'a> { }; ex = ExprKind::Break(lt, e); hi = self.prev_span; + } else if self.eat_keyword(keywords::Yield) { + if self.token.can_begin_expr() { + let e = self.parse_expr()?; + hi = e.span; + ex = ExprKind::Yield(Some(e)); + } else { + ex = ExprKind::Yield(None); + } } else if self.token.is_keyword(keywords::Let) { // Catch this syntax error here, instead of in `parse_ident`, so // that we can explicitly mention that let is not to be used as an expression @@ -2524,7 +2527,7 @@ impl<'a> Parser<'a> { pub fn process_potential_macro_variable(&mut self) { let ident = match self.token { - token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() && + token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() }; @@ -2726,8 +2729,8 @@ impl<'a> Parser<'a> { err.span_label(self.span, "expecting a type here because of type ascription"); let cm = self.sess.codemap(); - let cur_pos = cm.lookup_char_pos(self.span.lo); - let op_pos = cm.lookup_char_pos(cur_op_span.hi); + let cur_pos = cm.lookup_char_pos(self.span.lo()); + let op_pos = cm.lookup_char_pos(cur_op_span.hi()); if cur_pos.line != op_pos.line { err.span_suggestion_short(cur_op_span, "did you mean to use `;` here?", @@ -3022,7 +3025,10 @@ impl<'a> Parser<'a> { let decl = self.parse_fn_block_decl()?; let decl_hi = self.prev_span; let body = match decl.output { - FunctionRetTy::Default(_) => self.parse_expr()?, + FunctionRetTy::Default(_) => { + let restrictions = self.restrictions - RESTRICTION_STMT_EXPR; + self.parse_expr_res(restrictions, None)? + }, _ => { // If an explicit return type is given, require a // block to appear (RFC 968). @@ -3149,6 +3155,12 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtArm, |x| x); let attrs = self.parse_outer_attributes()?; + // Allow a '|' before the pats (RFC 1925) + let beginning_vert = if self.eat(&token::BinOp(token::Or)) { + Some(self.prev_span) + } else { + None + }; let pats = self.parse_pats()?; let guard = if self.eat_keyword(keywords::If) { Some(self.parse_expr()?) 
@@ -3172,6 +3184,7 @@ impl<'a> Parser<'a> { pats, guard, body: expr, + beginning_vert, }) } @@ -4048,7 +4061,7 @@ impl<'a> Parser<'a> { let mut stmt_span = stmt.span; // expand the span to include the semicolon, if it exists if self.eat(&token::Semi) { - stmt_span.hi = self.prev_span.hi; + stmt_span = stmt_span.with_hi(self.prev_span.hi()); } let sugg = pprust::to_string(|s| { use print::pprust::{PrintState, INDENT_UNIT}; @@ -4140,7 +4153,7 @@ impl<'a> Parser<'a> { stmt = stmt.add_trailing_semicolon(); } - stmt.span.hi = self.prev_span.hi; + stmt.span = stmt.span.with_hi(self.prev_span.hi()); Ok(Some(stmt)) } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 834ac38af9870..d39f11bc3eef1 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -106,6 +106,7 @@ fn ident_can_begin_expr(ident: ast::Ident) -> bool { keywords::True.name(), keywords::Unsafe.name(), keywords::While.name(), + keywords::Yield.name(), ].contains(&ident.name) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 3be831e828c72..3b5ec1caf0de9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -603,8 +603,8 @@ pub trait PrintState<'a> { } fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> { - self.maybe_print_comment(lit.span.lo)?; - if let Some(ltrl) = self.next_lit(lit.span.lo) { + self.maybe_print_comment(lit.span.lo())?; + if let Some(ltrl) = self.next_lit(lit.span.lo()) { return self.writer().word(<rl.lit); } match lit.node { @@ -723,7 +723,7 @@ pub trait PrintState<'a> { if !is_inline { self.hardbreak_if_not_bol()?; } - self.maybe_print_comment(attr.span.lo)?; + self.maybe_print_comment(attr.span.lo())?; if attr.is_sugared_doc { self.writer().word(&attr.value_str().unwrap().as_str())?; self.writer().hardbreak() @@ -892,7 +892,7 @@ impl<'a> State<'a> { } pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span, indented: usize, close_box: bool) -> io::Result<()> { - self.maybe_print_comment(span.hi)?; + self.maybe_print_comment(span.hi())?; self.break_offset_if_not_bol(1, -(indented as isize))?; self.s.word("}")?; if close_box { @@ -950,13 +950,13 @@ impl<'a> State<'a> { let len = elts.len(); let mut i = 0; for elt in elts { - self.maybe_print_comment(get_span(elt).hi)?; + self.maybe_print_comment(get_span(elt).hi())?; op(self, elt)?; i += 1; if i < len { self.s.word(",")?; self.maybe_print_trailing_comment(get_span(elt), - Some(get_span(&elts[i]).hi))?; + Some(get_span(&elts[i]).hi()))?; self.space_if_not_bol()?; } } @@ -996,7 +996,7 @@ impl<'a> State<'a> { } pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { - self.maybe_print_comment(ty.span.lo)?; + self.maybe_print_comment(ty.span.lo())?; self.ibox(0)?; match ty.node { ast::TyKind::Slice(ref ty) => { @@ -1094,7 +1094,7 @@ impl<'a> State<'a> { pub fn print_foreign_item(&mut self, item: &ast::ForeignItem) -> io::Result<()> { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(item.span.lo)?; + self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; match item.node { ast::ForeignItemKind::Fn(ref decl, ref generics) => { @@ -1163,7 +1163,7 @@ impl<'a> State<'a> { /// Pretty-print an item pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(item.span.lo)?; + self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; self.ann.pre(self, NodeItem(item))?; match item.node { @@ -1433,7 +1433,7 @@ impl<'a> 
State<'a> { self.bopen()?; for v in variants { self.space_if_not_bol()?; - self.maybe_print_comment(v.span.lo)?; + self.maybe_print_comment(v.span.lo())?; self.print_outer_attributes(&v.node.attrs)?; self.ibox(INDENT_UNIT)?; self.print_variant(v)?; @@ -1481,7 +1481,7 @@ impl<'a> State<'a> { self.commasep( Inconsistent, struct_def.fields(), |s, field| { - s.maybe_print_comment(field.span.lo)?; + s.maybe_print_comment(field.span.lo())?; s.print_outer_attributes(&field.attrs)?; s.print_visibility(&field.vis)?; s.print_type(&field.ty) @@ -1503,7 +1503,7 @@ impl<'a> State<'a> { for field in struct_def.fields() { self.hardbreak_if_not_bol()?; - self.maybe_print_comment(field.span.lo)?; + self.maybe_print_comment(field.span.lo())?; self.print_outer_attributes(&field.attrs)?; self.print_visibility(&field.vis)?; self.print_ident(field.ident.unwrap())?; @@ -1548,7 +1548,7 @@ impl<'a> State<'a> { -> io::Result<()> { self.ann.pre(self, NodeSubItem(ti.id))?; self.hardbreak_if_not_bol()?; - self.maybe_print_comment(ti.span.lo)?; + self.maybe_print_comment(ti.span.lo())?; self.print_outer_attributes(&ti.attrs)?; match ti.node { ast::TraitItemKind::Const(ref ty, ref default) => { @@ -1590,7 +1590,7 @@ impl<'a> State<'a> { pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ii.id))?; self.hardbreak_if_not_bol()?; - self.maybe_print_comment(ii.span.lo)?; + self.maybe_print_comment(ii.span.lo())?; self.print_outer_attributes(&ii.attrs)?; self.print_defaultness(ii.defaultness)?; match ii.node { @@ -1622,7 +1622,7 @@ impl<'a> State<'a> { } pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> { - self.maybe_print_comment(st.span.lo)?; + self.maybe_print_comment(st.span.lo())?; match st.node { ast::StmtKind::Local(ref loc) => { self.print_outer_attributes(&loc.attrs)?; @@ -1705,7 +1705,7 @@ impl<'a> State<'a> { BlockCheckMode::Unsafe(..) => self.word_space("unsafe")?, BlockCheckMode::Default => () } - self.maybe_print_comment(blk.span.lo)?; + self.maybe_print_comment(blk.span.lo())?; self.ann.pre(self, NodeBlock(blk))?; self.bopen()?; @@ -1714,10 +1714,10 @@ impl<'a> State<'a> { for (i, st) in blk.stmts.iter().enumerate() { match st.node { ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => { - self.maybe_print_comment(st.span.lo)?; + self.maybe_print_comment(st.span.lo())?; self.space_if_not_bol()?; self.print_expr_outer_attr_style(expr, false)?; - self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?; + self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?; } _ => self.print_stmt(st)?, } @@ -1988,7 +1988,7 @@ impl<'a> State<'a> { fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline: bool) -> io::Result<()> { - self.maybe_print_comment(expr.span.lo)?; + self.maybe_print_comment(expr.span.lo())?; let attrs = &expr.attrs; if is_inline { @@ -2281,6 +2281,16 @@ impl<'a> State<'a> { self.print_expr(e)?; self.pclose()?; }, + ast::ExprKind::Yield(ref e) => { + self.s.word("yield")?; + match *e { + Some(ref expr) => { + self.s.space()?; + self.print_expr(&expr)?; + } + _ => () + } + } ast::ExprKind::Try(ref e) => { self.print_expr(e)?; self.s.word("?")? 
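
The pretty-printer arm just above handles the new `ExprKind::Yield` variant added earlier in this diff (a `yield` carrying an optional value): a bare `yield` prints only the keyword, while `yield expr` prints the keyword, a space, and the operand. A standalone sketch of that printing logic, using a simplified expression type and plain `String`s instead of the `pprust` printer state, might look like this:

```
// Simplified stand-in for the AST; the real variant is
// `ast::ExprKind::Yield(Option<P<Expr>>)`.
enum Expr {
    Lit(i32),
    Yield(Option<Box<Expr>>),
}

fn print_expr(e: &Expr) -> String {
    match *e {
        Expr::Lit(n) => n.to_string(),
        Expr::Yield(ref inner) => match *inner {
            // `yield <operand>`
            Some(ref operand) => format!("yield {}", print_expr(operand)),
            // bare `yield`
            None => "yield".to_string(),
        },
    }
}

fn main() {
    assert_eq!(print_expr(&Expr::Yield(None)), "yield");
    assert_eq!(print_expr(&Expr::Yield(Some(Box::new(Expr::Lit(1))))), "yield 1");
}
```
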
@@ -2333,7 +2343,7 @@ impl<'a> State<'a> { defaults_to_global: bool) -> io::Result<()> { - self.maybe_print_comment(path.span.lo)?; + self.maybe_print_comment(path.span.lo())?; let mut segments = path.segments[..path.segments.len()-depth].iter(); if defaults_to_global && path.is_global() { @@ -2455,7 +2465,7 @@ impl<'a> State<'a> { } pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> { - self.maybe_print_comment(pat.span.lo)?; + self.maybe_print_comment(pat.span.lo())?; self.ann.pre(self, NodePat(pat))?; /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ @@ -2597,7 +2607,7 @@ impl<'a> State<'a> { } self.cbox(INDENT_UNIT)?; self.ibox(0)?; - self.maybe_print_comment(arm.pats[0].span.lo)?; + self.maybe_print_comment(arm.pats[0].span.lo())?; self.print_outer_attributes(&arm.attrs)?; let mut first = true; for p in &arm.pats { @@ -2705,7 +2715,7 @@ impl<'a> State<'a> { match decl.output { ast::FunctionRetTy::Ty(ref ty) => { self.print_type(ty)?; - self.maybe_print_comment(ty.span.lo) + self.maybe_print_comment(ty.span.lo()) } ast::FunctionRetTy::Default(..) => unreachable!(), } @@ -2961,7 +2971,7 @@ impl<'a> State<'a> { self.end()?; match decl.output { - ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo), + ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo()), _ => Ok(()) } } @@ -3007,10 +3017,10 @@ impl<'a> State<'a> { }; if let Some(ref cmnt) = self.next_comment() { if cmnt.style != comments::Trailing { return Ok(()) } - let span_line = cm.lookup_char_pos(span.hi); + let span_line = cm.lookup_char_pos(span.hi()); let comment_line = cm.lookup_char_pos(cmnt.pos); let next = next_pos.unwrap_or(cmnt.pos + BytePos(1)); - if span.hi < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line { + if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line { self.print_comment(cmnt)?; } } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 8977d701e5a2b..7aa94de9d3d5b 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -31,7 +31,7 @@ fn ignored_span(sp: Span) -> Span { allow_internal_unsafe: false, } }); - Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp } + sp.with_ctxt(SyntaxContext::empty().apply_mark(mark)) } pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 35dc981952908..5a5a1ce3777e6 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -306,7 +306,7 @@ fn generate_test_harness(sess: &ParseSess, /// call to codemap's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. 
fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { - Span { ctxt: cx.ctxt, ..sp } + sp.with_ctxt(cx.ctxt) } #[derive(PartialEq)] diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index 4fae2ff9814fd..e9b1976ea472b 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -80,11 +80,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { let start = make_pos(file_text, start); let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends assert!(start <= end); - Span { - lo: BytePos(start as u32), - hi: BytePos(end as u32), - ctxt: NO_EXPANSION, - } + Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION) } fn make_pos(file_text: &str, pos: &Position) -> usize { diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 747bc7b438554..870f54e4396af 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -59,7 +59,7 @@ impl Delimited { let open_span = if span == DUMMY_SP { DUMMY_SP } else { - Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } + span.with_hi(span.lo() + BytePos(self.delim.len() as u32)) }; TokenTree::Token(open_span, self.open_token()) } @@ -69,7 +69,7 @@ impl Delimited { let close_span = if span == DUMMY_SP { DUMMY_SP } else { - Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } + span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; TokenTree::Token(close_span, self.close_token()) } @@ -602,11 +602,7 @@ mod tests { } fn sp(a: u32, b: u32) -> Span { - Span { - lo: BytePos(a), - hi: BytePos(b), - ctxt: NO_EXPANSION, - } + Span::new(BytePos(a), BytePos(b), NO_EXPANSION) } #[test] diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index f4ac7e341ce4b..05077d42a0bed 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -784,6 +784,9 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { visitor.visit_expr(&output.expr) } } + ExprKind::Yield(ref optional_expression) => { + walk_list!(visitor, visit_expr, optional_expression); + } ExprKind::Try(ref subexpression) => { visitor.visit_expr(subexpression) } diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 98da49545f927..1d8dc4064685b 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -24,6 +24,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { + let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)); let mut p = cx.new_parser_from_tts(tts); let cfg = panictry!(p.parse_meta_item()); diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index bfe18dc4060c9..c79e7867c5f5e 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -57,5 +57,6 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } } } + let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)); base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator))) } diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 6f4c112acb6c6..8d0104e512bfb 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -92,6 +92,6 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, Box::new(Result { ident: res, - span: sp, + span: sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)), }) } diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 71dd7abfab04a..5d93c2a5f72a1 100644 --- 
a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -111,7 +111,7 @@ fn cs_clone_shallow(name: &str, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. - let span = Span { ctxt: cx.backtrace(), ..span}; + let span = span.with_ctxt(cx.backtrace()); let assert_path = cx.path_all(span, true, cx.std_path(&["clone", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 0b57beeae858b..a282ff5bd045f 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -58,7 +58,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. - let span = Span { ctxt: cx.backtrace(), ..span }; + let span = span.with_ctxt(cx.backtrace()); let assert_path = cx.path_all(span, true, cx.std_path(&["cmp", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index 54d71dd4b48e3..ab6dd04520c1b 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -67,7 +67,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P MethodDef<'a> { .iter() .map(|v| { let ident = v.node.name; - let sp = Span { ctxt: trait_.span.ctxt, ..v.span }; + let sp = v.span.with_ctxt(trait_.span.ctxt()); let summary = trait_.summarise_struct(cx, &v.node.data); (ident, sp, summary) }) @@ -1484,7 +1484,7 @@ impl<'a> TraitDef<'a> { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields() { - let sp = Span { ctxt: self.span.ctxt, ..field.span }; + let sp = field.span.with_ctxt(self.span.ctxt()); match field.ident { Some(ident) => named_idents.push((ident, sp)), _ => just_spans.push(sp), @@ -1529,7 +1529,7 @@ impl<'a> TraitDef<'a> { let mut paths = Vec::new(); let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = Span { ctxt: self.span.ctxt, ..struct_field.span }; + let sp = struct_field.span.with_ctxt(self.span.ctxt()); let ident = cx.ident_of(&format!("{}_{}", prefix, i)); paths.push(codemap::Spanned { span: sp, @@ -1550,7 +1550,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } codemap::Spanned { - span: Span { ctxt: self.span.ctxt, ..pat.span }, + span: pat.span.with_ctxt(self.span.ctxt()), node: ast::FieldPat { ident: ident.unwrap(), pat, @@ -1582,7 +1582,7 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; - let sp = Span { ctxt: self.span.ctxt, ..variant.span }; + let sp = variant.span.with_ctxt(self.span.ctxt()); let variant_path = cx.path(sp, vec![enum_ident, variant_ident]); self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl) } diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index cd706f14a680d..ccf3d5502341f 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -158,13 +158,13 @@ fn call_intrinsic(cx: &ExtCtxt, args: Vec>) -> P { if cx.current_expansion.mark.expn_info().unwrap().callee.allow_internal_unstable { - span.ctxt = cx.backtrace(); + span = 
span.with_ctxt(cx.backtrace()); } else { // Avoid instability errors with user defined curstom derives, cc #36316 let mut info = cx.current_expansion.mark.expn_info().unwrap(); info.callee.allow_internal_unstable = true; let mark = Mark::fresh(Mark::root()); mark.set_expn_info(info); - span.ctxt = SyntaxContext::empty().apply_mark(mark); + span = span.with_ctxt(SyntaxContext::empty().apply_mark(mark)); } let path = cx.std_path(&["intrinsics", intrinsic]); let call = cx.expr_call_global(span, path, args); diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index affebbabbbda4..fcad065be52bc 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -32,6 +32,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, Some(v) => v, }; + let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)); let e = match env::var(&*var.as_str()) { Err(..) => { cx.expr_path(cx.path_all(sp, diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 764cedfcf2061..3e20bc481bde8 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -558,10 +558,8 @@ impl<'a, 'b> Context<'a, 'b> { // passed to this function. for (i, e) in self.args.into_iter().enumerate() { let name = self.ecx.ident_of(&format!("__arg{}", i)); - let span = Span { - ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark), - ..DUMMY_SP - }; + let span = + DUMMY_SP.with_ctxt(e.span.ctxt().apply_mark(self.ecx.current_expansion.mark)); pats.push(self.ecx.pat_ident(span, name)); for ref arg_ty in self.arg_unique_types[i].iter() { locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); @@ -642,7 +640,7 @@ impl<'a, 'b> Context<'a, 'b> { ty: &ArgumentType, arg: ast::Ident) -> P { - sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); + sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark)); let arg = ecx.expr_ident(sp, arg); let trait_ = match *ty { Placeholder(ref tyname) => { @@ -679,7 +677,7 @@ pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, mut sp: Span, tts: &[tokenstream::TokenTree]) -> Box { - sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); + sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark)); match parse_args(ecx, sp, tts) { Some((efmt, args, names)) => { MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names)) @@ -701,7 +699,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); let mut macsp = ecx.call_site(); - macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark); + macsp = macsp.with_ctxt(macsp.ctxt().apply_mark(ecx.current_expansion.mark)); let msg = "format argument must be a string literal."; let fmt = match expr_to_spanned_string(ecx, efmt, msg) { Some(fmt) => fmt, diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index a8a54a97ac368..a58d2c96388ca 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -371,7 +371,7 @@ fn mk_registrar(cx: &mut ExtCtxt, allow_internal_unsafe: false, } }); - let span = Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..DUMMY_SP }; + let span = DUMMY_SP.with_ctxt(SyntaxContext::empty().apply_mark(mark)); let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 
d34dcfa3ed324..cba5c812b07ce 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -25,6 +25,7 @@ #![feature(optin_builtin_traits)] #![allow(unused_attributes)] #![feature(specialization)] +#![feature(staged_api)] use std::borrow::Cow; use std::cell::{Cell, RefCell}; @@ -60,13 +61,22 @@ pub type FileName = String; /// range between files. #[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct Span { + #[unstable(feature = "rustc_private", issue = "27812")] + #[rustc_deprecated(since = "1.21", reason = "use getters/setters instead")] pub lo: BytePos, + #[unstable(feature = "rustc_private", issue = "27812")] + #[rustc_deprecated(since = "1.21", reason = "use getters/setters instead")] pub hi: BytePos, /// Information about where the macro came from, if this piece of /// code was created by a macro expansion. + #[unstable(feature = "rustc_private", issue = "27812")] + #[rustc_deprecated(since = "1.21", reason = "use getters/setters instead")] pub ctxt: SyntaxContext, } +#[allow(deprecated)] +pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION }; + /// A collection of spans. Spans have two orthogonal attributes: /// /// - they can be *primary spans*. In this case they are the locus of @@ -80,16 +90,50 @@ pub struct MultiSpan { } impl Span { + #[allow(deprecated)] + #[inline] + pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Self { + if lo <= hi { Span { lo, hi, ctxt } } else { Span { lo: hi, hi: lo, ctxt } } + } + + #[allow(deprecated)] + #[inline] + pub fn lo(self) -> BytePos { + self.lo + } + #[inline] + pub fn with_lo(self, lo: BytePos) -> Span { + Span::new(lo, self.hi(), self.ctxt()) + } + #[allow(deprecated)] + #[inline] + pub fn hi(self) -> BytePos { + self.hi + } + #[inline] + pub fn with_hi(self, hi: BytePos) -> Span { + Span::new(self.lo(), hi, self.ctxt()) + } + #[allow(deprecated)] + #[inline] + pub fn ctxt(self) -> SyntaxContext { + self.ctxt + } + #[inline] + pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { + Span::new(self.lo(), self.hi(), ctxt) + } + /// Returns a new span representing just the end-point of this span pub fn end_point(self) -> Span { - let lo = cmp::max(self.hi.0 - 1, self.lo.0); - Span { lo: BytePos(lo), ..self } + let lo = cmp::max(self.hi().0 - 1, self.lo().0); + self.with_lo(BytePos(lo)) } /// Returns a new span representing the next character after the end-point of this span pub fn next_point(self) -> Span { - let lo = cmp::max(self.hi.0, self.lo.0 + 1); - Span { lo: BytePos(lo), hi: BytePos(lo), ..self } + let lo = cmp::max(self.hi().0, self.lo().0 + 1); + Span::new(BytePos(lo), BytePos(lo), self.ctxt()) } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. @@ -99,7 +143,7 @@ impl Span { /// Return true if `self` fully encloses `other`. pub fn contains(self, other: Span) -> bool { - self.lo <= other.lo && other.hi <= self.hi + self.lo() <= other.lo() && other.hi() <= self.hi() } /// Return true if the spans are equal with regards to the source text. @@ -107,13 +151,13 @@ impl Span { /// Use this instead of `==` when either span could be generated code, /// and you only care that they point to the same bytes of source text. 
pub fn source_equal(&self, other: &Span) -> bool { - self.lo == other.lo && self.hi == other.hi + self.lo() == other.lo() && self.hi() == other.hi() } /// Returns `Some(span)`, where the start is trimmed by the end of `other` pub fn trim_start(self, other: Span) -> Option { - if self.hi > other.hi { - Some(Span { lo: cmp::max(self.lo, other.hi), .. self }) + if self.hi() > other.hi() { + Some(self.with_lo(cmp::max(self.lo(), other.hi()))) } else { None } @@ -122,7 +166,7 @@ impl Span { /// Return the source span - this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(self) -> Span { - self.ctxt.outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) + self.ctxt().outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) } /// Return the source callee. @@ -132,19 +176,19 @@ impl Span { /// corresponding to the source callsite. pub fn source_callee(self) -> Option { fn source_callee(info: ExpnInfo) -> NameAndSpan { - match info.call_site.ctxt.outer().expn_info() { + match info.call_site.ctxt().outer().expn_info() { Some(info) => source_callee(info), None => info.callee, } } - self.ctxt.outer().expn_info().map(source_callee) + self.ctxt().outer().expn_info().map(source_callee) } /// Check if a span is "internal" to a macro in which #[unstable] /// items can be used (that is, a macro marked with /// `#[allow_internal_unstable]`). pub fn allows_unstable(&self) -> bool { - match self.ctxt.outer().expn_info() { + match self.ctxt().outer().expn_info() { Some(info) => info.callee.allow_internal_unstable, None => false, } @@ -152,7 +196,7 @@ impl Span { /// Check if this span arises from a compiler desugaring of kind `kind`. pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool { - match self.ctxt.outer().expn_info() { + match self.ctxt().outer().expn_info() { Some(info) => match info.callee.format { ExpnFormat::CompilerDesugaring(k) => k == kind, _ => false, @@ -161,11 +205,23 @@ impl Span { } } + /// Return the compiler desugaring that created this span, or None + /// if this span is not from a desugaring. + pub fn compiler_desugaring_kind(&self) -> Option { + match self.ctxt().outer().expn_info() { + Some(info) => match info.callee.format { + ExpnFormat::CompilerDesugaring(k) => Some(k), + _ => None + }, + None => None + } + } + /// Check if a span is "internal" to a macro in which `unsafe` /// can be used without triggering the `unsafe_code` lint // (that is, a macro marked with `#[allow_internal_unsafe]`). pub fn allows_unsafe(&self) -> bool { - match self.ctxt.outer().expn_info() { + match self.ctxt().outer().expn_info() { Some(info) => info.callee.allow_internal_unsafe, None => false, } @@ -175,7 +231,7 @@ impl Span { let mut prev_span = DUMMY_SP; let mut result = vec![]; loop { - let info = match self.ctxt.outer().expn_info() { + let info = match self.ctxt().outer().expn_info() { Some(info) => info, None => break, }; @@ -205,42 +261,30 @@ impl Span { /// Return a `Span` that would enclose both `self` and `end`. pub fn to(self, end: Span) -> Span { - Span { - lo: cmp::min(self.lo, end.lo), - hi: cmp::max(self.hi, end.hi), + Span::new( + cmp::min(self.lo(), end.lo()), + cmp::max(self.hi(), end.hi()), // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. 
issue #23480) - ctxt: if self.ctxt == SyntaxContext::empty() { - end.ctxt - } else { - self.ctxt - }, - } + if self.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() }, + ) } /// Return a `Span` between the end of `self` to the beginning of `end`. pub fn between(self, end: Span) -> Span { - Span { - lo: self.hi, - hi: end.lo, - ctxt: if end.ctxt == SyntaxContext::empty() { - end.ctxt - } else { - self.ctxt - } - } + Span::new( + self.hi(), + end.lo(), + if end.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() }, + ) } /// Return a `Span` between the beginning of `self` to the beginning of `end`. pub fn until(self, end: Span) -> Span { - Span { - lo: self.lo, - hi: end.lo, - ctxt: if end.ctxt == SyntaxContext::empty() { - end.ctxt - } else { - self.ctxt - } - } + Span::new( + self.lo(), + end.lo(), + if end.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() }, + ) } } @@ -267,11 +311,11 @@ impl serialize::UseSpecializedEncodable for Span { fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("Span", 2, |s| { s.emit_struct_field("lo", 0, |s| { - self.lo.encode(s) + self.lo().encode(s) })?; s.emit_struct_field("hi", 1, |s| { - self.hi.encode(s) + self.hi().encode(s) }) }) } @@ -282,14 +326,14 @@ impl serialize::UseSpecializedDecodable for Span { d.read_struct("Span", 2, |d| { let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?; - Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) + Ok(Span::new(lo, hi, NO_EXPANSION)) }) } } fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Span {{ lo: {:?}, hi: {:?}, ctxt: {:?} }}", - span.lo, span.hi, span.ctxt) + span.lo(), span.hi(), span.ctxt()) } impl fmt::Debug for Span { @@ -298,8 +342,6 @@ impl fmt::Debug for Span { } } -pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION }; - impl MultiSpan { pub fn new() -> MultiSpan { MultiSpan { diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index e49f1f28e5f1d..4d3db15ef29db 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -115,6 +115,12 @@ impl Symbol { } } +impl<'a> From<&'a str> for Symbol { + fn from(string: &'a str) -> Symbol { + Symbol::intern(string) + } +} + impl fmt::Debug for Symbol { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}({})", self, self.0) diff --git a/src/stage0.txt b/src/stage0.txt index e49b301abbf6b..892679c192916 100644 --- a/src/stage0.txt +++ b/src/stage0.txt @@ -12,7 +12,7 @@ # source tarball for a stable release you'll likely see `1.x.0` for rustc and # `0.x.0` for Cargo where they were released on `date`. -date: 2017-07-18 +date: 2017-08-29 rustc: beta cargo: beta diff --git a/src/test/codegen/dealloc-no-unwind.rs b/src/test/codegen/dealloc-no-unwind.rs new file mode 100644 index 0000000000000..551b66e103a11 --- /dev/null +++ b/src/test/codegen/dealloc-no-unwind.rs @@ -0,0 +1,32 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
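The `libsyntax_pos` hunk above replaces direct access to `Span`'s public `lo`/`hi`/`ctxt` fields with getter methods and `with_*` constructors built on `Span::new`, and the `libsyntax_ext` call sites earlier in the patch are rewritten to match. The following is a rough, self-contained sketch of that accessor pattern; the types are toy stand-ins, not the real `syntax_pos` types.

```
// Toy stand-ins for BytePos/SyntaxContext/Span; only the accessor pattern
// introduced by the patch is modelled here.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
struct BytePos(u32);

#[derive(Clone, Copy, PartialEq, Debug)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, Debug)]
struct Span {
    lo: BytePos,
    hi: BytePos,
    ctxt: SyntaxContext,
}

impl Span {
    // `new` normalises the endpoints so `lo <= hi` always holds, as in the patch.
    fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Span {
        if lo <= hi { Span { lo, hi, ctxt } } else { Span { lo: hi, hi: lo, ctxt } }
    }
    fn lo(self) -> BytePos { self.lo }
    fn hi(self) -> BytePos { self.hi }
    fn ctxt(self) -> SyntaxContext { self.ctxt }
    fn with_lo(self, lo: BytePos) -> Span { Span::new(lo, self.hi(), self.ctxt()) }
    fn with_ctxt(self, ctxt: SyntaxContext) -> Span { Span::new(self.lo(), self.hi(), ctxt) }
}

fn main() {
    let sp = Span::new(BytePos(10), BytePos(20), SyntaxContext(0));
    // Old style: `Span { lo: BytePos(12), ..sp }` or `sp.ctxt = ...`;
    // new style: functional updates through the `with_*` methods.
    let marked = sp.with_lo(BytePos(12)).with_ctxt(SyntaxContext(1));
    assert_eq!(marked.lo(), BytePos(12));
    assert_eq!(marked.hi(), BytePos(20));
    assert_eq!(marked.ctxt(), SyntaxContext(1));
}
```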
+// +// no-system-llvm +// compile-flags: -O + +#![crate_type="lib"] + +struct A; + +impl Drop for A { + fn drop(&mut self) { + extern { fn foo(); } + unsafe { foo(); } + } +} + +#[no_mangle] +pub fn a(a: Box) { + // CHECK-LABEL: define void @a + // CHECK: call void @__rust_dealloc + // CHECK-NEXT: call void @foo + let _a = A; + drop(a); +} diff --git a/src/test/codegen/issue-34947-pow-i32.rs b/src/test/codegen/issue-34947-pow-i32.rs new file mode 100644 index 0000000000000..0564cd2e5016c --- /dev/null +++ b/src/test/codegen/issue-34947-pow-i32.rs @@ -0,0 +1,23 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -O + +#![crate_type = "lib"] + +// CHECK-LABEL: @issue_34947 +#[no_mangle] +pub fn issue_34947(x: i32) -> i32 { + // CHECK: mul + // CHECK-NEXT: mul + // CHECK-NEXT: mul + // CHECK-NEXT: ret + x.pow(5) +} diff --git a/src/test/compile-fail-fulldeps/gated-macro-reexports.rs b/src/test/compile-fail-fulldeps/gated-macro-reexports.rs index 22c92623e1c19..2a20c28cfb871 100644 --- a/src/test/compile-fail-fulldeps/gated-macro-reexports.rs +++ b/src/test/compile-fail-fulldeps/gated-macro-reexports.rs @@ -16,6 +16,6 @@ #![crate_type = "dylib"] #[macro_reexport(reexported)] +//~^ ERROR macros reexports are experimental and possibly buggy #[macro_use] #[no_link] extern crate macro_reexport_1; -//~^ ERROR macros reexports are experimental and possibly buggy diff --git a/src/test/compile-fail/E0232.rs b/src/test/compile-fail/E0232.rs index ce4f4638dac59..a33120bbebd23 100644 --- a/src/test/compile-fail/E0232.rs +++ b/src/test/compile-fail/E0232.rs @@ -12,7 +12,7 @@ #[rustc_on_unimplemented] //~^ ERROR E0232 -//~| NOTE attribute requires a value +//~| NOTE value required here //~| NOTE eg `#[rustc_on_unimplemented = "foo"]` trait Bar {} diff --git a/src/test/compile-fail/check-static-values-constraints.rs b/src/test/compile-fail/check-static-values-constraints.rs index 3642add32597b..c349aababd6c0 100644 --- a/src/test/compile-fail/check-static-values-constraints.rs +++ b/src/test/compile-fail/check-static-values-constraints.rs @@ -86,8 +86,9 @@ static STATIC8: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, // This example should fail because field1 in the base struct is not safe static STATIC9: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, ..SafeStruct{field1: SafeEnum::Variant3(WithDtor), +//~^ ERROR destructors in statics are an unstable feature +//~| ERROR statics are not allowed to have destructors field2: SafeEnum::Variant1}}; -//~^^ ERROR destructors in statics are an unstable feature struct UnsafeStruct; diff --git a/src/test/compile-fail/feature-gate-fn_must_use.rs b/src/test/compile-fail/feature-gate-fn_must_use.rs index 2dd6b90407267..72fdcc76cf4f6 100644 --- a/src/test/compile-fail/feature-gate-fn_must_use.rs +++ b/src/test/compile-fail/feature-gate-fn_must_use.rs @@ -13,12 +13,12 @@ struct MyStruct; impl MyStruct { - #[must_use] - fn need_to_use_method() -> bool { true } //~ WARN `#[must_use]` on methods is experimental + #[must_use] //~ WARN `#[must_use]` on methods is experimental + fn need_to_use_method() -> bool { true } } -#[must_use] -fn need_to_use_it() -> bool { true } //~ WARN `#[must_use]` on functions is experimental +#[must_use] //~ WARN 
`#[must_use]` on functions is experimental +fn need_to_use_it() -> bool { true } // Feature gates are tidy-required to have a specially named (or diff --git a/src/test/compile-fail/feature-gate-generators.rs b/src/test/compile-fail/feature-gate-generators.rs new file mode 100644 index 0000000000000..3754f92d8cde2 --- /dev/null +++ b/src/test/compile-fail/feature-gate-generators.rs @@ -0,0 +1,13 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + yield true; //~ ERROR yield syntax is experimental +} diff --git a/src/test/compile-fail/feature-gate-match_beginning_vert.rs b/src/test/compile-fail/feature-gate-match_beginning_vert.rs new file mode 100644 index 0000000000000..9085563c99d6d --- /dev/null +++ b/src/test/compile-fail/feature-gate-match_beginning_vert.rs @@ -0,0 +1,36 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[allow(dead_code)] +enum Foo { + A, + B, + C, + D, + E, +} +use Foo::*; + +fn main() { + let x = Foo::A; + match x { + | A => println!("A"), + //~^ ERROR: Use of a '|' at the beginning of a match arm is experimental (see issue #44101) + | B | C => println!("BC!"), + //~^ ERROR: Use of a '|' at the beginning of a match arm is experimental (see issue #44101) + | _ => {}, + //~^ ERROR: Use of a '|' at the beginning of a match arm is experimental (see issue #44101) + }; + match x { + A | B | C => println!("ABC!"), + _ => {}, + }; +} + diff --git a/src/test/compile-fail/feature-gate-repr-simd.rs b/src/test/compile-fail/feature-gate-repr-simd.rs index fdafb2ad950c9..429cec7ec90d0 100644 --- a/src/test/compile-fail/feature-gate-repr-simd.rs +++ b/src/test/compile-fail/feature-gate-repr-simd.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#[repr(simd)] -struct Foo(u64, u64); //~ error: SIMD types are experimental +#[repr(simd)] //~ error: SIMD types are experimental +struct Foo(u64, u64); fn main() {} diff --git a/src/test/compile-fail/feature-gate-repr_align.rs b/src/test/compile-fail/feature-gate-repr_align.rs index 8e986e197f269..9591d367a2d19 100644 --- a/src/test/compile-fail/feature-gate-repr_align.rs +++ b/src/test/compile-fail/feature-gate-repr_align.rs @@ -9,7 +9,7 @@ // except according to those terms. 
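The `feature-gate-match_beginning_vert.rs` test above checks that a leading `|` on a match arm is rejected without the gate. A sketch of the accepted form once the gate is enabled follows; the feature name `match_beginning_vert` is an assumption here, since the test only shows the error text.

```
// Sketch only: assumes the gate is named `match_beginning_vert` and a nightly
// compiler from this era. Without the feature, each leading `|` below is an
// error, as the compile-fail test above expects.
#![feature(match_beginning_vert)]

#[allow(dead_code)]
enum Foo { A, B, C }
use Foo::*;

fn main() {
    let x = A;
    match x {
        | A => println!("A"),
        | B | C => println!("B or C"),
    };
}
```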
#![feature(attr_literals)] -#[repr(align(64))] -struct Foo(u64, u64); //~ error: the struct `#[repr(align(u16))]` attribute is experimental +#[repr(align(64))] //~ error: the struct `#[repr(align(u16))]` attribute is experimental +struct Foo(u64, u64); fn main() {} diff --git a/src/test/compile-fail/feature-gate-simd.rs b/src/test/compile-fail/feature-gate-simd.rs index 168e84aa128c0..025eaca553336 100644 --- a/src/test/compile-fail/feature-gate-simd.rs +++ b/src/test/compile-fail/feature-gate-simd.rs @@ -11,13 +11,12 @@ // pretty-expanded FIXME #23616 -#[repr(simd)] +#[repr(simd)] //~ ERROR SIMD types are experimental struct RGBA { r: f32, g: f32, b: f32, a: f32 } -//~^^^^^^ ERROR SIMD types are experimental and possibly buggy (see issue #27731) pub fn main() {} diff --git a/src/test/compile-fail/feature-gate/issue-43106-gating-of-builtin-attrs.rs b/src/test/compile-fail/feature-gate/issue-43106-gating-of-builtin-attrs.rs index 204190d64acc1..6eec1779a2d87 100644 --- a/src/test/compile-fail/feature-gate/issue-43106-gating-of-builtin-attrs.rs +++ b/src/test/compile-fail/feature-gate/issue-43106-gating-of-builtin-attrs.rs @@ -354,8 +354,7 @@ mod repr { #[repr = "3900"] fn f() { } //~^ WARN unused attribute - #[repr = "3900"] struct S; - //~^ WARN unused attribute + struct S; #[repr = "3900"] type T = S; //~^ WARN unused attribute diff --git a/src/test/compile-fail/lint-ctypes.rs b/src/test/compile-fail/lint-ctypes.rs index 608b1eb0872ad..1d9b179c05d85 100644 --- a/src/test/compile-fail/lint-ctypes.rs +++ b/src/test/compile-fail/lint-ctypes.rs @@ -9,7 +9,7 @@ // except according to those terms. #![deny(improper_ctypes)] -#![feature(libc)] +#![feature(libc, i128_type)] extern crate libc; @@ -39,6 +39,8 @@ extern { pub fn str_type(p: &str); //~ ERROR: found Rust type pub fn box_type(p: Box); //~ ERROR found struct without pub fn char_type(p: char); //~ ERROR found Rust type + pub fn i128_type(p: i128); //~ ERROR found Rust type + pub fn u128_type(p: u128); //~ ERROR found Rust type pub fn trait_type(p: &Clone); //~ ERROR found Rust trait type pub fn tuple_type(p: (i32, i32)); //~ ERROR found Rust tuple type pub fn tuple_type2(p: I32Pair); //~ ERROR found Rust tuple type diff --git a/src/test/compile-fail/lint-impl-fn.rs b/src/test/compile-fail/lint-impl-fn.rs index 608aec327b63a..54a720d75b5ad 100644 --- a/src/test/compile-fail/lint-impl-fn.rs +++ b/src/test/compile-fail/lint-impl-fn.rs @@ -36,3 +36,8 @@ mod foo { fn main() { while true {} //~ ERROR: infinite loops } + +#[deny(while_true)] +fn bar() { + while cfg!(unix) {} // no error +} diff --git a/src/test/compile-fail/static-drop-scope.rs b/src/test/compile-fail/static-drop-scope.rs new file mode 100644 index 0000000000000..e5f10b65ceed7 --- /dev/null +++ b/src/test/compile-fail/static-drop-scope.rs @@ -0,0 +1,26 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
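The `lint-impl-fn.rs` addition above asserts that `while_true` keeps firing on a literal `while true {}` but stays quiet when the condition only becomes a literal after macro expansion, as with `cfg!(unix)`. A small self-contained sketch of that distinction (not taken from the patch):

```
#![deny(while_true)]

fn main() {
    // A literal `true` here would be denied by the lint, which suggests
    // spelling the infinite loop as `loop { ... }` instead:
    // while true { break; }
    loop {
        break;
    }

    // A condition that is only constant after macro expansion is not linted,
    // which is what the new `bar` function in the test checks:
    while cfg!(unix) {
        break;
    }
}
```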
+ +#![feature(drop_types_in_const)] + +struct WithDtor; + +impl Drop for WithDtor { + fn drop(&mut self) {} +} + +static FOO: Option<&'static WithDtor> = Some(&WithDtor); +//~^ ERROR statics are not allowed to have destructors +//~| ERROR borrowed value does not live long enoug + +static BAR: i32 = (WithDtor, 0).1; +//~^ ERROR statics are not allowed to have destructors + +fn main () {} diff --git a/src/test/mir-opt/end_region_1.rs b/src/test/mir-opt/end_region_1.rs index 55dac4440275f..54409d3543ebc 100644 --- a/src/test/mir-opt/end_region_1.rs +++ b/src/test/mir-opt/end_region_1.rs @@ -22,16 +22,16 @@ fn main() { // START rustc.node4.SimplifyCfg-qualify-consts.after.mir // let mut _0: (); // let _1: i32; -// let _2: &'6_1rce i32; +// let _2: &'10_1rs i32; // // bb0: { // StorageLive(_1); // _1 = const 3i32; // StorageLive(_2); -// _2 = &'6_1rce _1; +// _2 = &'10_1rs _1; // _0 = (); // StorageDead(_2); -// EndRegion('6_1rce); +// EndRegion('10_1rs); // StorageDead(_1); // return; // } diff --git a/src/test/mir-opt/end_region_2.rs b/src/test/mir-opt/end_region_2.rs index a1386ec47a13b..b37bd4188d970 100644 --- a/src/test/mir-opt/end_region_2.rs +++ b/src/test/mir-opt/end_region_2.rs @@ -27,8 +27,8 @@ fn main() { // START rustc.node4.SimplifyCfg-qualify-consts.after.mir // let mut _0: (); // let _2: bool; -// let _3: &'7_1rce bool; -// let _7: &'7_3rce bool; +// let _3: &'23_1rs bool; +// let _7: &'23_3rs bool; // let mut _4: (); // let mut _5: bool; // bb0: { @@ -38,7 +38,7 @@ fn main() { // StorageLive(_2); // _2 = const true; // StorageLive(_3); -// _3 = &'7_1rce _2; +// _3 = &'23_1rs _2; // StorageLive(_5); // _5 = _2; // switchInt(_5) -> [0u8: bb3, otherwise: bb2]; @@ -47,19 +47,19 @@ fn main() { // _0 = (); // StorageDead(_5); // StorageDead(_3); -// EndRegion('7_1rce); +// EndRegion('23_1rs); // StorageDead(_2); // return; // } // bb3: { // StorageDead(_5); // StorageLive(_7); -// _7 = &'7_3rce _2; +// _7 = &'23_3rs _2; // _1 = (); // StorageDead(_7); -// EndRegion('7_3rce); +// EndRegion('23_3rs); // StorageDead(_3); -// EndRegion('7_1rce); +// EndRegion('23_1rs); // StorageDead(_2); // goto -> bb1; // } diff --git a/src/test/mir-opt/end_region_3.rs b/src/test/mir-opt/end_region_3.rs index b3d2809e76ceb..be9fc579ab636 100644 --- a/src/test/mir-opt/end_region_3.rs +++ b/src/test/mir-opt/end_region_3.rs @@ -28,8 +28,8 @@ fn main() { // START rustc.node4.SimplifyCfg-qualify-consts.after.mir // let mut _0: (); // let mut _1: bool; -// let _3: &'9_1rce bool; -// let _7: &'9_3rce bool; +// let _3: &'26_1rs bool; +// let _7: &'26_3rs bool; // let mut _2: (); // let mut _4: (); // let mut _5: bool; @@ -41,7 +41,7 @@ fn main() { // bb1: { // _1 = const true; // StorageLive(_3); -// _3 = &'9_1rce _1; +// _3 = &'26_1rs _1; // StorageLive(_5); // _5 = _1; // switchInt(_5) -> [0u8: bb3, otherwise: bb2]; @@ -50,7 +50,7 @@ fn main() { // _0 = (); // StorageDead(_5); // StorageDead(_3); -// EndRegion('9_1rce); +// EndRegion('26_1rs); // StorageDead(_1); // return; // } @@ -58,12 +58,12 @@ fn main() { // _4 = (); // StorageDead(_5); // StorageLive(_7); -// _7 = &'9_3rce _1; +// _7 = &'26_3rs _1; // _2 = (); // StorageDead(_7); -// EndRegion('9_3rce); +// EndRegion('26_3rs); // StorageDead(_3); -// EndRegion('9_1rce); +// EndRegion('26_1rs); // goto -> bb1; // } // END rustc.node4.SimplifyCfg-qualify-consts.after.mir diff --git a/src/test/mir-opt/end_region_4.rs b/src/test/mir-opt/end_region_4.rs index 0b34231b4eec6..58ce1ed5b84b7 100644 --- a/src/test/mir-opt/end_region_4.rs +++ 
b/src/test/mir-opt/end_region_4.rs @@ -33,8 +33,8 @@ fn foo(i: i32) { // let mut _0: (); // let _1: D; // let _2: i32; -// let _3: &'6_2rce i32; -// let _6: &'6_4rce i32; +// let _3: &'26_2rs i32; +// let _6: &'26_4rs i32; // let mut _4: (); // let mut _5: i32; // bb0: { @@ -43,7 +43,7 @@ fn foo(i: i32) { // StorageLive(_2); // _2 = const 0i32; // StorageLive(_3); -// _3 = &'6_2rce _2; +// _3 = &'26_2rs _2; // StorageLive(_5); // _5 = (*_3); // _4 = const foo(_5) -> [return: bb1, unwind: bb3]; @@ -51,12 +51,12 @@ fn foo(i: i32) { // bb1: { // StorageDead(_5); // StorageLive(_6); -// _6 = &'6_4rce _2; +// _6 = &'26_4rs _2; // _0 = (); // StorageDead(_6); -// EndRegion('6_4rce); +// EndRegion('26_4rs); // StorageDead(_3); -// EndRegion('6_2rce); +// EndRegion('26_2rs); // StorageDead(_2); // drop(_1) -> bb4; // } @@ -64,7 +64,7 @@ fn foo(i: i32) { // resume; // } // bb3: { -// EndRegion('6_2rce); +// EndRegion('26_2rs); // drop(_1) -> bb2; // } // bb4: { diff --git a/src/test/mir-opt/end_region_5.rs b/src/test/mir-opt/end_region_5.rs index e51bb9350db60..be62a5473a6ff 100644 --- a/src/test/mir-opt/end_region_5.rs +++ b/src/test/mir-opt/end_region_5.rs @@ -31,21 +31,21 @@ fn foo(f: F) where F: FnOnce() -> i32 { // let mut _0: (); // let _1: D; // let mut _2: (); -// let mut _3: [closure@NodeId(18) d:&'19mce D]; -// let mut _4: &'19mce D; +// let mut _3: [closure@NodeId(18) d:&'14s D]; +// let mut _4: &'14s D; // bb0: { // StorageLive(_1); // _1 = D::{{constructor}}(const 0i32,); // StorageLive(_3); // StorageLive(_4); -// _4 = &'19mce _1; +// _4 = &'14s _1; // _3 = [closure@NodeId(18)] { d: _4 }; // StorageDead(_4); // _2 = const foo(_3) -> [return: bb1, unwind: bb3]; // } // bb1: { // StorageDead(_3); -// EndRegion('19mce); +// EndRegion('14s); // _0 = (); // drop(_1) -> bb4; // } @@ -53,7 +53,7 @@ fn foo(f: F) where F: FnOnce() -> i32 { // resume; // } // bb3: { -// EndRegion('19mce); +// EndRegion('14s); // drop(_1) -> bb2; // } // bb4: { @@ -64,13 +64,13 @@ fn foo(f: F) where F: FnOnce() -> i32 { // END rustc.node4.SimplifyCfg-qualify-consts.after.mir // START rustc.node18.SimplifyCfg-qualify-consts.after.mir -// fn main::{{closure}}(_1: [closure@NodeId(18) d:&'19mce D]) -> i32 { +// fn main::{{closure}}(_1: [closure@NodeId(18) d:&'14s D]) -> i32 { // let mut _0: i32; // let mut _2: i32; // // bb0: { // StorageLive(_2); -// _2 = ((*(_1.0: &'19mce D)).0: i32); +// _2 = ((*(_1.0: &'14s D)).0: i32); // _0 = _2; // StorageDead(_2); // return; diff --git a/src/test/mir-opt/end_region_6.rs b/src/test/mir-opt/end_region_6.rs index c55e6d105cbdc..23b92583a11da 100644 --- a/src/test/mir-opt/end_region_6.rs +++ b/src/test/mir-opt/end_region_6.rs @@ -31,21 +31,21 @@ fn foo(f: F) where F: FnOnce() -> i32 { // let mut _0: (); // let _1: D; // let mut _2: (); -// let mut _3: [closure@NodeId(22) d:&'23mce D]; -// let mut _4: &'23mce D; +// let mut _3: [closure@NodeId(22) d:&'19s D]; +// let mut _4: &'19s D; // bb0: { // StorageLive(_1); // _1 = D::{{constructor}}(const 0i32,); // StorageLive(_3); // StorageLive(_4); -// _4 = &'23mce _1; +// _4 = &'19s _1; // _3 = [closure@NodeId(22)] { d: _4 }; // StorageDead(_4); // _2 = const foo(_3) -> [return: bb1, unwind: bb3]; // } // bb1: { // StorageDead(_3); -// EndRegion('23mce); +// EndRegion('19s); // _0 = (); // drop(_1) -> bb4; // } @@ -53,7 +53,7 @@ fn foo(f: F) where F: FnOnce() -> i32 { // resume; // } // bb3: { -// EndRegion('23mce); +// EndRegion('19s); // drop(_1) -> bb2; // } // bb4: { @@ -63,20 +63,20 @@ fn foo(f: F) where F: FnOnce() -> i32 { // 
END rustc.node4.SimplifyCfg-qualify-consts.after.mir // START rustc.node22.SimplifyCfg-qualify-consts.after.mir -// fn main::{{closure}}(_1: [closure@NodeId(22) d:&'23mce D]) -> i32 { +// fn main::{{closure}}(_1: [closure@NodeId(22) d:&'19s D]) -> i32 { // let mut _0: i32; -// let _2: &'14_0rce D; +// let _2: &'15_0rs D; // let mut _3: i32; // // bb0: { // StorageLive(_2); -// _2 = &'14_0rce (*(_1.0: &'23mce D)); +// _2 = &'15_0rs (*(_1.0: &'19s D)); // StorageLive(_3); // _3 = ((*_2).0: i32); // _0 = _3; // StorageDead(_3); // StorageDead(_2); -// EndRegion('14_0rce); +// EndRegion('15_0rs); // return; // } // END rustc.node22.SimplifyCfg-qualify-consts.after.mir diff --git a/src/test/mir-opt/end_region_7.rs b/src/test/mir-opt/end_region_7.rs index 9c8e3ec08d498..ee0615f2bb312 100644 --- a/src/test/mir-opt/end_region_7.rs +++ b/src/test/mir-opt/end_region_7.rs @@ -74,18 +74,18 @@ fn foo(f: F) where F: FnOnce() -> i32 { // START rustc.node22.SimplifyCfg-qualify-consts.after.mir // fn main::{{closure}}(_1: [closure@NodeId(22) d:D]) -> i32 { // let mut _0: i32; -// let _2: &'14_0rce D; +// let _2: &'15_0rs D; // let mut _3: i32; // // bb0: { // StorageLive(_2); -// _2 = &'14_0rce (_1.0: D); +// _2 = &'15_0rs (_1.0: D); // StorageLive(_3); // _3 = ((*_2).0: i32); // _0 = _3; // StorageDead(_3); // StorageDead(_2); -// EndRegion('14_0rce); +// EndRegion('15_0rs); // drop(_1) -> bb1; // } // bb1: { diff --git a/src/test/mir-opt/end_region_8.rs b/src/test/mir-opt/end_region_8.rs index b4dbec5cd2dd7..ef184e39c0061 100644 --- a/src/test/mir-opt/end_region_8.rs +++ b/src/test/mir-opt/end_region_8.rs @@ -31,15 +31,15 @@ fn foo(f: F) where F: FnOnce() -> i32 { // fn main() -> () { // let mut _0: (); // let _1: D; -// let _2: &'6_1rce D; +// let _2: &'21_1rs D; // let mut _3: (); -// let mut _4: [closure@NodeId(22) r:&'6_1rce D]; -// let mut _5: &'6_1rce D; +// let mut _4: [closure@NodeId(22) r:&'21_1rs D]; +// let mut _5: &'21_1rs D; // bb0: { // StorageLive(_1); // _1 = D::{{constructor}}(const 0i32,); // StorageLive(_2); -// _2 = &'6_1rce _1; +// _2 = &'21_1rs _1; // StorageLive(_4); // StorageLive(_5); // _5 = _2; @@ -51,14 +51,14 @@ fn foo(f: F) where F: FnOnce() -> i32 { // StorageDead(_4); // _0 = (); // StorageDead(_2); -// EndRegion('6_1rce); +// EndRegion('21_1rs); // drop(_1) -> bb4; // } // bb2: { // resume; // } // bb3: { -// EndRegion('6_1rce); +// EndRegion('21_1rs); // drop(_1) -> bb2; // } // bb4: { @@ -69,13 +69,13 @@ fn foo(f: F) where F: FnOnce() -> i32 { // END rustc.node4.SimplifyCfg-qualify-consts.after.mir // START rustc.node22.SimplifyCfg-qualify-consts.after.mir -// fn main::{{closure}}(_1: [closure@NodeId(22) r:&'6_1rce D]) -> i32 { +// fn main::{{closure}}(_1: [closure@NodeId(22) r:&'21_1rs D]) -> i32 { // let mut _0: i32; // let mut _2: i32; // // bb0: { // StorageLive(_2); -// _2 = ((*(_1.0: &'6_1rce D)).0: i32); +// _2 = ((*(_1.0: &'21_1rs D)).0: i32); // _0 = _2; // StorageDead(_2); // return; diff --git a/src/test/mir-opt/end_region_9.rs b/src/test/mir-opt/end_region_9.rs index deff984e4d0de..719bc3ff9dd04 100644 --- a/src/test/mir-opt/end_region_9.rs +++ b/src/test/mir-opt/end_region_9.rs @@ -42,7 +42,7 @@ fn main() { // let mut _0: (); // let mut _1: bool; // let _2: i32; -// let mut _4: &'13_0rce i32; +// let mut _4: &'13_0rs i32; // let mut _3: (); // let mut _5: !; // let mut _6: (); @@ -68,14 +68,14 @@ fn main() { // _0 = (); // StorageDead(_7); // StorageDead(_4); -// EndRegion('13_0rce); +// EndRegion('13_0rs); // StorageDead(_2); // StorageDead(_1); // return; 
// } // // bb3: { -// _4 = &'13_0rce _2; +// _4 = &'13_0rs _2; // _6 = (); // StorageDead(_7); // _1 = const true; diff --git a/src/test/mir-opt/issue-43457.rs b/src/test/mir-opt/issue-43457.rs index 708784df317a6..2a36672a45774 100644 --- a/src/test/mir-opt/issue-43457.rs +++ b/src/test/mir-opt/issue-43457.rs @@ -30,14 +30,14 @@ fn main() { } // scope 1 { // let _2: std::cell::RefCell; // } -// let mut _3: std::cell::RefMut<'17dce, i32>; -// let mut _4: &'17dce std::cell::RefCell; +// let mut _3: std::cell::RefMut<'17ds, i32>; +// let mut _4: &'17ds std::cell::RefCell; // // bb0: { // StorageLive(_2); // _2 = _1; // StorageLive(_4); -// _4 = &'17dce _2; +// _4 = &'17ds _2; // _3 = const >::borrow_mut(_4) -> bb1; // } // @@ -47,7 +47,7 @@ fn main() { } // // bb2: { // StorageDead(_4); -// EndRegion('17dce); +// EndRegion('17ds); // _0 = (); // StorageDead(_2); // return; diff --git a/src/test/mir-opt/validate_1.rs b/src/test/mir-opt/validate_1.rs index 677c92ea71b7a..ae9dcf8b7352d 100644 --- a/src/test/mir-opt/validate_1.rs +++ b/src/test/mir-opt/validate_1.rs @@ -37,19 +37,19 @@ fn main() { // START rustc.node23.EraseRegions.after.mir // fn main() -> () { // bb0: { -// Validate(Suspend(ReScope(Misc(NodeId(34)))), [_1: i32]); +// Validate(Suspend(ReScope(Node(ItemLocalId(10)))), [_1: i32]); // _6 = &ReErased mut _1; -// Validate(Acquire, [(*_6): i32/ReScope(Misc(NodeId(34)))]); -// Validate(Suspend(ReScope(Misc(NodeId(34)))), [(*_6): i32/ReScope(Misc(NodeId(34)))]); +// Validate(Acquire, [(*_6): i32/ReScope(Node(ItemLocalId(10)))]); +// Validate(Suspend(ReScope(Node(ItemLocalId(10)))), [(*_6): i32/ReScope(Node(ItemLocalId(10)))]); // _5 = &ReErased mut (*_6); -// Validate(Acquire, [(*_5): i32/ReScope(Misc(NodeId(34)))]); -// Validate(Release, [_2: (), _3: &ReScope(Misc(NodeId(34))) Test, _5: &ReScope(Misc(NodeId(34))) mut i32]); +// Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(10)))]); +// Validate(Release, [_2: (), _3: &ReScope(Node(ItemLocalId(10))) Test, _5: &ReScope(Node(ItemLocalId(10))) mut i32]); // _2 = const Test::foo(_3, _5) -> bb1; // } // // bb1: { // Validate(Acquire, [_2: ()]); -// EndRegion(ReScope(Misc(NodeId(34)))); +// EndRegion(ReScope(Node(ItemLocalId(10)))); // return; // } // } @@ -61,15 +61,15 @@ fn main() { // StorageLive(_3); // _3 = _2; // StorageLive(_4); -// Validate(Suspend(ReScope(Remainder(BlockRemainder { block: NodeId(41), first_statement_index: 0 }))), [(*_3): i32]); +// Validate(Suspend(ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 }))), [(*_3): i32]); // _4 = &ReErased (*_3); -// Validate(Acquire, [(*_4): i32/ReScope(Remainder(BlockRemainder { block: NodeId(41), first_statement_index: 0 })) (imm)]); +// Validate(Acquire, [(*_4): i32/ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 })) (imm)]); // StorageLive(_5); // _5 = (*_4); // _0 = _5; // StorageDead(_5); // StorageDead(_4); -// EndRegion(ReScope(Remainder(BlockRemainder { block: NodeId(41), first_statement_index: 0 }))); +// EndRegion(ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 }))); // StorageDead(_3); // return; // } diff --git a/src/test/mir-opt/validate_3.rs b/src/test/mir-opt/validate_3.rs index 9140cf5768f59..116e35b2d6f26 100644 --- a/src/test/mir-opt/validate_3.rs +++ b/src/test/mir-opt/validate_3.rs @@ -32,18 +32,18 @@ fn main() { // fn main() -> () { // let mut _5: &ReErased i32; // bb0: { -// Validate(Suspend(ReScope(Misc(NodeId(46)))), [((*_2).0: i32): 
i32/ReScope(Remainder(BlockRemainder { block: NodeId(18), first_statement_index: 3 })) (imm)]); +// Validate(Suspend(ReScope(Node(ItemLocalId(17)))), [((*_2).0: i32): i32/ReScope(Remainder(BlockRemainder { block: ItemLocalId(19), first_statement_index: 3 })) (imm)]); // _5 = &ReErased ((*_2).0: i32); -// Validate(Acquire, [(*_5): i32/ReScope(Misc(NodeId(46))) (imm)]); -// Validate(Suspend(ReScope(Misc(NodeId(46)))), [(*_5): i32/ReScope(Misc(NodeId(46))) (imm)]); +// Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(17))) (imm)]); +// Validate(Suspend(ReScope(Node(ItemLocalId(17)))), [(*_5): i32/ReScope(Node(ItemLocalId(17))) (imm)]); // _4 = &ReErased (*_5); -// Validate(Acquire, [(*_4): i32/ReScope(Misc(NodeId(46))) (imm)]); -// Validate(Release, [_3: (), _4: &ReScope(Misc(NodeId(46))) i32]); +// Validate(Acquire, [(*_4): i32/ReScope(Node(ItemLocalId(17))) (imm)]); +// Validate(Release, [_3: (), _4: &ReScope(Node(ItemLocalId(17))) i32]); // _3 = const foo(_4) -> bb1; // } // bb1: { -// EndRegion(ReScope(Misc(NodeId(46)))); -// EndRegion(ReScope(Remainder(BlockRemainder { block: NodeId(18), first_statement_index: 3 }))); +// EndRegion(ReScope(Node(ItemLocalId(17)))); +// EndRegion(ReScope(Remainder(BlockRemainder { block: ItemLocalId(19), first_statement_index: 3 }))); // return; // } // } diff --git a/src/test/mir-opt/validate_5.rs b/src/test/mir-opt/validate_5.rs index 0182e6e296445..ef2073dcc4b0f 100644 --- a/src/test/mir-opt/validate_5.rs +++ b/src/test/mir-opt/validate_5.rs @@ -50,12 +50,12 @@ fn main() { // _3 = _2; // StorageLive(_4); // StorageLive(_5); -// Validate(Suspend(ReScope(Misc(NodeId(44)))), [(*_3): i32]); +// Validate(Suspend(ReScope(Node(ItemLocalId(9)))), [(*_3): i32]); // _5 = &ReErased mut (*_3); -// Validate(Acquire, [(*_5): i32/ReScope(Misc(NodeId(44)))]); +// Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(9)))]); // _4 = _5 as *mut i32 (Misc); // StorageDead(_5); -// EndRegion(ReScope(Misc(NodeId(44)))); +// EndRegion(ReScope(Node(ItemLocalId(9)))); // Validate(Release, [_0: bool, _4: *mut i32]); // _0 = const write_42(_4) -> bb1; // } diff --git a/src/test/parse-fail/struct-literal-restrictions-in-lamda.rs b/src/test/parse-fail/struct-literal-restrictions-in-lamda.rs new file mode 100644 index 0000000000000..6b7a26556f430 --- /dev/null +++ b/src/test/parse-fail/struct-literal-restrictions-in-lamda.rs @@ -0,0 +1,29 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-flags: -Z parse-only + +struct Foo { + x: isize, +} + +impl Foo { + fn hi(&self) -> bool { + true + } +} + +fn main() { + while || Foo { + x: 3 //~ ERROR expected type, found `3` + }.hi() { //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `{` + println!("yo"); + } +} diff --git a/src/test/run-make/extern-fn-struct-passing-abi/test.c b/src/test/run-make/extern-fn-struct-passing-abi/test.c index 44a940a17a98a..25cd6da10b8fd 100644 --- a/src/test/run-make/extern-fn-struct-passing-abi/test.c +++ b/src/test/run-make/extern-fn-struct-passing-abi/test.c @@ -43,6 +43,16 @@ struct FloatPoint { double y; }; +struct FloatOne { + double x; +}; + +struct IntOdd { + int8_t a; + int8_t b; + int8_t c; +}; + // System V x86_64 ABI: // a, b, c, d, e should be in registers // s should be byval pointer @@ -283,7 +293,7 @@ struct Huge huge_struct(struct Huge s) { // p should be in registers // return should be in registers // -// Win64 ABI: +// Win64 ABI and 64-bit PowerPC ELFv1 ABI: // p should be a byval pointer // return should be in a hidden sret pointer struct FloatPoint float_point(struct FloatPoint p) { @@ -292,3 +302,23 @@ struct FloatPoint float_point(struct FloatPoint p) { return p; } + +// 64-bit PowerPC ELFv1 ABI: +// f1 should be in a register +// return should be in a hidden sret pointer +struct FloatOne float_one(struct FloatOne f1) { + assert(f1.x == 7.); + + return f1; +} + +// 64-bit PowerPC ELFv1 ABI: +// i should be in the least-significant bits of a register +// return should be in a hidden sret pointer +struct IntOdd int_odd(struct IntOdd i) { + assert(i.a == 1); + assert(i.b == 2); + assert(i.c == 3); + + return i; +} diff --git a/src/test/run-make/extern-fn-struct-passing-abi/test.rs b/src/test/run-make/extern-fn-struct-passing-abi/test.rs index aaae7ae4fb49b..54a4f868eb4e5 100644 --- a/src/test/run-make/extern-fn-struct-passing-abi/test.rs +++ b/src/test/run-make/extern-fn-struct-passing-abi/test.rs @@ -53,6 +53,20 @@ struct FloatPoint { y: f64 } +#[derive(Clone, Copy, Debug, PartialEq)] +#[repr(C)] +struct FloatOne { + x: f64, +} + +#[derive(Clone, Copy, Debug, PartialEq)] +#[repr(C)] +struct IntOdd { + a: i8, + b: i8, + c: i8, +} + #[link(name = "test", kind = "static")] extern { fn byval_rect(a: i32, b: i32, c: i32, d: i32, e: i32, s: Rect); @@ -83,6 +97,10 @@ extern { fn huge_struct(s: Huge) -> Huge; fn float_point(p: FloatPoint) -> FloatPoint; + + fn float_one(f: FloatOne) -> FloatOne; + + fn int_odd(i: IntOdd) -> IntOdd; } fn main() { @@ -91,6 +109,8 @@ fn main() { let u = FloatRect { a: 3489, b: 3490, c: 8. }; let v = Huge { a: 5647, b: 5648, c: 5649, d: 5650, e: 5651 }; let p = FloatPoint { x: 5., y: -3. }; + let f1 = FloatOne { x: 7. }; + let i = IntOdd { a: 1, b: 2, c: 3 }; unsafe { byval_rect(1, 2, 3, 4, 5, s); @@ -113,5 +133,12 @@ fn main() { assert_eq!(sret_byval_struct(1, 2, 3, 4, s), t); assert_eq!(sret_split_struct(1, 2, s), t); assert_eq!(float_point(p), p); + assert_eq!(int_odd(i), i); + + // MSVC/GCC/Clang are not consistent in the ABI of single-float aggregates. 
+ // x86_64: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=82028 + // i686: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=82041 + #[cfg(not(all(windows, target_env = "gnu")))] + assert_eq!(float_one(f1), f1); } } diff --git a/src/test/run-make/extern-fn-with-packed-struct/test.c b/src/test/run-make/extern-fn-with-packed-struct/test.c index 506954fca4617..4124e202c1dd0 100644 --- a/src/test/run-make/extern-fn-with-packed-struct/test.c +++ b/src/test/run-make/extern-fn-with-packed-struct/test.c @@ -1,6 +1,8 @@ // ignore-license // Pragma needed cause of gcc bug on windows: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=52991 +#include + #ifdef _MSC_VER #pragma pack(push,1) struct Foo { @@ -18,5 +20,8 @@ struct __attribute__((packed)) Foo { #endif struct Foo foo(struct Foo foo) { + assert(foo.a == 1); + assert(foo.b == 2); + assert(foo.c == 3); return foo; } diff --git a/src/test/run-make/extern-fn-with-packed-struct/test.rs b/src/test/run-make/extern-fn-with-packed-struct/test.rs index 9e81636e36703..d2540ad61542b 100644 --- a/src/test/run-make/extern-fn-with-packed-struct/test.rs +++ b/src/test/run-make/extern-fn-with-packed-struct/test.rs @@ -8,36 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::fmt; - -#[repr(packed)] -#[derive(Copy, Clone)] +#[repr(C, packed)] +#[derive(Copy, Clone, Debug, PartialEq)] struct Foo { a: i8, b: i16, c: i8 } -impl PartialEq for Foo { - fn eq(&self, other: &Foo) -> bool { - self.a == other.a && self.b == other.b && self.c == other.c - } -} - -impl fmt::Debug for Foo { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let a = self.a; - let b = self.b; - let c = self.c; - - f.debug_struct("Foo") - .field("a", &a) - .field("b", &b) - .field("c", &c) - .finish() - } -} - #[link(name = "test", kind = "static")] extern { fn foo(f: Foo) -> Foo; diff --git a/src/test/run-make/sysroot-crates-are-unstable/Makefile b/src/test/run-make/sysroot-crates-are-unstable/Makefile index 4b7052f9b9493..a35174b3c2ac4 100644 --- a/src/test/run-make/sysroot-crates-are-unstable/Makefile +++ b/src/test/run-make/sysroot-crates-are-unstable/Makefile @@ -1,35 +1,2 @@ --include ../tools.mk - -# This is a whitelist of files which are stable crates or simply are not crates, -# we don't check for the instability of these crates as they're all stable! -STABLE_CRATES := \ - std \ - core \ - proc_macro \ - rsbegin.o \ - rsend.o \ - dllcrt2.o \ - crt2.o \ - clang_rt.%_dynamic.dylib - -# Generate a list of all crates in the sysroot. 
To do this we list all files in -# rustc's sysroot, look at the filename, strip everything after the `-`, and -# strip the leading `lib` (if present) -SYSROOT := $(shell $(RUSTC) --print sysroot) -LIBS := $(wildcard $(SYSROOT)/lib/rustlib/$(TARGET)/lib/*) -LIBS := $(foreach lib,$(LIBS),$(notdir $(lib))) -LIBS := $(foreach lib,$(LIBS),$(word 1,$(subst -, ,$(lib)))) -LIBS := $(foreach lib,$(LIBS),$(patsubst lib%,%,$(lib))) -LIBS := $(filter-out $(STABLE_CRATES),$(LIBS)) - -all: $(foreach lib,$(LIBS),check-crate-$(lib)-is-unstable) - -check-crate-%-is-unstable: - @echo verifying $* is an unstable crate - @echo 'extern crate $*;' | \ - $(RUSTC) - --crate-type rlib 2>&1 | cat > $(TMPDIR)/$*; \ - true - @grep -q 'use of unstable library feature' $(TMPDIR)/$* || \ - (echo crate $* is not unstable && \ - cat $(TMPDIR)/$* && \ - false) +all: + python2.7 test.py diff --git a/src/test/run-make/sysroot-crates-are-unstable/test.py b/src/test/run-make/sysroot-crates-are-unstable/test.py new file mode 100644 index 0000000000000..210059e1010df --- /dev/null +++ b/src/test/run-make/sysroot-crates-are-unstable/test.py @@ -0,0 +1,71 @@ +# Copyright 2015 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +import sys +import os +from os import listdir +from os.path import isfile, join +from subprocess import PIPE, Popen + + +# This is a whitelist of files which are stable crates or simply are not crates, +# we don't check for the instability of these crates as they're all stable! +STABLE_CRATES = ['std', 'core', 'proc_macro', 'rsbegin.o', 'rsend.o', 'dllcrt2.o', 'crt2.o', + 'clang_rt'] + + +def convert_to_string(s): + if s.__class__.__name__ == 'bytes': + return s.decode('utf-8') + return s + + +def exec_command(command, to_input=None): + child = None + if to_input is None: + child = Popen(command, stdout=PIPE, stderr=PIPE) + else: + child = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE) + stdout, stderr = child.communicate(input=to_input) + return (convert_to_string(stdout), convert_to_string(stderr)) + + +def check_lib(lib): + if lib['name'] in STABLE_CRATES: + return True + print('verifying if {} is an unstable crate'.format(lib['name'])) + stdout, stderr = exec_command([os.environ['RUSTC'], '-', '--crate-type', 'rlib', + '--extern', '{}={}'.format(lib['name'], lib['path'])], + to_input='extern crate {};'.format(lib['name'])) + if not 'use of unstable library feature' in '{}{}'.format(stdout, stderr): + print('crate {} "{}" is not unstable'.format(lib['name'], lib['path'])) + print('{}{}'.format(stdout, stderr)) + print('') + return False + return True + +# Generate a list of all crates in the sysroot. 
To do this we list all files in +# rustc's sysroot, look at the filename, strip everything after the `-`, and +# strip the leading `lib` (if present) +def get_all_libs(dir_path): + return [{ 'path': join(dir_path, f), 'name': f[3:].split('-')[0] } + for f in listdir(dir_path) + if isfile(join(dir_path, f)) and f.endswith('.rlib') and f not in STABLE_CRATES] + + +sysroot = exec_command([os.environ['RUSTC'], '--print', 'sysroot'])[0].replace('\n', '') +libs = get_all_libs(join(sysroot, 'lib/rustlib/{}/lib'.format(os.environ['TARGET']))) + +ret = 0 +for lib in libs: + if not check_lib(lib): + # We continue so users can see all the not unstable crates. + ret = 1 +sys.exit(ret) diff --git a/src/test/run-pass-fulldeps/proc-macro/attr-on-trait.rs b/src/test/run-pass-fulldeps/proc-macro/attr-on-trait.rs new file mode 100644 index 0000000000000..8ba38875eff5b --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/attr-on-trait.rs @@ -0,0 +1,28 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:attr-on-trait.rs + +#![feature(proc_macro)] + +extern crate attr_on_trait; + +trait Foo { + #[attr_on_trait::foo] + fn foo() {} +} + +impl Foo for i32 { + fn foo(&self) {} +} + +fn main() { + 3i32.foo(); +} diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-on-trait.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-on-trait.rs new file mode 100644 index 0000000000000..8e9770340276b --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-on-trait.rs @@ -0,0 +1,25 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![feature(proc_macro)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_attribute] +pub fn foo(attr: TokenStream, item: TokenStream) -> TokenStream { + drop(attr); + assert_eq!(item.to_string(), "fn foo() { }"); + "fn foo(&self);".parse().unwrap() +} diff --git a/src/test/run-pass/discriminant_value-wrapper.rs b/src/test/run-pass/discriminant_value-wrapper.rs index 2dbda0be18d98..d7a32423710fd 100644 --- a/src/test/run-pass/discriminant_value-wrapper.rs +++ b/src/test/run-pass/discriminant_value-wrapper.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(discriminant_value)] - use std::mem; enum ADT { diff --git a/src/test/run-pass/generator/auxiliary/xcrate-reachable.rs b/src/test/run-pass/generator/auxiliary/xcrate-reachable.rs new file mode 100644 index 0000000000000..a6a2a2d081e1f --- /dev/null +++ b/src/test/run-pass/generator/auxiliary/xcrate-reachable.rs @@ -0,0 +1,24 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(conservative_impl_trait, generators, generator_trait)] + +use std::ops::Generator; + +fn msg() -> u32 { + 0 +} + +pub fn foo() -> impl Generator { + || { + yield; + return msg(); + } +} diff --git a/src/test/run-pass/generator/auxiliary/xcrate.rs b/src/test/run-pass/generator/auxiliary/xcrate.rs new file mode 100644 index 0000000000000..f6878e64fbf93 --- /dev/null +++ b/src/test/run-pass/generator/auxiliary/xcrate.rs @@ -0,0 +1,27 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait, conservative_impl_trait)] + +use std::ops::Generator; + +pub fn foo() -> impl Generator { + || { + if false { + yield; + } + } +} + +pub fn bar(t: T) -> Box> { + Box::new(|| { + yield t; + }) +} diff --git a/src/test/run-pass/generator/conditional-drop.rs b/src/test/run-pass/generator/conditional-drop.rs new file mode 100644 index 0000000000000..8329684e1a39b --- /dev/null +++ b/src/test/run-pass/generator/conditional-drop.rs @@ -0,0 +1,65 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::Generator; +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; + +static A: AtomicUsize = ATOMIC_USIZE_INIT; + +struct B; + +impl Drop for B { + fn drop(&mut self) { + A.fetch_add(1, Ordering::SeqCst); + } +} + + +fn test() -> bool { true } +fn test2() -> bool { false } + +fn main() { + t1(); + t2(); +} + +fn t1() { + let mut a = || { + let b = B; + if test() { + drop(b); + } + yield; + }; + + let n = A.load(Ordering::SeqCst); + a.resume(); + assert_eq!(A.load(Ordering::SeqCst), n + 1); + a.resume(); + assert_eq!(A.load(Ordering::SeqCst), n + 1); +} + +fn t2() { + let mut a = || { + let b = B; + if test2() { + drop(b); + } + yield; + }; + + let n = A.load(Ordering::SeqCst); + a.resume(); + assert_eq!(A.load(Ordering::SeqCst), n); + a.resume(); + assert_eq!(A.load(Ordering::SeqCst), n + 1); +} diff --git a/src/test/run-pass/generator/control-flow.rs b/src/test/run-pass/generator/control-flow.rs new file mode 100644 index 0000000000000..60a00b4e46756 --- /dev/null +++ b/src/test/run-pass/generator/control-flow.rs @@ -0,0 +1,56 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
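The auxiliary generator crates above return generators both via `impl Trait` and as boxed trait objects, but their generic parameters are not fully legible in this rendering of the patch. The sketch below shows the same two shapes; the function names and the exact associated-type bounds are assumptions rather than the signatures from the patch, and it needs a nightly compiler of this era with the listed features, like the tests themselves.

```
#![feature(conservative_impl_trait, generators, generator_trait)]

use std::ops::{Generator, GeneratorState};

// Returning a generator by `impl Trait`; the Yield/Return bounds are assumed.
fn counter() -> impl Generator<Yield = u32, Return = &'static str> {
    || {
        yield 1;
        yield 2;
        return "done";
    }
}

// Returning a boxed generator trait object, in the spirit of `bar` in xcrate.rs.
fn boxed<T: 'static>(t: T) -> Box<Generator<Yield = T, Return = ()>> {
    Box::new(move || {
        yield t;
    })
}

fn main() {
    let mut c = counter();
    match c.resume() {
        GeneratorState::Yielded(1) => {}
        s => panic!("bad state: {:?}", s),
    }
    match c.resume() {
        GeneratorState::Yielded(2) => {}
        s => panic!("bad state: {:?}", s),
    }
    match c.resume() {
        GeneratorState::Complete("done") => {}
        s => panic!("bad state: {:?}", s),
    }

    let mut b = boxed(3);
    match b.resume() {
        GeneratorState::Yielded(3) => {}
        s => panic!("bad state: {:?}", s),
    }
    match b.resume() {
        GeneratorState::Complete(()) => {}
        s => panic!("bad state: {:?}", s),
    }
}
```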
+ +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; + +fn finish(mut amt: usize, mut t: T) -> T::Return + where T: Generator +{ + loop { + match t.resume() { + GeneratorState::Yielded(()) => amt = amt.checked_sub(1).unwrap(), + GeneratorState::Complete(ret) => { + assert_eq!(amt, 0); + return ret + } + } + } + +} + +fn main() { + finish(1, || yield); + finish(8, || { + for _ in 0..8 { + yield; + } + }); + finish(1, || { + if true { + yield; + } else { + } + }); + finish(1, || { + if false { + } else { + yield; + } + }); + finish(2, || { + if { yield; false } { + yield; + panic!() + } + yield + }); +} diff --git a/src/test/run-pass/generator/drop-env.rs b/src/test/run-pass/generator/drop-env.rs new file mode 100644 index 0000000000000..ac42a25899dbb --- /dev/null +++ b/src/test/run-pass/generator/drop-env.rs @@ -0,0 +1,70 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::Generator; +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; + +static A: AtomicUsize = ATOMIC_USIZE_INIT; + +struct B; + +impl Drop for B { + fn drop(&mut self) { + A.fetch_add(1, Ordering::SeqCst); + } +} + +fn main() { + t1(); + t2(); + t3(); +} + +fn t1() { + let b = B; + let mut foo = || { + yield; + drop(b); + }; + + let n = A.load(Ordering::SeqCst); + drop(foo.resume()); + assert_eq!(A.load(Ordering::SeqCst), n); + drop(foo); + assert_eq!(A.load(Ordering::SeqCst), n + 1); +} + +fn t2() { + let b = B; + let mut foo = || { + yield b; + }; + + let n = A.load(Ordering::SeqCst); + drop(foo.resume()); + assert_eq!(A.load(Ordering::SeqCst), n + 1); + drop(foo); + assert_eq!(A.load(Ordering::SeqCst), n + 1); +} + +fn t3() { + let b = B; + let foo = || { + yield; + drop(b); + }; + + let n = A.load(Ordering::SeqCst); + assert_eq!(A.load(Ordering::SeqCst), n); + drop(foo); + assert_eq!(A.load(Ordering::SeqCst), n + 1); +} diff --git a/src/test/run-pass/generator/iterator-count.rs b/src/test/run-pass/generator/iterator-count.rs new file mode 100644 index 0000000000000..9afe95f9e865c --- /dev/null +++ b/src/test/run-pass/generator/iterator-count.rs @@ -0,0 +1,48 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait, conservative_impl_trait)] + +use std::ops::{GeneratorState, Generator}; + +struct W(T); + +impl> Iterator for W { + type Item = T::Yield; + + fn next(&mut self) -> Option { + match self.0.resume() { + GeneratorState::Complete(..) 
=> None, + GeneratorState::Yielded(v) => Some(v), + } + } +} + +fn test() -> impl Generator { + || { + for i in 1..6 { + yield i + } + } +} + +fn main() { + let end = 11; + + let closure_test = |start| { + move || { + for i in start..end { + yield i + } + } + }; + + assert!(W(test()).chain(W(closure_test(6))).eq(1..11)); +} diff --git a/src/test/run-pass/generator/panic-drops.rs b/src/test/run-pass/generator/panic-drops.rs new file mode 100644 index 0000000000000..53cd3235d9d0c --- /dev/null +++ b/src/test/run-pass/generator/panic-drops.rs @@ -0,0 +1,62 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::Generator; +use std::panic; +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; + +static A: AtomicUsize = ATOMIC_USIZE_INIT; + +struct B; + +impl Drop for B { + fn drop(&mut self) { + A.fetch_add(1, Ordering::SeqCst); + } +} + +fn bool_true() -> bool { + true +} + +fn main() { + let b = B; + let mut foo = || { + if bool_true() { + panic!(); + } + drop(b); + yield; + }; + + assert_eq!(A.load(Ordering::SeqCst), 0); + let res = panic::catch_unwind(panic::AssertUnwindSafe(|| { + foo.resume() + })); + assert!(res.is_err()); + assert_eq!(A.load(Ordering::SeqCst), 1); + + let mut foo = || { + if bool_true() { + panic!(); + } + drop(B); + yield; + }; + + assert_eq!(A.load(Ordering::SeqCst), 1); + let res = panic::catch_unwind(panic::AssertUnwindSafe(|| { + foo.resume() + })); + assert!(res.is_err()); + assert_eq!(A.load(Ordering::SeqCst), 1); +} diff --git a/src/test/run-pass/generator/panic-safe.rs b/src/test/run-pass/generator/panic-safe.rs new file mode 100644 index 0000000000000..a583f42b93d8c --- /dev/null +++ b/src/test/run-pass/generator/panic-safe.rs @@ -0,0 +1,35 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::Generator; +use std::panic; + +fn main() { + let mut foo = || { + if true { + panic!(); + } + yield; + }; + + let res = panic::catch_unwind(panic::AssertUnwindSafe(|| { + foo.resume() + })); + assert!(res.is_err()); + + for _ in 0..10 { + let res = panic::catch_unwind(panic::AssertUnwindSafe(|| { + foo.resume() + })); + assert!(res.is_err()); + } +} diff --git a/src/test/run-pass/generator/resume-after-return.rs b/src/test/run-pass/generator/resume-after-return.rs new file mode 100644 index 0000000000000..b2e2a3e7e9d5b --- /dev/null +++ b/src/test/run-pass/generator/resume-after-return.rs @@ -0,0 +1,33 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
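The `W` adapter in `iterator-count.rs` above exposes a generator as an `Iterator` by mapping `Yielded` to `Some` and `Complete` to `None`, but its generic parameters are hard to read in this rendering. A self-contained reconstruction of the adapter pattern follows; the trait bounds are a best reading rather than a verbatim copy, and it is nightly-only like the test.

```
#![feature(generators, generator_trait)]

use std::ops::{Generator, GeneratorState};

// Adapter: drive the wrapped generator once per `next` call and surface the
// yielded values; completion ends the iteration.
struct W<T>(T);

impl<T: Generator<Return = ()>> Iterator for W<T> {
    type Item = T::Yield;

    fn next(&mut self) -> Option<Self::Item> {
        match self.0.resume() {
            GeneratorState::Yielded(v) => Some(v),
            GeneratorState::Complete(..) => None,
        }
    }
}

fn main() {
    let g = || {
        for i in 1..6 {
            yield i;
        }
    };
    // The generator yields 1 through 5 and then completes.
    assert!(W(g).eq(1..6));
}
```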
+ +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::panic; + +fn main() { + let mut foo = || { + if true { + return + } + yield; + }; + + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } + + match panic::catch_unwind(move || foo.resume()) { + Ok(_) => panic!("generator successfully resumed"), + Err(_) => {} + } +} diff --git a/src/test/run-pass/generator/smoke.rs b/src/test/run-pass/generator/smoke.rs new file mode 100644 index 0000000000000..e9bdfbf28ea9c --- /dev/null +++ b/src/test/run-pass/generator/smoke.rs @@ -0,0 +1,181 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-emscripten +// compile-flags: --test + +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::thread; + +#[test] +fn simple() { + let mut foo = || { + if false { + yield; + } + }; + + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn return_capture() { + let a = String::from("foo"); + let mut foo = || { + if false { + yield; + } + a + }; + + match foo.resume() { + GeneratorState::Complete(ref s) if *s == "foo" => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn simple_yield() { + let mut foo = || { + yield; + }; + + match foo.resume() { + GeneratorState::Yielded(()) => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn yield_capture() { + let b = String::from("foo"); + let mut foo = || { + yield b; + }; + + match foo.resume() { + GeneratorState::Yielded(ref s) if *s == "foo" => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn simple_yield_value() { + let mut foo = || { + yield String::from("bar"); + return String::from("foo") + }; + + match foo.resume() { + GeneratorState::Yielded(ref s) if *s == "bar" => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(ref s) if *s == "foo" => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn return_after_yield() { + let a = String::from("foo"); + let mut foo = || { + yield; + return a + }; + + match foo.resume() { + GeneratorState::Yielded(()) => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(ref s) if *s == "foo" => {} + s => panic!("bad state: {:?}", s), + } +} + +#[test] +fn send_and_sync() { + assert_send_sync(|| { + yield + }); + assert_send_sync(|| { + yield String::from("foo"); + }); + assert_send_sync(|| { + yield; + return String::from("foo"); + }); + let a = 3; + assert_send_sync(|| { + yield a; + return + }); + let a = 3; + assert_send_sync(move || { + yield a; + return + }); + let a = String::from("a"); + assert_send_sync(|| { + yield ; + drop(a); + return + }); + let a = String::from("a"); + assert_send_sync(move || { + yield ; + drop(a); + return + }); + + fn assert_send_sync(_: T) {} +} + +#[test] +fn send_over_threads() { + let mut foo = || { yield }; + thread::spawn(move || { + match foo.resume() { + 
GeneratorState::Yielded(()) => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } + }).join().unwrap(); + + let a = String::from("a"); + let mut foo = || { yield a }; + thread::spawn(move || { + match foo.resume() { + GeneratorState::Yielded(ref s) if *s == "a" => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } + }).join().unwrap(); +} diff --git a/src/test/run-pass/generator/xcrate-reachable.rs b/src/test/run-pass/generator/xcrate-reachable.rs new file mode 100644 index 0000000000000..dff5e08b9c20e --- /dev/null +++ b/src/test/run-pass/generator/xcrate-reachable.rs @@ -0,0 +1,21 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:xcrate-reachable.rs + +#![feature(conservative_impl_trait, generator_trait)] + +extern crate xcrate_reachable as foo; + +use std::ops::Generator; + +fn main() { + foo::foo().resume(); +} diff --git a/src/test/run-pass/generator/xcrate.rs b/src/test/run-pass/generator/xcrate.rs new file mode 100644 index 0000000000000..dc7a6fdef9c7e --- /dev/null +++ b/src/test/run-pass/generator/xcrate.rs @@ -0,0 +1,37 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:xcrate.rs + +#![feature(generators, generator_trait)] + +extern crate xcrate; + +use std::ops::{GeneratorState, Generator}; + +fn main() { + let mut foo = xcrate::foo(); + + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } + + let mut foo = xcrate::bar(3); + + match foo.resume() { + GeneratorState::Yielded(3) => {} + s => panic!("bad state: {:?}", s), + } + match foo.resume() { + GeneratorState::Complete(()) => {} + s => panic!("bad state: {:?}", s), + } +} diff --git a/src/test/run-pass/generator/yield-subtype.rs b/src/test/run-pass/generator/yield-subtype.rs new file mode 100644 index 0000000000000..5ff070f311e01 --- /dev/null +++ b/src/test/run-pass/generator/yield-subtype.rs @@ -0,0 +1,23 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
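
The `send_over_threads` test above moves whole generators into `thread::spawn`, which only compiles because the generated state machines (captures included) are `Send`. A minimal stable-Rust sketch of the same requirement with a plain closure, independent of the generator feature:

```
use std::thread;

fn main() {
    let message = String::from("a");

    // `move` transfers ownership of `message` into the spawned thread; the
    // closure is `Send` because everything it captures is `Send`, which is
    // the same property the generator tests assert for their state machines.
    let handle = thread::spawn(move || message.len());

    assert_eq!(handle.join().unwrap(), 1);
}
```
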
+ +#![feature(generators)] + +fn bar<'a>() { + let a: &'static str = "hi"; + let b: &'a str = a; + + || { + yield a; + yield b; + }; +} + +fn main() {} \ No newline at end of file diff --git a/src/test/run-pass/import-crate-with-invalid-spans/main.rs b/src/test/run-pass/import-crate-with-invalid-spans/main.rs index 39c175f60da41..2f80a0954dc0a 100644 --- a/src/test/run-pass/import-crate-with-invalid-spans/main.rs +++ b/src/test/run-pass/import-crate-with-invalid-spans/main.rs @@ -16,7 +16,7 @@ extern crate crate_with_invalid_spans; fn main() { // The AST of `exported_generic` stored in crate_with_invalid_spans's - // metadata should contain an invalid span where span.lo > span.hi. + // metadata should contain an invalid span where span.lo() > span.hi(). // Let's make sure the compiler doesn't crash when encountering this. let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32); } diff --git a/src/test/run-pass/method-argument-inference-associated-type.rs b/src/test/run-pass/method-argument-inference-associated-type.rs new file mode 100644 index 0000000000000..76b8cf92329d1 --- /dev/null +++ b/src/test/run-pass/method-argument-inference-associated-type.rs @@ -0,0 +1,37 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct ClientMap; +pub struct ClientMap2; + +pub trait Service { + type Request; + fn call(&self, _req: Self::Request); +} + +pub struct S(T); + +impl Service for ClientMap { + type Request = S>; + fn call(&self, _req: Self::Request) {} +} + + +impl Service for ClientMap2 { + type Request = (Box,); + fn call(&self, _req: Self::Request) {} +} + + +fn main() { + ClientMap.call(S { 0: Box::new(|_msgid| ()) }); + ClientMap.call(S(Box::new(|_msgid| ()))); + ClientMap2.call((Box::new(|_msgid| ()),)); +} diff --git a/src/test/run-pass/rvalue-static-promotion.rs b/src/test/run-pass/rvalue-static-promotion.rs index e57491930a45b..acf96b566df84 100644 --- a/src/test/run-pass/rvalue-static-promotion.rs +++ b/src/test/run-pass/rvalue-static-promotion.rs @@ -8,8 +8,20 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#[allow(unused_variables)] +use std::cell::Cell; + +const NONE_CELL_STRING: Option> = None; + +struct Foo(T); +impl Foo { + const FOO: Option> = None; +} + fn main() { - let x: &'static u32 = &42; - let y: &'static Option = &None; + let _: &'static u32 = &42; + let _: &'static Option = &None; + + // We should be able to peek at consts and see they're None. + let _: &'static Option> = &NONE_CELL_STRING; + let _: &'static Option> = &Foo::FOO; } diff --git a/src/test/run-pass/semistatement-in-lambda.rs b/src/test/run-pass/semistatement-in-lambda.rs new file mode 100644 index 0000000000000..0fc5fe498a62d --- /dev/null +++ b/src/test/run-pass/semistatement-in-lambda.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
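
The updated `rvalue-static-promotion.rs` above leans on rvalue promotion: certain constant-evaluable temporaries are placed in static memory, so a `'static` reference to them is accepted. The stable sketch below covers only the long-accepted cases; the new `Cell`-carrying `None` consts exercised by the test are deliberately left to the test itself.

```
fn main() {
    // These temporaries are promoted to static memory, so the `'static`
    // lifetimes are satisfied even though the values are written inline.
    let x: &'static u32 = &42;
    let y: &'static Option<u32> = &None;

    assert_eq!(*x, 42);
    assert!(y.is_none());
}
```
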
+ + +pub fn main() { + // Test that lambdas behave as unary expressions with block-like expressions + -if true { 1 } else { 2 } * 3; + || if true { 1 } else { 2 } * 3; + + // The following is invalid and parses as `if true { 1 } else { 2 }; *3` + // if true { 1 } else { 2 } * 3 +} diff --git a/src/test/compile-fail/unboxed-closures-infer-explicit-call-too-early.rs b/src/test/run-pass/unboxed-closures-infer-explicit-call-early.rs similarity index 79% rename from src/test/compile-fail/unboxed-closures-infer-explicit-call-too-early.rs rename to src/test/run-pass/unboxed-closures-infer-explicit-call-early.rs index 62f6ee56ca5de..028f2e9375b3c 100644 --- a/src/test/compile-fail/unboxed-closures-infer-explicit-call-too-early.rs +++ b/src/test/run-pass/unboxed-closures-infer-explicit-call-early.rs @@ -8,8 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![feature(fn_traits)] + fn main() { - let mut zero = || {}; - let () = zero.call_mut(()); - //~^ ERROR we have not yet inferred what kind of closure it is + let mut zero = || 0; + let x = zero.call_mut(()); + assert_eq!(x, 0); } diff --git a/src/test/rustdoc/issue-41783.rs b/src/test/rustdoc/issue-41783.rs index 3933b8bcbb8fd..991cf4cf2b375 100644 --- a/src/test/rustdoc/issue-41783.rs +++ b/src/test/rustdoc/issue-41783.rs @@ -12,8 +12,10 @@ // @!has - 'space' // @!has - 'comment' // @has - '# single' -// @has - '## double' -// @has - '### triple' +// @has - '## double' +// @has - '### triple' +// @has - '#[outer]' +// @has - '#![inner]' /// ```no_run /// # # space @@ -21,5 +23,7 @@ /// ## single /// ### double /// #### triple +/// ##[outer] +/// ##![inner] /// ``` pub struct Foo; diff --git a/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs b/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs new file mode 100644 index 0000000000000..6fca32fece1d4 --- /dev/null +++ b/src/test/ui-fulldeps/proc-macro/auxiliary/three-equals.rs @@ -0,0 +1,56 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
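
The renamed unboxed-closures test above calls `call_mut` explicitly under the unstable `fn_traits` feature. The hedged nightly-only sketch below extends that idea with an argument, to show that the `Fn*` methods take their arguments as a single tuple; the closure and names here are illustrative, not taken from the test.

```
// Nightly-only sketch: explicit `call_mut` is gated on `fn_traits`,
// exactly as in the renamed test above.
#![feature(fn_traits)]

fn main() {
    let mut total = 0;
    let mut bump = |by: i32| {
        total += by;
        total
    };

    // Arguments are packed into a tuple; a one-argument call uses `(x,)`.
    let after = bump.call_mut((5,));
    assert_eq!(after, 5);
}
```
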
+ +// no-prefer-dynamic +#![feature(proc_macro)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::{TokenStream, TokenNode, Span, Diagnostic}; + +fn parse(input: TokenStream) -> Result<(), Diagnostic> { + let mut count = 0; + let mut last_span = Span::default(); + for tree in input { + let span = tree.span; + if count >= 3 { + return Err(span.error(format!("expected EOF, found `{}`.", tree)) + .span_note(last_span, "last good input was here") + .help("input must be: `===`")) + } + + if let TokenNode::Op('=', _) = tree.kind { + count += 1; + } else { + return Err(span.error(format!("expected `=`, found `{}`.", tree))); + } + + last_span = span; + } + + if count < 3 { + return Err(Span::default() + .error(format!("found {} equal signs, need exactly 3", count)) + .help("input must be: `===`")) + } + + Ok(()) +} + +#[proc_macro] +pub fn three_equals(input: TokenStream) -> TokenStream { + if let Err(diag) = parse(input) { + diag.emit(); + return TokenStream::empty(); + } + + "3".parse().unwrap() +} diff --git a/src/test/ui-fulldeps/proc-macro/three-equals.rs b/src/test/ui-fulldeps/proc-macro/three-equals.rs new file mode 100644 index 0000000000000..016e05c51f507 --- /dev/null +++ b/src/test/ui-fulldeps/proc-macro/three-equals.rs @@ -0,0 +1,38 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:three-equals.rs +// ignore-stage1 + +#![feature(proc_macro)] + +extern crate three_equals; + +use three_equals::three_equals; + +fn main() { + // This one is okay. + three_equals!(===); + + // Need exactly three equals. + three_equals!(==); + + // Need exactly three equals. + three_equals!(=====); + + // Only equals accepted. + three_equals!(abc); + + // Only equals accepted. + three_equals!(!!); + + // Only three characters expected. + three_equals!(===a); +} diff --git a/src/test/ui-fulldeps/proc-macro/three-equals.stderr b/src/test/ui-fulldeps/proc-macro/three-equals.stderr new file mode 100644 index 0000000000000..1afe0be280009 --- /dev/null +++ b/src/test/ui-fulldeps/proc-macro/three-equals.stderr @@ -0,0 +1,48 @@ +error: found 2 equal signs, need exactly 3 + --> $DIR/three-equals.rs:25:5 + | +25 | three_equals!(==); + | ^^^^^^^^^^^^^^^^^^ + | + = help: input must be: `===` + +error: expected EOF, found `=`. + --> $DIR/three-equals.rs:28:21 + | +28 | three_equals!(=====); + | ^^ + | +note: last good input was here + --> $DIR/three-equals.rs:28:21 + | +28 | three_equals!(=====); + | ^^ + = help: input must be: `===` + +error: expected `=`, found `abc`. + --> $DIR/three-equals.rs:31:19 + | +31 | three_equals!(abc); + | ^^^ + +error: expected `=`, found `!`. + --> $DIR/three-equals.rs:34:19 + | +34 | three_equals!(!!); + | ^ + +error: expected EOF, found `a`. 
+ --> $DIR/three-equals.rs:37:22 + | +37 | three_equals!(===a); + | ^ + | +note: last good input was here + --> $DIR/three-equals.rs:37:21 + | +37 | three_equals!(===a); + | ^ + = help: input must be: `===` + +error: aborting due to 5 previous errors + diff --git a/src/test/ui/generator/borrowing.rs b/src/test/ui/generator/borrowing.rs new file mode 100644 index 0000000000000..de10bdef4aee0 --- /dev/null +++ b/src/test/ui/generator/borrowing.rs @@ -0,0 +1,30 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::Generator; + +fn main() { + let _b = { + let a = 3; + (|| yield &a).resume() + //~^ ERROR: `a` does not live long enough + }; + + let _b = { + let a = 3; + || { + yield &a + //~^ ERROR: `a` does not live long enough + } + }; +} + diff --git a/src/test/ui/generator/borrowing.stderr b/src/test/ui/generator/borrowing.stderr new file mode 100644 index 0000000000000..0ed7e1f99027d --- /dev/null +++ b/src/test/ui/generator/borrowing.stderr @@ -0,0 +1,29 @@ +error[E0597]: `a` does not live long enough + --> $DIR/borrowing.rs:18:20 + | +18 | (|| yield &a).resume() + | -- ^ does not live long enough + | | + | capture occurs here +19 | //~^ ERROR: `a` does not live long enough +20 | }; + | - borrowed value only lives until here +... +29 | } + | - borrowed value needs to live until here + +error[E0597]: `a` does not live long enough + --> $DIR/borrowing.rs:25:20 + | +24 | || { + | -- capture occurs here +25 | yield &a + | ^ does not live long enough +... +28 | }; + | - borrowed value only lives until here +29 | } + | - borrowed value needs to live until here + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/generator/no-arguments-on-generators.rs b/src/test/ui/generator/no-arguments-on-generators.rs new file mode 100644 index 0000000000000..a7e98fe450927 --- /dev/null +++ b/src/test/ui/generator/no-arguments-on-generators.rs @@ -0,0 +1,17 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +fn main() { + let gen = |start| { //~ ERROR generators cannot have explicit arguments + yield; + }; +} diff --git a/src/test/ui/generator/no-arguments-on-generators.stderr b/src/test/ui/generator/no-arguments-on-generators.stderr new file mode 100644 index 0000000000000..4d2e228685ae3 --- /dev/null +++ b/src/test/ui/generator/no-arguments-on-generators.stderr @@ -0,0 +1,8 @@ +error[E0628]: generators cannot have explicit arguments + --> $DIR/no-arguments-on-generators.rs:14:15 + | +14 | let gen = |start| { //~ ERROR generators cannot have explicit arguments + | ^^^^^^^ + +error: aborting due to previous error + diff --git a/src/test/ui/generator/not-send-sync.rs b/src/test/ui/generator/not-send-sync.rs new file mode 100644 index 0000000000000..0419758d8ea17 --- /dev/null +++ b/src/test/ui/generator/not-send-sync.rs @@ -0,0 +1,31 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +use std::cell::Cell; + +fn main() { + fn assert_sync(_: T) {} + fn assert_send(_: T) {} + + assert_sync(|| { + //~^ ERROR: Sync` is not satisfied + let a = Cell::new(2); + yield; + }); + + let a = Cell::new(2); + assert_send(|| { + //~^ ERROR: Sync` is not satisfied + drop(&a); + yield; + }); +} diff --git a/src/test/ui/generator/not-send-sync.stderr b/src/test/ui/generator/not-send-sync.stderr new file mode 100644 index 0000000000000..e0c32a95e0d9b --- /dev/null +++ b/src/test/ui/generator/not-send-sync.stderr @@ -0,0 +1,24 @@ +error[E0277]: the trait bound `std::cell::Cell: std::marker::Sync` is not satisfied + --> $DIR/not-send-sync.rs:26:5 + | +26 | assert_send(|| { + | ^^^^^^^^^^^ `std::cell::Cell` cannot be shared between threads safely + | + = help: the trait `std::marker::Sync` is not implemented for `std::cell::Cell` + = note: required because of the requirements on the impl of `std::marker::Send` for `&std::cell::Cell` + = note: required because it appears within the type `[generator@$DIR/not-send-sync.rs:26:17: 30:6 a:&std::cell::Cell _]` + = note: required by `main::assert_send` + +error[E0277]: the trait bound `std::cell::Cell: std::marker::Sync` is not satisfied in `[generator@$DIR/not-send-sync.rs:19:17: 23:6 ((), std::cell::Cell)]` + --> $DIR/not-send-sync.rs:19:5 + | +19 | assert_sync(|| { + | ^^^^^^^^^^^ `std::cell::Cell` cannot be shared between threads safely + | + = help: within `[generator@$DIR/not-send-sync.rs:19:17: 23:6 ((), std::cell::Cell)]`, the trait `std::marker::Sync` is not implemented for `std::cell::Cell` + = note: required because it appears within the type `((), std::cell::Cell)` + = note: required because it appears within the type `[generator@$DIR/not-send-sync.rs:19:17: 23:6 ((), std::cell::Cell)]` + = note: required by `main::assert_sync` + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/generator/ref-escapes-but-not-over-yield.rs b/src/test/ui/generator/ref-escapes-but-not-over-yield.rs new file mode 100644 index 0000000000000..87edbb22baae1 --- /dev/null +++ b/src/test/ui/generator/ref-escapes-but-not-over-yield.rs @@ -0,0 +1,28 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::cell::Cell; + +fn foo(x: &i32) { + // In this case, a reference to `b` escapes the generator, but not + // because of a yield. We see that there is no yield in the scope of + // `b` and give the more generic error message. 
+ let mut a = &3; + let mut b = move || { + yield(); + let b = 5; + a = &b; //~ ERROR + }; +} + +fn main() { } diff --git a/src/test/ui/generator/ref-escapes-but-not-over-yield.stderr b/src/test/ui/generator/ref-escapes-but-not-over-yield.stderr new file mode 100644 index 0000000000000..e30d28c2db83b --- /dev/null +++ b/src/test/ui/generator/ref-escapes-but-not-over-yield.stderr @@ -0,0 +1,12 @@ +error[E0597]: `b` does not live long enough + --> $DIR/ref-escapes-but-not-over-yield.rs:25:5 + | +24 | a = &b; //~ ERROR + | - borrow occurs here +25 | }; + | ^ `b` dropped here while still borrowed +26 | } + | - borrowed value needs to live until here + +error: aborting due to previous error + diff --git a/src/test/ui/generator/yield-in-args-rev.rs b/src/test/ui/generator/yield-in-args-rev.rs new file mode 100644 index 0000000000000..fb0e68136f544 --- /dev/null +++ b/src/test/ui/generator/yield-in-args-rev.rs @@ -0,0 +1,24 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +fn foo(_a: (), _b: &bool) {} + +// Some examples that probably *could* be accepted, but which we reject for now. + +fn bar() { + || { + let b = true; + foo(yield, &b); //~ ERROR + }; +} + +fn main() { } diff --git a/src/test/ui/generator/yield-in-args-rev.stderr b/src/test/ui/generator/yield-in-args-rev.stderr new file mode 100644 index 0000000000000..157f896820906 --- /dev/null +++ b/src/test/ui/generator/yield-in-args-rev.stderr @@ -0,0 +1,10 @@ +error[E0626]: borrow may still be in use when generator yields + --> $DIR/yield-in-args-rev.rs:20:21 + | +20 | foo(yield, &b); //~ ERROR + | ----- ^ + | | + | possible yield occurs here + +error: aborting due to previous error + diff --git a/src/test/ui/generator/yield-in-args.rs b/src/test/ui/generator/yield-in-args.rs new file mode 100644 index 0000000000000..faeb4b1feb28d --- /dev/null +++ b/src/test/ui/generator/yield-in-args.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +fn foo(_b: &bool, _a: ()) {} + +fn main() { + || { + let b = true; + foo(&b, yield); //~ ERROR + }; +} diff --git a/src/test/ui/generator/yield-in-args.stderr b/src/test/ui/generator/yield-in-args.stderr new file mode 100644 index 0000000000000..06561853dee8c --- /dev/null +++ b/src/test/ui/generator/yield-in-args.stderr @@ -0,0 +1,8 @@ +error[E0626]: borrow may still be in use when generator yields + --> $DIR/yield-in-args.rs:18:14 + | +18 | foo(&b, yield); //~ ERROR + | ^ ----- possible yield occurs here + +error: aborting due to previous error + diff --git a/src/test/ui/generator/yield-in-const.rs b/src/test/ui/generator/yield-in-const.rs new file mode 100644 index 0000000000000..e166d26515975 --- /dev/null +++ b/src/test/ui/generator/yield-in-const.rs @@ -0,0 +1,14 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +const A: u8 = { yield 3u8; 3u8}; +//~^ ERROR yield statement outside diff --git a/src/test/ui/generator/yield-in-const.stderr b/src/test/ui/generator/yield-in-const.stderr new file mode 100644 index 0000000000000..8a265c065b988 --- /dev/null +++ b/src/test/ui/generator/yield-in-const.stderr @@ -0,0 +1,10 @@ +error[E0601]: main function not found + +error[E0627]: yield statement outside of generator literal + --> $DIR/yield-in-const.rs:13:17 + | +13 | const A: u8 = { yield 3u8; 3u8}; + | ^^^^^^^^^ + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/generator/yield-in-function.rs b/src/test/ui/generator/yield-in-function.rs new file mode 100644 index 0000000000000..2f6c5a9ef754c --- /dev/null +++ b/src/test/ui/generator/yield-in-function.rs @@ -0,0 +1,14 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +fn main() { yield; } +//~^ ERROR yield statement outside diff --git a/src/test/ui/generator/yield-in-function.stderr b/src/test/ui/generator/yield-in-function.stderr new file mode 100644 index 0000000000000..c6ee3b8e9e7e1 --- /dev/null +++ b/src/test/ui/generator/yield-in-function.stderr @@ -0,0 +1,8 @@ +error[E0627]: yield statement outside of generator literal + --> $DIR/yield-in-function.rs:13:13 + | +13 | fn main() { yield; } + | ^^^^^ + +error: aborting due to previous error + diff --git a/src/test/ui/generator/yield-in-static.rs b/src/test/ui/generator/yield-in-static.rs new file mode 100644 index 0000000000000..823a2aa425e2c --- /dev/null +++ b/src/test/ui/generator/yield-in-static.rs @@ -0,0 +1,14 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators)] + +static B: u8 = { yield 3u8; 3u8}; +//~^ ERROR yield statement outside diff --git a/src/test/ui/generator/yield-in-static.stderr b/src/test/ui/generator/yield-in-static.stderr new file mode 100644 index 0000000000000..d0575a0e47b3a --- /dev/null +++ b/src/test/ui/generator/yield-in-static.stderr @@ -0,0 +1,10 @@ +error[E0601]: main function not found + +error[E0627]: yield statement outside of generator literal + --> $DIR/yield-in-static.rs:13:18 + | +13 | static B: u8 = { yield 3u8; 3u8}; + | ^^^^^^^^^ + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/generator/yield-while-iterating.rs b/src/test/ui/generator/yield-while-iterating.rs new file mode 100644 index 0000000000000..bc53448cb08e6 --- /dev/null +++ b/src/test/ui/generator/yield-while-iterating.rs @@ -0,0 +1,84 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::cell::Cell; + +fn yield_during_iter_owned_data(x: Vec) { + // The generator owns `x`, so we error out when yielding with a + // reference to it. This winds up becoming a rather confusing + // regionck error -- in particular, we would freeze with the + // reference in scope, and it doesn't live long enough. + let _b = move || { + for p in &x { //~ ERROR + yield(); + } + }; +} + +fn yield_during_iter_borrowed_slice(x: &[i32]) { + let _b = move || { + for p in x { + yield(); + } + }; +} + +fn yield_during_iter_borrowed_slice_2() { + let mut x = vec![22_i32]; + let _b = || { + for p in &x { + yield(); + } + }; + println!("{:?}", x); +} + +fn yield_during_iter_borrowed_slice_3() { + // OK to take a mutable ref to `x` and yield + // up pointers from it: + let mut x = vec![22_i32]; + let mut b = || { + for p in &mut x { + yield p; + } + }; + b.resume(); +} + +fn yield_during_iter_borrowed_slice_4() { + // ...but not OK to do that while reading + // from `x` too + let mut x = vec![22_i32]; + let mut b = || { + for p in &mut x { + yield p; + } + }; + println!("{}", x[0]); //~ ERROR + b.resume(); +} + +fn yield_during_range_iter() { + // Should be OK. + let mut b = || { + let v = vec![1,2,3]; + let len = v.len(); + for i in 0..len { + let x = v[i]; + yield x; + } + }; + b.resume(); +} + +fn main() { } diff --git a/src/test/ui/generator/yield-while-iterating.stderr b/src/test/ui/generator/yield-while-iterating.stderr new file mode 100644 index 0000000000000..ea55e032e4761 --- /dev/null +++ b/src/test/ui/generator/yield-while-iterating.stderr @@ -0,0 +1,24 @@ +error[E0626]: borrow may still be in use when generator yields + --> $DIR/yield-while-iterating.rs:22:19 + | +22 | for p in &x { //~ ERROR + | ^ +23 | yield(); + | ------- possible yield occurs here + +error[E0502]: cannot borrow `x` as immutable because it is also borrowed as mutable + --> $DIR/yield-while-iterating.rs:67:20 + | +62 | let mut b = || { + | -- mutable borrow occurs here +63 | for p in &mut x { + | - previous borrow occurs due to use of `x` in closure +... +67 | println!("{}", x[0]); //~ ERROR + | ^ immutable borrow occurs here +68 | b.resume(); +69 | } + | - mutable borrow ends here + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/generator/yield-while-local-borrowed.rs b/src/test/ui/generator/yield-while-local-borrowed.rs new file mode 100644 index 0000000000000..d21c86e88681e --- /dev/null +++ b/src/test/ui/generator/yield-while-local-borrowed.rs @@ -0,0 +1,56 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::cell::Cell; + +fn borrow_local_inline() { + // Not OK to yield with a borrow of a temporary. 
+ // + // (This error occurs because the region shows up in the type of + // `b` and gets extended by region inference.) + let mut b = move || { + let a = &3; //~ ERROR + yield(); + println!("{}", a); + }; + b.resume(); +} + +fn borrow_local_inline_done() { + // No error here -- `a` is not in scope at the point of `yield`. + let mut b = move || { + { + let a = &3; + } + yield(); + }; + b.resume(); +} + +fn borrow_local() { + // Not OK to yield with a borrow of a temporary. + // + // (This error occurs because the region shows up in the type of + // `b` and gets extended by region inference.) + let mut b = move || { + let a = 3; + { + let b = &a; //~ ERROR + yield(); + println!("{}", b); + } + }; + b.resume(); +} + +fn main() { } diff --git a/src/test/ui/generator/yield-while-local-borrowed.stderr b/src/test/ui/generator/yield-while-local-borrowed.stderr new file mode 100644 index 0000000000000..2fe6c686ce366 --- /dev/null +++ b/src/test/ui/generator/yield-while-local-borrowed.stderr @@ -0,0 +1,10 @@ +error[E0626]: borrow may still be in use when generator yields + --> $DIR/yield-while-local-borrowed.rs:48:22 + | +48 | let b = &a; //~ ERROR + | ^ +49 | yield(); + | ------- possible yield occurs here + +error: aborting due to previous error + diff --git a/src/test/ui/generator/yield-while-ref-reborrowed.rs b/src/test/ui/generator/yield-while-ref-reborrowed.rs new file mode 100644 index 0000000000000..b9c963ae74077 --- /dev/null +++ b/src/test/ui/generator/yield-while-ref-reborrowed.rs @@ -0,0 +1,49 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generators, generator_trait)] + +use std::ops::{GeneratorState, Generator}; +use std::cell::Cell; + +fn reborrow_shared_ref(x: &i32) { + // This is OK -- we have a borrow live over the yield, but it's of + // data that outlives the generator. + let mut b = move || { + let a = &*x; + yield(); + println!("{}", a); + }; + b.resume(); +} + +fn reborrow_mutable_ref(x: &mut i32) { + // This is OK -- we have a borrow live over the yield, but it's of + // data that outlives the generator. + let mut b = move || { + let a = &mut *x; + yield(); + println!("{}", a); + }; + b.resume(); +} + +fn reborrow_mutable_ref_2(x: &mut i32) { + // ...but not OK to go on using `x`. + let mut b = || { + let a = &mut *x; + yield(); + println!("{}", a); + }; + println!("{}", x); //~ ERROR + b.resume(); +} + +fn main() { } diff --git a/src/test/ui/generator/yield-while-ref-reborrowed.stderr b/src/test/ui/generator/yield-while-ref-reborrowed.stderr new file mode 100644 index 0000000000000..7269f72973701 --- /dev/null +++ b/src/test/ui/generator/yield-while-ref-reborrowed.stderr @@ -0,0 +1,16 @@ +error[E0501]: cannot borrow `x` as immutable because previous closure requires unique access + --> $DIR/yield-while-ref-reborrowed.rs:45:20 + | +40 | let mut b = || { + | -- closure construction occurs here +41 | let a = &mut *x; + | - previous borrow occurs due to use of `x` in closure +... 
+45 | println!("{}", x); //~ ERROR + | ^ borrow occurs here +46 | b.resume(); +47 | } + | - borrow from closure ends here + +error: aborting due to previous error + diff --git a/src/test/ui/impl-trait/no-method-suggested-traits.stderr b/src/test/ui/impl-trait/no-method-suggested-traits.stderr index fc441f9484273..23f115858cd5e 100644 --- a/src/test/ui/impl-trait/no-method-suggested-traits.stderr +++ b/src/test/ui/impl-trait/no-method-suggested-traits.stderr @@ -8,6 +8,8 @@ error[E0599]: no method named `method` found for type `u32` in the current scope = note: the following traits are implemented but not in scope, perhaps add a `use` for one of them: candidate #1: `use foo::Bar;` candidate #2: `use no_method_suggested_traits::foo::PubPub;` + candidate #3: `use no_method_suggested_traits::qux::PrivPub;` + candidate #4: `use no_method_suggested_traits::Reexported;` error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&u32>>` in the current scope --> $DIR/no-method-suggested-traits.rs:38:44 @@ -19,6 +21,8 @@ error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::box = note: the following traits are implemented but not in scope, perhaps add a `use` for one of them: candidate #1: `use foo::Bar;` candidate #2: `use no_method_suggested_traits::foo::PubPub;` + candidate #3: `use no_method_suggested_traits::qux::PrivPub;` + candidate #4: `use no_method_suggested_traits::Reexported;` error[E0599]: no method named `method` found for type `char` in the current scope --> $DIR/no-method-suggested-traits.rs:44:9 diff --git a/src/test/ui/lint/not_found.rs b/src/test/ui/lint/not_found.rs new file mode 100644 index 0000000000000..5cdc19823cfff --- /dev/null +++ b/src/test/ui/lint/not_found.rs @@ -0,0 +1,21 @@ +// Copyright 2014–2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
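
The extra candidates added to `no-method-suggested-traits.stderr` above all follow from one rule: a trait's methods are only callable when the trait is in scope, and E0599 lists plausible `use` statements when it is not. A small stable sketch of the working side of that suggestion, with illustrative names:

```
mod foo {
    pub trait Bar {
        fn method(&self) -> u32;
    }

    impl Bar for u32 {
        fn method(&self) -> u32 {
            *self + 1
        }
    }
}

// Without this import, `1u32.method()` fails with E0599 and the compiler
// suggests `use foo::Bar;`, which is the candidate list the test checks.
use foo::Bar;

fn main() {
    assert_eq!(1u32.method(), 2);
}
```
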
+ +// this tests the `unknown_lint` lint, especially the suggestions + +// the suggestion only appears if a lint with the lowercase name exists +#[allow(FOO_BAR)] +// the suggestion appears on all-uppercase names +#[warn(DEAD_CODE)] +// the suggestion appears also on mixed-case names +#[deny(Warnings)] +fn main() { + unimplemented!(); +} diff --git a/src/test/ui/lint/not_found.stderr b/src/test/ui/lint/not_found.stderr new file mode 100644 index 0000000000000..73265845494c2 --- /dev/null +++ b/src/test/ui/lint/not_found.stderr @@ -0,0 +1,20 @@ +warning: unknown lint: `FOO_BAR` + --> $DIR/not_found.rs:14:9 + | +14 | #[allow(FOO_BAR)] + | ^^^^^^^ + | + = note: #[warn(unknown_lints)] on by default + +warning: unknown lint: `DEAD_CODE` + --> $DIR/not_found.rs:16:8 + | +16 | #[warn(DEAD_CODE)] + | ^^^^^^^^^ help: lowercase the lint name: `dead_code` + +warning: unknown lint: `Warnings` + --> $DIR/not_found.rs:18:8 + | +18 | #[deny(Warnings)] + | ^^^^^^^^ help: lowercase the lint name: `warnings` + diff --git a/src/test/ui/mismatched_types/closure-mismatch.stderr b/src/test/ui/mismatched_types/closure-mismatch.stderr index b7479f15b1812..d928a6a0a8e64 100644 --- a/src/test/ui/mismatched_types/closure-mismatch.stderr +++ b/src/test/ui/mismatched_types/closure-mismatch.stderr @@ -4,7 +4,6 @@ error[E0271]: type mismatch resolving `for<'r> <[closure@$DIR/closure-mismatch.r 18 | baz(|_| ()); | ^^^ expected bound lifetime parameter, found concrete lifetime | - = note: concrete lifetime that was found is lifetime '_#0r = note: required because of the requirements on the impl of `Foo` for `[closure@$DIR/closure-mismatch.rs:18:9: 18:15]` = note: required by `baz` diff --git a/src/test/ui/mismatched_types/issue-36053-2.stderr b/src/test/ui/mismatched_types/issue-36053-2.stderr index 174f7dfa0d0f5..e2e2019307f34 100644 --- a/src/test/ui/mismatched_types/issue-36053-2.stderr +++ b/src/test/ui/mismatched_types/issue-36053-2.stderr @@ -5,8 +5,8 @@ error[E0599]: no method named `count` found for type `std::iter::Filter` `std::iter::Filter>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator` + `&mut std::iter::Filter>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator` error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required --> $DIR/issue-36053-2.rs:17:32 diff --git a/src/test/ui/on-unimplemented/bad-annotation.rs b/src/test/ui/on-unimplemented/bad-annotation.rs index 8580749084d22..54d3b3e087653 100644 --- a/src/test/ui/on-unimplemented/bad-annotation.rs +++ b/src/test/ui/on-unimplemented/bad-annotation.rs @@ -37,5 +37,29 @@ trait BadAnnotation2 trait BadAnnotation3 {} +#[rustc_on_unimplemented(lorem="")] +trait BadAnnotation4 {} + +#[rustc_on_unimplemented(lorem(ipsum(dolor)))] +trait BadAnnotation5 {} + +#[rustc_on_unimplemented(message="x", message="y")] +trait BadAnnotation6 {} + +#[rustc_on_unimplemented(message="x", on(desugared, message="y"))] +trait BadAnnotation7 {} + +#[rustc_on_unimplemented(on(), message="y")] +trait BadAnnotation8 {} + +#[rustc_on_unimplemented(on="x", message="y")] +trait BadAnnotation9 {} + +#[rustc_on_unimplemented(on(x="y"), message="y")] +trait BadAnnotation10 {} + +#[rustc_on_unimplemented(on(desugared, on(desugared, message="x")), message="y")] +trait BadAnnotation11 {} + pub fn main() { } diff --git a/src/test/ui/on-unimplemented/bad-annotation.stderr 
b/src/test/ui/on-unimplemented/bad-annotation.stderr index 8599477e8ed7e..73834f4422d38 100644 --- a/src/test/ui/on-unimplemented/bad-annotation.stderr +++ b/src/test/ui/on-unimplemented/bad-annotation.stderr @@ -1,8 +1,8 @@ -error[E0232]: this attribute must have a value +error[E0232]: `#[rustc_on_unimplemented]` requires a value --> $DIR/bad-annotation.rs:26:1 | 26 | #[rustc_on_unimplemented] //~ ERROR this attribute must have a value - | ^^^^^^^^^^^^^^^^^^^^^^^^^ attribute requires a value + | ^^^^^^^^^^^^^^^^^^^^^^^^^ value required here | = note: eg `#[rustc_on_unimplemented = "foo"]` @@ -18,5 +18,59 @@ error[E0231]: only named substitution parameters are allowed 35 | #[rustc_on_unimplemented = "Unimplemented trait error on `{Self}` with params `<{A},{B},{}>`"] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 3 previous errors +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:40:26 + | +40 | #[rustc_on_unimplemented(lorem="")] + | ^^^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:43:26 + | +43 | #[rustc_on_unimplemented(lorem(ipsum(dolor)))] + | ^^^^^^^^^^^^^^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:46:39 + | +46 | #[rustc_on_unimplemented(message="x", message="y")] + | ^^^^^^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:49:39 + | +49 | #[rustc_on_unimplemented(message="x", on(desugared, message="y"))] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error[E0232]: empty `on`-clause in `#[rustc_on_unimplemented]` + --> $DIR/bad-annotation.rs:52:26 + | +52 | #[rustc_on_unimplemented(on(), message="y")] + | ^^^^ empty on-clause here + +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:55:26 + | +55 | #[rustc_on_unimplemented(on="x", message="y")] + | ^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error[E0232]: this attribute must have a valid value + --> $DIR/bad-annotation.rs:61:40 + | +61 | #[rustc_on_unimplemented(on(desugared, on(desugared, message="x")), message="y")] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ expected value here + | + = note: eg `#[rustc_on_unimplemented = "foo"]` + +error: aborting due to 10 previous errors diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index 226fa6469bc74..281c21f6f85ee 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -28,11 +28,11 @@ error: expected expression, found `;` 14 | foo(bar(; | ^ -error: expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `for`, `if`, `loop`, `match`, `move`, `return`, `true`, `unsafe`, `while`, or an operator, found `;` +error: expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `for`, `if`, `loop`, `match`, `move`, `return`, `true`, `unsafe`, `while`, `yield`, or an operator, found `;` --> $DIR/token-error-correct.rs:14:13 | 14 | foo(bar(; - | ^ expected one of 18 possible tokens here + | ^ expected one of 19 possible tokens here error: expected expression, found `)` --> 
$DIR/token-error-correct.rs:23:1 diff --git a/src/test/ui/span/gated-features-attr-spans.rs b/src/test/ui/span/gated-features-attr-spans.rs new file mode 100644 index 0000000000000..d5ccd2ea7ad3f --- /dev/null +++ b/src/test/ui/span/gated-features-attr-spans.rs @@ -0,0 +1,39 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(attr_literals)] + +#[repr(align(16))] +struct Gem { + mohs_hardness: u8, + poofed: bool, + weapon: Weapon, +} + +#[repr(simd)] +struct Weapon { + name: String, + damage: u32 +} + +impl Gem { + #[must_use] fn summon_weapon(&self) -> Weapon { self.weapon } +} + +#[must_use] +fn bubble(gem: Gem) -> Result { + if gem.poofed { + Ok(gem) + } else { + Err(()) + } +} + +fn main() {} diff --git a/src/test/ui/span/gated-features-attr-spans.stderr b/src/test/ui/span/gated-features-attr-spans.stderr new file mode 100644 index 0000000000000..66b2567f728a3 --- /dev/null +++ b/src/test/ui/span/gated-features-attr-spans.stderr @@ -0,0 +1,34 @@ +error: the struct `#[repr(align(u16))]` attribute is experimental (see issue #33626) + --> $DIR/gated-features-attr-spans.rs:13:1 + | +13 | #[repr(align(16))] + | ^^^^^^^^^^^^^^^^^^ + | + = help: add #![feature(repr_align)] to the crate attributes to enable + +error: SIMD types are experimental and possibly buggy (see issue #27731) + --> $DIR/gated-features-attr-spans.rs:20:1 + | +20 | #[repr(simd)] + | ^^^^^^^^^^^^^ + | + = help: add #![feature(repr_simd)] to the crate attributes to enable + +warning: `#[must_use]` on methods is experimental (see issue #43302) + --> $DIR/gated-features-attr-spans.rs:27:5 + | +27 | #[must_use] fn summon_weapon(&self) -> Weapon { self.weapon } + | ^^^^^^^^^^^ + | + = help: add #![feature(fn_must_use)] to the crate attributes to enable + +warning: `#[must_use]` on functions is experimental (see issue #43302) + --> $DIR/gated-features-attr-spans.rs:30:1 + | +30 | #[must_use] + | ^^^^^^^^^^^ + | + = help: add #![feature(fn_must_use)] to the crate attributes to enable + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/span/lint-unused-unsafe.stderr b/src/test/ui/span/lint-unused-unsafe.stderr index f4998e08907a3..1fa5f94aa4ca6 100644 --- a/src/test/ui/span/lint-unused-unsafe.stderr +++ b/src/test/ui/span/lint-unused-unsafe.stderr @@ -65,14 +65,12 @@ note: because it's nested under this `unsafe` block | |_____^ error: unnecessary `unsafe` block - --> $DIR/lint-unused-unsafe.rs:39:5 + --> $DIR/lint-unused-unsafe.rs:40:9 | -39 | / unsafe { //~ ERROR: unnecessary `unsafe` block -40 | | unsafe { //~ ERROR: unnecessary `unsafe` block +40 | / unsafe { //~ ERROR: unnecessary `unsafe` block 41 | | unsf() 42 | | } -43 | | } - | |_____^ unnecessary `unsafe` block + | |_________^ unnecessary `unsafe` block | note: because it's nested under this `unsafe` fn --> $DIR/lint-unused-unsafe.rs:38:1 @@ -87,12 +85,14 @@ note: because it's nested under this `unsafe` fn | |_^ error: unnecessary `unsafe` block - --> $DIR/lint-unused-unsafe.rs:40:9 + --> $DIR/lint-unused-unsafe.rs:39:5 | -40 | / unsafe { //~ ERROR: unnecessary `unsafe` block +39 | / unsafe { //~ ERROR: unnecessary `unsafe` block +40 | | unsafe { //~ ERROR: unnecessary `unsafe` block 41 | | unsf() 42 | | } - | |_________^ unnecessary 
`unsafe` block +43 | | } + | |_____^ unnecessary `unsafe` block | note: because it's nested under this `unsafe` fn --> $DIR/lint-unused-unsafe.rs:38:1 diff --git a/src/test/ui/span/missing-unit-argument.rs b/src/test/ui/span/missing-unit-argument.rs new file mode 100644 index 0000000000000..2cdab5bedc49a --- /dev/null +++ b/src/test/ui/span/missing-unit-argument.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn foo(():(), ():()) {} +fn bar(():()) {} + +fn main() { + let _: Result<(), String> = Ok(); + foo(); + foo(()); + bar(); +} diff --git a/src/test/ui/span/missing-unit-argument.stderr b/src/test/ui/span/missing-unit-argument.stderr new file mode 100644 index 0000000000000..e508a30d1826c --- /dev/null +++ b/src/test/ui/span/missing-unit-argument.stderr @@ -0,0 +1,45 @@ +error[E0061]: this function takes 1 parameter but 0 parameters were supplied + --> $DIR/missing-unit-argument.rs:15:33 + | +15 | let _: Result<(), String> = Ok(); + | ^^^^ + | +help: expected the unit value `()`. You can create one with a pair of parenthesis + | +15 | let _: Result<(), String> = Ok(()); + | ^^ + +error[E0061]: this function takes 2 parameters but 0 parameters were supplied + --> $DIR/missing-unit-argument.rs:16:5 + | +11 | fn foo(():(), ():()) {} + | ----------------------- defined here +... +16 | foo(); + | ^^^^^ expected 2 parameters + +error[E0061]: this function takes 2 parameters but 1 parameter was supplied + --> $DIR/missing-unit-argument.rs:17:9 + | +11 | fn foo(():(), ():()) {} + | ----------------------- defined here +... +17 | foo(()); + | ^^ expected 2 parameters + +error[E0061]: this function takes 1 parameter but 0 parameters were supplied + --> $DIR/missing-unit-argument.rs:18:5 + | +12 | fn bar(():()) {} + | ---------------- defined here +... +18 | bar(); + | ^^^^^ + | +help: expected the unit value `()`. You can create one with a pair of parenthesis + | +18 | bar(()); + | ^^ + +error: aborting due to 4 previous errors + diff --git a/src/test/ui/suggestions/issue-43420-no-over-suggest.rs b/src/test/ui/suggestions/issue-43420-no-over-suggest.rs new file mode 100644 index 0000000000000..d504b7cae28c4 --- /dev/null +++ b/src/test/ui/suggestions/issue-43420-no-over-suggest.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// check that we substitute type parameters before we suggest anything - otherwise +// we would suggest function such as `as_slice` for the `&[u16]`. 
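
The test that follows passes `&Vec<u8>` where `&[u16]` is expected, checking that the suggestion machinery substitutes type parameters before proposing methods such as `as_slice`. For contrast, the stable sketch below shows the coercion that succeeds once the element types agree; it is illustrative and not part of the test.

```
fn foo(b: &[u16]) -> usize {
    b.len()
}

fn main() {
    let a: Vec<u16> = vec![1, 2, 3];
    // `&Vec<u16>` deref-coerces to `&[u16]`, so no suggestion is needed;
    // the mismatch in the test only arises because its vector holds `u8`.
    assert_eq!(foo(&a), 3);
}
```
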
+ +fn foo(b: &[u16]) {} + +fn main() { + let a: Vec = Vec::new(); + foo(&a); +} diff --git a/src/test/ui/suggestions/issue-43420-no-over-suggest.stderr b/src/test/ui/suggestions/issue-43420-no-over-suggest.stderr new file mode 100644 index 0000000000000..bcad9ce56c65e --- /dev/null +++ b/src/test/ui/suggestions/issue-43420-no-over-suggest.stderr @@ -0,0 +1,11 @@ +error[E0308]: mismatched types + --> $DIR/issue-43420-no-over-suggest.rs:18:9 + | +18 | foo(&a); + | ^^ expected slice, found struct `std::vec::Vec` + | + = note: expected type `&[u16]` + found type `&std::vec::Vec` + +error: aborting due to previous error + diff --git a/src/test/ui/suggestions/try-operator-on-main.rs b/src/test/ui/suggestions/try-operator-on-main.rs index 55154e3507e87..eadd12924df66 100644 --- a/src/test/ui/suggestions/try-operator-on-main.rs +++ b/src/test/ui/suggestions/try-operator-on-main.rs @@ -8,6 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![feature(try_trait)] + +use std::ops::Try; + fn main() { + // error for a `Try` type on a non-`Try` fn std::fs::File::open("foo")?; + + // a non-`Try` type on a non-`Try` fn + ()?; + + // an unrelated use of `Try` + try_trait_generic::<()>(); +} + + + +fn try_trait_generic() -> T { + // and a non-`Try` object on a `Try` fn. + ()?; + + loop {} } diff --git a/src/test/ui/suggestions/try-operator-on-main.stderr b/src/test/ui/suggestions/try-operator-on-main.stderr index cf0481bdab772..e83bf2abc1504 100644 --- a/src/test/ui/suggestions/try-operator-on-main.stderr +++ b/src/test/ui/suggestions/try-operator-on-main.stderr @@ -1,14 +1,46 @@ -error[E0277]: the trait bound `(): std::ops::Try` is not satisfied - --> $DIR/try-operator-on-main.rs:12:5 +error[E0277]: the `?` operator can only be used in a function that returns `Result` (or another type that implements `std::ops::Try`) + --> $DIR/try-operator-on-main.rs:17:5 | -12 | std::fs::File::open("foo")?; +17 | std::fs::File::open("foo")?; | --------------------------- | | - | the `?` operator can only be used in a function that returns `Result` (or another type that implements `std::ops::Try`) + | cannot use the `?` operator in a function that returns `()` | in this macro invocation | = help: the trait `std::ops::Try` is not implemented for `()` = note: required by `std::ops::Try::from_error` -error: aborting due to previous error +error[E0277]: the `?` operator can only be applied to values that implement `std::ops::Try` + --> $DIR/try-operator-on-main.rs:20:5 + | +20 | ()?; + | --- + | | + | the `?` operator cannot be applied to type `()` + | in this macro invocation + | + = help: the trait `std::ops::Try` is not implemented for `()` + = note: required by `std::ops::Try::into_result` + +error[E0277]: the trait bound `(): std::ops::Try` is not satisfied + --> $DIR/try-operator-on-main.rs:23:5 + | +23 | try_trait_generic::<()>(); + | ^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::ops::Try` is not implemented for `()` + | + = note: required by `try_trait_generic` + +error[E0277]: the `?` operator can only be applied to values that implement `std::ops::Try` + --> $DIR/try-operator-on-main.rs:30:5 + | +30 | ()?; + | --- + | | + | the `?` operator cannot be applied to type `()` + | in this macro invocation + | + = help: the trait `std::ops::Try` is not implemented for `()` + = note: required by `std::ops::Try::into_result` + +error: aborting due to 4 previous errors diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs index 
7a09ae48b91bd..e2be021e7cc39 100644 --- a/src/tools/build-manifest/src/main.rs +++ b/src/tools/build-manifest/src/main.rs @@ -239,7 +239,12 @@ impl Builder { self.package("rust-std", &mut manifest.pkg, TARGETS); self.package("rust-docs", &mut manifest.pkg, TARGETS); self.package("rust-src", &mut manifest.pkg, &["*"]); - self.package("rls", &mut manifest.pkg, HOSTS); + let rls_package_name = if self.rust_release == "nightly" { + "rls" + } else { + "rls-preview" + }; + self.package(rls_package_name, &mut manifest.pkg, HOSTS); self.package("rust-analysis", &mut manifest.pkg, TARGETS); let mut pkg = Package { @@ -276,7 +281,7 @@ impl Builder { } extensions.push(Component { - pkg: "rls".to_string(), + pkg: rls_package_name.to_string(), target: host.to_string(), }); extensions.push(Component { @@ -353,7 +358,7 @@ impl Builder { format!("rust-src-{}.tar.gz", self.rust_release) } else if component == "cargo" { format!("cargo-{}-{}.tar.gz", self.cargo_release, target) - } else if component == "rls" { + } else if component == "rls" || component == "rls-preview" { format!("rls-{}-{}.tar.gz", self.rls_release, target) } else { format!("{}-{}-{}.tar.gz", component, self.rust_release, target) @@ -363,7 +368,7 @@ impl Builder { fn cached_version(&self, component: &str) -> &str { if component == "cargo" { &self.cargo_version - } else if component == "rls" { + } else if component == "rls" || component == "rls-preview" { &self.rls_version } else { &self.rust_version diff --git a/src/tools/cargo b/src/tools/cargo index 3d3f2c05d742e..34c0674a25128 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 3d3f2c05d742e5f907e951aa8849b03f0bc1a895 +Subproject commit 34c0674a251287c94cdd1a112966bcb9010c62e8 diff --git a/src/tools/clippy b/src/tools/clippy new file mode 160000 index 0000000000000..25444585592f5 --- /dev/null +++ b/src/tools/clippy @@ -0,0 +1 @@ +Subproject commit 25444585592f5da648edd5317fcdd21f2db8bb64 diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 0d6b350a1d431..cee7e52c7f3c6 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -83,117 +83,117 @@ impl fmt::Display for Mode { #[derive(Clone)] pub struct Config { - // The library paths required for running the compiler + /// The library paths required for running the compiler pub compile_lib_path: PathBuf, - // The library paths required for running compiled programs + /// The library paths required for running compiled programs pub run_lib_path: PathBuf, - // The rustc executable + /// The rustc executable pub rustc_path: PathBuf, - // The rustdoc executable + /// The rustdoc executable pub rustdoc_path: Option, - // The python executable to use for LLDB + /// The python executable to use for LLDB pub lldb_python: String, - // The python executable to use for htmldocck + /// The python executable to use for htmldocck pub docck_python: String, - // The llvm FileCheck binary path + /// The llvm FileCheck binary path pub llvm_filecheck: Option, - // The valgrind path + /// The valgrind path pub valgrind_path: Option, - // Whether to fail if we can't run run-pass-valgrind tests under valgrind - // (or, alternatively, to silently run them like regular run-pass tests). + /// Whether to fail if we can't run run-pass-valgrind tests under valgrind + /// (or, alternatively, to silently run them like regular run-pass tests). 
pub force_valgrind: bool, - // The directory containing the tests to run + /// The directory containing the tests to run pub src_base: PathBuf, - // The directory where programs should be built + /// The directory where programs should be built pub build_base: PathBuf, - // The name of the stage being built (stage1, etc) + /// The name of the stage being built (stage1, etc) pub stage_id: String, - // The test mode, compile-fail, run-fail, run-pass + /// The test mode, compile-fail, run-fail, run-pass pub mode: Mode, - // Run ignored tests + /// Run ignored tests pub run_ignored: bool, - // Only run tests that match this filter + /// Only run tests that match this filter pub filter: Option, - // Exactly match the filter, rather than a substring + /// Exactly match the filter, rather than a substring pub filter_exact: bool, - // Write out a parseable log of tests that were run + /// Write out a parseable log of tests that were run pub logfile: Option, - // A command line to prefix program execution with, - // for running under valgrind + /// A command line to prefix program execution with, + /// for running under valgrind pub runtool: Option, - // Flags to pass to the compiler when building for the host + /// Flags to pass to the compiler when building for the host pub host_rustcflags: Option, - // Flags to pass to the compiler when building for the target + /// Flags to pass to the compiler when building for the target pub target_rustcflags: Option, - // Target system to be tested + /// Target system to be tested pub target: String, - // Host triple for the compiler being invoked + /// Host triple for the compiler being invoked pub host: String, - // Path to / name of the GDB executable + /// Path to / name of the GDB executable pub gdb: Option, - // Version of GDB, encoded as ((major * 1000) + minor) * 1000 + patch + /// Version of GDB, encoded as ((major * 1000) + minor) * 1000 + patch pub gdb_version: Option, - // Whether GDB has native rust support + /// Whether GDB has native rust support pub gdb_native_rust: bool, - // Version of LLDB + /// Version of LLDB pub lldb_version: Option, - // Version of LLVM + /// Version of LLVM pub llvm_version: Option, - // Is LLVM a system LLVM + /// Is LLVM a system LLVM pub system_llvm: bool, - // Path to the android tools + /// Path to the android tools pub android_cross_path: PathBuf, - // Extra parameter to run adb on arm-linux-androideabi + /// Extra parameter to run adb on arm-linux-androideabi pub adb_path: String, - // Extra parameter to run test suite on arm-linux-androideabi + /// Extra parameter to run test suite on arm-linux-androideabi pub adb_test_dir: String, - // status whether android device available or not + /// status whether android device available or not pub adb_device_status: bool, - // the path containing LLDB's Python module + /// the path containing LLDB's Python module pub lldb_python_dir: Option, - // Explain what's going on + /// Explain what's going on pub verbose: bool, - // Print one character per test instead of one line + /// Print one character per test instead of one line pub quiet: bool, - // Whether to use colors in test. + /// Whether to use colors in test. 
pub color: ColorConfig, - // where to find the remote test client process, if we're using it + /// where to find the remote test client process, if we're using it pub remote_test_client: Option, // Configuration for various run-make tests frobbing things like C compilers diff --git a/src/tools/rls b/src/tools/rls index 25ffb3a3d7809..303671ea8103c 160000 --- a/src/tools/rls +++ b/src/tools/rls @@ -1 +1 @@ -Subproject commit 25ffb3a3d7809b4fa112f3e04e926eb539dd5e90 +Subproject commit 303671ea8103cbc39575a1f47a204159546a04d0 diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index f572ad9cd0204..e9e4b55402c47 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -33,6 +33,12 @@ static EXCEPTIONS: &'static [&'static str] = &[ "openssl", // BSD+advertising clause, cargo, mdbook "pest", // MPL2, mdbook via handlebars "thread-id", // Apache-2.0, mdbook + "cssparser", // MPL-2.0, rustdoc + "smallvec", // MPL-2.0, rustdoc + "magenta-sys", // BSD-3-Clause, rustdoc + "magenta", // BSD-3-Clause, rustdoc + "cssparser-macros", // MPL-2.0, rustdoc + "selectors", // MPL-2.0, rustdoc ]; pub fn check(path: &Path, bad: &mut bool) { diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index 020570e61dc63..731a3d96cff9d 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -62,6 +62,7 @@ fn filter_dirs(path: &Path) -> bool { "src/rt/hoedown", "src/tools/cargo", "src/tools/rls", + "src/tools/clippy", "src/tools/rust-installer", ]; skip.iter().any(|p| path.ends_with(p)) diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs index 10c9971382046..8092a9e156beb 100644 --- a/src/tools/tidy/src/pal.rs +++ b/src/tools/tidy/src/pal.rs @@ -65,11 +65,10 @@ const EXCEPTION_PATHS: &'static [&'static str] = &[ "src/rtstartup", // Not sure what to do about this. magic stuff for mingw // temporary exceptions - "src/libstd/rtdeps.rs", // Until rustbuild replaces make + "src/libstd/lib.rs", // FIXME(#44217) "src/libstd/path.rs", "src/libstd/f32.rs", "src/libstd/f64.rs", - "src/libstd/lib.rs", // Until next stage0 snapshot bump "src/libstd/sys_common/mod.rs", "src/libstd/sys_common/net.rs", "src/libterm", // Not sure how to make this crate portable, but test needs it
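
The tidy change above skips the new `src/tools/clippy` submodule via `Path::ends_with`, which compares whole path components rather than raw string suffixes. A stable sketch of that behaviour, using a hypothetical checkout path:

```
use std::path::Path;

fn main() {
    // Hypothetical path; only the trailing components matter here.
    let path = Path::new("/checkout/src/tools/clippy");

    // `Path::ends_with` matches complete components...
    assert!(path.ends_with("src/tools/clippy"));
    assert!(path.ends_with("clippy"));

    // ...so a partial final component never matches, unlike `str::ends_with`.
    assert!(!path.ends_with("ools/clippy"));
}
```
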