Leveraged -Xmigration to burn off some warts which arose in the
new collections. Warnings were put in place for behavioral changes, allowing the following. 1) Buffers: create new collections on ++ and -- like all the other collections. 2) Maps: eliminated the never-shipped redundant method valuesIterable and supplied these return types: `def keys: Iterable[A]`; `def keysIterator: Iterator[A]`; `def values: Iterable[B]`; `def valuesIterator: Iterator[B]`; `def keySet: Set[A]`. I concluded that keys should return Iterable because keySet also exists on Map, and keySet is not solely in the province of Maps even if we wanted to change it: it's defined on Sorted and also appears in some Sets. So it seems sensible to have keySet return a Set and keys return the more general type. Closes #3089, #3145. Review by odersky. git-svn-id: http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk@21167 5e8d7ff9-d8ef-0310-90f0-a4852d11357a
This commit is contained in:
parent
70c414ef77
commit
6de14d83b6
|
@ -140,7 +140,7 @@ private[remote] class NetKernel(service: Service) {
|
|||
|
||||
def terminate() {
|
||||
// tell all proxies to terminate
|
||||
proxies.valuesIterator foreach { p => p.send(Terminate, null) }
|
||||
proxies.values foreach { _.send(Terminate, null) }
|
||||
|
||||
// tell service to terminate
|
||||
service.terminate()
|
||||
|
|
|
@ -923,7 +923,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
|
|||
|
||||
private def writeICode() {
|
||||
val printer = new icodes.TextPrinter(null, icodes.linearizer)
|
||||
icodes.classes.valuesIterator.foreach((cls) => {
|
||||
icodes.classes.values.foreach((cls) => {
|
||||
val suffix = if (cls.symbol hasFlag Flags.MODULE) "$.icode" else ".icode"
|
||||
var file = getFile(cls.symbol, suffix)
|
||||
// if (file.exists())
|
||||
|
|
|
@ -268,10 +268,10 @@ class Interpreter(val settings: Settings, out: PrintWriter) {
|
|||
// ))
|
||||
}
|
||||
|
||||
private def keyList[T](x: collection.Map[T, _]): List[T] = x.keysIterator.toList sortBy (_.toString)
|
||||
private def keyList[T](x: collection.Map[T, _]): List[T] = x.keys.toList sortBy (_.toString)
|
||||
def allUsedNames = keyList(usedNameMap)
|
||||
def allBoundNames = keyList(boundNameMap)
|
||||
def allSeenTypes = prevRequests.toList flatMap (_.typeOf.valuesIterator.toList) distinct
|
||||
def allSeenTypes = prevRequests.toList flatMap (_.typeOf.values.toList) distinct
|
||||
def allValueGeneratingNames = allHandlers flatMap (_.generatesValue)
|
||||
def allImplicits = partialFlatMap(allHandlers) {
|
||||
case x: MemberHandler if x.definesImplicit => x.boundNames
|
||||
|
|
|
@ -96,21 +96,8 @@ trait PhaseAssembly { self: Global =>
|
|||
/* Given the entire graph, collect the phase objects at each level, where the phase
|
||||
* names are sorted alphabetical at each level, into the compiler phase list
|
||||
*/
|
||||
def compilerPhaseList(): List[SubComponent] = {
|
||||
var chain: List[SubComponent] = Nil
|
||||
|
||||
var lvl = 1
|
||||
var nds = nodes.valuesIterator.filter(_.level == lvl).toList
|
||||
while(nds.size > 0) {
|
||||
nds = nds sortBy (_.phasename)
|
||||
for (n <- nds) {
|
||||
chain = chain ::: n.phaseobj.get
|
||||
}
|
||||
lvl += 1
|
||||
nds = nodes.valuesIterator.filter(_.level == lvl).toList
|
||||
}
|
||||
chain
|
||||
}
|
||||
def compilerPhaseList(): List[SubComponent] =
|
||||
nodes.values.toList sortBy (x => (x.level, x.phasename)) flatMap (_.phaseobj) flatten
|
||||
|
||||
/* Test if there are cycles in the graph, assign levels to the nodes
|
||||
* and collapse hard links into nodes
|
||||
|
|
|
@ -191,7 +191,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
|
|||
|
||||
/** Extract all the namespaces from the attribute map. */
|
||||
val namespaces: List[Tree] =
|
||||
for (z <- attrMap.keysIterator.toList ; if z startsWith xmlns) yield {
|
||||
for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
|
||||
val ns = splitPrefix(z) match {
|
||||
case (Some(_), rest) => rest
|
||||
case _ => null
|
||||
|
|
|
@ -74,7 +74,7 @@ abstract class Checkers {
|
|||
def checkICodes: Unit = {
|
||||
if (settings.verbose.value)
|
||||
println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
|
||||
classes.valuesIterator foreach check
|
||||
classes.values foreach check
|
||||
}
|
||||
|
||||
def check(cls: IClass) {
|
||||
|
|
|
@ -59,7 +59,7 @@ abstract class ICodes extends AnyRef
|
|||
val printer = new TextPrinter(new PrintWriter(Console.out, true),
|
||||
new DumpLinearizer)
|
||||
|
||||
classes.valuesIterator foreach printer.printClass
|
||||
classes.values foreach printer.printClass
|
||||
}
|
||||
|
||||
object liveness extends Liveness {
|
||||
|
|
|
@ -238,7 +238,7 @@ trait Linearizers { self: ICodes =>
|
|||
covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
|
||||
}
|
||||
|
||||
val tryBlocks = handlersByCovered.keysIterator.toList sortBy size
|
||||
val tryBlocks = handlersByCovered.keys.toList sortBy size
|
||||
|
||||
var result = normalLinearizer.linearize(m)
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ abstract class GenJVM extends SubComponent {
|
|||
for ((sym, cls) <- icodes.classes ; if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
|
||||
icodes.classes -= sym
|
||||
|
||||
classes.valuesIterator foreach apply
|
||||
classes.values foreach apply
|
||||
}
|
||||
|
||||
override def apply(cls: IClass) {
|
||||
|
|
|
@ -42,15 +42,15 @@ abstract class GenMSIL extends SubComponent {
|
|||
val codeGenerator = new BytecodeGenerator
|
||||
|
||||
//classes is ICodes.classes, a HashMap[Symbol, IClass]
|
||||
classes.valuesIterator foreach codeGenerator.findEntryPoint
|
||||
classes.values foreach codeGenerator.findEntryPoint
|
||||
|
||||
codeGenerator.initAssembly
|
||||
|
||||
classes.valuesIterator foreach codeGenerator.createTypeBuilder
|
||||
classes.valuesIterator foreach codeGenerator.createClassMembers
|
||||
classes.values foreach codeGenerator.createTypeBuilder
|
||||
classes.values foreach codeGenerator.createClassMembers
|
||||
|
||||
try {
|
||||
classes.valuesIterator foreach codeGenerator.genClass
|
||||
classes.values foreach codeGenerator.genClass
|
||||
} finally {
|
||||
codeGenerator.writeAssembly
|
||||
}
|
||||
|
@ -469,7 +469,7 @@ abstract class GenMSIL extends SubComponent {
|
|||
}
|
||||
|
||||
private def createTypes() {
|
||||
for (sym <- classes.keysIterator) {
|
||||
for (sym <- classes.keys) {
|
||||
val iclass = classes(sym)
|
||||
val tBuilder = types(sym).asInstanceOf[TypeBuilder]
|
||||
|
||||
|
@ -755,7 +755,7 @@ abstract class GenMSIL extends SubComponent {
|
|||
val newHandlersBySize = newHandlers.groupBy(_.covered.size)
|
||||
// big handlers first, smaller ones are nested inside the try of the big one
|
||||
// (checked by the assertions below)
|
||||
val sizes = newHandlersBySize.keysIterator.toList.sortWith(_ > _)
|
||||
val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
|
||||
|
||||
val beginHandlers = new ListBuffer[ExceptionHandler]
|
||||
for (s <- sizes) {
|
||||
|
|
|
@ -143,7 +143,7 @@ final class CommentFactory(val reporter: Reporter) { parser =>
|
|||
case Nil =>
|
||||
|
||||
val bodyTags: mutable.Map[TagKey, List[Body]] =
|
||||
mutable.Map((tags map { case (key, values) => key -> (values map (parseWiki(_, pos))) }).toSeq:_*)
|
||||
mutable.Map(tags mapValues (_ map (parseWiki(_, pos))) toSeq: _*)
|
||||
|
||||
def oneTag(key: SimpleTagKey): Option[Body] =
|
||||
((bodyTags remove key): @unchecked) match {
|
||||
|
@ -158,7 +158,7 @@ final class CommentFactory(val reporter: Reporter) { parser =>
|
|||
|
||||
def allSymsOneTag(key: TagKey): Map[String, Body] = {
|
||||
val keys: Seq[SymbolTagKey] =
|
||||
bodyTags.keysIterator.toSeq flatMap {
|
||||
bodyTags.keys.toSeq flatMap {
|
||||
case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
|
||||
case stk: SimpleTagKey if (stk.name == key.name) =>
|
||||
reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
|
||||
|
|
|
@ -222,7 +222,7 @@ self =>
|
|||
reporter.reset
|
||||
firsts = firsts filter (s => unitOfFile contains (s.file))
|
||||
val prefix = firsts map unitOf
|
||||
val units = prefix ::: (unitOfFile.valuesIterator.toList diff prefix) filter (!_.isUpToDate)
|
||||
val units = prefix ::: (unitOfFile.values.toList diff prefix) filter (!_.isUpToDate)
|
||||
recompile(units)
|
||||
if (debugIDE) inform("Everything is now up to date")
|
||||
}
|
||||
|
@ -387,7 +387,7 @@ self =>
|
|||
addScopeMember(sym, pre, imp.qual)
|
||||
}
|
||||
}
|
||||
val result = locals.valuesIterator.toList
|
||||
val result = locals.values.toList
|
||||
if (debugIDE) for (m <- result) println(m)
|
||||
result
|
||||
}
|
||||
|
@ -455,7 +455,7 @@ self =>
|
|||
addTypeMember(sym, vpre, false, view.tree.symbol)
|
||||
}
|
||||
}
|
||||
members.valuesIterator.toList
|
||||
members.values.toList
|
||||
}
|
||||
|
||||
// ---------------- Helper classes ---------------------------
|
||||
|
|
|
@ -108,6 +108,6 @@ object CompletionAware {
|
|||
*/
|
||||
def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
|
||||
def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
|
||||
apply(() => map.keysIterator.toList, map.get _)
|
||||
apply(() => map.keys.toList, map.get _)
|
||||
}
|
||||
|
||||
|
|
|
@ -45,7 +45,7 @@ extends AbstractFile {
|
|||
|
||||
// the toList is so that the directory may continue to be
|
||||
// modified while its elements are iterated
|
||||
def iterator = files.valuesIterator.toList.iterator
|
||||
def iterator = files.values.toList.iterator
|
||||
|
||||
override def lookupName(name: String, directory: Boolean): AbstractFile =
|
||||
files get name filter (_.isDirectory == directory) orNull
|
||||
|
|
|
@ -456,7 +456,16 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
|
|||
|
||||
def isDeprecated = hasAnnotation(DeprecatedAttr)
|
||||
def deprecationMessage = getAnnotationArg(DeprecatedAttr, 0) partialMap { case Literal(const) => const.stringValue }
|
||||
def migrationMessage = getAnnotationArg(MigrationAnnotationClass, 2) partialMap { case Literal(const) => const.stringValue }
|
||||
// !!! when annotation arguments are not literal strings, but any sort of
|
||||
// assembly of strings, there is a fair chance they will turn up here not as
|
||||
// Literal(const) but some arbitrary AST. However nothing in the compiler
|
||||
// prevents someone from writing a @migration annotation with a calculated
|
||||
// string. So this needs attention. For now the fact that migration is
|
||||
// private[scala] ought to provide enough protection.
|
||||
def migrationMessage = getAnnotationArg(MigrationAnnotationClass, 2) partialMap {
|
||||
case Literal(const) => const.stringValue
|
||||
case x => x.toString // should not be necessary, but better than silently ignoring an issue
|
||||
}
|
||||
def elisionLevel = getAnnotationArg(ElidableMethodClass, 0) partialMap { case Literal(Constant(x: Int)) => x }
|
||||
|
||||
/** Does this symbol denote a wrapper object of the interpreter or its class? */
|
||||
|
|
|
@ -3366,7 +3366,7 @@ A type's typeSymbol should never be inspected directly.
|
|||
override val dropNonConstraintAnnotations = true
|
||||
|
||||
private var existSyms = immutable.Map.empty[Int, Symbol]
|
||||
def existentialsNeeded: List[Symbol] = existSyms.valuesIterator.toList
|
||||
def existentialsNeeded: List[Symbol] = existSyms.values.toList
|
||||
|
||||
/* Return the type symbol for referencing a parameter index
|
||||
* inside the existential quantifier. */
|
||||
|
|
|
@ -952,7 +952,7 @@ abstract class ClassfileParser {
|
|||
}
|
||||
}
|
||||
|
||||
for (entry <- innerClasses.valuesIterator) {
|
||||
for (entry <- innerClasses.values) {
|
||||
// create a new class member for immediate inner classes
|
||||
if (entry.outerName == externalName) {
|
||||
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
|
||||
|
|
|
@ -428,7 +428,7 @@ abstract class LambdaLift extends InfoTransform {
|
|||
override def transformUnit(unit: CompilationUnit) {
|
||||
computeFreeVars
|
||||
atPhase(phase.next)(super.transformUnit(unit))
|
||||
assert(liftedDefs.size == 0, liftedDefs.keysIterator.toList)
|
||||
assert(liftedDefs.size == 0, liftedDefs.keys.toList)
|
||||
}
|
||||
} // class LambdaLifter
|
||||
|
||||
|
|
|
@ -278,7 +278,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|
|||
tpes
|
||||
case _ =>
|
||||
log(sym + " specialized on everything")
|
||||
primitiveTypes.valuesIterator.toList
|
||||
primitiveTypes.values.toList
|
||||
}
|
||||
case _ =>
|
||||
Nil
|
||||
|
@ -581,8 +581,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|
|||
if (sym.isMethod && !sym.info.typeParams.isEmpty) {
|
||||
val (stps, tps) = splitParams(sym.info.typeParams)
|
||||
val res = sym :: (for (env <- specializations(stps) if needsSpecialization(env, sym)) yield {
|
||||
val keys = env.keysIterator.toList;
|
||||
val vals = env.valuesIterator.toList
|
||||
val keys = env.keys.toList;
|
||||
val vals = env.values.toList
|
||||
val specMember = sym.cloneSymbol(owner).setFlag(SPECIALIZED).resetFlag(DEFERRED)
|
||||
specMember.name = specializedName(sym, env)
|
||||
|
||||
|
@ -778,8 +778,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|
|||
}
|
||||
// disabled because of bugs in std. collections
|
||||
//val (keys, values) = env.iterator.toList.unzip
|
||||
val keys = env.keysIterator.toList
|
||||
val values = env.valuesIterator.toList
|
||||
val keys = env.keys.toList
|
||||
val values = env.values.toList
|
||||
(new FullTypeMap(keys, values))(tpe)
|
||||
// tpe.subst(keys, values)
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ abstract class Duplicators extends Analyzer {
|
|||
newClassOwner = newThis
|
||||
} else resetClassOwners
|
||||
|
||||
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
|
||||
envSubstitution = new SubstSkolemsTypeMap(env.keys.toList, env.values.toList)
|
||||
log("retyped with env: " + env)
|
||||
(new BodyDuplicator(context)).typed(tree)
|
||||
}
|
||||
|
|
|
@ -913,7 +913,7 @@ self: Analyzer =>
|
|||
def allImplicits: List[SearchResult] = {
|
||||
val invalidImplicits = new ListBuffer[Symbol]
|
||||
def search(iss: List[List[ImplicitInfo]], isLocal: Boolean) =
|
||||
applicableInfos(iss, isLocal, invalidImplicits).valuesIterator.toList
|
||||
applicableInfos(iss, isLocal, invalidImplicits).values.toList
|
||||
search(context.implicitss, true) ::: search(implicitsOfExpectedType, false)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -929,9 +929,10 @@ abstract class RefChecks extends InfoTransform {
|
|||
/** Similar to deprecation: check if the symbol is marked with @migration
|
||||
* indicating it has changed semantics between versions.
|
||||
*/
|
||||
private def checkMigration(sym: Symbol, pos: Position) =
|
||||
private def checkMigration(sym: Symbol, pos: Position) = {
|
||||
for (msg <- sym.migrationMessage)
|
||||
unit.warning(pos, "%s%s has changed semantics:\n %s".format(sym, sym.locationString, msg))
|
||||
unit.warning(pos, "%s%s has changed semantics:\n%s".format(sym, sym.locationString, msg))
|
||||
}
|
||||
|
||||
/** Check that a deprecated val or def does not override a
|
||||
* concrete, non-deprecated method. If it does, then
|
||||
|
|
|
@ -206,7 +206,7 @@ abstract class Statistics {
|
|||
if (phase.name != "parser") {
|
||||
val counts = new ClassCounts
|
||||
for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
|
||||
inform("#retained nodes : " + counts.valuesIterable.sum)
|
||||
inform("#retained nodes : " + counts.values.sum)
|
||||
inform("#retained nodes by type : " + showCounts(counts))
|
||||
inform("#typechecked identifiers : " + typedIdentCount)
|
||||
inform("#typechecked selections : " + typedSelectCount)
|
||||
|
|
|
@ -90,7 +90,7 @@ abstract class Enumeration(initial: Int, names: String*) {
|
|||
*/
|
||||
def values: ValueSet = {
|
||||
if (!vsetDefined) {
|
||||
vset = new ValueSet(BitSet.empty ++ (vmap.valuesIterator map (_.id)))
|
||||
vset = new ValueSet(BitSet.empty ++ (vmap.values map (_.id)))
|
||||
vsetDefined = true
|
||||
}
|
||||
vset
|
||||
|
|
|
@ -279,6 +279,11 @@ trait Iterator[+A] { self =>
|
|||
*/
|
||||
def next(): A
|
||||
|
||||
/** Tests whether this iterator is empty.
|
||||
* @return `true` if hasNext is false, `false` otherwise.
|
||||
*/
|
||||
def isEmpty: Boolean = !hasNext
|
||||
|
||||
/** Selects first ''n'' values of this iterator.
|
||||
* @param n the number of values to take
|
||||
* @return an iterator producing only of the first `n` values of this iterator, or else the
|
||||
|
|
|
@ -12,6 +12,7 @@ package scala.collection
|
|||
|
||||
import generic._
|
||||
import mutable.{Builder, StringBuilder, MapBuilder}
|
||||
import annotation.migration
|
||||
import PartialFunction._
|
||||
|
||||
/** A template trait for maps of type `Map[A, B]` which associate keys of type `A`
|
||||
|
@ -181,15 +182,16 @@ self =>
|
|||
*
|
||||
* @return an iterator over all keys.
|
||||
*/
|
||||
@deprecated("use `keysIterator' instead")
|
||||
def keys: Iterator[A] = keysIterator
|
||||
@migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
|
||||
def keys: Iterable[A] = keySet
|
||||
|
||||
/** Collects all values of this map in an iterable collection.
|
||||
* @return the values of this map as an iterable.
|
||||
*/
|
||||
def valuesIterable: Iterable[B] = new DefaultValuesIterable
|
||||
@migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
|
||||
def values: Iterable[B] = new DefaultValuesIterable
|
||||
|
||||
/** The implementation class of the iterable returned by `valuesIterable`.
|
||||
/** The implementation class of the iterable returned by `values`.
|
||||
*/
|
||||
protected class DefaultValuesIterable extends Iterable[B] {
|
||||
def iterator = valuesIterator
|
||||
|
@ -207,13 +209,6 @@ self =>
|
|||
def next = iter.next._2
|
||||
}
|
||||
|
||||
/** Creates an iterator for all contained values.
|
||||
*
|
||||
* @return an iterator over all values.
|
||||
*/
|
||||
@deprecated("use `valuesIterator' instead")
|
||||
def values: Iterator[B] = valuesIterator
|
||||
|
||||
/** Defines the default value computation for the map,
|
||||
* returned when a key is not found
|
||||
* The method implemented here throws an exception,
|
||||
|
|
|
@ -36,10 +36,9 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
|
|||
override def isDefinedAt(key: A) = self.isDefinedAt(key)
|
||||
override def keySet: Set[A] = self.keySet
|
||||
override def keysIterator: Iterator[A] = self.keysIterator
|
||||
override def keys: Iterator[A] = self.keysIterator
|
||||
override def valuesIterable: Iterable[B] = self.valuesIterable
|
||||
override def keys: Iterable[A] = self.keys
|
||||
override def values: Iterable[B] = self.values
|
||||
override def valuesIterator: Iterator[B] = self.valuesIterator
|
||||
override def values: Iterator[B] = self.valuesIterator
|
||||
override def default(key: A): B = self.default(key)
|
||||
override def filterKeys(p: A => Boolean) = self.filterKeys(p)
|
||||
override def mapValues[C](f: B => C) = self.mapValues(f)
|
||||
|
|
|
@ -16,8 +16,8 @@ package generic
|
|||
* @author Sean McDirmid
|
||||
* @since 2.8
|
||||
*/
|
||||
trait Sorted[K, +This <: Sorted[K, This]]{
|
||||
def ordering : Ordering[K];
|
||||
trait Sorted[K, +This <: Sorted[K, This]] {
|
||||
def ordering : Ordering[K]
|
||||
|
||||
/** The current collection */
|
||||
protected def repr: This
|
||||
|
@ -25,7 +25,6 @@ trait Sorted[K, +This <: Sorted[K, This]]{
|
|||
/** return as a projection the set of keys in this collection */
|
||||
def keySet: SortedSet[K]
|
||||
|
||||
|
||||
/** Returns the first key of the collection. */
|
||||
def firstKey: K
|
||||
|
||||
|
@ -67,25 +66,26 @@ trait Sorted[K, +This <: Sorted[K, This]]{
|
|||
* @return ...
|
||||
*/
|
||||
def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
|
||||
|
||||
|
||||
/** Create a range projection of this collection with no lower-bound.
|
||||
* @param to The upper-bound (inclusive) of the ranged projection.
|
||||
*/
|
||||
def to(to: K): This = {
|
||||
// tough!
|
||||
val i = keySet.from(to).iterator;
|
||||
if (!i.hasNext) return repr
|
||||
val next = i.next;
|
||||
if (next == to) {
|
||||
if (!i.hasNext) return repr
|
||||
else return until(i.next)
|
||||
} else return until(next)
|
||||
val i = keySet.from(to).iterator
|
||||
if (i.isEmpty) return repr
|
||||
val next = i.next
|
||||
if (next == to)
|
||||
if (i.isEmpty) repr
|
||||
else until(i.next)
|
||||
else
|
||||
until(next)
|
||||
}
|
||||
|
||||
protected def hasAll(j: Iterator[K]): Boolean = {
|
||||
val i = keySet.iterator;
|
||||
if (!i.hasNext) return !j.hasNext;
|
||||
val i = keySet.iterator
|
||||
if (i.isEmpty) return j.isEmpty
|
||||
|
||||
var in = i.next;
|
||||
while (j.hasNext) {
|
||||
val jn = j.next;
|
||||
|
@ -99,5 +99,4 @@ trait Sorted[K, +This <: Sorted[K, This]]{
|
|||
}
|
||||
true
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -30,9 +30,10 @@ with SubtractableMethods[A, This]
|
|||
def apply(key: A): B
|
||||
def contains(key: A): Boolean
|
||||
def isDefinedAt(key: A): Boolean
|
||||
def keySet: Set[A]
|
||||
def keys: Iterable[A]
|
||||
def keysIterator: Iterator[A]
|
||||
def valuesIterable: Iterable[B]
|
||||
def keySet: Set[A]
|
||||
def values: Iterable[B]
|
||||
def valuesIterator: Iterator[B]
|
||||
def default(key: A): B
|
||||
def filterKeys(p: A => Boolean): DefaultMap[A, B]
|
||||
|
|
|
@ -14,6 +14,7 @@ package mutable
|
|||
|
||||
import generic._
|
||||
import script._
|
||||
import annotation.migration
|
||||
|
||||
/** A template trait for buffers of type `Buffer[A]`.
|
||||
*
|
||||
|
@ -278,9 +279,11 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
|
|||
*
|
||||
* @param iter the iterable object.
|
||||
*/
|
||||
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
|
||||
"Use `clone() ++=` if you intend to create a new collection.")
|
||||
override def ++(iter: Traversable[A]): This = {
|
||||
@migration(2, 8,
|
||||
"As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
|
||||
"Use ++= instead if you intend to add by side effect to an existing collection.\n"
|
||||
)
|
||||
override def ++(iter: Traversable[A]): This = {
|
||||
for (elem <- iter) +=(elem)
|
||||
repr
|
||||
}
|
||||
|
@ -290,8 +293,10 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
|
|||
*
|
||||
* @param iter the iterator
|
||||
*/
|
||||
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
|
||||
"Use `clone() ++=` if you intend to create a new collection.")
|
||||
@migration(2, 8,
|
||||
"As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
|
||||
"Use ++= instead if you intend to add by side effect to an existing collection.\n"
|
||||
)
|
||||
override def ++ (iter: Iterator[A]): This = {
|
||||
for (elem <- iter) +=(elem)
|
||||
repr
|
||||
|
@ -325,8 +330,10 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
|
|||
*
|
||||
* @param iter the Traversable object.
|
||||
*/
|
||||
@deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
|
||||
"Use `clone() --=` if you intend to create a new collection.")
|
||||
@migration(2, 8,
|
||||
"As of 2.8, -- always creates a new collection, even on Buffers.\n"+
|
||||
"Use --= instead if you intend to add by side effect to an existing collection.\n"
|
||||
)
|
||||
override def --(iter: Traversable[A]): This = {
|
||||
for (elem <- iter) -=(elem)
|
||||
repr
|
||||
|
@ -340,13 +347,12 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
|
|||
*
|
||||
* @param iter the iterator
|
||||
*/
|
||||
@deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
|
||||
"Use `clone() --=` if you intend to create a new collection.")
|
||||
@migration(2, 8,
|
||||
"As of 2.8, -- always creates a new collection, even on Buffers.\n"+
|
||||
"Use --= instead if you intend to add by side effect to an existing collection.\n"
|
||||
)
|
||||
override def --(iter: Iterator[A]): This = {
|
||||
for (elem <- iter) -=(elem)
|
||||
repr
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -67,7 +67,7 @@ class HashMap[A, B] extends Map[A, B]
|
|||
}
|
||||
|
||||
/* Override to avoid tuple allocation in foreach */
|
||||
override def valuesIterable: collection.Iterable[B] = new DefaultValuesIterable {
|
||||
override def values: collection.Iterable[B] = new DefaultValuesIterable {
|
||||
override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
|
||||
}
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
package scala.collection
|
||||
package mutable
|
||||
|
||||
import annotation.migration
|
||||
|
||||
/** This class can be used as an adaptor to create mutable maps from
|
||||
* immutable map implementations. Only method <code>empty</code> has
|
||||
|
@ -41,19 +42,17 @@ extends Map[A, B]
|
|||
|
||||
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
|
||||
|
||||
override def keySet: scala.collection.Set[A] = imap.keySet
|
||||
override def keySet: collection.Set[A] = imap.keySet
|
||||
|
||||
override def keysIterator: Iterator[A] = imap.keysIterator
|
||||
|
||||
@deprecated("use `keysIterator' instead")
|
||||
override def keys: Iterator[A] = imap.keysIterator
|
||||
|
||||
override def valuesIterable: scala.collection.Iterable[B] = imap.valuesIterable
|
||||
@migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
|
||||
override def keys: collection.Iterable[A] = imap.keys
|
||||
|
||||
override def valuesIterator: Iterator[B] = imap.valuesIterator
|
||||
|
||||
@deprecated("use `valuesIterator' instead")
|
||||
override def values: Iterator[B] = imap.valuesIterator
|
||||
@migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
|
||||
override def values: collection.Iterable[B] = imap.values
|
||||
|
||||
def iterator: Iterator[(A, B)] = imap.iterator
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ trait Publisher[Evt] {
|
|||
def removeSubscriptions() { filters.clear }
|
||||
|
||||
protected def publish(event: Evt) {
|
||||
filters.keysIterator.foreach(sub =>
|
||||
filters.keys.foreach(sub =>
|
||||
if (!suspended.contains(sub) &&
|
||||
filters.entryExists(sub, p => p(event)))
|
||||
sub.notify(self, event)
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
package scala.collection
|
||||
package mutable
|
||||
|
||||
import annotation.migration
|
||||
|
||||
/** This class should be used as a mixin. It synchronizes the <code>Map</code>
|
||||
* functions of the class into which it is mixed in.
|
||||
|
@ -35,14 +36,15 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
|
|||
override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) }
|
||||
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
|
||||
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
|
||||
override def valuesIterable: scala.collection.Iterable[B] = synchronized { super.valuesIterable }
|
||||
@deprecated("Use `valuesIterator' instead") override def values: Iterator[B] = synchronized { super.valuesIterator }
|
||||
@migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
|
||||
override def values: collection.Iterable[B] = synchronized { super.values }
|
||||
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
|
||||
override def clone(): Self = synchronized { super.clone() }
|
||||
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
|
||||
override def apply(key: A): B = synchronized { super.apply(key) }
|
||||
override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
|
||||
@deprecated("Use `keysIterator' instead") override def keys: Iterator[A] = synchronized { super.keysIterator }
|
||||
override def keySet: collection.Set[A] = synchronized { super.keySet }
|
||||
@migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
|
||||
override def keys: collection.Iterable[A] = synchronized { super.keys }
|
||||
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
|
||||
override def isEmpty: Boolean = synchronized { super.isEmpty }
|
||||
override def contains(key: A): Boolean = synchronized {super.contains(key) }
|
||||
|
|
|
@ -51,7 +51,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
|
|||
log("advanceDFA(trans): " + trans)
|
||||
trans.get(ContentModel.ElemName(label)) match {
|
||||
case Some(qNew) => qCurrent = qNew
|
||||
case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keysIterator);
|
||||
case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
|
||||
}
|
||||
}
|
||||
// advance in current automaton
|
||||
|
|
|
@ -152,7 +152,7 @@ class ConsoleFileManager extends FileManager {
|
|||
)
|
||||
|
||||
// run setup based on most recent time
|
||||
pairs(pairs.keysIterator.toList max)()
|
||||
pairs(pairs.keys max)()
|
||||
|
||||
latestFjbgFile = prefixFile("lib/fjbg.jar")
|
||||
}
|
||||
|
|
|
@ -404,7 +404,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
|
|||
"\\$greater" -> ">", "\\$qmark" -> "?", "\\$percent" -> "%",
|
||||
"\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→",
|
||||
"\\$hash" -> "#")
|
||||
val pattern = Pattern.compile(_syms.keysIterator.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
|
||||
val pattern = Pattern.compile(_syms.keys.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
|
||||
val placeholderPattern = "_\\$(\\d)+"
|
||||
|
||||
private def stripPrivatePrefix(name: String) = {
|
||||
|
|
|
@ -2,7 +2,7 @@ migration28.scala:5: error: method ++= in class Stack is deprecated: use pushAll
|
|||
s ++= List(1,2,3)
|
||||
^
|
||||
migration28.scala:7: error: method foreach in class Stack has changed semantics:
|
||||
Stack iterator and foreach now traverse in FIFO order.
|
||||
Stack iterator and foreach now traverse in FIFO order.
|
||||
s foreach (_ => ())
|
||||
^
|
||||
two errors found
|
||||
|
|
|
@ -20,9 +20,8 @@ object Test {
|
|||
assertForeach(keys, m.keysIterator)
|
||||
assertForeach(keys, m.keySet)
|
||||
|
||||
assertForeach(values, m.valuesIterable.iterator)
|
||||
assertForeach(values, m.values.iterator)
|
||||
assertForeach(values, m.valuesIterator)
|
||||
assertForeach(values, m.valuesIterable)
|
||||
|
||||
assertForeach(entries, m)
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue