diff --git a/core/src/main/java/org/jruby/RubyHash.java b/core/src/main/java/org/jruby/RubyHash.java index b337f76a8b7..7f3e3f10540 100644 --- a/core/src/main/java/org/jruby/RubyHash.java +++ b/core/src/main/java/org/jruby/RubyHash.java @@ -314,7 +314,7 @@ private final void alloc(int buckets) { * ============================ */ - private static final int MRI_PRIMES[] = { + public static final int MRI_PRIMES[] = { 8 + 3, 16 + 3, 32 + 5, 64 + 3, 128 + 3, 256 + 27, 512 + 9, 1024 + 9, 2048 + 5, 4096 + 3, 8192 + 27, 16384 + 43, 32768 + 3, 65536 + 45, 131072 + 29, 262144 + 3, 524288 + 21, 1048576 + 7, 2097152 + 17, 4194304 + 15, 8388608 + 9, 16777216 + 43, 33554432 + 35, 67108864 + 15, diff --git a/core/src/main/java/org/jruby/truffle/nodes/RubyNode.java b/core/src/main/java/org/jruby/truffle/nodes/RubyNode.java index 00893d12182..8fe05988475 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/RubyNode.java +++ b/core/src/main/java/org/jruby/truffle/nodes/RubyNode.java @@ -10,7 +10,6 @@ package org.jruby.truffle.nodes; import com.oracle.truffle.api.CompilerAsserts; -import com.oracle.truffle.api.dsl.ImportGuards; import com.oracle.truffle.api.source.SourceSection; import com.oracle.truffle.api.dsl.TypeSystemReference; import com.oracle.truffle.api.frame.VirtualFrame; @@ -24,6 +23,7 @@ import org.jruby.truffle.runtime.core.RubyHash; import org.jruby.truffle.runtime.core.RubyRange; import org.jruby.truffle.runtime.core.RubyBasicObject; +import org.jruby.truffle.runtime.hash.HashSearchResult; import org.jruby.truffle.runtime.rubinius.RubiniusByteArray; import org.jruby.truffle.runtime.rubinius.RubiniusChannel; diff --git a/core/src/main/java/org/jruby/truffle/nodes/RubyTypes.java b/core/src/main/java/org/jruby/truffle/nodes/RubyTypes.java index dcd562f55a2..d2276ad0c62 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/RubyTypes.java +++ b/core/src/main/java/org/jruby/truffle/nodes/RubyTypes.java @@ -9,7 +9,6 @@ */ package org.jruby.truffle.nodes; -import 
com.oracle.truffle.api.dsl.ImplicitCast; import com.oracle.truffle.api.dsl.TypeSystem; import org.jruby.truffle.nodes.dispatch.Dispatch; import org.jruby.truffle.runtime.UndefinedPlaceholder; @@ -18,6 +17,7 @@ import org.jruby.truffle.runtime.core.RubyHash; import org.jruby.truffle.runtime.core.RubyRange; import org.jruby.truffle.runtime.core.RubyBasicObject; +import org.jruby.truffle.runtime.hash.HashSearchResult; import org.jruby.truffle.runtime.rubinius.RubiniusByteArray; import org.jruby.truffle.runtime.rubinius.RubiniusChannel; import org.jruby.truffle.runtime.LexicalScope; diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/ArrayNodes.java b/core/src/main/java/org/jruby/truffle/nodes/core/ArrayNodes.java index 0079703d603..9c76edbda1f 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/ArrayNodes.java +++ b/core/src/main/java/org/jruby/truffle/nodes/core/ArrayNodes.java @@ -1110,7 +1110,6 @@ public ClearNode(ClearNode prev) { @Specialization public RubyArray clear(RubyArray array) { notDesignedForCompilation(); - array.setSize(0); return array; } @@ -3551,6 +3550,43 @@ public RubyArray toA(RubyArray array) { } + @CoreMethod(names = "uniq") + public abstract static class UniqNode extends CoreMethodNode { + + public UniqNode(RubyContext context, SourceSection sourceSection) { + super(context, sourceSection); + } + + public UniqNode(UniqNode prev) { + super(prev); + } + + @Specialization + public RubyArray uniq(RubyArray array) { + notDesignedForCompilation(); + + final RubyArray uniq = new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); + + for (Object value : array.slowToArray()) { + boolean duplicate = false; + + for (Object compare : uniq.slowToArray()) { + if ((boolean) DebugOperations.send(getContext(), value, "==", null, compare)) { + duplicate = true; + break; + } + } + + if (!duplicate) { + uniq.slowPush(value); + } + } + + return uniq; + } + + } + @CoreMethod(names = "unshift", argumentsAsArray = true) public 
abstract static class UnshiftNode extends CoreMethodNode { diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/HashGuards.java b/core/src/main/java/org/jruby/truffle/nodes/core/HashGuards.java index 3b7576ae815..4872cde6d83 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/HashGuards.java +++ b/core/src/main/java/org/jruby/truffle/nodes/core/HashGuards.java @@ -10,8 +10,7 @@ package org.jruby.truffle.nodes.core; import org.jruby.truffle.runtime.core.RubyHash; - -import java.util.LinkedHashMap; +import org.jruby.truffle.runtime.hash.Entry; public class HashGuards { @@ -19,24 +18,8 @@ public static boolean isNull(RubyHash hash) { return hash.getStore() == null; } - public static boolean isObjectArray(RubyHash hash) { - return hash.getStore() instanceof Object[]; - } - - public static boolean isObjectLinkedHashMap(RubyHash hash) { - return hash.getStore() instanceof LinkedHashMap; - } - - public static boolean isOtherNull(RubyHash hash, RubyHash other) { - return other.getStore() == null; - } - - public static boolean isOtherObjectArray(RubyHash hash, RubyHash other) { - return other.getStore() instanceof Object[]; - } - - public static boolean isOtherObjectLinkedHashMap(RubyHash hash, RubyHash other) { - return other.getStore() instanceof LinkedHashMap; + public static boolean isBuckets(RubyHash hash) { + return hash.getStore() instanceof Entry[]; } } diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/HashNodes.java b/core/src/main/java/org/jruby/truffle/nodes/core/HashNodes.java index fe8e3ac79b3..c3d1b613c97 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/HashNodes.java +++ b/core/src/main/java/org/jruby/truffle/nodes/core/HashNodes.java @@ -9,8 +9,6 @@ */ package org.jruby.truffle.nodes.core; -import java.util.*; - import com.oracle.truffle.api.*; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.source.*; @@ -21,11 +19,20 @@ import org.jruby.truffle.nodes.RubyRootNode; import 
org.jruby.truffle.nodes.dispatch.DispatchHeadNode; import org.jruby.truffle.nodes.dispatch.PredicateDispatchHeadNode; +import org.jruby.truffle.nodes.hash.FindEntryNode; import org.jruby.truffle.nodes.yield.YieldDispatchHeadNode; import org.jruby.truffle.runtime.*; import org.jruby.truffle.runtime.core.*; import org.jruby.truffle.runtime.core.RubyArray; import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.Entry; +import org.jruby.truffle.runtime.hash.HashSearchResult; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; @CoreClass(name = "Hash") public abstract class HashNodes { @@ -45,88 +52,48 @@ public EqualNode(EqualNode prev) { equalNode = prev.equalNode; } - @Specialization(guards = {"isNull", "isOtherNull"}) + @Specialization(guards = {"isNull", "isNull(arguments[1])"}) public boolean equalNull(RubyHash a, RubyHash b) { return true; } - @Specialization(guards = {"isObjectArray", "isOtherObjectArray"}) - public boolean equalObjectArray(VirtualFrame frame, RubyHash a, RubyHash b) { + @Specialization + public boolean equal(VirtualFrame frame, RubyHash a, RubyHash b) { notDesignedForCompilation(); - if (a == b) { - return true; - } - - final Object[] aStore = (Object[]) a.getStore(); - final int aSize = a.getStoreSize(); - - final Object[] bStore = (Object[]) b.getStore(); - final int bSize = b.getStoreSize(); + final List aEntries = HashOperations.verySlowToKeyValues(a); + final List bEntries = HashOperations.verySlowToKeyValues(a); - if (aSize != bSize) { + if (aEntries.size() != bEntries.size()) { return false; } - // TODO(CS): this is badly broken - I think it assumes the hash is ordered? 
- - for (int n = 0; n < aSize * 2; n++) { - if (!equalNode.call(frame, aStore[n], "==", null, bStore[n])) { - return false; - } - } - - return true; - } - - @Specialization(guards = {"isObjectLinkedHashMap", "isOtherObjectLinkedHashMap"}) - public boolean equalObjectLinkedHashMap(RubyHash a, RubyHash b) { - notDesignedForCompilation(); - throw new UnsupportedOperationException(); - } + // For each entry in a, check that there is a corresponding entry in b, and don't use entries in b more than once - @Specialization(guards = {"isObjectLinkedHashMap", "isOtherObjectArray"}) - public boolean equalObjectLinkedHashMapArray(VirtualFrame frame, RubyHash a, RubyHash b) { - notDesignedForCompilation(); - - final LinkedHashMap aStore = (LinkedHashMap) a.getStore(); - final int aSize = a.getStoreSize(); - - final Object[] bStore = (Object[]) b.getStore(); - final int bSize = b.getStoreSize(); - - if (aSize != bSize) { - return false; - } + final boolean[] bUsed = new boolean[bEntries.size()]; - // TODO(CS): this is crap - doesn't check for duplicates or anything - badly need to improve the Hash stuff + for (KeyValue aKeyValue : aEntries) { + boolean found = false; - for (Map.Entry entry : aStore.entrySet()) { - boolean match = false; + for (int n = 0; n < bEntries.size(); n++) { + if (!bUsed[n]) { + // TODO: cast - for (int n = 0; n < aSize * 2; n += 1) { - if (equalNode.call(frame, entry.getKey(), "==", null, bStore[n]) && equalNode.call(frame, entry.getValue(), "==", null, bStore[n + 1])) { - match = true; + if ((boolean) DebugOperations.send(getContext(), aKeyValue.getKey(), "eql?", null, bEntries.get(n).getKey())) { + bUsed[n] = true; + found = true; + break; + } } } - if (!match) { + if (!found) { return false; } } return true; } - - @Specialization(guards = "!isHash(arguments[1])") - public boolean equal(RubyHash a, Object b) { - notDesignedForCompilation(); - return false; - } - - protected boolean isHash(Object object) { - return object instanceof RubyHash; - } } 
@CoreMethod(names = "[]", onSingleton = true, argumentsAsArray = true) @@ -135,8 +102,8 @@ public abstract static class ConstructNode extends HashCoreMethodNode { private final BranchProfile singleObject = new BranchProfile(); private final BranchProfile singleArray = new BranchProfile(); private final BranchProfile objectArray = new BranchProfile(); - private final BranchProfile smallObjectArray = new BranchProfile(); - private final BranchProfile largeObjectArray = new BranchProfile(); + private final BranchProfile smallPackedArray = new BranchProfile(); + private final BranchProfile largePackedArray = new BranchProfile(); private final BranchProfile otherArray = new BranchProfile(); private final BranchProfile singleOther = new BranchProfile(); private final BranchProfile keyValues = new BranchProfile(); @@ -169,13 +136,13 @@ public RubyHash construct(Object[] args) { // TODO(CS): zero length arrays might be a good specialisation - if (store.length <= RubyHash.HASHES_SMALL) { - smallObjectArray.enter(); + if (store.length <= HashOperations.SMALL_HASH_SIZE) { + smallPackedArray.enter(); final int size = store.length; - final Object[] newStore = new Object[RubyHash.HASHES_SMALL * 2]; + final Object[] newStore = new Object[HashOperations.SMALL_HASH_SIZE * 2]; - for (int n = 0; n < RubyHash.HASHES_SMALL; n++) { + for (int n = 0; n < HashOperations.SMALL_HASH_SIZE; n++) { if (n < size) { final Object pair = store[n]; @@ -198,9 +165,9 @@ public RubyHash construct(Object[] args) { } } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, newStore, size); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, newStore, size, null); } else { - largeObjectArray.enter(); + largePackedArray.enter(); throw new UnsupportedOperationException(); } } else { @@ -213,20 +180,15 @@ public RubyHash construct(Object[] args) { } } else { keyValues.enter(); - // Slow because we don't want the PE to see the hash map at all - return 
constructObjectLinkedMapMap(args); - } - } - @CompilerDirectives.SlowPath - public RubyHash constructObjectLinkedMapMap(Object[] args) { - final LinkedHashMap store = new LinkedHashMap<>(); + final List entries = new ArrayList<>(); - for (int n = 0; n < args.length; n += 2) { - store.put(args[n], args[n + 1]); - } + for (int n = 0; n < args.length; n += 2) { + entries.add(new KeyValue(args[n], args[n + 1])); + } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, store, 0); + return HashOperations.verySlowFromEntries(getContext(), entries); + } } } @@ -236,6 +198,7 @@ public abstract static class GetIndexNode extends HashCoreMethodNode { @Child protected PredicateDispatchHeadNode eqlNode; @Child protected YieldDispatchHeadNode yield; + @Child protected FindEntryNode findEntryNode; private final BranchProfile notInHashProfile = new BranchProfile(); private final BranchProfile useDefaultProfile = new BranchProfile(); @@ -244,12 +207,14 @@ public GetIndexNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); eqlNode = new PredicateDispatchHeadNode(context); yield = new YieldDispatchHeadNode(context); + findEntryNode = new FindEntryNode(context, sourceSection); } public GetIndexNode(GetIndexNode prev) { super(prev); eqlNode = prev.eqlNode; yield = prev.yield; + findEntryNode = prev.findEntryNode; } @Specialization(guards = "isNull") @@ -266,12 +231,12 @@ public Object getNull(VirtualFrame frame, RubyHash hash, Object key) { } @ExplodeLoop - @Specialization(guards = "isObjectArray") - public Object getObjectArray(VirtualFrame frame, RubyHash hash, Object key) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public Object getPackedArray(VirtualFrame frame, RubyHash hash, Object key) { final Object[] store = (Object[]) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); - for (int n = 0; n < RubyHash.HASHES_SMALL; n++) { + for (int n = 0; n < 
HashOperations.SMALL_HASH_SIZE; n++) { if (n < size && eqlNode.call(frame, store[n * 2], "eql?", null, key)) { return store[n * 2 + 1]; } @@ -292,27 +257,28 @@ public Object getObjectArray(VirtualFrame frame, RubyHash hash, Object key) { } - @Specialization(guards = "isObjectLinkedHashMap") - public Object getObjectLinkedHashMap(VirtualFrame frame, RubyHash hash, Object key) { + @Specialization(guards = "isBuckets") + public Object getBuckets(VirtualFrame frame, RubyHash hash, Object key) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); + final HashSearchResult hashSearchResult = findEntryNode.search(frame, hash, key); - // TODO(CS): not correct - using Java's Object#equals + if (hashSearchResult.getEntry() != null) { + return hashSearchResult.getEntry().getValue(); + } - final Object value = store.get(key); + notInHashProfile.enter(); - if (value == null) { - if (hash.getDefaultBlock() != null) { - return yield.dispatch(frame, hash.getDefaultBlock(), hash, key); - } else if (hash.getDefaultValue() != null) { - return hash.getDefaultValue(); - } else { - return getContext().getCoreLibrary().getNilObject(); - } + if (hash.getDefaultBlock() != null) { + useDefaultProfile.enter(); + return yield.dispatch(frame, hash.getDefaultBlock(), hash, key); } - return value; + if (hash.getDefaultValue() != null) { + return hash.getDefaultValue(); + } + + return getContext().getCoreLibrary().getNilObject(); } } @@ -324,7 +290,6 @@ public abstract static class SetIndexNode extends HashCoreMethodNode { private final BranchProfile considerExtendProfile = new BranchProfile(); private final BranchProfile extendProfile = new BranchProfile(); - private final BranchProfile transitionToLinkedHashMapProfile = new BranchProfile(); public SetIndexNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); @@ -339,22 +304,22 @@ public SetIndexNode(SetIndexNode prev) { @Specialization(guards = "isNull") public Object 
setNull(RubyHash hash, Object key, Object value) { hash.checkFrozen(this); - final Object[] store = new Object[RubyHash.HASHES_SMALL * 2]; + final Object[] store = new Object[HashOperations.SMALL_HASH_SIZE * 2]; store[0] = key; store[1] = value; - hash.setStore(store, 1); + hash.setStore(store, 1, null, null); return value; } @ExplodeLoop - @Specialization(guards = "isObjectArray") - public Object setObjectArray(VirtualFrame frame, RubyHash hash, Object key, Object value) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public Object setPackedArray(VirtualFrame frame, RubyHash hash, Object key, Object value) { hash.checkFrozen(this); final Object[] store = (Object[]) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); - for (int n = 0; n < RubyHash.HASHES_SMALL; n++) { + for (int n = 0; n < HashOperations.SMALL_HASH_SIZE; n++) { if (n < size && eqlNode.call(frame, store[n * 2], "eql?", null, key)) { store[n * 2 + 1] = value; return value; @@ -365,44 +330,64 @@ public Object setObjectArray(VirtualFrame frame, RubyHash hash, Object key, Obje final int newSize = size + 1; - if (newSize <= RubyHash.HASHES_SMALL) { + if (newSize <= HashOperations.SMALL_HASH_SIZE) { extendProfile.enter(); store[size * 2] = key; store[size * 2 + 1] = value; - hash.setStoreSize(newSize); + hash.setSize(newSize); return value; } + CompilerDirectives.transferToInterpreter(); + + // TODO(CS): need to watch for that transfer until we make the following fast path - transitionToLinkedHashMapProfile.enter(); + final List entries = HashOperations.verySlowToKeyValues(hash); + + hash.setStore(new Entry[HashOperations.capacityGreaterThan(newSize)], newSize, null, null); + + for (KeyValue keyValue : entries) { + HashOperations.verySlowSetInBuckets(hash, keyValue.getKey(), keyValue.getValue()); + } + + HashOperations.verySlowSetInBuckets(hash, key, value); - transitionToLinkedHashMap(hash, store, key, value); return value; } - 
@CompilerDirectives.SlowPath - private void transitionToLinkedHashMap(RubyHash hash, Object[] oldStore, Object key, Object value) { - final LinkedHashMap newStore = new LinkedHashMap<>(); + @Specialization(guards = "isBuckets") + public Object setBuckets(RubyHash hash, Object key, Object value) { + notDesignedForCompilation(); - for (int n = 0; n < oldStore.length; n += 2) { - newStore.put(oldStore[n], oldStore[n + 1]); + if (HashOperations.verySlowSetInBuckets(hash, key, value)) { + hash.setSize(hash.getSize() + 1); } - newStore.put(key, value); - hash.setStore(newStore, 0); + return value; } - @Specialization(guards = "isObjectLinkedHashMap") - public Object setObjectLinkedHashMap(RubyHash hash, Object key, Object value) { - notDesignedForCompilation(); + } - hash.checkFrozen(this); + @CoreMethod(names = "clear") + public abstract static class ClearNode extends HashCoreMethodNode { - // TODO(CS): not correct - using Java's Object#equals + public ClearNode(RubyContext context, SourceSection sourceSection) { + super(context, sourceSection); + } - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - store.put(key, value); - return value; + public ClearNode(ClearNode prev) { + super(prev); + } + + @Specialization(guards = "isNull") + public RubyHash emptyNull(RubyHash hash) { + return hash; + } + + @Specialization(guards = "!isNull") + public RubyHash empty(RubyHash hash) { + hash.setStore(null, 0, null, null); + return hash; } } @@ -410,12 +395,19 @@ public Object setObjectLinkedHashMap(RubyHash hash, Object key, Object value) { @CoreMethod(names = "delete", required = 1) public abstract static class DeleteNode extends HashCoreMethodNode { + @Child protected PredicateDispatchHeadNode eqlNode; + @Child protected FindEntryNode findEntryNode; + public DeleteNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); + eqlNode = new PredicateDispatchHeadNode(context); + findEntryNode = new FindEntryNode(context, sourceSection); } 
public DeleteNode(DeleteNode prev) { super(prev); + eqlNode = prev.eqlNode; + findEntryNode = prev.findEntryNode; } @Specialization(guards = "isNull") @@ -424,51 +416,66 @@ public RubyNilClass deleteNull(RubyHash hash, Object key) { return getContext().getCoreLibrary().getNilObject(); } - @Specialization(guards = "isObjectArray") - public Object deleteObjectArray(RubyHash hash, Object key) { - notDesignedForCompilation(); + @Specialization(guards = {"!isNull", "!isBuckets"}) + public Object deletePackedArray(VirtualFrame frame, RubyHash hash, Object key) { + hash.checkFrozen(this); - // TODO(CS): seriously not correct + final Object[] store = (Object[]) hash.getStore(); + final int size = hash.getSize(); - hash.checkFrozen(this); + for (int n = 0; n < HashOperations.SMALL_HASH_SIZE * 2; n += 2) { + if (n < size && eqlNode.call(frame, store[n], "eql?", null, key)) { + final Object value = store[n + 1]; - final Object[] oldStore = (Object[]) hash.getStore(); + // Move the later values down + System.arraycopy(store, n + 2, store, n, HashOperations.SMALL_HASH_SIZE * 2 - n - 2); - final LinkedHashMap newStore = new LinkedHashMap<>(); - hash.setStore(newStore, 0); + hash.setSize(size - 1); - for (int n = 0; n < hash.getStoreSize(); n++) { - newStore.put(oldStore[n * 2], oldStore[n * 2 + 1]); + return value; + } } - // TODO(CS): seriously not correct - using Java's Object#equals + return getContext().getCoreLibrary().getNilObject(); + } - final Object removed = newStore.remove(key); + @Specialization(guards = "isBuckets") + public Object delete(VirtualFrame frame, RubyHash hash, Object key) { + notDesignedForCompilation(); + + final HashSearchResult hashSearchResult = findEntryNode.search(frame, hash, key); - if (removed == null) { + if (hashSearchResult.getEntry() == null) { return getContext().getCoreLibrary().getNilObject(); - } else { - return removed; } - } - @Specialization(guards = "isObjectLinkedHashMap") - public Object delete(RubyHash hash, Object key) { - 
notDesignedForCompilation(); + final Entry entry = hashSearchResult.getEntry(); - hash.checkFrozen(this); + // Remove from the sequence chain - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); + if (entry.getPreviousInSequence() == null) { + hash.setFirstInSequence(entry.getNextInSequence()); + } else { + entry.getPreviousInSequence().setNextInSequence(entry.getNextInSequence()); + } - // TODO(CS): seriously not correct - using Java's Object#equals + if (entry.getNextInSequence() == null) { + hash.setLastInSequence(entry.getPreviousInSequence()); + } else { + entry.getNextInSequence().setPreviousInSequence(entry.getPreviousInSequence()); + } - final Object removed = store.remove(key); + // Remove from the lookup chain - if (removed == null) { - return getContext().getCoreLibrary().getNilObject(); + if (hashSearchResult.getPreviousEntry() == null) { + ((Entry[]) hash.getStore())[hashSearchResult.getIndex()] = entry.getNextInLookup(); } else { - return removed; + hashSearchResult.getPreviousEntry().setNextInLookup(entry.getNextInLookup()); } + + hash.setSize(hash.getSize() - 1); + + return entry.getValue(); } } @@ -493,17 +500,17 @@ public RubyHash eachNull(RubyHash hash, RubyProc block) { } @ExplodeLoop - @Specialization(guards = "isObjectArray") - public RubyHash eachObjectArray(VirtualFrame frame, RubyHash hash, RubyProc block) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public RubyHash eachPackedArray(VirtualFrame frame, RubyHash hash, RubyProc block) { notDesignedForCompilation(); final Object[] store = (Object[]) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); int count = 0; try { - for (int n = 0; n < RubyHash.HASHES_SMALL; n++) { + for (int n = 0; n < HashOperations.SMALL_HASH_SIZE; n++) { if (CompilerDirectives.inInterpreter()) { count++; } @@ -521,26 +528,12 @@ public RubyHash eachObjectArray(VirtualFrame frame, RubyHash hash, RubyProc bloc return hash; } - @Specialization(guards = 
"isObjectLinkedHashMap") - public RubyHash eachObjectLinkedHashMap(VirtualFrame frame, RubyHash hash, RubyProc block) { + @Specialization(guards = "isBuckets") + public RubyHash eachBuckets(VirtualFrame frame, RubyHash hash, RubyProc block) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - int count = 0; - - try { - for (Map.Entry entry : store.entrySet()) { - if (CompilerDirectives.inInterpreter()) { - count++; - } - - yield(frame, block, RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), entry.getKey(), entry.getValue())); - } - } finally { - if (CompilerDirectives.inInterpreter()) { - ((RubyRootNode) getRootNode()).reportLoopCountThroughBlocks(count); - } + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + yield(frame, block, RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), keyValue.getKey(), keyValue.getValue())); } return hash; @@ -564,17 +557,9 @@ public boolean emptyNull(RubyHash hash) { return true; } - @Specialization(guards = "isObjectArray") - public boolean emptyObjectArray(RubyHash hash) { - return hash.getStoreSize() == 0; - } - - @Specialization(guards = "isObjectLinkedHashMap") - public boolean emptyObjectLinkedHashMap(RubyHash hash) { - notDesignedForCompilation(); - - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - return store.isEmpty(); + @Specialization(guards = "!isNull") + public boolean emptyPackedArray(RubyHash hash) { + return hash.getSize() == 0; } } @@ -593,7 +578,7 @@ public InitializeNode(InitializeNode prev) { @Specialization public RubyNilClass initialize(RubyHash hash, UndefinedPlaceholder defaultValue, UndefinedPlaceholder block) { notDesignedForCompilation(); - hash.setStore(null, 0); + hash.setStore(null, 0, null, null); hash.setDefaultBlock(null); return getContext().getCoreLibrary().getNilObject(); } @@ -601,7 +586,7 @@ public RubyNilClass initialize(RubyHash hash, UndefinedPlaceholder defaultValue, 
@Specialization public RubyNilClass initialize(RubyHash hash, UndefinedPlaceholder defaultValue, RubyProc block) { notDesignedForCompilation(); - hash.setStore(null, 0); + hash.setStore(null, 0, null, null); hash.setDefaultBlock(block); return getContext().getCoreLibrary().getNilObject(); } @@ -626,7 +611,7 @@ public InitializeCopyNode(InitializeCopyNode prev) { super(prev); } - @Specialization(guards = "isOtherNull") + @Specialization(guards = "isNull(arguments[1])") public RubyHash dupNull(RubyHash self, RubyHash from) { notDesignedForCompilation(); @@ -636,13 +621,13 @@ public RubyHash dupNull(RubyHash self, RubyHash from) { self.setDefaultBlock(from.getDefaultBlock()); self.setDefaultValue(from.getDefaultValue()); - self.setStore(null, 0); + self.setStore(null, 0, null, null); return self; } - @Specialization(guards = "isOtherObjectArray") - public RubyHash dupObjectArray(RubyHash self, RubyHash from) { + @Specialization(guards = {"!isNull(arguments[1])", "!isBuckets(arguments[1])"}) + public RubyHash dupPackedArray(RubyHash self, RubyHash from) { notDesignedForCompilation(); if (self == from) { @@ -650,25 +635,22 @@ public RubyHash dupObjectArray(RubyHash self, RubyHash from) { } final Object[] store = (Object[]) from.getStore(); - self.setStore(Arrays.copyOf(store, RubyHash.HASHES_SMALL * 2), store.length); + self.setStore(Arrays.copyOf(store, HashOperations.SMALL_HASH_SIZE * 2), store.length, null, null); self.setDefaultBlock(from.getDefaultBlock()); self.setDefaultValue(from.getDefaultValue()); return self; } - @Specialization(guards = "isOtherObjectLinkedHashMap") - public RubyHash dupObjectLinkedHashMap(RubyHash self, RubyHash from) { + @Specialization(guards = "isBuckets(arguments[1])") + public RubyHash dupBuckets(RubyHash self, RubyHash from) { notDesignedForCompilation(); if (self == from) { return self; } - final LinkedHashMap store = (LinkedHashMap) from.getStore(); - self.setStore(new LinkedHashMap<>(store), store.size()); - 
self.setDefaultBlock(from.getDefaultBlock()); - self.setDefaultValue(from.getDefaultValue()); + HashOperations.verySlowSetKeyValues(self, HashOperations.verySlowToKeyValues(from)); return self; } @@ -697,55 +679,24 @@ public RubyString inspectNull(RubyHash hash) { return getContext().makeString("{}"); } - @Specialization(guards = "isObjectArray") - public RubyString inspectObjectArray(VirtualFrame frame, RubyHash hash) { + @Specialization + public RubyString inspectPackedArray(VirtualFrame frame, RubyHash hash) { notDesignedForCompilation(); - final Object[] store = (Object[]) hash.getStore(); - final StringBuilder builder = new StringBuilder(); builder.append("{"); - for (int n = 0; n < hash.getStoreSize(); n++) { - if (n > 0) { + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + if (builder.length() > 1) { builder.append(", "); } // TODO(CS): to string - builder.append(inspect.call(frame, store[n * 2], "inspect", null)); + builder.append(inspect.call(frame, keyValue.getKey(), "inspect", null)); builder.append("=>"); - builder.append(inspect.call(frame, store[n * 2 + 1], "inspect", null)); - } - - builder.append("}"); - - return getContext().makeString(builder.toString()); - } - - @Specialization(guards = "isObjectLinkedHashMap") - public RubyString inspectObjectLinkedHashMap(VirtualFrame frame, RubyHash hash) { - notDesignedForCompilation(); - - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - final StringBuilder builder = new StringBuilder(); - - builder.append("{"); - - boolean first = true; - - for (Map.Entry entry : store.entrySet()) { - if (first) { - first = false; - } else { - builder.append(", "); - } - - builder.append(inspect.call(frame, entry.getKey(), "inspect", null)); - builder.append("=>"); - builder.append(inspect.call(frame, entry.getValue(), "inspect", null)); + builder.append(inspect.call(frame, keyValue.getValue(), "inspect", null)); } builder.append("}"); @@ -775,11 +726,11 @@ public boolean 
keyNull(RubyHash hash, Object key) { return false; } - @Specialization(guards = "isObjectArray") - public boolean keyObjectArray(VirtualFrame frame, RubyHash hash, Object key) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public boolean keyPackedArray(VirtualFrame frame, RubyHash hash, Object key) { notDesignedForCompilation(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); final Object[] store = (Object[]) hash.getStore(); for (int n = 0; n < store.length; n += 2) { @@ -791,15 +742,17 @@ public boolean keyObjectArray(VirtualFrame frame, RubyHash hash, Object key) { return false; } - @Specialization(guards = "isObjectLinkedHashMap") - public boolean keyObjectLinkedHashMap(RubyHash hash, Object key) { + @Specialization(guards = "isBuckets") + public boolean keyBuckets(VirtualFrame frame, RubyHash hash, Object key) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - // TODO(CS): seriously not correct - using Java's Object#equals + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + if (eqlNode.call(frame, keyValue.getKey(), "eql?", null, key)) { + return true; + } + } - return store.containsKey(key); + return false; } } @@ -820,13 +773,13 @@ public RubyArray keysNull(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } - @Specialization(guards = "isObjectArray") - public RubyArray keysObjectArray(RubyHash hash) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public RubyArray keysPackedArray(RubyHash hash) { notDesignedForCompilation(); final Object[] store = (Object[]) hash.getStore(); - final Object[] keys = new Object[hash.getStoreSize()]; + final Object[] keys = new Object[hash.getSize()]; for (int n = 0; n < keys.length; n++) { keys[n] = store[n * 2]; @@ -835,19 +788,19 @@ public RubyArray keysObjectArray(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), keys, 
keys.length); } - @Specialization(guards = "isObjectLinkedHashMap") - public RubyArray keysObjectLinkedHashMap(RubyHash hash) { + @Specialization(guards = "isBuckets") + public RubyArray keysBuckets(RubyHash hash) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - final Object[] keys = new Object[store.size()]; + final Object[] keys = new Object[hash.getSize()]; + Entry entry = hash.getFirstInSequence(); int n = 0; - for (Object key : store.keySet()) { - keys[n] = key; + while (entry != null) { + keys[n] = entry.getKey(); n++; + entry = entry.getNextInSequence(); } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), keys, keys.length); @@ -868,10 +821,10 @@ public MapNode(MapNode prev) { } @ExplodeLoop - @Specialization(guards = "isObjectArray") - public RubyArray mapObjectArray(VirtualFrame frame, RubyHash hash, RubyProc block) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public RubyArray mapPackedArray(VirtualFrame frame, RubyHash hash, RubyProc block) { final Object[] store = (Object[]) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); final int resultSize = store.length / 2; final Object[] result = new Object[resultSize]; @@ -879,7 +832,7 @@ public RubyArray mapObjectArray(VirtualFrame frame, RubyHash hash, RubyProc bloc int count = 0; try { - for (int n = 0; n < RubyHash.HASHES_SMALL; n++) { + for (int n = 0; n < HashOperations.SMALL_HASH_SIZE; n++) { if (n < size) { final Object key = store[n * 2]; final Object value = store[n * 2 + 1]; @@ -899,31 +852,17 @@ public RubyArray mapObjectArray(VirtualFrame frame, RubyHash hash, RubyProc bloc return new RubyArray(getContext().getCoreLibrary().getArrayClass(), result, resultSize); } - @Specialization(guards = "isObjectLinkedHashMap") - public RubyArray mapObjectLinkedHashMap(VirtualFrame frame, RubyHash hash, RubyProc block) { + @Specialization(guards = "isBuckets") + public RubyArray 
mapBuckets(VirtualFrame frame, RubyHash hash, RubyProc block) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - final RubyArray result = new RubyArray(getContext().getCoreLibrary().getArrayClass()); - - int count = 0; + final RubyArray array = new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); - try { - for (Map.Entry entry : store.entrySet()) { - if (CompilerDirectives.inInterpreter()) { - count++; - } - - result.slowPush(yield(frame, block, entry.getKey(), entry.getValue())); - } - } finally { - if (CompilerDirectives.inInterpreter()) { - ((RubyRootNode) getRootNode()).reportLoopCountThroughBlocks(count); - } + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + array.slowPush(yield(frame, block, keyValue.getKey(), keyValue.getValue())); } - return result; + return array; } } @@ -939,7 +878,7 @@ public abstract static class MergeNode extends HashCoreMethodNode { private final BranchProfile considerResultIsSmallProfile = new BranchProfile(); private final BranchProfile resultIsSmallProfile = new BranchProfile(); - private final int smallHashSize = RubyHash.HASHES_SMALL; + private final int smallHashSize = HashOperations.SMALL_HASH_SIZE; public MergeNode(RubyContext context, SourceSection sourceSection) { super(context, sourceSection); @@ -951,33 +890,33 @@ public MergeNode(MergeNode prev) { eqlNode = prev.eqlNode; } - @Specialization(guards = {"isObjectArray", "isOtherNull"}) - public RubyHash mergeObjectArrayNull(RubyHash hash, RubyHash other) { + @Specialization(guards = {"!isNull", "!isBuckets", "isNull(arguments[1])"}) + public RubyHash mergePackedArrayNull(RubyHash hash, RubyHash other) { final Object[] store = (Object[]) hash.getStore(); - final Object[] copy = Arrays.copyOf(store, RubyHash.HASHES_SMALL * 2); + final Object[] copy = Arrays.copyOf(store, HashOperations.SMALL_HASH_SIZE * 2); - return new RubyHash(getContext().getCoreLibrary().getHashClass(), 
hash.getDefaultBlock(), hash.getDefaultValue(), copy, hash.getStoreSize()); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), copy, hash.getSize(), null); } @ExplodeLoop - @Specialization(guards = {"isObjectArray", "isOtherObjectArray"}) - public RubyHash mergeObjectArrayObjectArray(VirtualFrame frame, RubyHash hash, RubyHash other) { + @Specialization(guards = {"!isNull", "!isBuckets", "!isNull(arguments[1])", "!isBuckets(arguments[1])"}) + public RubyHash mergePackedArrayPackedArray(VirtualFrame frame, RubyHash hash, RubyHash other) { // TODO(CS): what happens with the default block here? Which side does it get merged from? final Object[] storeA = (Object[]) hash.getStore(); - final int storeASize = hash.getStoreSize(); + final int storeASize = hash.getSize(); final Object[] storeB = (Object[]) other.getStore(); - final int storeBSize = hash.getStoreSize(); + final int storeBSize = hash.getSize(); final boolean[] mergeFromA = new boolean[storeASize]; int mergeFromACount = 0; - for (int a = 0; a < RubyHash.HASHES_SMALL; a++) { + for (int a = 0; a < HashOperations.SMALL_HASH_SIZE; a++) { if (a < storeASize) { boolean merge = true; - for (int b = 0; b < RubyHash.HASHES_SMALL; b++) { + for (int b = 0; b < HashOperations.SMALL_HASH_SIZE; b++) { if (b < storeBSize) { if (eqlNode.call(frame, storeA[a * 2], "eql?", null, storeB[b * 2])) { merge = false; @@ -996,14 +935,14 @@ public RubyHash mergeObjectArrayObjectArray(VirtualFrame frame, RubyHash hash, R if (mergeFromACount == 0) { nothingFromFirstProfile.enter(); - return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), Arrays.copyOf(storeB, RubyHash.HASHES_SMALL * 2), storeBSize); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), Arrays.copyOf(storeB, HashOperations.SMALL_HASH_SIZE * 2), storeBSize, null); } 
considerNothingFromSecondProfile.enter(); if (mergeFromACount == storeB.length) { nothingFromSecondProfile.enter(); - return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), Arrays.copyOf(storeB, RubyHash.HASHES_SMALL * 2), storeBSize); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), Arrays.copyOf(storeB, HashOperations.SMALL_HASH_SIZE * 2), storeBSize, null); } considerResultIsSmallProfile.enter(); @@ -1013,7 +952,7 @@ public RubyHash mergeObjectArrayObjectArray(VirtualFrame frame, RubyHash hash, R if (storeBSize + mergeFromACount <= smallHashSize) { resultIsSmallProfile.enter(); - final Object[] merged = new Object[RubyHash.HASHES_SMALL * 2]; + final Object[] merged = new Object[HashOperations.SMALL_HASH_SIZE * 2]; int index = 0; @@ -1031,13 +970,36 @@ public RubyHash mergeObjectArrayObjectArray(VirtualFrame frame, RubyHash hash, R index += 2; } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), merged, mergedSize); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), hash.getDefaultBlock(), hash.getDefaultValue(), merged, mergedSize, null); } CompilerDirectives.transferToInterpreter(); throw new UnsupportedOperationException(); } + + @Specialization + public RubyHash mergeBucketsBuckets(RubyHash hash, RubyHash other) { + final RubyHash merged = new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, new Entry[HashOperations.capacityGreaterThan(hash.getSize() + other.getSize())], 0, null); + + int size = 0; + + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + HashOperations.verySlowSetInBuckets(merged, keyValue.getKey(), keyValue.getValue()); + size++; + } + + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(other)) { + if (HashOperations.verySlowSetInBuckets(merged, keyValue.getKey(), keyValue.getValue())) { + 
size++; + } + } + + merged.setSize(size); + + return merged; + } + } @CoreMethod(names = "default", optional = 1) @@ -1092,15 +1054,9 @@ public int sizeNull(RubyHash hash) { return 0; } - @Specialization(guards = "isObjectArray") - public int sizeObjectArray(RubyHash hash) { - return hash.getStoreSize(); - } - - @Specialization(guards = "isObjectLinkedHashMap") - public int sizeObjectLinkedHashMap(RubyHash hash) { - notDesignedForCompilation(); - return ((LinkedHashMap) hash.getStore()).size(); + @Specialization(guards = "!isNull") + public int sizePackedArray(RubyHash hash) { + return hash.getSize(); } } @@ -1121,11 +1077,11 @@ public RubyArray valuesNull(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } - @Specialization(guards = "isObjectArray") - public RubyArray valuesObjectArray(RubyHash hash) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public RubyArray valuesPackedArray(RubyHash hash) { final Object[] store = (Object[]) hash.getStore(); - final Object[] values = new Object[hash.getStoreSize()]; + final Object[] values = new Object[hash.getSize()]; for (int n = 0; n < values.length; n++) { values[n] = store[n * 2 + 1]; @@ -1134,19 +1090,19 @@ public RubyArray valuesObjectArray(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), values, values.length); } - @Specialization(guards = "isObjectLinkedHashMap") - public RubyArray valuesObjectLinkedHashMap(RubyHash hash) { + @Specialization(guards = "isBuckets") + public RubyArray valuesBuckets(RubyHash hash) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - - final Object[] values = new Object[store.size()]; + final Object[] values = new Object[hash.getSize()]; + Entry entry = hash.getFirstInSequence(); int n = 0; - for (Object value : store.values()) { - values[n] = value; + while (entry != null) { + values[n] = entry.getValue(); n++; + entry = entry.getNextInSequence(); } 
return new RubyArray(getContext().getCoreLibrary().getArrayClass(), values, values.length); @@ -1172,12 +1128,12 @@ public RubyArray toArrayNull(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), null, 0); } - @Specialization(guards = "isObjectArray") - public RubyArray toArrayObjectArray(RubyHash hash) { + @Specialization(guards = {"!isNull", "!isBuckets"}) + public RubyArray toArrayPackedArray(RubyHash hash) { notDesignedForCompilation(); final Object[] store = (Object[]) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); final Object[] pairs = new Object[size]; for (int n = 0; n < size; n++) { @@ -1187,18 +1143,18 @@ public RubyArray toArrayObjectArray(RubyHash hash) { return new RubyArray(getContext().getCoreLibrary().getArrayClass(), pairs, size); } - @Specialization(guards = "isObjectLinkedHashMap") - public RubyArray toArrayLinkedHashMap(RubyHash hash) { + @Specialization(guards = "isBuckets") + public RubyArray toArrayBuckets(RubyHash hash) { notDesignedForCompilation(); - final LinkedHashMap store = (LinkedHashMap) hash.getStore(); - final int size = hash.getStoreSize(); + final int size = hash.getSize(); final Object[] pairs = new Object[size]; + int n = 0; - for (Map.Entry pair : store.entrySet()) { - pairs[n] = RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), pair.getKey(), pair.getValue()); - n += 1; + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + pairs[n] = RubyArray.fromObjects(getContext().getCoreLibrary().getArrayClass(), keyValue.getValue(), keyValue.getValue()); + n++; } return new RubyArray(getContext().getCoreLibrary().getArrayClass(), pairs, size); diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/KernelNodes.java b/core/src/main/java/org/jruby/truffle/nodes/core/KernelNodes.java index 9c3702c4700..0aca28a450b 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/KernelNodes.java +++ 
b/core/src/main/java/org/jruby/truffle/nodes/core/KernelNodes.java @@ -37,6 +37,8 @@ import org.jruby.truffle.runtime.core.*; import org.jruby.truffle.runtime.core.RubyArray; import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; import org.jruby.truffle.runtime.methods.RubyMethod; import org.jruby.util.cli.Options; @@ -596,9 +598,8 @@ private static void exec(RubyContext context, String[] commandLine) { final RubyHash env = context.getCoreLibrary().getENV(); - // TODO(CS): cast - for (Map.Entry entry : ((LinkedHashMap) env.getStore()).entrySet()) { - builder.environment().put(entry.getKey().toString(), entry.getValue().toString()); + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(env)) { + builder.environment().put(keyValue.getKey().toString(), keyValue.getValue().toString()); } Process process; @@ -839,23 +840,24 @@ public HashNode(HashNode prev) { @Specialization public int hash(int value) { + // TODO(CS): should check this matches MRI return value; } @Specialization public int hash(long value) { - return (int) (value ^ value >>> 32); + // TODO(CS): should check this matches MRI + return Long.valueOf(value).hashCode(); } @Specialization - public int hash(RubyBignum value) { - return value.hashCode(); + public int hash(double value) { + // TODO(CS): should check this matches MRI + return Double.valueOf(value).hashCode(); } @Specialization public int hash(RubyBasicObject self) { - notDesignedForCompilation(); - return self.hashCode(); } diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/NilClassNodes.java b/core/src/main/java/org/jruby/truffle/nodes/core/NilClassNodes.java index f427ed4a065..85e30b84845 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/NilClassNodes.java +++ b/core/src/main/java/org/jruby/truffle/nodes/core/NilClassNodes.java @@ -133,7 +133,7 @@ public ToHNode(ToHNode prev) { @Specialization public RubyHash toH() { - 
return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, getContext().getCoreLibrary().getNilObject(), null, 0); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, getContext().getCoreLibrary().getNilObject(), null, 0, null); } } diff --git a/core/src/main/java/org/jruby/truffle/nodes/core/SystemNode.java b/core/src/main/java/org/jruby/truffle/nodes/core/SystemNode.java index 6c0471c9fb6..9692700f510 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/core/SystemNode.java +++ b/core/src/main/java/org/jruby/truffle/nodes/core/SystemNode.java @@ -11,15 +11,15 @@ import java.io.*; import java.util.ArrayList; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import com.oracle.truffle.api.source.*; import com.oracle.truffle.api.frame.*; import org.jruby.truffle.nodes.*; import org.jruby.truffle.runtime.*; import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; /** * Represents an expression that is evaluated by running it as a system command via forking and @@ -45,8 +45,8 @@ public Object execute(VirtualFrame frame) { final List envp = new ArrayList<>(); // TODO(CS): cast - for (Map.Entry entry : ((LinkedHashMap) env.getStore()).entrySet()) { - envp.add(entry.getKey().toString() + "=" + entry.getValue().toString()); + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(env)) { + envp.add(keyValue.getKey().toString() + "=" + keyValue.getValue().toString()); } final String command = child.execute(frame).toString(); diff --git a/core/src/main/java/org/jruby/truffle/nodes/hash/FindEntryNode.java b/core/src/main/java/org/jruby/truffle/nodes/hash/FindEntryNode.java new file mode 100644 index 00000000000..646b0089194 --- /dev/null +++ b/core/src/main/java/org/jruby/truffle/nodes/hash/FindEntryNode.java @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. 
This + * code is released under a tri EPL/GPL/LGPL license. You can use it, + * redistribute it and/or modify it under the terms of the: + * + * Eclipse Public License version 1.0 + * GNU General Public License version 2 + * GNU Lesser General Public License version 2.1 + */ +package org.jruby.truffle.nodes.hash; + +import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.source.SourceSection; +import org.jruby.truffle.nodes.RubyNode; +import org.jruby.truffle.nodes.dispatch.DispatchHeadNode; +import org.jruby.truffle.nodes.dispatch.PredicateDispatchHeadNode; +import org.jruby.truffle.runtime.RubyContext; +import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.Entry; +import org.jruby.truffle.runtime.hash.HashSearchResult; +import org.jruby.truffle.runtime.hash.HashOperations; + +public class FindEntryNode extends RubyNode { + + @Child DispatchHeadNode hashNode; + @Child PredicateDispatchHeadNode eqlNode; + + public FindEntryNode(RubyContext context, SourceSection sourceSection) { + super(context, sourceSection); + hashNode = new DispatchHeadNode(context); + eqlNode = new PredicateDispatchHeadNode(context); + } + + public HashSearchResult search(VirtualFrame frame, RubyHash hash, Object key) { + final Object hashValue = hashNode.call(frame, key, "hash", null); + + final int hashed; + + if (hashValue instanceof Integer) { + hashed = (int) hashValue; + } else if (hashValue instanceof Long) { + hashed = (int) (long) hashValue; + } else { + throw new UnsupportedOperationException(); + } + + final Entry[] entries = (Entry[]) hash.getStore(); + final int index = (hashed & HashOperations.SIGN_BIT_MASK) % entries.length; + Entry entry = entries[index]; + + Entry previousEntry = null; + + while (entry != null) { + if (eqlNode.call(frame, key, "eql?", null, entry.getKey())) { + return new HashSearchResult(index, previousEntry, entry); + } + + previousEntry = entry; + entry = entry.getNextInLookup(); + } + + return new 
HashSearchResult(index, previousEntry, null); + } + + @Override + public Object execute(VirtualFrame frame) { + throw new UnsupportedOperationException(); + } + +} diff --git a/core/src/main/java/org/jruby/truffle/nodes/literal/HashLiteralNode.java b/core/src/main/java/org/jruby/truffle/nodes/literal/HashLiteralNode.java index 3aed0b13267..942e93f25df 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/literal/HashLiteralNode.java +++ b/core/src/main/java/org/jruby/truffle/nodes/literal/HashLiteralNode.java @@ -18,8 +18,10 @@ import org.jruby.truffle.runtime.*; import org.jruby.truffle.runtime.core.RubyHash; import org.jruby.truffle.runtime.core.RubyString; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; -import java.util.LinkedHashMap; +import java.util.*; public abstract class HashLiteralNode extends RubyNode { @@ -38,7 +40,7 @@ protected HashLiteralNode(RubyContext context, SourceSection sourceSection, Ruby public static HashLiteralNode create(RubyContext context, SourceSection sourceSection, RubyNode[] keyValues) { if (keyValues.length == 0) { return new EmptyHashLiteralNode(context, sourceSection); - } else if (keyValues.length <= RubyHash.HASHES_SMALL * 2) { + } else if (keyValues.length <= HashOperations.SMALL_HASH_SIZE * 2) { return new SmallHashLiteralNode(context, sourceSection, keyValues); } else { return new GenericHashLiteralNode(context, sourceSection, keyValues); @@ -69,7 +71,7 @@ public EmptyHashLiteralNode(RubyContext context, SourceSection sourceSection) { @ExplodeLoop @Override public RubyHash executeRubyHash(VirtualFrame frame) { - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, null, 0); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, null, 0, null); } } @@ -86,9 +88,9 @@ public SmallHashLiteralNode(RubyContext context, SourceSection sourceSection, Ru @ExplodeLoop @Override public RubyHash executeRubyHash(VirtualFrame frame) 
{ - final Object[] storage = new Object[RubyHash.HASHES_SMALL * 2]; + final Object[] storage = new Object[HashOperations.SMALL_HASH_SIZE * 2]; - int position = 0; + int end = 0; initializers: for (int n = 0; n < keyValues.length; n += 2) { Object key = keyValues[n].execute(frame); @@ -99,52 +101,42 @@ public RubyHash executeRubyHash(VirtualFrame frame) { final Object value = keyValues[n + 1].execute(frame); - for (int i = 0; i < n; i += 2) { + for (int i = 0; i < end; i += 2) { if (equalNode.call(frame, key, "eql?", null, storage[i])) { storage[i + 1] = value; continue initializers; } } - storage[position] = key; - storage[position + 1] = value; - position += 2; + storage[end] = key; + storage[end + 1] = value; + end += 2; } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, storage, position / 2); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, storage, end / 2, null); } } public static class GenericHashLiteralNode extends HashLiteralNode { - @Child protected DispatchHeadNode equalNode; - public GenericHashLiteralNode(RubyContext context, SourceSection sourceSection, RubyNode[] keyValues) { super(context, sourceSection, keyValues); - equalNode = new DispatchHeadNode(context); } - @ExplodeLoop @Override public RubyHash executeRubyHash(VirtualFrame frame) { notDesignedForCompilation(); - final LinkedHashMap storage = new LinkedHashMap<>(); + final List entries = new ArrayList<>(); for (int n = 0; n < keyValues.length; n += 2) { - Object key = keyValues[n].execute(frame); - - if (key instanceof RubyString) { - key = freezeNode.call(frame, dupNode.call(frame, key, "dup", null), "freeze", null); - } - + final Object key = keyValues[n].execute(frame); final Object value = keyValues[n + 1].execute(frame); - - storage.put(key, value); + entries.add(new KeyValue(key, value)); } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, storage, 0); + return 
HashOperations.verySlowFromEntries(getContext(), entries); } } diff --git a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/CheckArityNode.java b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/CheckArityNode.java index a745e33c8df..cfb65eef5f7 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/CheckArityNode.java +++ b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/CheckArityNode.java @@ -16,10 +16,10 @@ import org.jruby.truffle.runtime.*; import org.jruby.truffle.runtime.control.RaiseException; import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; import org.jruby.truffle.runtime.methods.*; -import java.util.Map; - /** * Check arguments meet the arity of the method. */ @@ -50,10 +50,10 @@ public void executeVoid(VirtualFrame frame) { } if (!keywordsRest && arity.hasKeywords() && getKeywordsHash(frame) != null) { - for (Map.Entry entry : getKeywordsHash(frame).slowToMap().entrySet()) { + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(getKeywordsHash(frame))) { for (String keyword : keywords) { - if (!keyword.toString().equals(entry.getKey().toString())) { - throw new RaiseException(getContext().getCoreLibrary().argumentError("unknown keyword: " + entry.getKey().toString(), this)); + if (!keyword.toString().equals(keyValue.getKey().toString())) { + throw new RaiseException(getContext().getCoreLibrary().argumentError("unknown keyword: " + keyValue.getKey().toString(), this)); } } } diff --git a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordArgumentNode.java b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordArgumentNode.java index 319dc87f456..cfacca82810 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordArgumentNode.java +++ 
b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordArgumentNode.java @@ -11,16 +11,12 @@ import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.source.SourceSection; -import com.oracle.truffle.api.utilities.BranchProfile; import org.jruby.truffle.nodes.RubyNode; -import org.jruby.truffle.nodes.RubyValueProfile; import org.jruby.truffle.runtime.RubyArguments; import org.jruby.truffle.runtime.RubyContext; -import org.jruby.truffle.runtime.UndefinedPlaceholder; import org.jruby.truffle.runtime.core.RubyHash; -import org.jruby.truffle.runtime.core.RubyString; - -import java.util.Map; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; public class ReadKeywordArgumentNode extends RubyNode { @@ -47,9 +43,9 @@ public Object execute(VirtualFrame frame) { Object value = null; - for (Map.Entry entry : hash.slowToMap().entrySet()) { - if (entry.getKey().toString().equals(name)) { - value = entry.getValue(); + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { + if (keyValue.getKey().toString().equals(name)) { + value = keyValue.getValue(); break; } } diff --git a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordRestArgumentNode.java b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordRestArgumentNode.java index a95b27f6272..7ef55acda24 100644 --- a/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordRestArgumentNode.java +++ b/core/src/main/java/org/jruby/truffle/nodes/methods/arguments/ReadKeywordRestArgumentNode.java @@ -15,10 +15,10 @@ import org.jruby.truffle.runtime.RubyArguments; import org.jruby.truffle.runtime.RubyContext; import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; +import java.util.*; public class 
ReadKeywordRestArgumentNode extends RubyNode { @@ -38,22 +38,22 @@ public Object execute(VirtualFrame frame) { final RubyHash hash = getKeywordsHash(frame); if (hash == null) { - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, null, 0); + return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, null, 0, null); } - final LinkedHashMap store = new LinkedHashMap<>(); + final List entries = new ArrayList<>(); - outer: for (Map.Entry entry : hash.slowToMap().entrySet()) { + outer: for (KeyValue keyValue : HashOperations.verySlowToKeyValues(hash)) { for (String excludedKeyword : excludedKeywords) { - if (excludedKeyword.toString().equals(entry.getKey().toString())) { + if (excludedKeyword.toString().equals(keyValue.getKey().toString())) { continue outer; } } - store.put(entry.getKey(), entry.getValue()); + entries.add(new KeyValue(keyValue.getKey(), keyValue.getValue())); } - return new RubyHash(getContext().getCoreLibrary().getHashClass(), null, null, store, store.size()); + return HashOperations.verySlowFromEntries(getContext(), entries); } private RubyHash getKeywordsHash(VirtualFrame frame) { diff --git a/core/src/main/java/org/jruby/truffle/runtime/core/CoreLibrary.java b/core/src/main/java/org/jruby/truffle/runtime/core/CoreLibrary.java index 37a80d40044..5a2d29e4f35 100644 --- a/core/src/main/java/org/jruby/truffle/runtime/core/CoreLibrary.java +++ b/core/src/main/java/org/jruby/truffle/runtime/core/CoreLibrary.java @@ -15,15 +15,14 @@ import com.oracle.truffle.api.source.Source; import org.jcodings.Encoding; import org.jcodings.EncodingDB; -import org.jruby.embed.variable.Constant; import org.jruby.runtime.Constants; -import org.jruby.runtime.Visibility; import org.jruby.runtime.encoding.EncodingService; import org.jruby.truffle.nodes.RubyNode; import org.jruby.truffle.nodes.core.ArrayNodes; -import org.jruby.truffle.runtime.ModuleOperations; import org.jruby.truffle.runtime.RubyCallStack; import 
org.jruby.truffle.runtime.RubyContext; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; import org.jruby.truffle.runtime.rubinius.RubiniusLibrary; import org.jruby.truffle.translator.TranslatorDriver; import org.jruby.util.cli.Options; @@ -32,7 +31,8 @@ import java.io.File; import java.io.IOException; import java.io.InputStreamReader; -import java.util.LinkedHashMap; +import java.util.ArrayList; +import java.util.List; import java.util.Map; public class CoreLibrary { @@ -245,13 +245,6 @@ public void initialize() { objectClass.setConstant(null, "RUBY_ENGINE", RubyString.fromJavaString(stringClass, Constants.ENGINE + "+truffle")); objectClass.setConstant(null, "RUBY_PLATFORM", RubyString.fromJavaString(stringClass, Constants.PLATFORM)); - final LinkedHashMap configHashMap = new LinkedHashMap<>(); - configHashMap.put(RubyString.fromJavaString(stringClass, "ruby_install_name"), RubyString.fromJavaString(stringClass, "rubytruffle")); - configHashMap.put(RubyString.fromJavaString(stringClass, "RUBY_INSTALL_NAME"), RubyString.fromJavaString(stringClass, "rubytruffle")); - configHashMap.put(RubyString.fromJavaString(stringClass, "host_os"), RubyString.fromJavaString(stringClass, "unknown")); - configHashMap.put(RubyString.fromJavaString(stringClass, "exeext"), RubyString.fromJavaString(stringClass, "")); - configHashMap.put(RubyString.fromJavaString(stringClass, "EXEEXT"), RubyString.fromJavaString(stringClass, "rubytruffle")); - edomClass = new RubyException.RubyExceptionClass(context, errnoModule, systemCallErrorClass, "EDOM"); new RubyClass(context, errnoModule, systemCallErrorClass, "ENOENT"); new RubyClass(context, errnoModule, systemCallErrorClass, "EPERM"); @@ -263,9 +256,6 @@ public void initialize() { // TODO(cs): this should be a separate exception mathModule.setConstant(null, "DomainError", edomClass); - // TODO(cs): the alias should be the other way round, Config is legacy (and should warn). 
- objectClass.setConstant(null, "RbConfig", configModule); - // Create some key objects mainObject = new RubyBasicObject(objectClass); @@ -287,12 +277,7 @@ public void initialize() { arrayMaxBlock = new ArrayNodes.MaxBlock(context); argv = new RubyArray(arrayClass); - envHash = getSystemEnv(); objectClass.setConstant(null, "ARGV", argv); - objectClass.setConstant(null, "ENV", envHash); - - final RubyHash configHash = new RubyHash(hashClass, null, null, configHashMap, 0); - configModule.setConstant(null, "CONFIG", configHash); fileClass.setConstant(null, "SEPARATOR", RubyString.fromJavaString(stringClass, File.separator)); fileClass.setConstant(null, "Separator", RubyString.fromJavaString(stringClass, File.separator)); @@ -313,6 +298,11 @@ public void initializeAfterMethodsAdded() { if (Options.TRUFFLE_LOAD_CORE.load()) { loadRubyCore("jruby/truffle/core.rb"); } + + // ENV is supposed to be an object that actually updates the environment, and sees any updates + + envHash = getSystemEnv(); + objectClass.setConstant(null, "ENV", envHash); } public void loadRubyCore(String fileName) { @@ -747,13 +737,13 @@ public RubyHash getENV() { public RubyEncoding getDefaultEncoding() { return RubyEncoding.getEncoding(context, "US-ASCII"); } private RubyHash getSystemEnv() { - final LinkedHashMap storage = new LinkedHashMap<>(); + final List entries = new ArrayList<>(); for (Map.Entry variable : System.getenv().entrySet()) { - storage.put(context.makeString(variable.getKey()), context.makeString(variable.getValue())); + entries.add(new KeyValue(context.makeString(variable.getKey()), context.makeString(variable.getValue()))); } - return new RubyHash(context.getCoreLibrary().getHashClass(), null, null, storage, 0); + return HashOperations.verySlowFromEntries(context, entries); } public ArrayNodes.MinBlock getArrayMinBlock() { diff --git a/core/src/main/java/org/jruby/truffle/runtime/core/RubyHash.java b/core/src/main/java/org/jruby/truffle/runtime/core/RubyHash.java index 
0905bb53c5c..786032d6113 100644 --- a/core/src/main/java/org/jruby/truffle/runtime/core/RubyHash.java +++ b/core/src/main/java/org/jruby/truffle/runtime/core/RubyHash.java @@ -9,25 +9,15 @@ */ package org.jruby.truffle.runtime.core; -import java.util.*; - import org.jruby.truffle.nodes.RubyNode; import org.jruby.truffle.runtime.RubyContext; +import org.jruby.truffle.runtime.hash.Entry; +import org.jruby.truffle.runtime.hash.KeyValue; +import org.jruby.truffle.runtime.hash.HashOperations; import org.jruby.truffle.runtime.subsystems.ObjectSpaceManager; -import org.jruby.util.cli.Options; -/** - * Represents the Ruby {@code Hash} class. - */ public class RubyHash extends RubyBasicObject { - public static final int HASHES_SMALL = Options.TRUFFLE_HASHES_SMALL.load(); - - /** - * The class from which we create the object that is {@code Hash}. A subclass of - * {@link org.jruby.truffle.runtime.core.RubyClass} so that we can override {@link RubyClass#newInstance} and allocate a - * {@link RubyHash} rather than a normal {@link org.jruby.truffle.runtime.core.RubyBasicObject}. 
- */ public static class RubyHashClass extends RubyClass { public RubyHashClass(RubyContext context, RubyClass objectClass) { @@ -36,7 +26,7 @@ public RubyHashClass(RubyContext context, RubyClass objectClass) { @Override public RubyBasicObject newInstance(RubyNode currentNode) { - return new RubyHash(this, null, null, null, 0); + return new RubyHash(this, null, null, null, 0, null); } } @@ -45,86 +35,78 @@ public RubyBasicObject newInstance(RubyNode currentNode) { private Object defaultValue; private Object store; private int storeSize; + private Entry firstInSequence; + private Entry lastInSequence; - public RubyHash(RubyClass rubyClass, RubyProc defaultBlock, Object defaultValue, Object store, int storeSize) { + public RubyHash(RubyClass rubyClass, RubyProc defaultBlock, Object defaultValue, Object store, int storeSize, Entry firstInSequence) { super(rubyClass); - - assert store == null || store instanceof Object[] || store instanceof LinkedHashMap; - assert !(store instanceof Object[]) || ((Object[]) store).length == HASHES_SMALL * 2; - assert !(store instanceof Object[]) || storeSize <= HASHES_SMALL; - this.defaultBlock = defaultBlock; this.defaultValue = defaultValue; this.store = store; this.storeSize = storeSize; + this.firstInSequence = firstInSequence; } public RubyProc getDefaultBlock() { return defaultBlock; } + public void setDefaultBlock(RubyProc defaultBlock) { + this.defaultBlock = defaultBlock; + } + public Object getDefaultValue() { return defaultValue; } - public Object getStore() { - return store; + public void setDefaultValue(Object defaultValue) { + this.defaultValue = defaultValue; } - public int getStoreSize() { - return storeSize; + public Object getStore() { + return store; } - public void setDefaultBlock(RubyProc defaultBlock) { - this.defaultBlock = defaultBlock; + public void setStore(Object store, int storeSize, Entry firstInSequence, Entry lastInSequence) { + this.store = store; + this.storeSize = storeSize; + this.firstInSequence = 
firstInSequence; + this.lastInSequence = lastInSequence; } - public void setDefaultValue(Object defaultValue) { - this.defaultValue = defaultValue; + public int getSize() { + return storeSize; } - public void setStore(Object store, int storeSize) { - assert store == null || store instanceof Object[] || store instanceof LinkedHashMap; - assert !(store instanceof Object[]) || ((Object[]) store).length == HASHES_SMALL * 2; - assert !(store instanceof Object[]) || storeSize <= HASHES_SMALL; - - - this.store = store; + public void setSize(int storeSize) { this.storeSize = storeSize; } - public void setStoreSize(int storeSize) { - assert storeSize <= HASHES_SMALL; - this.storeSize = storeSize; + public Entry getFirstInSequence() { + return firstInSequence; } - public Map slowToMap() { - if (store == null) { - return Collections.EMPTY_MAP; - } if (store instanceof Object[]) { - final Map map = new HashMap<>(); + public void setFirstInSequence(Entry firstInSequence) { + this.firstInSequence = firstInSequence; + } - for (int n = 0; n < storeSize; n++) { - map.put(((Object[]) store)[n * 2], ((Object[]) store)[n * 2 + 1]); - } + public Entry getLastInSequence() { + return lastInSequence; + } - return map; - } else if (store instanceof LinkedHashMap) { - return (LinkedHashMap) store; - } else { - throw new UnsupportedOperationException(); - } + public void setLastInSequence(Entry lastInSequence) { + this.lastInSequence = lastInSequence; } @Override public void visitObjectGraphChildren(ObjectSpaceManager.ObjectGraphVisitor visitor) { - for (Map.Entry entry : slowToMap().entrySet()) { - if (entry.getKey() instanceof RubyBasicObject) { - ((RubyBasicObject) entry.getKey()).visitObjectGraph(visitor); + for (KeyValue keyValue : HashOperations.verySlowToKeyValues(this)) { + if (keyValue.getKey() instanceof RubyBasicObject) { + ((RubyBasicObject) keyValue.getKey()).visitObjectGraph(visitor); } - if (entry.getValue() instanceof RubyBasicObject) { - ((RubyBasicObject) 
entry.getValue()).visitObjectGraph(visitor); + if (keyValue.getValue() instanceof RubyBasicObject) { + ((RubyBasicObject) keyValue.getValue()).visitObjectGraph(visitor); } } } diff --git a/core/src/main/java/org/jruby/truffle/runtime/hash/Entry.java b/core/src/main/java/org/jruby/truffle/runtime/hash/Entry.java new file mode 100644 index 00000000000..2511c03fc48 --- /dev/null +++ b/core/src/main/java/org/jruby/truffle/runtime/hash/Entry.java @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This + * code is released under a tri EPL/GPL/LGPL license. You can use it, + * redistribute it and/or modify it under the terms of the: + * + * Eclipse Public License version 1.0 + * GNU General Public License version 2 + * GNU Lesser General Public License version 2.1 + */ +package org.jruby.truffle.runtime.hash; + +/** + * An entry in the Ruby hash. That is, a container for a key and a value, and a member of two lists - the chain of + * buckets for a given index, and the chain of entries for the insertion order across the whole hash. 
+ */ +public class Entry { + + private Object key; + private Object value; + + private Entry nextInLookup; + + private Entry previousInSequence; + private Entry nextInSequence; + + public Entry(Object key, Object value) { + this.key = key; + this.value = value; + } + + public Object getKey() { + return key; + } + + public void setKey(Object key) { + this.key = key; + } + + public Object getValue() { + return value; + } + + public void setValue(Object value) { + this.value = value; + } + + public Entry getNextInLookup() { + return nextInLookup; + } + + public void setNextInLookup(Entry nextInLookup) { + this.nextInLookup = nextInLookup; + } + + public Entry getPreviousInSequence() { + return previousInSequence; + } + + public void setPreviousInSequence(Entry previousInSequence) { + this.previousInSequence = previousInSequence; + } + + public Entry getNextInSequence() { + return nextInSequence; + } + + public void setNextInSequence(Entry nextInSequence) { + this.nextInSequence = nextInSequence; + } + +} diff --git a/core/src/main/java/org/jruby/truffle/runtime/hash/HashOperations.java b/core/src/main/java/org/jruby/truffle/runtime/hash/HashOperations.java new file mode 100644 index 00000000000..aaafa459d3d --- /dev/null +++ b/core/src/main/java/org/jruby/truffle/runtime/hash/HashOperations.java @@ -0,0 +1,214 @@ +/* + * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This + * code is released under a tri EPL/GPL/LGPL license. 
You can use it, + * redistribute it and/or modify it under the terms of the: + * + * Eclipse Public License version 1.0 + * GNU General Public License version 2 + * GNU Lesser General Public License version 2.1 + */ +package org.jruby.truffle.runtime.hash; + +import com.oracle.truffle.api.CompilerDirectives; +import org.jruby.truffle.nodes.RubyNode; +import org.jruby.truffle.runtime.DebugOperations; +import org.jruby.truffle.runtime.RubyContext; +import org.jruby.truffle.runtime.core.RubyHash; +import org.jruby.truffle.runtime.core.RubyString; +import org.jruby.util.cli.Options; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class HashOperations { + + public static final int SMALL_HASH_SIZE = Options.TRUFFLE_HASHES_SMALL.load(); + public static final int[] CAPACITIES = Arrays.copyOf(org.jruby.RubyHash.MRI_PRIMES, org.jruby.RubyHash.MRI_PRIMES.length - 1); + public static final int SIGN_BIT_MASK = ~(1 << 31); + + public static int capacityGreaterThan(int size) { + for (int capacity : CAPACITIES) { + if (capacity > size) { + return capacity; + } + } + + return CAPACITIES[CAPACITIES.length - 1]; + } + + @CompilerDirectives.SlowPath + public static RubyHash verySlowFromEntries(RubyContext context, List entries) { + RubyNode.notDesignedForCompilation(); + + final RubyHash hash = new RubyHash(context.getCoreLibrary().getHashClass(), null, null, null, 0, null); + verySlowSetKeyValues(hash, entries); + return hash; + } + + public static void dump(RubyHash hash) { + final StringBuilder builder = new StringBuilder(); + + builder.append("["); + builder.append(hash.getSize()); + builder.append("]("); + + for (Entry entry : (Entry[]) hash.getStore()) { + builder.append("("); + + while (entry != null) { + builder.append("["); + builder.append(entry.getKey()); + builder.append(","); + builder.append(entry.getValue()); + builder.append("]"); + entry = entry.getNextInLookup(); + } + + builder.append(")"); + } + + builder.append(")~>("); + 
+ Entry entry = hash.getFirstInSequence(); + + while (entry != null) { + builder.append("["); + builder.append(entry.getKey()); + builder.append(","); + builder.append(entry.getValue()); + builder.append("]"); + entry = entry.getNextInSequence(); + } + + builder.append(")<~("); + + entry = hash.getLastInSequence(); + + while (entry != null) { + builder.append("["); + builder.append(entry.getKey()); + builder.append(","); + builder.append(entry.getValue()); + builder.append("]"); + entry = entry.getPreviousInSequence(); + } + + builder.append(")"); + + System.err.println(builder); + } + + @CompilerDirectives.SlowPath + public static List verySlowToKeyValues(RubyHash hash) { + final List keyValues = new ArrayList<>(); + + if (hash.getStore() instanceof Entry[]) { + Entry entry = hash.getFirstInSequence(); + + while (entry != null) { + keyValues.add(new KeyValue(entry.getKey(), entry.getValue())); + entry = entry.getNextInSequence(); + } + } else if (hash.getStore() instanceof Object[]) { + for (int n = 0; n < hash.getSize(); n++) { + keyValues.add(new KeyValue(((Object[]) hash.getStore())[n * 2], ((Object[]) hash.getStore())[n * 2 + 1])); + } + } else if (hash.getStore() != null) { + throw new UnsupportedOperationException(); + } + + return keyValues; + } + + @CompilerDirectives.SlowPath + public static HashSearchResult verySlowFindBucket(RubyHash hash, Object key) { + final Object hashValue = DebugOperations.send(hash.getContext(), key, "hash", null); + + final int hashed; + + if (hashValue instanceof Integer) { + hashed = (int) hashValue; + } else if (hashValue instanceof Long) { + hashed = (int) (long) hashValue; + } else { + throw new UnsupportedOperationException(); + } + + final Entry[] entries = (Entry[]) hash.getStore(); + final int bucketIndex = (hashed & SIGN_BIT_MASK) % entries.length; + Entry entry = entries[bucketIndex]; + + Entry previousEntry = null; + + while (entry != null) { + // TODO: cast + + if ((boolean) DebugOperations.send(hash.getContext(), 
key, "eql?", null, entry.getKey())) { + return new HashSearchResult(bucketIndex, previousEntry, entry); + } + + previousEntry = entry; + entry = entry.getNextInLookup(); + } + + return new HashSearchResult(bucketIndex, previousEntry, null); + } + + public static void setAtBucket(RubyHash hash, HashSearchResult hashSearchResult, Object key, Object value) { + if (hashSearchResult.getEntry() == null) { + final Entry entry = new Entry(key, value); + + if (hashSearchResult.getPreviousEntry() == null) { + ((Entry[]) hash.getStore())[hashSearchResult.getIndex()] = entry; + } else { + hashSearchResult.getPreviousEntry().setNextInLookup(entry); + } + + if (hash.getFirstInSequence() == null) { + hash.setFirstInSequence(entry); + hash.setLastInSequence(entry); + } else { + hash.getLastInSequence().setNextInSequence(entry); + entry.setPreviousInSequence(hash.getLastInSequence()); + hash.setLastInSequence(entry); + } + } else { + final Entry entry = hashSearchResult.getEntry(); + + // The bucket stays in the same place in the sequence + + // Update the key (it overwrites even if it's eql?)
and value + + entry.setKey(key); + entry.setValue(value); + } + } + + @CompilerDirectives.SlowPath + public static boolean verySlowSetInBuckets(RubyHash hash, Object key, Object value) { + if (key instanceof RubyString) { + key = DebugOperations.send(hash.getContext(), DebugOperations.send(hash.getContext(), key, "dup", null), "freeze", null); + } + + final HashSearchResult hashSearchResult = verySlowFindBucket(hash, key); + setAtBucket(hash, hashSearchResult, key, value); + return hashSearchResult.getEntry() == null; + } + + @CompilerDirectives.SlowPath + public static void verySlowSetKeyValues(RubyHash hash, List keyValues) { + final int size = keyValues.size(); + hash.setStore(new Entry[capacityGreaterThan(size)], 0, null, null); + + int actualSize = 0; + + for (KeyValue keyValue : keyValues) { + if (verySlowSetInBuckets(hash, keyValue.getKey(), keyValue.getValue())) { + actualSize++; + } + } + + hash.setSize(actualSize); + } +} diff --git a/core/src/main/java/org/jruby/truffle/runtime/hash/HashSearchResult.java b/core/src/main/java/org/jruby/truffle/runtime/hash/HashSearchResult.java new file mode 100644 index 00000000000..b2c52661492 --- /dev/null +++ b/core/src/main/java/org/jruby/truffle/runtime/hash/HashSearchResult.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This + * code is released under a tri EPL/GPL/LGPL license. You can use it, + * redistribute it and/or modify it under the terms of the: + * + * Eclipse Public License version 1.0 + * GNU General Public License version 2 + * GNU Lesser General Public License version 2.1 + */ +package org.jruby.truffle.runtime.hash; + +/** + * The result of looking for an entry (an {@link Entry}) in a Ruby hash. We get the previous entry in the lookup chain + * for this index until the entry was found, the entry that was found, and the index that was used. There are three + * possible outcomes for a search. + *
    + *
+ * <ul>
+ *     <li>There is nothing at that index, in which case the entry and the previous entry in the chain will be
+ *     {@code null}</li>
+ *     <li>There were entries at that index, but none for our key, in which case the entry will be {@code null}, but
+ *     the previous entry will be the last entry in the chain at that index, presumably where we will want to insert
+ *     our new entry</li>
+ *     <li>An entry was found for our key, in which case the entry will be the one corresponding to the key, and the
+ *     previous entry will be the one in the entry chain before that one</li>
+ * </ul>
+ */ +public class HashSearchResult { + + private final Entry previousEntry; + private final Entry entry; + private final int index; + + public HashSearchResult(int index, Entry previousEntry, Entry entry) { + this.index = index; + this.previousEntry = previousEntry; + this.entry = entry; + } + + public int getIndex() { + return index; + } + + public Entry getPreviousEntry() { + return previousEntry; + } + + public Entry getEntry() { + return entry; + } + +} diff --git a/core/src/main/java/org/jruby/truffle/runtime/hash/KeyValue.java b/core/src/main/java/org/jruby/truffle/runtime/hash/KeyValue.java new file mode 100644 index 00000000000..3cfb6e42249 --- /dev/null +++ b/core/src/main/java/org/jruby/truffle/runtime/hash/KeyValue.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This + * code is released under a tri EPL/GPL/LGPL license. You can use it, + * redistribute it and/or modify it under the terms of the: + * + * Eclipse Public License version 1.0 + * GNU General Public License version 2 + * GNU Lesser General Public License version 2.1 + */ +package org.jruby.truffle.runtime.hash; + +/** + * A simple key-value for inserting or retrieving from a hash. 
+ */ +public class KeyValue { + + private final Object key; + private final Object value; + + public KeyValue(Object key, Object value) { + this.key = key; + this.value = value; + } + + public Object getValue() { + return value; + } + + public Object getKey() { + return key; + } + +} diff --git a/core/src/main/ruby/jruby/truffle/core.rb b/core/src/main/ruby/jruby/truffle/core.rb index e18d7d2d7a3..9a2a80c0806 100644 --- a/core/src/main/ruby/jruby/truffle/core.rb +++ b/core/src/main/ruby/jruby/truffle/core.rb @@ -7,6 +7,7 @@ # GNU Lesser General Public License version 2.1 require_relative 'core/main' +require_relative 'core/config' require_relative 'core/kernel' require_relative 'core/float' require_relative 'core/math' diff --git a/core/src/main/ruby/jruby/truffle/core/config.rb b/core/src/main/ruby/jruby/truffle/core/config.rb new file mode 100644 index 00000000000..1481ab99b02 --- /dev/null +++ b/core/src/main/ruby/jruby/truffle/core/config.rb @@ -0,0 +1,17 @@ +# Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This +# code is released under a tri EPL/GPL/LGPL license. You can use it, +# redistribute it and/or modify it under the terms of the: +# +# Eclipse Public License version 1.0 +# GNU General Public License version 2 +# GNU Lesser General Public License version 2.1 + +module RbConfig + CONFIG = { + :ruby_install_name => "rubytruffle", + :RUBY_INSTALL_NAME => "rubytruffle", + :host_os => "unknown", + :exeext => "", + :EXEEXT => "rubytruffle", + } +end diff --git a/core/src/main/ruby/jruby/truffle/core/kernel.rb b/core/src/main/ruby/jruby/truffle/core/kernel.rb index ce1d50417e1..1fc086f3edd 100644 --- a/core/src/main/ruby/jruby/truffle/core/kernel.rb +++ b/core/src/main/ruby/jruby/truffle/core/kernel.rb @@ -52,3 +52,13 @@ def Complex(real, imaginary) class Channel end + +# Here temporarily + +class Hash + + def include?(key) + keys.include? 
key + end + +end diff --git a/test/truffle/hash_stress_test.rb b/test/truffle/hash_stress_test.rb new file mode 100644 index 00000000000..160a81bcd2e --- /dev/null +++ b/test/truffle/hash_stress_test.rb @@ -0,0 +1,179 @@ +# Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. This +# code is released under a tri EPL/GPL/LGPL license. You can use it, +# redistribute it and/or modify it under the terms of the: +# +# Eclipse Public License version 1.0 +# GNU General Public License version 2 +# GNU Lesser General Public License version 2.1 + +# Truffle doesn't have a compliant random number generator yet + +SMALL_RANDOM = [ + 5, 7, 1, 8, 3, + 3, 2, 9, 1, 5, + 2, 6, 4, 3, 3, + 4, 7, 3, 6, 9, + 6, 6, 3, 1, 2, + 4, 1, 8, 5, 7, + 2, 7, 0, 6, 9, + 6, 0, 3, 0, 5, + 5, 4, 8, 9, 7, + 1, 6, 8, 7, 5, + 9, 4, 8, 3, 6, + 6, 3, 4, 5, 6, + 3, 5, 8, 0, 3, + 3, 3, 0, 0, 0, + 8, 8, 4, 0, 3, + 6, 4, 3, 6, 6, + 3, 4, 7, 0, 8, + 2, 3, 4, 3, 9, + 5, 7, 0, 1, 4, + 2, 4, 2, 8, 6 +] + +BIG_RANDOM = [ + 18, 82, 58, 32, 94, + 69, 98, 38, 30, 0, + 10, 61, 70, 97, 83, + 44, 72, 15, 3, 44, + 64, 53, 46, 2, 58, + 9, 13, 39, 17, 40, + 35, 28, 20, 81, 7, + 77, 43, 48, 56, 71, + 43, 66, 35, 77, 29, + 35, 73, 9, 89, 26, + 54, 93, 1, 14, 9, + 30, 77, 81, 75, 62, + 79, 55, 58, 77, 8, + 74, 4, 18, 31, 1, + 82, 90, 34, 53, 21, + 99, 82, 54, 26, 43, + 35, 65, 35, 45, 84, + 66, 27, 7, 1, 79, + 20, 54, 78, 20, 73, + 49, 5, 31, 61, 0 +] + +@small_random = 0 + +def small_random + SMALL_RANDOM[(@small_random += 1) % SMALL_RANDOM.size] +end + +@big_random = 0 + +def big_random + BIG_RANDOM[(@big_random += 1) % BIG_RANDOM.size] +end + +def random_range(range) + big_random % range +end + +$assert_index = 0 + +def assert(condition) + raise "failure on #{$assert_index}" unless condition + $assert_index += 1 +end + +def random_hash + case random_range(5) + when 0 + {} + when 1 + Hash.new + when 2 + eval("{" + small_random.times.map { |n| "#{small_random} => #{small_random}" }.join(", ") + "}") + when 
3 + eval("{" + small_random.times.map { |n| "#{big_random} => #{big_random}" }.join(", ") + "}") + when 4 + eval("{" + big_random.times.map { |n| "#{big_random} => #{big_random}" }.join(", ") + "}") + end +end + +def check_hash(hash) + assert eval(hash.inspect) == hash + assert hash == eval(hash.inspect) + assert hash.keys.size == hash.size + assert hash.keys.uniq == hash.keys +end + +1_000.times do + # Create a hash + + hash = random_hash + + check_hash(hash) + + 100.times do + # Perform a random mutation + + case random_range(7) + when 0 + # Clear + hash.clear + assert hash.size == 0 + when 1 + # Merge with a new hash creating a new hash + original_size = hash.size + hash = hash.merge(random_hash) + assert hash.size >= original_size + when 2 + # Set a big random key + original_size = hash.size + key = big_random + value = big_random + hash[key] = value + assert hash[key] == value + assert (hash.size == original_size) || (hash.size == original_size + 1) + when 3 + # Set a small random key + original_size = hash.size + key = small_random + value = small_random + hash[key] = value + assert hash[key] == value + assert (hash.size == original_size) || (hash.size == original_size + 1) + when 4 + # Delete a big random key - if it exists + original_size = hash.size + hash.delete(big_random) + assert (hash.size == original_size) || (hash.size == original_size - 1) + when 5 + # Delete a small random key - if it exists + original_size = hash.size + hash.delete(small_random) + assert (hash.size == original_size) || (hash.size == original_size - 1) + when 6 + # Delete a random one of the actual keys + if hash.size > 0 + original_size = hash.size + hash.delete(hash.keys[big_random % hash.keys.size]) + assert (hash.size == original_size - 1) + end + end + + check_hash(hash) + end +end + +1_000.times do + # Create a hash + + hash = {} + + # Add random elements, remembering the order we put them in + + keys = [] + + 100.times do + key = big_random + keys << key + hash[key] = 
big_random + end + + # Check the order we get out is the same + + assert hash.keys == keys.uniq +end