public SettableBeanProperty(String paramString, JavaType paramJavaType, PropertyName paramPropertyName,
        TypeDeserializer paramTypeDeserializer, Annotations paramAnnotations, boolean paramBoolean)
{
    if ((paramString == null) || (paramString.length() == 0)) {
        this._propName = "";
    } else {
        this._propName = InternCache.instance.intern(paramString);
    }
    this._type = paramJavaType;
    this._wrapperName = paramPropertyName;
    this._isRequired = paramBoolean;
    this._contextAnnotations = paramAnnotations;
    this._viewMatcher = null;
    this._nullProvider = null;
    if (paramTypeDeserializer != null) {
        paramTypeDeserializer = paramTypeDeserializer.forProperty(this);
    }
    this._valueTypeDeserializer = paramTypeDeserializer;
    this._valueDeserializer = MISSING_VALUE_DESERIALIZER;
}
public final Name addName(String paramString, int[] paramArrayOfInt, int paramInt)
{
    if (this._intern) {
        paramString = InternCache.instance.intern(paramString);
    }
    int i;
    if (paramInt < 3) {
        if (paramInt == 1) {
            i = calcHash(paramArrayOfInt[0]);
        } else {
            i = calcHash(paramArrayOfInt[0], paramArrayOfInt[1]);
        }
    } else {
        i = calcHash(paramArrayOfInt, paramInt);
    }
    Name localName = constructName(i, paramString, paramArrayOfInt, paramInt);
    _addSymbol(i, localName);
    return localName;
}
protected SettableBeanProperty(String propName, JavaType type,
        TypeDeserializer typeDeser, Annotations contextAnnotations)
{
    /* 09-Jan-2009, tatu: Intern()ing makes sense since Jackson parsed
     *   field names are (usually) interned too, hence lookups will be faster.
     */
    // 23-Oct-2009, tatu: should this be disabled wrt [JACKSON-180]?
    /*   Probably need not, given that namespace of field/method names
     *   is not unbounded, unlike potential JSON names.
     */
    if (propName == null || propName.length() == 0) {
        _propName = "";
    } else {
        _propName = InternCache.instance.intern(propName);
    }
    _type = type;
    _contextAnnotations = contextAnnotations;
    _viewMatcher = null;

    // 30-Jan-2012, tatu: Important: contextualize TypeDeserializer now...
    if (typeDeser != null) {
        typeDeser = typeDeser.forProperty(this);
    }
    _valueTypeDeserializer = typeDeser;
}
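/* Aside (added illustration, not Jackson source): the comment above argues that
 * intern()ing property names pays off because parsed field names are usually
 * interned too, so a name lookup can succeed on a reference comparison instead
 * of a full character-by-character equals(). A minimal standalone sketch of
 * that effect using plain String.intern(); Jackson's InternCache plays the same
 * role, with a bounded cache in front of the JVM intern pool.
 */
public class InternLookupDemo {
    public static void main(String[] args) {
        // A name materialized from a parsed character buffer is a fresh object...
        String parsed = new String(new char[] { 'n', 'a', 'm', 'e' });
        String declared = "name"; // ...while a declared property name is pooled.

        System.out.println(parsed.equals(declared)); // true  (same characters)
        System.out.println(parsed == declared);      // false (different objects)

        // After interning, both sides share one canonical instance, so an
        // identity check (or the == fast path inside equals()) is enough.
        String canonical = parsed.intern();
        System.out.println(canonical == declared);   // true
    }
}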
private String _addSymbol(char[] buffer, int start, int len, int h, int index)
{
    if (_hashShared) { // need to do copy-on-write?
        copyArrays();
        _hashShared = false;
    } else if (_size >= _sizeThreshold) { // Need to expand?
        rehash();
        /* Need to recalc hash; rare occurrence (index mask has been
         * recalculated as part of rehash)
         */
        index = _hashToIndex(calcHash(buffer, start, len));
    }

    String newSymbol = new String(buffer, start, len);
    if (JsonFactory.Feature.INTERN_FIELD_NAMES.enabledIn(_flags)) {
        newSymbol = InternCache.instance.intern(newSymbol);
    }
    ++_size;
    // Ok; do we need to add primary entry, or a bucket?
    if (_symbols[index] == null) {
        _symbols[index] = newSymbol;
    } else {
        final int bix = (index >> 1);
        Bucket newB = new Bucket(newSymbol, _buckets[bix]);
        int collLen = newB.length;
        if (collLen > MAX_COLL_CHAIN_LENGTH) {
            // 23-May-2014, tatu: Instead of throwing an exception right away,
            //    let's handle it in a bit smarter way.
            _handleSpillOverflow(bix, newB);
        } else {
            _buckets[bix] = newB;
            _longestCollisionList = Math.max(collLen, _longestCollisionList);
        }
    }
    return newSymbol;
}
public String addName(String name, int q1)
{
    _verifySharing();
    if (_intern) {
        name = InternCache.instance.intern(name);
    }
    int offset = _findOffsetForAdd(calcHash(q1));
    _hashArea[offset] = q1;
    _hashArea[offset+3] = 1;
    _names[offset >> 2] = name;
    ++_count;
    _verifyNeedForRehash();
    return name;
}
public String addName(String name, int q1, int q2)
{
    _verifySharing();
    if (_intern) {
        name = InternCache.instance.intern(name);
    }
    int hash = (q2 == 0) ? calcHash(q1) : calcHash(q1, q2);
    int offset = _findOffsetForAdd(hash);
    _hashArea[offset] = q1;
    _hashArea[offset+1] = q2;
    _hashArea[offset+3] = 2;
    _names[offset >> 2] = name;
    ++_count;
    _verifyNeedForRehash();
    return name;
}
public String addName(String name, int q1, int q2, int q3)
{
    _verifySharing();
    if (_intern) {
        name = InternCache.instance.intern(name);
    }
    int offset = _findOffsetForAdd(calcHash(q1, q2, q3));
    _hashArea[offset] = q1;
    _hashArea[offset+1] = q2;
    _hashArea[offset+2] = q3;
    _hashArea[offset+3] = 3;
    _names[offset >> 2] = name;
    ++_count;
    _verifyNeedForRehash();
    return name;
}
public final Name addName(String paramString, int paramInt1, int paramInt2)
{
    if (this._intern) {
        paramString = InternCache.instance.intern(paramString);
    }
    int i;
    if (paramInt2 == 0) {
        i = calcHash(paramInt1);
    } else {
        i = calcHash(paramInt1, paramInt2);
    }
    Name localName = constructName(i, paramString, paramInt1, paramInt2);
    _addSymbol(i, localName);
    return localName;
}
public Name addName(String symbolStr, int q1, int q2)
{
    if (_intern) {
        symbolStr = InternCache.instance.intern(symbolStr);
    }
    int hash = (q2 == 0) ? calcHash(q1) : calcHash(q1, q2);
    Name symbol = constructName(hash, symbolStr, q1, q2);
    _addSymbol(hash, symbol);
    return symbol;
}
public Name addName(String symbolStr, int[] quads, int qlen)
{
    if (_intern) {
        symbolStr = InternCache.instance.intern(symbolStr);
    }
    int hash;
    if (qlen < 3) {
        hash = (qlen == 1) ? calcHash(quads[0]) : calcHash(quads[0], quads[1]);
    } else {
        hash = calcHash(quads, qlen);
    }
    Name symbol = constructName(hash, symbolStr, quads, qlen);
    _addSymbol(hash, symbol);
    return symbol;
}
public String addName(String name, int[] q, int qlen)
{
    _verifySharing();
    if (_intern) {
        name = InternCache.instance.intern(name);
    }
    int offset;

    switch (qlen) {
    case 1:
        {
            offset = _findOffsetForAdd(calcHash(q[0]));
            _hashArea[offset] = q[0];
            _hashArea[offset+3] = 1;
        }
        break;
    case 2:
        {
            offset = _findOffsetForAdd(calcHash(q[0], q[1]));
            _hashArea[offset] = q[0];
            _hashArea[offset+1] = q[1];
            _hashArea[offset+3] = 2;
        }
        break;
    case 3:
        {
            offset = _findOffsetForAdd(calcHash(q[0], q[1], q[2]));
            _hashArea[offset] = q[0];
            _hashArea[offset+1] = q[1];
            _hashArea[offset+2] = q[2];
            _hashArea[offset+3] = 3;
        }
        break;
    default:
        // longer name: quads are appended to the long-name area, and the cell
        // stores the hash, the start offset of that spill, and the quad length
        final int hash = calcHash(q, qlen);
        offset = _findOffsetForAdd(hash);
        _hashArea[offset] = hash;
        int longStart = _appendLongName(q, qlen);
        _hashArea[offset+1] = longStart;
        _hashArea[offset+3] = qlen;
    }
    // plus add the actual String
    _names[offset >> 2] = name;

    // and finally; see if we really should rehash.
    ++_count;
    _verifyNeedForRehash();
    return name;
}
public final String findSymbol(char[] paramArrayOfChar, int paramInt1, int paramInt2, int paramInt3)
{
    if (paramInt2 <= 0) {
        return "";
    }
    if (!this._canonicalize) {
        return new String(paramArrayOfChar, paramInt1, paramInt2);
    }
    int i = _hashToIndex(paramInt3);
    String str1 = this._symbols[i];
    if (str1 != null) {
        // Inlined equality check against the primary symbol for this slot
        if (str1.length() == paramInt2) {
            int k = 0;
            do {
                if (str1.charAt(k) != paramArrayOfChar[paramInt1 + k]) {
                    break;
                }
                k++;
            } while (k < paramInt2);
            if (k == paramInt2) {
                return str1;
            }
        }
        // No primary match; fall back to the collision bucket for this slot
        Bucket localBucket2 = this._buckets[i >> 1];
        if (localBucket2 != null) {
            String str3 = localBucket2.find(paramArrayOfChar, paramInt1, paramInt2);
            if (str3 != null) {
                return str3;
            }
        }
    }
    if (!this._dirty) { // copy-on-write before first modification of a shared table
        copyArrays();
        this._dirty = true;
    } else if (this._size >= this._sizeThreshold) { // expand, then recompute the index
        rehash();
        i = _hashToIndex(calcHash(paramArrayOfChar, paramInt1, paramInt2));
    }
    String str2 = new String(paramArrayOfChar, paramInt1, paramInt2);
    if (this._intern) {
        str2 = InternCache.instance.intern(str2);
    }
    this._size++;
    if (this._symbols[i] == null) {
        this._symbols[i] = str2;
        return str2;
    }
    int j = i >> 1;
    Bucket localBucket1 = new Bucket(str2, this._buckets[j]);
    this._buckets[j] = localBucket1;
    this._longestCollisionList = Math.max(localBucket1.length(), this._longestCollisionList);
    if (this._longestCollisionList > 255) {
        reportTooManyCollisions(255);
    }
    return str2;
}
public String findSymbol(char[] buffer, int start, int len, int h)
{
    if (len < 1) { // empty Strings are simplest to handle up front
        return "";
    }
    if (!_canonicalize) { // [JACKSON-259]
        return new String(buffer, start, len);
    }

    /* Related to problems with sub-standard hashing (somewhat
     * relevant for collision attacks too), let's try a little
     * bit of shuffling to improve hash codes.
     * (note, however, that this can't help with full collisions)
     */
    int index = _hashToIndex(h);
    String sym = _symbols[index];

    // Optimal case; checking existing primary symbol for hash index:
    if (sym != null) {
        // Let's inline primary String equality checking:
        if (sym.length() == len) {
            int i = 0;
            do {
                if (sym.charAt(i) != buffer[start+i]) {
                    break;
                }
            } while (++i < len);
            // Optimal case; primary match found
            if (i == len) {
                return sym;
            }
        }
        // How about collision bucket?
        Bucket b = _buckets[index >> 1];
        if (b != null) {
            sym = b.find(buffer, start, len);
            if (sym != null) {
                return sym;
            }
        }
    }

    if (!_dirty) { // need to do copy-on-write?
        copyArrays();
        _dirty = true;
    } else if (_size >= _sizeThreshold) { // Need to expand?
        rehash();
        /* Need to recalc hash; rare occurrence (index mask has been
         * recalculated as part of rehash)
         */
        index = _hashToIndex(calcHash(buffer, start, len));
    }

    String newSymbol = new String(buffer, start, len);
    if (_intern) {
        newSymbol = InternCache.instance.intern(newSymbol);
    }
    ++_size;
    // Ok; do we need to add primary entry, or a bucket?
    if (_symbols[index] == null) {
        _symbols[index] = newSymbol;
    } else {
        int bix = (index >> 1);
        Bucket newB = new Bucket(newSymbol, _buckets[bix]);
        _buckets[bix] = newB;
        _longestCollisionList = Math.max(newB.length(), _longestCollisionList);
        if (_longestCollisionList > MAX_COLL_CHAIN_LENGTH) {
            reportTooManyCollisions(MAX_COLL_CHAIN_LENGTH);
        }
    }
    return newSymbol;
}
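/* Aside (added illustration, not Jackson source): every findSymbol()/addName()
 * variant above follows the same canonicalize-then-intern pattern: hash the raw
 * characters, probe the primary slot and then the collision bucket for an
 * existing canonical String, and only on a miss build a new String, optionally
 * interning it before storing it for future hits. The toy table below sketches
 * that pattern with a plain HashMap instead of Jackson's open-addressed arrays,
 * and with String.intern() standing in for InternCache; copy-on-write sharing,
 * rehashing, and collision-chain limits are deliberately left out.
 */
import java.util.HashMap;
import java.util.Map;

public final class ToySymbolTable {
    private final Map<String, String> symbols = new HashMap<>();
    private final boolean intern;

    public ToySymbolTable(boolean intern) {
        this.intern = intern;
    }

    public String findSymbol(char[] buffer, int start, int len) {
        if (len < 1) { // mirror the empty-name fast path above
            return "";
        }
        // The real tables avoid building this String on the hit path by comparing
        // characters against stored symbols; the toy version trades that for clarity.
        String candidate = new String(buffer, start, len);
        String existing = symbols.get(candidate);
        if (existing != null) {
            return existing; // canonical instance already known
        }
        String symbol = intern ? candidate.intern() : candidate;
        symbols.put(symbol, symbol);
        return symbol;
    }

    public static void main(String[] args) {
        ToySymbolTable table = new ToySymbolTable(true);
        char[] input = "namename".toCharArray();
        String first = table.findSymbol(input, 0, 4);
        String second = table.findSymbol(input, 4, 4);
        System.out.println(first == second); // true: one shared instance per name
    }
}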