rm murmurhash-native

This commit is contained in:
babayaga 2026-01-01 20:56:43 +01:00
parent 4e97104eb0
commit 4bdf9727f7
71 changed files with 7 additions and 12080 deletions

View File

@ -1,12 +1,12 @@
export default {
"environment": "dev",
"environment": "build",
"isSsrBuild": false,
"projectBase": "",
"publicDir": "C:\\Users\\zx\\Desktop\\polymech\\library.polymech\\public\\",
"rootDir": "C:\\Users\\zx\\Desktop\\polymech\\library.polymech\\",
"mode": "dev",
"outDir": "dist",
"assetsDir": "/_astro",
"sourcemap": "inline",
"publicDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\public\\",
"rootDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\",
"mode": "production",
"outDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\dist\\",
"assetsDir": "_astro",
"sourcemap": false,
"assetFileNames": "/_astro/[name]@[width].[hash][extname]"
}

View File

@ -1,20 +0,0 @@
.DS_Store
Thumbs.db
.svn
*.tmp
*.log
*~
*.lnk
*.sublime-project
*.sublime-workspace
sftp-config.json
/node_modules
/tmp
/build
*.o
*.so
.nyc_output
*.tgz
/lib
package-lock.json
/doc

View File

@ -1,6 +0,0 @@
{
"node": true,
"laxcomma": true,
"laxbreak": true,
"undef": true
}

View File

@ -1,27 +0,0 @@
.jshintrc
.travis.yml
appveyor.yml
bench/
test/
checkout-murmurhash.sh
# MIRRORED FROM .gitignore
.DS_Store
Thumbs.db
.svn
*.tmp
*.log
*~
*.lnk
*.sublime-project
*.sublime-workspace
sftp-config.json
/node_modules
/tmp
/build
*.o
*.so
.nyc_output
*.tgz
/lib
/doc

View File

@ -1 +0,0 @@
package-lock=false

View File

@ -1,70 +0,0 @@
sudo: false
language: node_js
env:
global:
secure: "LKh0Cc14itGFicEVCD+whTswTmI0HzWqy1C3cLw2z16TgST2dtmMdmUFXU1jNATfi0l4qqwrlnIgJrtzFNrc5fwxMY7XYr71F9QjfWIY7hfrTh3iZBYZJdlUdx0zClwGsN06ldDiaxKKljf/OmXuu5tds5iyel5O8KKZUaIFzKc="
os:
- windows
- osx
- linux
node_js: [10, 11, 12, 13, 14]
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.9
osx_image: xcode8
matrix:
exclude:
# until travis.ci resolves problem with "VM fails to come up if you have secrets"
# https://travis-ci.community/t/current-known-issues-please-read-this-before-posting-a-new-topic/264
- os: windows
before_install:
- if [[ $TRAVIS_OS_NAME == "linux" ]]; then CXX=g++-4.9; fi
- if [[ $TRAVIS_OS_NAME == "osx" ]]; then CXX=g++; fi # g++ is an alias of clang, so let's use it
# update npm
- |
echo "TRAVIS_NODE_VERSION=$TRAVIS_NODE_VERSION"
if [[ $TRAVIS_NODE_VERSION -le 6 ]]; then NPM_VERSION=3
elif [[ $TRAVIS_NODE_VERSION -le 8 ]]; then NPM_VERSION=4
elif [[ $TRAVIS_NODE_VERSION -le 9 ]]; then NPM_VERSION=5
else NPM_VERSION=6
fi
echo "NPM_VERSION=$NPM_VERSION"
- npm -g install npm@${NPM_VERSION}
# print versions
- node --version
- npm --version
- if [[ $TRAVIS_OS_NAME != "windows" ]]; then $CXX --version; fi
# install windows-build-tools
- if [[ $TRAVIS_OS_NAME == "windows" ]]; then npm install --global --production windows-build-tools; fi
- if [[ $TRAVIS_OS_NAME == "windows" ]]; then setx PYTHON "%USERPROFILE%\.windows-build-tools\python27\python.exe"; fi
# figure out if we should publish
- PUBLISH_BINARY=no
# if we are building a tag then publish
- echo $TRAVIS_BRANCH
- echo `git describe --tags --always HEAD`
- if [[ $TRAVIS_BRANCH == `git describe --tags --always HEAD` ]]; then PUBLISH_BINARY=yes; fi;
- echo "Are we going to publish a binary? ->" $PUBLISH_BINARY
install:
# ensure source install works
- npm install --build-from-source
# compile typescript tests
- npm run tsc
script:
- npm test
# publish on a tag
- if [[ $PUBLISH_BINARY == yes ]]; then node-pre-gyp package; fi;
- if [[ $PUBLISH_BINARY == yes ]]; then node-pre-gyp-github publish --release; fi;
# cleanup
- node-pre-gyp clean
# test binary exists
- if [[ $PUBLISH_BINARY == yes ]]; then npm install --fallback-to-build=false; fi;

View File

@ -1,168 +0,0 @@
v3.5.0
* bump nan to 2.14.1, node-pre-gyp to 0.14.0
* bump development dependencies
* added binaries for node v13 and v14
* dropped binaries for node pre v10
v3.4.1
* restrict node to v6 or later
v3.4.0
* bump nan to 2.13 and remove v8 deprecation warnings suppression introduced in v3.2.5
* bump node-pre-gyp to 0.13 and node-pre-gyp-github to 1.4.3
* bump development dependencies
* bump typescript and typedoc dependencies
* added tests and binaries for node v12
* dropped support for node pre v6
v3.3.0
* TypeScript declarations, documentation and tests
* bump bluebird to 3.5.3, commander to 2.19.0 and tap to 12.1.0
* added development dependencies: typescript, @types, typedoc and typedoc plugins
v3.2.5
* bump node-pre-gyp to 0.11.0, nan to 2.11.1 and tap to 12.0.1
* adapt async uncaughtException tests to tap 12
* test and release binaries for node v11
* suppress v8 deprecation warnings from nan
v3.2.4
* bump node-pre-gyp to 0.10.3, commander to 2.17
* test and release binaries for nodejs v10
* replaced deprecated Buffer factory api in tests and benches with the class methods
v3.2.3
* bump nan to 2.10, node-pre-gyp to 0.9.1, tap to 9, commander to 2.15
* replaced deprecated synchronous Nan::Callback::Call with Nan::Call
* removed redundant const Nan::NAN_METHOD_ARGS_TYPE
* updated arguments to asynchronous Nan::Callback::Call
* dropped support for node pre v4 (broken node-gyp 0.12.18 on XCode LLVM 8.1)
on other systems it might still work though - not looking into it anymore
v3.2.2
* bump nan to 2.7.0, node-pre-gyp to 0.6.39
* bump development dependencies
* replace deprecated Nan::ForceSet with Nan::DefineOwnProperty
* test and release binaries for node v8 and v9
* appveyor: pin npm version 5.3 for node v9 to workaround npm's issue #16649
* npmrc: turn off package-lock
v3.2.1
* bump nan to 2.6.2, node-pre-gyp to 0.6.34
* bump development dependencies
* test and release binaries for node v7
* appveyor: pin npm versions
v3.2.0
* bump nan to 2.3.5
* removed strcasecmp dependency
* asynchronous: static byte array for small strings added to the worker
* incremental async: static byte array for small strings added to the hasher
* incremental: endianness configurable via property and argument to the constructor
* variants of murmur hash functions producing BE (MSB) or LE (LSB) results
v3.1.1
* fix incremental async: ensure hasher is not GC'ed before worker completes
* fix incremental async: prevent from copying state over busy target
v3.1.0
* replaced MurmurHash3 implementation with PMurHash and PMurHash128
* new ability to update incremental hashers asynchronously via libuv
* stream implementation chooses sync vs async update depending on chunk size
* test: ensure persistence under gc stress
* bench: streaming
v3.0.4
* test cases: incremental digest() method with buffer output
* fix stream.js wrapper: missing support for offset and length in digest()
v3.0.3
* improved node-pre-gyp configuration so only essential binaries are being packaged
v3.0.2
* removed bundled dependencies
v3.0.1
* facilitate installation with prebuilt native binaries
* use the "bindings" package for finding native modules
* backward compatibility testing of serialized data
* c++ code cleanup: most of the preprocessor macros replaced with type-safe constants
* js code cleanup with jshint
* remove iojs-3 from ci tests
v3.0.0
* results always in network order MSB (byte)
* restored output types: "hex" "base64" and "binary"
* incremental MurmurHash 3: 32bit, 128bit x86 and x64
* copyable and serializable state of incremental MurmurHash
* stream wrapper for incremental MurmurHash
v2.1.0
* new ability to calculate hashes asynchronously via libuv
* ensure correct byte alignment while directly writing to a buffer
* bench: asynchronous version
* promisify wrapper
v2.0.0
* output string encoding types removed
* "number" output type is a hex number for 64 and 128bit hashes
* "number" output type is the default output type for all hashes
* consistent hash regardless of platform endian-ness
* throws TypeError on incorrect encoding or output_type
* second string argument interpreted as an output type or encoding
* remove legacy pre v0.10 code
v1.0.2
* bump nan to 2.3.3, fixes node v6 build
v1.0.1
* use nan converters instead of soon deprecated ->XValue()
v1.0.0
* bump nan to 2.0.9, fixes build with iojs-3 and node v4
v0.3.1
* bump nan to 1.8, fixes build with newest io.js
v0.3.0
* output Buffer, offset and length arguments
* use NODE_SET_METHOD macro to export functions
v0.2.1
* bump nan to 1.6, remove polyfill
* bench: compare with all crypto hashes
v0.2.0
* default input encoding changed from "utf8" to "binary"
* ensure default output type is "number" (32bit) or "buffer" (>32bit)
* decode "utf8" string faster on node >= 0.10
* handle some cases of 3 arguments better
* bench: compare with md5/sha1
* bench: string encoding argument
v0.1.1
* fix handling of non-ascii encoding argument string

View File

@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 royal at yeondir.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,329 +0,0 @@
MurmurHash bindings for node
============================
This library provides Austin Appleby's non-cryptographic "MurmurHash" hashing algorithm functions in a few different flavours.
[![NPM][NPM img]][NPM Status]
[![Node][Node img]][NPM Status]
[![Travis][Travis img]][Travis Status]
[![AppVeyor][AppVeyor img]][AppVeyor Status]
[![License][License img]][License Link]
Key features:
* blocking and asynchronous api interfaces
* additional MurmurHash3 32 and 128 bit progressive implementations based on [PMurHash][PMurHash]
* stream wrapper for progressive hasher with [crypto.Hash-like][crypto.Hash] bi-api interface
* serializable state of the progressive hasher
* BE or LE byte order variants of hashes
* promise wrapper
* prebuilt binaries for most standard system configurations
* TypeScript declarations ([docs][typescript-docs])
Install:
--------
There are prebuilt [binaries][releases] available for painless installation on
some Linuxes (x64), OS-X (x64) and Windows (x64 and x86) thanks to [node-pre-gyp][node-pre-gyp] and [node-pre-gyp-github][node-pre-gyp-github].
```
npm install murmurhash-native
```
If the prebuilt release is not available for your system or nodejs version,
the compilation from source will kick-in. For more information on building from
source please consult [this page][node-gyp-install].
If for some reason (e.g. an incompatible GLIBC) you might want to force building from source, type:
```
npm i murmurhash-native --build-from-source
```
To reinstall prebuilt binary (e.g. after switching between major nodejs versions):
```
npm rebuild --update-binary
```
TypeScript
----------
`murmurhash-native` is [ready][typescript-docs] for TypeScript without any external declarations. However, this module is a node-specific package; if you're going to use it in TypeScript, do not forget to include `@types/node` and enable `es2015` language features in your `tsconfig.json`.
Make a hash:
------------
```js
var murmurHash = require('murmurhash-native').murmurHash
murmurHash( 'hash me!' ) // 2061152078
murmurHash( new Buffer('hash me!') ) // 2061152078
murmurHash( 'hash me!', 0x12345789 ) // 1908692277
murmurHash( 'hash me!', 0x12345789, 'buffer' ) // <Buffer 71 c4 55 35>
murmurHash( 'hash me!', 0x12345789, 'hex' ) // '71c45535'
var buf = new Buffer('hash me!____')
murmurHash( buf.slice(0,8), 0x12345789, buf, 8 )
// <Buffer 68 61 73 68 20 6d 65 21 71 c4 55 35>
var murmurHash128x64 = require('murmurhash-native').murmurHash128x64
murmurHash128x64( 'hash me!' ) // 'c43668294e89db0ba5772846e5804467'
var murmurHash128x86 = require('murmurhash-native').murmurHash128x86
murmurHash128x86( 'hash me!' ) // 'c7009299985a5627a9280372a9280372'
// asynchronous
murmurHash( 'hash me!', function(err, hash) { assert.equal(hash, 2061152078) });
// output byte order (default is BE)
var murmurHashLE = require('murmurhash-native').LE.murmurHash;
murmurHashLE( 'hash me!', 0x12345789, 'buffer' ) // <Buffer 35 55 c4 71>
murmurHashLE( 'hash me!', 0x12345789, 'hex' ) // '3555c471'
```
These functions are awaiting your command:
* `murmurHash` - MurmurHash v3 32bit
* `murmurHash32` - (an alias of murmurHash)
* `murmurHash128` - MurmurHash v3 128bit platform (x64 or x86) optimized
* `murmurHash128x64` - MurmurHash v3 128bit x64 optimized
* `murmurHash128x86` - MurmurHash v3 128bit x86 optimized
* `murmurHash64` - MurmurHash v2 64bit platform (x64 or x86) optimized
* `murmurHash64x64` - MurmurHash v2 64bit x64 optimized
* `murmurHash64x86` - MurmurHash v2 64bit x86 optimized
and they share the following signature:
```js
murmurHash(data[, callback])
murmurHash(data, output[, offset[, length]][, callback])
murmurHash(data{string}, encoding|output_type[, seed][, callback])
murmurHash(data{Buffer}, output_type[, seed][, callback])
murmurHash(data, seed[, callback])
murmurHash(data, seed, output[, offset[, length]][, callback])
murmurHash(data, seed, output_type[, callback])
murmurHash(data, encoding, output_type[, callback])
murmurHash(data{string}, encoding, output[, offset[, length]][, callback])
murmurHash(data{string}, encoding, seed[, callback])
murmurHash(data{string}, encoding, seed, output[, offset[, length]][, callback])
murmurHash(data{string}, encoding, seed, output_type[, callback])
```
* `@param` `{string|Buffer}` `data` - a byte-string to calculate hash from
* `@param` `{string}` `encoding` - data string encoding, should be:
"utf8", "ucs2", "ascii", "hex", "base64" or "binary";
"binary" by default
* `@param` `{Uint32}` `seed` - murmur hash seed, 0 by default
* `@param` `{Buffer}` `output` - a Buffer object to write hash bytes to;
the same object will be returned
* `@param` `{number}` `offset` - start writing into output at offset byte;
negative offset starts from the end of the output buffer
* `@param` `{number}` `length` - a number of bytes to write from calculated hash;
negative length starts from the end of the hash;
if absolute value of length is larger than the size of calculated
hash, bytes are written only up to the hash size
* `@param` `{string}` `output_type` - a string indicating return type:
- "number" - (default) for murmurHash32 an unsigned 32-bit integer,
other hashes - hexadecimal string
- "hex" - hexadecimal string
- "base64" - base64 string
- "binary" - binary string
- "buffer" - a new Buffer object;
* `@param` `{string}` `encoding|output_type` - data string encoding
or a return type; because some valid return types are also valid
encodings, the only values recognized here for `output_type` are:
- "number"
- "buffer"
* `@param` `{Function}` `callback` - optional callback(err, result)
if provided the hash will be calculated asynchronously using libuv
worker queue, the return value in this instance will be `undefined`
and the result will be provided to the callback function;
Be careful, as reading and writing by multiple threads to the same
memory may produce indeterminate results
* `@return` `{number|Buffer|String|undefined}`
The order of bytes written to a Buffer or encoded string depends on
function's endianness.
`data` and `output` arguments might reference the same Buffer object
or buffers referencing the same memory (views).
There are additional namespaces, each for different variant of function endianness:
* `BE` - big-endian (most significant byte first or network byte order)
* `LE` - little-endian (least significant byte first)
* `platform` - compatible with `os.endianness()`
Functions in the root namespace are big-endian.
Streaming and incremental api
-----------------------------
The dual-api interface for progressive MurmurHash3 is available as a submodule:
```js
var murmur = require('murmurhash-native/stream');
```
Incremental (a.k.a. progressive) api
```js
var hash = murmur.createHash('murmurhash128x86');
hash.update('hash').digest('hex'); // '0d872bbf2cd001722cd001722cd00172'
hash.update(' me!').digest('hex'); // 'c7009299985a5627a9280372a9280372'
var hash = murmur.createHash('murmurhash128x86', {endianness: 'LE'});
hash.update('hash').digest('hex'); // 'bf2b870d7201d02c7201d02c7201d02c'
hash.update(' me!').digest('hex'); // '999200c727565a98720328a9720328a9'
```
Streaming api
```js
var hash = murmur.createHash('murmurhash32', {seed: 123, encoding: 'hex', endianness: 'platform'});
fs.createReadStream('README.md').pipe(hash).pipe(process.stdout);
```
### Serializable state
The incremental MurmurHash utilities may be serialized and later deserialized.
One may also copy a hasher's internal state onto another.
This way the hasher utility can be re-used to calculate a hash of some data
with already known prefix.
```js
var hash = murmur.createHash('murmurhash128x64').update('hash');
hash.digest('hex'); // '4ab2e1e022f63e2e9add75dfcea2dede'
var backup = murmur.createHash(hash); // create a copy of a hash with the same internal state
backup.update(' me!').digest('hex'); // 'c43668294e89db0ba5772846e5804467'
hash.copy(backup) // copy hash's state onto the backup
.update(' me!').digest('hex'); // 'c43668294e89db0ba5772846e5804467'
var serial = hash.serialize(); // serialize hash's state
serial == 'AAAAAAAAAAAAAAAAAAAAAGhzYWgAAAAAAAAAAAAAAFQAAAAEtd3X';
// restore backup from serialized state
var backup = murmur.createHash('murmurhash128x64', {seed: serial});
backup.update(' me!').digest('hex'); // 'c43668294e89db0ba5772846e5804467'
// finally
hash.update(' me!').digest('hex'); // 'c43668294e89db0ba5772846e5804467'
```
The dual-api with streaming is a javascript wrapper over the native module.
The native incremental module is directly available at `murmurhash-native/incremental`.
See [hasher.cc](src/incremental/hasher.cc) for full api description
(and there's some crazy templating going on there...).
Promises
--------
The native murmurHash functions run asynchronously if the last argument is a callback.
There is however a promisify wrapper:
```js
var mm = require('murmurhash-native/promisify')();
mm.murmurHash32Async( 'hash me!', 0x12345789 )
.then(hash => { assert.equal(hash, 1908692277) });
// Promise { <pending> }
```
You may provide your own promise constructor:
```js
var bluebird = require('bluebird');
var mm = require('murmurhash-native/promisify')(bluebird);
mm.murmurHash32Async( 'hash me!', 0x12345789 )
.then(hash => { assert.equal(hash, 1908692277) });
// Promise {
// _bitField: 0,
// _fulfillmentHandler0: undefined,
// _rejectionHandler0: undefined,
// _promise0: undefined,
// _receiver0: undefined }
```
Significant changes in 3.x
--------------------------
The most important change is full platform indifference of rendered output.
In 2.x output hash as binary data provided via buffer was endian sensitive.
Starting with 3.x the data written to output buffer is always MSB (byte) first.
The "hex", "base64" and "binary" output types has been (re)added, but this time
with a sane definition.
So in this version the following is true on all platforms:
```js
assert.strictEqual(murmurHash('foo', 'buffer').toString('hex'), murmurHash('foo', 0, 'hex'));
assert.strictEqual(murmurHash('foo', 'buffer').toString('base64'), murmurHash('foo', 0, 'base64'));
```
Significant changes in 2.x
--------------------------
The 1.x output types were very confusing. E.g. "hex" was just an equivalent of
`murmurHash(data, "buffer").toString("hex")` which rendered incorrect hexadecimal
number. So all the string output type encodings: "utf8", "ucs2", "ascii", "hex",
"base64" and "binary" were completely removed in 2.0 as being simply useless.
The "number" output type has been adapted to all variants in a way more compatible
with other murmurhash [implementations][murmurhash3js]. For 32bit hash the return
value is an unsigned 32-bit integer (it was signed integer in 1.x) and for other
hashes it's a hexadecimal number.
The "buffer" output type wasn't modified except that the default output is now
"number" for all of the hashes.
Additionally when passing unsupported value to `encoding` or `output_type`
argument the function throws `TypeError`.
Another breaking change is for the BE platforms. Starting with 2.0 endian-ness
is recognized, so hashes should be consistent regardless of the cpu type.
Since v2.1 the callback argument was introduced.
Bugs, limitations, caveats
--------------------------
When working with Buffers, input data is not being copied, however for strings
this is unavoidable. For strings with byte-length < 1kB the static buffer is
provided to avoid mem-allocs.
The hash functions optimized for x64 and x86 produce different results.
Tested on Linux (x64), OS X (x64) and MS Windows (x64 and x86).
This version provides binaries for nodejs: v10, v11, v12, v13 and v14.
For binaries of murmurhash-native for previous versions of nodejs, use version
3.4.1 or 3.3.0 of this module.
[Travis Status]: https://travis-ci.org/royaltm/node-murmurhash-native
[Travis img]: https://img.shields.io/travis/royaltm/node-murmurhash-native.svg?maxAge=86400&style=flat-square&label=unix
[AppVeyor img]: https://img.shields.io/appveyor/ci/royaltm/node-murmurhash-native.svg?maxAge=86400&style=flat-square&label=windows
[AppVeyor Status]: https://ci.appveyor.com/project/royaltm/node-murmurhash-native
[NPM img]: https://img.shields.io/npm/v/murmurhash-native.svg?maxAge=86400&style=flat-square
[NPM Status]: https://www.npmjs.com/package/murmurhash-native
[Node img]: https://img.shields.io/node/v/murmurhash-native.svg?maxAge=2592000&style=flat-square
[License img]: https://img.shields.io/npm/l/murmurhash-native.svg?maxAge=2592000&style=flat-square
[License Link]: https://opensource.org/licenses/MIT
[bitHound img]: https://img.shields.io/bithound/dependencies/github/royaltm/node-murmurhash-native.svg?maxAge=86400&style=flat-square
[murmurhash3js]: https://www.npmjs.com/package/murmurhash3js
[PMurHash]: https://github.com/aappleby/smhasher/blob/master/src/PMurHash.c
[crypto.Hash]: https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash
[node-pre-gyp]: https://github.com/mapbox/node-pre-gyp
[node-pre-gyp-github]: https://github.com/bchr02/node-pre-gyp-github
[releases]: https://github.com/royaltm/node-murmurhash-native/releases
[node-gyp-install]: https://github.com/nodejs/node-gyp#installation
[typescript-docs]: http://royaltm.github.io/node-murmurhash-native/globals.html

View File

@ -1,79 +0,0 @@
# http://www.appveyor.com/docs/appveyor-yml
platform:
- x86
- x64
configuration:
- Release
environment:
NODE_PRE_GYP_GITHUB_TOKEN:
secure: zPYtraT93rgt16J0Egnm0+SD3u8ncUEjAbsquMN5zsY9qowJP/t362GfmtIgbJS4
matrix:
- {"nodejs_version": "10", "npm_version": "6"}
- {"nodejs_version": "11", "npm_version": "6"}
- {"nodejs_version": "12", "npm_version": "6"}
- {"nodejs_version": "13", "npm_version": "6"}
- {"nodejs_version": "14", "npm_version": "6"}
# for:
# -
# matrix:
# exclude:
# - {"platform": "x64", "nodejs_version": "8", "npm_version": "4"}
# - {"platform": "x64", "nodejs_version": "9", "npm_version": "5"}
# - {"platform": "x64", "nodejs_version": "10", "npm_version": "6"}
# - {"platform": "x64", "nodejs_version": "11", "npm_version": "6"}
matrix:
fast_finish: true
install:
# https://www.appveyor.com/docs/lang/nodejs-iojs/#installing-any-version-of-nodejs-or-iojs
- ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version) $env:platform
# - ps: Install-Product node $env:nodejs_version $env:platform;
- npm -g install npm@%NPM_VERSION%
- ps: $env:Path += ";$(pwd)\node_modules\.bin;$env:AppData\npm";
- ps: >
@{
"nodejs_version" = $env:nodejs_version
"platform" = $env:platform
"node binary version" = $(node -v)
"npm version" = $(npm -v)
"APPVEYOR_REPO_COMMIT_MESSAGE" = $env:APPVEYOR_REPO_COMMIT_MESSAGE
"git latest tag" = "$(git describe --tags --always HEAD)"
"appveyor_repo_tag" = $env:appveyor_repo_tag
} | Out-String | Write-Host;
# Check if we're building the latest tag, if so
# then we publish the binaries if tests pass.
- ps: >
if ($env:appveyor_repo_tag -match "true" -and ("$(git describe --tags --always HEAD)" -eq $env:appveyor_repo_tag_name)) {
$env:publish_binary = 1;
} else {
$env:publish_binary = 0;
}
true;
- ps: >
if ($env:publish_binary -eq 1) {
"We're going to publish a binary!" | Write-Host
} else {
"We're NOT going to publish a binary" | Write-Host
}
true;
build_script:
- npm install --build-from-source
- npm run tsc
test_script:
- npm test
- IF %PUBLISH_BINARY% == 1 (node-pre-gyp package 2>&1)
- IF %PUBLISH_BINARY% == 1 (node-pre-gyp-github publish --release 2>&1)
- IF %PUBLISH_BINARY% == 1 (node-pre-gyp clean install)
# Set build version format here instead of in the admin panel.
version: "{build}"
deploy: off

View File

@ -1,10 +0,0 @@
#!/bin/sh
# Run every bench*.js script that sits next to this launcher, forwarding
# any extra command-line arguments to each benchmark. Aborts with the
# failing benchmark's exit status if one of them fails.
# NOTE: `readlink -f` is GNU coreutils; on BSD/macOS install coreutils or
# adjust accordingly.
SCRIPT=$(readlink -f "$0")
SCRIPTPATH=$(dirname "$SCRIPT")
for bench in "$SCRIPTPATH"/bench*.js
do
echo "$(basename "$bench")" "$@"
node "$bench" "$@" || exit $?
done

View File

@ -1,119 +0,0 @@
#!/usr/bin/env node
"use strict";
// Benchmark driver for the asynchronous (callback-based) murmurhash API.
// Measures the one-shot hash functions and the incremental hashers running
// on the libuv worker pool with a configurable number of parallel requests.
var os = require('os')
, assert = require('assert')
, parben = require('./parben').parallel
, hash = require('..')
, incr = require('../incremental')
, duration = 1000
, parallel = os.cpus().length
, stringEncoding = 'binary'
, outputType = 'number';
var program = require('commander');
// Command-line options: parallelism, input sizes, output type and input
// string encoding; the single positional argument is the test duration
// in seconds (default 1s).
program
.version(require(__dirname + '/../package.json').version)
.usage('[options] [seconds=1]')
.option('-n, --no-crypto', 'do not benchmark crypto hashers')
.option('-p, --parallel <n>', 'number of parallel threads', parseInt)
.option('-s, --small <chars>', 'small string size in chars', 80)
.option('-l, --large <kilos>', 'large string/buffer size in kilos', 128)
.option('-o, --output [type]', 'output type')
.option('-e, --encoding [enc]', 'input string encoding')
.parse(process.argv);
// Seconds -> milliseconds, coerced to an unsigned integer.
if (program.args.length > 0) duration = 1000*program.args[0]>>>0;
if (program.encoding) {
stringEncoding = program.encoding;
console.log('string encoding: %s', stringEncoding);
}
if (program.output) {
outputType = program.output;
console.log('output type: %s', outputType);
}
if (program.parallel) {
// Coerce to an unsigned integer; negative values clamp to 0.
parallel = Math.max(0, program.parallel>>>0);
}
console.log('parallel threads: %d', parallel);
console.log('test duration: %d ms', duration);
// Wrap an incremental hasher constructor so it exposes the same
// (data, encoding, outputType, next) callback signature as the one-shot
// hash functions listed below.
function incremental(constr) {
return function(data, encoding, outputType, next) {
var hash = new constr();
return hash.update(data, encoding, function(err) {
next(err, hash.digest(outputType));
});
};
}
// [function, padded display name] pairs of everything to benchmark.
var funmatrix = [
[hash.murmurHash, 'murmurHash '],
[hash.murmurHash64x86, 'murmurHash64x86 '],
[hash.murmurHash64x64, 'murmurHash64x64 '],
[hash.murmurHash128x86, 'murmurHash128x86 '],
[hash.murmurHash128x64, 'murmurHash128x64 '],
[incremental(incr.MurmurHash), 'MurmurHash '],
[incremental(incr.MurmurHash128x86), 'MurmurHash128x86 '],
[incremental(incr.MurmurHash128x64), 'MurmurHash128x64 ']
];
// FIFO of queued measurement jobs; drained one at a time by next() so
// measurements never overlap each other.
var queued = [];
// Fill a Buffer in place with pseudo-random bytes and return it.
function fillrandom(buffer) {
for(var i = 0; i < buffer.length; ++i)
buffer[i] = (Math.random()*0x100)|0;
return buffer;
}
// Produce a random byte-string of the given length ('binary' encoding).
function randomstring(length) {
var buffer = fillrandom(Buffer.allocUnsafe(length));
return buffer.toString('binary');
}
// Queue one measurement per hash function for a random input of `size`
// bytes, presented either as a string or as a Buffer.
function bench(size, inputStr, duration) {
var input = inputStr
? randomstring(size)
: fillrandom(Buffer.allocUnsafe(size));
funmatrix.forEach(function(args) {
var fun = args[0], name = args[1];
queue(measure, inputStr ? "string" : "buffer", fun, name, duration, parallel, size, input);
});
}
bench(+program.small, true, duration);
bench(program.large*1024, true, duration);
bench(program.large*1024, false, duration);
// Kick off the first queued measurement; each completed measurement
// schedules the next one.
next();
// Calibrate the iteration count for the requested duration, run the
// parallel benchmark, then print single/average throughput in MB/s
// together with the digest of the input (as a sanity check).
function measure(label, fun, name, duration, parallel, size, arg) {
var cb = function(next) { fun(arg, stringEncoding, outputType, next); };
parben.calibrate(duration, parallel, cb)
.then(function(iters) {
return parben(iters, parallel, cb);
})
.then(function(res) {
fun(arg, stringEncoding, outputType, function(err, digest) {
assert.ifError(err);
console.log(name + "(" + label + "[" + size + "]): single: %s avg: %s %s",
(size / res.single / 1000).toFixed(4) + 'MB/s',
(size / res.avg / 1000).toFixed(4) + 'MB/s',
digest);
next();
});
});
}
// Push a [fn, ...args] job onto the queue.
function queue() {
queued.push([].slice.call(arguments, 0));
}
// Run the next queued job (if any) on the next event-loop turn.
function next() {
if (queued.length > 0) setImmediate.apply(null, queued.shift());
}

View File

@ -1,99 +0,0 @@
#!/usr/bin/env node
"use strict";
// Benchmark driver for the incremental hashers, comparing a single
// whole-input update against chunked updates of various chunk sizes;
// optionally also benchmarks node's built-in crypto hashes.
var crypto = require('crypto')
, ben = require('./parben')
, incr = require('../incremental')
, duration = 1000
, stringEncoding = 'binary'
, outputType = 'hex';
var program = require('commander');
// Command-line options; the single positional argument is the test
// duration in seconds (default 1s).
program
.version(require(__dirname + '/../package.json').version)
.usage('[options] [seconds=1]')
.option('-n, --no-crypto', 'do not benchmark crypto hashers')
.option('-l, --large <kilobytes>', 'large string/buffer size in kilos', 128)
.option('-o, --output [type]', 'output type')
.option('-e, --encoding [enc]', 'string encoding')
.parse(process.argv);
// Seconds -> milliseconds, coerced to an unsigned integer.
if (program.args.length > 0) duration = 1000*program.args[0]>>>0;
if (program.encoding) {
stringEncoding = program.encoding;
console.log('string encoding: %s', stringEncoding);
}
if (program.output) {
outputType = program.output;
console.log('output type: %s', outputType);
}
console.log('test duration: %d ms', duration);
// [constructor, padded display name] pairs of hashers to benchmark.
var funmatrix = [
[incr.MurmurHash, 'MurmurHash '],
[incr.MurmurHash128x86, 'MurmurHash128x86 '],
[incr.MurmurHash128x64, 'MurmurHash128x64 '],
];
if (program.crypto) {
crypto.getHashes().forEach(function(cipher) {
var pad = ' ';
funmatrix.push([
// NOTE(review): `new` on the crypto.createHash factory is redundant
// (the factory already returns the Hash object) though harmless.
function() { return new crypto.createHash(cipher); },
cipher + pad.substr(0, pad.length - cipher.length)
]);
});
}
// Fill a Buffer in place with pseudo-random bytes and return it.
function fillrandom(buffer) {
for(var i = 0; i < buffer.length; ++i)
buffer[i] = (Math.random()*0x100)|0;
return buffer;
}
// Produce a random byte-string of the given length ('binary' encoding).
function randomstring(length) {
var buffer = fillrandom(Buffer.allocUnsafe(length));
return buffer.toString('binary');
}
// Measure each hasher over a random input of `size` bytes (string or
// Buffer), feeding it in chunks of several fixed and size-relative
// chunk sizes, plus once as a single whole-input update.
function bench(size, inputStr, duration) {
var input = inputStr
? randomstring(size)
: fillrandom(Buffer.allocUnsafe(size));
funmatrix.forEach(function(args) {
var Hash = args[0], name = args[1];
[1, 3, 4, 8, 16, 17, 32, 64, 101, 128, 1009, 1024, size / 4>>>0, size / 2>>>0].forEach(function(chunksize) {
measure(inputStr ? "string" : "buffer", chunksize,
function fun(stringEncoding, outputType) {
var hash = new Hash();
for(var i = 0; i < size; i += chunksize) {
hash.update(input.slice(i, i + chunksize),stringEncoding);
}
return hash.digest(outputType);
}, name, duration, size, input);
});
measure(inputStr ? "string" : "buffer", size, function fun(stringEncoding, outputType) {
return new Hash().update(input, stringEncoding).digest(outputType);
}, name, duration, size, input);
});
}
bench(program.large*1024, true, duration);
bench(program.large*1024, false, duration);
// Calibrate the iteration count for the requested duration, run the
// benchmark and print throughput in MB/s plus the resulting digest.
// NOTE(review): call sites pass a trailing `input` argument that this
// signature deliberately ignores (the closure already captures it).
function measure(label, chunk, fun, name, duration, size) {
var padstr = ' ';
var pad = function(str,pad) { return padstr.substr(0, (pad || padstr.length) - (''+str).length) + str; };
var cb = function(){ fun(stringEncoding, outputType); };
var iters = ben.calibrate(duration, cb);
var ms = ben(iters, cb);
console.log(name + label + " (%s of %s): %s %s",
pad(chunk, size.toString().length), size,
pad((size / ms / 1000).toFixed(1) + 'MB/s'),
fun(stringEncoding, outputType));
}

View File

@ -1,103 +0,0 @@
#!/usr/bin/env node
"use strict";
var crypto = require('crypto')
, ben = require('./parben')
, hash = require('..')
, incr = require('../incremental')
, createHash = crypto.createHash
, duration = 1000
, stringEncoding = 'binary'
, outputType = 'number';
var program = require('commander');
program
  .version(require(__dirname + '/../package.json').version)
  .usage('[options] [seconds=1]')
  .option('-n, --no-crypto', 'do not benchmark crypto hashers')
  .option('-s, --small <chars>', 'small string size in chars', 80)
  .option('-l, --large <kilobytes>', 'large string/buffer size in kilos', 128)
  .option('-o, --output [type]', 'output type')
  .option('-e, --encoding [enc]', 'input string encoding')
  .parse(process.argv);
// Optional positional argument: measuring time in seconds (default 1 s).
if (program.args.length > 0) duration = 1000*program.args[0]>>>0;
if (program.encoding) {
  stringEncoding = program.encoding;
  console.log('string encoding: %s', stringEncoding);
}
if (program.output) {
  outputType = program.output;
  console.log('output type: %s', outputType);
}
console.log('test duration: %d ms', duration);
// Hashes `data` with the named node crypto digest and returns a hex string.
function cryptohasher(name, data, encoding) {
  return createHash(name).update(data, encoding).digest('hex');
}
// Wraps an incremental hasher constructor into a one-shot hashing function
// with the same (data, encoding, outputType) signature as the plain hashers.
function incremental(Hasher) {
  return function(data, encoding, outputType) {
    var hasher = new Hasher();
    hasher.update(data, encoding);
    return hasher.digest(outputType);
  };
}
// [hashFunction, paddedLabel] pairs driving the benchmark: the one-shot
// murmur hash functions plus incremental hashers wrapped by incremental().
var funmatrix = [
  [hash.murmurHash, 'murmurHash '],
  [hash.murmurHash64x86, 'murmurHash64x86 '],
  [hash.murmurHash64x64, 'murmurHash64x64 '],
  [hash.murmurHash128x86, 'murmurHash128x86 '],
  [hash.murmurHash128x64, 'murmurHash128x64 '],
  [incremental(incr.MurmurHash), 'MurmurHash '],
  [incremental(incr.MurmurHash128x86), 'MurmurHash128x86 '],
  [incremental(incr.MurmurHash128x64), 'MurmurHash128x64 ']
];
// Unless --no-crypto was given, also benchmark every digest algorithm that
// node's crypto module supports, for comparison.
if (program.crypto) {
  crypto.getHashes().forEach(function(cipher) {
    var pad = ' ';
    funmatrix.push([
      function(data, encoding) { return cryptohasher(cipher, data, encoding); },
      // Pad the label so the output lines up with the murmur hash names.
      cipher + pad.substr(0, pad.length - cipher.length)
    ]);
  });
}
// Fills the given Buffer with pseudo-random bytes (Math.random based, not
// cryptographically secure) and returns the same Buffer.
function fillrandom(buffer) {
  var idx = buffer.length;
  while (idx-- > 0) {
    buffer[idx] = Math.floor(Math.random() * 256);
  }
  return buffer;
}
// Returns a pseudo-random binary ('latin1') string of `length` characters.
function randomstring(length) {
  var bytes = fillrandom(Buffer.allocUnsafe(length));
  return bytes.toString('binary');
}
// Benchmarks every entry of `funmatrix` against one random input of `size`
// bytes, provided as a binary string (inputStr truthy) or a Buffer.
function bench(size, inputStr, duration) {
  var label = inputStr ? "string" : "buffer";
  var input = inputStr
    ? randomstring(size)
    : fillrandom(Buffer.allocUnsafe(size));
  funmatrix.forEach(function(entry) {
    measure(label, entry[0], entry[1], duration, size, input);
  });
}
// Small-string, large-string and large-buffer benchmark runs.
bench(program.small, true, duration);
bench(program.large*1024, true, duration);
bench(program.large*1024, false, duration);
// Calibrates and times fun(arg, ...) and prints throughput plus a sample
// result. `stringEncoding` and `outputType` are module-level CLI settings.
function measure(label, fun, name, duration, size, arg) {
  var cb = function(){ fun(arg, stringEncoding, outputType); };
  var iters = ben.calibrate(duration, cb);  // iterations that fit `duration` ms
  var ms = ben(iters, cb);                  // average ms per single call
  console.log(name + "(" + label + "[" + size + "]): %s %s",
    (size / ms / 1000).toFixed(4) + 'MB/s',
    fun(arg, stringEncoding, outputType));
}

View File

@ -1,156 +0,0 @@
#!/usr/bin/env node
// jshint eqnull:true
"use strict";
// Streaming murmurhash benchmark: pipes random data and a big random file
// through the hashing Transform streams, optionally in parallel.
var os = require('os')
  , fs = require('fs')
  , crypto = require('crypto')
  , stream = require('stream')
  , util = require('util')
  , parben = require('./parben').parallel
  , strm = require('../stream')
  , pipes = 1                      // hashers attached to each source stream
  , parallel = os.cpus().length    // concurrently running source streams
  ;
var Readable = stream.Readable;
// RandomStream is declared below; function declarations hoist, so wiring the
// prototype chain here is safe.
util.inherits(RandomStream, Readable);
var program = require('commander');
program
  .version(require(__dirname + '/../package.json').version)
  .usage('[options] [pipes=1]')
  .option('-n, --no-crypto', 'do not benchmark crypto hashers')
  .option('-p, --parallel <n>', 'number of parallel streams', parseInt)
  .option('-r, --random <mb>', 'size of random streams in megabytes', 64)
  .option('-f, --file <mb>', 'file size in megabytes', 2048)
  .option('-s, --small <bytes>', 'smallest random chunk', 1)
  .option('-l, --large <bytes>', 'largest random chunk', 16384)
  .parse(process.argv);
// Optional positional argument: number of hashers piped off each stream.
if (program.args.length > 0) pipes = program.args[0]>>>0;
if (program.parallel) {
  parallel = Math.max(0, program.parallel>>>0);
}
console.log('parallel streams: x%d', parallel);
console.log('parallel pipes: x%d', pipes);
console.log('smallest random chunk: %d', +program.small);
console.log(' largest random chunk: %d', +program.large);
// [streamName, paddedLabel, factoryModule] rows; each module must expose
// createHash(name, options) returning a hashing Transform stream.
var funmatrix = [
  ['murmurhash', 'MurmurHash ', strm],
  ['murmurhash128x86', 'MurmurHash128x86 ', strm],
  ['murmurhash128x64', 'MurmurHash128x64 ', strm],
];
// Unless --no-crypto was given, add every digest node's crypto supports.
if (program.crypto) {
  crypto.getHashes().forEach(function(cipher) {
    var pad = ' ';
    funmatrix.push([cipher, cipher + pad.substr(0, pad.length - cipher.length), crypto]);
  });
}
// FIFO of pending benchmark steps, executed strictly one at a time by next().
var queued = [];
// Readable that produces `options.size` pseudo-random bytes in chunks of
// minchunk..maxchunk bytes (both default 65536).
function RandomStream(options) {
  this.size = +options.size;      // bytes still to be generated
  this.minchunk = null == options.minchunk ? 65536 : options.minchunk;
  var maxchunk = null == options.maxchunk ? 65536 : options.maxchunk;
  this.topchunk = maxchunk - this.minchunk + 1;  // random span for chunk sizes
  this.pending = 0;               // bytes requested from crypto, not pushed yet
  Readable.call(this, options);
}
// Produces one pseudo-random chunk per call and ends the stream once all
// requested bytes have been generated and delivered.
RandomStream.prototype._read = function() {
  var self = this;
  // Chunk size in [minchunk, minchunk + topchunk), capped by remaining bytes.
  var chunksize = Math.min(this.size, this.topchunk > 1 ? (Math.random()*this.topchunk|0) + this.minchunk : this.minchunk);
  if (!chunksize) {
    // Nothing left to generate. If nothing is in flight either, end the
    // stream here - otherwise the in-flight crypto callback below ends it.
    // This also ends a stream constructed with size 0, which previously
    // never emitted EOF (the bare `return` left it hanging forever).
    if (!this.pending && !this.size) this.push(null);
    return;
  }
  this.size -= chunksize;
  this.pending += chunksize;
  crypto.randomBytes(chunksize, function(err, data) {
    if (err) throw err;
    var okmore = self.push(data);
    self.pending -= chunksize;
    if (!self.pending && !self.size) {
      self.push(null);   // all bytes generated and delivered: signal EOF
    } else if (okmore) {
      self._read();      // downstream still wants data - keep producing
    }
  });
};
// Queued step: hash `size` random bytes arriving in random-sized chunks.
// The single RandomStream is piped into `pipes` hash streams; pipe() to
// multiple destinations delivers every chunk to each of them.
function benchRandomChunks(fun, name, hash, size, maxchunk, minchunk) {
  measure('rndchunks', name, size, parben(parallel, parallel, function(next) {
    var chunks = new RandomStream({maxchunk: maxchunk, minchunk: minchunk, size: size});
    for(var i = 0; i < pipes; ++i) {
      chunks.pipe(hash.createHash(fun, {encoding: 'hex'})).once('readable', cb);
    }
    // Signals completion once every pipe's hash stream became readable.
    function cb() { if (!--i) next(); }
  }));
}
// Ensures `file` is at least `size` bytes long, appending random data if it
// is shorter (or missing), then invokes `next`.
function createRandomFile(file, size, next) {
  fs.stat(file, function(err, stat) {
    var existing = err ? 0 : stat.size;
    if (existing >= size) return next();
    new RandomStream({size: size - existing})
      .pipe(fs.createWriteStream(file, {start: existing}))
      .on('finish', next);
  });
}
// Queued step: stream an on-disk file of `size` bytes through the hashers.
function benchBigfile(file, fun, name, hash, size) {
  measure('bigfile', name, size, parben(parallel, parallel, function(next) {
    var fstrm = fs.createReadStream(file, {encoding:null});  // raw bytes
    for(var i = 0; i < pipes; ++i) {
      fstrm.pipe(hash.createHash(fun, {encoding: 'hex'})).once('readable', cb);
    }
    // Signals completion once every pipe's hash stream became readable.
    function cb() { if (!--i) next(); }
  }));
}
// Queued step: hash `size` random bytes arriving in default-sized (64 KiB)
// chunks from a RandomStream.
function benchRandomStream(fun, name, hash, size) {
  measure('bigchunks', name, size, parben(parallel, parallel, function(next) {
    var rstrm = new RandomStream({size: size});
    for(var i = 0; i < pipes; ++i) {
      rstrm.pipe(hash.createHash(fun, {encoding: 'hex'})).once('readable', cb);
    }
    // Signals completion once every pipe's hash stream became readable.
    function cb() { if (!--i) next(); }
  }));
}
// Build the work queue: ensure the big random file exists, then benchmark
// file streaming, random-chunk streaming and big-chunk streaming for every
// hasher. Steps run strictly one after another via next().
queue(createRandomFile, 'tmp-bigone.tmp', +program.file*1024*1024, next);
funmatrix.forEach(function(row) {
  queue(benchBigfile, 'tmp-bigone.tmp', row[0], row[1], row[2], program.file*1024*1024);
});
funmatrix.forEach(function(row) {
  queue(benchRandomChunks, row[0], row[1], row[2], +program.random*1024*1024, +program.large, +program.small);
});
funmatrix.forEach(function(row) {
  queue(benchRandomStream, row[0], row[1], row[2], +program.random*1024*1024);
});
next();  // kick off the first queued step
// Prints single-stream and averaged throughput once the parallel run
// resolves, then advances the queue.
function measure(label, name, size, promise) {
  promise.then(function(res) {
    console.log(name + "(" + label + "[" + size + "]): single: %s avg: %s",
      (size / res.single / 1000).toFixed(4) + 'MB/s',
      (size / res.avg / 1000).toFixed(4) + 'MB/s');
    next();
  });
}
// Appends a [fn, ...args] task to the work queue; next() executes it later.
function queue() {
  queued.push(Array.prototype.slice.call(arguments));
}
// Pops the next [fn, ...args] task off the queue and schedules it on the
// event loop; does nothing when the queue is empty.
function next() {
  if (!queued.length) return;
  var task = queued.shift();
  setImmediate.apply(null, task);
}

View File

@ -1,78 +0,0 @@
"use strict";
var Promise = require('bluebird');
var now = Date.now;
// ben(times?, cb) - synchronously runs cb `times` times (default 10000) and
// returns the average elapsed milliseconds per call.
var ben = module.exports = function(times, cb) {
  if (typeof times === 'function') {
    // Called as ben(cb): fall back to the default iteration count.
    cb = times;
    times = 10000;
  }
  var started = now();
  for (var n = times; n-- > 0; ) {
    cb();
  }
  return (now() - started) / times;
};
ben.sync = ben;  // alias, for symmetry with the parallel helpers
// Grows the iteration count until a batch of cb() calls takes at least
// 100 ms, then returns the iteration count expected to fill `duration` ms.
ben.calibrate = function(duration, cb) {
  var times = 5;
  var elapsed = 100;
  while (true) {
    // Scale up proportionally to how far below 100 ms the last batch was
    // (double when the batch was immeasurably fast).
    times = (elapsed > 0 ? (100/elapsed)*times+times : 2*times) >>> 0;
    var started = now();
    for (var n = times; n-- > 0; ) {
      cb();
    }
    elapsed = now() - started;
    if (elapsed >= 100) break;
  }
  return Math.max(1, duration * times / elapsed) >>> 0;
};
// Runs `times` invocations of cb with at most `parallel` in flight at once
// and resolves with timing stats:
//   elapsed - summed per-call wall time [ms]
//   single  - elapsed / times (average per call, ignoring overlap)
//   wall    - total wall-clock time [ms]
//   avg     - wall / times (average per call, including overlap)
// cb receives a completion callback it must invoke exactly once per call.
var parben = ben.parallel = function(times, parallel, cb) {
  return new Promise(function(resolve) {
    if (times < parallel) parallel = times;
    var pending = times>>>0;   // calls not yet completed
    var start = now();
    var elapsed = 0;
    function spawn() {
      var t = now();
      cb(function fn () {
        var fin = now();
        elapsed += fin - t;
        if (--pending === 0) {
          resolve({elapsed: elapsed, single: elapsed / times,
            wall: (fin - start), avg: (fin - start) / times});
        }
        else if (pending >= parallel) {
          // More work than running slots: reuse this slot for the next call.
          t = now();
          cb(fn);
        }
      });
    }
    // Prime `parallel` concurrent slots.
    for(var i = parallel; i-- > 0; spawn());
  });
};
// Asynchronous counterpart of ben.calibrate: grows the iteration count until
// one parallel run takes at least 100 ms of wall time, then resolves with
// the iteration count expected to fill `duration` ms.
ben.parallel.calibrate = function(duration, parallel, cb) {
  return new Promise(function(resolve) {
    var times = 5*parallel;
    var elapsed = 100;
    var calib = function() {
      // Scale up proportionally to how far below 100 ms the last run was.
      times = elapsed > 0 ? (100/elapsed)*times+times : 2*times;
      parben(times, parallel, cb).then(function(res) {
        elapsed = res.wall;
        if (elapsed < 100) calib();  // still too fast - try a larger batch
        else {
          resolve(Math.max(1, duration * times / elapsed) >>> 0);
        }
      });
    };
    calib();
  });
};

View File

@ -1,88 +0,0 @@
{
'targets': [
{
'target_name': 'murmurhash',
'sources': [
'src/murmurhash/MurmurHash2.cpp',
'src/murmurhash/PMurHash.cpp',
'src/murmurhash/PMurHash128.cpp',
'src/nodemurmurhash.cc'
],
'include_dirs': [
"<!(node -e \"require('nan')\")",
'src/murmurhash',
'src'
],
'defines': [
# 'NODE_MURMURHASH_TEST_BYTESWAP=1',
# 'NODE_MURMURHASH_TEST_ALIGNED=1',
'NODE_MURMURHASH_KEY_BUFFER_SIZE=1024'
],
'conditions': [
['target_arch!="x64"', {
'defines': [
'NODE_MURMURHASH_DEFAULT_32BIT',
]
}],
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'AdditionalOptions': ['/EHsc'], # pre 1.0 node compiler complaining
'DisableSpecificWarnings': ['4506']
}
}
}]
]
},
{
'target_name': 'murmurhashincremental',
'sources': [
'src/murmurhash/PMurHash.cpp',
'src/murmurhash/PMurHash128.cpp',
'src/incremental/hasher.cc'
],
'include_dirs': [
"<!(node -e \"require('nan')\")",
'src/murmurhash',
'src/incremental',
'src'
],
'defines': [
# 'NODE_MURMURHASH_TEST_BYTESWAP=1',
# 'NODE_MURMURHASH_TEST_ALIGNED=1',
'NODE_MURMURHASH_KEY_BUFFER_SIZE=1024'
],
'conditions': [
['target_arch!="x64"', {
'defines': [
'NODE_MURMURHASH_DEFAULT_32BIT',
]
}],
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'AdditionalOptions': ['/EHsc'], # pre 1.0 node compiler complaining
'DisableSpecificWarnings': ['4506']
}
}
}]
]
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "murmurhash", "murmurhashincremental" ],
"copies": [
{
"files": [
"<(PRODUCT_DIR)/murmurhash.node",
"<(PRODUCT_DIR)/murmurhashincremental.node"
],
"destination": "<(module_path)"
}
]
}
]
}

View File

@ -1,17 +0,0 @@
#!/bin/sh
# Downloads the upstream MurmurHash reference sources from the smhasher
# repository into src/murmurhash, skipping any file that already exists.
mkdir -p src/murmurhash
cd src/murmurhash
for file in \
  MurmurHash2.cpp \
  MurmurHash2.h \
  MurmurHash3.cpp \
  MurmurHash3.h \
  PMurHash.c \
  PMurHash.h
do
  # curl -f: fail on HTTP errors instead of saving the error page as source
  if [ ! -e "$file" ]
  then curl "https://raw.githubusercontent.com/aappleby/smhasher/master/src/$file" -O -f
  else echo "$file already exists, skipping"
  fi
done

View File

@ -1,251 +0,0 @@
/**
* `murmurhash-native/incremental` module.
*
* Example:
*
* ```ts
* import { MurmurHash128x64 } from "murmurhash-native/incremental"
*
* let hasher = new MurmurHash128x64(42)
* hasher.update("hash ")
* hasher.update("me!")
* console.log(hasher.digest("hex"))
* ```
*
* This module hosts native implementations of incremental murmur hashes.
*
* @module incremental
*/
/***/
import { Encoding, OutputType } from "./index";
export { Encoding, OutputType };
/** An endianness type for the murmur hash incremental utilities */
export type Endianness = "BE"|"LE"|"platform";
/** A common interface to all of the murmur hash incremental utilities. */
export interface IMurHasher {
/** Size in bytes of the serialized hasher. */
readonly SERIAL_BYTE_LENGTH: number;
/**
* Copies the internal state onto the target utility.
*
* This method does not alter target endianness.
*
* @param target a different instance of a MurmurHash utility of the same type.
* @returns target.
*/
copy(target: IMurHasher): IMurHasher;
/**
* Generates the murmur hash of all of the data provided so far.
*
* The order of bytes written to a Buffer or encoded string depends on
* endianness property.
*
* @param output a Buffer object to write hash bytes to; the same object will be returned.
* @param offset start writing into the output at offset byte;
* negative offset starts from the end of the output buffer.
* @param length a number of bytes to write from calculated hash;
* negative length starts from the end of the hash;
* if absolute value of length is larger than the size of a calculated
* hash, bytes are written only up to the hash size.
* @returns murmur hash.
*/
digest(output: Buffer, offset?: number, length?: number): Buffer;
/**
* Generates the murmur hash of all of the data provided so far.
*
* If outputType is not provided a new Buffer instance is returned.
*
* The order of bytes written to a Buffer or encoded string depends on
* endianness property.
*
* @param outputType indicates the form and encoding of the returned hash.
* @returns murmur hash.
*/
digest(outputType?: OutputType): number|string|Buffer;
/**
* Serializes the internal state of the murmur hash utility instance
*
* The returned type depends on the implementation.
**/
toJSON(): any;
/**
* Serializes the internal state of the murmur hash utility instance
* into the provided Buffer.
*
* When output has not enough space for the serialized data
* at the given offset it throws an Error. You may consult the required
* byte length reading constant: SERIAL_BYTE_LENGTH
*
* @param output a Buffer to write serialized state to.
* @param offset offset at output.
*/
serialize(output: Buffer, offset?: number): Buffer;
/**
* Serializes the internal state of the murmur hash utility instance.
*
* The serial is generated as a base64 encoded string.
*/
serialize(): string;
/**
* Updates an internal state with the given data asynchronously.
*
* If data is a Buffer then encoding is being ignored.
*
* The hash will be updated asynchronously using libuv worker queue.
*
* @param data a chunk of data to calculate hash from.
* @param encoding of the data provided as a string.
* @param callback will be called when asynchronous operation completes.
*/
update(data: string|Buffer, encoding: Encoding, callback: (err: Error) => void): void;
/**
* Updates an internal state with the given data asynchronously.
*
* If the data is a string, an encoding of "utf8" is assumed.
*
* The hash will be updated asynchronously using libuv worker queue.
*
* @param data a chunk of data to calculate hash from.
* @param callback will be called when asynchronous operation completes.
*/
update(data: string|Buffer, callback: (err: Error) => void): void;
/**
* Updates an internal state with the given data.
*
* If the data is a string and encoding is not explicitly provided,
* an encoding of "utf8" is being assumed.
*
* @param data a chunk of data to calculate hash from.
* @param encoding of the data provided as a string.
*/
update(data: string|Buffer, encoding?: Encoding): this;
/** Digest byte order. */
endianness: Endianness;
/**
* True if asynchronous update is in progress.
*
* When this property is true, trying to update, calculate digest, serialize or copy state will result in
* an error thrown from the related method.
*/
readonly isBusy: boolean;
/** The total (modulo 2^32) bytes of data provided so far. */
readonly total: number;
}
/** A factory interface for murmurhash incremental utility */
export interface IMurHasherConstructor {
/** Size in bytes of the serialized hasher. */
readonly SERIAL_BYTE_LENGTH: number;
/**
* Creates MurmurHash utility.
*
* The default seed is 0 and the endianness is set to "BE".
*/
new(): IMurHasher;
/**
* Creates MurmurHash utility.
*
* If not provided, the endianness is set to "BE".
*
* @param seed initial murmur hash seed as an unsigned 32-bit integer.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is "BE".
*/
new(seed: number, endianness?: Endianness): IMurHasher;
/**
* Creates MurmurHash utility.
*
* The initial state is taken from the serialized state. Throws an error if serial is incorrect.
*
* @param serial serialized state of the same MurmurHash type.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is "BE".
*/
new(serial: string|Buffer, endianness?: Endianness): IMurHasher;
/**
* Creates MurmurHash utility.
*
* The initial state is taken from another instance of murmur hash utility.
* Throws an error if incompatible hash is provided.
*
* @param hash an instance of another MurmurHash.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is hash.endianness.
*/
new(hash: IMurHasher, endianness?: Endianness): IMurHasher;
}
/** @hidden An abstract base class for the murmurhash incremental utility. */
declare abstract class IMurHasherBase implements IMurHasher {
/** Size in bytes of the serialized hasher. */
static readonly SERIAL_BYTE_LENGTH: number;
readonly SERIAL_BYTE_LENGTH: number;
/**
* Creates MurmurHash utility.
*
* The default seed is 0 and the endianness is set to "BE".
*/
constructor();
/**
* Creates MurmurHash utility.
*
* If not provided, the endianness is set to "BE".
*
* @param seed initial murmur hash seed as an unsigned 32-bit integer.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is "BE".
*/
constructor(seed: number, endianness?: Endianness);
/**
* Creates MurmurHash utility.
*
* The initial state is taken from the serialized state. Throws an error if serial is incorrect.
*
* @param serial serialized state of the same MurmurHash type.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is "BE".
*/
constructor(serial: string|Buffer, endianness?: Endianness);
/**
* Creates MurmurHash utility.
*
* The initial state is taken from another instance of murmur hash utility.
* Throws an error if incompatible hash is provided.
*
* @param hash an instance of another MurmurHash.
* @param endianness digest byte order: "BE", "LE" or "platform", optional. Default is hash.endianness.
*/
constructor(hash: IMurHasher, endianness?: Endianness);
copy(target: IMurHasher): IMurHasher;
digest(output: Buffer, offset?: number, length?: number): Buffer;
digest(outputType?: OutputType): number|string|Buffer;
toJSON(): string;
serialize(output: Buffer, offset?: number): Buffer;
serialize(): string;
update(data: string|Buffer, encoding: Encoding, callback: (err: Error) => void): void;
update(data: string|Buffer, callback: (err: Error) => void): void;
update(data: string|Buffer, encoding?: Encoding): this;
endianness: Endianness;
readonly isBusy: boolean;
readonly total: number;
}
/** A murmurhash32 implementation of the murmur hash incremental utility */
export class MurmurHash extends IMurHasherBase {}
/** A murmurhash128 (os arch) implementation of the murmur hash incremental utility */
export class MurmurHash128 extends IMurHasherBase {
digest(output: Buffer, offset?: number, length?: number): Buffer;
digest(outputType?: OutputType): string|Buffer;
}
/** A murmurhash128x64 implementation of the murmur hash incremental utility */
export class MurmurHash128x64 extends IMurHasherBase {
digest(output: Buffer, offset?: number, length?: number): Buffer;
digest(outputType?: OutputType): string|Buffer;
}
/** A murmurhash128x86 implementation of the murmur hash incremental utility */
export class MurmurHash128x86 extends IMurHasherBase {
digest(output: Buffer, offset?: number, length?: number): Buffer;
digest(outputType?: OutputType): string|Buffer;
}

View File

@ -1,7 +0,0 @@
/* expose pure incremental api */
"use strict";
// Locates the prebuilt (or locally built) native addon via node-pre-gyp and
// exports the incremental-hasher binding that sits next to it.
var binary = require('node-pre-gyp');
var path = require('path');
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
module.exports = require(path.join(path.dirname(binding_path), 'murmurhashincremental.node'));

View File

@ -1,150 +0,0 @@
/**
* `murmurhash-native` module.
*
* Example:
*
* ```ts
* import { murmurHash128x64 } from "murmurhash-native"
*
* console.log(murmurHash128x64("hash me!", 42))
* ```
*
* @module murmurhash-native
*/
/**
* Indicates the form and encoding of the resulting hash and can be one of:
*
* - "base64" - base64 string
* - "binary" - binary string
* - "buffer" - a new Buffer object
* - "hex" - hexadecimal string
* - "number" - for 32-bit murmur hash an unsigned 32-bit integer, other hashes - hexadecimal string
*/
export type OutputType = "base64"|"binary"|"buffer"|"hex"|"number";
/**
* The expected encoding of the provided data as a string.
*/
export type Encoding = "ascii"|"base64"|"binary"|"hex"|"ucs-2"|"ucs2"|"utf-16le"|"utf-8"|"utf16le"|"utf8";
/** An interface for murmurhash functions. */
export interface MurmurHashFn {
(data: string|Buffer): number|string;
(data: string|Buffer, callback: (err: Error, res: number|string) => void): void;
(data: string|Buffer, output: Buffer, offset?: number, length?: number): Buffer;
(data: string|Buffer, output: Buffer, callback: (err: Error, res: Buffer) => void): void;
(data: string|Buffer, output: Buffer, offset: number, callback: (err: Error, res: Buffer) => void): void;
(data: string|Buffer, output: Buffer, offset: number, length: number, callback: (err: Error, res: Buffer) => void): void;
(data: string, encOrOutType: Encoding|OutputType, seed?: number): number|string|Buffer;
(data: string, encOrOutType: Encoding|OutputType, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: string, encOrOutType: Encoding|OutputType, seed: number, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: Buffer, outputType: OutputType, seed?: number): number|string|Buffer;
(data: Buffer, outputType: OutputType, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: Buffer, outputType: OutputType, seed: number, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: string|Buffer, seed: number): number|string;
(data: string|Buffer, seed: number, callback: (err: Error, res: number|string) => void): void;
(data: string|Buffer, seed: number, output: Buffer, offset?: number, length?: number): Buffer;
(data: string|Buffer, seed: number, output: Buffer, callback: (err: Error, res: Buffer) => void): void;
(data: string|Buffer, seed: number, output: Buffer, offset: number, callback: (err: Error, res: Buffer) => void): void;
(data: string|Buffer, seed: number, output: Buffer, offset: number, length: number, callback: (err: Error, res: Buffer) => void): void;
(data: string|Buffer, seed: number, outputType: OutputType): number|string|Buffer;
(data: string|Buffer, seed: number, outputType: OutputType, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: string|Buffer, encoding: Encoding, outputType: OutputType): number|string|Buffer;
(data: string|Buffer, encoding: Encoding, outputType: OutputType, callback: (err: Error, res: number|string|Buffer) => void): void;
(data: string, encoding: Encoding, output: Buffer, offset?: number, length?: number): Buffer;
(data: string, encoding: Encoding, output: Buffer, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, output: Buffer, offset: number, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, output: Buffer, offset: number, length: number, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, seed: number): number|string;
(data: string, encoding: Encoding, seed: number, callback: (err: Error, res: number|string) => void): void;
(data: string, encoding: Encoding, seed: number, output: Buffer, offset?: number, length?: number): Buffer;
(data: string, encoding: Encoding, seed: number, output: Buffer, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, seed: number, output: Buffer, offset: number, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, seed: number, output: Buffer, offset: number, length: number, callback: (err: Error, res: Buffer) => void): void;
(data: string, encoding: Encoding, seed: number, outputType: OutputType): number|string|Buffer;
(data: string, encoding: Encoding, seed: number, outputType: OutputType, callback: (err: Error, res: number|string|Buffer) => void): void;
}
/**
* An interface for murmurhash 32-bit functions.
*
* Functions of this type produce hashes as an unsigned 32-bit integers by default
* and for the "number" output type.
*/
export interface MurmurHashFnI extends MurmurHashFn {
(data: string|Buffer): number;
(data: string|Buffer, callback: (err: Error, res: number) => void): void;
(data: string|Buffer, seed: number): number;
(data: string|Buffer, seed: number, callback: (err: Error, res: number) => void): void;
(data: string, encoding: Encoding, seed: number): number;
(data: string, encoding: Encoding, seed: number, callback: (err: Error, res: number) => void): void;
}
/**
* An interface for murmurhash 64/128-bit functions.
*
* Functions of this type produce hashes as a hexadecimal string by default
* and for the "number" output type.
*/
export interface MurmurHashFnH extends MurmurHashFn {
(data: string|Buffer): string;
(data: string|Buffer, callback: (err: Error, res: string) => void): void;
(data: string, encOrOutType: Encoding|OutputType, seed?: number): string|Buffer;
(data: string, encOrOutType: Encoding|OutputType, callback: (err: Error, res: string|Buffer) => void): void;
(data: string, encOrOutType: Encoding|OutputType, seed: number, callback: (err: Error, res: string|Buffer) => void): void;
(data: Buffer, outputType: OutputType, seed?: number): string|Buffer;
(data: Buffer, outputType: OutputType, callback: (err: Error, res: string|Buffer) => void): void;
(data: Buffer, outputType: OutputType, seed: number, callback: (err: Error, res: string|Buffer) => void): void;
(data: string|Buffer, seed: number): string;
(data: string|Buffer, seed: number, callback: (err: Error, res: string) => void): void;
(data: string|Buffer, seed: number, outputType: OutputType): string|Buffer;
(data: string|Buffer, seed: number, outputType: OutputType, callback: (err: Error, res: string|Buffer) => void): void;
(data: string|Buffer, encoding: Encoding, outputType: OutputType): string|Buffer;
(data: string|Buffer, encoding: Encoding, outputType: OutputType, callback: (err: Error, res: string|Buffer) => void): void;
(data: string, encoding: Encoding, seed: number): string;
(data: string, encoding: Encoding, seed: number, callback: (err: Error, res: string) => void): void;
(data: string, encoding: Encoding, seed: number, outputType: OutputType): string|Buffer;
(data: string, encoding: Encoding, seed: number, outputType: OutputType, callback: (err: Error, res: string|Buffer) => void): void;
}
export const murmurHash: MurmurHashFnI;
export const murmurHash32: MurmurHashFnI;
export const murmurHash64: MurmurHashFnH;
export const murmurHash64x64: MurmurHashFnH;
export const murmurHash64x86: MurmurHashFnH;
export const murmurHash128: MurmurHashFnH;
export const murmurHash128x64: MurmurHashFnH;
export const murmurHash128x86: MurmurHashFnH;
export namespace BE {
export const murmurHash: MurmurHashFnI;
export const murmurHash32: MurmurHashFnI;
export const murmurHash64: MurmurHashFnH;
export const murmurHash64x64: MurmurHashFnH;
export const murmurHash64x86: MurmurHashFnH;
export const murmurHash128: MurmurHashFnH;
export const murmurHash128x64: MurmurHashFnH;
export const murmurHash128x86: MurmurHashFnH;
}
export namespace LE {
export const murmurHash: MurmurHashFnI;
export const murmurHash32: MurmurHashFnI;
export const murmurHash64: MurmurHashFnH;
export const murmurHash64x64: MurmurHashFnH;
export const murmurHash64x86: MurmurHashFnH;
export const murmurHash128: MurmurHashFnH;
export const murmurHash128x64: MurmurHashFnH;
export const murmurHash128x86: MurmurHashFnH;
}
export namespace platform {
export const murmurHash: MurmurHashFnI;
export const murmurHash32: MurmurHashFnI;
export const murmurHash64: MurmurHashFnH;
export const murmurHash64x64: MurmurHashFnH;
export const murmurHash64x86: MurmurHashFnH;
export const murmurHash128: MurmurHashFnH;
export const murmurHash128x64: MurmurHashFnH;
export const murmurHash128x86: MurmurHashFnH;
}

View File

@ -1,6 +0,0 @@
"use strict";
var binary = require('node-pre-gyp');
var path = require('path');
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
module.exports = require(binding_path);

View File

@ -1,42 +0,0 @@
/* from nodejs lib/internal/streams/lazy_transform.js */
// LazyTransform is a special type of Transform stream that is lazily loaded.
// This is used for performance with bi-API-ship: when two APIs are available
// for the stream, one conventional and one non-conventional.
'use strict';
var stream = require('stream');
var util = require('util');
module.exports = LazyTransform;
// Stashes the constructor options only; the real stream.Transform setup is
// deferred until one of the lazy state properties below is first accessed.
function LazyTransform(options) {
  this._options = options;
}
util.inherits(LazyTransform, stream.Transform);
// Install lazy accessors for the internal stream-state properties: the first
// read of any of them runs the real Transform constructor, whose own
// assignments flow through the setter below and replace these prototype
// accessors with plain own properties on the instance.
[
  '_readableState',
  '_writableState',
  '_transformState'
].forEach(function(prop, i, props) {
  Object.defineProperty(LazyTransform.prototype, prop, {
    get: function() {
      // First touch: initialize the underlying Transform with the stored
      // options; Transform's constructor assigns the state objects (hitting
      // the setter below), so the re-read at the end resolves to the
      // instance's freshly created own property.
      stream.Transform.call(this, this._options);
      this._writableState.decodeStrings = false;
      this._writableState.defaultEncoding = 'binary';
      return this[prop];
    },
    set: function(val) {
      // Shadow the lazy prototype accessor with a plain data property.
      Object.defineProperty(this, prop, {
        value: val,
        enumerable: true,
        configurable: true,
        writable: true
      });
    },
    configurable: true,
    enumerable: true
  });
});

View File

@ -1,64 +0,0 @@
{
"name": "murmurhash-native",
"description": "MurmurHash (32,64,128)bit native bindings for nodejs",
"keywords": [
"murmurhash",
"murmurhash3",
"murmurhash128",
"murmurhash32",
"murmurhash2",
"murmurhash64",
"progressive hash",
"PMurHash",
"PMurHash128",
"hash"
],
"version": "3.5.0",
"license": "MIT",
"author": "Rafał Michalski <royal@yeondir.com>",
"contributors": [],
"homepage": "http://github.com/royaltm/node-murmurhash-native",
"repository": {
"type": "git",
"url": "git+https://github.com/royaltm/node-murmurhash-native.git"
},
"scripts": {
"install": "node-pre-gyp install --fallback-to-build",
"test": "tap -C -Rclassic ./test/test*.js ./test/types/test*.js",
"test-ts": "tap -C -Rclassic ./test/types/test*.js",
"test-gc": "tap -Rspec --node-arg=--expose-gc ./test/persistent/test.gc.*.js",
"bench": "sh bench/bench.all.sh",
"rebuild": "node-pre-gyp rebuild",
"tsc": "tsc -p test/types",
"typedoc": "typedoc --tsconfig test/types/tsconfig.json index.d.ts promisify.d.ts incremental.d.ts stream.d.ts"
},
"binary": {
"module_name": "murmurhash",
"module_path": "./lib/{configuration}/",
"host": "https://github.com/royaltm/node-murmurhash-native/releases/download/",
"remote_path": "v{version}"
},
"main": "index.js",
"types": "index.d.ts",
"typeScriptVersion": "2.0",
"engines": {
"node": ">=6"
},
"dependencies": {
"nan": "^2.14.1",
"node-pre-gyp": "^0.17.0"
},
"devDependencies": {
"@types/bluebird": "^3.5.30",
"@types/node": "^13.13.2",
"bluebird": "^3.7.2",
"commander": "^5.0.0",
"murmurhash3js": "^3.0.1",
"node-pre-gyp-github": "1.4.3",
"tap": "^14.10.7",
"typedoc": "^0.17.4",
"typedoc-plugin-external-module-name": "^3.0.0",
"typedoc-plugin-sourcefile-url": "^1.0.6",
"typescript": "^3.8.3"
}
}

View File

@ -1,77 +0,0 @@
/**
* `murmurhash-native/promisify` module.
*
* Example:
*
* ```ts
* import * as promisify from "murmurhash-native/promisify"
*
* const { murmurHash128x64Async } = promisify()
* murmurHash128x64Async("hash me!", 42).then(hash => console.log(hash))
* ```
*
* @module promisify
*/
/***/
import { Encoding, OutputType } from "./index"
declare namespace promisify {
    /**
     * An interface for promisified murmurhash functions.
     *
     * Each call signature mirrors a callback-style overload of the native
     * murmurHash functions, with the trailing callback replaced by the
     * returned promise.
     */
    export interface MurmurHashFnAsync {
        (data: string|Buffer): PromiseLike<number|string>;
        (data: string|Buffer, output: Buffer, offset?: number, length?: number): PromiseLike<Buffer>;
        (data: string, encOrOutType: Encoding|OutputType, seed?: number): PromiseLike<number|string|Buffer>;
        (data: Buffer, outputType: OutputType, seed?: number): PromiseLike<number|string|Buffer>;
        (data: string|Buffer, seed: number): PromiseLike<number|string>;
        (data: string|Buffer, seed: number, output: Buffer, offset?: number, length?: number): PromiseLike<Buffer>;
        (data: string|Buffer, seed: number, outputType: OutputType): PromiseLike<number|string|Buffer>;
        (data: string|Buffer, encoding: Encoding, outputType: OutputType): PromiseLike<number|string|Buffer>;
        (data: string, encoding: Encoding, output: Buffer, offset?: number, length?: number): PromiseLike<Buffer>;
        (data: string, encoding: Encoding, seed: number): PromiseLike<number|string>;
        (data: string, encoding: Encoding, seed: number, output: Buffer, offset?: number, length?: number): PromiseLike<Buffer>;
        (data: string, encoding: Encoding, seed: number, outputType: OutputType): PromiseLike<number|string|Buffer>;
    }
    /** An interface for promisified murmurhash 32-bit functions (narrows results to number). */
    export interface MurmurHashFnAsyncI extends MurmurHashFnAsync {
        (data: string|Buffer): PromiseLike<number>;
        (data: string|Buffer, seed: number): PromiseLike<number>;
        (data: string, encoding: Encoding, seed: number): PromiseLike<number>;
    }
    /** An interface for promisified murmurhash 64/128-bit functions (narrows results to string). */
    export interface MurmurHashFnAsyncH extends MurmurHashFnAsync {
        (data: string|Buffer): PromiseLike<string>;
        (data: string, encOrOutType: Encoding|OutputType, seed?: number): PromiseLike<string|Buffer>;
        (data: Buffer, outputType: OutputType, seed?: number): PromiseLike<string|Buffer>;
        (data: string|Buffer, seed: number): PromiseLike<string>;
        (data: string|Buffer, seed: number, outputType: OutputType): PromiseLike<string|Buffer>;
        (data: string|Buffer, encoding: Encoding, outputType: OutputType): PromiseLike<string|Buffer>;
        (data: string, encoding: Encoding, seed: number): PromiseLike<string>;
        (data: string, encoding: Encoding, seed: number, outputType: OutputType): PromiseLike<string|Buffer>;
    }
    /** The set of promisified hash functions exposed for one byte order. */
    export interface MurmurHashAsyncNs {
        readonly murmurHashAsync: MurmurHashFnAsyncI;
        readonly murmurHash32Async: MurmurHashFnAsyncI;
        readonly murmurHash64x64Async: MurmurHashFnAsyncH;
        readonly murmurHash64x86Async: MurmurHashFnAsyncH;
        readonly murmurHash128x64Async: MurmurHashFnAsyncH;
        readonly murmurHash128x86Async: MurmurHashFnAsyncH;
        readonly murmurHash64Async: MurmurHashFnAsyncH;
        readonly murmurHash128Async: MurmurHashFnAsyncH;
    }
    /** Root namespace: default functions plus per-byte-order sub-namespaces. */
    export interface MurmurHashAsyncNsRoot extends MurmurHashAsyncNs {
        readonly BE: MurmurHashAsyncNs;
        readonly LE: MurmurHashAsyncNs;
        readonly platform: MurmurHashAsyncNs;
    }
}
/**
 * Returns all promisified murmur hash functions in their corresponding namespaces.
 *
 * @param promise optional Promise constructor; when omitted the global
 *                Promise implementation is used
 * @returns the root namespace of `<name>Async` wrappers mirroring the
 *          layout of the native murmurhash module
 */
declare function promisify(promise?: PromiseConstructorLike): promisify.MurmurHashAsyncNsRoot;
export = promisify;

View File

@ -1,53 +0,0 @@
// jshint multistr:true, evil:true
"use strict";
var murmurhash = require('./index');
module.exports = function(promise) {
if (!promise) {
promise = global.Promise;
}
if ('function' !== typeof promise) {
throw new Error("Promise constructor required");
}
var promisify = function(object) {
var value, hash = {};
for(var name in object) {
if (object.hasOwnProperty(name)) {
if (!!(value = object[name])) {
switch(typeof value) {
case 'function':
hash[name + 'Async'] = wrap(promise, value);
break;
case 'object':
hash[name] = promisify(value);
break;
}
}
}
}
return hash;
};
return promisify(murmurhash);
};
/**
 * Converts a node-style callback function into one returning a promise.
 *
 * The previous implementation built the wrapper via `new Function` with a
 * string body (eval-like: slower to create, blocked under CSP, opaque to
 * linters) solely to give the wrapper a dynamic name. A plain closure plus
 * `Object.defineProperty` achieves the same `<fn.name>Async` function name
 * without code generation.
 *
 * @param {Function} promise - Promise constructor used to build the result
 * @param {Function} fn - function taking a trailing callback(err, result)
 * @return {Function} wrapper forwarding its arguments and `this` to fn
 */
function wrap(promise, fn) {
  var wrapped = function() {
    var cb;
    var args = Array.prototype.slice.call(arguments);
    var result = new promise(function(resolve, reject) {
      cb = function(err, res) {
        if (err) reject(err); else {
          resolve(res);
        }
      };
    });
    // Append the callback and invoke fn OUTSIDE the executor, matching the
    // original behavior: a synchronous throw from fn propagates to the
    // caller instead of rejecting the promise.
    args.push(cb);
    fn.apply(this, args);
    return result;
  };
  // Preserve the dynamically derived name for stack traces/introspection.
  Object.defineProperty(wrapped, 'name', {
    value: fn.name + 'Async',
    configurable: true
  });
  return wrapped;
}

View File

@ -1,35 +0,0 @@
#if !defined(ASYNCWORKER_HEADER)
#define ASYNCWORKER_HEADER
namespace MurmurHash {
using v8::Local;
using v8::Value;
using v8::String;
using v8::Uint32;
// Libuv worker that computes a one-shot murmur hash off the JS thread and
// delivers the result (or a TypeError) through the supplied callback.
// Template parameters select the hash routine, the digest word type and
// word count, and the byte order used when rendering the digest.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
class MurmurHashWorker : public Nan::AsyncWorker {
  public:
    // Bare worker: no input captured, output type unknown, seed 0.
    NAN_INLINE MurmurHashWorker(Nan::Callback *callback);
    // Fully initialized worker: captures the input key (pinning Buffer
    // input with a persistent handle), requested output type and seed.
    NAN_INLINE MurmurHashWorker(Nan::Callback *callback, const OutputType outputType, const uint32_t seed,
                  Local<Value> key, const enum Nan::Encoding encoding, const bool validEncoding);
    // Pins a caller-provided output Buffer and records where the digest
    // bytes should be written when the hash completes.
    NAN_INLINE void SaveOutputBuffer(const Local<Value> &buffer, int32_t offset, int32_t length);
    void Execute();             // worker thread: run the hash
    void HandleOKCallback();    // JS thread: marshal the digest to JS
    void HandleErrorCallback(); // JS thread: report a TypeError
  private:
    InputData data_;       // input bytes; may point into dataBuffer below
    OutputType outputType_;
    uint32_t seed_;
    int32_t offset_;       // target offset within a provided output Buffer
    int32_t length_;       // number of digest bytes to copy there
    HashValueType hash_[HashLength];                  // computed digest words
    char dataBuffer[NODE_MURMURHASH_KEY_BUFFER_SIZE]; // inline storage for short string input
};
}
#include "asyncworker_impl.h"
#endif

View File

@ -1,123 +0,0 @@
#if !defined(ASYNCWORKER_HEADER)
# error 'asyncworker_impl.h' is not supposed to be included directly. Include 'asyncworker.h' instead.
#endif
namespace MurmurHash {
namespace {
  // Slots used with SaveToPersistent/GetFromPersistent to keep the input
  // and output Buffers alive while the worker runs.
  enum { kInputBufferIndex, kOutputBufferIndex };
}
// Bare worker: no input yet, unknown output type, seed 0.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
NAN_INLINE MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::MurmurHashWorker(
    Nan::Callback *callback)
  : Nan::AsyncWorker(callback), data_(dataBuffer), outputType_(UnknownOutputType), seed_(0) {}
// Fully initialized worker: reads the key into data_ (copying short string
// input into dataBuffer) and pins Buffer input so GC cannot free it while
// the worker thread reads from it.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
NAN_INLINE MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::MurmurHashWorker(
    Nan::Callback *callback, const OutputType outputType, const uint32_t seed,
    Local<Value> key, const enum Nan::Encoding encoding, const bool validEncoding)
  : Nan::AsyncWorker(callback), data_(dataBuffer), outputType_(outputType), seed_(seed)
  // offset_(0), length_(0)
{
  data_.Setup(key, encoding, validEncoding);
  if (data_.IsFromBuffer())
    SaveToPersistent(kInputBufferIndex, key);
}
// Pins the caller's output Buffer and remembers where to write the digest.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
NAN_INLINE void MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::SaveOutputBuffer(
    const Local<Value> &buffer, int32_t offset, int32_t length)
{
  SaveToPersistent(kOutputBufferIndex, buffer);
  offset_ = offset;
  length_ = length;
}
// Worker thread: computes the hash into hash_. Any output type other than
// the known ones flags an error, routing completion to HandleErrorCallback.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
void MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::Execute()
{
  if ( ! data_.IsValid() )
    return SetErrorMessage(data_.Error());
  switch(outputType_) {
    case DefaultOutputType:
    case NumberOutputType:
    case HexStringOutputType:
    case BinaryStringOutputType:
    case Base64StringOutputType:
    case BufferOutputType:
    case ProvidedBufferOutputType:
      HashFunction( (const void *) *data_, (int) data_.length(), seed_, (void *)hash_ );
      break;
    default:
      SetErrorMessage("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"");
  }
}
// JS thread: renders hash_ into the requested JS representation and invokes
// callback(null, result). argv[0] stays Null; argv[1] is filled per type.
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
void MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::HandleOKCallback()
{
  Nan::HandleScope scope;
  Local<Value> argv[2] = { Nan::Null() };
  switch(outputType_) {
    case DefaultOutputType:
    case NumberOutputType:
      // 32-bit hashes become a JS number; wider digests fall back to hex.
      if (HashSize == sizeof(uint32_t)) {
        argv[1] = Nan::New<Uint32>( (uint32_t) hash_[0] );
      } else {
        argv[1] = HashToEncodedString<OutputByteOrder, HashLength>( hash_, Nan::HEX );
      }
      break;
    case HexStringOutputType:
      argv[1] = HashToEncodedString<OutputByteOrder, HashLength>( hash_, Nan::HEX );
      break;
    case BinaryStringOutputType:
      argv[1] = HashToEncodedString<OutputByteOrder, HashLength>( hash_, Nan::BINARY );
      break;
    case Base64StringOutputType:
      argv[1] = HashToEncodedString<OutputByteOrder, HashLength>( hash_, Nan::BASE64 );
      break;
    case BufferOutputType:
      argv[1] = Nan::NewBuffer( HashSize ).ToLocalChecked();
      WriteHashBytes<OutputByteOrder, HashLength>(hash_, (uint8_t *) node::Buffer::Data(argv[1]));
      break;
    case ProvidedBufferOutputType:
      // Retrieve the Buffer pinned by SaveOutputBuffer and copy into it.
      argv[1] = GetFromPersistent(kOutputBufferIndex);
      WriteHashToBuffer<OutputByteOrder, HashLength>(
          hash_,
          node::Buffer::Data(argv[1]),
          (int32_t) node::Buffer::Length(argv[1]),
          offset_,
          length_);
      break;
    default:
      // Unknown types never reach here: Execute() set an error instead.
      void(0);
  }
  callback->Call(2, argv, async_resource);
}
// JS thread: reports the recorded error as callback(TypeError).
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
void MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>
::HandleErrorCallback() {
  Nan::HandleScope scope;
  Local<Value> argv[] = {
    v8::Exception::TypeError(Nan::New<String>(ErrorMessage()).ToLocalChecked())
  };
  callback->Call(1, argv, async_resource);
}
}

View File

@ -1,29 +0,0 @@
#if !defined(ASYNCUPDATE_HEADER)
#define ASYNCUPDATE_HEADER
namespace MurmurHash {
using v8::Local;
using v8::Value;
// Libuv worker that feeds one chunk of input into an IncrementalHasher on
// the thread pool, then re-enables the hasher (AsyncUpdateComplete) when
// control returns to the JS thread.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
class IncrementalHashUpdater : public Nan::AsyncWorker {
  public:
    typedef IncrementalHasher<H,HashValueType,HashLength> IncrementalHasher_T;
    // Captures the input key; string input is copied into the hasher's own
    // dataBuffer, Buffer input is pinned with a persistent handle.
    NAN_INLINE IncrementalHashUpdater(Nan::Callback *callback,
                  IncrementalHasher_T* hasher,
                  Local<Value> key, const enum Nan::Encoding encoding);
    void Execute();             // worker thread: feed data into the hasher
    void HandleOKCallback();    // JS thread: clear busy flag, callback()
    void HandleErrorCallback(); // JS thread: clear busy flag, callback(TypeError)
  private:
    InputData data_;            // input bytes for this update
    IncrementalHasher_T *hasher_; // target hasher (kept alive via Ref in AsyncUpdateBegin)
};
}
#include "asyncupdate_impl.h"
#endif

View File

@ -1,56 +0,0 @@
#if !defined(ASYNCUPDATE_HEADER)
# error 'asyncupdate_impl.h' is not supposed to be included directly. Include 'asyncupdate.h' instead.
#endif
namespace MurmurHash {
namespace {
  // Persistent slot pinning a Buffer input for the duration of the update.
  enum { kInputBufferIndex };
}
// Reads the key into data_, using the hasher's own dataBuffer as scratch
// space for string input; Buffer input is pinned against GC instead.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE IncrementalHashUpdater<H,HashValueType,HashLength>
::IncrementalHashUpdater(
    Nan::Callback *callback,
    IncrementalHasher_T* hasher,
    Local<Value> key, const enum Nan::Encoding encoding)
  : Nan::AsyncWorker(callback), data_(hasher->dataBuffer), hasher_(hasher)
{
  data_.Setup(key, encoding);
  if (data_.IsFromBuffer())
    SaveToPersistent(kInputBufferIndex, key);
}
// Worker thread: mixes the captured bytes into the hasher's state.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
void IncrementalHashUpdater<H,HashValueType,HashLength>
::Execute()
{
  if ( ! data_.IsValid() )
    return SetErrorMessage(data_.Error());
  hasher_->Update( (const void *) *data_, (int32_t) data_.length() );
}
// JS thread: release the hasher's busy flag before the default callback.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
void IncrementalHashUpdater<H,HashValueType,HashLength>
::HandleOKCallback()
{
  hasher_->AsyncUpdateComplete();
  Nan::AsyncWorker::HandleOKCallback();
}
// JS thread: release the busy flag, then report a TypeError to the callback.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
void IncrementalHashUpdater<H,HashValueType,HashLength>
::HandleErrorCallback()
{
  Nan::HandleScope scope;
  hasher_->AsyncUpdateComplete();
  Local<Value> argv[] = {
    v8::Exception::TypeError(Nan::New<String>(ErrorMessage()).ToLocalChecked())
  };
  callback->Call(1, argv, async_resource);
}
}

View File

@ -1,653 +0,0 @@
#include "static_assert.h"
#include "hasher.h"
#include "murmurhashutils.h"
#include "inputdata.h"
#include "hasher_impl.h"
#include "asyncupdate.h"
namespace MurmurHash {
using v8::Object;
using v8::Int32;
using v8::Uint32;
using v8::Function;
using v8::ObjectTemplate;
using v8::PropertyAttribute;
using v8::ReadOnly;
using v8::DontDelete;
using v8::DontEnum;
using Nan::MaybeLocal;
#define SINGLE_ARG(...) __VA_ARGS__
namespace {
  // Error raised whenever an operation is attempted while an asynchronous
  // update() still holds the hasher busy (see CheckAsyncUpdateInProgress).
  static const char * const MESSAGE_ERROR_PENDING_UPDATE = "Asynchronous update still in progress";
}
/**
* @class
*
* Create MurmurHash utility
*
* new MurmurHash([seed|hash|serial][, endianness="BE"])
*
* @param {number} seed - initial murmur hash seed as 32 bit integer
* @param {MurmurHash} hash - an instance of another MurmurHash of the same type
* @param {string|Buffer} serial - serialized state of the same MurmurHash type
* @param {string} endianness - digest byte order: "BE", "LE" or "platform"
**/
// JS constructor: new MurmurHash([seed|hash|serial][, endianness]).
// Accepts a numeric seed, another hasher of the same type, a base64 serial
// string, a serial Buffer, or undefined/null for the defaults.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_METHOD(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::New))
{
  IncrementalHasher_T *self;
  const int argc = std::min(2, info.Length());
  if ( info.IsConstructCall() ) {
    if ( argc > 0 ) {
      if ( info[0]->IsNumber() ) {
        // seed
        uint32_t seed = Nan::To<uint32_t>(info[0]).FromMaybe(0U);
        self = new IncrementalHasher_T(seed);
      } else if ( Nan::New(constructor)->HasInstance( info[0] ) ) {
        // hasher instance
        IncrementalHasher_T *other = ObjectWrap::Unwrap<IncrementalHasher_T>( info[0].As<Object>() );
        if ( other->CheckAsyncUpdateInProgress() ) return;
        self = new IncrementalHasher_T(*other);
      } else if ( info[0]->IsString() ) {
        // serial string: must be exactly the base64-encoded serial length
        uint8_t serial[kHashSerialSize];
        if ( kHashSerialStringSize == info[0].As<String>()->Length() ) {
          Nan::DecodeWrite( (char *) serial, sizeof(serial), info[0], Nan::BASE64 );
        } else {
          return Nan::ThrowTypeError("Incorrect size of the serialized string");
        }
        if ( IsSerialTypeValid( serial ) ) {
          self = new IncrementalHasher_T(serial);
        } else {
          return Nan::ThrowTypeError("Incorrect serialized string");
        }
      } else if ( node::Buffer::HasInstance( info[0] ) ) {
        // serial buffer: validated in place, no copy needed
        if ( kHashSerialSize <= static_cast<int32_t>(node::Buffer::Length( info[0] )) ) {
          uint8_t *serial = (uint8_t *) node::Buffer::Data( info[0] );
          if ( IsSerialTypeValid( serial ) ) {
            self = new IncrementalHasher_T(serial);
          } else {
            return Nan::ThrowTypeError("Incorrect serialized data");
          }
        } else {
          return Nan::ThrowTypeError("Incorrect size of the serialized data");
        }
      } else if ( info[0]->IsUndefined() || info[0]->IsNull() ) {
        self = new IncrementalHasher_T();
      } else {
        return Nan::ThrowTypeError("Expected a seed number, MurmurHash instance or serialized state");
      }
      // optional second argument selects the digest byte order
      if ( argc > 1 && !info[1]->IsUndefined() && !info[1]->IsNull() ) {
        if ( ! self->SetEndiannessFrom( info[1] ) ) {
          // TypeError already thrown; free the half-built hasher
          delete self;
          return;
        }
      }
    } else {
      self = new IncrementalHasher_T();
    }
    self->Wrap( info.This() );
    info.GetReturnValue().Set( info.This() );
  } else {
    // called without `new`: re-dispatch through the stored constructor
    Local<Value> argv[2];
    for (int i = argc; i-- > 0; argv[i] = info[i]);
    Local<Function> cons = Nan::GetFunction(Nan::New(constructor)).ToLocalChecked();
    MaybeLocal<Object> mayinst = Nan::NewInstance(cons, argc, &argv[0]);
    if ( ! mayinst.IsEmpty() ) {
      info.GetReturnValue().Set( mayinst.ToLocalChecked() );
    }
  }
}
/**
* Copy the internal state onto the target utility instance.
*
* copy(target)
*
* This method does not alter target endianness.
*
* @param {MurmurHash} target - a different instance of MurmurHash utility
* of the same type
* @return {MurmurHash} target
**/
// copy(target): clones this hasher's state into target via operator=,
// which copies the hash state and byte total but not target's endianness.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_METHOD(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::Copy))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  if ( self->CheckAsyncUpdateInProgress() ) return;
  if ( info.Length() > 0 && Nan::New(constructor)->HasInstance( info[0] ) ) {
    IncrementalHasher_T *other = ObjectWrap::Unwrap<IncrementalHasher_T>( info[0].As<Object>() );
    if ( other == self ) return Nan::ThrowError("Target must not be the same instance");
    if ( other->CheckAsyncUpdateInProgress() ) return;
    *other = *self;
  } else {
    return Nan::ThrowTypeError("Target must be another instance of the same murmur hash type utility");
  }
  // return the target for chaining
  info.GetReturnValue().Set( info[0] );
}
/**
* Generate the murmur hash of all of the data provided so far
*
* digest([output_type])
* digest(output, offset, length)
*
* output_type indicates the form and encoding of the returned hash and can be one of:
*
* - 'number' - for 32-bit murmur hash an unsigned 32-bit integer,
* other hashes - hexadecimal string
* - 'hex' - hexadecimal string
* - 'base64' - base64 string
* - 'binary' - binary string
* - 'buffer' - a new Buffer object
*
* If neither output nor known output_type is provided a new Buffer instance
* is returned.
*
* The order of bytes written to a Buffer or encoded string depends on
* `endianness` property.
*
* @param {string} [output_type]
* @param {Buffer} output - a Buffer object to write hash bytes to;
* the same object will be returned
* @param {number} offset - start writing into output at offset byte;
* negative offset starts from the end of the output buffer
* @param {number} length - a number of bytes to write from calculated hash;
* negative length starts from the end of the hash;
* if absolute value of length is larger than the size of a calculated
* hash, bytes are written only up to the hash size
* @return {Buffer|string|number} murmur hash
**/
// digest([output_type]) / digest(output, offset, length): finalizes the
// hash of everything fed so far and renders it per the requested type.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_METHOD(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::Digest))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  if ( self->CheckAsyncUpdateInProgress() ) return;
  OutputType outputType( DefaultOutputType );
  const int argc = info.Length();
  if ( argc > 0 ) {
    if ( info[0]->IsString() ) {
      // first argument names the output type ("hex", "buffer", ...)
      InputData::ReadEncodingString( info[0].As<String>() );
      outputType = InputData::DetermineOutputType();
    } else if ( node::Buffer::HasInstance( info[0] ) ) {
      outputType = ProvidedBufferOutputType;
    }
  }
  Local<Value> result;
  HashValueType hash[HashLength];
  self->Digest( hash );
  // dispatch on the instance's configured digest byte order
  if (self->outputByteOrder == MSBFirst) {
    Output<MSBFirst>(hash, outputType, argc, info, result);
  } else {
    Output<LSBFirst>(hash, outputType, argc, info, result);
  }
  info.GetReturnValue().Set( result );
}
/**
* Serialize the internal state of the murmur hash utility instance
*
* serialize([output[, offset]])
*
* If the output buffer is not provided the serial is generated as a base64
* encoded string. When output has not enough space for the serialized data
* at the given offset it throws an Error. You may consult the required
* byte length reading constant: MurmurHashClass.SERIAL_BYTE_LENGTH
*
* @param {Buffer} output - a buffer to write serialized state to
* @param {number} offset - offset at output
* @return {string|Buffer}
**/
// serialize([output[, offset]]): writes the internal state either into the
// provided Buffer (throwing if it does not fit at the given offset, which
// may be negative to count from the end) or as a base64 string.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_METHOD(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::Serialize))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  if ( self->CheckAsyncUpdateInProgress() ) return;
  const int argc = info.Length();
  Local<Value> result;
  if ( argc > 0 && node::Buffer::HasInstance( info[0] ) ) {
    int32_t length = static_cast<int32_t>(node::Buffer::Length( info[0] ));
    int32_t offset = (argc > 1) ? Nan::To<int32_t>(info[1]).FromMaybe(0) : 0;
    if (offset < 0) offset += length;
    if (offset >= 0 && kHashSerialSize <= length - offset ) {
      result = info[0];
      uint8_t *serial = (uint8_t *) node::Buffer::Data( result ) + offset;
      self->Serialize(serial);
    } else {
      return Nan::ThrowError("Serialized state does not fit in the provided buffer at the given offset");
    }
  } else {
    // no buffer given: serialize to a stack scratch area, encode as base64
    uint8_t serial[kHashSerialSize];
    self->Serialize(serial);
    result = Nan::Encode((void *)serial, sizeof(serial), Nan::BASE64);
  }
  info.GetReturnValue().Set( result );
}
/**
* Update internal state with the given data
*
* update(data[, encoding][, callback])
*
* The encoding can be 'utf8', 'ascii', 'binary', 'ucs2', 'base64' or 'hex'.
* If encoding is not provided or is not known and the data is a string,
* an encoding of 'utf8' is enforced. If data is a Buffer then encoding is ignored.
*
* @param {string|Buffer} data
* @param {string} [encoding]
* @param {Function} callback - optional callback(err)
* if provided the hash will be updated asynchronously using libuv
* worker queue, the return value in this instance will be `undefined`
* @return {MurmurHash} this
**/
// update(data[, encoding][, callback]): feeds data into the hash state.
// With a callback the work is queued on the libuv pool and the method
// returns undefined; synchronously it returns `this` for chaining.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_METHOD(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::Update))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  InputData data;
  int argc = info.Length();
  if ( argc > 0 ) {
    enum Nan::Encoding encoding = Nan::BUFFER;
    int callbackIndex = -1;
    // a trailing function argument selects the asynchronous path
    if ( argc > 1 && info[argc - 1]->IsFunction() ) {
      callbackIndex = --argc;
    }
    if ( info[0]->IsString() ) {
      // strings default to utf8 unless a known encoding name is supplied
      encoding = Nan::UTF8;
      if ( argc > 1 && info[1]->IsString() ) {
        InputData::ReadEncodingString( info[1].As<String>() );
        (void) InputData::DetermineEncoding( encoding );
      }
    }
    if ( callbackIndex > -1 ) {
      Nan::Callback *callback = new Nan::Callback(
          Local<Function>::Cast(info[callbackIndex]));
      if ( self->AsyncUpdateBegin() ) {
        Nan::AsyncQueueWorker(new IncrementalHashUpdater<H,HashValueType,HashLength>(
            callback, self, info[0], encoding));
      } else {
        // already busy: report the pending-update error via the callback
        Local<Value> argv[1] = {
          v8::Exception::Error(Nan::New<String>(MESSAGE_ERROR_PENDING_UPDATE).ToLocalChecked())
        };
        Nan::Call(*callback, 1, argv);
        delete callback;
      }
      return;
    } else if ( self->CheckAsyncUpdateInProgress() ) {
      return;
    } else {
      data.Setup( info[0], encoding );
    }
  }
  if ( ! data.IsValid() )
    return Nan::ThrowTypeError(data.Error());
  self->Update( (const void *) *data, (int32_t) data.length());
  info.GetReturnValue().Set( self->handle() );
}
/**
* @property {string} endianness - digest byte order: "BE", "LE" or "platform"
**/
// Getter for `endianness`: reports the digest byte order as "BE" or "LE".
// ("platform" is accepted on write but resolved to one of the two.)
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_GETTER(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::GetEndianness))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  info.GetReturnValue().Set(
      Nan::New<String>(self->outputByteOrder == MSBFirst ? "BE" : "LE").ToLocalChecked()
  );
}
// Setter for `endianness`: delegates to SetEndiannessFrom, which throws a
// TypeError on unrecognized values (return value deliberately ignored).
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_SETTER(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::SetEndianness))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  (void) self->SetEndiannessFrom( value );
}
/**
 * @property {boolean} isBusy - is asynchronous update in progress
 **/
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_GETTER(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::GetIsBusy))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  info.GetReturnValue().Set( Nan::New( self->asyncInProgress ) );
}
/**
 * @property {number} total - (read only) The total (modulo 2^32) bytes of data
 * provided so far
 **/
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_GETTER(SINGLE_ARG(IncrementalHasher<H,HashValueType,HashLength>::GetTotal))
{
  IncrementalHasher_T *self = ObjectWrap::Unwrap<IncrementalHasher_T>( info.Holder() );
  info.GetReturnValue().Set( Nan::New<Uint32>(self->total) );
}
#undef SINGLE_ARG
/*************************************************/
/******************* internal ********************/
/*************************************************/
/*---------------- constructors -----------------*/
// Seed constructor: fresh state, zero byte total, big-endian digests.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE IncrementalHasher<H,HashValueType,HashLength>
::IncrementalHasher(const uint32_t seed) :
  hasher(seed), total(0), outputByteOrder(MSBFirst), asyncInProgress(false) {};
// Copy constructor: clones hash state, total and endianness; the copy is
// never born busy regardless of the source's async state.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE IncrementalHasher<H,HashValueType,HashLength>
::IncrementalHasher(const IncrementalHasher_T& other) : ObjectWrap(),
  hasher(other.hasher), total(other.total), outputByteOrder(other.outputByteOrder), asyncInProgress(false) {};
// Deserializing constructor: restores hash state from a validated serial
// blob and reads the byte total back out of it. Endianness is not part of
// the serial format, so it resets to the MSBFirst default.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE IncrementalHasher<H,HashValueType,HashLength>
::IncrementalHasher(const uint8_t * const serial) :
  hasher(serial), outputByteOrder(MSBFirst), asyncInProgress(false)
{
  ReadHashBytesMSB<1>(&serial[kHashSerialTotalIndex], &total);
}
/*--------------- static methods ----------------*/
// Validates a serialized-state blob: the type marker must match this hash
// variant and the trailing checksum must agree with a PMurHash32 of the
// preceding bytes (folded the same way Serialize() folds it on write).
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE bool IncrementalHasher<H,HashValueType,HashLength>
::IsSerialTypeValid(uint8_t *serial)
{
  // check state type
  if (kHashSerialType == (serial[kHashSerialTypeIndex] & kHashSerialTypeMask)) {
    // read checksum (big-endian, kHashSerialCkSize bytes)
    checksum_t chksum = (checksum_t) serial[kHashSerialCkIndex];
    for(int i = kHashSerialCkIndex;
        ++i < kHashSerialSize;
        chksum = (chksum << 8) | serial[i]);
    // build verify
    const checksum_t verify = PMurHash32(serial, kHashSerialSize - kHashSerialCkSize, kHashSerialCkSeed);
    STATIC_ASSERT(kHashSerialCkSize > 0 && kHashSerialCkSize <= sizeof(checksum_t),
                  "must have 1 <= kHashSerialCkSize <= sizeof(checksum_t)");
    // undo the high-bit fold applied by Serialize() when the stored
    // checksum is narrower than checksum_t
    if (kHashSerialCkSize < sizeof(checksum_t)) {
      chksum ^= (verify >> ((sizeof(checksum_t) - kHashSerialCkSize)*8));
    }
    // verify checksum
    return chksum == (verify & kHashSerialCkMask);
  }
  return false;
}
// Renders a finalized digest into the JS value requested by outputType,
// using the byte order fixed at compile time via OutputByteOrder. For
// ProvidedBufferOutputType, info[0] is the target Buffer and info[1]/info[2]
// are the optional offset/length arguments of digest().
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
template<ByteOrderType OutputByteOrder>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::Output(const HashValueType hash[HashLength], const OutputType &outputType,
         const int &argc, Nan::NAN_METHOD_ARGS_TYPE info, Local<Value> &result)
{
  switch(outputType) {
    case HexStringOutputType:
      result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::HEX );
      break;
    case BinaryStringOutputType:
      result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::BINARY );
      break;
    case Base64StringOutputType:
      result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::BASE64 );
      break;
    case NumberOutputType:
      // only the 32-bit variant fits in a JS number; wider digests use hex
      if (HashSize == sizeof(uint32_t)) {
        result = Nan::New<Uint32>( (uint32_t) (*hash) );
      } else {
        result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::HEX );
      }
      break;
    case ProvidedBufferOutputType:
      result = info[0];
      WriteHashToBuffer<OutputByteOrder, HashLength>(
          hash,
          node::Buffer::Data(result),
          (int32_t) node::Buffer::Length(result),
          (argc > 1) ? Nan::To<int32_t>(info[1]).FromMaybe(0) : 0,
          (argc > 2)
          ? Nan::To<int32_t>(info[2]).FromMaybe(HashSize)
          : HashSize);
      break;
    default:
      // DefaultOutputType and anything unrecognized: fresh Buffer
      result = Nan::NewBuffer( HashSize ).ToLocalChecked();
      WriteHashBytes<OutputByteOrder, HashLength>(hash, (uint8_t *) node::Buffer::Data(result));
      break;
  }
}
/*-------------- instance methods ---------------*/
// Clears the busy flag and drops the self-reference taken by
// AsyncUpdateBegin, making the wrapper collectible again.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::AsyncUpdateComplete()
{
  Unref();
  asyncInProgress = false;
}
// Marks the hasher busy and Ref()s the wrapper so GC cannot collect it
// while a worker thread still uses it. Returns false if already busy.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE bool IncrementalHasher<H,HashValueType,HashLength>
::AsyncUpdateBegin()
{
  if (asyncInProgress) return false;
  asyncInProgress = true;
  Ref();
  return true;
}
// Throws the pending-update error and returns true when an async update is
// still running; callers bail out on true.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE bool IncrementalHasher<H,HashValueType,HashLength>
::CheckAsyncUpdateInProgress() const
{
  if (asyncInProgress) {
    Nan::ThrowError(MESSAGE_ERROR_PENDING_UPDATE);
    return true;
  }
  return false;
}
// Finalizes the digest into `hash` without disturbing the running state.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::Digest(HashValueType *hash) const
{
  hasher.Digest( hash, (uint32_t) total );
}
// Writes the full serialized state: hash state, type marker, byte total,
// and a trailing checksum that IsSerialTypeValid() verifies on restore.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::Serialize(uint8_t *serial) const
{
  // write state
  hasher.Serialize(serial);
  // write state type
  serial[kHashSerialTypeIndex] |= kHashSerialType;
  // write total
  WriteHashBytesMSB<1>(&total, &serial[kHashSerialTotalIndex]);
  // build checksum
  checksum_t chksum = PMurHash32(serial, kHashSerialSize - kHashSerialCkSize, kHashSerialCkSeed);
  // fold the high bits into the low bits when the stored checksum is
  // narrower than checksum_t (mirrored by the XOR in IsSerialTypeValid)
  if (kHashSerialCkSize < sizeof(checksum_t)) {
    chksum ^= (chksum >> ((sizeof(checksum_t) - kHashSerialCkSize)*8));
  }
  // write checksum, least significant byte last (big-endian layout)
  for(int i = kHashSerialCkIndex + kHashSerialCkSize ;; chksum >>=8) {
    serial[--i] = (uint8_t) chksum & 0xFF;
    if (i == kHashSerialCkIndex) break;
  }
}
// Parses a JS value as an endianness name (case-insensitive "be", "le" or
// "platform") and applies it. Returns false after throwing a TypeError on
// anything else, so constructor callers can clean up.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
bool IncrementalHasher<H,HashValueType,HashLength>
::SetEndiannessFrom(const Local<Value> &value)
{
  Nan::Utf8String strval( value );
  if ( *strval != NULL ) {
    if ( StringEqualLower(*strval, "be") ) {
      outputByteOrder = MSBFirst;
      return true;
    } else if ( StringEqualLower(*strval, "le") ) {
      outputByteOrder = LSBFirst;
      return true;
    } else if ( StringEqualLower(*strval, "platform") ) {
      // resolve "platform" to the host byte order at assignment time
      outputByteOrder = IsBigEndian() ? MSBFirst : LSBFirst;
      return true;
    }
  }
  Nan::ThrowTypeError("Unknown endianness: should be \"BE\", \"LE\" or \"platform\"");
  return false;
}
// Feeds raw bytes into the hash state and advances the (mod 2^32) total.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::Update(const void *data, uint32_t length)
{
  total += (total_t) length;
  hasher.Update( data, length );
}
/*------------------ operators ------------------*/
// Assignment copies the hash state and byte total only; endianness and the
// async-busy flag of the target are deliberately left untouched.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
NAN_INLINE void IncrementalHasher<H,HashValueType,HashLength>
::operator=(const IncrementalHasher_T& other)
{
  hasher = other.hasher;
  total = other.total;
}
/*-------------- static variables ---------------*/
// Per-instantiation persistent handle to the JS constructor, set by Init().
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
Persistent<FunctionTemplate> IncrementalHasher<H,HashValueType,HashLength>::constructor;
/*------------------ node init ------------------*/
// Registers this hasher instantiation on the addon exports under `name`
// (and optionally `altname`): builds the constructor template, accessors,
// prototype methods and the SERIAL_BYTE_LENGTH constants.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
void IncrementalHasher<H,HashValueType,HashLength>
::Init(Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE& target, const char* name, const char *altname)
{
  // HashSize must be a power of two out of {4, 8, 16}
  STATIC_ASSERT((HashSize & 0x1c) && (HashSize|(HashSize-1))+1 == HashSize<<1, "HashSize is not 4, 8 or 16");
  Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
  tpl->SetClassName( Nan::New<String>(name).ToLocalChecked() );
  Local<ObjectTemplate> i_t = tpl->InstanceTemplate();
  // one internal field for the ObjectWrap pointer
  i_t->SetInternalFieldCount(1);
  Nan::SetAccessor( i_t, Nan::New<String>("isBusy").ToLocalChecked(),
                    GetIsBusy, NULL, Local<Value>(), v8::DEFAULT,
                    static_cast<PropertyAttribute>(DontEnum | DontDelete));
  Nan::SetAccessor( i_t, Nan::New<String>("total").ToLocalChecked(),
                    GetTotal, NULL, Local<Value>(), v8::DEFAULT,
                    static_cast<PropertyAttribute>(DontDelete));
  Nan::SetAccessor( i_t, Nan::New<String>("endianness").ToLocalChecked(),
                    GetEndianness, SetEndianness, Local<Value>(), v8::DEFAULT,
                    static_cast<PropertyAttribute>(DontDelete));
  // expose SERIAL_BYTE_LENGTH both on the constructor and on instances
  Nan::SetTemplate(tpl, Nan::New<String>("SERIAL_BYTE_LENGTH").ToLocalChecked(),
                    Nan::New<Int32>(kHashSerialSize),
                    static_cast<PropertyAttribute>(ReadOnly | DontDelete) );
  Nan::SetInstanceTemplate(tpl,
                    Nan::New<String>("SERIAL_BYTE_LENGTH").ToLocalChecked(),
                    Nan::New<Int32>(kHashSerialSize),
                    static_cast<PropertyAttribute>(ReadOnly | DontDelete) );
  Nan::SetPrototypeMethod(tpl, "copy", Copy);
  Nan::SetPrototypeMethod(tpl, "digest", Digest);
  Nan::SetPrototypeMethod(tpl, "serialize", Serialize);
  // toJSON aliases serialize so JSON.stringify captures the state
  Nan::SetPrototypeMethod(tpl, "toJSON", Serialize);
  Nan::SetPrototypeMethod(tpl, "update", Update);
  Local<Value> fn = Nan::GetFunction(tpl).ToLocalChecked();
  constructor.Reset( tpl );
  Nan::Set(target, Nan::New<String>(name).ToLocalChecked(), fn);
  if (altname != NULL) {
    Nan::Set(target, Nan::New<String>(altname).ToLocalChecked(), fn);
  }
}
// Addon entry point: registers the 32-bit hasher and both 128-bit variants.
// Which 128-bit variant also gets the plain "MurmurHash128" alias depends
// on the NODE_MURMURHASH_DEFAULT_32BIT build flag.
NAN_MODULE_INIT(Init)
{
  IncrementalHasher<IncrementalMurmurHash3A, uint32_t, 1>::Init(target, "MurmurHash");
#if defined(NODE_MURMURHASH_DEFAULT_32BIT)
  IncrementalHasher<IncrementalMurmurHash128, uint64_t, 2>::Init(target, "MurmurHash128x64");
  IncrementalHasher<IncrementalMurmurHash128, uint32_t, 4>::Init(target, "MurmurHash128x86", "MurmurHash128");
#else
  IncrementalHasher<IncrementalMurmurHash128, uint64_t, 2>::Init(target, "MurmurHash128x64", "MurmurHash128");
  IncrementalHasher<IncrementalMurmurHash128, uint32_t, 4>::Init(target, "MurmurHash128x86");
#endif
}
}
NODE_MODULE(murmurhashincremental, MurmurHash::Init)

View File

@ -1,98 +0,0 @@
#if !defined(INCREMENTAL_HASHER_HEADER)
#define INCREMENTAL_HASHER_HEADER
#include "nodemurmurhash.h"
namespace MurmurHash {
using v8::Local;
using v8::Value;
using v8::FunctionTemplate;
using Nan::Persistent;
using Nan::ObjectWrap;
NAN_MODULE_INIT(Init);
// ObjectWrap that exposes an incremental hash implementation H (e.g.
// IncrementalMurmurHash3A / IncrementalMurmurHash128) to JavaScript.
// HashValueType/HashLength describe the hash state word type and count.
template<template <typename,int32_t>class H, typename HashValueType, int32_t HashLength>
class IncrementalHasher : public ObjectWrap {
  public:
    typedef IncrementalHasher<H,HashValueType,HashLength> IncrementalHasher_T;
    typedef uint32_t total_t;     // running count of bytes fed into the hash
    typedef uint32_t checksum_t;  // checksum word type used in serialized state

    // Registers this class (constructor + prototype methods/accessors) on
    // the addon exports under `name`, and optionally under alias `altname`.
    static void Init(Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE& target,
                     const char* name, const char *altname = NULL);

    // JS-facing methods and property accessors.
    static NAN_METHOD(New);
    static NAN_METHOD(Copy);
    static NAN_METHOD(Digest);
    static NAN_METHOD(Serialize);
    static NAN_METHOD(Update);
    static NAN_GETTER(GetEndianness);
    static NAN_SETTER(SetEndianness);
    static NAN_GETTER(GetIsBusy);
    static NAN_GETTER(GetTotal);

    // Validates the type byte of a serialized state blob (layout below).
    NAN_INLINE static bool IsSerialTypeValid(uint8_t *serial);

    static Persistent<FunctionTemplate> constructor;

    // Async-update bookkeeping and the core hash operations.
    NAN_INLINE void AsyncUpdateComplete(void);
    NAN_INLINE bool CheckAsyncUpdateInProgress(void) const;
    NAN_INLINE void Digest(HashValueType *hash) const;
    NAN_INLINE void Serialize(uint8_t *serial) const;
    NAN_INLINE void Update(const void *data, uint32_t length);

    // Per-instance scratch buffer for converting JS input into raw bytes.
    char dataBuffer[NODE_MURMURHASH_KEY_BUFFER_SIZE];

  private:
    NAN_INLINE IncrementalHasher(const uint32_t seed = 0U);
    NAN_INLINE IncrementalHasher(const uint8_t * const serial);
    NAN_INLINE IncrementalHasher(const IncrementalHasher_T& other);
    NAN_INLINE void operator=(const IncrementalHasher_T&);

    // Converts a computed hash into the JS result value (buffer/number/
    // string) with the requested byte order.
    template<ByteOrderType OutputByteOrder>
    NAN_INLINE static void Output(const HashValueType hash[HashLength], const OutputType &outputType,
                                  const int &argc, Nan::NAN_METHOD_ARGS_TYPE info, Local<Value> &result);

    NAN_INLINE bool AsyncUpdateBegin(void);
    bool SetEndiannessFrom(const Local<Value> &value);

    H<HashValueType,HashLength> hasher;  // underlying incremental implementation
    total_t total;                       // total bytes hashed so far
    ByteOrderType outputByteOrder;       // byte order used when emitting digests
    bool asyncInProgress;                // true while an async Update owns this instance

// Number of base64 characters required to encode `size` bytes.
#define BASE64_ENCODED_SIZE(size) ((size + 2 - ((size + 2) % 3)) / 3 * 4)

    // Serialized-state layout constants. NOTE(review): HashSize is not
    // declared in this header -- presumably the state byte size defined in
    // nodemurmurhash.h; confirm there.
    static const int32_t kHashSerialTotalIndex = HashSize + HashSize;
    static const int32_t kHashSerialCkSize = 3;
    static const checksum_t kHashSerialCkSeed = 0xDEADBACA;
    static const checksum_t kHashSerialCkMask = static_cast<checksum_t>((1LLU << (kHashSerialCkSize * 8)) - 1);
    static const int32_t kHashSerialCkIndex = kHashSerialTotalIndex + sizeof(total_t);
    static const int32_t kHashSerialSize = kHashSerialCkIndex + kHashSerialCkSize;
    static const int32_t kHashSerialStringSize = BASE64_ENCODED_SIZE(kHashSerialSize);
    /*
      Serial data in network byte order
      0:             hstate[MSByte] ... hstate[LSByte]
      +HashSize:     carry[MSByte]  ... carry[LSByte]
      +HashSize:     total[MSByte]  ... total[LSByte]
      +sizeof(total): chksum = ((ck[0] ^ chksum >>24)<<16) | ck[1]<<8 | ck[2]
    */
    // Type byte identifying which hasher wrote the serial blob, and the
    // mask used when validating it.
    static const uint8_t kHashSerialType = static_cast<uint8_t>((0x0F ^ HashLength ^ sizeof(HashValueType)) << 4);
    static const uint8_t kHashSerialTypeMask = static_cast<uint8_t>(0xF0 | (0x10 - HashSize));
    static const int32_t kHashSerialTypeIndex = kHashSerialTotalIndex - 1;
    /*
                                kHashSerialType               kHashSerialTypeMask
      MurmurHash3A   0b101000nn 15 ^ 1 ^ 4  = 0xA0           0xF0 | 0x10 - 4  = 0xFC
      MurmurHash128x64 0b0101nnnn 15 ^ 2 ^ 8 = 0x50          0xF0 | 0x10 - 16 = 0xF0
      MurmurHash128x86 0b1111nnnn 15 ^ 4 ^ 4 = 0xF0          0xF0 | 0x10 - 16 = 0xF0
      MurmurHash64x64 0b01100nnn 15 ^ 1 ^ 8  = 0x60          0xF0 | 0x10 - 8  = 0xF8
      MurmurHash64x86 0b10010nnn 15 ^ 2 ^ 4  = 0x90          0xF0 | 0x10 - 8  = 0xF8
    */
#undef BASE64_ENCODED_SIZE
};
}
#endif

View File

@ -1,68 +0,0 @@
#if !defined(INCREMENTAL_HASHER_IMPL_HEADER)
#define INCREMENTAL_HASHER_IMPL_HEADER
#include "PMurHash.h"
#include "PMurHash128.h"
namespace MurmurHash {
// Incremental wrapper around the progressive 32-bit MurmurHash3A
// implementation (PMurHash). State is one 32-bit hash word plus a 32-bit
// carry holding not-yet-consumed tail bytes.
template<typename HashValueType, int32_t HashLength>
class IncrementalMurmurHash3A {
  public:
    NAN_INLINE IncrementalMurmurHash3A(const uint32_t seed = 0)
      : hstate((HashValueType) seed), carry(0) {}

    // Restores state previously written by Serialize(); bytes are stored
    // most-significant-byte first.
    NAN_INLINE IncrementalMurmurHash3A(const uint8_t * const serial)
    {
      ReadHashBytesMSB<HashLength>( &serial[0], &hstate );
      ReadHashBytesMSB<HashLength>( &serial[sizeof(hstate)], &carry );
    }

    // Writes hstate then carry, each MSB-first, into `serial`.
    NAN_INLINE void Serialize(uint8_t *serial) const
    {
      WriteHashBytesMSB<HashLength>( &hstate, &serial[0] );
      WriteHashBytesMSB<HashLength>( &carry, &serial[sizeof(hstate)] );
    }

    NAN_INLINE void Update(const void *data, int32_t length)
    {
      PMurHash32_Process( &hstate, &carry, data, static_cast<int>(length) );
    }

    // Finalization is non-destructive (const) -- more data may still be
    // fed after taking a digest. `total` is the overall byte count.
    NAN_INLINE void Digest(HashValueType hash[HashLength], uint32_t total) const
    {
      *hash = PMurHash32_Result( hstate, carry, total );
    }

  private:
    HashValueType hstate, carry;
};
// Incremental wrapper around the progressive 128-bit MurmurHash3
// implementation (PMurHash128). Instantiated as 2 x uint64_t (x64 flavour)
// or 4 x uint32_t (x86 flavour) elsewhere in this addon.
template<typename HashValueType, int32_t HashLength>
class IncrementalMurmurHash128 {
  public:
    // Every state word starts as the (widened) seed; carry starts zeroed
    // via value-initialization.
    NAN_INLINE IncrementalMurmurHash128(const uint32_t seed = 0) : carry() {
      HashValueType *p = hstate + HashLength;
      do {
        *--p = (HashValueType) seed;
      } while (p > hstate);
    }

    // Restores state previously written by Serialize(); bytes are stored
    // most-significant-byte first.
    NAN_INLINE IncrementalMurmurHash128(const uint8_t * const serial)
    {
      ReadHashBytesMSB<HashLength>( &serial[0], hstate );
      ReadHashBytesMSB<HashLength>( &serial[sizeof(hstate)], carry );
    }

    // Writes hstate then carry, each MSB-first, into `serial`.
    NAN_INLINE void Serialize(uint8_t *serial) const
    {
      WriteHashBytesMSB<HashLength>( hstate, &serial[0] );
      WriteHashBytesMSB<HashLength>( carry, &serial[sizeof(hstate)] );
    }

    NAN_INLINE void Update(const void *data, int32_t length)
    {
      PMurHash128_Process( hstate, carry, data, static_cast<int>(length) );
    }

    // Finalization is non-destructive (const); `total` is the overall
    // byte count fed so far.
    NAN_INLINE void Digest(HashValueType hash[HashLength], uint32_t total) const
    {
      PMurHash128_Result( hstate, carry, total, hash );
    }

  private:
    HashValueType hstate[HashLength], carry[HashLength];
};
}
#endif

View File

@ -1,54 +0,0 @@
#if !defined(INPUTDATA_HEADER)
#define INPUTDATA_HEADER
#ifndef NODE_MURMURHASH_KEY_BUFFER_SIZE
# define NODE_MURMURHASH_KEY_BUFFER_SIZE 1024
#endif
namespace MurmurHash {
using v8::Local;
using v8::Value;
using v8::String;
// Normalises a JS value (a string in some encoding, or a node::Buffer)
// into a raw byte range for hashing. The bytes live in one of three
// places, tracked by Type.
class InputData {
  public:
    enum Type {
      Static,         // bytes live in the caller-provided static scratch buffer
      Own,            // bytes live in a heap allocation owned by this object
      ExternalBuffer  // bytes are borrowed from a node::Buffer (not copied)
    };

    // `staticBuffer` supplies the scratch area used for decoded strings
    // that fit; defaults to the shared keyBuffer.
    NAN_INLINE InputData(char staticBuffer[NODE_MURMURHASH_KEY_BUFFER_SIZE] = keyBuffer);

    // Decodes `key` per `encoding`; on failure the object stays invalid
    // and Error() describes why.
    NAN_INLINE void Setup(Local<Value> key, const enum Nan::Encoding encoding, const bool validEncoding = true);

    NAN_INLINE bool IsValid() const;
    NAN_INLINE bool IsFromBuffer() const;
    NAN_INLINE const char * Error() const;
    NAN_INLINE size_t length() const;
    NAN_INLINE char* operator*();
    NAN_INLINE const char* operator*() const;
    NAN_INLINE ~InputData();

    // Helpers operating on the shared encoding-name scratch (encCstr).
    NAN_INLINE static void ReadEncodingString(const Local<String>& type);
    static bool DetermineEncoding(enum Nan::Encoding& enc);
    static OutputType DetermineOutputType();

  private:
    char *staticBufferPtr;  // scratch area for decoded strings that fit
    char *buffer;           // current payload (NULL until Setup succeeds)
    size_t size;            // payload length in bytes
    Type type;              // where `buffer` points / who owns it
    const char *error;      // description of the last Setup failure

    NAN_INLINE void reset(char *buf = NULL, size_t siz = 0, Type t = Static);

    // Picks the static scratch or a fresh heap buffer for `bytelength`
    // bytes, reporting the choice through `type`.
    NAN_INLINE char *EnsureBuffer(size_t bytelength, Type& type);

    static char keyBuffer[NODE_MURMURHASH_KEY_BUFFER_SIZE];
    static char encCstr[sizeof("utf-16le")];

    // Non-copyable.
    InputData(const InputData&);
    void operator=(const InputData&);
};
}
#include "inputdata_impl.h"
#endif

View File

@ -1,153 +0,0 @@
#if !defined(INPUTDATA_HEADER)
# error 'inputdata_impl.h' is not supposed to be included directly. Include 'inputdata.h' instead.
#endif
namespace MurmurHash {
// Starts in the invalid state (buffer == NULL) with a default error
// message; Setup() must be called before the data can be used.
NAN_INLINE InputData::InputData(char *staticBuffer) : staticBufferPtr(staticBuffer),
    buffer(NULL), size(0), type(Static), error("string or Buffer is required") {}
// Captures the bytes of `key` according to `encoding`.
// - Nan::BUFFER: borrows the Buffer's memory directly (no copy); a non-
//   Buffer value leaves the object invalid with the default error.
// - string encodings: decodes into the static scratch, or a heap buffer
//   when the upper-bound size exceeds it.
NAN_INLINE void InputData::Setup(Local<Value> key, const enum Nan::Encoding encoding, const bool validEncoding)
{
  if ( !validEncoding ) {
    error = "\"encoding\" must be a valid string encoding";
    return;
  }

  if ( encoding == Nan::BUFFER) {
    if ( node::Buffer::HasInstance(key) ) {
      reset(
          node::Buffer::Data(key),
          node::Buffer::Length(key),
          ExternalBuffer);
    }
  } else {
    // Upper bound on the decoded size: each UTF-16 unit of a JS string
    // expands to at most 3 bytes in UTF-8; other encodings are measured
    // exactly with Nan::DecodeBytes (which may return -1 on failure).
    ssize_t maxLength = (encoding == Nan::UTF8)
                        ? 3 * key.As<String>()->Length()
                        : Nan::DecodeBytes(key, encoding);
    if ( maxLength != -1 ) {
      Type type;
      char *data = EnsureBuffer((size_t) maxLength, type);
      // DecodeWrite returns the number of bytes actually written.
      reset(data, (size_t) Nan::DecodeWrite( data, maxLength, key, encoding ), type);
    }
  }
}
NAN_INLINE const char * InputData::Error(void) const
{
return error;
}
NAN_INLINE bool InputData::IsValid(void) const
{
return buffer != NULL;
}
NAN_INLINE bool InputData::IsFromBuffer(void) const
{
return type == ExternalBuffer;
}
NAN_INLINE size_t InputData::length() const { return size; }
// Replaces the current payload, releasing any previously owned heap
// buffer first. An empty payload (siz == 0 or buf == NULL) records size 0;
// for ExternalBuffer the static scratch is substituted so the object still
// reads as valid (a zero-length node::Buffer is legitimate input), while
// for other types buffer may become NULL and the object reads as invalid.
NAN_INLINE void InputData::reset(char *buf, size_t siz, Type t)
{
  if ( type == Own ) delete[] buffer;

  if ( siz == 0 || buf == NULL ) {
    size = 0;
    type = t;
    if ( t == ExternalBuffer ) {
      buffer = staticBufferPtr;
    } else {
      buffer = buf;
    }
  } else {
    buffer = buf;
    size = siz;
    type = t;
  }
}
// Raw access to the captured bytes (NULL while invalid).
NAN_INLINE char* InputData::operator*() { return buffer; }
NAN_INLINE const char* InputData::operator*() const { return buffer; }

// Frees the payload only when EnsureBuffer() heap-allocated it; static
// scratch and borrowed Buffer memory are never freed here.
NAN_INLINE InputData::~InputData()
{
  if ( type == Own ) delete[] buffer;
}
// Copies the (short) JS string `type` into the shared encCstr scratch,
// NUL-terminated, for the Determine* helpers below. A string too long to
// fit (longer than "utf-16le") is replaced by the empty string, which
// matches no known encoding.
NAN_INLINE void InputData::ReadEncodingString(const Local<String>& type)
{
  size_t length = type->Length();
  if ( length < sizeof(encCstr) ) {
    encCstr[Nan::DecodeWrite(encCstr, length, type)] = 0;
  } else
    encCstr[0] = 0;
}
/* Maps the encoding name previously stored in encCstr (case-insensitive)
 * onto a Nan::Encoding. Returns false when the name is not recognised,
 * leaving `enc` untouched. */
bool InputData::DetermineEncoding(enum Nan::Encoding& enc)
{
  static const struct { const char *name; enum Nan::Encoding encoding; } kEncodings[] = {
    { "utf16le",  Nan::UCS2   },
    { "utf-16le", Nan::UCS2   },
    { "base64",   Nan::BASE64 },
    { "binary",   Nan::BINARY },
    { "ascii",    Nan::ASCII  },
    { "utf-8",    Nan::UTF8   },
    { "ucs-2",    Nan::UCS2   },
    { "utf8",     Nan::UTF8   },
    { "ucs2",     Nan::UCS2   },
    { "hex",      Nan::HEX    }
  };
  for (size_t index = 0; index < sizeof(kEncodings) / sizeof(kEncodings[0]); ++index) {
    if ( StringEqualLower(encCstr, kEncodings[index].name) ) {
      enc = kEncodings[index].encoding;
      return true;
    }
  }
  return false;
}
/* Maps the output-type name previously stored in encCstr (case-
 * insensitive) onto an OutputType; unrecognised names yield
 * UnknownOutputType. */
OutputType InputData::DetermineOutputType()
{
  static const struct { const char *name; OutputType result; } kOutputs[] = {
    { "buffer", BufferOutputType },
    { "number", NumberOutputType },
    { "base64", Base64StringOutputType },
    { "binary", BinaryStringOutputType },
    { "hex",    HexStringOutputType }
  };
  for (size_t index = 0; index < sizeof(kOutputs) / sizeof(kOutputs[0]); ++index) {
    if ( StringEqualLower(encCstr, kOutputs[index].name) ) {
      return kOutputs[index].result;
    }
  }
  return UnknownOutputType;
}
/* Returns a buffer large enough for `bytelength` bytes: the static
 * scratch when it fits, otherwise a fresh heap allocation. The chosen
 * ownership is reported through `type` (Static vs Own) so reset()/the
 * destructor know whether to delete[] it later. */
NAN_INLINE char *InputData::EnsureBuffer(size_t bytelength, Type& type)
{
  const bool fitsInStatic = bytelength <= NODE_MURMURHASH_KEY_BUFFER_SIZE;
  type = fitsInStatic ? Static : Own;
  return fitsInStatic ? staticBufferPtr : new char[bytelength];
}
// Shared static storage: the default key scratch buffer, and the encoding
// name scratch filled by ReadEncodingString() and read by Determine*().
// NOTE(review): not thread-safe; presumably only touched from the JS main
// thread -- confirm before reusing elsewhere.
char InputData::keyBuffer[NODE_MURMURHASH_KEY_BUFFER_SIZE];
char InputData::encCstr[sizeof("utf-16le")];
}

View File

@ -1,528 +0,0 @@
//-----------------------------------------------------------------------------
// MurmurHash2 was written by Austin Appleby, and is placed in the public
// domain. The author hereby disclaims copyright to this source code.
// Note - This code makes a few assumptions about how your machine behaves -
// 1. We can read a 4-byte value from any address without crashing
// 2. sizeof(int) == 4
// And it has a few limitations -
// 1. It will not work incrementally.
// 2. It will not produce the same results on little-endian and big-endian
// machines.
#include "MurmurHash2.h"

#include <string.h> /* memcpy: alignment/aliasing-safe 32-bit block loads */
//-----------------------------------------------------------------------------
// Platform-specific functions and macros
// Microsoft Visual Studio
#if defined(_MSC_VER)
#define FORCE_INLINE __forceinline
#define BIG_CONSTANT(x) (x)
// Other compilers
#else // defined(_MSC_VER)
#define FORCE_INLINE inline __attribute__((always_inline))
#define BIG_CONSTANT(x) (x##LLU)
#endif // !defined(_MSC_VER)
#include "endianness.h"
//-----------------------------------------------------------------------------
// Original 32-bit MurmurHash2. Endian-dependent (see MurmurHashNeutral2
// for the portable-output variant). Returns a 32-bit hash of `len` bytes
// at `key`, mixed with `seed`.
uint32_t MurmurHash2 ( const void * key, int len, uint32_t seed )
{
  // 'm' and 'r' are mixing constants generated offline.
  // They're not really 'magic', they just happen to work well.
  const uint32_t m = 0x5bd1e995;
  const int r = 24;

  // Initialize the hash to a 'random' value
  uint32_t h = seed ^ len;

  // Mix 4 bytes at a time into the hash
  const unsigned char * data = (const unsigned char *)key;

  while(len >= 4)
  {
    // memcpy instead of the old *(uint32_t*) type-punned load: identical
    // bytes/result, but avoids undefined behaviour (strict aliasing) and
    // faults on architectures that trap on unaligned 32-bit reads.
    uint32_t k;
    memcpy(&k, data, sizeof(k));

    k *= m;
    k ^= k >> r;
    k *= m;

    h *= m;
    h ^= k;

    data += 4;
    len -= 4;
  }

  // Handle the last few bytes of the input array
  // (cases intentionally fall through).
  switch(len)
  {
  case 3: h ^= data[2] << 16;
  case 2: h ^= data[1] << 8;
  case 1: h ^= data[0];
      h *= m;
  };

  // Do a few final mixes of the hash to ensure the last few
  // bytes are well-incorporated.
  h ^= h >> 13;
  h *= m;
  h ^= h >> 15;

  return h;
}
//-----------------------------------------------------------------------------
// MurmurHash2, 64-bit versions, by Austin Appleby
// The same caveats as 32-bit MurmurHash2 apply here - beware of alignment
// and endian-ness issues if used across multiple platforms.
// 64-bit hash for 64-bit platforms
// 64-bit MurmurHash2 for 64-bit platforms ("MurmurHash64A"). Block reads
// go through the project's endian-aware getblock64() (endianness.h).
uint64_t MurmurHash64A ( const void * key, int len, uint64_t seed )
{
  const uint64_t m = BIG_CONSTANT(0xc6a4a7935bd1e995);
  const int r = 47;

  uint64_t h = seed ^ (len * m);

  const uint64_t * data = (const uint64_t *)key;
  const uint64_t * end = data + (len/8);

  // Mix 8 bytes at a time.
  while(data != end)
  {
    uint64_t k = getblock64(data++);

    k *= m;
    k ^= k >> r;
    k *= m;

    h ^= k;
    h *= m;
  }

  // Tail: up to 7 remaining bytes; cases intentionally fall through.
  const unsigned char * data2 = (const unsigned char*)data;

  switch(len & 7)
  {
  case 7: h ^= uint64_t(data2[6]) << 48;
  case 6: h ^= uint64_t(data2[5]) << 40;
  case 5: h ^= uint64_t(data2[4]) << 32;
  case 4: h ^= uint64_t(data2[3]) << 24;
  case 3: h ^= uint64_t(data2[2]) << 16;
  case 2: h ^= uint64_t(data2[1]) << 8;
  case 1: h ^= uint64_t(data2[0]);
      h *= m;
  };

  // Final avalanche.
  h ^= h >> r;
  h *= m;
  h ^= h >> r;

  return h;
}
// 64-bit hash for 32-bit platforms
// 64-bit MurmurHash2 variant for 32-bit platforms: runs two interleaved
// 32-bit states (h1, h2) and stitches them into one 64-bit result.
// Output is NOT compatible with MurmurHash64A.
uint64_t MurmurHash64B ( const void * key, int len, uint64_t seed )
{
  const uint32_t m = 0x5bd1e995;
  const int r = 24;

  // Low seed half mixes with the length; high half seeds the second lane.
  uint32_t h1 = uint32_t(seed) ^ len;
  uint32_t h2 = uint32_t(seed >> 32);

  const uint32_t * data = (const uint32_t *)key;

  // Consume 8 bytes per round, one endian-aware word into each lane.
  while(len >= 8)
  {
    uint32_t k1 = getblock32(data++);
    k1 *= m; k1 ^= k1 >> r; k1 *= m;
    h1 *= m; h1 ^= k1;
    len -= 4;

    uint32_t k2 = getblock32(data++);
    k2 *= m; k2 ^= k2 >> r; k2 *= m;
    h2 *= m; h2 ^= k2;
    len -= 4;
  }

  // One more whole word, if present, goes into lane 1.
  if(len >= 4)
  {
    uint32_t k1 = getblock32(data++);
    k1 *= m; k1 ^= k1 >> r; k1 *= m;
    h1 *= m; h1 ^= k1;
    len -= 4;
  }

  // Tail bytes (0..3) fold into lane 2; cases fall through.
  switch(len)
  {
  case 3: h2 ^= ((unsigned char*)data)[2] << 16;
  case 2: h2 ^= ((unsigned char*)data)[1] << 8;
  case 1: h2 ^= ((unsigned char*)data)[0];
      h2 *= m;
  };

  // Cross-mix the two lanes, then combine into a single 64-bit value.
  h1 ^= h2 >> 18; h1 *= m;
  h2 ^= h1 >> 22; h2 *= m;
  h1 ^= h2 >> 17; h1 *= m;
  h2 ^= h1 >> 19; h2 *= m;

  uint64_t h = h1;

  h = (h << 32) | h2;

  return h;
}
//-----------------------------------------------------------------------------
// MurmurHash2A, by Austin Appleby
// This is a variant of MurmurHash2 modified to use the Merkle-Damgard
// construction. Bulk speed should be identical to Murmur2, small-key speed
// will be 10%-20% slower due to the added overhead at the end of the hash.
// This variant fixes a minor issue where null keys were more likely to
// collide with each other than expected, and also makes the function
// more amenable to incremental implementations.
// Core mixing step shared by MurmurHash2A and CMurmurHash2A below:
// churn block k into hash h (both are modified).
#define mmix(h,k) { k *= m; k ^= k >> r; k *= m; h *= m; h ^= k; }

// MurmurHash2A: Merkle-Damgard variant of MurmurHash2. The length is
// mixed in as a final block, which fixes the null-key collision bias of
// plain MurmurHash2 and enables incremental use (see CMurmurHash2A).
uint32_t MurmurHash2A ( const void * key, int len, uint32_t seed )
{
  const uint32_t m = 0x5bd1e995;
  const int r = 24;
  uint32_t l = len;

  const unsigned char * data = (const unsigned char *)key;

  uint32_t h = seed;

  while(len >= 4)
  {
    // memcpy instead of the old *(uint32_t*) type-punned load: identical
    // bytes/result, but avoids undefined behaviour (strict aliasing) and
    // unaligned-access faults on strict architectures.
    uint32_t k;
    memcpy(&k, data, sizeof(k));

    mmix(h,k);

    data += 4;
    len -= 4;
  }

  // Tail bytes (0..3) are gathered into t; cases fall through.
  uint32_t t = 0;

  switch(len)
  {
  case 3: t ^= data[2] << 16;
  case 2: t ^= data[1] << 8;
  case 1: t ^= data[0];
  };

  // Mix the tail and then the total length as final blocks.
  mmix(h,t);
  mmix(h,l);

  h ^= h >> 13;
  h *= m;
  h ^= h >> 15;

  return h;
}
//-----------------------------------------------------------------------------
// CMurmurHash2A, by Austin Appleby
// This is a sample implementation of MurmurHash2A designed to work
// incrementally.
// Usage -
// CMurmurHash2A hasher
// hasher.Begin(seed);
// hasher.Add(data1,size1);
// hasher.Add(data2,size2);
// ...
// hasher.Add(dataN,sizeN);
// uint32_t hash = hasher.End()
// Incremental MurmurHash2A. Usage: Begin(seed); Add(...); Add(...);
// uint32_t hash = End(). Relies on the mmix macro defined above.
class CMurmurHash2A
{
public:

  // Resets the running state; must be called before the first Add().
  void Begin ( uint32_t seed = 0 )
  {
    m_hash = seed;
    m_tail = 0;
    m_count = 0;
    m_size = 0;
  }

  // Feeds `len` bytes. Leading bytes first top up the pending tail; whole
  // 4-byte blocks are then mixed directly; trailing bytes refill the tail.
  void Add ( const unsigned char * data, int len )
  {
    m_size += len;

    MixTail(data,len);

    while(len >= 4)
    {
      // NOTE(review): type-punned, potentially unaligned 32-bit load --
      // same pattern as MurmurHash2 above; UB under strict aliasing and
      // unsafe on alignment-strict CPUs.
      uint32_t k = *(uint32_t*)data;

      mmix(m_hash,k);

      data += 4;
      len -= 4;
    }

    MixTail(data,len);
  }

  // Finalizes: mixes the pending tail and total size, then avalanches.
  uint32_t End ( void )
  {
    mmix(m_hash,m_tail);
    mmix(m_hash,m_size);

    m_hash ^= m_hash >> 13;
    m_hash *= m;
    m_hash ^= m_hash >> 15;

    return m_hash;
  }

private:

  static const uint32_t m = 0x5bd1e995;
  static const int r = 24;

  // Accumulates bytes into m_tail (LSB first) until a whole 32-bit block
  // is available, then mixes it. Advances data/len as it consumes.
  void MixTail ( const unsigned char * & data, int & len )
  {
    while( len && ((len<4) || m_count) )
    {
      m_tail |= (*data++) << (m_count * 8);

      m_count++;
      len--;

      if(m_count == 4)
      {
        mmix(m_hash,m_tail);
        m_tail = 0;
        m_count = 0;
      }
    }
  }

  uint32_t m_hash;   // running hash state
  uint32_t m_tail;   // partial block being accumulated
  uint32_t m_count;  // number of bytes currently in m_tail (0..3)
  uint32_t m_size;   // total bytes added so far
};
//-----------------------------------------------------------------------------
// MurmurHashNeutral2, by Austin Appleby
// Same as MurmurHash2, but endian- and alignment-neutral.
// Half the speed though, alas.
/* Endian- and alignment-neutral flavour of MurmurHash2: each block is
 * assembled byte-by-byte in little-endian order, so the output is
 * identical on every platform (at roughly half the speed). */
uint32_t MurmurHashNeutral2 ( const void * key, int len, uint32_t seed )
{
  const uint32_t m = 0x5bd1e995;
  const int r = 24;

  uint32_t h = seed ^ len;

  const unsigned char * data = (const unsigned char *)key;

  for ( ; len >= 4; data += 4, len -= 4 )
  {
    /* Assemble a little-endian 32-bit block from individual bytes. */
    uint32_t k = (uint32_t)data[0]
               | ((uint32_t)data[1] << 8)
               | ((uint32_t)data[2] << 16)
               | ((uint32_t)data[3] << 24);

    k *= m;
    k ^= k >> r;
    k *= m;

    h *= m;
    h ^= k;
  }

  /* Tail: 0..3 remaining bytes (cases deliberately fall through). */
  switch(len)
  {
  case 3: h ^= data[2] << 16;
  case 2: h ^= data[1] << 8;
  case 1: h ^= data[0];
      h *= m;
  };

  /* Final avalanche. */
  h ^= h >> 13;
  h *= m;
  h ^= h >> 15;

  return h;
}
//-----------------------------------------------------------------------------
// MurmurHashAligned2, by Austin Appleby
// Same algorithm as MurmurHash2, but only does aligned reads - should be safer
// on certain platforms.
// Performance will be lower than MurmurHash2
#define MIX(h,k,m) { k *= m; k ^= k >> r; k *= m; h *= m; h ^= k; }
// Same algorithm/output as MurmurHash2, but only performs word-aligned
// 32-bit reads: when the input is misaligned, a sliding two-register
// window (t = previous word, d = next word) reconstructs each logical
// block from two aligned loads. Slower than MurmurHash2 but safe on
// alignment-strict platforms.
uint32_t MurmurHashAligned2 ( const void * key, int len, uint32_t seed )
{
  const uint32_t m = 0x5bd1e995;
  const int r = 24;

  const unsigned char * data = (const unsigned char *)key;

  uint32_t h = seed ^ len;

  // Misalignment of the input pointer within its 32-bit word (0..3).
  int align = (uint64_t)data & 3;

  if(align && (len >= 4))
  {
    // Pre-load the temp registers
    // (gather the bytes up to the first aligned boundary into t;
    // cases fall through).
    uint32_t t = 0, d = 0;

    switch(align)
    {
      case 1: t |= data[2] << 16;
      case 2: t |= data[1] << 8;
      case 3: t |= data[0];
    }

    t <<= (8 * align);

    data += 4-align;
    len -= 4-align;

    // Shift amounts used to splice two aligned words into one block.
    int sl = 8 * (4-align);
    int sr = 8 * align;

    // Mix
    while(len >= 4)
    {
      d = *(uint32_t *)data;
      t = (t >> sr) | (d << sl);

      uint32_t k = t;

      MIX(h,k,m);

      t = d;

      data += 4;
      len -= 4;
    }

    // Handle leftover data in temp registers
    d = 0;

    if(len >= align)
    {
      switch(align)
      {
      case 3: d |= data[2] << 16;
      case 2: d |= data[1] << 8;
      case 1: d |= data[0];
      }

      uint32_t k = (t >> sr) | (d << sl);
      MIX(h,k,m);

      data += align;
      len -= align;

      //----------
      // Handle tail bytes

      switch(len)
      {
      case 3: h ^= data[2] << 16;
      case 2: h ^= data[1] << 8;
      case 1: h ^= data[0];
          h *= m;
      };
    }
    else
    {
      // Fewer bytes remain than the alignment gap: fold what is left of
      // the window plus any trailing bytes in one step.
      switch(len)
      {
      case 3: d |= data[2] << 16;
      case 2: d |= data[1] << 8;
      case 1: d |= data[0];
      case 0: h ^= (t >> sr) | (d << sl);
          h *= m;
      }
    }

    h ^= h >> 13;
    h *= m;
    h ^= h >> 15;

    return h;
  }
  else
  {
    // Already aligned: straightforward word-at-a-time mixing.
    while(len >= 4)
    {
      uint32_t k = *(uint32_t *)data;

      MIX(h,k,m);

      data += 4;
      len -= 4;
    }

    //----------
    // Handle tail bytes

    switch(len)
    {
    case 3: h ^= data[2] << 16;
    case 2: h ^= data[1] << 8;
    case 1: h ^= data[0];
        h *= m;
    };

    h ^= h >> 13;
    h *= m;
    h ^= h >> 15;

    return h;
  }
}
//-----------------------------------------------------------------------------

View File

@ -1,39 +0,0 @@
//-----------------------------------------------------------------------------
// MurmurHash2 was written by Austin Appleby, and is placed in the public
// domain. The author hereby disclaims copyright to this source code.
#ifndef _MURMURHASH2_H_
#define _MURMURHASH2_H_
//-----------------------------------------------------------------------------
// Platform-specific functions and macros
// Microsoft Visual Studio
#if defined(_MSC_VER) && (_MSC_VER < 1600)
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
typedef unsigned __int64 uint64_t;
// Other compilers
#else // defined(_MSC_VER)
#include <stdint.h>
#endif // !defined(_MSC_VER)
//-----------------------------------------------------------------------------
uint32_t MurmurHash2 ( const void * key, int len, uint32_t seed );
uint64_t MurmurHash64A ( const void * key, int len, uint64_t seed );
uint64_t MurmurHash64B ( const void * key, int len, uint64_t seed );
uint32_t MurmurHash2A ( const void * key, int len, uint32_t seed );
uint32_t MurmurHashNeutral2 ( const void * key, int len, uint32_t seed );
uint32_t MurmurHashAligned2 ( const void * key, int len, uint32_t seed );
//-----------------------------------------------------------------------------
#endif // _MURMURHASH2_H_

View File

@ -1,246 +0,0 @@
/*-----------------------------------------------------------------------------
* MurmurHash3 was written by Austin Appleby, and is placed in the public
* domain.
*
* This implementation was written by Shane Day, and is also public domain.
*
* This is a portable ANSI C implementation of MurmurHash3_x86_32 (Murmur3A)
* with support for progressive processing.
*/
/*-----------------------------------------------------------------------------
If you want to understand the MurmurHash algorithm you would be much better
off reading the original source. Just point your browser at:
http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp
What this version provides?
1. Progressive data feeding. Useful when the entire payload to be hashed
does not fit in memory or when the data is streamed through the application.
Also useful when hashing a number of strings with a common prefix. A partial
hash of a prefix string can be generated and reused for each suffix string.
How does it work?
We can only process entire 32 bit chunks of input, except for the very end
that may be shorter. So along with the partial hash we need to give back to
the caller a carry containing up to 3 bytes that we were unable to process.
This carry also needs to record the number of bytes the carry holds. I use
the low 2 bits as a count (0..3) and the carry bytes are shifted into the
high byte in stream order.
To handle endianess I simply use a macro that reads a uint32_t and define
that macro to be a direct read on little endian machines, a read and swap
on big endian machines, or a byte-by-byte read if the endianess is unknown.
-----------------------------------------------------------------------------*/
#include "PMurHash.h"
// /* MSVC warnings we choose to ignore */
// #if defined(_MSC_VER)
// #pragma warning(disable: 4127) /* conditional expression is constant */
// #endif
/*-----------------------------------------------------------------------------
* Endianess, misalignment capabilities and util macros
*
* The following 3 macros are defined in this section. The other macros defined
* are only needed to help derive these 3.
*
* READ_UINT32(x) Read a little endian unsigned 32-bit int
* UNALIGNED_SAFE Defined if READ_UINT32 works on non-word boundaries
* ROTL32(x,r) Rotate x left by r bits
*/
/* I386 or AMD64 */
#if defined(_M_I86) || defined(_M_IX86) || defined(_X86_) || defined(__i386__) || defined(__i386) || defined(i386) \
|| defined(_M_X64) || defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) || defined(__amd64)
#define UNALIGNED_SAFE
#endif
/* I386 or AMD64 */
#if defined(_M_I86) || defined(_M_IX86) || defined(_X86_) || defined(__i386__) || defined(__i386) || defined(i386) \
|| defined(_M_X64) || defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) || defined(__amd64)
#define UNALIGNED_SAFE
#endif
/* Find best way to ROTL */
#if defined(_MSC_VER)
#define FORCE_INLINE __forceinline
#include <stdlib.h> /* Microsoft put _rotl declaration in here */
#define ROTL32(x,y) _rotl(x,y)
#else
#define FORCE_INLINE inline __attribute__((always_inline))
/* gcc recognises this code and generates a rotate instruction for CPUs with one */
#define ROTL32(x,r) (((uint32_t)x << r) | ((uint32_t)x >> (32 - r)))
#endif
#include "endianness.h"
#define READ_UINT32(ptr) getblock32((uint32_t *)ptr)
/*-----------------------------------------------------------------------------
* Core murmurhash algorithm macros */
static const uint32_t kC1 = 0xcc9e2d51;
static const uint32_t kC2 = 0x1b873593;
/* This is the main processing body of the algorithm. It operates
* on each full 32-bits of input. */
// Mixes one full 32-bit block k1 into the running hash h1 (MurmurHash3
// core round). Note k1 is passed by reference and clobbered.
FORCE_INLINE void doblock(uint32_t &h1, uint32_t &k1)
{
  k1 *= kC1;
  k1 = ROTL32(k1,15);
  k1 *= kC2;

  h1 ^= k1;
  h1 = ROTL32(h1,13);
  h1 = h1*5+0xe6546b64;
}
/* Append unaligned bytes to carry, forcing hash churn if we have 4 bytes */
/* cnt=bytes to process, h1=name of h1 var, c=carry, n=bytes in c, ptr/len=payload */
// Feeds `cnt` single bytes from ptr into the carry word c; every time 4
// bytes accumulate, the full block is churned into h1 via doblock().
// n tracks how many bytes currently sit in c; ptr/len advance as consumed.
FORCE_INLINE void dobytes(int cnt, uint32_t &h1, uint32_t &c, int &n,
                          const uint8_t *&ptr, int &len)
{
  while(cnt--) {
    // Shift the next input byte into the top of the carry word
    // (bytes enter in stream order).
    c = c>>8 | (uint32_t)*ptr++<<24;
    n++; len--;
    if(n==4) {
      doblock(h1, c);
      n = 0;
    }
  }
}
/*---------------------------------------------------------------------------*/
/* Main hashing function. Initialise carry to 0 and h1 to 0 or an initial seed
* if wanted. Both ph1 and pcarry are required arguments. */
// Progressive MurmurHash3_x86_32: folds `len` bytes of `key` into the
// running state. *ph1 is the hash accumulator; *pcarry holds up to 3
// pending bytes in its high bytes plus their count in its low 2 bits.
// Initialise *ph1 to the seed and *pcarry to 0 before the first call.
void PMurHash32_Process(uint32_t *ph1, uint32_t *pcarry, const void *key, int len)
{
  uint32_t h1 = *ph1;
  uint32_t c = *pcarry;

  const uint8_t *ptr = (uint8_t*)key;
  const uint8_t *end;

  /* Extract carry count from low 2 bits of c value */
  int n = c & 3;

#if defined(UNALIGNED_SAFE) && NODE_MURMURHASH_TEST_ALIGNED != 1
  /* This CPU handles unaligned word access */
// #pragma message ( "UNALIGNED_SAFE" )
  /* Consume any carry bytes */
  int i = (4-n) & 3;
  if(i && i <= len) {
    dobytes(i, h1, c, n, ptr, len);
  }

  /* Process 32-bit chunks */
  end = ptr + (len & ~3);
  for( ; ptr < end ; ptr+=4) {
    uint32_t k1 = READ_UINT32(ptr);
    doblock(h1, k1);
  }

#else /*UNALIGNED_SAFE*/
  /* This CPU does not handle unaligned word access */
// #pragma message ( "ALIGNED" )
  /* Consume enough so that the next data byte is word aligned */
  int i = -(intptr_t)(void *)ptr & 3;
  if(i && i <= len) {
    dobytes(i, h1, c, n, ptr, len);
  }

  /* We're now aligned. Process in aligned blocks. Specialise for each possible carry count */
  /* Each case splices the carry's pending bytes with freshly read aligned
   * words so the blocks fed to doblock() match the unaligned stream. */
  end = ptr + (len & ~3);
  switch(n) { /* how many bytes in c */
  case 0: /* c=[----] w=[3210] b=[3210]=w c'=[----] */
    for( ; ptr < end ; ptr+=4) {
      uint32_t k1 = READ_UINT32(ptr);
      doblock(h1, k1);
    }
    break;
  case 1: /* c=[0---] w=[4321] b=[3210]=c>>24|w<<8 c'=[4---] */
    for( ; ptr < end ; ptr+=4) {
      uint32_t k1 = c>>24;
      c = READ_UINT32(ptr);
      k1 |= c<<8;
      doblock(h1, k1);
    }
    break;
  case 2: /* c=[10--] w=[5432] b=[3210]=c>>16|w<<16 c'=[54--] */
    for( ; ptr < end ; ptr+=4) {
      uint32_t k1 = c>>16;
      c = READ_UINT32(ptr);
      k1 |= c<<16;
      doblock(h1, k1);
    }
    break;
  case 3: /* c=[210-] w=[6543] b=[3210]=c>>8|w<<24 c'=[654-] */
    for( ; ptr < end ; ptr+=4) {
      uint32_t k1 = c>>8;
      c = READ_UINT32(ptr);
      k1 |= c<<24;
      doblock(h1, k1);
    }
  }
#endif /*UNALIGNED_SAFE*/

  /* Advance over whole 32-bit chunks, possibly leaving 1..3 bytes */
  len -= len & ~3;

  /* Append any remaining bytes into carry */
  dobytes(len, h1, c, n, ptr, len);

  /* Copy out new running hash and carry */
  *ph1 = h1;
  /* High bytes keep the pending data; low 2 bits store the count n. */
  *pcarry = (c & ~0xff) | n;
}
/*---------------------------------------------------------------------------*/
/* Finalize a hash. To match the original Murmur3A the total_length must be provided */
// Finalizes a progressive hash without disturbing the passed-in state:
// mixes the pending carry bytes and the total length, then avalanches.
// Matches MurmurHash3_x86_32 when total_length is the overall byte count.
uint32_t PMurHash32_Result(uint32_t h, uint32_t carry, uint32_t total_length)
{
  uint32_t k1;
  int n = carry & 3;  // number of pending tail bytes stored in the carry
  if(n) {
    // The 1..3 leftover bytes sit in the high bytes of carry.
    k1 = carry >> (4-n)*8;
    k1 *= kC1; k1 = ROTL32(k1,15); k1 *= kC2; h ^= k1;
  }
  h ^= total_length;

  /* fmix */
  h ^= h >> 16;
  h *= 0x85ebca6b;
  h ^= h >> 13;
  h *= 0xc2b2ae35;
  h ^= h >> 16;

  return h;
}
/*---------------------------------------------------------------------------*/
/* All in one go */
/* One-shot convenience wrapper over the progressive API. */
uint32_t PMurHash32(const void * key, int len, uint32_t seed)
{
  uint32_t h1 = seed;
  uint32_t carry = 0;
  PMurHash32_Process(&h1, &carry, key, len);
  return PMurHash32_Result(h1, carry, (uint32_t) len);
}
/* MurmurHash3_x86_32 api */
/* MurmurHash3_x86_32-compatible entry point: same hash as the overload
 * above, but written through the `out` pointer. */
void PMurHash32(const void * key, int len, uint32_t seed, void * out)
{
  uint32_t h1 = seed;
  uint32_t carry = 0;
  PMurHash32_Process(&h1, &carry, key, len);
  *(uint32_t*)out = PMurHash32_Result(h1, carry, (uint32_t) len);
}

View File

@ -1,33 +0,0 @@
/*-----------------------------------------------------------------------------
* MurmurHash3 was written by Austin Appleby, and is placed in the public
* domain.
*
* This implementation was written by Shane Day, and is also public domain.
*
* This implementation was modified to match PMurHash128.cpp.
*/
/* ------------------------------------------------------------------------- */
// Microsoft Visual Studio
#if defined(_MSC_VER) && (_MSC_VER < 1600)
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
// Other compilers
#else // defined(_MSC_VER)
#include <stdint.h>
#endif // !defined(_MSC_VER)
/* ------------------------------------------------------------------------- */
/* Prototypes */
void PMurHash32_Process(uint32_t *ph1, uint32_t *pcarry, const void *key, int len);
uint32_t PMurHash32_Result(uint32_t h1, uint32_t carry, uint32_t total_length);
uint32_t PMurHash32(const void * key, int len, uint32_t seed);
void PMurHash32(const void * key, int len, uint32_t seed, void * out);

View File

@ -1,642 +0,0 @@
/*-----------------------------------------------------------------------------
* MurmurHash3 was written by Austin Appleby, and is placed in the public
* domain.
*
* This is a c++ implementation of MurmurHash3_128 with support for progressive
* processing based on PMurHash implementation written by Shane Day.
*/
/*-----------------------------------------------------------------------------
If you want to understand the MurmurHash algorithm you would be much better
off reading the original source. Just point your browser at:
http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp
What this version provides?
1. Progressive data feeding. Useful when the entire payload to be hashed
does not fit in memory or when the data is streamed through the application.
Also useful when hashing a number of strings with a common prefix. A partial
hash of a prefix string can be generated and reused for each suffix string.
How does it work?
We can only process entire 128 bit chunks of input, except for the very end
that may be shorter. So along with the partial hash we need to give back to
the caller a carry containing up to 15 bytes that we were unable to process.
This carry also needs to record the number of bytes the carry holds. I use
the low 4 bits as a count (0..15) and the carry bytes are shifted into the
high byte in stream order.
To handle endianess I simply use a macro that reads an uint and define
that macro to be a direct read on little endian machines, a read and swap
on big endian machines.
-----------------------------------------------------------------------------*/
#include "PMurHash128.h"
/*-----------------------------------------------------------------------------
 * Endianness, misalignment capabilities and util macros
 *
 * The following 5 macros are defined in this section. The other macros defined
 * are only needed to help derive these 5.
 *
 * READ_UINT32(x,i) Read a little endian unsigned 32-bit int at index
 * READ_UINT64(x,i) Read a little endian unsigned 64-bit int at index
 * UNALIGNED_SAFE Defined if READ_UINTXX works on non-word boundaries
 * ROTL32(x,r) Rotate x left by r bits
 * ROTL64(x,r) Rotate x left by r bits
 * BIG_CONSTANT   Portably suffix a 64-bit literal
 * FORCE_INLINE   Strongest inlining hint the compiler offers
 */
/* I386 or AMD64 — these architectures tolerate unaligned word loads, so the
 * faster unaligned read paths may be used. */
#if defined(_M_I86) || defined(_M_IX86) || defined(_X86_) || defined(__i386__) || defined(__i386) || defined(i386) \
 || defined(_M_X64) || defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) || defined(__amd64)
#define UNALIGNED_SAFE
#endif
/* Find best way to ROTL */
#if defined(_MSC_VER)
#define FORCE_INLINE __forceinline
#include <stdlib.h> /* Microsoft put _rotl declaration in here */
#define ROTL32(x,y) _rotl(x,y)
#define ROTL64(x,y) _rotl64(x,y)
#define BIG_CONSTANT(x) (x)
#else
#define FORCE_INLINE inline __attribute__((always_inline))
/* gcc recognises this code and generates a rotate instruction for CPUs with one */
#define ROTL32(x,r) (((uint32_t)x << r) | ((uint32_t)x >> (32 - r)))
#define ROTL64(x,r) (((uint64_t)x << r) | ((uint64_t)x >> (64 - r)))
#define BIG_CONSTANT(x) (x##LLU)
#endif
/* getblock32/64 read little-endian words, byte-swapping on big-endian hosts. */
#include "endianness.h"
#define READ_UINT64(ptr,i) getblock64((uint64_t *)ptr,i)
#define READ_UINT32(ptr,i) getblock32((uint32_t *)ptr,i)
//-----------------------------------------------------------------------------
// Finalization mix - force all bits of a hash block to avalanche
FORCE_INLINE uint32_t fmix32 ( uint32_t h )
{
  /* Standard Murmur3 32-bit finalizer: xorshift/multiply rounds. */
  h = (h ^ (h >> 16)) * 0x85ebca6b;
  h = (h ^ (h >> 13)) * 0xc2b2ae35;
  return h ^ (h >> 16);
}
//----------
FORCE_INLINE uint64_t fmix64 ( uint64_t k )
{
  /* Standard Murmur3 64-bit finalizer: xorshift/multiply rounds. */
  k = (k ^ (k >> 33)) * BIG_CONSTANT(0xff51afd7ed558ccd);
  k = (k ^ (k >> 33)) * BIG_CONSTANT(0xc4ceb9fe1a85ec53);
  return k ^ (k >> 33);
}
/*-----------------------------------------------------------------------------*
 PMurHash128x86
*-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
 * Core murmurhash algorithm macros */
/* MurmurHash3_x86_128 multiplication constants, one per 32-bit lane. */
static const uint32_t kC1 = 0x239b961b;
static const uint32_t kC2 = 0xab0e9789;
static const uint32_t kC3 = 0x38b34ae5;
static const uint32_t kC4 = 0xa1e38b93;
/* This is the main processing body of the algorithm. It operates
 * on each full 128-bits of input (four 32-bit lanes). */
FORCE_INLINE void doblock128x86(uint32_t &h1, uint32_t &h2, uint32_t &h3, uint32_t &h4,
    uint32_t &k1, uint32_t &k2, uint32_t &k3, uint32_t &k4)
{
  /* Lane 1 */
  k1 *= kC1;
  k1 = ROTL32(k1,15);
  k1 *= kC2;
  h1 ^= k1;
  h1 = ROTL32(h1,19);
  h1 += h2;
  h1 = h1*5 + 0x561ccd1b;
  /* Lane 2 */
  k2 *= kC2;
  k2 = ROTL32(k2,16);
  k2 *= kC3;
  h2 ^= k2;
  h2 = ROTL32(h2,17);
  h2 += h3;
  h2 = h2*5 + 0x0bcaa747;
  /* Lane 3 */
  k3 *= kC3;
  k3 = ROTL32(k3,17);
  k3 *= kC4;
  h3 ^= k3;
  h3 = ROTL32(h3,15);
  h3 += h4;
  h3 = h3*5 + 0x96cd1c35;
  /* Lane 4 */
  k4 *= kC4;
  k4 = ROTL32(k4,18);
  k4 *= kC1;
  h4 ^= k4;
  h4 = ROTL32(h4,13);
  h4 += h1;
  h4 = h4*5 + 0x32ac3b17;
}
/* Append unaligned bytes to carry, forcing hash churn if we have 16 bytes */
/* cnt=bytes to process, h1-h4=hash k1-k4=carry, n=bytes in carry (0..15),
 * ptr/len=payload */
FORCE_INLINE void dobytes128x86(int cnt, uint32_t &h1, uint32_t &h2, uint32_t &h3, uint32_t &h4,
    uint32_t &k1, uint32_t &k2, uint32_t &k3, uint32_t &k4,
    int &n, const uint8_t *&ptr, int &len)
{
  while(cnt--) {
    /* Each payload byte is shifted into the top of the carry word that
     * position n falls in, so bytes accumulate in stream order. */
    const uint32_t top = (uint32_t)*ptr++ << 24;
    if (n < 4) {
      k1 = k1>>8 | top;
      ++n;
    } else if (n < 8) {
      k2 = k2>>8 | top;
      ++n;
    } else if (n < 12) {
      k3 = k3>>8 | top;
      ++n;
    } else if (n < 15) {
      k4 = k4>>8 | top;
      ++n;
    } else {
      /* 16th byte completes a full block: churn the hash and reset. */
      k4 = k4>>8 | top;
      doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
      n = 0;
    }
    --len;
  }
}
/* Finalize a hash. To match the original Murmur3_128x86 the total_length must be provided */
void PMurHash128_Result(const uint32_t *ph, const uint32_t *pcarry, uint32_t total_length, uint32_t *out)
{
  uint32_t h1 = ph[0];
  uint32_t h2 = ph[1];
  uint32_t h3 = ph[2];
  uint32_t h4 = ph[3];
  /* k4 doubles as the carry-count holder: its low 4 bits are n (0..15). */
  uint32_t k1, k2, k3, k4 = pcarry[3];
  int n = k4 & 15;
  /* Mix any pending carry bytes into the hash. Partial words were filled
   * from the high byte down, so the highest (partial) word is shifted into
   * place first; the goto chain then falls through the lower, full words. */
  switch(n) {
    case 1: case 2: case 3: case 4:
      k1 = pcarry[0] >> (4-n)*8;
      goto finrot_k1;
    case 5: case 6: case 7: case 8:
      k2 = pcarry[1] >> (8-n)*8;
      goto finrot_k21;
    case 9: case 10: case 11: case 12:
      k3 = pcarry[2] >> (12-n)*8;
      goto finrot_k321;
    case 13: case 14: case 15:
      k4 >>= (16-n)*8;
      goto finrot_k4321;
    default: /* n == 0: no carry bytes pending */
      goto skiprot;
  }
finrot_k4321:
  k4 *= kC4; k4 = ROTL32(k4,18); k4 *= kC1; h4 ^= k4;
  k3 = pcarry[2];
finrot_k321:
  k3 *= kC3; k3 = ROTL32(k3,17); k3 *= kC4; h3 ^= k3;
  k2 = pcarry[1];
finrot_k21:
  k2 *= kC2; k2 = ROTL32(k2,16); k2 *= kC3; h2 ^= k2;
  k1 = pcarry[0];
finrot_k1:
  k1 *= kC1; k1 = ROTL32(k1,15); k1 *= kC2; h1 ^= k1;
skiprot:
  //----------
  // finalization
  h1 ^= total_length; h2 ^= total_length;
  h3 ^= total_length; h4 ^= total_length;
  h1 += h2; h1 += h3; h1 += h4;
  h2 += h1; h3 += h1; h4 += h1;
  h1 = fmix32(h1);
  h2 = fmix32(h2);
  h3 = fmix32(h3);
  h4 = fmix32(h4);
  h1 += h2; h1 += h3; h1 += h4;
  h2 += h1; h3 += h1; h4 += h1;
  out[0] = h1;
  out[1] = h2;
  out[2] = h3;
  out[3] = h4;
}
/*---------------------------------------------------------------------------*/
/* Main hashing function. Initialise carry[4] to {0,0,0,0} and h[4] to an initial {seed,seed,seed,seed}
 * if wanted. Both ph and pcarry are required arguments. */
void PMurHash128_Process(uint32_t * const ph, uint32_t * const pcarry, const void * const key, int len)
{
  uint32_t h1 = ph[0];
  uint32_t h2 = ph[1];
  uint32_t h3 = ph[2];
  uint32_t h4 = ph[3];
  uint32_t k1 = pcarry[0];
  uint32_t k2 = pcarry[1];
  uint32_t k3 = pcarry[2];
  uint32_t k4 = pcarry[3];
  const uint8_t *ptr = (uint8_t*)key;
  const uint8_t *end;
  /* Extract carry count from low 4 bits of c value */
  int n = k4 & 15;
#if defined(UNALIGNED_SAFE) && NODE_MURMURHASH_TEST_ALIGNED != 1
  /* This CPU handles unaligned word access */
// #pragma message ( "UNALIGNED_SAFE" )
  /* Consume any carry bytes (top the carry up to a full 16-byte block) */
  int i = (16-n) & 15;
  if(i && i <= len) {
    dobytes128x86(i, h1, h2, h3, h4, k1, k2, k3, k4, n, ptr, len);
  }
  /* Process 128-bit chunks */
  end = ptr + (len & ~15);
  for( ; ptr < end ; ptr+=16) {
    k1 = READ_UINT32(ptr, 0);
    k2 = READ_UINT32(ptr, 1);
    k3 = READ_UINT32(ptr, 2);
    k4 = READ_UINT32(ptr, 3);
    doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
  }
#else /*UNALIGNED_SAFE*/
  /* This CPU does not handle unaligned word access */
// #pragma message ( "ALIGNED" )
  /* Consume enough so that the next data byte is word aligned */
  int i = -(intptr_t)(void *)ptr & 3;
  if(i && i <= len) {
    dobytes128x86(i, h1, h2, h3, h4, k1, k2, k3, k4, n, ptr, len);
  }
  /* We're now aligned. Process in aligned blocks. Specialise for each possible carry count.
   * In the diagrams: k1..k4 show carry content before the loop, w the words
   * read per iteration, b the bytes fed to the block, k' the carry after. */
  end = ptr + (len & ~15);
  switch(n) { /* how many bytes in c */
  case 0: /*
  k1=[----] k2=[----] k3=[----] k4=[----] w=[3210 7654 ba98 fedc] b=[3210 7654 ba98 fedc] */
    for( ; ptr < end ; ptr+=16) {
      k1 = READ_UINT32(ptr, 0);
      k2 = READ_UINT32(ptr, 1);
      k3 = READ_UINT32(ptr, 2);
      k4 = READ_UINT32(ptr, 3);
      doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
    }
    break;
  case 1: case 2: case 3: /*
  k1=[10--] k2=[----] k3=[----] k4=[----] w=[5432 9876 dcba hgfe] b=[3210 7654 ba98 fedc] k1'=[hg--] */
    {
      const int lshift = n*8, rshift = 32-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint32_t c = k1>>rshift; // --10
        k2 = READ_UINT32(ptr, 0); // 5432
        c |= k2<<lshift; // 3210.
        k1 = READ_UINT32(ptr, 1); // 9876
        k2 = k1<<lshift | k2>>rshift; // 7654.
        k4 = READ_UINT32(ptr, 2); // dcba
        k3 = k4<<lshift | k1>>rshift; // ba98.
        k1 = READ_UINT32(ptr, 3); // hgfe.
        k4 = k1<<lshift | k4>>rshift; // fedc.
        doblock128x86(h1, h2, h3, h4, c, k2, k3, k4);
      }
    }
    break;
  case 4: /*
  k1=[3210] k2=[----] k3=[----] k4=[----] w=[7654 ba98 fedc jihg] b=[3210 7654 ba98 fedc] k1'=[jihg] */
    for( ; ptr < end ; ptr+=16) {
      k2 = READ_UINT32(ptr, 0);
      k3 = READ_UINT32(ptr, 1);
      k4 = READ_UINT32(ptr, 2);
      doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
      k1 = READ_UINT32(ptr, 3);
    }
    break;
  case 5: case 6: case 7: /*
  k1=[3210] k2=[54--] k3=[----] k4=[----] w=[9876 dcba hgfe lkji] b=[3210 7654 ba98 fedc] k1'=[jihg] k2'=[lk--] */
    {
      const int lshift = n*8-32, rshift = 32-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint32_t c = k2>>rshift; // --54
        k3 = READ_UINT32(ptr, 0); // 9876
        c |= k3<<lshift; // 7654.
        k4 = READ_UINT32(ptr, 1); // dcba
        k3 = k4<<lshift | k3>>rshift; // ba98.
        k2 = READ_UINT32(ptr, 2); // hgfe
        k4 = k2<<lshift | k4>>rshift; // fedc.
        doblock128x86(h1, h2, h3, h4, k1, c, k3, k4);
        k1 = k2>>rshift; // --hg
        k2 = READ_UINT32(ptr, 3); // lkji.
        k1 |= k2<<lshift; // jihg.
      }
    }
    /* NOTE(review): no `break` here — control falls into case 8's loop.
     * Harmless in practice because ptr == end once the loop above exits,
     * so case 8's loop body never runs; presumably an unintended omission
     * (every other specialisation ends with `break`) — confirm upstream. */
  case 8: /*
  k1=[3210] k2=[7654] k3=[----] k4=[----] w=[ba98 fedc jihg nmlk] b=[3210 7654 ba98 fedc] k1'=[jihg] k2'=[nmlk] */
    for( ; ptr < end ; ptr+=16) {
      k3 = READ_UINT32(ptr, 0);
      k4 = READ_UINT32(ptr, 1);
      doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
      k1 = READ_UINT32(ptr, 2);
      k2 = READ_UINT32(ptr, 3);
    }
    break;
  case 9: case 10: case 11: /*
  k1=[3210] k2=[7654] k3=[98--] k4=[----] w=[dcba hgfe lkji ponm] b=[3210 7654 ba98 fedc] k1'=[jihg] k2'=[nmlk] k3'=[po--] */
    {
      const int lshift = n*8-64, rshift = 32-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint32_t c = k3>>rshift; // --98
        k4 = READ_UINT32(ptr, 0); // dcba
        c |= k4<<lshift; // ba98.
        k3 = READ_UINT32(ptr, 1); // hgfe
        k4 = k3<<lshift | k4>>rshift; // fedc.
        doblock128x86(h1, h2, h3, h4, k1, k2, c, k4);
        k2 = READ_UINT32(ptr, 2); // lkji
        k1 = k2<<lshift | k3>>rshift; // jihg.
        k3 = READ_UINT32(ptr, 3); // ponm.
        k2 = k3<<lshift | k2>>rshift; // nmlk.
      }
    }
    /* NOTE(review): no `break` here either — falls into case 12's loop;
     * harmless for the same reason (ptr == end), but see note above. */
  case 12: /*
  k1=[3210] k2=[7654] k3=[ba98] k4=[----] w=[fedc jihg nmlk rqpo] b=[3210 7654 ba98 fedc] k1'=[jihg] k2'=[nmlk] k3'=[rqpo] */
    for( ; ptr < end ; ptr+=16) {
      k4 = READ_UINT32(ptr, 0);
      doblock128x86(h1, h2, h3, h4, k1, k2, k3, k4);
      k1 = READ_UINT32(ptr, 1);
      k2 = READ_UINT32(ptr, 2);
      k3 = READ_UINT32(ptr, 3);
    }
    break;
  default: /* 12 < n <= 15
  k1=[3210] k2=[7654] k3=[ba98] k4=[dc--] w=[hgfe lkji ponm tsrq] b=[3210 7654 ba98 fedc] k1'=[jihg] k2'=[nmlk] k3'=[rqpo] k4'=[ts--] */
    {
      const int lshift = n*8-96, rshift = 32-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint32_t c = k4>>rshift; // --dc
        k4 = READ_UINT32(ptr, 0); // hgfe
        c |= k4<<lshift; // fedc.
        doblock128x86(h1, h2, h3, h4, k1, k2, k3, c);
        k3 = READ_UINT32(ptr, 1); // lkji
        k1 = k3<<lshift | k4>>rshift; // jihg.
        c = READ_UINT32(ptr, 2); // ponm
        k2 = c<<lshift | k3>>rshift; // nmlk.
        k4 = READ_UINT32(ptr, 3); // tsrq.
        k3 = k4<<lshift | c>>rshift; // rqpo.
      }
    }
  }
#endif /*UNALIGNED_SAFE*/
  /* Advance over whole 128-bit chunks, possibly leaving 1..15 bytes */
  len -= len & ~15;
  /* Append any remaining bytes into carry */
  dobytes128x86(len, h1, h2, h3, h4, k1, k2, k3, k4, n, ptr, len);
  /* Copy out new running hash and carry; re-embed the byte count n in the
   * low bits of the last carry word. */
  ph[0] = h1;
  ph[1] = h2;
  ph[2] = h3;
  ph[3] = h4;
  pcarry[0] = k1;
  pcarry[1] = k2;
  pcarry[2] = k3;
  pcarry[3] = (k4 & ~0xff) | n;
}
/*---------------------------------------------------------------------------*/
/* All in one go */
/* MurmurHash3_x86_128 api */
void PMurHash128x86(const void * key, const int len, uint32_t seed, void * out)
{
  /* Seed the running state, feed the whole payload once, then finalize. */
  uint32_t h[4] = {seed, seed, seed, seed};
  uint32_t carry[4] = {0, 0, 0, 0};
  PMurHash128_Process(h, carry, key, len);
  PMurHash128_Result(h, carry, (uint32_t) len, (uint32_t *) out);
}
/*-----------------------------------------------------------------------------*
 PMurHash128x64
*-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
 * Core murmurhash algorithm macros */
/* MurmurHash3_x64_128 multiplication constants, one per 64-bit lane. */
static const uint64_t kC1L = BIG_CONSTANT(0x87c37b91114253d5);
static const uint64_t kC2L = BIG_CONSTANT(0x4cf5ad432745937f);
/* This is the main processing body of the algorithm. It operates
 * on each full 128-bits of input (two 64-bit lanes). */
FORCE_INLINE void doblock128x64(uint64_t &h1, uint64_t &h2, uint64_t &k1, uint64_t &k2)
{
  /* Lane 1 */
  k1 *= kC1L;
  k1 = ROTL64(k1,31);
  k1 *= kC2L;
  h1 ^= k1;
  h1 = ROTL64(h1,27);
  h1 += h2;
  h1 = h1*5 + 0x52dce729;
  /* Lane 2 */
  k2 *= kC2L;
  k2 = ROTL64(k2,33);
  k2 *= kC1L;
  h2 ^= k2;
  h2 = ROTL64(h2,31);
  h2 += h1;
  h2 = h2*5 + 0x38495ab5;
}
/* Append unaligned bytes to carry, forcing hash churn if we have 16 bytes */
/* cnt=bytes to process, h1,h2=hash k1,k2=carry, n=bytes in carry (0..15),
 * ptr/len=payload */
FORCE_INLINE void dobytes128x64(int cnt, uint64_t &h1, uint64_t &h2, uint64_t &k1, uint64_t &k2,
    int &n, const uint8_t *&ptr, int &len)
{
  while(cnt--) {
    /* Shift the next payload byte into the top of the carry word that
     * position n falls in; bytes accumulate in stream order. */
    const uint64_t top = (uint64_t)*ptr++ << 56;
    if (n < 8) {
      k1 = k1>>8 | top;
      ++n;
    } else if (n < 15) {
      k2 = k2>>8 | top;
      ++n;
    } else {
      /* 16th byte completes a full block: churn the hash and reset. */
      k2 = k2>>8 | top;
      doblock128x64(h1, h2, k1, k2);
      n = 0;
    }
    --len;
  }
}
/* Finalize a hash. To match the original Murmur3_128x64 the total_length must be provided */
void PMurHash128_Result(const uint64_t * const ph, const uint64_t * const pcarry,
    const uint32_t total_length, uint64_t * const out)
{
  uint64_t h1 = ph[0];
  uint64_t h2 = ph[1];
  uint64_t k1;
  /* k2 doubles as the carry-count holder: its low 4 bits are n (0..15). */
  uint64_t k2 = pcarry[1];
  int n = k2 & 15;
  if (n) {
    k1 = pcarry[0];
    if (n > 8) {
      /* k2 is partial: shift its n-8 carry bytes into place and mix;
       * k1 below is a full 8 bytes and needs no shift. */
      k2 >>= (16-n)*8;
      k2 *= kC2L; k2 = ROTL64(k2,33); k2 *= kC1L; h2 ^= k2;
    } else {
      /* Only k1 holds carry bytes (n of them, high-aligned). */
      k1 >>= (8-n)*8;
    }
    k1 *= kC1L; k1 = ROTL64(k1,31); k1 *= kC2L; h1 ^= k1;
  }
  //----------
  // finalization
  h1 ^= total_length; h2 ^= total_length;
  h1 += h2;
  h2 += h1;
  h1 = fmix64(h1);
  h2 = fmix64(h2);
  h1 += h2;
  h2 += h1;
  out[0] = h1;
  out[1] = h2;
}
/*---------------------------------------------------------------------------*/
/* Main hashing function. Initialise carry[2] to {0,0} and h[2] to an initial {seed,seed}
 * if wanted. Both ph and pcarry are required arguments. */
void PMurHash128_Process(uint64_t * const ph, uint64_t * const pcarry, const void * const key, int len)
{
  uint64_t h1 = ph[0];
  uint64_t h2 = ph[1];
  uint64_t k1 = pcarry[0];
  uint64_t k2 = pcarry[1];
  const uint8_t *ptr = (uint8_t*)key;
  const uint8_t *end;
  /* Extract carry count from low 4 bits of c value */
  int n = k2 & 15;
#if defined(UNALIGNED_SAFE) && NODE_MURMURHASH_TEST_ALIGNED != 1
  /* This CPU handles unaligned word access */
// #pragma message ( "UNALIGNED_SAFE" )
  /* Consume any carry bytes (top the carry up to a full 16-byte block) */
  int i = (16-n) & 15;
  if(i && i <= len) {
    dobytes128x64(i, h1, h2, k1, k2, n, ptr, len);
  }
  /* Process 128-bit chunks */
  end = ptr + (len & ~15);
  for( ; ptr < end ; ptr+=16) {
    k1 = READ_UINT64(ptr, 0);
    k2 = READ_UINT64(ptr, 1);
    doblock128x64(h1, h2, k1, k2);
  }
#else /*UNALIGNED_SAFE*/
  /* This CPU does not handle unaligned word access */
// #pragma message ( "ALIGNED" )
  /* Consume enough so that the next data byte is word aligned */
  int i = -(intptr_t)(void *)ptr & 7;
  if(i && i <= len) {
    dobytes128x64(i, h1, h2, k1, k2, n, ptr, len);
  }
  /* We're now aligned. Process in aligned blocks. Specialise for each possible carry count.
   * Diagrams: k1,k2 show carry content before the loop, w the words read per
   * iteration, b the bytes fed to the block, k' the carry afterwards. */
  end = ptr + (len & ~15);
  switch(n) { /* how many bytes in c */
  case 0: /*
  k1=[--------] k2=[--------] w=[76543210 fedcba98] b=[76543210 fedcba98] */
    for( ; ptr < end ; ptr+=16) {
      k1 = READ_UINT64(ptr, 0);
      k2 = READ_UINT64(ptr, 1);
      doblock128x64(h1, h2, k1, k2);
    }
    break;
  case 1: case 2: case 3: case 4: case 5: case 6: case 7: /*
  k1=[10------] k2=[--------] w=[98765432 hgfedcba] b=[76543210 fedcba98] k1'=[hg------] */
    {
      const int lshift = n*8, rshift = 64-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint64_t c = k1>>rshift;
        k2 = READ_UINT64(ptr, 0);
        c |= k2<<lshift;
        k1 = READ_UINT64(ptr, 1);
        k2 = k2>>rshift | k1<<lshift;
        doblock128x64(h1, h2, c, k2);
      }
    }
    break;
  case 8: /*
  k1=[76543210] k2=[--------] w=[fedcba98 nmlkjihg] b=[76543210 fedcba98] k1'=[nmlkjihg] */
    for( ; ptr < end ; ptr+=16) {
      k2 = READ_UINT64(ptr, 0);
      doblock128x64(h1, h2, k1, k2);
      k1 = READ_UINT64(ptr, 1);
    }
    break;
  default: /* 8 < n <= 15
  k1=[76543210] k2=[98------] w=[hgfedcba ponmlkji] b=[76543210 fedcba98] k1'=[nmlkjihg] k2'=[po------] */
    {
      const int lshift = n*8-64, rshift = 64-lshift;
      for( ; ptr < end ; ptr+=16) {
        uint64_t c = k2 >> rshift;
        k2 = READ_UINT64(ptr, 0);
        c |= k2 << lshift;
        doblock128x64(h1, h2, k1, c);
        k1 = k2 >> rshift;
        k2 = READ_UINT64(ptr, 1);
        k1 |= k2 << lshift;
      }
    }
  }
#endif /*UNALIGNED_SAFE*/
  /* Advance over whole 128-bit chunks, possibly leaving 1..15 bytes */
  len -= len & ~15;
  /* Append any remaining bytes into carry */
  dobytes128x64(len, h1, h2, k1, k2, n, ptr, len);
  /* Copy out new running hash and carry; re-embed the byte count n in the
   * low bits of the second carry word. */
  ph[0] = h1;
  ph[1] = h2;
  pcarry[0] = k1;
  pcarry[1] = (k2 & ~0xff) | n;
}
/*---------------------------------------------------------------------------*/
/* All in one go */
/* MurmurHash3_x64_128 api */
void PMurHash128x64(const void * key, const int len, uint32_t seed, void * out)
{
  /* Seed the running state, feed the whole payload once, then finalize. */
  uint64_t h[2] = {seed, seed};
  uint64_t carry[2] = {0, 0};
  PMurHash128_Process(h, carry, key, len);
  PMurHash128_Result(h, carry, (uint32_t) len, (uint64_t *) out);
}

View File

@ -1,38 +0,0 @@
/*-----------------------------------------------------------------------------
 * MurmurHash3 was written by Austin Appleby, and is placed in the public
 * domain.
 *
 * This is a c++ implementation of MurmurHash3_128 with support for progressive
 * processing based on PMurHash implementation written by Shane Day.
 */
/* ------------------------------------------------------------------------- */
// Microsoft Visual Studio
/* MSVC before 2010 (_MSC_VER 1600) ships no <stdint.h>; declare the
 * fixed-width types it needs by hand. */
#if defined(_MSC_VER) && (_MSC_VER < 1600)
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
typedef unsigned __int64 uint64_t;
// Other compilers
#else // defined(_MSC_VER)
#include <stdint.h>
#endif // !defined(_MSC_VER)
/* ------------------------------------------------------------------------- */
/* Formal prototypes */
/* Process/Result are C++ overloads selected by state width: the uint64_t
 * pair is the x64 flavour, the uint32_t quad the x86 flavour. Initialise h
 * to the seed and carry to zeros, call Process per chunk, then Result once
 * with the total byte count. */
// PMurHash128x64
void PMurHash128_Process(uint64_t ph[2], uint64_t pcarry[2], const void *key, int len);
void PMurHash128_Result(const uint64_t ph[2], const uint64_t pcarry[2], uint32_t total_length, uint64_t out[2]);
void PMurHash128x64(const void * key, const int len, uint32_t seed, void * out);
// PMurHash128x86
void PMurHash128_Process(uint32_t ph[4], uint32_t pcarry[4], const void *key, int len);
void PMurHash128_Result(const uint32_t ph[4], const uint32_t pcarry[4], uint32_t total_length, uint32_t out[4]);
void PMurHash128x86(const void * key, const int len, uint32_t seed, void * out);

View File

@ -1,78 +0,0 @@
/* Two-byte probe whose u16 view reveals the host byte order: on a
 * little-endian machine u16 == 1, on a big-endian machine u16 == 256. */
const union {
  uint8_t u8[2];
  uint16_t u16;
} EndianMix = {{ 1, 0 }};
/* True on big-endian hosts (or always in byte-swap test builds). */
FORCE_INLINE bool IsBigEndian()
{
#ifndef NODE_MURMURHASH_TEST_BYTESWAP
  // Constant-folded by the compiler.
  return EndianMix.u16 != 1;
#else
  /* Test build: pretend to be big-endian to exercise the swap paths. */
  return true;
#endif // NODE_MURMURHASH_TEST_BYTESWAP
}
/* Pick the cheapest available byte-swap: MSVC intrinsics, then compiler
 * builtins (clang via __has_builtin, gcc >= 4.3), otherwise fall through
 * to the portable C fallbacks defined below. */
#if defined(_MSC_VER)
# include <stdlib.h>
# define BSWAP32(u) _byteswap_ulong(u)
# define BSWAP64(u) _byteswap_uint64(u)
#else
# ifdef __has_builtin
#  if __has_builtin(__builtin_bswap32)
#   define BSWAP32(u) __builtin_bswap32(u)
#  endif // __has_builtin(__builtin_bswap32)
#  if __has_builtin(__builtin_bswap64)
#   define BSWAP64(u) __builtin_bswap64(u)
#  endif // __has_builtin(__builtin_bswap64)
# elif defined(__GNUC__) && ( \
  __GNUC__ > 4 || ( \
    __GNUC__ == 4 && ( \
      __GNUC_MINOR__ >= 3 \
    ) \
  ) \
)
#  define BSWAP32(u) __builtin_bswap32(u)
#  define BSWAP64(u) __builtin_bswap64(u)
# endif // __has_builtin
#endif // defined(_MSC_VER)
#ifndef BSWAP32
/* Portable fallback: reverse the byte order of a 32-bit word. */
FORCE_INLINE uint32_t BSWAP32(uint32_t u)
{
  return (u >> 24)
       | ((u >>  8) & 0x0000ff00)
       | ((u <<  8) & 0x00ff0000)
       | (u << 24);
}
#endif
#ifndef BSWAP64
/* Portable fallback: reverse the byte order of a 64-bit word. */
FORCE_INLINE uint64_t BSWAP64(uint64_t u)
{
  return (u >> 56)
       | ((u >> 40) & 0x000000000000ff00ULL)
       | ((u >> 24) & 0x0000000000ff0000ULL)
       | ((u >>  8) & 0x00000000ff000000ULL)
       | ((u <<  8) & 0x000000ff00000000ULL)
       | ((u << 24) & 0x0000ff0000000000ULL)
       | ((u << 40) & 0x00ff000000000000ULL)
       | (u << 56);
}
#endif
/* Read the i-th 32-bit block as little-endian, swapping on BE hosts. */
FORCE_INLINE uint32_t getblock32 ( const uint32_t * const p, const int i = 0L )
{
  return IsBigEndian() ? BSWAP32(p[i]) : p[i];
}
/* Read the i-th 64-bit block as little-endian, swapping on BE hosts. */
FORCE_INLINE uint64_t getblock64 ( const uint64_t * const p, const int i = 0L )
{
  return IsBigEndian() ? BSWAP64(p[i]) : p[i];
}

View File

@ -1,188 +0,0 @@
#if !defined(MURMURHASHUTILS_HEADER)
#define MURMURHASHUTILS_HEADER
/* Helpers for copying computed hash words into byte buffers or encoded
 * strings in a caller-selected byte order. */
#define FORCE_INLINE NAN_INLINE
#include "endianness.h"
namespace MurmurHash {
  using v8::Local;
  using v8::Value;
  /* Internal helpers only — kept out of the ABI. */
  namespace {
/* Read HashLength values of HashValueType from the byte stream `in`,
 * interpreting the bytes of each value most-significant first. */
template<int32_t HashLength, typename HashValueType>
static void ReadHashBytesMSB(const uint8_t * in, HashValueType hashp[HashLength])
{
  for(HashValueType * const hashend = hashp + HashLength ;;) {
    /* Accumulate sizeof(HashValueType) bytes, high byte first. */
    HashValueType val = (HashValueType) *(in++);
    for(int length = sizeof(HashValueType) ;--length; ) {
      val <<= 8;
      val |= (HashValueType) *(in++);
    }
    *(hashp++) = val;
    if (hashp == hashend) break;
  }
}
/* Copy hash bytes in the platform's native order: a bounds-clipped memcpy
 * starting `skip` bytes into the hash image. */
template<int32_t HashLength, typename HashValueType>
static void WriteHashBytesPlatform(const HashValueType hashp[HashLength],
    uint8_t * const out, int32_t length = HashSize, int32_t skip = 0)
{
  if (length <= 0) return;         // sanity check
  skip &= HashSize - 1;            // normalize skip into [0, HashSize)
  // clamp the copy so it never runs past the end of the hash
  const int32_t count = std::min(length, HashSize - skip);
  const uint8_t * const src = (const uint8_t *) hashp + skip;
  std::memcpy((void *) out, (const void *) src, (size_t) count);
}
/* Write `length` hash bytes to `out` most-significant-byte first, skipping
 * the first `skip` bytes of the MSB-ordered hash image. Bytes are emitted
 * back-to-front so each value can be consumed by right-shifting. */
template<int32_t HashLength, typename HashValueType>
static void WriteHashBytesMSB(const HashValueType hashp[HashLength],
    uint8_t * const out, int32_t length = HashSize, int32_t skip = 0)
{
  // sanity check
  if (length <= 0) return;
  // normalize skip
  skip &= HashSize - 1;
  // normalize length
  length = std::min(length, HashSize - skip);
  // let hashp point to the last hash value
  hashp += (length + skip - 1) / (int32_t) sizeof(HashValueType);
  // get first hash value
  HashValueType val = *(hashp--);
  // preliminary shift value when length is not aligned with hash value type
  const int shift = ((-(length + skip)) & ((int32_t) sizeof(HashValueType) - 1));
  val >>= 8 * shift;
  // set byte pointer at the end of output
  uint8_t * outp = out + length;
  // get initial number of bytes to write for a single value
  length = std::min(length, (int32_t) sizeof(HashValueType) - shift);
  for(;; val = *(hashp--)) {
    for(;; val >>= 8) {
      *(--outp) = (uint8_t) (val & 0x0ff);
      if (--length == 0) break;
    }
    /* Remaining bytes to emit, capped at one value's width per pass. */
    length = std::min((int32_t)(outp - out), (int32_t) sizeof(HashValueType));
    if (length == 0) break;
  }
}
/* Write `length` hash bytes to `out` least-significant-byte first, skipping
 * the first `skip` bytes of the LSB-ordered hash image. Bytes are emitted
 * front-to-back, right-shifting each value as it is consumed. */
template<int32_t HashLength, typename HashValueType>
static void WriteHashBytesLSB(const HashValueType hashp[HashLength],
    uint8_t * out, int32_t length = HashSize, int32_t skip = 0)
{
  // sanity check
  if (length <= 0) return;
  // normalize skip
  skip &= HashSize - 1;
  // normalize length
  length = std::min(length, HashSize - skip);
  // let hashp point to the first hash value
  hashp += skip / (int32_t) sizeof(HashValueType);
  // get first hash value
  HashValueType val = *(hashp++);
  // preliminary shift value when length is not aligned with hash value type
  const int shift = skip & ((int32_t) sizeof(HashValueType) - 1);
  val >>= 8 * shift;
  // set termination byte pointer at the end of output
  uint8_t * const outt = out + length;
  // get initial number of bytes to write for a single value
  length = std::min(length, (int32_t) sizeof(HashValueType) - shift);
  for(;; val = *(hashp++)) {
    for(;; val >>= 8) {
      *(out++) = (uint8_t) (val & 0x0ff);
      if (--length == 0) break;
    }
    /* Remaining bytes to emit, capped at one value's width per pass. */
    length = std::min((int32_t)(outt - out), (int32_t) sizeof(HashValueType));
    if (length == 0) break;
  }
}
/* Write hash bytes in OutputByteOrder, choosing the cheapest writer:
 * a swapping pass is only needed when the requested order differs from
 * the platform's (the branch is constant-folded per instantiation). */
template<ByteOrderType OutputByteOrder, int32_t HashLength, typename HashValueType>
NAN_INLINE static void WriteHashBytes(const HashValueType hashp[HashLength],
    uint8_t * out, int32_t length = HashSize, int32_t skip = 0)
{
  const bool bigendian = IsBigEndian();
  if (OutputByteOrder == LSBFirst && bigendian) {
    WriteHashBytesLSB<HashLength>(hashp, out, length, skip);
  } else if (OutputByteOrder == MSBFirst && !bigendian) {
    WriteHashBytesMSB<HashLength>(hashp, out, length, skip);
  } else {
    WriteHashBytesPlatform<HashLength>(hashp, out, length, skip);
  }
}
/* Encode the hash as a V8 value using Nan::Encode (hex/base64/binary/...),
 * honouring OutputByteOrder. A stack copy is only made when the bytes must
 * be reordered; otherwise the hash words are encoded in place. */
template<ByteOrderType OutputByteOrder, int32_t HashLength, typename HashValueType>
inline static Local<Value> HashToEncodedString(const HashValueType hashp[HashLength], enum Nan::Encoding enc)
{
  Nan::EscapableHandleScope scope;
  Local<Value> result;
  // constant folded
  if (OutputByteOrder == LSBFirst && IsBigEndian()) {
    uint8_t str[HashSize];
    WriteHashBytesLSB<HashLength>(hashp, str);
    result = Nan::Encode((void *) str, (size_t) HashSize, enc);
  } else if (OutputByteOrder == MSBFirst && !IsBigEndian()) {
    uint8_t str[HashSize];
    WriteHashBytesMSB<HashLength>(hashp, str);
    result = Nan::Encode((void *) str, (size_t) HashSize, enc);
  } else {
    result = Nan::Encode((void *) hashp, (size_t) HashSize, enc);
  }
  /* Escape so the handle survives this function's scope. */
  return scope.Escape(result);
}
/* Copy (part of) the hash into a caller-provided buffer.
 * offset: start position in the buffer; negative counts from the buffer end.
 * length: number of hash bytes to write; negative selects bytes counted
 * from the end of the hash. The write is clipped to the buffer bounds. */
template<ByteOrderType OutputByteOrder, int32_t HashLength, typename HashValueType>
inline static void WriteHashToBuffer(const HashValueType hashp[HashLength],
    char * const bufptr, int32_t bufsize,
    int32_t offset, int32_t length)
{
  int32_t skip = 0;
  // normalize
  length = std::max(-HashSize, std::min(HashSize, length));
  // negative length is counted from the end of the hash
  if (length < 0) {
    /* skip stays negative here; WriteHashBytes masks it into range. */
    skip = length;
    length = -length;
  }
  // negative offset is counted from the end of the buffer
  if ( offset < 0 ) {
    offset += bufsize;
  }
  // still negative
  if ( offset < 0 ) {
    /* Clip the portion that would land before the start of the buffer. */
    length += offset;
    skip -= offset;
    offset = 0;
  }
  length = std::min(length, bufsize - offset);
  WriteHashBytes<OutputByteOrder, HashLength>(hashp, (uint8_t *) bufptr + offset, length, skip);
}
/* ASCII-only lower-casing of a single character. */
NAN_INLINE static char ToLower(const char c) {
  if (c < 'A' || c > 'Z') return c;
  return (char)(c + ('a' - 'A'));
}
/* Case-insensitively compare `a` against `b`, where `b` is expected to be
 * already lower-case (only `a` is folded). True on full match. */
static bool StringEqualLower(const char* a, const char* b) {
  for (;;) {
    if (*a == '\0') return *b == '\0';
    if (*b == '\0') return false;
    if (ToLower(*a++) != *b++) return false;
  }
}
  } // anonymous namespace
} // namespace MurmurHash
#endif // MURMURHASHUTILS_HEADER

View File

@ -1,317 +0,0 @@
#include "static_assert.h"
#include "nodemurmurhash.h"
#include "MurmurHash2.h"
#include "PMurHash.h"
#include "PMurHash128.h"
#include "murmurhashutils.h"
#include "inputdata.h"
#include "asyncworker.h"
/* Adapt the 64-bit MurmurHash2 (x64 flavour) to the common signature used
 * by the MurmurHash template: result is written through `out`. */
NAN_INLINE void MurmurHash2_x64_64 (
    const void * key, int len, uint32_t seed, void * out)
{
  const uint64_t hash = MurmurHash64A( key, len, (uint64_t) seed );
  *(uint64_t *)out = hash;
}
/* Adapt the 64-bit MurmurHash2 (x86 flavour) to the common signature used
 * by the MurmurHash template: result is written through `out`. */
NAN_INLINE void MurmurHash2_x86_64 (
    const void * key, int len, uint32_t seed, void * out)
{
  const uint64_t hash = MurmurHash64B( key, len, (uint64_t) seed );
  *(uint64_t *)out = hash;
}
namespace MurmurHash {
  using v8::Object;
  using v8::Uint32;
  using v8::Function;
  using v8::PropertyAttribute;
  using v8::ReadOnly;
  using v8::DontDelete;
  /* Read the optional offset argument that follows an output buffer at
   * INDEX; defaults to 0 when absent or non-numeric. */
  #define GET_ARG_OFFSET(INFO,INDEX,ARGC) \
    ((INDEX) + 1 < (ARGC) \
      ? Nan::To<int32_t>((INFO)[(INDEX) + 1]).FromMaybe(0) \
      : 0)
  /* Read the optional length argument two slots past the output buffer at
   * INDEX; defaults to DEF when absent or non-numeric. */
  #define GET_ARG_LENGTH(INFO,INDEX,ARGC,DEF) \
    ((INDEX) + 2 < (ARGC) \
      ? Nan::To<int32_t>((INFO)[(INDEX) + 2]).FromMaybe(DEF) \
      : (DEF))
/**
* Calculate MurmurHash from data
*
* murmurHash(data[, callback])
* murmurHash(data, output[, offset[, length]][, callback])
* murmurHash(data{String}, encoding|output_type[, callback])
* murmurHash(data, output_type[, seed][, callback])
* murmurHash(data, seed[, output[, offset[, length]]][, callback])
* murmurHash(data, seed[, output_type][, callback])
* murmurHash(data, encoding, output_type[, callback])
* murmurHash(data{String}, encoding, output[, offset[, length]][, callback])
* murmurHash(data{String}, encoding, seed[, output[, offset[, length]]][, callback])
* murmurHash(data{String}, encoding, seed[, output_type][, callback])
*
* @param {string|Buffer} data - a byte-string to calculate hash from
* @param {string} encoding - data string encoding, should be:
* 'utf8', 'ucs2', 'ascii', 'hex', 'base64' or 'binary';
* 'binary' by default
* @param {Uint32} seed - murmur hash seed, 0 by default
* @param {Buffer} output - a Buffer object to write hash bytes to;
* the same object will be returned
* @param {number} offset - start writing into output at offset byte;
* negative offset starts from the end of the output buffer
* @param {number} length - a number of bytes to write from calculated hash;
* negative length starts from the end of the hash;
* if absolute value of length is larger than the size of calculated
* hash, bytes are written only up to the hash size
* @param {string} output_type - a string indicating return type:
* 'number' - for murmurHash32 an unsigned 32-bit integer,
* other hashes - hexadecimal string
* 'hex' - hexadecimal string
* 'base64' - base64 string
* 'binary' - binary string
* 'buffer' - a new Buffer object;
* 'number' by default
* @param {Function} callback - optional callback(err, result)
* if provided the hash will be calculated asynchronously using libuv
* worker queue, the return value in this instance will be `undefined`
* and the result will be provided to the callback function;
* Be careful as reading and writing by multiple threads to the same
* memory may produce indeterminate results
*
* The order of bytes written to a Buffer or encoded string depends on
* function's endianness.
*
* `data` and `output` arguments might reference the same Buffer object
* or buffers referencing the same memory (views).
*
* @return {number|Buffer|String|undefined}
**/
template<MurmurHashFunctionType HashFunction, typename HashValueType, int32_t HashLength, ByteOrderType OutputByteOrder>
NAN_METHOD(MurmurHash)
{
  /* Only 4-, 8- and 16-byte hashes (powers of two) are supported. */
  STATIC_ASSERT((HashSize & 0x1c) && (HashSize|(HashSize-1))+1 == HashSize<<1, "HashSize is not 4, 8 or 16");
  InputData data;
  OutputType outputType( DefaultOutputType );
  uint32_t seed = 0;
  /* parse args */
  int argc = std::min( 7, info.Length() );
  int outputTypeIndex = argc;
  int callbackIndex = -1;
  bool validEncoding = true;
  enum Nan::Encoding encoding = Nan::BUFFER;
  /* A trailing function argument always selects the async path. */
  if ( argc > 0 && info[argc - 1]->IsFunction() ) {
    callbackIndex = --argc;
  }
  if ( argc > 0 ) {
    /* String input defaults to "binary" decoding unless overridden below. */
    if ( info[0]->IsString() ) {
      encoding = Nan::BINARY;
    }
    if ( argc >= 2 ) {
      if ( info[1]->IsString() ) { // input_encoding or output_type
        if ( argc == 2 ) { // try output_type
          InputData::ReadEncodingString( info[1].As<String>() );
          outputType = InputData::DetermineOutputType();
          switch(outputType) {
            case HexStringOutputType:
            case BinaryStringOutputType:
            case Base64StringOutputType:
              // ambiguous if input is string
            case UnknownOutputType: // input_encoding
              if (encoding != Nan::BUFFER) {
                validEncoding = InputData::DetermineEncoding( encoding );
                outputType = DefaultOutputType; // revert to default
              }
              break;
            default:
              void(0); // unambiguous - "number" or "buffer"
          }
        } else if (encoding == Nan::BUFFER) { // output_type
          /* Input is a Buffer, so info[1] must be an output_type; the third
           * argument (seed or another string) disambiguates which. */
          if ( info[2]->IsNumber() ) {
            InputData::ReadEncodingString( info[1].As<String>() );
            seed = Nan::To<uint32_t>(info[2]).FromMaybe(0U);
          } else if ( info[2]->IsString() ) {
            InputData::ReadEncodingString( info[2].As<String>() );
          } else {
            InputData::ReadEncodingString( info[1].As<String>() );
          }
          outputType = InputData::DetermineOutputType();
        } else { // try input_encoding
          InputData::ReadEncodingString( info[1].As<String>() );
          if ( !(validEncoding = InputData::DetermineEncoding( encoding )) ) {
            outputType = InputData::DetermineOutputType(); // try output_type
            if (outputType == UnknownOutputType) {
              outputType = DefaultOutputType;
            } else {
              validEncoding = true;
              if ( info[2]->IsNumber() ) seed = Nan::To<uint32_t>(info[2]).FromMaybe(0U);
            }
          }
          outputTypeIndex = 2; // continue from 2
        }
      } else {
        // output or seed
        if ( node::Buffer::HasInstance(info[1]) ) {
          outputType = ProvidedBufferOutputType;
          outputTypeIndex = 1;
        } else {
          if ( info[1]->IsNumber() ) seed = Nan::To<uint32_t>(info[1]).FromMaybe(0U);
          outputTypeIndex = 2; // continue from 2
        }
      }
      if ( outputType == DefaultOutputType ) { // output_type or output or seed
        /* Scan remaining args: numbers update the seed; the first string or
         * Buffer settles the output type and ends the scan. */
        for (; outputTypeIndex < argc; ++outputTypeIndex ) {
          if ( info[outputTypeIndex]->IsNumber() ) {
            seed = Nan::To<uint32_t>(info[outputTypeIndex]).FromMaybe(0U);
          } else if ( info[outputTypeIndex]->IsString() ) {
            InputData::ReadEncodingString( info[outputTypeIndex].As<String>() );
            outputType = InputData::DetermineOutputType();
            break;
          } else if ( node::Buffer::HasInstance(info[outputTypeIndex]) ) {
            outputType = ProvidedBufferOutputType;
            break;
          } else
            break;
        }
      }
    }
  }
  if ( callbackIndex > -1 ) {
    /* Async path: queue a libuv worker and return undefined. */
    MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder> *asyncWorker;
    Nan::Callback *callback = new Nan::Callback(
        Local<Function>::Cast(info[callbackIndex]));
    if ( argc > 0 ) {
      asyncWorker = new MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>(
          callback, outputType, seed, info[0], encoding, validEncoding);
    } else {
      asyncWorker = new MurmurHashWorker<HashFunction,HashValueType,HashLength,OutputByteOrder>(callback);
    }
    if (outputType == ProvidedBufferOutputType) {
      asyncWorker->SaveOutputBuffer(
          info[outputTypeIndex],
          GET_ARG_OFFSET(info, outputTypeIndex, argc),
          GET_ARG_LENGTH(info, outputTypeIndex, argc, HashSize));
    }
    Nan::AsyncQueueWorker(asyncWorker);
    info.GetReturnValue().Set(Nan::Undefined());
  } else {
    /* Sync path: decode the input, hash it, encode per outputType. */
    if ( argc > 0 ) {
      data.Setup( info[0], encoding, validEncoding );
    }
    if ( ! data.IsValid() )
      return Nan::ThrowTypeError(data.Error());
    Local<Value> result;
    HashValueType hash[HashLength];
    HashFunction( (const void *) *data, (int) data.length(), seed, (void *)hash );
    switch(outputType) {
      case DefaultOutputType:
      case NumberOutputType:
        /* Only a 32-bit hash fits a JS number; wider hashes fall back to hex. */
        if (HashSize == sizeof(uint32_t)) {
          result = Nan::New<Uint32>( (uint32_t) (*hash) );
        } else {
          result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::HEX );
        }
        break;
      case HexStringOutputType:
        result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::HEX );
        break;
      case BinaryStringOutputType:
        result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::BINARY );
        break;
      case Base64StringOutputType:
        result = HashToEncodedString<OutputByteOrder, HashLength>( hash, Nan::BASE64 );
        break;
      case BufferOutputType:
        result = Nan::NewBuffer( HashSize ).ToLocalChecked();
        WriteHashBytes<OutputByteOrder, HashLength>(hash, (uint8_t *) node::Buffer::Data(result));
        break;
      case ProvidedBufferOutputType:
        /* Write into the caller's buffer and return that same buffer. */
        result = info[outputTypeIndex];
        WriteHashToBuffer<OutputByteOrder, HashLength>(
            hash,
            node::Buffer::Data(result),
            (int32_t) node::Buffer::Length(result),
            GET_ARG_OFFSET(info, outputTypeIndex, argc),
            GET_ARG_LENGTH(info, outputTypeIndex, argc, HashSize));
        break;
      default:
        return Nan::ThrowTypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"");
    }
    info.GetReturnValue().Set(result);
  }
}
#undef GET_ARG_OFFSET
#undef GET_ARG_LENGTH
/* Registers every murmurhash variant on `target`; each bound function
   formats its result in the byte order given by the OutputByteOrder
   template argument (MSBFirst or LSBFirst).
   "murmurHash64"/"murmurHash128" alias the x86 or x64 implementation
   depending on the NODE_MURMURHASH_DEFAULT_32BIT build flag. */
template<ByteOrderType OutputByteOrder>
NAN_MODULE_INIT(InitWithOrder)
{
/* "murmurHash" is an alias of the 32-bit x86 variant. */
Nan::SetMethod(target, "murmurHash", MurmurHash< PMurHash32, uint32_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash32", MurmurHash< PMurHash32, uint32_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash64x64", MurmurHash<MurmurHash2_x64_64, uint64_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash64x86", MurmurHash<MurmurHash2_x86_64, uint64_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash128x64", MurmurHash< PMurHash128x64, uint64_t, 2, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash128x86", MurmurHash< PMurHash128x86, uint32_t, 4, OutputByteOrder>);
#if defined(NODE_MURMURHASH_DEFAULT_32BIT)
Nan::SetMethod(target, "murmurHash64", MurmurHash<MurmurHash2_x86_64, uint64_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash128", MurmurHash< PMurHash128x86, uint32_t, 4, OutputByteOrder>);
#else
Nan::SetMethod(target, "murmurHash64", MurmurHash<MurmurHash2_x64_64, uint64_t, 1, OutputByteOrder>);
Nan::SetMethod(target, "murmurHash128", MurmurHash< PMurHash128x64, uint64_t, 2, OutputByteOrder>);
#endif
}
/* Module bootstrap: exposes the hash functions at the top level (MSB-first
   output) and under three namespace objects: "BE" (MSB-first), "LE"
   (LSB-first) and "platform" (whichever matches the host byte order). */
NAN_MODULE_INIT(Init)
{
/* Top-level exports produce MSB-first (big-endian) output. */
InitWithOrder<MSBFirst>( target );
Local<Object> bigEndian( Nan::New<Object>() );
InitWithOrder<MSBFirst>( bigEndian );
/* Namespace objects are read-only and non-deletable. */
Nan::DefineOwnProperty(target, Nan::New<String>("BE").ToLocalChecked(), bigEndian,
static_cast<PropertyAttribute>(ReadOnly | DontDelete) ).FromJust();
Local<Object> littleEndian( Nan::New<Object>() );
InitWithOrder<LSBFirst>( littleEndian );
Nan::DefineOwnProperty(target, Nan::New<String>("LE").ToLocalChecked(), littleEndian,
static_cast<PropertyAttribute>(ReadOnly | DontDelete) ).FromJust();
/* "platform" is an alias of BE or LE, chosen by the host's byte order. */
Nan::DefineOwnProperty(target, Nan::New<String>("platform").ToLocalChecked(),
IsBigEndian() ? bigEndian : littleEndian,
static_cast<PropertyAttribute>(ReadOnly | DontDelete) ).FromJust();
}
}
NODE_MODULE(murmurhash, MurmurHash::Init)

View File

@ -1,32 +0,0 @@
#if !defined(NODEMURMURHASH_HEADER)
#define NODEMURMURHASH_HEADER
#include <node.h>
#include <node_buffer.h>
#include <v8.h>
#include <nan.h>
/* Byte size of one complete hash result. HashValueType and HashLength are
   expected to be in scope (they are template parameters of the hashing
   code) wherever this macro is expanded. */
#define HashSize (static_cast<int32_t>(sizeof(HashValueType) * HashLength))
namespace MurmurHash {
/* Common signature of the native hash implementations:
   (input data, input length in bytes, seed, output hash words). */
typedef void (*MurmurHashFunctionType)(const void *, int, uint32_t, void *);
/* How a computed hash is rendered back to the JS caller. */
typedef enum {
DefaultOutputType,
NumberOutputType,
HexStringOutputType,
BinaryStringOutputType,
Base64StringOutputType,
BufferOutputType,
ProvidedBufferOutputType,
UnknownOutputType,
} OutputType;
/* Byte order used when writing hash words into strings or buffers. */
typedef enum {
MSBFirst,
LSBFirst
} ByteOrderType;
}
#endif

View File

@ -1,20 +0,0 @@
/* Portable STATIC_ASSERT(expression, message): uses the C++11 / MSVC 2010+
   static_assert keyword when available, otherwise emulates it with an enum
   whose initializer divides by zero (a compile error) when the condition
   is false. The message argument is ignored by the emulation. */
#if !defined(STATIC_ASSERT)
#if (defined(_MSC_VER) && (_MSC_VER >= 1600)) || __cplusplus >= 201103L
#define STATIC_ASSERT(e,m) static_assert(e,m)
#else
#define ASSERT_CONCAT_(a, b) a##b
#define ASSERT_CONCAT(a, b) ASSERT_CONCAT_(a, b)
/* These can't be used after statements in c89. */
#if defined(__COUNTER__)
#define STATIC_ASSERT(e,m) \
enum { ASSERT_CONCAT(static_assert_, __COUNTER__) = 1/((int)!!(e)) }
#else
/* This can't be used twice on the same line so ensure if using in headers
* that the headers are not included twice (by wrapping in #ifndef...#endif)
* Note it doesn't cause an issue when used on same line of separate modules
* compiled with gcc -combine -fwhole-program. */
#define STATIC_ASSERT(e,m) \
enum { ASSERT_CONCAT(assert_line_, __LINE__) = 1/((int)!!(e)) }
#endif
#endif
#endif

View File

@ -1,85 +0,0 @@
/**
 * `murmurhash-native/stream` module.
 *
 * Example:
 *
 * ```ts
 * import { createHash } from "murmurhash-native/stream"
 * import * as fs from "fs"
 *
 * fs.createReadStream("hash_me.txt")
 * .pipe(createHash("murmurhash128x64", {seed: 42, encoding: "hex"}))
 * .pipe(fs.createWriteStream("hash_me.txt.hash"))
 * ```
 *
 * If you don't need stream interface prefer to use utilities from the [[incremental]] module.
 * @module stream
 */
/***/
import { TransformOptions, Transform } from "stream";
import { Encoding, OutputType,
Endianness, IMurHasher } from "./incremental";
export { Encoding, OutputType, Endianness, IMurHasher };
/**
 * Lists available algorithm names for [createHash].
 */
export function getHashes(): string[];
/**
 * Constructs a new MurmurHash object that can be used to generate murmurhash digests
 * from the data stream.
 *
 * If algorithm is an instance of a MurmurHash or a serialized object,
 * the seed option is being ignored.
 *
 * @param algorithm one of the available murmurhash algorithms,
 * a murmur hasher instance or a serialized object.
 * @param options hasher or stream options.
 */
export function createHash(algorithm: string|MurmurHash|MurmurHashSerial, options?: MurmurHashOptions): MurmurHash;
/**
 * Constructs a new MurmurHash object that can be used to generate murmurhash digests
 * from the data stream.
 *
 * @param algorithm one of the available murmurhash algorithms.
 * @param seed initial hash seed as an unsigned 32-bit integer.
 */
export function createHash(algorithm: string, seed?: number): MurmurHash;
/** Options for createHash */
export interface MurmurHashOptions extends TransformOptions {
/** Initial hash seed as an unsigned 32-bit integer. */
seed?: number;
/** Digest byte order. */
endianness?: Endianness;
}
/** A serialized MurmurHash object representation created by [[MurmurHash.toJSON]] function */
export interface MurmurHashSerial {
/** Constructor name of the underlying native hasher. */
type: string;
/** Serialized hasher state, as produced by the native hasher's toJSON(). */
seed: string;
}
/** An incremental murmur hash utility with additional node's stream.Transform api */
export class MurmurHash extends Transform implements IMurHasher {
/** Size in bytes of the serialized hasher. */
static readonly SERIAL_BYTE_LENGTH: number;
/** Size in bytes of the serialized hasher (instance alias of the static constant). */
readonly SERIAL_BYTE_LENGTH: number;
constructor(algorithm: string|MurmurHash|MurmurHashSerial, options?: MurmurHashOptions);
constructor(algorithm: string, seed?: number);
/** Copies the internal hasher state onto `target` and returns it. */
copy(target: MurmurHash): MurmurHash;
/** Returns the digest in the requested output format. */
digest(outputType?: OutputType): number|string|Buffer;
/** Writes the digest into the provided buffer and returns it. */
digest(output: Buffer, offset?: number, length?: number): Buffer;
/** Returns a serializable representation (see [[MurmurHashSerial]]). */
toJSON(): MurmurHashSerial;
/** Serializes the hasher state into a string. */
serialize(): string;
/** Serializes the hasher state into the provided buffer. */
serialize(output: Buffer, offset?: number): Buffer;
/** Updates the hash with `data`; returns `this` for chaining. */
update(data: string|Buffer, encoding?: Encoding): this;
/** Asynchronously updates the hash with `data`. */
update(data: string|Buffer, encoding: Encoding, callback: (err: Error) => void): void;
/** Asynchronously updates the hash with `data`. */
update(data: string|Buffer, callback: (err: Error) => void): void;
/** Digest byte order. */
endianness: Endianness;
/** True while the native hasher is busy (e.g. an asynchronous update is pending). */
readonly isBusy: boolean;
/** Total number of bytes hashed so far. */
readonly total: number;
/** The underlying native hasher this wrapper delegates to. */
protected _handle: IMurHasher;
}

View File

@ -1,154 +0,0 @@
"use strict";
var util = require('util');
var binding = require('./incremental');
var algorithms = {};
exports.getHashes = function() {
return Object.keys(binding)
.filter(function(name) {
return 'function' === typeof binding[name];
})
.map(function(name) { return name.toLowerCase(); });
};
// Populate the registry with every constructor found on the native binding.
for (var bindingKey in binding) {
  if (binding.hasOwnProperty(bindingKey) && 'function' === typeof binding[bindingKey]) {
    algorithms[bindingKey.toLowerCase()] = binding[bindingKey];
  }
}
// Common aliases for the 32-bit x86 variant.
algorithms.murmurhash3a = algorithms.murmurhash32 = algorithms.murmurhash32x86 = algorithms.murmurhash;
/* from nodejs lib/crypto.js */
var LazyTransform = require('./lazy_transform');
/**
* Creates and returns a MurmurHash object that can be used to generate murmurhash digests.
*
* Except murmur's `seed` and `endianness` options, the rest of the options are passed to
* stream.Transform constructor.
*
* @param {string|MurmurHash} algorithm|hasher - one of available algorithms
* or a murmur hasher instance
* @param {number|object} seed|options - hasher options
**/
exports.createHash = exports.MurmurHash = MurmurHash;
/**
 * Stream-capable murmur hasher.
 *
 * `algorithm` may be an algorithm name, another MurmurHash instance
 * (whose native state is cloned), or a serialized object ({type, seed}).
 * `options` may be a numeric seed, or an options object whose `seed` and
 * `endianness` keys configure the native hasher; remaining options are
 * passed on to the stream.Transform constructor via LazyTransform.
 */
function MurmurHash(algorithm, options) {
  if (!(this instanceof MurmurHash)) return new MurmurHash(algorithm, options);

  var seed, endianness;
  if ('object' === typeof options && options !== null) {
    seed = options.seed;
    endianness = options.endianness;
  } else {
    seed = options;
    options = undefined;
  }

  if (algorithm instanceof MurmurHash) {
    // Clone the native state of the other hasher; endianness may override.
    this._handle = new algorithm._handle.constructor(algorithm._handle, endianness);
  } else if (algorithm) {
    if ('object' === typeof algorithm) {
      // A serialized representation produced by toJSON().
      seed = algorithm.seed;
      algorithm = algorithm.type;
    }
    var Handle = algorithms[algorithm.toLowerCase()];
    if (!Handle) {
      throw new Error("Algorithm not supported");
    }
    this._handle = new Handle(seed, endianness);
  } else {
    throw new TypeError("Must give algorithm string, a serialized state or a MurmurHash instance");
  }
  LazyTransform.call(this, options);
}
util.inherits(MurmurHash, LazyTransform);
// Transform hook: feed each chunk to the native hasher. Chunks below the
// 8192-byte threshold (chosen experimentally) are hashed synchronously;
// larger chunks take the asynchronous native path.
MurmurHash.prototype._transform = function(chunk, encoding, callback) {
  if (chunk.length >= 8192) {
    this._handle.update(chunk, encoding, callback);
  } else {
    this._handle.update(chunk, encoding);
    callback();
  }
};
// Transform hook: emit the final digest once the input ends.
MurmurHash.prototype._flush = function(callback) {
  var result = this._handle.digest();
  this.push(result);
  callback();
};
// Proxies the native update(); returns `this` for chaining when the native
// call returns truthy, otherwise the native (falsy) result.
MurmurHash.prototype.update = function() {
  var handle = this._handle;
  var updated = handle.update.apply(handle, arguments);
  return updated && this;
};
// Proxies the native digest().
MurmurHash.prototype.digest = function() {
  var handle = this._handle;
  return handle.digest.apply(handle, arguments);
};
// Serializes the native hasher state.
MurmurHash.prototype.serialize = function(type, offset) {
  var handle = this._handle;
  return handle.serialize(type, offset);
};
// Copies the native state onto `target` and returns it.
MurmurHash.prototype.copy = function(target) {
  var targetHandle = target && target._handle;
  this._handle.copy(targetHandle);
  return target;
};
// Serializable representation: native constructor name plus its state.
MurmurHash.prototype.toJSON = function() {
  var handle = this._handle;
  return {
    type: handle.constructor.name,
    seed: handle.toJSON()
  };
};
// Mirrors the native hasher's busy flag; hidden from enumeration.
Object.defineProperty(MurmurHash.prototype, 'isBusy', {
  enumerable: false,
  configurable: false,
  get: function() { return this._handle.isBusy; }
});
// Total number of bytes processed so far, as reported by the native hasher.
Object.defineProperty(MurmurHash.prototype, 'total', {
  enumerable: true,
  configurable: true,
  get: function() { return this._handle.total; }
});
// Digest byte order; reads and writes delegate to the native hasher.
Object.defineProperty(MurmurHash.prototype, 'endianness', {
  enumerable: true,
  configurable: false,
  get: function() { return this._handle.endianness; },
  set: function(value) { this._handle.endianness = value; }
});
// Lazily caches the native SERIAL_BYTE_LENGTH constant on the instance,
// shadowing this prototype getter after the first access.
Object.defineProperty(MurmurHash.prototype, 'SERIAL_BYTE_LENGTH', {
  enumerable: true,
  configurable: true,
  get: function() {
    var value = this._handle.SERIAL_BYTE_LENGTH;
    Object.defineProperty(this, 'SERIAL_BYTE_LENGTH', {
      enumerable: true,
      writable: true,
      configurable: true,
      value: value
    });
    return value;
  }
});

View File

@ -1,43 +0,0 @@
"use strict";
var assert = require('assert');
module.exports = function(buffer, wordnbits, offset, bytesize) {
offset = offset>>>0;
if (bytesize === void(0)) bytesize = buffer.length - offset;
assert.equal(bytesize * 8 % wordnbits, 0);
assert(offset + bytesize <= buffer.length);
switch(wordnbits) {
case 32:
swap32(buffer.slice(offset, offset + bytesize));
break;
case 64:
swap64(buffer.slice(offset, offset + bytesize));
break;
default: assert.fail(wordnbits, [32,64], undefined, 'one of');
}
return buffer;
}
// Reverses the byte order of each aligned 32-bit word of `buffer` in place.
function swap32(buffer) {
  if (typeof buffer.swap32 === 'function') {
    // Node >= 6.3 provides a native in-place 32-bit swap.
    buffer.swap32();
  } else {
    for (var pos = 0; pos < buffer.length; pos += 4) {
      var word = buffer.readUInt32BE(pos);
      buffer.writeUInt32LE(word, pos);
    }
  }
}
// Reverses the byte order of each aligned 64-bit word of `buffer` in place.
// Implemented as two 32-bit reads/writes so no 64-bit integers are needed.
function swap64(buffer) {
  for (var pos = 0; pos < buffer.length; pos += 8) {
    var high = buffer.readUInt32BE(pos);
    var low = buffer.readUInt32BE(pos + 4);
    buffer.writeUInt32LE(low, pos);
    buffer.writeUInt32LE(high, pos + 4);
  }
}

View File

@ -1,65 +0,0 @@
/*
Test byteswap on LE machines
requires that NODE_MURMURHASH_TEST_BYTESWAP=1 in binding.gyp
*/
"use strict";
var test = require("tap").test
, byteOrderSwap = require('./byteorderswap')
, hash = require('../..')
, incr = require('../../incremental')
;
// Known-answer checks for the byteswap test build: each input is first
// byte-swapped by swap() below, so a build compiled with
// NODE_MURMURHASH_TEST_BYTESWAP=1 should reproduce the canonical digests.
// NOTE(review): the expectations are identical for the default and LE
// exports here — presumably the byteswap build makes them agree; confirm
// against binding.gyp.
test("should have murmurHash functions", function(t) {
// One-shot API, default (top-level) exports.
testHash(t, hash.murmurHash32, "My hovercraft is full of eels.", 25, 'afb33896');
testHash(t, hash.murmurHash32, "I will not buy this record, it is scratched.", 0, '9a2bd0a8');
testHash(t, hash.murmurHash64x86, "I will not buy this record, it is scratched.", 0, "56c3338e1b075c45");
testHash(t, hash.murmurHash64x64, "I will not buy this record, it is scratched.", 0, "ead99837ed0bcc9b");
testHash(t, hash.murmurHash128x86, "I will not buy this tobacconist's, it is scratched.", 0, "a27b5b9b66783fefafde9a888ef9f300");
testHash(t, hash.murmurHash128x64, "I will not buy this tobacconist's, it is scratched.", 0, "e32782bdab5406d3739607f02335d767");
// One-shot API, explicit LE exports.
testHash(t, hash.LE.murmurHash32, "My hovercraft is full of eels.", 25, 'afb33896');
testHash(t, hash.LE.murmurHash32, "I will not buy this record, it is scratched.", 0, '9a2bd0a8');
testHash(t, hash.LE.murmurHash64x86, "I will not buy this record, it is scratched.", 0, "56c3338e1b075c45");
testHash(t, hash.LE.murmurHash64x64, "I will not buy this record, it is scratched.", 0, "ead99837ed0bcc9b");
testHash(t, hash.LE.murmurHash128x86, "I will not buy this tobacconist's, it is scratched.", 0, "a27b5b9b66783fefafde9a888ef9f300");
testHash(t, hash.LE.murmurHash128x64, "I will not buy this tobacconist's, it is scratched.", 0, "e32782bdab5406d3739607f02335d767");
// Incremental API, default endianness.
testHashIncr(t, void(0), incr.MurmurHash, "My hovercraft is full of eels.", 25, 'afb33896');
testHashIncr(t, void(0), incr.MurmurHash, "I will not buy this record, it is scratched.", 0, '9a2bd0a8');
testHashIncr(t, void(0), incr.MurmurHash128x86, "I will not buy this tobacconist's, it is scratched.", 0, "a27b5b9b66783fefafde9a888ef9f300");
testHashIncr(t, void(0), incr.MurmurHash128x64, "I will not buy this tobacconist's, it is scratched.", 0, "e32782bdab5406d3739607f02335d767");
// Incremental API, explicit 'LE'.
testHashIncr(t, 'LE', incr.MurmurHash, "My hovercraft is full of eels.", 25, 'afb33896');
testHashIncr(t, 'LE', incr.MurmurHash, "I will not buy this record, it is scratched.", 0, '9a2bd0a8');
testHashIncr(t, 'LE', incr.MurmurHash128x86, "I will not buy this tobacconist's, it is scratched.", 0, "a27b5b9b66783fefafde9a888ef9f300");
testHashIncr(t, 'LE', incr.MurmurHash128x64, "I will not buy this tobacconist's, it is scratched.", 0, "e32782bdab5406d3739607f02335d767");
t.end();
});
// Byte-swaps the input for the byteswap build, then checks the hex digest
// produced by the one-shot `murmurHash` function against `expectation`.
function testHash(t, murmurHash, input, seed, expectation) {
  var swapped = swap(input, murmurHash);
  t.strictEqual(murmurHash(swapped, seed, 'hex'), expectation);
}
// Byte-swaps the input for the byteswap build, then checks the hex digest
// produced by the incremental `MurmurHash` constructor against `expectation`.
function testHashIncr(t, endian, MurmurHash, input, seed, expectation) {
  var swapped = swap(input, MurmurHash);
  var digest = MurmurHash(seed, endian).update(swapped).digest('hex');
  t.strictEqual(digest, expectation);
}
// Byte-swaps `value` (a binary string) according to the word size the named
// hasher reads: 32-bit words for the x86 variants, 64-bit words for x64.
// Trailing bytes that don't fill a whole word are left untouched.
function swap(value, hasher) {
  var buf = Buffer.from(value, "binary");
  var name = hasher.name.toLowerCase();
  if (name === "murmurhash" || name === "murmurhash32" ||
      name === "murmurhash64x86" || name === "murmurhash128x86") {
    return byteOrderSwap(buf, 32, 0, buf.length & 0x7ffffffc).toString("binary");
  }
  if (name === "murmurhash64x64" || name === "murmurhash128x64") {
    return byteOrderSwap(buf, 64, 0, buf.length & 0x7ffffff8).toString("binary");
  }
  throw new Error("unknown function");
}

View File

@ -1,61 +0,0 @@
"use strict";
if (!global.gc)
throw new Error("run with --expose-gc");
var gc = global.gc;
var kMaxLength = require('buffer').kMaxLength || 0x3fffffff;
var test = require("tap").test
, hash = require('../..')
;
// Exercise every one-shot variant under aggressive GC pressure to verify
// the native async worker keeps the input (and provided output) buffers
// alive after the only JS references are dropped.
// NOTE(review): assumes kMaxLength-sized allocations succeed on the host.
[
hash.murmurHash,
hash.murmurHash64x64,
hash.murmurHash64x86,
hash.murmurHash128x64,
hash.murmurHash128x86
].forEach(function(murmurHash) {
test(murmurHash.name + " should not crash while under gc stress (input)", function(t) {
var bigone = Buffer.allocUnsafeSlow(kMaxLength);
murmurHash(bigone, function(err) {
t.error(err);
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(function(){ t.end(); });
});
// Drop the only reference before the async hash completes.
bigone = undefined;
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
});
test(murmurHash.name + " should not crash while under gc stress (input/output)", function(t) {
var bigone = Buffer.allocUnsafeSlow(kMaxLength);
var bigtwo = Buffer.allocUnsafeSlow(kMaxLength);
murmurHash(bigone, bigtwo, 0, function(err) {
t.error(err);
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(function(){ t.end(); });
});
// Drop both references before the async hash completes.
bigtwo = bigone = undefined;
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
});
});

View File

@ -1,39 +0,0 @@
"use strict";
if (!global.gc)
throw new Error("run with --expose-gc");
var gc = global.gc;
var kMaxLength = require('buffer').kMaxLength || 0x3fffffff;
var test = require("tap").test
, incr = require('../../incremental')
;
// Exercise each incremental hasher's async update() under aggressive GC
// pressure to verify the native worker keeps its input buffer alive after
// the only JS reference is dropped.
// NOTE(review): assumes kMaxLength-sized allocations succeed on the host.
[
incr.MurmurHash,
incr.MurmurHash128x64,
incr.MurmurHash128x86
].forEach(function(MurmurHash) {
test(MurmurHash.name + " should not crash while under gc stress", function(t) {
var bigone = Buffer.allocUnsafeSlow(kMaxLength);
MurmurHash().update(bigone, function(err) {
t.error(err);
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(function(){ t.end(); });
});
// Drop the only reference before the async update completes.
bigone = undefined;
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
gc(); gc(); gc(); gc();
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
setImmediate(gc);
});
});

View File

@ -1,29 +0,0 @@
"use strict";
var crypto = require('crypto');
var stream = require('stream');
var Readable = stream.Readable;
require('util').inherits(RandomChunkStream, Readable);
module.exports = RandomChunkStream;
/**
 * Readable stream producing `options.size` bytes of crypto-random data,
 * pushed in randomly sized chunks of at most `options.maxchunksize` bytes.
 * Every emitted chunk is a view into one shared backing buffer.
 */
function RandomChunkStream(options) {
  this.maxchunksize = options.maxchunksize >>> 0;
  this.size = options.size >>> 0;
  this.buffer = Buffer.allocUnsafe(this.size);
  this.cursor = 0;
  Readable.call(this, options);
}
RandomChunkStream.prototype._read = function() {
  // Random chunk length in [1, maxchunksize], clamped to the bytes left.
  var want = (Math.random() * this.maxchunksize | 0) + 1;
  var len = Math.min(want, this.size - this.cursor);
  crypto.randomBytes(len).copy(this.buffer, this.cursor);
  var next = this.cursor + len;
  this.push(this.buffer.slice(this.cursor, next));
  this.cursor = next;
  if (next >= this.size) {
    this.push(null);
  }
};

View File

@ -1,840 +0,0 @@
"use strict";
var test = require('./uncaughtexceptionwrap_tap').test
, hash = require('..')
;
// Every variant must be exported as a correctly named function from the
// top-level module and from the BE, LE and platform namespaces.
test("should have murmurHash functions", function(t) {
  var names = [
    'murmurHash',
    'murmurHash32',
    'murmurHash64',
    'murmurHash64x64',
    'murmurHash64x86',
    'murmurHash128x64',
    'murmurHash128',
    'murmurHash128x86'
  ];
  names.forEach(function(name) {
    [hash, hash.BE, hash.LE, hash.platform].forEach(function(ns) {
      t.type(ns[name], 'function');
      t.strictEqual(ns[name].name, name);
    });
  });
  t.end();
});
[
[4, 'murmurHash', hash.murmurHash, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.BE.murmurHash, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.BE.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.LE.murmurHash, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[4, 'murmurHash', hash.LE.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[8, 'murmurHash64x64', hash.murmurHash64x64,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc'],
[8, 'murmurHash64x64', hash.BE.murmurHash64x64,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc'],
[8, 'murmurHash64x64', hash.LE.murmurHash64x64,
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6',
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6'],
[8, 'murmurHash64x86', hash.murmurHash64x86,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248'],
[8, 'murmurHash64x86', hash.BE.murmurHash64x86,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248'],
[8, 'murmurHash64x86', hash.LE.murmurHash64x86,
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd',
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd'],
[16, 'murmurHash128x64', hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'murmurHash128x64', hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'murmurHash128x64', hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251'],
[16, 'murmurHash128x86', hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'murmurHash128x86', hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'murmurHash128x86', hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254']
].forEach(function(args) {
var size = args[ 0]
, label = args[ 1]
, murmurHash = args[ 2]
, seedZeroDefault = args[ 3]
, seedMinusOneDefault = args[ 4]
, seedPlusOneDefault = args[ 5]
, seedZeroHex = args[ 6]
, seedMinusOneHex = args[ 7]
, seedPlusOneHex = args[ 8]
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
, seedZeroBase64 = seedZeroBuffer.toString('base64')
, seedMinusOneBase64 = seedMinusOneBuffer.toString('base64')
, seedPlusOneBase64 = seedPlusOneBuffer.toString('base64')
, seedZeroBinary = seedZeroBuffer.toString('binary')
, seedMinusOneBinary = seedMinusOneBuffer.toString('binary')
, seedPlusOneBinary = seedPlusOneBuffer.toString('binary')
;
test(label, function(t) {
t.type(murmurHash, 'function');
t.test('should not bail on error throw in a callback', function(t) {
t.plan(4);
t.throwsUncaughtException(new Error("mana mana"));
t.strictEqual(undefined, murmurHash('', function(err, foo) {
t.error(err);
t.strictEqual(foo, seedZeroDefault);
throw new Error("mana mana");
}));
});
t.test('should raise error for bad arguments', function(t) {
t.plan(21*3);
function cberr1(err) {
t.type(err, TypeError);
t.strictEqual(err.message, "string or Buffer is required");
}
function cberr2(err) {
t.type(err, TypeError);
t.strictEqual(err.message, "\"encoding\" must be a valid string encoding");
}
function cberr3(err) {
t.type(err, TypeError);
t.strictEqual(err.message, "Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"");
}
t.strictEqual(undefined, murmurHash(cberr1));
t.strictEqual(undefined, murmurHash({}, cberr1));
t.strictEqual(undefined, murmurHash([], cberr1));
t.strictEqual(undefined, murmurHash(void(0), cberr1));
t.strictEqual(undefined, murmurHash(null, cberr1));
t.strictEqual(undefined, murmurHash(true, cberr1));
t.strictEqual(undefined, murmurHash(false, cberr1));
t.strictEqual(undefined, murmurHash(0, cberr1));
t.strictEqual(undefined, murmurHash(1, cberr1));
t.strictEqual(undefined, murmurHash(-1, cberr1));
t.strictEqual(undefined, murmurHash(new Date(), cberr1));
t.strictEqual(undefined, murmurHash("", "abcdefghijklmno", cberr2));
t.strictEqual(undefined, murmurHash("", "123456", cberr2));
t.strictEqual(undefined, murmurHash("", "12345", cberr2));
t.strictEqual(undefined, murmurHash("", "1234", cberr2));
t.strictEqual(undefined, murmurHash("", "123", cberr2));
t.strictEqual(undefined, murmurHash("", "", cberr2));
t.strictEqual(undefined, murmurHash("", 0, "", cberr3));
t.strictEqual(undefined, murmurHash("", 0, "mumber", cberr3));
t.strictEqual(undefined, murmurHash("", 0, "xxxxxxx", cberr3));
t.strictEqual(undefined, murmurHash("", 0, "utf-8", cberr3));
});
t.test('should create number hash from empty data', function(t) {
t.plan(20*3);
function cbfactory(value) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
};
}
t.strictEqual(undefined, murmurHash('', cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', 'number', cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'number', cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', -1, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 4294967295, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 4294967296, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'number', cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'number', cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', 1, cbfactory(seedPlusOneDefault)));
t.strictEqual(undefined, murmurHash('', 1, 'number', cbfactory(seedPlusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, cbfactory(seedPlusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'number', cbfactory(seedPlusOneDefault)));
});
t.test('should create buffer hash from empty data', function(t) {
t.plan(20*3);
function cbfactoryBuffer(value) {
return function(err, result) {
t.error(err);
t.deepEqual(result, value);
};
}
function cbfactoryHex(value) {
return function(err, result) {
t.error(err);
t.strictEqual(result.toString('hex'), value);
};
}
t.strictEqual(undefined, murmurHash('', 0, 'buffer', cbfactoryBuffer(seedZeroBuffer)));
t.strictEqual(undefined, murmurHash('', 0, 'buffer', cbfactoryHex(seedZeroHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'buffer', cbfactoryBuffer(seedZeroBuffer)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'buffer', cbfactoryHex(seedZeroHex)));
t.strictEqual(undefined, murmurHash('', -1, 'buffer', cbfactoryBuffer(seedMinusOneBuffer)));
t.strictEqual(undefined, murmurHash('', -1, 'buffer', cbfactoryHex(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'buffer', cbfactoryBuffer(seedMinusOneBuffer)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'buffer', cbfactoryHex(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'buffer', cbfactoryBuffer(seedMinusOneBuffer)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'buffer', cbfactoryHex(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'buffer', cbfactoryBuffer(seedMinusOneBuffer)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'buffer', cbfactoryHex(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'buffer', cbfactoryBuffer(seedZeroBuffer)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'buffer', cbfactoryHex(seedZeroHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'buffer', cbfactoryBuffer(seedZeroBuffer)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'buffer', cbfactoryHex(seedZeroHex)));
t.strictEqual(undefined, murmurHash('', 1, 'buffer', cbfactoryBuffer(seedPlusOneBuffer)));
t.strictEqual(undefined, murmurHash('', 1, 'buffer', cbfactoryHex(seedPlusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'buffer', cbfactoryBuffer(seedPlusOneBuffer)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'buffer', cbfactoryHex(seedPlusOneHex)));
});
t.test('should create string encoded hash from empty data', function(t) {
t.plan(30*3);
function cbfactory(value) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
};
}
t.strictEqual(undefined, murmurHash('', 0, 'hex', cbfactory(seedZeroHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'hex', cbfactory(seedZeroHex)));
t.strictEqual(undefined, murmurHash('', -1, 'hex', cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'hex', cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'hex', cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'hex', cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'hex', cbfactory(seedZeroHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'hex', cbfactory(seedZeroHex)));
t.strictEqual(undefined, murmurHash('', 1, 'hex', cbfactory(seedPlusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'hex', cbfactory(seedPlusOneHex)));
t.strictEqual(undefined, murmurHash('', 0, 'base64', cbfactory(seedZeroBase64)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'base64', cbfactory(seedZeroBase64)));
t.strictEqual(undefined, murmurHash('', -1, 'base64', cbfactory(seedMinusOneBase64)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'base64', cbfactory(seedMinusOneBase64)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'base64', cbfactory(seedMinusOneBase64)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'base64', cbfactory(seedMinusOneBase64)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'base64', cbfactory(seedZeroBase64)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'base64', cbfactory(seedZeroBase64)));
t.strictEqual(undefined, murmurHash('', 1, 'base64', cbfactory(seedPlusOneBase64)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'base64', cbfactory(seedPlusOneBase64)));
t.strictEqual(undefined, murmurHash('', 0, 'binary', cbfactory(seedZeroBinary)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 'binary', cbfactory(seedZeroBinary)));
t.strictEqual(undefined, murmurHash('', -1, 'binary', cbfactory(seedMinusOneBinary)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), -1, 'binary', cbfactory(seedMinusOneBinary)));
t.strictEqual(undefined, murmurHash('', 4294967295, 'binary', cbfactory(seedMinusOneBinary)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967295, 'binary', cbfactory(seedMinusOneBinary)));
t.strictEqual(undefined, murmurHash('', 4294967296, 'binary', cbfactory(seedZeroBinary)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 4294967296, 'binary', cbfactory(seedZeroBinary)));
t.strictEqual(undefined, murmurHash('', 1, 'binary', cbfactory(seedPlusOneBinary)));
t.strictEqual(undefined, murmurHash(Buffer.from(''), 1, 'binary', cbfactory(seedPlusOneBinary)));
});
// Encoding handling: hashing a string with an explicit encoding must be
// consistent with hashing the correspondingly-encoded Buffer, and the
// utf8/utf-8, ucs2/ucs-2 and utf16le/utf-16le aliases must all agree.
t.test('should utilize different string input encodings', function(t) {
// 10 calls use cbfactory (4 assertions each: outer strictEqual + 3 in
// the callback chain), 5 use cbfactory2 (3 each).
t.plan(10*4+5*3);
// Asserts the async result equals the hash of `arg` obtained from a
// second async murmurHash call.
function cbfactory(arg) {
return function(err, result) {
t.error(err);
murmurHash(arg, function(err, result2) {
t.error(err);
t.strictEqual(result, result2);
});
};
}
// Asserts the async result strictly equals the precomputed `value`.
function cbfactory2(value) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
};
}
// Sample string with non-latin code points, plus its base64/hex forms.
var string = "\u1220łóżko"
, base64 = 'IELzfGtv'
, hex = '2042f37c6b6f'
, hash = murmurHash(string);
t.strictEqual(undefined, murmurHash(Buffer.from(string, 'binary'), cbfactory2(hash)));
t.strictEqual(undefined, murmurHash(string, 'ascii', cbfactory(Buffer.from(string, 'ascii'))));
t.strictEqual(undefined, murmurHash(string, 'ascii', cbfactory2(hash)));
t.strictEqual(undefined, murmurHash(string, 'binary', cbfactory(Buffer.from(string, 'binary'))));
t.strictEqual(undefined, murmurHash(string, 'binary', cbfactory2(hash)));
t.strictEqual(undefined, murmurHash(string, 'utf8', cbfactory(Buffer.from(string, 'utf8'))));
t.strictEqual(undefined, murmurHash(string, 'utf-8', cbfactory(Buffer.from(string, 'utf-8'))));
t.strictEqual(undefined, murmurHash(string, 'ucs2', cbfactory(Buffer.from(string, 'ucs2'))));
t.strictEqual(undefined, murmurHash(string, 'ucs-2', cbfactory(Buffer.from(string, 'ucs-2'))));
t.strictEqual(undefined, murmurHash(string, 'utf16le', cbfactory(Buffer.from(string, 'utf16le'))));
t.strictEqual(undefined, murmurHash(string, 'utf-16le', cbfactory(Buffer.from(string, 'utf-16le'))));
// base64/hex encoded inputs must hash like their decoded Buffer forms.
t.strictEqual(undefined, murmurHash(base64, 'base64', cbfactory2(hash)));
t.strictEqual(undefined, murmurHash(base64, 'base64', cbfactory(Buffer.from(base64, 'base64'))));
t.strictEqual(undefined, murmurHash(hex, 'hex', cbfactory2(hash)));
t.strictEqual(undefined, murmurHash(hex, 'hex', cbfactory(Buffer.from(hex, 'hex'))));
});
// Hashing ~1000 random characters: checks the 'buffer' output length,
// string/Buffer input equivalence, and seed normalization (-1 vs the
// equivalent uint32 4294967295).
t.test('should create hash from some random data', function(t) {
t.plan(2*3+5*4+3*4);
// Asserts the async result's length equals `size` (the hash byte size
// for the variant under test, supplied by the enclosing suite).
function cbfactoryLen() {
return function(err, result) {
t.error(err);
t.equal(result.length, size);
};
}
// Recomputes the hash of `arg` (optionally with `seed`) and asserts
// both async results are strictly equal.
function cbfactory(arg, seed) {
return function(err, result) {
t.error(err);
if (seed === undefined)
murmurHash(arg, cb);
else
murmurHash(arg, seed, cb);
function cb(err, result2) {
t.error(err);
t.strictEqual(result, result2);
}
};
}
// Recomputes the hash with an explicit seed (and optional output type)
// and applies the tap assertion named by `assertion` to both results.
function cbfactory2(assertion, arg, seed, output) {
return function(err, result) {
t.error(err);
if (output === undefined)
murmurHash(arg, seed, cb);
else
murmurHash(arg, seed, output, cb);
function cb(err, result2) {
t.error(err);
t[assertion](result, result2);
}
};
}
var data = '';
for (var i = 0; i < 1000; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var buffer = Buffer.from(data, 'binary');
t.strictEqual(undefined, murmurHash(data, 0, 'buffer', cbfactoryLen()));
t.strictEqual(undefined, murmurHash(buffer, 'buffer', cbfactoryLen()));
t.strictEqual(undefined, murmurHash(data, 'utf8', cbfactory(Buffer.from(data, 'utf8'))));
t.strictEqual(undefined, murmurHash(data, cbfactory(buffer)));
// Seed -1 and 4294967295 must hash identically (uint32 wrap-around).
t.strictEqual(undefined, murmurHash(data, -1, cbfactory(buffer, -1)));
t.strictEqual(undefined, murmurHash(data, -1, cbfactory(buffer, 4294967295)));
t.strictEqual(undefined, murmurHash(data, 4294967295, cbfactory(buffer, -1)));
var seed = (Math.random()*4294967296)|0;
// 'buffer' output must yield distinct instances with equal contents.
t.strictEqual(undefined, murmurHash(data, seed, 'buffer', cbfactory2('notStrictEqual', buffer, seed, 'buffer')));
t.strictEqual(undefined, murmurHash(data, seed, 'buffer', cbfactory2('deepEqual', buffer, seed, 'buffer')));
t.strictEqual(undefined, murmurHash(data, seed, cbfactory2('strictEqual', buffer, seed)));
});
// With a single data argument plus callback, both an empty string and an
// empty Buffer must hash to the default zero-seed result, and the async
// call itself must return undefined.
t.test('should interpret 1 argument properly', function(t) {
t.plan(2*3);
// Builds a completion callback expecting `expected` as the hash result.
var expecting = function(expected) {
return function(err, result) {
t.error(err);
t.strictEqual(result, expected);
};
};
['', Buffer.alloc(0)].forEach(function(input) {
t.strictEqual(undefined, murmurHash(input, expecting(seedZeroDefault)));
});
});
// Argument parsing with 2 (plus up to 2 trailing) arguments before the
// callback: (data, seed), (string, encoding) / (buffer, output-type),
// and (data, outputBuffer[, offset[, length]]) forms.
t.test('should interpret 2[+2] arguments properly', function(t) {
// 4 calls use cbfactory (4 assertions), 6 use cbfactory2 (5), 10 use
// cbfactory3 (3 + one deepEqual inside the follow-up cb = 4).
t.plan(4*4 + 6*5 + 10*4);
// Asserts the async result matches `value` in type and content
// (strictly for strings, deeply for buffers).
function cbfactory(value) {
return function(err, result) {
t.error(err);
t.strictEqual(typeof result, typeof value);
if ('string' === typeof value) {
t.strictEqual(result, value);
} else {
t.deepEqual(result, value);
}
};
}
// Re-invokes murmurHash with the captured argument list (plus its own
// callback) and asserts both async results agree in type and content.
function cbfactory2() {
var args = [].slice.call(arguments, 0);
return function(err, result) {
t.error(err);
args.push(function(err, result2) {
t.error(err);
t.strictEqual(typeof result, typeof result2);
if ('string' === typeof result2) {
t.strictEqual(result, result2);
} else {
t.deepEqual(result, result2);
}
});
murmurHash.apply(this, args);
};
}
// Asserts the async result is exactly `value` (the output buffer
// instance), then runs `cb` for follow-up content checks.
function cbfactory3(value, cb) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
cb();
};
}
// (data, seed): a numeric second argument is the seed.
t.strictEqual(undefined, murmurHash('', 0, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 0, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, cbfactory(seedMinusOneDefault)));
// String second argument: encoding for string input, output type for
// Buffer input.
t.strictEqual(undefined, murmurHash('\u1234', 'utf8',
cbfactory2(Buffer.from('\u1234', 'utf8'))));
t.strictEqual(undefined, murmurHash('\u1234', 'ascii',
cbfactory2(Buffer.from('\u1234', 'ascii'))));
t.strictEqual(undefined, murmurHash('\u1234', 'binary',
cbfactory2(Buffer.from('\u1234', 'binary'))));
t.strictEqual(undefined, murmurHash(Buffer.from([0xFF]),
cbfactory2(Buffer.from('\u12FF', 'binary'))));
t.strictEqual(undefined, murmurHash(Buffer.from([0xFF]), 'number',
cbfactory2(Buffer.from('\u12FF', 'binary'), 'number')));
t.strictEqual(undefined, murmurHash(Buffer.from([0xFF]), 'buffer',
cbfactory2(Buffer.from('\u12FF', 'binary'), 'buffer')));
// Buffer second argument: the hash is written into it in place and the
// callback receives the same buffer instance.
var buf = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash('', buf, cbfactory3(buf, function() {
t.deepEqual(buf, Buffer.from(seedZeroHex, 'hex'));
})));
var bufb = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), bufb, cbfactory3(bufb, function() {
t.deepEqual(bufb, Buffer.from(seedZeroHex, 'hex'));
})));
// Positive offset: write starts at offset 2, prefix stays untouched.
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', buf2, 2, cbfactory3(buf2, function() {
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedZeroHex, 'hex')]));
})));
var buf2b = Buffer.allocUnsafe(size + 2); buf2b.fill(0, 0, 2); buf2b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), buf2b, 2, cbfactory3(buf2b, function() {
t.deepEqual(buf2b, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedZeroHex, 'hex')]));
})));
// Negative offset on a too-short buffer: the hash head is clipped
// (expected content is seedZeroHex minus its first byte).
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', buf3, -size, cbfactory3(buf3, function() {
t.deepEqual(buf3, Buffer.from(seedZeroHex, 'hex').slice(1));
})));
var buf3b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), buf3b, -size, cbfactory3(buf3b, function() {
t.deepEqual(buf3b, Buffer.from(seedZeroHex, 'hex').slice(1));
})));
// Offset plus positive length: only the first 3 hash bytes are written.
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', buf4, 2, 3, cbfactory3(buf4, function() {
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedZeroHex, 'hex').slice(0, 3),
bufpad]));
})));
var buf4b = Buffer.allocUnsafe(size + 2); buf4b.fill(0, 0, 2); buf4b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), buf4b, 2, 3, cbfactory3(buf4b, function() {
t.deepEqual(buf4b, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedZeroHex, 'hex').slice(0, 3),
bufpad]));
})));
// Negative length: bytes are taken from the tail end of the hash.
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', buf5, -size, -3, cbfactory3(buf5, function() {
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedZeroHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
var buf5b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), buf5b, -size, -3, cbfactory3(buf5b, function() {
t.deepEqual(buf5b, Buffer.concat([Buffer.from(seedZeroHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
});
// Argument parsing with 3 (plus up to 2 trailing) arguments before the
// callback: (data, seed, output-type/null), (data, encoding, seed) and
// (data, seed, outputBuffer[, offset[, length]]) combinations,
// including null placeholders and extra ignored trailing arguments.
t.test('should interpret 3[+2] arguments properly', function(t) {
// 16 calls use cbfactory (4 assertions), 18 use cbfactory2 (5), 10 use
// cbfactory3 (4 including the follow-up deepEqual).
t.plan(16*4 + 18*5 + 10*4);
// Asserts the async result matches `value` in type and content
// (strictly for strings, deeply for buffers).
function cbfactory(value) {
return function(err, result) {
t.error(err);
t.strictEqual(typeof result, typeof value);
if ('string' === typeof value) {
t.strictEqual(result, value);
} else {
t.deepEqual(result, value);
}
};
}
// Re-invokes murmurHash with the captured argument list (plus its own
// callback) and asserts both async results agree in type and content.
function cbfactory2() {
var args = [].slice.call(arguments, 0);
return function(err, result) {
t.error(err);
args.push(function(err, result2) {
t.error(err);
t.strictEqual(typeof result, typeof result2);
if ('string' === typeof result2) {
t.strictEqual(result, result2);
} else {
t.deepEqual(result, result2);
}
});
murmurHash.apply(this, args);
};
}
// Asserts the async result is exactly `value` (the output buffer
// instance), then runs `cb` for follow-up content checks.
function cbfactory3(value, cb) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
cb();
};
}
// (data, seed, third): a numeric third argument overrides the seed;
// null or 'number' leaves the -1 seed in effect; 'buffer' selects
// Buffer output; extra trailing arguments are ignored.
t.strictEqual(undefined, murmurHash('', -1, 0, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', -1, null, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', -1, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', -1, 'number', 1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', -1, 'buffer', cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash('', -1, 'buffer', 1, cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash('', 1, -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', null, -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, 0, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, null, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, 'number', 1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, 'buffer', cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, 'buffer', 1, cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 1, -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), null, -1, cbfactory(seedMinusOneDefault)));
// (data, encoding/null, seed/output) mixtures must agree with their
// normalized equivalents recomputed via cbfactory2.
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 100,
cbfactory2(Buffer.from('\u1234', 'utf8'), 100)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'number', 100,
cbfactory2(Buffer.from('\u1234', 'binary'), 100)));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 'buffer',
cbfactory2(Buffer.from('\u1234', 'utf8'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'buffer',
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'utf8'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'ignore', 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, 'buffer',
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 'buffer',
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, null,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, null,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
t.strictEqual(undefined, murmurHash('\u1234', null, null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', null,
cbfactory2(Buffer.from('\u1234', 'utf8'), 0)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'number', null,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', null, -1,
cbfactory2(Buffer.from('\u1234', 'utf8'), 0)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'number', null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 0)));
// (data, seed, outputBuffer[, offset[, length]]) — same in-place write
// semantics as the 2-argument suite, here with seed -1.
var buf = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash('', -1, buf, cbfactory3(buf, function() {
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
})));
var bufb = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, bufb, cbfactory3(bufb, function() {
t.deepEqual(bufb, Buffer.from(seedMinusOneHex, 'hex'));
})));
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', -1, buf2, 2, cbfactory3(buf2, function() {
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
})));
var buf2b = Buffer.allocUnsafe(size + 2); buf2b.fill(0, 0, 2); buf2b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf2b, 2, cbfactory3(buf2b, function() {
t.deepEqual(buf2b, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
})));
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', -1, buf3, -size, cbfactory3(buf3, function() {
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
})));
var buf3b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf3b, -size, cbfactory3(buf3b, function() {
t.deepEqual(buf3b, Buffer.from(seedMinusOneHex, 'hex').slice(1));
})));
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', -1, buf4, 2, 3, cbfactory3(buf4, function() {
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
})));
var buf4b = Buffer.alloc(size + 2); buf4b.fill(0, 0, 2); buf4b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf4b, 2, 3, cbfactory3(buf4b, function() {
t.deepEqual(buf4b, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
})));
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', -1, buf5, -size, -3, cbfactory3(buf5, function() {
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
var buf5b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf5b, -size, -3, cbfactory3(buf5b, function() {
t.deepEqual(buf5b, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
});
// Argument parsing with 4 (plus up to 2 trailing) arguments before the
// callback: (data, encoding/output-type, seed, output/seed) mixtures
// and (data, encoding, seed, outputBuffer[, offset[, length]]) forms.
t.test('should interpret 4[+2] arguments properly', function(t) {
// 18 calls use cbfactory (4 assertions), 20 use cbfactory2 (5), 10 use
// cbfactory3 (4 including the follow-up deepEqual).
t.plan(18*4 + 20*5 + 10*4);
// Asserts the async result matches `value` in type and content
// (strictly for strings, deeply for buffers).
function cbfactory(value) {
return function(err, result) {
t.error(err);
t.strictEqual(typeof result, typeof value);
if ('string' === typeof value) {
t.strictEqual(result, value);
} else {
t.deepEqual(result, value);
}
};
}
// Re-invokes murmurHash with the captured argument list (plus its own
// callback) and asserts both async results agree in type and content.
function cbfactory2() {
var args = [].slice.call(arguments, 0);
return function(err, result) {
t.error(err);
args.push(function(err, result2) {
t.error(err);
t.strictEqual(typeof result, typeof result2);
if ('string' === typeof result2) {
t.strictEqual(result, result2);
} else {
t.deepEqual(result, result2);
}
});
murmurHash.apply(this, args);
};
}
// Asserts the async result is exactly `value` (the output buffer
// instance), then runs `cb` for follow-up content checks.
function cbfactory3(value, cb) {
return function(err, result) {
t.error(err);
t.strictEqual(result, value);
cb();
};
}
// (string, encoding, seed, fourth): numeric fourth argument overrides
// the seed; null/'number' keep it; 'buffer' selects Buffer output;
// extra trailing arguments are ignored.
t.strictEqual(undefined, murmurHash('', 'utf8', -1, 0, cbfactory(seedZeroDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, null, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, null, 1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, 'number', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, 'number', 1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, 'buffer', cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash('', 'utf8', -1, 'buffer', 1, cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash('', 'utf8', 1, -1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash('', 'utf8', null, -1, cbfactory(seedZeroDefault)));
// (buffer, output-type, seed, fourth): second argument is the output
// type for Buffer input; 'ignore' placeholders are skipped.
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'hex', -1, 0, cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'hex', -1, null, cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'hex', -1, null, 1, cbfactory(seedMinusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'number', -1, 'ignore', cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'number', -1, 'ignore', 1, cbfactory(seedMinusOneDefault)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'buffer', -1, 'ignore', cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'buffer', -1, 'ignore', 1, cbfactory(Buffer.from(seedMinusOneHex, 'hex'))));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'hex', 1, -1, cbfactory(seedPlusOneHex)));
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), 'hex', null, -1, cbfactory(seedZeroHex)));
// Mixed forms must agree with their normalized equivalents recomputed
// through cbfactory2.
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 100, 'buffer',
cbfactory2(Buffer.from('\u1234', 'utf8'), 100, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 100, 'ignore',
cbfactory2(Buffer.from('\u1234', 'binary'), 100, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 100, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'utf8'), 100, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 100, 'ignore', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 100, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 0, 'buffer',
cbfactory2(Buffer.from('\u1234', 'utf8'), 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 0, 'ignore',
cbfactory2(Buffer.from('\u1234', 'binary'), 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 0, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'utf8'), 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 0, 'ignore', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, 1, 'buffer',
cbfactory2(Buffer.from('\u1234', 'binary'), 1, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 1, 'buffer',
cbfactory2(Buffer.from('\u1234', 'binary'), 1, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, 1, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 1, 'buffer')));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 1, 'buffer', -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 1, 'buffer')));
t.strictEqual(undefined, murmurHash('\u1234', null, 1, null,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 1, null,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
t.strictEqual(undefined, murmurHash('\u1234', null, 1, null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), null, 1, null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 1, null,
cbfactory2(Buffer.from('\u1234', 'utf8'), 1)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'number', 1, null,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
t.strictEqual(undefined, murmurHash('\u1234', 'utf8', 1, null, -1,
cbfactory2(Buffer.from('\u1234', 'utf8'), 1)));
t.strictEqual(undefined, murmurHash(Buffer.from('\u1234', 'binary'), 'number', 1, null, -1,
cbfactory2(Buffer.from('\u1234', 'binary'), 1)));
// (data, encoding, seed, outputBuffer[, offset[, length]]) — in-place
// write semantics with various encodings; Buffer inputs skip encoding.
var buf = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash('', 'utf8', -1, buf, cbfactory3(buf, function() {
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
})));
var bufb = Buffer.alloc(size, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, bufb, cbfactory3(bufb, function() {
t.deepEqual(bufb, Buffer.from(seedMinusOneHex, 'hex'));
})));
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', 'binary', -1, buf2, 2, cbfactory3(buf2, function() {
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
})));
var buf2b = Buffer.alloc(size + 2); buf2b.fill(0, 0, 2); buf2b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf2b, 2, cbfactory3(buf2b, function() {
t.deepEqual(buf2b, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
})));
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', 'ascii', -1, buf3, -size, cbfactory3(buf3, function() {
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
})));
var buf3b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf3b, -size, cbfactory3(buf3b, function() {
t.deepEqual(buf3b, Buffer.from(seedMinusOneHex, 'hex').slice(1));
})));
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(undefined, murmurHash('', 'ucs2', -1, buf4, 2, 3, cbfactory3(buf4, function() {
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
})));
var buf4b = Buffer.allocUnsafe(size + 2); buf4b.fill(0, 0, 2); buf4b.fill(-1, 2);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf4b, 2, 3, cbfactory3(buf4b, function() {
t.deepEqual(buf4b, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
})));
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash('', 'hex', -1, buf5, -size, -3, cbfactory3(buf5, function() {
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
var buf5b = Buffer.alloc(size - 1, -1);
t.strictEqual(undefined, murmurHash(Buffer.alloc(0), -1, buf5b, -size, -3, cbfactory3(buf5b, function() {
t.deepEqual(buf5b, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
})));
});
// In-place hashing: the output buffer may alias the input buffer. The
// hash written over (or beside) the input must match the hash of an
// untouched copy of the same data, and bytes outside the target window
// must be left intact.
t.test('should write hash into the same buffer it is reading from', function(t) {
t.plan(6*5 + 5+12*2);
// Asserts the async result is the aliased buffer itself, then runs
// `cb` for the follow-up content comparisons.
function cbfactory(buf, cb) {
return function(err, result) {
t.error(err);
t.strictEqual(result, buf);
cb();
};
}
// Asserts the reference hash (recomputed from a pristine copy) deeply
// equals `buf`, optionally comparing only the [offs, len) window.
function cbfactory2(buf, offs, len) {
return function(err, result) {
t.error(err);
if (offs === undefined) {
t.deepEqual(result, buf);
} else {
t.deepEqual(result.slice(offs, len), buf);
}
};
}
var data = '';
for (var i = 0; i < 1000; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var databuf = Buffer.from(data, 'utf8');
var seed = (Math.random() * 0x100000000)|0;
// Hash written immediately after the input region of the same buffer.
var buf = Buffer.allocUnsafe(databuf.length + size); databuf.copy(buf); buf.fill(-1, databuf.length);
t.strictEqual(undefined, murmurHash(buf.slice(0, databuf.length), seed, buf, databuf.length, cbfactory(buf, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer', cbfactory2(buf.slice(databuf.length))));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer', cbfactory2(buf.slice(databuf.length))));
})));
// Hash written into the reserved prefix before the input region.
var buf2 = Buffer.allocUnsafe(size + databuf.length); databuf.copy(buf2, size); buf2.fill(-1, 0, size);
t.strictEqual(undefined, murmurHash(buf2.slice(size), seed, buf2, cbfactory(buf2, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer', cbfactory2(buf2.slice(0, size))));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer', cbfactory2(buf2.slice(0, size))));
})));
// Hash overwrites the head of the very buffer being hashed; the tail
// must remain the original data.
var buf3 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf3);
t.strictEqual(undefined, murmurHash(buf3, seed, buf3, cbfactory(buf3, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer', cbfactory2(buf3.slice(0, size))));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer', cbfactory2(buf3.slice(0, size))));
t.deepEqual(buf3.slice(size), databuf.slice(size));
})));
// Negative offset: hash overwrites the tail; the head stays intact.
var buf4 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf4);
t.strictEqual(undefined, murmurHash(buf4, seed, buf4, -size, cbfactory(buf4, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer', cbfactory2(buf4.slice(databuf.length - size))));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer', cbfactory2(buf4.slice(databuf.length - size))));
t.deepEqual(buf4.slice(0, databuf.length - size), databuf.slice(0, databuf.length - size));
})));
// Truncated length: only the first size-1 hash bytes are written.
var buf5 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf5);
t.strictEqual(undefined, murmurHash(buf5, seed, buf5, 0, size - 1, cbfactory(buf5, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer', cbfactory2(buf5.slice(0, size - 1), 0, size - 1)));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer', cbfactory2(buf5.slice(0, size - 1), 0, size - 1)));
t.deepEqual(buf5.slice(size - 1), databuf.slice(size - 1));
})));
// Negative offset with negative length: a middle window of the hash
// lands near the buffer tail; both surrounding regions stay intact.
var buf6 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf6);
t.strictEqual(undefined, murmurHash(buf6, seed, buf6, -size, -size + 2, cbfactory(buf6, function() {
t.strictEqual(undefined, murmurHash(data, 'utf8', seed, 'buffer',
cbfactory2(buf6.slice(databuf.length - size, databuf.length - 2), 2)));
t.strictEqual(undefined, murmurHash(databuf, seed, 'buffer',
cbfactory2(buf6.slice(databuf.length - size, databuf.length - 2), 2)));
t.deepEqual(buf6.slice(0, databuf.length - size), databuf.slice(0, databuf.length - size));
t.deepEqual(buf6.slice(databuf.length - 2), databuf.slice(databuf.length - 2));
})));
});
t.end();
});
});

View File

@ -1,94 +0,0 @@
"use strict";
var test = require("tap").test
, hash = require('..')
, incr = require('../incremental')
, hash3js = require("murmurhash3js")
;
// Multilingual sample phrases plus the empty string — exercises
// multi-byte UTF-8 sequences across many scripts (Latin, Cyrillic,
// Georgian, Japanese, Malayalam, Sinhala).
var TEST_STRINGS = [
"My hovercraft is full of eels.",
"I will not buy this tobacconist's, it is scratched.",
"Mój poduszkowiec jest pełen węgorzy",
"Мээң ховеркрафтым иштинде чылан ышкаш балык долу",
"ჩემი ხომალდი საჰაერო ბალიშზე სავსეა გველთევზებით",
"私のホバークラフトは鰻でいっぱいです",
"എന്‍റെ പറക്കും-പേടകം നിറയെ വ്ളാങ്കുകളാണ്",
"මාගේ වායු පා යානයේ ආඳන් පිරී ඇත",
""
];
// For each hash variant, verify (1) output compatibility with the pure-JS
// murmurhash3js implementation across fixed and random strings/seeds, and
// (2) that previously serialized incremental-hasher state still restores
// to the same digests.
// Fix: corrected the typo "comatible" -> "compatible" in the second
// test's description string.
[
{hash3js: hash3js.x86.hash32, hash: hash.murmurHash32 , incr: incr.MurmurHash,
serial: 'QmKeaK22XaIAAAJOAi0y', hex: 'ad3e539d', hex2: '00fa73be'},
{hash3js: hash3js.x86.hash128, hash: hash.murmurHash128x86, incr: incr.MurmurHash128x86,
serial: 'sxzPmSzltfMCih+J0+iHa7u24JIgk7fg4Ie24K22dP4AAAJO857u',
hex: '0f2e9b58f3c8452ede465001eaa2308a', hex2: 'beccc960486e04b373c48b0217e02636'},
{hash3js: hash3js.x64.hash128, hash: hash.murmurHash128x64, incr: incr.MurmurHash128x64,
serial: 'pN18VmaZIWFi5XnFQVX5oSCTt+C7tuCSrbbgh7bg3F4AAAJOglRY',
hex: 'e49c0577f67e999b841d202f03c5e88d', hex2: 'e2a976902222cb37e90d01500bc11db3'}
]
.forEach(function(o) {
test(o.hash.name + " should be compatible with murmurhash3js", function(t) {
// (9 fixed + 10 random strings) x 57 seed variants x 3 assertions.
t.plan((9 + 10)*(57*3));
TEST_STRINGS.forEach(testHash);
for(var i = 10; i-- > 0; ) {
var s = '';
var len = (1 + Math.random()*10000)|0;
for(var n = len; n-- > 0; ) {
s += String.fromCharCode(Math.random()*0x3fff);
}
testHash(s);
}
// Compares hash3js (which takes a binary string) against the native
// hash invoked with utf-8, binary, and default (no) encoding.
function testStrings(s) {
var s1 = Buffer.from(s, 'utf8').toString('binary');
var args1 = [s1].concat([].slice.call(arguments, 1));
var args2 = [s, 'utf-8'].concat(args1.slice(1));
t.strictEqual( o.hash3js.apply(o, args1), o.hash.apply(o, args2) );
args1[0] = Buffer.from(s, 'binary').toString('binary');
args2[1] = 'binary';
t.strictEqual( o.hash3js.apply(o, args1), o.hash.apply(o, args2) );
args2.splice(1, 1);
t.strictEqual( o.hash3js.apply(o, args1), o.hash.apply(o, args2) );
}
// Runs testStrings with no seed, boundary seeds, and 50 random seeds.
function testHash(s) {
testStrings(s);
testStrings(s, 0);
testStrings(s, 1);
testStrings(s, -1);
testStrings(s, 0x7ffffff);
testStrings(s, 0x8000000);
testStrings(s, 0xfffffff);
for(var i = 50; i-- > 0; ) {
var seed = (Math.random()*0x100000000)>>>0;
testStrings(s, seed);
}
}
});
test("should be compatible with older serialized data", function(t) {
// A hasher seeded with 42 over TEST_STRINGS must reproduce the
// recorded digests, survive a serialize/restore round trip, and a
// hasher restored from the recorded serial must behave identically.
var hash0 = o.incr(42);
TEST_STRINGS.forEach(function(text) { hash0.update(text); });
t.strictEqual(hash0.digest('hex'), o.hex);
t.strictEqual(o.incr(hash0.serialize()).digest('hex'), o.hex);
TEST_STRINGS.forEach(function(text) { hash0.update(text); });
t.strictEqual(hash0.digest('hex'), o.hex2);
t.strictEqual(o.incr(hash0.serialize()).digest('hex'), o.hex2);
var hash1 = o.incr(o.serial);
t.strictEqual(hash1.digest('hex'), o.hex);
t.strictEqual(o.incr(hash1.serialize()).digest('hex'), o.hex);
t.strictEqual(hash1.serialize(), o.serial);
TEST_STRINGS.forEach(function(text) { hash1.update(text); });
t.strictEqual(hash1.digest('hex'), o.hex2);
t.strictEqual(o.incr(hash1.serialize()).digest('hex'), o.hex2);
t.end();
});
});

View File

@ -1,87 +0,0 @@
"use strict";
var os = require("os")
, crypto = require("crypto")
, test = require("tap").test
, byteOrderSwap = require("./byteswap/byteorderswap")
, hash = require('..')
;
// For each variant [hash byte size, word size in bits, function name]:
// verify that the default, BE, LE, platform and OS-endianness exports
// exist and relate correctly (default == BE; platform == OS; LE is the
// per-word byte-swapped form of BE).
[
[4, 32, 'murmurHash'],
[4, 32, 'murmurHash32'],
[8, 64, 'murmurHash64x86'],
[8, 64, 'murmurHash64x64'],
[16, 32, 'murmurHash128x86'],
[16, 64, 'murmurHash128x64']
].forEach(function(args) {
var size = args[ 0]
, wordBits = args[ 1]
, name = args[ 2]
, murmurHash = hash[name]
, murmurHashBE = hash.BE[name]
, murmurHashLE = hash.LE[name]
, murmurHashCPU = hash.platform[name]
, murmurHashOS = hash[os.endianness()][name]
;
test(name, function(t) {
t.type(murmurHash, 'function');
t.type(murmurHashBE, 'function');
t.type(murmurHashLE, 'function');
t.type(murmurHashCPU, 'function');
t.type(murmurHashOS, 'function');
t.test('should render same results', function(t) {
t.plan(6*2*(1 + 3));
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
// Inputs: empty string/Buffer, large and small random Buffers plus
// their binary-string forms.
['', Buffer.alloc(0), crypto.randomBytes(10007), crypto.randomBytes(10007).toString('binary'), crypto.randomBytes(Math.random() * 100>>>0), crypto.randomBytes(Math.random() * 100>>>0).toString('binary')]
.forEach(function(input) {
// Default export must equal the BE export for every output type.
t.deepEqual(murmurHash(input, seed, 'buffer'), murmurHashBE(input, seed, 'buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(murmurHash(input, seed, encoding), murmurHashBE(input, seed, encoding));
});
// Platform export must equal the OS-endianness export.
t.deepEqual(murmurHashCPU(input, seed, 'buffer'), murmurHashOS(input, seed, 'buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(murmurHashCPU(input, seed, encoding), murmurHashOS(input, seed, encoding));
});
});
});
t.test('should render swapped results', function(t) {
t.plan(6*(2 + 3*2));
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
['', Buffer.alloc(0), crypto.randomBytes(10007), crypto.randomBytes(10007).toString('binary'), crypto.randomBytes(Math.random() * 100>>>0), crypto.randomBytes(Math.random() * 100>>>0).toString('binary')]
.forEach(function(input) {
// LE must equal BE with each wordBits-sized word byte-swapped,
// checked in both swap directions.
t.deepEqual(
murmurHashLE(input, seed, 'buffer'),
byteOrderSwap(
murmurHashBE(input, seed, 'buffer'), wordBits, 0, size));
t.deepEqual(
byteOrderSwap(
murmurHashLE(input, seed, 'buffer'), wordBits, 0, size),
murmurHashBE(input, seed, 'buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(
murmurHashLE(input, seed, encoding),
byteOrderSwap(Buffer.from(
murmurHashBE(input, seed, encoding), encoding), wordBits, 0, size).toString(encoding));
t.strictEqual(
byteOrderSwap(Buffer.from(
murmurHashLE(input, seed, encoding), encoding), wordBits, 0, size).toString(encoding),
murmurHashBE(input, seed, encoding));
});
});
});
t.end();
});
});

View File

@ -1,401 +0,0 @@
"use strict";
var test = require('./uncaughtexceptionwrap_tap').test
, incr = require('../incremental')
, strm = require('../stream')
, hash = require('..')
;
test("should have murmurHash constructors", function(t) {
// The incremental module must expose each hasher constructor as a
// function; the named variants must also report their own function
// name. MurmurHash128's .name is not asserted (presumably an alias
// of one of the sized variants).
[
{ key: 'MurmurHash', named: true },
{ key: 'MurmurHash128', named: false },
{ key: 'MurmurHash128x64', named: true },
{ key: 'MurmurHash128x86', named: true }
].forEach(function(spec) {
var ctor = incr[spec.key];
t.type(ctor, 'function');
if (spec.named) t.strictEqual(ctor.name, spec.key);
});
t.end();
});
// Adapts the streaming MurmurHash class to the (seed, endian) factory
// signature used by the test matrix below, so incremental and stream
// variants can be exercised by the same suite.
function wrapStream(name) {
  return function(seed, endian) {
    if (seed instanceof strm.MurmurHash) {
      // Copy-construct from an existing streaming hasher.
      return new strm.MurmurHash(seed, {endianness: endian});
    }
    // Fresh hasher of the named algorithm with the given seed.
    return new strm.MurmurHash(name, {seed: seed, endianness: endian});
  };
}
// Data-driven test matrix; each row drives the full async-update suite
// against one hasher variant. Columns:
//   [0] digest size in bytes
//   [1] test label
//   [2] endianness constructor argument (void 0 = constructor default)
//   [3] incremental constructor or stream-wrapping factory under test
//   [4] matching one-shot murmurHash function to compare against
//   [5..7] expected digest('number') of empty input for seeds 0, -1 and +1
//          (for the 128-bit variants these are hex strings, not numbers)
//   [8..10] expected hex digests of empty input for seeds 0, -1 and +1
[
[4, 'MurmurHash', void(0), incr.MurmurHash, hash.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'MurmurHash', 'BE', incr.MurmurHash, hash.BE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'MurmurHash', 'LE', incr.MurmurHash, hash.LE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[4, 'MurmurHash (stream)', void(0), wrapStream('MurmurHash'), hash.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'MurmurHash (stream)', 'BE', wrapStream('MurmurHash'), hash.BE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'MurmurHash (stream)', 'LE', wrapStream('MurmurHash'), hash.LE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[16, 'MurmurHash128x64', void(0), incr.MurmurHash128x64, hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'MurmurHash128x64', 'BE', incr.MurmurHash128x64, hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'MurmurHash128x64', 'LE', incr.MurmurHash128x64, hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251'],
[16, 'MurmurHash128x64 (stream)', void(0), wrapStream('MurmurHash128x64'), hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'MurmurHash128x64 (stream)', 'BE', wrapStream('MurmurHash128x64'), hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'MurmurHash128x64 (stream)', 'LE', wrapStream('MurmurHash128x64'), hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251'],
[16, 'MurmurHash128x86', void(0), incr.MurmurHash128x86, hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'MurmurHash128x86', 'BE', incr.MurmurHash128x86, hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'MurmurHash128x86', 'LE', incr.MurmurHash128x86, hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254'],
[16, 'MurmurHash128x86 (stream)', void(0), wrapStream('MurmurHash128x86'), hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'MurmurHash128x86 (stream)', 'BE', wrapStream('MurmurHash128x86'), hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'MurmurHash128x86 (stream)', 'LE', wrapStream('MurmurHash128x86'), hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254']
].forEach(function(args) {
// Unpack one row of the matrix (see the column legend above).
var size = args[ 0]
, label = args[ 1]
, endian = args[ 2]
, MurmurHash = args[ 3]
, murmurHash = args[ 4]
, seedZeroNumber = args[ 5]
, seedMinusOneNumber = args[ 6]
, seedPlusOneNumber = args[ 7]
, seedZeroHex = args[ 8]
, seedMinusOneHex = args[ 9]
, seedPlusOneHex = args[10]
// Pre-compute the expected empty-input digests in every output encoding.
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
, seedZeroBase64 = seedZeroBuffer.toString('base64')
, seedMinusOneBase64 = seedMinusOneBuffer.toString('base64')
, seedPlusOneBase64 = seedPlusOneBuffer.toString('base64')
, seedZeroBinary = seedZeroBuffer.toString('binary')
, seedMinusOneBinary = seedMinusOneBuffer.toString('binary')
, seedPlusOneBinary = seedPlusOneBuffer.toString('binary')
;
test(label, function(t) {
t.type(MurmurHash, 'function');
// An error thrown from a user-supplied async update callback must escape
// as an uncaught exception rather than being swallowed.
t.test('should not bail on error throw in a callback', function(t) {
t.plan(3);
t.throwsUncaughtException(new Error("mana mana"));
t.strictEqual(undefined, MurmurHash(void(0), endian).update('', function(err) {
t.error(err);
throw new Error("mana mana");
}));
});
// Invalid update() data must produce a TypeError: thrown synchronously when
// no callback is given, delivered to the callback otherwise. The hasher
// reports isBusy=true until the error callback has fired.
t.test('should raise error for bad arguments', function(t) {
t.plan(1+11*5+6*2);
function cberrAsync(arg) {
var hasher = new MurmurHash();
hasher.endianness = endian || 'platform';
t.strictEqual(undefined, hasher.update(arg, function(err) {
t.type(err, TypeError);
t.strictEqual(err.message, "string or Buffer is required");
t.equal(hasher.isBusy, false);
}));
t.equal(hasher.isBusy, true);
}
function cberrNotThrow(err) {
t.error(err);
}
t.throws(function() {
new MurmurHash(null, endian).update(function() { t.error("should not be called"); });
}, new TypeError("string or Buffer is required") );
cberrAsync(undefined);
cberrAsync({});
cberrAsync([]);
cberrAsync(void(0));
cberrAsync(null);
cberrAsync(true);
cberrAsync(false);
cberrAsync(0);
cberrAsync(1);
cberrAsync(-1);
cberrAsync(new Date());
// A second string argument (an encoding-like token of any length) must not
// make the update of an empty string fail.
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "abcdefghijklmno", cberrNotThrow));
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "123456", cberrNotThrow));
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "12345", cberrNotThrow));
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "1234", cberrNotThrow));
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "123", cberrNotThrow));
t.strictEqual(undefined, new MurmurHash(void(0), endian).update("", "", cberrNotThrow));
});
// While an async update is pending, every other operation on the hasher
// (and copying from or into it) must fail with an in-progress error.
t.test('should raise in-progress error', function(t) {
t.plan(16);
var hasher = new MurmurHash(void(0), endian);
function cb1(err) {
t.error(err);
t.strictEqual(hasher.isBusy, false);
}
function cberr1(err) {
t.type(err, Error);
t.strictEqual(hasher.isBusy, true);
t.strictEqual(err.message, "Asynchronous update still in progress");
}
t.strictEqual(hasher.isBusy, false);
hasher.update('foobarbaz', cb1);
t.strictEqual(hasher.isBusy, true);
t.strictEqual(undefined, hasher.update('', cberr1));
t.throws(function() { hasher.update(''); }, new Error("Asynchronous update still in progress") );
t.throws(function() { hasher.digest(); }, new Error("Asynchronous update still in progress") );
t.throws(function() { hasher.serialize(); }, new Error("Asynchronous update still in progress") );
t.throws(function() { hasher.toJSON(); }, new Error("Asynchronous update still in progress") );
t.throws(function() { hasher.copy(new MurmurHash(void(0), endian)); }, new Error("Asynchronous update still in progress") );
t.throws(function() { new MurmurHash(void(0), endian).copy(hasher); }, new Error("Asynchronous update still in progress") );
t.throws(function() { new MurmurHash(hasher); }, new Error("Asynchronous update still in progress") );
t.throws(function() { MurmurHash(hasher, endian); }, new Error("Asynchronous update still in progress") );
});
// Async update of empty input must yield the tabulated digest for each
// seed, in every output encoding; seeds 4294967295/-1 and 4294967296/0
// coincide, i.e. the seed wraps modulo 2^32.
t.test('should async update and create hash from empty data', function(t) {
t.plan(12*12);
function async(hasher, update, bufvalue, hexvalue, numvalue, base64value, binvalue) {
t.strictEqual(undefined, hasher.update(update, function(err) {
t.error(err);
var result = hasher.digest();
t.deepEqual(result, bufvalue);
t.strictEqual(result.toString('hex'), hexvalue);
t.deepEqual(hasher.digest('buffer'), bufvalue);
// Unrecognized and character-set encodings yield the raw Buffer digest.
t.deepEqual(hasher.digest('foobar'), bufvalue);
t.deepEqual(hasher.digest('utf8'), bufvalue);
t.deepEqual(hasher.digest('ucs2'), bufvalue);
t.strictEqual(hasher.digest('number'), numvalue);
t.strictEqual(hasher.digest('hex'), hexvalue);
t.strictEqual(hasher.digest('base64'), base64value);
t.strictEqual(hasher.digest('binary'), binvalue);
}));
}
async(new MurmurHash(void(0), endian), '', seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(0, endian), '', seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(void(0), endian), Buffer.from(''), seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(0, endian), Buffer.from(''), seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(-1, endian), '', seedMinusOneBuffer, seedMinusOneHex, seedMinusOneNumber, seedMinusOneBase64, seedMinusOneBinary);
async(new MurmurHash(-1, endian), Buffer.from(''), seedMinusOneBuffer, seedMinusOneHex, seedMinusOneNumber, seedMinusOneBase64, seedMinusOneBinary);
async(new MurmurHash(4294967295, endian), '', seedMinusOneBuffer, seedMinusOneHex, seedMinusOneNumber, seedMinusOneBase64, seedMinusOneBinary);
async(new MurmurHash(4294967295, endian), Buffer.from(''), seedMinusOneBuffer, seedMinusOneHex, seedMinusOneNumber, seedMinusOneBase64, seedMinusOneBinary);
async(new MurmurHash(4294967296, endian), '', seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(4294967296, endian), Buffer.from(''), seedZeroBuffer, seedZeroHex, seedZeroNumber, seedZeroBase64, seedZeroBinary);
async(new MurmurHash(1, endian), '', seedPlusOneBuffer, seedPlusOneHex, seedPlusOneNumber, seedPlusOneBase64, seedPlusOneBinary);
async(new MurmurHash(1, endian), Buffer.from(''), seedPlusOneBuffer, seedPlusOneHex, seedPlusOneNumber, seedPlusOneBase64, seedPlusOneBinary);
});
// Updating with a string in a given encoding must match updating with the
// equivalently-encoded Buffer, and both must match the one-shot function.
t.test('should utilize different string input encodings', function(t) {
t.plan(11*6);
var string = "\u1220łóżko"
, base64 = '4YigxYLDs8W8a28='
, hex = 'e188a0c582c3b3c5bc6b6f';
function async(string, encoding) {
var hasher1 = new MurmurHash(void(0), endian);
t.strictEqual(undefined, encoding ? hasher1.update(string, encoding, cb) : hasher1.update(string, cb));
function cb(err) {
t.error(err);
// No explicit encoding defaults to utf8 for the Buffer comparison.
encoding = encoding || 'utf8';
var hasher2 = new MurmurHash(void(0), endian);
t.strictEqual(undefined, hasher2.update(Buffer.from(string, encoding), function(err) {
t.error(err);
t.deepEqual(hasher2.digest(), hasher1.digest());
t.deepEqual(hasher2.digest(), murmurHash(string, encoding, 'buffer'));
}));
}
}
async(string);
async(string, 'utf8');
async(string, 'utf-8');
async(string, 'ucs2');
async(string, 'ucs-2');
async(string, 'utf16le');
async(string, 'utf-16le');
async(string, 'ascii');
async(string, 'binary');
async(base64, 'base64');
async(hex, 'hex');
});
// Async digests of ~1000 random characters must agree with the synchronous
// one-shot function for both string and Buffer inputs and several seeds.
t.test('should create hash from some random data', function(t) {
t.plan(6*19);
var data = '';
var strlen = 1000;
for (var i = 0; i < strlen; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var buffer = Buffer.from(data, 'utf8');
function async(seed) {
var hasher1 = new MurmurHash(seed, endian);
t.strictEqual(undefined, hasher1.update(data, function(err) {
t.error(err);
t.equal(hasher1.digest().length, size);
t.equal(hasher1.total, buffer.length);
t.deepEqual(hasher1.digest(), murmurHash(data, 'utf8', seed|0, 'buffer'));
t.strictEqual(hasher1.digest('number'), murmurHash(data, 'utf8', seed));
t.strictEqual(hasher1.digest('hex'), murmurHash(data, 'utf8', seed|0, 'hex'));
t.strictEqual(hasher1.digest('base64'), murmurHash(data, 'utf8', seed|0, 'base64'));
t.strictEqual(hasher1.digest('binary'), murmurHash(data, 'utf8', seed|0, 'binary'));
var hasher2 = new MurmurHash(seed, endian);
t.strictEqual(undefined, hasher2.update(buffer, function(err) {
t.error(err);
t.equal(hasher2.digest().length, size);
t.equal(hasher2.total, buffer.length);
t.deepEqual(hasher2.digest(), murmurHash(buffer, seed, 'buffer'));
t.deepEqual(hasher2.digest(), hasher1.digest());
t.strictEqual(hasher2.digest('number'), hasher1.digest('number'));
t.strictEqual(hasher2.digest('hex'), hasher1.digest('hex'));
t.strictEqual(hasher2.digest('base64'), hasher1.digest('base64'));
t.strictEqual(hasher2.digest('binary'), hasher1.digest('binary'));
}));
}));
}
async();
async(0);
async(-1);
async(1);
async(4294967295);
async((Math.random()*4294967296)|0);
});
// Feed 10007 random bytes in random-sized chunks, asynchronously, to six
// hashers (three seeds x Buffer/'binary'-string input) and verify the
// incremental digests match each other and the one-shot results.
t.test('should create hash from some random data incrementally', function(t) {
t.plan(33);
var maxchunksize = 101;
var buffer = Buffer.allocUnsafe(10007);
var seed = (Math.random()*4294967296)|0;
var hasher0 = new MurmurHash(0, endian);
var hasher1 = new MurmurHash(1, endian);
var hasherS = new MurmurHash(seed, endian);
var hasher0str = new MurmurHash(0, endian);
var hasher1str = new MurmurHash(1, endian);
var hasherSstr = new MurmurHash(seed, endian);
var lastErr = null;
var n = 0;
var feed = function() {
if (n < buffer.length) {
var p = n;
// Fill the next chunk of 1..maxchunksize random bytes in place.
var slicelen = (Math.random()*maxchunksize|0) + 1;
for(var j = 0; j < slicelen; ++j) {
if (n >= buffer.length) break;
buffer[n++] = (Math.random()*0x100)|0;
}
// Feed the chunk to all six hashers; recurse only after every pending
// callback has fired (countdown reaches zero).
var countdown = 0;
var callback = function(err) {
if (err) lastErr = err;
if (!--countdown) feed();
};
hasher0.update(buffer.slice(p, n), callback); ++countdown;
hasher1.update(buffer.slice(p, n), callback); ++countdown;
hasherS.update(buffer.slice(p, n), callback); ++countdown;
hasher0str.update(buffer.slice(p, n).toString('binary'), 'binary', callback); ++countdown;
hasher1str.update(buffer.slice(p, n).toString('binary'), 'binary', callback); ++countdown;
hasherSstr.update(buffer.slice(p, n).toString('binary'), 'binary', callback); ++countdown;
} else
check();
};
feed();
function check() {
t.error(lastErr);
t.equal(n, buffer.length);
t.equal(n, 10007);
t.equal(hasher0.total, buffer.length);
t.equal(hasher1.total, buffer.length);
t.equal(hasherS.total, buffer.length);
t.equal(hasher0str.total, buffer.length);
t.equal(hasher1str.total, buffer.length);
t.equal(hasherSstr.total, buffer.length);
var data = buffer.toString('binary');
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').total, buffer.length);
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).total, buffer.length);
t.strictEqual(new MurmurHash(void(0), endian).update(data, 'binary').digest('number'),
new MurmurHash(void(0), endian).update(buffer).digest('number'));
var d0 = hasher0.digest('number');
var d1 = hasher1.digest('number');
var dS = hasherS.digest('number');
var d0str = hasher0str.digest('number');
var d1str = hasher1str.digest('number');
var dSstr = hasherSstr.digest('number');
// Digests of different seeds must differ; string-fed and Buffer-fed
// hashers with the same seed must agree.
t.notStrictEqual(d0, d1);
t.notStrictEqual(d0, dS);
t.strictEqual(d0, d0str);
t.strictEqual(d1, d1str);
t.strictEqual(dS, dSstr);
t.strictEqual(d0, new MurmurHash(void(0), endian).update(buffer).digest('number'));
t.strictEqual(d0, new MurmurHash(void(0), endian).update(data, 'binary').digest('number'));
t.strictEqual(d0, murmurHash(buffer));
t.strictEqual(d0, murmurHash(data));
t.strictEqual(d1, new MurmurHash(1, endian).update(buffer).digest('number'));
t.strictEqual(d1, new MurmurHash(1, endian).update(data, 'binary').digest('number'));
t.strictEqual(d1, murmurHash(buffer, 1));
t.strictEqual(d1, murmurHash(data, 1));
t.strictEqual(dS, new MurmurHash(seed, endian).update(buffer).digest('number'));
t.strictEqual(dS, new MurmurHash(seed, endian).update(data, 'binary').digest('number'));
t.strictEqual(dS, murmurHash(buffer, seed));
t.strictEqual(dS, murmurHash(data, seed));
}
});
t.end();
});
});

View File

@ -1,103 +0,0 @@
"use strict";
var os = require("os")
, crypto = require("crypto")
, test = require("tap").test
, byteOrderSwap = require("./byteswap/byteorderswap")
, incr = require('../incremental')
;
var endianness = os.endianness();
// Endianness comparison matrix: [digest size in bytes, hash word size in
// bits, incremental constructor name]. wordBits drives byteOrderSwap when
// relating 'LE' digests to 'BE' digests below.
[
[4, 32, 'MurmurHash'],
[16, 32, 'MurmurHash128x86'],
[16, 64, 'MurmurHash128x64']
].forEach(function(args) {
var size = args[ 0]
, wordBits = args[ 1]
, name = args[ 2]
, MurmurHash = incr[name]
;
test(name, function(t) {
t.type(MurmurHash, 'function');
// The default endianness must behave as 'BE', and 'platform' must match
// the host byte order, for every input shape and output encoding.
t.test('should render same results', function(t) {
t.plan(6*2*(2 + 3*2));
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
['', Buffer.alloc(0), crypto.randomBytes(10007), crypto.randomBytes(10007).toString('utf8'), crypto.randomBytes(Math.random() * 100>>>0), crypto.randomBytes(Math.random() * 100>>>0).toString('utf8')]
.forEach(function(input) {
t.deepEqual(
new MurmurHash(seed).update(input).digest('buffer'),
MurmurHash(seed, 'BE').update(input).digest('buffer'));
t.deepEqual(
MurmurHash(seed).update(input).digest('buffer'),
new MurmurHash(seed, 'BE').update(input).digest('buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(
new MurmurHash(seed).update(input).digest(encoding),
MurmurHash(seed, 'BE').update(input).digest(encoding));
t.strictEqual(
MurmurHash(seed).update(input).digest(encoding),
new MurmurHash(seed, 'BE').update(input).digest(encoding));
});
t.deepEqual(
new MurmurHash(seed, 'platform').update(input).digest('buffer'),
MurmurHash(seed, endianness).update(input).digest('buffer'));
t.deepEqual(
MurmurHash(seed, 'platform').update(input).digest('buffer'),
new MurmurHash(seed, endianness).update(input).digest('buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(
new MurmurHash(seed, 'platform').update(input).digest(encoding),
MurmurHash(seed, endianness).update(input).digest(encoding));
t.strictEqual(
MurmurHash(seed, 'platform').update(input).digest(encoding),
new MurmurHash(seed, endianness).update(input).digest(encoding));
});
});
});
// An 'LE' digest must equal the 'BE' digest with every wordBits-sized hash
// word byte-swapped (and vice versa), in every output encoding.
t.test('should render swapped results', function(t) {
t.plan(6*(2 + 3*2));
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
['', Buffer.alloc(0), crypto.randomBytes(10007), crypto.randomBytes(10007).toString('utf8'), crypto.randomBytes(Math.random() * 100>>>0), crypto.randomBytes(Math.random() * 100>>>0).toString('utf8')]
.forEach(function(input) {
t.deepEqual(
new MurmurHash(seed, 'LE').update(input).digest('buffer'),
byteOrderSwap(
MurmurHash(seed, 'BE').update(input).digest('buffer'), wordBits, 0, size));
t.deepEqual(
byteOrderSwap(
MurmurHash(seed, 'LE').update(input).digest('buffer'), wordBits, 0, size),
new MurmurHash(seed, 'BE').update(input).digest('buffer'));
['hex', 'binary', 'base64'].forEach(function(encoding) {
t.strictEqual(
new MurmurHash(seed, 'LE').update(input).digest(encoding),
byteOrderSwap(Buffer.from(
MurmurHash(seed, 'BE').update(input).digest(encoding), encoding), wordBits, 0, size).toString(encoding));
t.strictEqual(
byteOrderSwap(Buffer.from(
MurmurHash(seed, 'LE').update(input).digest(encoding), encoding), wordBits, 0, size).toString(encoding),
new MurmurHash(seed, 'BE').update(input).digest(encoding));
});
});
});
t.end();
});
});

View File

@ -1,796 +0,0 @@
// jshint loopfunc:true
"use strict";
var os = require("os")
, test = require("tap").test
, byteOrderSwap = require("./byteswap/byteorderswap")
, incr = require('../incremental')
, strm = require('../stream')
, hash = require('..')
;
// Verify the incremental module exposes every hasher constructor as a
// function, and that the named variants carry their expected .name.
test("should have murmurHash constructors", function(t) {
  t.type(incr.MurmurHash128, 'function');
  ['MurmurHash', 'MurmurHash128x64', 'MurmurHash128x86'].forEach(function(ctorName) {
    t.type(incr[ctorName], 'function');
    t.strictEqual(incr[ctorName].name, ctorName);
  });
  t.end();
});
// Serialized state from one hasher variant must be rejected by the others:
// a same-size foreign variant reports a content error, while a variant
// whose state has a different size reports a size error.
test("should not deserialize foreign serialized data", function(t) {
  function rejects(Ctor, serial, message) {
    t.throws(function() { new Ctor(serial); }, new TypeError(message));
  }
  var serial0 = incr.MurmurHash128x64().serialize();
  rejects(incr.MurmurHash128x86, serial0, "Incorrect serialized string");
  rejects(incr.MurmurHash, serial0, "Incorrect size of the serialized string");
  var serial1 = incr.MurmurHash128x86().serialize();
  rejects(incr.MurmurHash128x64, serial1, "Incorrect serialized string");
  rejects(incr.MurmurHash, serial1, "Incorrect size of the serialized string");
  var serial2 = incr.MurmurHash().serialize();
  rejects(incr.MurmurHash128x86, serial2, "Incorrect size of the serialized string");
  rejects(incr.MurmurHash128x64, serial2, "Incorrect size of the serialized string");
  t.end();
});
// Adapts the streaming MurmurHash class to the (seed, endian) factory
// signature used by the test matrix below, so incremental and stream
// variants can be exercised by the same suite.
function wrapStream(name) {
  return function(seed, endian) {
    if (seed instanceof strm.MurmurHash) {
      // Copy-construct from an existing streaming hasher.
      return new strm.MurmurHash(seed, {endianness: endian});
    }
    // Fresh hasher of the named algorithm with the given seed.
    return new strm.MurmurHash(name, {seed: seed, endianness: endian});
  };
}
[
[4, 32, 'MurmurHash', void(0), incr.MurmurHash, incr.MurmurHash, hash.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'e72d7f37'],
[4, 32, 'MurmurHash', 'BE', incr.MurmurHash, incr.MurmurHash, hash.BE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'e72d7f37'],
[4, 32, 'MurmurHash', 'LE', incr.MurmurHash, incr.MurmurHash, hash.LE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51',
'377f2de7'],
[4, 32, 'MurmurHash (stream)', void(0), wrapStream('MurmurHash'), strm.MurmurHash, hash.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'e72d7f37'],
[4, 32, 'MurmurHash (stream)', 'BE', wrapStream('MurmurHash'), strm.MurmurHash, hash.BE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'e72d7f37'],
[4, 32, 'MurmurHash (stream)', 'LE', wrapStream('MurmurHash'), strm.MurmurHash, hash.LE.murmurHash,
0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51',
'377f2de7'],
[16, 64, 'MurmurHash128x64', void(0), incr.MurmurHash128x64, incr.MurmurHash128x64, hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'18a573e78e997f9b0be9c4b4595e5875'],
[16, 64, 'MurmurHash128x64', 'BE', incr.MurmurHash128x64, incr.MurmurHash128x64, hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'18a573e78e997f9b0be9c4b4595e5875'],
[16, 64, 'MurmurHash128x64', 'LE', incr.MurmurHash128x64, incr.MurmurHash128x64, hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'9b7f998ee773a51875585e59b4c4e90b'],
[16, 64, 'MurmurHash128x64 (stream)', void(0), wrapStream('MurmurHash128x64'), strm.MurmurHash, hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'18a573e78e997f9b0be9c4b4595e5875'],
[16, 64, 'MurmurHash128x64 (stream)', 'BE', wrapStream('MurmurHash128x64'), strm.MurmurHash, hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'18a573e78e997f9b0be9c4b4595e5875'],
[16, 64, 'MurmurHash128x64 (stream)', 'LE', wrapStream('MurmurHash128x64'), strm.MurmurHash, hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'9b7f998ee773a51875585e59b4c4e90b'],
[16, 32, 'MurmurHash128x86', void(0), incr.MurmurHash128x86, incr.MurmurHash128x86, hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'cf690ba00d5fb908b2978b4d8d77cbee'],
[16, 32, 'MurmurHash128x86', 'BE', incr.MurmurHash128x86, incr.MurmurHash128x86, hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'cf690ba00d5fb908b2978b4d8d77cbee'],
[16, 32, 'MurmurHash128x86', 'LE', incr.MurmurHash128x86, incr.MurmurHash128x86, hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'a00b69cf08b95f0d4d8b97b2eecb778d'],
[16, 32, 'MurmurHash128x86 (stream)', void(0), wrapStream('MurmurHash128x86'), strm.MurmurHash, hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'cf690ba00d5fb908b2978b4d8d77cbee'],
[16, 32, 'MurmurHash128x86 (stream)', 'BE', wrapStream('MurmurHash128x86'), strm.MurmurHash, hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'cf690ba00d5fb908b2978b4d8d77cbee'],
[16, 32, 'MurmurHash128x86 (stream)', 'LE', wrapStream('MurmurHash128x86'), strm.MurmurHash, hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'a00b69cf08b95f0d4d8b97b2eecb778d']
].forEach(function(args) {
var size = args[ 0]
, wordBits = args[ 1]
, label = args[ 2]
, endian = args[ 3]
, MurmurHash = args[ 4]
, klass = args[ 5]
, murmurHash = args[ 6]
, seedZeroNumber = args[ 7]
, seedMinusOneNumber = args[ 8]
, seedPlusOneNumber = args[ 9]
, seedZeroHex = args[10]
, seedMinusOneHex = args[11]
, seedPlusOneHex = args[12]
, crashTestHex = args[13]
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
, seedZeroBase64 = seedZeroBuffer.toString('base64')
, seedMinusOneBase64 = seedMinusOneBuffer.toString('base64')
, seedPlusOneBase64 = seedPlusOneBuffer.toString('base64')
, seedZeroBinary = seedZeroBuffer.toString('binary')
, seedMinusOneBinary = seedMinusOneBuffer.toString('binary')
, seedPlusOneBinary = seedPlusOneBuffer.toString('binary')
;
test(label, function(t) {
t.type(MurmurHash, 'function');
t.test("should create hasher", function(t) {
var hasher = new MurmurHash();
t.type(hasher, 'object');
t.type(hasher, klass);
t.type(hasher.update, 'function');
t.type(hasher.digest, 'function');
t.equal(hasher.total, 0);
t.strictEqual(hasher.endianness, 'BE');
t.type(hasher.SERIAL_BYTE_LENGTH, 'number');
t.strictEqual(hasher.isBusy, false);
if (klass === strm.MurmurHash) {
t.deepEqual(Object.keys(hasher), ['_handle', '_options', 'SERIAL_BYTE_LENGTH']);
} else {
t.deepEqual(Object.keys(hasher), ['endianness', 'total', 'SERIAL_BYTE_LENGTH']);
}
t.strictEqual(hasher.total, 0);
hasher = MurmurHash();
t.type(hasher, 'object');
t.type(hasher, klass);
t.type(hasher.update, 'function');
t.type(hasher.digest, 'function');
t.equal(hasher.total, 0);
t.strictEqual(hasher.endianness, 'BE');
t.type(hasher.SERIAL_BYTE_LENGTH, 'number');
t.strictEqual(hasher.isBusy, false);
if (klass === strm.MurmurHash) {
t.deepEqual(Object.keys(hasher), ['_handle', '_options', 'SERIAL_BYTE_LENGTH']);
} else {
t.deepEqual(Object.keys(hasher), ['endianness', 'total', 'SERIAL_BYTE_LENGTH']);
}
t.strictEqual(hasher.total, 0);
t.end();
});
t.test("should create hasher with specified endianness", function(t) {
var hasher = new MurmurHash(null, endian);
t.type(hasher, 'object');
t.type(hasher, klass);
t.type(hasher.update, 'function');
t.type(hasher.digest, 'function');
t.equal(hasher.total, 0);
t.strictEqual(hasher.endianness, endian || 'BE');
t.type(hasher.SERIAL_BYTE_LENGTH, 'number');
t.strictEqual(hasher.isBusy, false);
if (klass === strm.MurmurHash) {
t.deepEqual(Object.keys(hasher), ['_handle', '_options', 'SERIAL_BYTE_LENGTH']);
} else {
t.deepEqual(Object.keys(hasher), ['endianness', 'total', 'SERIAL_BYTE_LENGTH']);
}
t.strictEqual(hasher.total, 0);
hasher = MurmurHash(0, endian, 'foo', 'bar', ['baz']);
t.type(hasher, 'object');
t.type(hasher, klass);
t.type(hasher.update, 'function');
t.type(hasher.digest, 'function');
t.equal(hasher.total, 0);
t.strictEqual(hasher.endianness, endian || 'BE');
t.type(hasher.SERIAL_BYTE_LENGTH, 'number');
t.strictEqual(hasher.isBusy, false);
if (klass === strm.MurmurHash) {
t.deepEqual(Object.keys(hasher), ['_handle', '_options', 'SERIAL_BYTE_LENGTH']);
} else {
t.deepEqual(Object.keys(hasher), ['endianness', 'total', 'SERIAL_BYTE_LENGTH']);
}
t.strictEqual(hasher.total, 0);
hasher = MurmurHash(0, 'platform', 'foo', 'bar', ['baz']);
t.type(hasher, 'object');
t.type(hasher, klass);
t.type(hasher.update, 'function');
t.type(hasher.digest, 'function');
t.equal(hasher.total, 0);
t.strictEqual(hasher.endianness, os.endianness());
hasher.endianness = 'Be';
t.strictEqual(hasher.endianness, 'BE');
hasher.endianness = 'Le';
t.strictEqual(hasher.endianness, 'LE');
hasher.endianness = 'pLaTfOrM';
t.strictEqual(hasher.endianness, os.endianness());
t.type(hasher.SERIAL_BYTE_LENGTH, 'number');
t.strictEqual(hasher.isBusy, false);
if (klass === strm.MurmurHash) {
t.deepEqual(Object.keys(hasher), ['_handle', '_options', 'SERIAL_BYTE_LENGTH']);
} else {
t.deepEqual(Object.keys(hasher), ['endianness', 'total', 'SERIAL_BYTE_LENGTH']);
}
t.strictEqual(hasher.total, 0);
t.end();
});
t.test('should throw error for bad arguments', function(t) {
  // update() only accepts strings and Buffers.
  var updateError = new TypeError("string or Buffer is required");
  t.throws(function() { new MurmurHash(void(0), endian).update(); }, updateError);
  [{}, [], void(0), null, true, false, 0, 1, -1, new Date()].forEach(function(arg) {
    t.throws(function() { new MurmurHash(void(0), endian).update(arg); }, updateError);
  });
  // The first constructor argument must be a seed number, another instance
  // or a serialized state; works the same with and without `new`.
  var seedError = new TypeError("Expected a seed number, MurmurHash instance or serialized state");
  t.throws(function() { new MurmurHash([], endian); }, seedError);
  t.throws(function() { MurmurHash([], endian); }, seedError);
  [{}, new Date(), true, false].forEach(function(seed) {
    t.throws(function() { new MurmurHash(seed, endian); }, seedError);
  });
  // Invalid endianness passed to the constructor.
  var endianError = new TypeError("Unknown endianness: should be \"BE\", \"LE\" or \"platform\"");
  ['', 'foo', 0, [], {}, new Date(), true, false].forEach(function(value) {
    t.throws(function() { new MurmurHash(void(0), value); }, endianError);
  });
  // Invalid endianness assigned through the setter; unlike the constructor,
  // null and undefined are rejected here as well.
  ['', 'foo', 0, [], {}, new Date(), true, false, null, void(0)].forEach(function(value) {
    t.throws(function() { new MurmurHash().endianness = value; }, endianError);
  });
  // Nullish seed/endianness are accepted by the constructor.
  [ [function() { new MurmurHash(); }, "nothing accepted"],
    [function() { new MurmurHash(null); }, "null accepted"],
    [function() { new MurmurHash(null, endian); }, "null with endian accepted"],
    [function() { new MurmurHash(null, null); }, "null with null accepted"],
    [function() { new MurmurHash(void(0)); }, "undefined accepted"],
    [function() { new MurmurHash(void(0), endian); }, "undefined with endian accepted"],
    [function() { new MurmurHash(void(0), void(0)); }, "undefined with undefined accepted"],
    [function() { new MurmurHash().endianness = endian || 'platform'; }, "allowed endianness"]
  ].forEach(function(pair) {
    t.notThrow(pair[0], pair[1]);
  });
  // Bogus input encodings must be tolerated.
  [ ["abcdefghijklmno", "invalid encoding should be accepted"],
    ["123456", "invalid encoding should be accepted and ignored"],
    ["12345", "invalid encoding should be accepted and ignored"],
    ["1234", "invalid encoding should be accepted and ignored"],
    ["123", "invalid encoding should be accepted and ignored"],
    ["", "invalid encoding should be accepted and ignored"]
  ].forEach(function(pair) {
    t.notThrow(function() { new MurmurHash(void(0), endian).update("", pair[0]); }, pair[1]);
  });
  // Bogus output types must be tolerated as well.
  ["", "mumber", "xxxxxxx", "utf-8"].forEach(function(type) {
    t.notThrow(function() { new MurmurHash(void(0), endian).digest(type); }, "invalid output type should be accepted and ignored");
  });
  // digest() may be called repeatedly, interleaved with further updates.
  var hasher = new MurmurHash(void(0), endian);
  t.notThrow(function() { hasher.digest(); }, "first digest ok");
  t.notThrow(function() { hasher.update(''); }, "update ok");
  t.notThrow(function() { hasher.digest(); }, "second digest ok");
  t.notThrow(function() { hasher.update(''); }, "update ok");
  t.notThrow(function() { hasher.digest(); }, "third digest ok");
  t.end();
});
t.test('should create buffer hash from empty data', function(t) {
// The digest of empty input depends only on the seed. Seeds congruent
// mod 2^32 must alias each other: undefined ~ 0 ~ 4294967296, and
// -1 ~ 4294967295. An unrecognized digest type ('foobar'; 'utf8' is also
// not a valid output type here) falls back to the default Buffer output.
// -- zero seed, string input (update('') and no update at all) --
t.deepEqual(new MurmurHash(void(0), endian).update('').digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update('').digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update('').digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update('').digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update('').digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update('').digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update('').digest('utf8'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).digest('utf8'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update('').digest('utf8'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).digest('utf8'), seedZeroBuffer);
t.strictEqual(new MurmurHash(void(0), endian).update('').digest('buffer').toString('hex'), seedZeroHex);
t.strictEqual(new MurmurHash(void(0), endian).digest('buffer').toString('hex'), seedZeroHex);
t.strictEqual(new MurmurHash(0, endian).update('').digest('buffer').toString('hex'), seedZeroHex);
t.strictEqual(new MurmurHash(0, endian).digest('buffer').toString('hex'), seedZeroHex);
// -- zero seed, empty Buffer input --
t.deepEqual(new MurmurHash(void(0), endian).update(Buffer.from('')).digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update(Buffer.from('')).digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update(Buffer.from('')).digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update(Buffer.from('')).digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update(Buffer.from('')).digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update(Buffer.from('')).digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(void(0), endian).update(Buffer.from('')).digest('utf8'), seedZeroBuffer);
t.deepEqual(new MurmurHash(0, endian).update(Buffer.from('')).digest('utf8'), seedZeroBuffer);
t.strictEqual(new MurmurHash(void(0), endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedZeroHex);
t.strictEqual(new MurmurHash(0, endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedZeroHex);
// -- seed -1 --
t.deepEqual(new MurmurHash(-1, endian).update('').digest('buffer'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update('').digest(), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update('').digest('foobar'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update('').digest('utf8'), seedMinusOneBuffer);
t.strictEqual(new MurmurHash(-1, endian).update('').digest('buffer').toString('hex'), seedMinusOneHex);
t.deepEqual(new MurmurHash(-1, endian).update(Buffer.from('')).digest('buffer'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update(Buffer.from('')).digest(), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update(Buffer.from('')).digest('foobar'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(-1, endian).update(Buffer.from('')).digest('utf8'), seedMinusOneBuffer);
t.strictEqual(new MurmurHash(-1, endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedMinusOneHex);
// -- seed 4294967295 (2^32-1): same 32-bit value as -1 --
t.deepEqual(new MurmurHash(4294967295, endian).update('').digest('buffer'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update('').digest(), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update('').digest('foobar'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update('').digest('utf8'), seedMinusOneBuffer);
t.strictEqual(new MurmurHash(4294967295, endian).update('').digest('buffer').toString('hex'), seedMinusOneHex);
t.deepEqual(new MurmurHash(4294967295, endian).update(Buffer.from('')).digest('buffer'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update(Buffer.from('')).digest(), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update(Buffer.from('')).digest('foobar'), seedMinusOneBuffer);
t.deepEqual(new MurmurHash(4294967295, endian).update(Buffer.from('')).digest('utf8'), seedMinusOneBuffer);
t.strictEqual(new MurmurHash(4294967295, endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedMinusOneHex);
// -- seed 4294967296 (2^32): wraps around to 0 --
t.deepEqual(new MurmurHash(4294967296, endian).update('').digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update('').digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update('').digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update('').digest('utf8'), seedZeroBuffer);
t.strictEqual(new MurmurHash(4294967296, endian).update('').digest('buffer').toString('hex'), seedZeroHex);
t.deepEqual(new MurmurHash(4294967296, endian).update(Buffer.from('')).digest('buffer'), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update(Buffer.from('')).digest(), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update(Buffer.from('')).digest('foobar'), seedZeroBuffer);
t.deepEqual(new MurmurHash(4294967296, endian).update(Buffer.from('')).digest('utf8'), seedZeroBuffer);
t.strictEqual(new MurmurHash(4294967296, endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedZeroHex);
// -- seed +1 --
t.deepEqual(new MurmurHash(1, endian).update('').digest('buffer'), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update('').digest(), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update('').digest('foobar'), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update('').digest('utf8'), seedPlusOneBuffer);
t.strictEqual(new MurmurHash(1, endian).update('').digest('buffer').toString('hex'), seedPlusOneHex);
t.deepEqual(new MurmurHash(1, endian).update(Buffer.from('')).digest('buffer'), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update(Buffer.from('')).digest(), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update(Buffer.from('')).digest('foobar'), seedPlusOneBuffer);
t.deepEqual(new MurmurHash(1, endian).update(Buffer.from('')).digest('utf8'), seedPlusOneBuffer);
t.strictEqual(new MurmurHash(1, endian).update(Buffer.from('')).digest('buffer').toString('hex'), seedPlusOneHex);
t.end();
});
t.test('should create number hash from empty data', function(t) {
  // Verifies the 'number' digest of empty input for every spelling of each
  // seed. The bogus input-encoding arguments ('foo', 'number') must be
  // accepted and ignored for both string and Buffer inputs.
  function checkSeed(seed, expected, strEnc, bufEnc) {
    t.strictEqual(new MurmurHash(seed, endian).update('').digest('number'), expected);
    t.strictEqual(new MurmurHash(seed, endian).update('', strEnc).digest('number'), expected);
    t.strictEqual(new MurmurHash(seed, endian).update(Buffer.from('')).digest('number'), expected);
    t.strictEqual(new MurmurHash(seed, endian).update(Buffer.from(''), bufEnc).digest('number'), expected);
  }
  checkSeed(void(0), seedZeroNumber, 'foo', 'foo');
  checkSeed(0, seedZeroNumber, 'foo', 'foo');
  // -1 and 2^32-1 are the same 32-bit seed; 2^32 wraps back to 0.
  checkSeed(-1, seedMinusOneNumber, 'foo', 'foo');
  checkSeed(4294967295, seedMinusOneNumber, 'foo', 'number');
  checkSeed(4294967296, seedZeroNumber, 'foo', 'number');
  checkSeed(1, seedPlusOneNumber, 'foo', 'foo');
  t.end();
});
t.test('should create string encoded hash from empty data', function(t) {
  // Each output encoding must yield the expected text for every spelling of
  // the zero / -1 / +1 seeds (2^32-1 aliases -1, 2^32 wraps to 0), for both
  // empty-string and empty-Buffer input.
  [ ['hex',    seedZeroHex,    seedMinusOneHex,    seedPlusOneHex],
    ['base64', seedZeroBase64, seedMinusOneBase64, seedPlusOneBase64],
    ['binary', seedZeroBinary, seedMinusOneBinary, seedPlusOneBinary]
  ].forEach(function(row) {
    var enc = row[0], zero = row[1], minusOne = row[2], plusOne = row[3];
    [ [void(0), zero],
      [0, zero],
      [-1, minusOne],
      [4294967295, minusOne],
      [4294967296, zero],
      [1, plusOne]
    ].forEach(function(pair) {
      var seed = pair[0], expected = pair[1];
      t.strictEqual(new MurmurHash(seed, endian).update('').digest(enc), expected);
      t.strictEqual(new MurmurHash(seed, endian).update(Buffer.from('')).digest(enc), expected);
    });
  });
  t.end();
});
t.test('should utilize different string input encodings', function(t) {
  var string = "\u1220łóżko"
    , base64 = '4YigxYLDs8W8a28='
    , hex = 'e188a0c582c3b3c5bc6b6f'
    , hash = murmurHash(string, 'utf8', 'buffer');
  // With no encoding given, strings hash as utf8.
  t.deepEqual(new MurmurHash(void(0), endian).update(string).digest(), hash);
  t.deepEqual(new MurmurHash(void(0), endian).update(Buffer.from(string, 'utf8')).digest(), hash);
  // Every supported string encoding must agree with hashing the equivalent
  // Buffer; the utf8 spellings additionally match the reference hash.
  ['ascii', 'binary', 'utf8', 'utf-8', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le'].forEach(function(enc) {
    t.deepEqual(new MurmurHash(void(0), endian).update(string, enc).digest(),
                new MurmurHash(void(0), endian).update(Buffer.from(string, enc)).digest());
    if (enc === 'utf8' || enc === 'utf-8') {
      t.deepEqual(new MurmurHash(void(0), endian).update(string, enc).digest(), hash);
    }
  });
  // Binary-to-text encodings decode back to the same bytes as the utf8 string.
  [ ['base64', base64], ['hex', hex] ].forEach(function(pair) {
    var enc = pair[0], text = pair[1];
    t.deepEqual(new MurmurHash(void(0), endian).update(text, enc).digest(), hash);
    t.deepEqual(new MurmurHash(void(0), endian).update(text, enc).digest(),
                new MurmurHash(void(0), endian).update(Buffer.from(text, enc)).digest());
  });
  t.end();
});
t.test('should create hash from some random data', function(t) {
// Build a 1000-character random string (code points 0..32767) and its utf8
// Buffer rendition; `total` must count consumed bytes, not characters.
var data = '';
var strlen = 1000;
for (var i = 0; i < strlen; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var buffer = Buffer.from(data, 'utf8');
t.equal(new MurmurHash(void(0), endian).update(data).digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(data).total, buffer.length);
t.equal(new MurmurHash(void(0), endian).update(data).digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).total, buffer.length);
// The incremental hasher must agree with the one-shot murmurHash() function
// for both 'binary'-encoded strings and raw Buffers.
t.strictEqual(new MurmurHash(void(0), endian).update(data, 'binary').digest('number'),
new MurmurHash(void(0), endian).update(Buffer.from(data, 'binary')).digest('number'));
t.strictEqual(new MurmurHash(void(0), endian).update(data, 'binary').digest('number'), murmurHash(data, 'binary'));
t.strictEqual(new MurmurHash(void(0), endian).update(data).digest('number'),
new MurmurHash(void(0), endian).update(buffer).digest('number'));
t.strictEqual(new MurmurHash(void(0), endian).update(data).digest('number'), murmurHash(buffer));
// Seeds -1 and 4294967295 (2^32-1) must be interchangeable.
t.strictEqual(new MurmurHash(-1, endian).update(data).digest('number'),
new MurmurHash(-1, endian).update(buffer).digest('number'));
t.strictEqual(new MurmurHash(-1, endian).update(data).digest('number'), murmurHash(buffer, -1));
t.strictEqual(new MurmurHash(-1, endian).update(data).digest('number'),
new MurmurHash(4294967295, endian).update(buffer).digest('number'));
t.strictEqual(new MurmurHash(-1, endian).update(data).digest('number'), murmurHash(buffer, 4294967295));
t.strictEqual(new MurmurHash(4294967295, endian).update(data).digest('number'),
new MurmurHash(-1, endian).update(buffer).digest('number'));
t.strictEqual(new MurmurHash(4294967295, endian).update(data).digest('number'), murmurHash(buffer, -1));
// With a random seed: string and Buffer inputs hash alike. notStrictEqual is
// deliberate here -- two equal-content digests are still distinct Buffer
// objects, so identity comparison must fail while deepEqual succeeds.
var seed = (Math.random()*4294967296)|0;
t.notStrictEqual(new MurmurHash(seed, endian).update(data).digest(), new MurmurHash(seed, endian).update(buffer).digest('buffer'));
t.notStrictEqual(new MurmurHash(seed, endian).update(data).digest(), murmurHash(buffer, seed, 'buffer'));
t.deepEqual(new MurmurHash(seed, endian).update(data).digest('buffer'), new MurmurHash(seed, endian).update(buffer).digest());
t.deepEqual(new MurmurHash(seed, endian).update(data).digest('buffer'), murmurHash(buffer, seed, 'buffer'));
t.strictEqual(new MurmurHash(seed, endian).update(data).digest('number'), new MurmurHash(seed, endian).update(buffer).digest('number'));
t.strictEqual(new MurmurHash(seed, endian).update(data).digest('number'), murmurHash(buffer, seed));
t.end();
});
t.test('should not crash with utf8 characters in encoding string', function(t) {
  // A multi-byte garbage "encoding" name must be silently ignored by the
  // native layer rather than crashing it; hashing then behaves as utf8.
  t.notThrow(function() {
    new MurmurHash(void(0), endian)
      .update("łabądź",
              "\u1010\u1111\u1212\u1313\u1414\u1515\u1616\u1717",
              "\u1010\u1111\u1212\u1313\u1414\u1515\u1616\u1717")
      .digest();
  }, "ignores encoding");
  var expected = new MurmurHash(void(0), endian).update("łabądź", "utf8").digest("buffer");
  var actual = new MurmurHash(void(0), endian).update("łabądź").digest();
  t.deepEqual(actual, expected);
  t.type(actual, Buffer, 'hash is buffer');
  // And the result must match the precomputed reference digest.
  t.deepEqual(actual, Buffer.from(crashTestHex, 'hex'));
  t.end();
});
t.test('should write digest in the provided buffer', function(t) {
// digest(output[, offset[, length]]) writes the hash into `output` and
// returns that same buffer. Judging from the expected concatenations below,
// a negative offset counts back from the end of the buffer and a negative
// length selects the trailing bytes of the digest -- NOTE(review): inferred
// from these fixtures; confirm against the digest() API documentation.
var pad = Buffer.alloc(2, 42);
var output = Buffer.allocUnsafe(0);
// A zero-length target is returned untouched regardless of offset/length.
t.strictEqual(new MurmurHash(void(0), endian).digest(output), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, 3), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, 3, 3), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, 3, -3), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, -3), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, -3, 3), output);
t.strictEqual(new MurmurHash(void(0), endian).digest(output, -3, -3), output);
// Exact-size target: fully overwritten for each seed.
output = Buffer.allocUnsafe(size);
t.strictEqual(new MurmurHash(void(0), endian).digest(output), output);
t.deepEqual(output, seedZeroBuffer);
t.strictEqual(new MurmurHash(1, endian).digest(output), output);
t.deepEqual(output, seedPlusOneBuffer);
t.strictEqual(new MurmurHash(-1, endian).digest(output), output);
t.deepEqual(output, seedMinusOneBuffer);
// Partial writes into an exact-size target; output is re-filled with the
// pad byte (42) before each case so untouched bytes are detectable.
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 2), output);
t.deepEqual(output, Buffer.concat([pad, seedPlusOneBuffer.slice(0, -2)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 2, -size + 2), output);
t.deepEqual(output, Buffer.concat([pad, seedPlusOneBuffer.slice(2)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size-2), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer.slice(2), pad]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size, size - 2), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer.slice(0, -2), pad]));
// Target larger than the digest: unwritten bytes keep the pad value.
output = Buffer.alloc(size + 2, 42);
t.strictEqual(new MurmurHash(1, endian).digest(output), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer, pad]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 2), output);
t.deepEqual(output, Buffer.concat([pad, seedPlusOneBuffer]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size), output);
t.deepEqual(output, Buffer.concat([pad, seedPlusOneBuffer]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size-2), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer, pad]));
// Target smaller than the digest: the write is truncated to fit.
output = Buffer.alloc(size - 2, 42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 0), output);
t.deepEqual(output, seedPlusOneBuffer.slice(0, size - 2));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size), output);
t.deepEqual(output, seedPlusOneBuffer.slice(2));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 0, -size + 2), output);
t.deepEqual(output, seedPlusOneBuffer.slice(2));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -size + 2, -size + 2), output);
t.deepEqual(output, seedPlusOneBuffer.slice(2));
// Tiny 3-byte target exercising offset/length combinations together.
output = Buffer.alloc(3, 42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 0, 1), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer.slice(0, 1), pad]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 0, -3), output);
t.deepEqual(output, seedPlusOneBuffer.slice(-3));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -1), output);
t.deepEqual(output, Buffer.concat([pad, seedPlusOneBuffer.slice(0, 1)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -4, 3), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer.slice(1, 3), pad.slice(0, 1)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, -4, -3), output);
t.deepEqual(output, Buffer.concat([seedPlusOneBuffer.slice(-2), pad.slice(0, 1)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 1), output);
t.deepEqual(output, Buffer.concat([pad.slice(0, 1), seedPlusOneBuffer.slice(0, 2)]));
output.fill(42);
t.strictEqual(new MurmurHash(1, endian).digest(output, 1, -2), output);
t.deepEqual(output, Buffer.concat([pad.slice(0, 1), seedPlusOneBuffer.slice(-2)]));
t.end();
});
t.test('should create hash from some random data incrementally', function(t) {
// Feed 10007 random bytes in random-sized chunks (1..101 bytes each) to six
// incremental hashers: seeds 0, 1 and a random seed, each once with Buffer
// slices and once with the equivalent 'binary'-encoded strings.
var maxchunksize = 101;
var buffer = Buffer.allocUnsafe(10007);
var seed = (Math.random()*4294967296)|0;
var hasher0 = new MurmurHash(0, endian);
var hasher1 = new MurmurHash(1, endian);
var hasherS = new MurmurHash(seed, endian);
var hasher0str = new MurmurHash(0, endian);
var hasher1str = new MurmurHash(1, endian);
var hasherSstr = new MurmurHash(seed, endian);
var n = 0;
while(n < buffer.length) {
// Generate the next chunk of random bytes in place, then feed exactly that
// slice [p, n) to all six hashers.
var p = n;
var slicelen = (Math.random()*maxchunksize|0) + 1;
for(var j = 0; j < slicelen; ++j) {
if (n >= buffer.length) break;
buffer[n++] = (Math.random()*0x100)|0;
}
hasher0.update(buffer.slice(p, n));
hasher1.update(buffer.slice(p, n));
hasherS.update(buffer.slice(p, n));
hasher0str.update(buffer.slice(p, n).toString('binary'),'binary');
hasher1str.update(buffer.slice(p, n).toString('binary'),'binary');
hasherSstr.update(buffer.slice(p, n).toString('binary'),'binary');
}
// Every hasher must have consumed the whole input, byte for byte.
t.equal(n, buffer.length);
t.equal(n, 10007);
t.equal(hasher0.total, buffer.length);
t.equal(hasher1.total, buffer.length);
t.equal(hasherS.total, buffer.length);
t.equal(hasher0str.total, buffer.length);
t.equal(hasher1str.total, buffer.length);
t.equal(hasherSstr.total, buffer.length);
// One-shot digests of the same data for reference ('binary' round-trips the
// raw bytes through a string).
var data = buffer.toString('binary');
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').total, buffer.length);
t.equal(new MurmurHash(void(0), endian).update(data, 'binary').digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest().length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).digest('buffer').length, size);
t.equal(new MurmurHash(void(0), endian).update(buffer).total, buffer.length);
t.strictEqual(new MurmurHash(void(0), endian).update(data, 'binary').digest('number'),
new MurmurHash(void(0), endian).update(buffer).digest('number'));
// Different seeds must disagree; string and Buffer feeds of the same seed
// must agree; and every incremental result must match one-shot hashing.
var d0 = hasher0.digest('number');
var d1 = hasher1.digest('number');
var dS = hasherS.digest('number');
var d0str = hasher0str.digest('number');
var d1str = hasher1str.digest('number');
var dSstr = hasherSstr.digest('number');
t.notStrictEqual(d0, d1);
t.notStrictEqual(d0, dS);
t.strictEqual(d0, d0str);
t.strictEqual(d1, d1str);
t.strictEqual(dS, dSstr);
t.strictEqual(d0, new MurmurHash(void(0), endian).update(buffer).digest('number'));
t.strictEqual(d0, new MurmurHash(void(0), endian).update(data, 'binary').digest('number'));
t.strictEqual(d0, murmurHash(buffer));
t.strictEqual(d0, murmurHash(data));
t.strictEqual(d1, new MurmurHash(1, endian).update(buffer).digest('number'));
t.strictEqual(d1, new MurmurHash(1, endian).update(data, 'binary').digest('number'));
t.strictEqual(d1, murmurHash(buffer, 1));
t.strictEqual(d1, murmurHash(data, 1));
t.strictEqual(dS, new MurmurHash(seed, endian).update(buffer).digest('number'));
t.strictEqual(dS, new MurmurHash(seed, endian).update(data, 'binary').digest('number'));
t.strictEqual(dS, murmurHash(buffer, seed));
t.strictEqual(dS, murmurHash(data, seed));
t.end();
});
// Verifies copy(): copying hasher state into another instance, and constructing
// a new hasher directly from an existing one. Hashers are stateful (update()
// accumulates), so the exact statement order below matters.
t.test('should copy internal state and create instance from copy', function(t) {
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
var hasher0 = new MurmurHash(seed, endian).update('foo');
// copy() must reject any target that is not a hasher of the same type.
t.throws(function() { hasher0.copy(); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy([]); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy({}); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy(0); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy(true); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy(false); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
t.throws(function() { hasher0.copy(null); }, new TypeError("Target must be another instance of the same murmur hash type utility"));
// ...and copying an instance onto itself is an error.
t.throws(function() { hasher0.copy(hasher0); }, new Error("Target must not be the same instance"));
// Constructing from an existing hasher clones its state and endianness.
var hasher1 = new MurmurHash(hasher0);
t.notStrictEqual(hasher1, hasher0);
t.strictEqual(hasher1.endianness, hasher0.endianness);
t.strictEqual(hasher0.digest('hex'), hasher1.digest('hex'));
// Both clones continue independently and stay in sync after further updates.
t.strictEqual(hasher0.update('bar').digest('hex'), hasher1.update('bar').digest('hex'));
t.strictEqual(hasher0.digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
t.strictEqual(hasher1.digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
// Copying into a target with the OPPOSITE endianness transplants the state
// but must leave the target's endianness setting untouched.
var hasher2 = new MurmurHash(0, endian === 'LE' ? 'BE' : 'LE');
t.strictEqual(hasher2.digest('hex'), seedZeroHex);
t.notStrictEqual(hasher2.endianness, hasher0.endianness);
t.strictEqual(hasher0.copy(hasher2), hasher2);
t.notStrictEqual(hasher2.endianness, hasher0.endianness);
var digest2 = hasher2.digest('hex');
var digest = new MurmurHash(seed, endian).update('foobar').digest();
// byteOrderSwap is a helper defined elsewhere in this file; presumably it
// swaps the digest's word byte order — TODO confirm against its definition.
t.strictEqual(digest2, byteOrderSwap(digest, wordBits, 0, size).toString('hex'));
// Switching the target back to the source's endianness yields identical digests.
hasher2.endianness = hasher0.endianness;
t.strictEqual(hasher2.endianness, hasher0.endianness);
t.strictEqual(hasher2.digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
t.notStrictEqual(new MurmurHash(seed, endian).update('foobar').digest('hex'), seedZeroHex);
t.end();
});
// Verifies serialize(): round-tripping hasher state through both the string
// and the binary serial formats, and rejection of invalid serialized data.
t.test('should serialize internal state and create instance from serial', function(t) {
  // Random non-zero 32-bit seed so each run exercises a different state.
  var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
  var hasher0 = new MurmurHash(seed, endian).update('foo');
  // serialize() must reject target buffers/offsets the state cannot fit in.
  t.throws(function() { hasher0.serialize(Buffer.alloc(0)); }, new Error("Serialized state does not fit in the provided buffer at the given offset"));
  t.throws(function() { hasher0.serialize(Buffer.allocUnsafe(1000), -1); }, new Error("Serialized state does not fit in the provided buffer at the given offset"));
  t.throws(function() { hasher0.serialize(Buffer.allocUnsafe(1000), 998); }, new Error("Serialized state does not fit in the provided buffer at the given offset"));
  // The constructor must reject serialized strings of the wrong length...
  t.throws(function() { new MurmurHash('', endian); }, new TypeError("Incorrect size of the serialized string"));
  t.throws(function() { new MurmurHash('1234567890abcdef1', endian); }, new TypeError("Incorrect size of the serialized string"));
  // ...and buffers that are invalid or too short.
  var buffer = Buffer.alloc(50, 0);
  t.throws(function() { new MurmurHash(buffer, endian); }, new TypeError("Incorrect serialized data"));
  t.throws(function() { new MurmurHash(Buffer.allocUnsafe(11), endian); }, new TypeError("Incorrect size of the serialized data"));
  var serial0 = hasher0.serialize();
  t.type(serial0, 'string');
  // A zeroed payload of the correct length must still be rejected.
  buffer = Buffer.alloc(serial0.length, 0);
  t.throws(function() { new MurmurHash(buffer.toString('binary'), endian); }, new TypeError("Incorrect serialized string"));
  t.throws(function() { new MurmurHash(buffer, endian); }, new TypeError("Incorrect serialized data"));
  // Binary serialization into a caller-provided buffer returns that buffer.
  var serial0bin = Buffer.allocUnsafe(hasher0.SERIAL_BYTE_LENGTH);
  t.strictEqual(hasher0.serialize(serial0bin), serial0bin);
  // An instance restored from the string serial behaves like the original,
  // including after further updates on both.
  var hasher1 = new MurmurHash(serial0, endian);
  t.notStrictEqual(hasher1, hasher0);
  t.strictEqual(hasher1.digest('hex'), hasher0.digest('hex'));
  t.strictEqual(hasher0.digest('hex'), new MurmurHash(seed, endian).update('foo').digest('hex'));
  t.strictEqual(hasher1.update('bar').digest('hex'), hasher0.update('bar').digest('hex'));
  t.strictEqual(hasher0.digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
  t.strictEqual(hasher1.digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
  // An instance restored from the binary serial behaves the same way.
  var hasher2 = new MurmurHash(serial0bin, endian);
  t.strictEqual(hasher2.digest('hex'), new MurmurHash(seed, endian).update('foo').digest('hex'));
  t.strictEqual(hasher2.update('bar').digest('hex'), new MurmurHash(seed, endian).update('foobar').digest('hex'));
  // Flip every bit of the binary serial one at a time: the fresh, untouched
  // copy must deserialize to the original state, while the corrupted copy
  // must be rejected.
  buffer = Buffer.allocUnsafe(serial0bin.length);
  for(var i = 0; i < serial0bin.length; ++i) {
    for(var n = 1; n < 0x100; n <<= 1) {
      serial0bin.copy(buffer);
      // Fix: validate the fresh copy (`buffer`) rather than `serial0bin`,
      // so the pre-corruption state of the buffer under test is actually
      // checked before its bit is flipped below.
      t.strictEqual(new MurmurHash(buffer, endian).update('bar').digest('hex'),
                    new MurmurHash(seed, endian).update('foobar').digest('hex'));
      buffer[i] = buffer[i] ^ n;
      t.throws(function() { new MurmurHash(buffer, endian); }, new TypeError("Incorrect serialized data"));
    }
  }
  t.end();
});
t.end();
});
});

View File

@ -1,656 +0,0 @@
"use strict";
var os = require("os")
, test = require("tap").test
, hash = require('..')
;
// Every hash function must be exported (with its proper name) from the root
// namespace and from the BE, LE and platform sub-namespaces, and `platform`
// must alias the namespace matching the host byte order.
test("should have murmurHash functions", function(t) {
  var functionNames = [
    'murmurHash',
    'murmurHash32',
    'murmurHash64',
    'murmurHash64x64',
    'murmurHash64x86',
    'murmurHash128x64',
    'murmurHash128',
    'murmurHash128x86'
  ];
  var namespaces = [hash, hash.BE, hash.LE, hash.platform];
  functionNames.forEach(function(name) {
    namespaces.forEach(function(ns) {
      t.type(ns[name], 'function');
      t.strictEqual(ns[name].name, name);
    });
  });
  t.strictEqual(hash.platform, hash[os.endianness()]);
  t.end();
});
[
[4, 'murmurHash', hash.murmurHash, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'50191cd1'],
[4, 'murmurHash', hash.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'50191cd1'],
[4, 'murmurHash', hash.BE.murmurHash, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'50191cd1'],
[4, 'murmurHash', hash.BE.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7',
'50191cd1'],
[4, 'murmurHash', hash.LE.murmurHash, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51',
'd11c1950'],
[4, 'murmurHash', hash.LE.murmurHash32, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51',
'd11c1950'],
[8, 'murmurHash64x64', hash.murmurHash64x64,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'e5718ae073beb171'],
[8, 'murmurHash64x64', hash.BE.murmurHash64x64,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'e5718ae073beb171'],
[8, 'murmurHash64x64', hash.LE.murmurHash64x64,
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6',
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6',
'71b1be73e08a71e5'],
[8, 'murmurHash64x86', hash.murmurHash64x86,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'20010cf69cfc8f5b'],
[8, 'murmurHash64x86', hash.BE.murmurHash64x86,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'20010cf69cfc8f5b'],
[8, 'murmurHash64x86', hash.LE.murmurHash64x86,
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd',
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd',
'5b8ffc9cf60c0120'],
[16, 'murmurHash128x64', hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'56ca212715c05cb53aa4737dfb49076f'],
[16, 'murmurHash128x64', hash.BE.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'56ca212715c05cb53aa4737dfb49076f'],
[16, 'murmurHash128x64', hash.LE.murmurHash128x64,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'b55cc0152721ca566f0749fb7d73a43a'],
[16, 'murmurHash128x86', hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'09cc949d260a8dd2de241315de241315'],
[16, 'murmurHash128x86', hash.BE.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'09cc949d260a8dd2de241315de241315'],
[16, 'murmurHash128x86', hash.LE.murmurHash128x86,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'9d94cc09d28d0a26151324de151324de']
].forEach(function(args) {
var size = args[ 0]
, label = args[ 1]
, murmurHash = args[ 2]
, seedZeroDefault = args[ 3]
, seedMinusOneDefault = args[ 4]
, seedPlusOneDefault = args[ 5]
, seedZeroHex = args[ 6]
, seedMinusOneHex = args[ 7]
, seedPlusOneHex = args[ 8]
, crashTestHex = args[ 9]
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
, seedZeroBase64 = seedZeroBuffer.toString('base64')
, seedMinusOneBase64 = seedMinusOneBuffer.toString('base64')
, seedPlusOneBase64 = seedPlusOneBuffer.toString('base64')
, seedZeroBinary = seedZeroBuffer.toString('binary')
, seedMinusOneBinary = seedMinusOneBuffer.toString('binary')
, seedPlusOneBinary = seedPlusOneBuffer.toString('binary')
;
test(label, function(t) {
t.type(murmurHash, 'function');
// Invalid inputs, encodings and output types must all raise TypeError.
t.test('should throw error for bad arguments', function(t) {
  // Anything that is not a string or Buffer is rejected as input data.
  t.throws(function() { murmurHash(); }, new TypeError("string or Buffer is required") );
  [{}, [], void(0), null, true, false, 0, 1, -1, new Date()].forEach(function(badInput) {
    t.throws(function() { murmurHash(badInput); }, new TypeError("string or Buffer is required") );
  });
  // Unknown input encoding names are rejected.
  ["abcdefghijklmno", "123456", "12345", "1234", "123", ""].forEach(function(badEncoding) {
    t.throws(function() { murmurHash("", badEncoding); }, new TypeError("\"encoding\" must be a valid string encoding") );
  });
  // Unknown output types are rejected for both Buffer and string input.
  t.throws(function() { murmurHash(Buffer.alloc(0), 0, ""); }, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") );
  t.throws(function() { murmurHash(Buffer.alloc(0), ""); }, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") );
  ["", "mumber", "xxxxxxx", "utf-8"].forEach(function(badType) {
    t.throws(function() { murmurHash("", 0, badType); }, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") );
  });
  t.end();
});
// Hashing empty input must yield the known per-seed default number, for both
// string and Buffer input, with and without an explicit 'number' output type.
t.test('should create number hash from empty data', function(t) {
  // No seed argument: defaults to seed 0.
  t.strictEqual(murmurHash(''), seedZeroDefault);
  t.strictEqual(murmurHash('', 'number'), seedZeroDefault);
  t.strictEqual(murmurHash(Buffer.from('')), seedZeroDefault);
  t.strictEqual(murmurHash(Buffer.from(''), 'number'), seedZeroDefault);
  // Explicit seeds: -1 aliases 4294967295; 4294967296 wraps around to 0.
  [ [-1, seedMinusOneDefault],
    [4294967295, seedMinusOneDefault],
    [4294967296, seedZeroDefault],
    [1, seedPlusOneDefault]
  ].forEach(function(row) {
    var seed = row[0];
    var expected = row[1];
    t.strictEqual(murmurHash('', seed), expected);
    t.strictEqual(murmurHash('', seed, 'number'), expected);
    t.strictEqual(murmurHash(Buffer.from(''), seed), expected);
    t.strictEqual(murmurHash(Buffer.from(''), seed, 'number'), expected);
  });
  t.end();
});
// Hashing empty input with 'buffer' output must yield the known per-seed
// digest bytes, for both string and Buffer input.
t.test('should create buffer hash from empty data', function(t) {
  // Seed 0: the Buffer-input form relies on the implicit default seed.
  t.deepEqual(murmurHash('', 0, 'buffer'), seedZeroBuffer);
  t.strictEqual(murmurHash('', 0, 'buffer').toString('hex'), seedZeroHex);
  t.deepEqual(murmurHash(Buffer.from(''), 'buffer'), seedZeroBuffer);
  t.strictEqual(murmurHash(Buffer.from(''), 'buffer').toString('hex'), seedZeroHex);
  // Explicit seeds: -1 aliases 4294967295; 4294967296 wraps around to 0.
  [ [-1, seedMinusOneBuffer, seedMinusOneHex],
    [4294967295, seedMinusOneBuffer, seedMinusOneHex],
    [4294967296, seedZeroBuffer, seedZeroHex],
    [1, seedPlusOneBuffer, seedPlusOneHex]
  ].forEach(function(row) {
    var seed = row[0];
    var expectedBuffer = row[1];
    var expectedHex = row[2];
    t.deepEqual(murmurHash('', seed, 'buffer'), expectedBuffer);
    t.strictEqual(murmurHash('', seed, 'buffer').toString('hex'), expectedHex);
    t.deepEqual(murmurHash(Buffer.from(''), seed, 'buffer'), expectedBuffer);
    t.strictEqual(murmurHash(Buffer.from(''), seed, 'buffer').toString('hex'), expectedHex);
  });
  t.end();
});
// Hashing empty input with each string output type must yield the known
// per-seed encoded digest, for both string and Buffer input.
t.test('should create string encoded hash from empty data', function(t) {
  [ ['hex', seedZeroHex, seedMinusOneHex, seedPlusOneHex],
    ['base64', seedZeroBase64, seedMinusOneBase64, seedPlusOneBase64],
    ['binary', seedZeroBinary, seedMinusOneBinary, seedPlusOneBinary]
  ].forEach(function(row) {
    var outputType = row[0];
    var zeroExpected = row[1];
    var minusOneExpected = row[2];
    var plusOneExpected = row[3];
    // Seed 0 (explicit for string input, implicit for Buffer input).
    t.strictEqual(murmurHash('', 0, outputType), zeroExpected);
    t.strictEqual(murmurHash(Buffer.from(''), outputType), zeroExpected);
    // -1 and 4294967295 are the same 32-bit seed value.
    t.strictEqual(murmurHash('', -1, outputType), minusOneExpected);
    t.strictEqual(murmurHash(Buffer.from(''), -1, outputType), minusOneExpected);
    t.strictEqual(murmurHash('', 4294967295, outputType), minusOneExpected);
    t.strictEqual(murmurHash(Buffer.from(''), 4294967295, outputType), minusOneExpected);
    // 4294967296 wraps around to seed 0.
    t.strictEqual(murmurHash('', 4294967296, outputType), zeroExpected);
    t.strictEqual(murmurHash(Buffer.from(''), 4294967296, outputType), zeroExpected);
    t.strictEqual(murmurHash('', 1, outputType), plusOneExpected);
    t.strictEqual(murmurHash(Buffer.from(''), 1, outputType), plusOneExpected);
  });
  t.end();
});
// Hashing a string with an explicit encoding must agree with hashing the
// Buffer produced by the same encoding; the default encoding is 'binary'.
t.test('should utilize different string input encodings', function(t) {
  var string = "\u1220łóżko"
    , base64 = 'IELzfGtv'
    , hex = '2042f37c6b6f'
    , hash = murmurHash(string);
  // Default string handling matches a 'binary'-encoded Buffer.
  t.strictEqual(hash,
                murmurHash(Buffer.from(string, 'binary')));
  t.strictEqual(murmurHash(string, 'ascii'),
                murmurHash(Buffer.from(string, 'ascii')));
  t.strictEqual(murmurHash(string, 'ascii'), hash);
  t.strictEqual(murmurHash(string, 'binary'),
                murmurHash(Buffer.from(string, 'binary')));
  t.strictEqual(murmurHash(string, 'binary'), hash);
  // Multi-byte encodings (including alias spellings) match Buffer.from.
  ['utf8', 'utf-8', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le'].forEach(function(encoding) {
    t.strictEqual(murmurHash(string, encoding),
                  murmurHash(Buffer.from(string, encoding)));
  });
  // base64/hex spellings of the same bytes hash identically to the default.
  t.strictEqual(murmurHash(base64, 'base64'), hash);
  t.strictEqual(murmurHash(base64, 'base64'),
                murmurHash(Buffer.from(base64, 'base64')));
  t.strictEqual(murmurHash(hex, 'hex'), hash);
  t.strictEqual(murmurHash(hex, 'hex'),
                murmurHash(Buffer.from(hex, 'hex')));
  t.end();
});
// String input and its 'binary' Buffer equivalent must hash identically for
// random data, across default, -1/4294967295 and random seeds.
t.test('should create hash from some random data', function(t) {
  // Build 1000 random characters with code points in [0, 32768).
  var chars = [];
  while (chars.length < 1000) {
    chars.push(String.fromCharCode((Math.random()*32768)|0));
  }
  var data = chars.join('');
  var buffer = Buffer.from(data, 'binary');
  t.equal(murmurHash(data, 0, 'buffer').length, size);
  t.equal(murmurHash(buffer, 'buffer').length, size);
  t.strictEqual(murmurHash(data, 'utf8'), murmurHash(Buffer.from(data, 'utf8')));
  t.strictEqual(murmurHash(data), murmurHash(buffer));
  // -1 and 4294967295 denote the same 32-bit seed.
  t.strictEqual(murmurHash(data, -1), murmurHash(buffer, -1));
  t.strictEqual(murmurHash(data, -1), murmurHash(buffer, 4294967295));
  t.strictEqual(murmurHash(data, 4294967295), murmurHash(buffer, -1));
  var seed = (Math.random()*4294967296)|0;
  // Equal-content buffers are distinct objects but deep-equal.
  t.notStrictEqual(murmurHash(data, seed, 'buffer'), murmurHash(buffer, seed, 'buffer'));
  t.deepEqual(murmurHash(data, seed, 'buffer'), murmurHash(buffer, seed, 'buffer'));
  t.strictEqual(murmurHash(data, seed), murmurHash(buffer, seed));
  t.end();
});
// Passing multi-byte garbage as the encoding/output-type names must raise a
// TypeError rather than crash, and a valid call still matches the test vector.
t.test('should not crash with utf8 characters in encoding string', function(t) {
  t.throws(function() {
    murmurHash("łabądź",
               "\u1010\u1111\u1212\u1313\u1414\u1515\u1616\u1717",
               "\u1010\u1111\u1212\u1313\u1414\u1515\u1616\u1717");
  }, new TypeError("\"encoding\" must be a valid string encoding"));
  // Default encoding is 'binary', so these two calls must agree.
  var expected = murmurHash("łabądź", "binary", "buffer");
  var result = murmurHash("łabądź", "buffer");
  t.deepEqual(result, expected);
  t.type(result, Buffer, 'hash is buffer');
  // The digest must match the precomputed vector for this hash variant.
  t.deepEqual(result, Buffer.from(crashTestHex, 'hex'));
  t.end();
});
// With only a data argument, the seed defaults to 0 for both input kinds.
t.test('should interpret 1 argument properly', function(t) {
  ['', Buffer.alloc(0)].forEach(function(input) {
    t.strictEqual(murmurHash(input), seedZeroDefault);
  });
  t.end();
});
// Verifies flexible positional argument handling when two (plus up to two
// extra) arguments are given: the second argument may be a seed, an input
// encoding, an output type, or an output Buffer. The buf/buf2/... sections
// are order-sensitive: each buffer is deliberately refilled and reused.
t.test('should interpret 2[+2] arguments properly', function(t) {
// Second argument as a numeric seed.
t.strictEqual(murmurHash('', 0), seedZeroDefault);
t.strictEqual(murmurHash('', -1), seedMinusOneDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), 0), seedZeroDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), -1), seedMinusOneDefault);
// Seed and output type may appear in either order after a Buffer input.
t.strictEqual(murmurHash(Buffer.alloc(0), 0, 'base64'), seedZeroBase64);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, 'base64'), seedMinusOneBase64);
t.strictEqual(murmurHash(Buffer.alloc(0), 'base64', 0), seedZeroBase64);
t.strictEqual(murmurHash(Buffer.alloc(0), 'base64', -1), seedMinusOneBase64);
// Second argument as an input encoding: must match the equivalent Buffer.
t.strictEqual(murmurHash('\u1234', 'ucs2'), murmurHash(Buffer.from('\u1234', 'ucs2')));
t.strictEqual(murmurHash('\u1234', 'utf8'), murmurHash(Buffer.from('\u1234', 'utf8')));
t.strictEqual(murmurHash('\u1234', 'ascii'), murmurHash(Buffer.from('\u1234', 'ascii')));
t.strictEqual(murmurHash('\u1234', 'binary'), murmurHash(Buffer.from('\u1234', 'binary')));
t.strictEqual(murmurHash('/w==', 'base64'), murmurHash(Buffer.from('/w==', 'base64')));
t.strictEqual(murmurHash('ff', 'hex'), murmurHash(Buffer.from('ff', 'hex')));
// 'number'/'buffer' after a string input are output types, not encodings.
t.strictEqual(murmurHash('ó', 'number'), murmurHash(Buffer.from('ó', 'binary')));
t.deepEqual(murmurHash('ą', 'buffer'), murmurHash(Buffer.from('ą', 'binary'), 0, 'buffer'));
// For Buffer input the second argument is always an output type.
t.strictEqual(murmurHash(Buffer.from([0xFF]), 'hex'),
murmurHash(Buffer.from('\u12FF', 'binary'), 'hex'));
t.strictEqual(murmurHash(Buffer.from([0xFF]), 'number'),
murmurHash(Buffer.from('\u12FF', 'binary'), 'number'));
t.strictEqual(murmurHash(Buffer.from([0xFF]), 'binary'),
murmurHash(Buffer.from('\u12FF', 'binary'), 'binary'));
t.deepEqual(murmurHash(Buffer.from([0xFF]), 'buffer'),
murmurHash(Buffer.from('\u12FF', 'binary'), 'buffer'));
// Second argument as an output Buffer: the digest is written in place and
// the same Buffer is returned.
var buf = Buffer.alloc(size, -1);
t.strictEqual(murmurHash('', buf), buf);
t.deepEqual(buf, Buffer.from(seedZeroHex, 'hex'));
buf.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), buf), buf);
t.deepEqual(buf, Buffer.from(seedZeroHex, 'hex'));
// Output Buffer with a positive write offset (first two bytes untouched).
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash('', buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedZeroHex, 'hex')]));
buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedZeroHex, 'hex')]));
// Negative offset: appears to be counted from the end of the digest, so the
// first digest byte is dropped — NOTE(review): confirm against the API docs.
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedZeroHex, 'hex').slice(1));
buf3.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedZeroHex, 'hex').slice(1));
// Offset plus explicit length: only `length` digest bytes are written,
// the rest of the target keeps its fill value (bufpad).
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash('', buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedZeroHex, 'hex').slice(0, 3),
bufpad]));
buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedZeroHex, 'hex').slice(0, 3),
bufpad]));
// Negative offset combined with negative length.
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedZeroHex, 'hex').slice(size - 3 + 1),
bufpad]));
buf5.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedZeroHex, 'hex').slice(size - 3 + 1),
bufpad]));
t.end();
});
// Verifies argument handling when three (plus up to two extra) arguments are
// given: combinations of encoding, seed, output type, null placeholders and
// output Buffers with offset/length. The buffer sections reuse and refill
// their targets, so statement order matters.
t.test('should interpret 3[+2] arguments properly', function(t) {
// When both a seed-like and a 0/ignored value are present, the last seed wins
// or is ignored depending on position; null placeholders fall back to defaults.
t.strictEqual(murmurHash('', -1, 0), seedZeroDefault);
t.strictEqual(murmurHash('', null, 'base64'), seedZeroBase64);
t.strictEqual(murmurHash('', -1, null), seedMinusOneDefault);
t.strictEqual(murmurHash('', -1, 'number'), seedMinusOneDefault);
t.strictEqual(murmurHash('', -1, 'number', 1), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'number', -1), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'number', -1, 1), seedMinusOneDefault);
t.deepEqual(murmurHash('', -1, 'buffer'), Buffer.from(seedMinusOneHex, 'hex'));
t.deepEqual(murmurHash('', -1, 'buffer', 1), Buffer.from(seedMinusOneHex, 'hex'));
t.strictEqual(murmurHash('', 1, -1), seedMinusOneDefault);
t.strictEqual(murmurHash('', null, -1), seedMinusOneDefault);
// Same combinations with Buffer input.
t.strictEqual(murmurHash(Buffer.alloc(0), -1, 0), seedZeroDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, null), seedMinusOneDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, 'number'), seedMinusOneDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, 'number', 1), seedMinusOneDefault);
t.deepEqual(murmurHash(Buffer.alloc(0), -1, 'buffer'), Buffer.from(seedMinusOneHex, 'hex'));
t.deepEqual(murmurHash(Buffer.alloc(0), -1, 'buffer', 1), Buffer.from(seedMinusOneHex, 'hex'));
t.deepEqual(murmurHash(Buffer.alloc(0), 1, -1), seedMinusOneDefault);
t.deepEqual(murmurHash(Buffer.alloc(0), null, -1), seedMinusOneDefault);
// Encoding + seed for string input; output type + seed for Buffer input.
t.strictEqual(murmurHash('\u1234', 'utf8', 100), murmurHash(Buffer.from('\u1234', 'utf8'), 100));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'number', 100),
murmurHash(Buffer.from('\u1234', 'binary'), 100));
// Encoding + output type (seed defaults to 0); trailing extras are ignored.
t.deepEqual(murmurHash('\u1234', 'utf8', 'buffer'),
murmurHash(Buffer.from('\u1234', 'utf8'), 0, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'buffer'),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
t.deepEqual(murmurHash('\u1234', 'utf8', 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'utf8'), 0, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'ignore', 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
// null placeholders in encoding/seed positions fall back to defaults.
t.deepEqual(murmurHash('\u1234', null, 'buffer'),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 'buffer'),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
t.deepEqual(murmurHash('\u1234', null, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0, 'buffer'));
t.strictEqual(murmurHash('\u1234', null, null),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, null),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
t.strictEqual(murmurHash('\u1234', null, null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
t.strictEqual(murmurHash('\u1234', 'utf8', null),
murmurHash(Buffer.from('\u1234', 'utf8'), 0));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'number', null),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
t.strictEqual(murmurHash('\u1234', 'utf8', null, -1),
murmurHash(Buffer.from('\u1234', 'utf8'), 0));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'number', null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 0));
// Seed followed by an output Buffer: digest written in place, same Buffer
// returned (mirrors the 2-argument Buffer cases, but with seed -1).
var buf = Buffer.alloc(size, -1);
t.strictEqual(murmurHash('', -1, buf), buf);
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
buf.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf), buf);
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
// Output Buffer with positive write offset.
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash('', -1, buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
// Negative offset: appears to drop the first digest byte — NOTE(review):
// confirm the negative-offset semantics against the API docs.
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', -1, buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
buf3.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
// Offset plus explicit length: only `length` bytes are written, the rest of
// the target keeps its fill value (bufpad).
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash('', -1, buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
// Negative offset combined with negative length.
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', -1, buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
buf5.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
t.end();
});
// Exercises murmurHash called with 4 positional arguments plus up to 2 extra
// trailing ones. NOTE(review): the expected values below encode the native
// argument-resolution rules of murmurhash-native (which argument is taken as
// encoding, seed, output type, output buffer, offset and length); the comments
// only group the cases — confirm the rules against the module's API docs.
t.test('should interpret 4[+2] arguments properly', function(t) {
// --- string input: 4th argument as output type (falsy / 'number' / 'buffer'),
// --- with an extra 5th argument that must not change the result.
t.strictEqual(murmurHash('', 'utf8', -1, 0), seedZeroDefault);
t.strictEqual(murmurHash('', 'utf8', -1, null), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'utf8', -1, null, 1), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'utf8', -1, 'number'), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'utf8', -1, 'number', 1), seedMinusOneDefault);
t.deepEqual(murmurHash('', 'utf8', -1, 'buffer'), Buffer.from(seedMinusOneHex, 'hex'));
t.deepEqual(murmurHash('', 'utf8', -1, 'buffer', 1), Buffer.from(seedMinusOneHex, 'hex'));
t.strictEqual(murmurHash('', 'utf8', 1, -1), seedMinusOneDefault);
t.strictEqual(murmurHash('', 'utf8', null, -1), seedZeroDefault);
// --- Buffer input: the 2nd argument is then the output type, 4th is ignored.
t.strictEqual(murmurHash(Buffer.alloc(0), 'hex', -1, 0), seedMinusOneHex);
t.strictEqual(murmurHash(Buffer.alloc(0), 'hex', -1, null), seedMinusOneHex);
t.strictEqual(murmurHash(Buffer.alloc(0), 'hex', -1, null, 1), seedMinusOneHex);
t.strictEqual(murmurHash(Buffer.alloc(0), 'hex', -1, 'ignore'), seedMinusOneHex);
t.strictEqual(murmurHash(Buffer.alloc(0), 'hex', -1, 'ignore', 1), seedMinusOneHex);
t.deepEqual(murmurHash(Buffer.alloc(0), 'buffer', -1, 'ignore'), Buffer.from(seedMinusOneHex, 'hex'));
t.deepEqual(murmurHash(Buffer.alloc(0), 'buffer', -1, 'ignore', 1), Buffer.from(seedMinusOneHex, 'hex'));
t.strictEqual(murmurHash(Buffer.alloc(0), 'number', 1, -1), seedPlusOneDefault);
t.strictEqual(murmurHash(Buffer.alloc(0), 'number', null, -1), seedZeroDefault);
// --- a string must hash identically to the equivalently encoded Buffer,
// --- regardless of the trailing extra argument.
t.deepEqual(murmurHash('\u1234', 'utf8', 100, 'buffer'),
murmurHash(Buffer.from('\u1234', 'utf8'), 100, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 100, 'ignore'),
murmurHash(Buffer.from('\u1234', 'binary'), 100, 'buffer'));
t.deepEqual(murmurHash('\u1234', 'utf8', 100, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'utf8'), 100, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 100, 'ignore', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 100, 'buffer'));
t.deepEqual(murmurHash('\u1234', 'utf8', 0, 'buffer'),
murmurHash(Buffer.from('\u1234', 'utf8'), 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 0, 'ignore'),
murmurHash(Buffer.from('\u1234', 'binary'), 'buffer'));
t.deepEqual(murmurHash('\u1234', 'utf8', 0, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'utf8'), 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'buffer', 0, 'ignore', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 'buffer'));
// --- null encoding: a string falls back to 'binary' encoding.
t.deepEqual(murmurHash('\u1234', null, 1, 'buffer'),
murmurHash(Buffer.from('\u1234', 'binary'), 1, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 1, 'buffer'),
murmurHash(Buffer.from('\u1234', 'binary'), 1, 'buffer'));
t.deepEqual(murmurHash('\u1234', null, 1, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 1, 'buffer'));
t.deepEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 1, 'buffer', -1),
murmurHash(Buffer.from('\u1234', 'binary'), 1, 'buffer'));
t.strictEqual(murmurHash('\u1234', null, 1, null),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 1, null),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
t.strictEqual(murmurHash('\u1234', null, 1, null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), null, 1, null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
t.strictEqual(murmurHash('\u1234', 'utf8', 1, null),
murmurHash(Buffer.from('\u1234', 'utf8'), 1));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'number', 1, null),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
t.strictEqual(murmurHash('\u1234', 'utf8', 1, null, -1),
murmurHash(Buffer.from('\u1234', 'utf8'), 1));
t.strictEqual(murmurHash(Buffer.from('\u1234', 'binary'), 'number', 1, null, -1),
murmurHash(Buffer.from('\u1234', 'binary'), 1));
// --- output written into a caller-supplied Buffer: the same buffer object is
// --- returned and filled with the digest bytes.
var buf = Buffer.alloc(size, -1);
t.strictEqual(murmurHash('', 'utf8', -1, buf), buf);
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
buf.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf, 0), buf);
t.deepEqual(buf, Buffer.from(seedMinusOneHex, 'hex'));
// positive offset 2: the two bytes before the offset remain untouched.
var buf2 = Buffer.allocUnsafe(size + 2); buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash('', 'binary', -1, buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
buf2.fill(0, 0, 2); buf2.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf2, 2), buf2);
t.deepEqual(buf2, Buffer.concat([Buffer.from([0,0]), Buffer.from(seedMinusOneHex, 'hex')]));
// negative offset -size on a (size - 1) buffer: the digest is clipped at the
// front — only its trailing bytes land in the buffer.
var buf3 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', 'ascii', -1, buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
buf3.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf3, -size), buf3);
t.deepEqual(buf3, Buffer.from(seedMinusOneHex, 'hex').slice(1));
// offset 2 with positive length 3: only the first 3 digest bytes are stored;
// the rest of the buffer keeps its 0xff fill (bufpad).
var bufpad = Buffer.alloc(size - 3, -1);
var buf4 = Buffer.allocUnsafe(size + 2); buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash('', 'ucs2', -1, buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
buf4.fill(0, 0, 2); buf4.fill(-1, 2);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf4, 2, 3), buf4);
t.deepEqual(buf4, Buffer.concat([Buffer.from([0,0]),
Buffer.from(seedMinusOneHex, 'hex').slice(0, 3),
bufpad]));
// negative offset with negative length -3: the trailing digest bytes are
// stored at the front of the buffer.
var buf5 = Buffer.alloc(size - 1, -1);
t.strictEqual(murmurHash('', 'hex', -1, buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
buf5.fill(-1);
t.strictEqual(murmurHash(Buffer.alloc(0), -1, buf5, -size, -3), buf5);
t.deepEqual(buf5, Buffer.concat([Buffer.from(seedMinusOneHex, 'hex').slice(size - 3 + 1),
bufpad]));
t.end();
});
// Verifies that murmurHash can write its digest into the very buffer it is
// reading the input from (overlapping input/output), producing the same
// digest as hashing a separate copy of the data.
t.test('should write hash into the same buffer it is reading from', function(t) {
// Random payload of 1000 UTF-16 code units and a random 32-bit seed, so each
// run exercises a different input (results are cross-checked, not hardcoded).
var data = '';
for (var i = 0; i < 1000; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var databuf = Buffer.from(data, 'utf8');
var seed = (Math.random() * 0x100000000)|0;
// Digest appended right after the input region of the same buffer.
var buf = Buffer.concat([databuf, Buffer.allocUnsafe(size)]); buf.fill(-1, databuf.length);
t.strictEqual(murmurHash(buf.slice(0, databuf.length), seed, buf, databuf.length), buf);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer'), buf.slice(databuf.length));
t.deepEqual(murmurHash(databuf, seed, 'buffer'), buf.slice(databuf.length));
// Digest written before the input region of the same buffer.
var buf2 = Buffer.concat([Buffer.allocUnsafe(size), databuf]); buf2.fill(-1, 0, size);
t.strictEqual(murmurHash(buf2.slice(size), seed, buf2), buf2);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer'), buf2.slice(0, size));
t.deepEqual(murmurHash(databuf, seed, 'buffer'), buf2.slice(0, size));
// Digest overwrites the head of the input in place; the tail must survive.
var buf3 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf3);
t.strictEqual(murmurHash(buf3, seed, buf3), buf3);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer'), buf3.slice(0, size));
t.deepEqual(murmurHash(databuf, seed, 'buffer'), buf3.slice(0, size));
t.deepEqual(buf3.slice(size), databuf.slice(size));
// Negative offset: digest overwrites the tail of the input; head survives.
var buf4 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf4);
t.strictEqual(murmurHash(buf4, seed, buf4, -size), buf4);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer'), buf4.slice(databuf.length - size));
t.deepEqual(murmurHash(databuf, seed, 'buffer'), buf4.slice(databuf.length - size));
t.deepEqual(buf4.slice(0, databuf.length - size), databuf.slice(0, databuf.length - size));
// Truncated digest (length size - 1) at the head; everything past it survives.
var buf5 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf5);
t.strictEqual(murmurHash(buf5, seed, buf5, 0, size - 1), buf5);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer').slice(0, size - 1), buf5.slice(0, size - 1));
t.deepEqual(murmurHash(databuf, seed, 'buffer').slice(0, size - 1), buf5.slice(0, size - 1));
t.deepEqual(buf5.slice(size - 1), databuf.slice(size - 1));
// Negative offset and negative length: trailing digest bytes land near the
// end of the buffer, leaving both the head and the last 2 bytes untouched.
var buf6 = Buffer.allocUnsafe(databuf.length); databuf.copy(buf6);
t.strictEqual(murmurHash(buf6, seed, buf6, -size, -size + 2), buf6);
t.deepEqual(murmurHash(data, 'utf8', seed, 'buffer').slice(2),
buf6.slice(databuf.length - size, databuf.length - 2));
t.deepEqual(murmurHash(databuf, seed, 'buffer').slice(2),
buf6.slice(databuf.length - size, databuf.length - 2));
t.deepEqual(buf6.slice(0, databuf.length - size), databuf.slice(0, databuf.length - size));
t.deepEqual(buf6.slice(databuf.length - 2), databuf.slice(databuf.length - 2));
t.end();
});
t.end();
});
});

View File

@ -1,190 +0,0 @@
"use strict";
var test = require("tap").test
, promisify = require('../promisify')
, hash = promisify(global.Promise || require('bluebird'))
;
// Every async murmurHash variant must be exposed as a function whose .name
// matches its export key, on the root export and on the BE, LE and platform
// namespaces alike.
test("should have murmurHash functions", function(t) {
  var names = [
    'murmurHashAsync',
    'murmurHash32Async',
    'murmurHash64Async',
    'murmurHash64x64Async',
    'murmurHash64x86Async',
    'murmurHash128x64Async',
    'murmurHash128Async',
    'murmurHash128x86Async'
  ];
  // 8 names x 4 namespaces x 2 assertions each = 8*8 planned assertions.
  t.plan(8*8);
  names.forEach(function(name) {
    // Order matters for the tap output: root, BE, LE, platform.
    [hash, hash.BE, hash.LE, hash.platform].forEach(function(ns) {
      t.type(ns[name], 'function');
      t.strictEqual(ns[name].name, name);
    });
  });
});
// Parameterized suite over the promisified hash variants. Each entry:
// [size, label, asyncFn, seed-0 result, seed -1 result, seed +1 result,
//  seed-0 hex, seed -1 hex, seed +1 hex]
// For the 32-bit variants the "result" columns are numbers; for the 64/128-bit
// variants they are hex strings. LE entries carry byte-swapped expectations.
[
[4, 'murmurHash', hash.murmurHashAsync, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.murmurHash32Async, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.BE.murmurHashAsync, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.BE.murmurHash32Async, 0, 2180083513, 1364076727,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurHash', hash.LE.murmurHashAsync, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[4, 'murmurHash', hash.LE.murmurHash32Async, 0, 2180083513, 1364076727,
'00000000', '396ff181', 'b7284e51'],
[8, 'murmurHash64x64', hash.murmurHash64x64Async,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc'],
[8, 'murmurHash64x64', hash.BE.murmurHash64x64Async,
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc',
'0000000000000000', '952d4201a42f3c31', 'c6a4a7935bd064dc'],
[8, 'murmurHash64x64', hash.LE.murmurHash64x64Async,
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6',
'0000000000000000', '313c2fa401422d95', 'dc64d05b93a7a4c6'],
[8, 'murmurHash64x86', hash.murmurHash64x86Async,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248'],
[8, 'murmurHash64x86', hash.BE.murmurHash64x86Async,
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248',
'0000000000000000', 'f107ca78f6c98ab0', 'dd9f019f79505248'],
[8, 'murmurHash64x86', hash.LE.murmurHash64x86Async,
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd',
'0000000000000000', 'b08ac9f678ca07f1', '485250799f019fdd'],
[16, 'murmurHash128x64', hash.murmurHash128x64Async,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'murmurHash128x64', hash.BE.murmurHash128x64Async,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583',
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'murmurHash128x64', hash.LE.murmurHash128x64Async,
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251',
'00000000000000000000000000000000', 'ecc93b9d4ddff16a6b44e61e12217485',
'b55cff6ee5ab10468335f878aa2d6251'],
[16, 'murmurHash128x86', hash.murmurHash128x86Async,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'murmurHash128x86', hash.BE.murmurHash128x86Async,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9',
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9'],
[16, 'murmurHash128x86', hash.LE.murmurHash128x86Async,
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254',
'00000000000000000000000000000000', 'a9081e05f7499d98f7499d98f7499d98',
'ecadc488b901d254b901d254b901d254']
].forEach(function(args) {
// Unpack the fixture row into named expectations (args[0], the byte size,
// is not used by this particular suite).
var label = args[ 1]
, murmurHash = args[ 2]
, seedZeroDefault = args[ 3]
, seedMinusOneDefault = args[ 4]
, seedPlusOneDefault = args[ 5]
, seedZeroHex = args[ 6]
, seedMinusOneHex = args[ 7]
, seedPlusOneHex = args[ 8]
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
;
test(label, function(t) {
t.type(murmurHash, 'function');
// Bad arguments must reject the returned promise with the documented
// TypeError messages; 21 calls x 2 assertions each.
t.test('should throw error for bad arguments', function(t) {
t.plan(21*2);
function testerr(err, expect) {
t.type(err, expect.constructor);
t.strictEqual(err.message, expect.message);
}
murmurHash().catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash({}).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash([]).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(void(0)).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(null).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(true).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(false).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(0).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(1).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(-1).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash(new Date()).catch(function(err) { testerr(err, new TypeError("string or Buffer is required") ); });
murmurHash("", "abcdefghijklmno").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", "123456").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", "12345").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", "1234").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", "123").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", "").catch(function(err) { testerr(err, new TypeError("\"encoding\" must be a valid string encoding") ); });
murmurHash("", 0, "").catch(function(err) { testerr(err, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") ); });
murmurHash("", 0, "mumber").catch(function(err) { testerr(err, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") ); });
murmurHash("", 0, "xxxxxxx").catch(function(err) { testerr(err, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") ); });
murmurHash("", 0, "utf-8").catch(function(err) { testerr(err, new TypeError("Unknown output type: should be \"number\", \"buffer\", \"binary\", \"base64\" or \"hex\"") ); });
});
// Empty input: seeds -1 and 4294967295 are equivalent (32-bit wrap), as are
// 0 and 4294967296; string and Buffer inputs agree.
t.test('should create number hash from empty data', function(t) {
t.plan(20);
murmurHash('').then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash('', 'number').then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash(Buffer.from('')).then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash(Buffer.from(''), 'number').then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash('', -1).then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash('', -1, 'number').then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash(Buffer.from(''), -1).then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash(Buffer.from(''), -1, 'number').then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash('', 4294967295).then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash('', 4294967295, 'number').then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash(Buffer.from(''), 4294967295).then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash(Buffer.from(''), 4294967295, 'number').then(function(res) { t.strictEqual(res, seedMinusOneDefault); });
murmurHash('', 4294967296).then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash('', 4294967296, 'number').then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash(Buffer.from(''), 4294967296).then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash(Buffer.from(''), 4294967296, 'number').then(function(res) { t.strictEqual(res, seedZeroDefault); });
murmurHash('', 1).then(function(res) { t.strictEqual(res, seedPlusOneDefault); });
murmurHash('', 1, 'number').then(function(res) { t.strictEqual(res, seedPlusOneDefault); });
murmurHash(Buffer.from(''), 1).then(function(res) { t.strictEqual(res, seedPlusOneDefault); });
murmurHash(Buffer.from(''), 1, 'number').then(function(res) { t.strictEqual(res, seedPlusOneDefault); });
});
// Same seed matrix with 'buffer' output: resolved value must equal the
// expected digest Buffer and its hex rendering.
t.test('should create buffer hash from empty data', function(t) {
t.plan(20);
murmurHash('', 0, 'buffer').then(function(res) { t.deepEqual(res, seedZeroBuffer); });
murmurHash('', 0, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedZeroHex); });
murmurHash(Buffer.from(''), 'buffer').then(function(res) { t.deepEqual(res, seedZeroBuffer); });
murmurHash(Buffer.from(''), 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedZeroHex); });
murmurHash('', -1, 'buffer').then(function(res) { t.deepEqual(res, seedMinusOneBuffer); });
murmurHash('', -1, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedMinusOneHex); });
murmurHash(Buffer.from(''), -1, 'buffer').then(function(res) { t.deepEqual(res, seedMinusOneBuffer); });
murmurHash(Buffer.from(''), -1, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedMinusOneHex); });
murmurHash('', 4294967295, 'buffer').then(function(res) { t.deepEqual(res, seedMinusOneBuffer); });
murmurHash('', 4294967295, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedMinusOneHex); });
murmurHash(Buffer.from(''), 4294967295, 'buffer').then(function(res) { t.deepEqual(res, seedMinusOneBuffer); });
murmurHash(Buffer.from(''), 4294967295, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedMinusOneHex); });
murmurHash('', 4294967296, 'buffer').then(function(res) { t.deepEqual(res, seedZeroBuffer); });
murmurHash('', 4294967296, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedZeroHex); });
murmurHash(Buffer.from(''), 4294967296, 'buffer').then(function(res) { t.deepEqual(res, seedZeroBuffer); });
murmurHash(Buffer.from(''), 4294967296, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedZeroHex); });
murmurHash('', 1, 'buffer').then(function(res) { t.deepEqual(res, seedPlusOneBuffer); });
murmurHash('', 1, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedPlusOneHex); });
murmurHash(Buffer.from(''), 1, 'buffer').then(function(res) { t.deepEqual(res, seedPlusOneBuffer); });
murmurHash(Buffer.from(''), 1, 'buffer').then(function(res) { t.strictEqual(res.toString('hex'), seedPlusOneHex); });
});
t.end();
});
});

View File

@ -1,44 +0,0 @@
"use strict";
var test = require("tap").test;
var RandomChunkStream = require('./randomchunkstream');
var stream = require('stream');
// Sanity-checks the RandomChunkStream test helper: it must emit its 10000-byte
// internal buffer as Buffer chunks of at most `maxchunksize` bytes, and the
// bytes must survive unchanged both when consumed directly and when piped
// through a string-encoding PassThrough.
test("RandomChunkStream", function(t) {
var s = new RandomChunkStream({size: 10000, maxchunksize:10});
t.equal(s.size, 10000);
t.equal(s.maxchunksize, 10);
t.type(s.buffer, 'Buffer');
t.equal(s.cursor, 0);
// Piped branch: PassThrough re-emits the data as 'binary' strings.
var p = new stream.PassThrough({encoding:'binary'});
var counts = 0, countp = 0, sizes = 0, sizep = 0;
var destbuf = Buffer.allocUnsafeSlow(10000);
var deststr = '';
p.on('data', function(data) {
t.type(data, 'string');
t.ok(data.length <= 10);
deststr += data;
sizep += data.length;
countp++;
});
s.pipe(p);
// Direct branch: raw Buffer chunks are reassembled into destbuf.
s.on('data', function(data) {
t.type(data, 'Buffer');
t.ok(data.length <= 10);
data.copy(destbuf, sizes);
sizes += data.length;
counts++;
});
s.on('end', function() {
// All 10000 bytes delivered in at least size/maxchunksize chunks, and the
// reassembled output equals the stream's source buffer.
t.equal(sizes, 10000);
t.ok(counts >= 10000/10 );
t.equal(s.cursor, 10000);
t.deepEqual(s.buffer, destbuf);
p.on('end', function() {
t.equal(sizep, 10000);
t.ok(countp >= 10000/10 );
t.strictEqual(s.buffer.toString('binary'), deststr);
t.end();
});
});
});

View File

@ -1,378 +0,0 @@
"use strict";
var test = require("tap").test
, stream = require('stream')
, strm = require('../stream')
, hash = require('..')
, RandomChunkStream = require('./randomchunkstream.js')
;
/**
 * Feeds `data` into a hash stream and hands the digest to `cb`.
 *
 * @param {Object} hasher - stream-like hash object (end/on/read)
 * @param {string|Buffer} data - payload to hash
 * @param {string|Function} [encoding] - input encoding for `end()`; may be
 *        omitted, in which case this argument is treated as the callback
 * @param {Function} cb - invoked with each truthy value read on 'readable'
 */
function testStream(hasher, data, encoding, cb) {
  // Support the shorter (hasher, data, cb) call shape.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  hasher.end(data, encoding);
  hasher.on('readable', function() {
    var digest = hasher.read();
    if (digest) {
      cb(digest);
    }
  });
}
// The stream module must expose getHashes() returning a non-empty array of
// algorithm name strings.
test("should have algorithms", function(t) {
  t.type(strm.getHashes, 'function');
  t.type(strm.getHashes(), Array);
  t.ok(strm.getHashes().length > 0);
  strm.getHashes().forEach(function(name) {
    t.type(name, 'string');
  });
  t.end();
});
// createHash must reject a missing argument and an unknown algorithm name
// with the exact documented error messages.
test('should throw error for bad arguments', function(t) {
t.throws(function() { strm.createHash(); }, new TypeError("Must give algorithm string, a serialized state or a MurmurHash instance") );
t.throws(function() { strm.createHash("foo"); }, new Error("Algorithm not supported") );
t.end();
});
[
[4, 'murmurhash', hash.murmurHash,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurhash3a', hash.murmurHash,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurhash32', hash.murmurHash,
'00000000', '81f16f39', '514e28b7'],
[4, 'murmurhash32x86', hash.murmurHash,
'00000000', '81f16f39', '514e28b7'],
[16, 'murmurhash128x64', hash.murmurHash128x64,
'00000000000000000000000000000000', '6af1df4d9d3bc9ec857421121ee6446b',
'4610abe56eff5cb551622daa78f83583'],
[16, 'murmurhash128x86', hash.murmurHash128x86,
'00000000000000000000000000000000', '051e08a9989d49f7989d49f7989d49f7',
'88c4adec54d201b954d201b954d201b9']
].forEach(function(args) {
var size = args[ 0]
, algorithm = args[ 1]
, murmurHash = args[ 2]
, seedZeroHex = args[ 3]
, seedMinusOneHex = args[ 4]
, seedPlusOneHex = args[ 5]
, seedZeroBuffer = Buffer.from(seedZeroHex, 'hex')
, seedMinusOneBuffer = Buffer.from(seedMinusOneHex, 'hex')
, seedPlusOneBuffer = Buffer.from(seedPlusOneHex, 'hex')
, seedZeroBase64 = seedZeroBuffer.toString('base64')
, seedMinusOneBase64 = seedMinusOneBuffer.toString('base64')
, seedPlusOneBase64 = seedPlusOneBuffer.toString('base64')
, seedZeroBinary = seedZeroBuffer.toString('binary')
, seedMinusOneBinary = seedMinusOneBuffer.toString('binary')
, seedPlusOneBinary = seedPlusOneBuffer.toString('binary')
;
test(algorithm, function(t) {
t.test("should have algorithm", function(t) {
t.type(strm.createHash(algorithm), strm.MurmurHash);
t.type(strm.createHash(algorithm.toUpperCase()), strm.MurmurHash);
t.end();
});
t.test('should throw error for bad arguments', function(t) {
t.throws(function() {
strm.createHash(algorithm, {encoding:'xyz'}).write('');
}, new Error("Unknown encoding: xyz") );
t.end();
});
t.test('should create hex hash from empty data', function(t) {
t.plan(20);
function cbfactory(value) {
return function(result) {
t.strictEqual(result, value);
};
}
testStream(strm.createHash(algorithm, {encoding:'hex'}), '', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {encoding:'hex'}), '', 'binary', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {encoding:'hex'}), Buffer.from(''), cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {encoding:'hex'}), Buffer.from(''), 'binary', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'hex'}), '', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'hex'}), '', 'binary', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'hex'}), Buffer.from(''), cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'hex'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'hex'}), '', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'hex'}), '', 'binary', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'hex'}), Buffer.from(''), cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'hex'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneHex));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'hex'}), '', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'hex'}), '', 'binary', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'hex'}), Buffer.from(''), cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'hex'}), Buffer.from(''), 'binary', cbfactory(seedZeroHex));
testStream(strm.createHash(algorithm, {seed:1, encoding:'hex'}), '', cbfactory(seedPlusOneHex));
testStream(strm.createHash(algorithm, {seed:1, encoding:'hex'}), '', 'binary', cbfactory(seedPlusOneHex));
testStream(strm.createHash(algorithm, {seed:1, encoding:'hex'}), Buffer.from(''), cbfactory(seedPlusOneHex));
testStream(strm.createHash(algorithm, {seed:1, encoding:'hex'}), Buffer.from(''), 'binary', cbfactory(seedPlusOneHex));
});
t.test('should create buffer hash from empty data', function(t) {
t.plan(20);
function cbfactory(value) {
return function(result) {
t.deepEqual(result, value);
};
}
testStream(strm.createHash(algorithm), '', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm), '', 'binary', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm), Buffer.from(''), cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm), Buffer.from(''), 'binary', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm, {seed:-1}), '', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:-1}), '', 'binary', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:-1}), Buffer.from(''), cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:-1}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:4294967295}), '', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:4294967295}), '', 'binary', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:4294967295}), Buffer.from(''), cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:4294967295}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBuffer));
testStream(strm.createHash(algorithm, {seed:4294967296}), '', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm, {seed:4294967296}), '', 'binary', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm, {seed:4294967296}), Buffer.from(''), cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm, {seed:4294967296}), Buffer.from(''), 'binary', cbfactory(seedZeroBuffer));
testStream(strm.createHash(algorithm, {seed:1}), '', cbfactory(seedPlusOneBuffer));
testStream(strm.createHash(algorithm, {seed:1}), '', 'binary', cbfactory(seedPlusOneBuffer));
testStream(strm.createHash(algorithm, {seed:1}), Buffer.from(''), cbfactory(seedPlusOneBuffer));
testStream(strm.createHash(algorithm, {seed:1}), Buffer.from(''), 'binary', cbfactory(seedPlusOneBuffer));
});
t.test('should create string encoded hash from empty data', function(t) {
t.plan(20*2);
function cbfactory(value) {
return function(result) {
t.strictEqual(result, value);
};
}
testStream(strm.createHash(algorithm, {encoding:'base64'}), '', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {encoding:'binary'}), '', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {encoding:'base64'}), '', 'binary', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {encoding:'binary'}), '', 'binary', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {encoding:'base64'}), Buffer.from(''), cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {encoding:'binary'}), Buffer.from(''), cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {encoding:'base64'}), Buffer.from(''), 'binary', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {encoding:'binary'}), Buffer.from(''), 'binary', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'base64'}), '', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'binary'}), '', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'base64'}), '', 'binary', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'binary'}), '', 'binary', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'base64'}), Buffer.from(''), cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'binary'}), Buffer.from(''), cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'base64'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:-1, encoding:'binary'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'base64'}), '', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'binary'}), '', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'base64'}), '', 'binary', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'binary'}), '', 'binary', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'base64'}), Buffer.from(''), cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'binary'}), Buffer.from(''), cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'base64'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBase64));
testStream(strm.createHash(algorithm, {seed:4294967295, encoding:'binary'}), Buffer.from(''), 'binary', cbfactory(seedMinusOneBinary));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'base64'}), '', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'binary'}), '', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'base64'}), '', 'binary', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'binary'}), '', 'binary', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'base64'}), Buffer.from(''), cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'binary'}), Buffer.from(''), cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'base64'}), Buffer.from(''), 'binary', cbfactory(seedZeroBase64));
testStream(strm.createHash(algorithm, {seed:4294967296, encoding:'binary'}), Buffer.from(''), 'binary', cbfactory(seedZeroBinary));
testStream(strm.createHash(algorithm, {seed:1, encoding:'base64'}), '', cbfactory(seedPlusOneBase64));
testStream(strm.createHash(algorithm, {seed:1, encoding:'binary'}), '', cbfactory(seedPlusOneBinary));
testStream(strm.createHash(algorithm, {seed:1, encoding:'base64'}), '', 'binary', cbfactory(seedPlusOneBase64));
testStream(strm.createHash(algorithm, {seed:1, encoding:'binary'}), '', 'binary', cbfactory(seedPlusOneBinary));
testStream(strm.createHash(algorithm, {seed:1, encoding:'base64'}), Buffer.from(''), cbfactory(seedPlusOneBase64));
testStream(strm.createHash(algorithm, {seed:1, encoding:'binary'}), Buffer.from(''), cbfactory(seedPlusOneBinary));
testStream(strm.createHash(algorithm, {seed:1, encoding:'base64'}), Buffer.from(''), 'binary', cbfactory(seedPlusOneBase64));
testStream(strm.createHash(algorithm, {seed:1, encoding:'binary'}), Buffer.from(''), 'binary', cbfactory(seedPlusOneBinary));
});
// Verifies that the hash stream honors every supported string input encoding:
// feeding a string with a given encoding must hash to the same value as the
// equivalently-decoded Buffer passed to the one-shot murmurHash.
t.test('should utilize different string input encodings', function(t) {
// 17 testStream calls below, one assertion each.
t.plan(17);
// Callback asserting the streamed result equals the one-shot hash of `arg`.
function cbfactory(arg) {
return function(result) {
var result2 = murmurHash(arg, 'buffer');
t.deepEqual(result, result2);
};
}
// Callback asserting the streamed result equals a precomputed `value`.
function cbfactory2(value) {
return function(result) {
t.deepEqual(result, value);
};
}
// The same payload expressed as a raw string plus its base64 / hex forms;
// `hash` is the reference digest of the raw string.
var string = "\u1220łóżko"
, base64 = 'IELzfGtv'
, hex = '2042f37c6b6f'
, hash = murmurHash(string,'buffer');
testStream(strm.createHash(algorithm), Buffer.from(string, 'binary'), cbfactory2(hash));
// NOTE(review): the next two calls are byte-identical; t.plan(17) counts
// both, so removing one would break the plan — confirm the duplication is
// intentional before touching it.
testStream(strm.createHash(algorithm), string, cbfactory(Buffer.from(string, 'binary')));
testStream(strm.createHash(algorithm), string, cbfactory(Buffer.from(string, 'binary')));
testStream(strm.createHash(algorithm), string, 'ascii', cbfactory(Buffer.from(string, 'ascii')));
testStream(strm.createHash(algorithm), string, 'ascii', cbfactory2(hash));
testStream(strm.createHash(algorithm), string, 'binary', cbfactory(Buffer.from(string, 'binary')));
testStream(strm.createHash(algorithm), string, 'binary', cbfactory2(hash));
testStream(strm.createHash(algorithm), string, 'utf8', cbfactory(Buffer.from(string, 'utf8')));
testStream(strm.createHash(algorithm), string, 'utf-8', cbfactory(Buffer.from(string, 'utf-8')));
testStream(strm.createHash(algorithm), string, 'ucs2', cbfactory(Buffer.from(string, 'ucs2')));
testStream(strm.createHash(algorithm), string, 'ucs-2', cbfactory(Buffer.from(string, 'ucs-2')));
testStream(strm.createHash(algorithm), string, 'utf16le', cbfactory(Buffer.from(string, 'utf16le')));
testStream(strm.createHash(algorithm), string, 'utf-16le', cbfactory(Buffer.from(string, 'utf-16le')));
testStream(strm.createHash(algorithm), base64, 'base64', cbfactory2(hash));
testStream(strm.createHash(algorithm), base64, 'base64', cbfactory(Buffer.from(base64, 'base64')));
testStream(strm.createHash(algorithm), hex, 'hex', cbfactory2(hash));
testStream(strm.createHash(algorithm), hex, 'hex', cbfactory(Buffer.from(hex, 'hex')));
});
// Hashes 1000 random characters through the stream API and cross-checks the
// result against the one-shot murmurHash with various seeds and encodings.
t.test('should create hash from some random data', function(t) {
// 2 length checks + 5 seed/encoding checks + 3 output-type checks.
t.plan(2+5+3);
// Callback asserting the digest has the expected byte length
// (`size` comes from the enclosing scope).
function cbfactoryLen() {
return function(result) {
t.equal(result.length, size);
};
}
// Callback asserting the hex digest equals the one-shot hash of `arg`
// (seed defaults to 0 when omitted).
function cbfactory(arg, seed) {
return function(result) {
var result2 = (seed === undefined)
? murmurHash(arg, 0, 'hex')
: murmurHash(arg, seed, 'hex');
t.strictEqual(result, result2);
};
}
// Callback applying an arbitrary tap assertion to streamed vs one-shot result.
function cbfactory2(assertion, arg, seed, output) {
return function(result) {
var result2 = murmurHash(arg, seed, output);
t[assertion](result, result2);
};
}
var data = '';
for (var i = 0; i < 1000; ++i) data += String.fromCharCode((Math.random()*32768)|0);
var buffer = Buffer.from(data, 'binary');
testStream(strm.createHash(algorithm, {seed:0}), data, cbfactoryLen());
testStream(strm.createHash(algorithm), buffer, cbfactoryLen());
testStream(strm.createHash(algorithm, {encoding: 'hex'}), data, 'utf8', cbfactory(Buffer.from(data, 'utf8')));
testStream(strm.createHash(algorithm, {encoding: 'hex'}), data, cbfactory(buffer));
// Seed -1 and 4294967295 are cross-checked both ways: the test expects them
// to denote the same 32-bit seed value.
testStream(strm.createHash(algorithm, {seed: -1, encoding: 'hex'}), data, cbfactory(buffer, -1));
testStream(strm.createHash(algorithm, {seed: -1, encoding: 'hex'}), data, cbfactory(buffer, 4294967295));
testStream(strm.createHash(algorithm, {seed: 4294967295, encoding: 'hex'}), data, cbfactory(buffer, -1));
var seed = (Math.random()*4294967296)|0;
// 'buffer' output: distinct Buffer instances (notStrictEqual) with equal
// contents (deepEqual); 'hex' output: identical strings.
testStream(strm.createHash(algorithm, {seed: seed}), data, cbfactory2('notStrictEqual', buffer, seed, 'buffer'));
testStream(strm.createHash(algorithm, {seed: seed}), data, cbfactory2('deepEqual', buffer, seed, 'buffer'));
testStream(strm.createHash(algorithm, {seed: seed, encoding: 'hex'}), data, cbfactory2('strictEqual', buffer, seed, 'hex'));
});
// For several chunk sizes, pipes one random source into six hashers (three fed
// Buffers directly, three fed strings via a PassThrough) and checks that all
// streamed digests agree with incremental update()/digest() and with the
// one-shot murmurHash, for seeds 0, 1 and a random seed.
[101, 10009, 32768].forEach(function(maxchunksize) {
t.test('should create hash from some random data incrementally', function(t) {
t.plan(4+11+7+17);
// Random source emitting ~23 chunks of at most `maxchunksize` bytes.
var src = new RandomChunkStream({size: maxchunksize*23-1, maxchunksize:maxchunksize});
// String-mode mirror of `src` ('binary' encoding preserves raw bytes).
var strsrc = new stream.PassThrough({encoding: 'binary'});
var seed = (Math.random()*4294967296)|0;
var hasher0 = strm.createHash(algorithm, {seed: 0, encoding: 'hex'});
var hasher1 = strm.createHash(algorithm, {seed: 1, encoding: 'hex'});
var hasherS = strm.createHash(algorithm, {seed: seed, encoding: 'hex'});
var hasher0str = strm.createHash(algorithm, {seed: 0, encoding: 'hex'});
var hasher1str = strm.createHash(algorithm, {seed: 1, encoding: 'hex'});
var hasherSstr = strm.createHash(algorithm, {seed: seed, encoding: 'hex'});
// Chunk/byte counters for the Buffer path vs the string path.
var bufchunks = 0, bufsize = 0;
var strchunks = 0, strsize = 0;
src.pipe(strsrc);
src.pipe(hasher0);
src.pipe(hasher1);
src.pipe(hasherS);
src.once('data', function(data) {
t.type(data, 'Buffer');
t.ok(data.length <= src.maxchunksize);
}).on('data', function(data) {
bufchunks++;
bufsize += data.length;
});
strsrc.pipe(hasher0str);
strsrc.pipe(hasher1str);
strsrc.pipe(hasherSstr);
strsrc.once('data', function(data) {
t.type(data, 'string');
t.ok(data.length <= src.maxchunksize);
}).on('data', function(data) {
strchunks++;
strsize += data.length;
});
// `done` fires once per hasher becoming readable; the final call (countdown
// reaching 0) runs the assertions below.
var countdown = 0;
hasher0str.once('readable', done); ++countdown;
hasher1str.once('readable', done); ++countdown;
hasherSstr.once('readable', done); ++countdown;
hasher0.once('readable', done); ++countdown;
hasher1.once('readable', done); ++countdown;
hasherS.once('readable', done); ++countdown;
function done() {
if (--countdown) return;
var buffer = src.buffer;
// Both paths must have seen every byte, split into identical chunking.
t.equal(bufsize, buffer.length);
t.equal(strsize, buffer.length);
t.equal(bufchunks, strchunks);
t.ok(bufchunks >= src.size / src.maxchunksize);
t.ok(strchunks >= src.size / src.maxchunksize);
// NOTE(review): `_handle.total` pokes at the hasher's internal handle to
// read the byte count consumed — confirm this stays a supported field.
t.equal(hasher0str._handle.total, buffer.length);
t.equal(hasher1str._handle.total, buffer.length);
t.equal(hasherSstr._handle.total, buffer.length);
t.equal(hasher0._handle.total, buffer.length);
t.equal(hasher1._handle.total, buffer.length);
t.equal(hasherS._handle.total, buffer.length);
// The update()/digest() API must agree with the streamed results
// (`size` is the expected digest length from the enclosing scope).
var data = buffer.toString('binary');
t.equal(strm.createHash(algorithm).update(data, 'binary').digest().length, size);
t.equal(strm.createHash(algorithm).update(data, 'binary').total, buffer.length);
t.equal(strm.createHash(algorithm).update(data, 'binary').digest('buffer').length, size);
t.equal(strm.createHash(algorithm).update(buffer).digest().length, size);
t.equal(strm.createHash(algorithm).update(buffer).digest('buffer').length, size);
t.equal(strm.createHash(algorithm).update(buffer).total, buffer.length);
t.strictEqual(strm.createHash(algorithm).update(data, 'binary').digest('number'),
strm.createHash(algorithm).update(buffer).digest('number'));
var d0 = hasher0.read();
var d1 = hasher1.read();
var dS = hasherS.read();
var d0str = hasher0str.read();
var d1str = hasher1str.read();
var dSstr = hasherSstr.read();
// Different seeds produce different digests; Buffer vs string input of the
// same bytes produce identical digests.
t.notStrictEqual(d0, d1);
t.notStrictEqual(d0, dS);
t.strictEqual(d0, d0str);
t.strictEqual(d1, d1str);
t.strictEqual(dS, dSstr);
t.strictEqual(d0, strm.createHash(algorithm).update(buffer).digest('hex'));
t.strictEqual(d0, strm.createHash(algorithm).update(data, 'binary').digest('hex'));
t.strictEqual(d0, murmurHash(buffer, 0, 'hex'));
t.strictEqual(d0, murmurHash(data, 0, 'hex'));
t.strictEqual(d1, strm.createHash(algorithm, {seed: 1}).update(buffer).digest('hex'));
t.strictEqual(d1, strm.createHash(algorithm, {seed: 1}).update(data, 'binary').digest('hex'));
t.strictEqual(d1, murmurHash(buffer, 1, 'hex'));
t.strictEqual(d1, murmurHash(data, 1, 'hex'));
t.strictEqual(dS, strm.createHash(algorithm, {seed: seed}).update(buffer).digest('hex'));
t.strictEqual(dS, strm.createHash(algorithm, {seed: seed}).update(data, 'binary').digest('hex'));
t.strictEqual(dS, murmurHash(buffer, seed, 'hex'));
t.strictEqual(dS, murmurHash(data, seed, 'hex'));
}
});
});
// Round-trips a hasher through JSON mid-update: a hasher serialized after
// write('foo') and revived via createHash(JSON.parse(...)) must emit the same
// digest as the original and as the one-shot murmurHash.
t.test('should JSON serialize and deserialize', function(t) {
t.plan(3);
// Random non-zero 32-bit seed.
var seed = (Math.random() * 0xFFFFFFFF >>>0) + 1;
var original = strm.createHash(algorithm, {seed: seed, encoding: 'hex'});
original.write('foo');
var serialized = JSON.stringify(original);
t.type(serialized, 'string');
// Revive a second hasher from the serialized state.
var revived = strm.createHash(JSON.parse(serialized), {encoding: 'hex'});
original.once('readable', function() {
  var expected = original.read();
  revived.once('readable', function() {
    var actual = revived.read();
    t.strictEqual(actual, expected);
    t.strictEqual(actual, murmurHash('foo', seed, 'hex'));
  });
});
original.end();
revived.end();
});
t.end();
});
});

View File

@ -1 +0,0 @@
*.js

View File

@ -1,19 +0,0 @@
/* A small, deliberately restrictive subset declaration for the `tap` module:
 * only the assertion and test-registration members these test files use are
 * declared, so the TypeScript tests compile without the full tap typings. */
declare module 'tap' {
export interface Test {
end(): void;
error(error: Error, message?: string, extra?: any): void;
ok(obj: any, message?: string, extra?: any): void;
plan(count: number): void;
strictEqual<T>(found: T, wanted: T, message?: string, extra?: any): void;
deepEqual<T>(found: T, wanted: T, message?: string, extra?: any): void;
// Sync and promise-returning subtest overloads.
test(label: string, f: (t: Test) => void): void;
test(label: string, opt: Object, f: (t: Test) => PromiseLike<any>): void;
test(label: string, opt: Object, f: (t: Test) => void): void;
threw(error: any): any;
type(object: any, type: string|Function, message?: string, extra?: any): void;
}
export function test(label: string, f: (t: Test) => void): void;
export function test(label: string, opt: Object, f: (t: Test) => void): void;
export function test(label: string, opt: Object, f: (t: Test) => PromiseLike<any>): void;
}

View File

@ -1,188 +0,0 @@
import * as os from "os";
import { IMurHasherConstructor,
MurmurHash,
MurmurHash128, MurmurHash128x64, MurmurHash128x86
} from "../../incremental";
import { Test, test } from "tap";
/** Expected digest values for one murmur hash variant (32-bit or 128-bit),
 *  used by the incremental-API argument tests below. */
interface Expected {
// Digest size in bytes (4 for the 32-bit variant, 16 for the 128-bit ones).
hashSize: number,
// Digest of the empty input, as a number (32-bit) or hex string (128-bit).
zero: number|string,
zeroHex: string,
// Digests of the test payload in big-endian / little-endian byte order.
resultBE: number|string,
resultLE: number|string,
// Digest of the payload with a non-default seed.
resultSeed: number|string,
resultHexBE: string,
resultHexLE: string,
// Hex dumps of an 8/32-byte buffer after digest(buf[, offset[, length]]).
resultHexBuf1: string,
resultHexBuf2: string,
resultHexBuf3: string,
// Digests of the payload when fed as hex-encoded string input.
encInputResult: number|string,
encInputResultHexBE: string,
encInputResultHexLE: string
}
// Expected values for the 32-bit MurmurHash variant.
const expected32: Expected = {
hashSize: 4,
zero: 0,
zeroHex: '00000000',
resultBE: 1954665850,
resultLE: 1954665850,
resultSeed: 1336289403,
resultHexBE: '7481d57a',
resultHexLE: '7ad58174',
resultHexBuf1: '7481d57a00000000',
resultHexBuf2: '747481d57a000000',
resultHexBuf3: '747481d57a748100',
encInputResult: 864439591,
encInputResultHexBE: '33864d27',
encInputResultHexLE: '274d8633'
}
// Expected values for the 128-bit x64 variant.
const expected128x64: Expected = {
hashSize: 16,
zero: '00000000000000000000000000000000',
zeroHex: '00000000000000000000000000000000',
resultBE: '446359de1c29805fa508517dd4794ae5',
resultLE: '5f80291cde596344e54a79d47d5108a5',
resultSeed: 'b4e3da8506ea610f59dd2bbc5fc0f630',
resultHexBE: '446359de1c29805fa508517dd4794ae5',
resultHexLE: '5f80291cde596344e54a79d47d5108a5',
resultHexBuf1: '446359de1c29805fa508517dd4794ae500000000000000000000000000000000',
resultHexBuf2: '44446359de1c29805fa508517dd4794ae5000000000000000000000000000000',
resultHexBuf3: '44446359de4463805fa508517dd4794ae5000000000000000000000000000000',
encInputResult: 'd9fec742697ae491b938477241504bf9',
encInputResultHexBE: '91e47a6942c7fed9f94b5041724738b9',
encInputResultHexLE: 'd9fec742697ae491b938477241504bf9'
}
// Expected values for the 128-bit x86 variant.
const expected128x86: Expected = {
hashSize: 16,
zero: '00000000000000000000000000000000',
zeroHex: '00000000000000000000000000000000',
resultBE: '017b2e639c477ec35f45a23854bdd956',
resultLE: '632e7b01c37e479c38a2455f56d9bd54',
resultSeed: '1c1d24b3766cbee3fd13dd8bb03afe8e',
resultHexBE: '017b2e639c477ec35f45a23854bdd956',
resultHexLE: '632e7b01c37e479c38a2455f56d9bd54',
resultHexBuf1: '017b2e639c477ec35f45a23854bdd95600000000000000000000000000000000',
resultHexBuf2: '01017b2e639c477ec35f45a23854bdd956000000000000000000000000000000',
resultHexBuf3: '01017b2e63017b7ec35f45a23854bdd956000000000000000000000000000000',
encInputResult: 'e5cc4e78aca42dc9362fb0e1362fb0e1',
encInputResultHexBE: '784ecce5c92da4ace1b02f36e1b02f36',
encInputResultHexLE: 'e5cc4e78aca42dc9362fb0e1362fb0e1'
}
// The generic MurmurHash128 maps to the x64 or x86 implementation depending on
// the CPU architecture the tests run on.
const expected128: Expected = os.arch() === 'x64' ? expected128x64 : expected128x86;
// Register the argument and callback-update tests for every hasher class.
test("MurmurHash should have arguments", (t) => testIMurHashArgs(MurmurHash, expected32, t));
test("MurmurHash should have arguments for callback update", (t) => testIMurUpdateCallback(MurmurHash, expected32, t));
test("MurmurHash128 should have arguments", (t) => testIMurHashArgs(MurmurHash128, expected128, t));
test("MurmurHash128 should have arguments for callback update", (t) => testIMurUpdateCallback(MurmurHash128, expected128, t));
test("MurmurHash128x86 should have arguments", (t) => testIMurHashArgs(MurmurHash128x86, expected128x86, t));
test("MurmurHash128x86 should have arguments for callback update", (t) => testIMurUpdateCallback(MurmurHash128x86, expected128x86, t));
test("MurmurHash128x64 should have arguments", (t) => testIMurHashArgs(MurmurHash128x64, expected128x64, t));
test("MurmurHash128x64 should have arguments for callback update", (t) => testIMurUpdateCallback(MurmurHash128x64, expected128x64, t));
/**
 * Exercises every constructor form and the synchronous update/digest/serialize
 * surface of one incremental hasher class against its expected-value table.
 * The assertions are order-dependent: each section builds on the hasher state
 * left by the previous one.
 */
function testIMurHashArgs(murmurhasher: IMurHasherConstructor, expected: Expected, t: Test) {
// constructor();
let mmh = new murmurhasher();
t.strictEqual(mmh.SERIAL_BYTE_LENGTH, murmurhasher.SERIAL_BYTE_LENGTH);
t.strictEqual(mmh.total, 0);
t.strictEqual(mmh.isBusy, false);
t.strictEqual(mmh.endianness, "BE");
// serialize() into a caller-provided buffer returns that buffer.
let serial0 = Buffer.alloc(mmh.SERIAL_BYTE_LENGTH);
t.strictEqual(mmh.serialize(serial0), serial0);
// Digest of the untouched hasher equals the empty-input values.
t.type(mmh.digest(), Buffer);
t.strictEqual((mmh.digest() as Buffer).toString('hex'), expected.zeroHex);
t.strictEqual((mmh.digest("buffer") as Buffer).toString('hex'), expected.zeroHex);
t.strictEqual(mmh.digest("number"), expected.zero);
// update() chains and accumulates `total` across string and Buffer input.
t.strictEqual(mmh.update('dead'), mmh);
t.strictEqual(mmh.total, 4);
t.strictEqual(mmh.update(Buffer.from('baca')), mmh);
t.strictEqual(mmh.total, 8);
t.strictEqual(mmh.update('ca'), mmh);
t.strictEqual(mmh.total, 10);
t.strictEqual((mmh.digest() as Buffer).toString('hex'), expected.resultHexBE);
t.strictEqual((mmh.digest("buffer") as Buffer).toString('hex'), expected.resultHexBE);
t.strictEqual(mmh.digest("hex"), expected.resultHexBE);
t.strictEqual(mmh.digest("number"), expected.resultBE);
// digest(buf[, offset[, length]]) writes into the given buffer in place.
let digest = Buffer.alloc(expected.hashSize*2);
t.strictEqual(mmh.digest(digest), digest);
t.strictEqual(digest.toString('hex'), expected.resultHexBuf1);
t.strictEqual(mmh.digest(digest, 1), digest);
t.strictEqual(digest.toString('hex'), expected.resultHexBuf2);
t.strictEqual(mmh.digest(digest, 5, 2), digest);
t.strictEqual(digest.toString('hex'), expected.resultHexBuf3);
// Switching endianness changes only how the digest is presented.
mmh.endianness = "LE";
t.strictEqual(mmh.endianness, "LE");
t.strictEqual(mmh.digest("number"), expected.resultLE);
t.strictEqual(mmh.digest("hex"), expected.resultHexLE);
t.strictEqual((mmh.digest("buffer") as Buffer).toString('hex'), expected.resultHexLE);
// serialize() with no argument matches toJSON().
let serial = mmh.serialize();
t.strictEqual(serial, mmh.toJSON());
// constructor(serial: string|Buffer, endianness?: Endianness);
let mmhclone = new murmurhasher(serial, "LE");
t.strictEqual(mmhclone.total, 10);
t.strictEqual(mmhclone.endianness, "LE");
mmhclone = new murmurhasher(serial);
t.strictEqual(mmhclone.total, 10);
t.strictEqual(mmhclone.endianness, "BE");
t.strictEqual(mmhclone.digest("number"), expected.resultBE);
// copy() transfers the hasher state into the target instance.
let mmh2 = new murmurhasher();
t.strictEqual(mmh2.digest("number"), expected.zero);
t.strictEqual(mmh.copy(mmh2), mmh2);
t.strictEqual(mmh2.digest("number"), expected.resultBE);
// constructor(hash: IMurHasher, endianness?: Endianness);
let mmh3 = new murmurhasher(mmh2, "LE");
t.strictEqual(mmh3.endianness, "LE");
mmh3 = new murmurhasher(mmh2);
t.strictEqual(mmh3.endianness, "BE");
t.strictEqual(mmh3.digest("number"), expected.resultBE);
// constructor(seed: number, endianness?: Endianness);
let mmhseed = new murmurhasher(123456, "platform");
t.strictEqual(mmhseed.endianness, os.endianness());
mmhseed = new murmurhasher(123456);
t.strictEqual(mmhseed.endianness, "BE");
t.strictEqual(mmhseed.update('deadba'), mmhseed);
t.strictEqual(mmhseed.update('caca'), mmhseed);
t.strictEqual(mmhseed.digest("number"), expected.resultSeed);
// constructor(serial: string|Buffer, endianness?: Endianness);
let mmhser = new murmurhasher(serial0, "LE");
t.strictEqual(mmhser.endianness, "LE");
mmhser = new murmurhasher(serial0);
t.strictEqual(mmhser.endianness, "BE");
// Copying a pristine hasher resets `total` but keeps mmh's "LE" endianness.
t.strictEqual(new murmurhasher(serial0).copy(mmh), mmh);
t.strictEqual(mmh.total, 0);
t.strictEqual(mmh.endianness, "LE");
// Hex-encoded string input: 'deadba'/'caca' decode to 3 + 2 = 5 bytes.
t.strictEqual(mmh.update('deadba', 'hex'), mmh);
t.strictEqual(mmh.total, 3);
t.strictEqual(mmh.update('caca', 'hex'), mmh);
t.strictEqual(mmh.total, 5);
t.strictEqual(mmh.digest("number"), expected.encInputResult);
t.strictEqual(mmh.digest("hex"), expected.encInputResultHexLE);
mmh.endianness = "BE";
t.strictEqual(mmh.endianness, "BE");
t.strictEqual(mmh.digest("hex"), expected.encInputResultHexBE);
t.end();
}
/**
 * Exercises the callback form of `update`: each asynchronous update must
 * return `undefined`, report `isBusy` while pending, clear `isBusy` before its
 * callback runs, and invoke the callback without an error. The final digest
 * of "dead" + "bacaca" must match the expected big-endian hex value.
 */
function testIMurUpdateCallback(murmurhasher: IMurHasherConstructor, expected: Expected, t: Test) {
  t.plan(11);
  const hasher = new murmurhasher();
  // Fresh hasher: nothing consumed, nothing pending.
  t.strictEqual(hasher.total, 0);
  t.strictEqual(hasher.isBusy, false);
  t.strictEqual(hasher.update(Buffer.from("dead"), (outerErr: Error) => {
    t.error(outerErr);
    t.strictEqual(hasher.isBusy, false);
    // Chain a second async update from inside the first callback.
    t.strictEqual(hasher.update("bacaca", "ascii", (innerErr: Error) => {
      t.error(innerErr);
      t.strictEqual(hasher.isBusy, false);
      t.strictEqual(hasher.digest("hex"), expected.resultHexBE);
    }), undefined);
    t.strictEqual(hasher.isBusy, true);
  }), undefined);
  // Busy immediately after scheduling the first update.
  t.strictEqual(hasher.isBusy, true);
}

View File

@ -1,963 +0,0 @@
import * as os from "os";
import * as Bluebird from "bluebird";
import { MurmurHashFnI, murmurHash, murmurHash32,
BE, LE, platform } from "../..";
import * as promisify from "../../promisify";
import { Test, test } from "tap";
const asyncMMH = promisify(Bluebird);
/** Expected 32-bit digest values for the one-shot murmurHash argument tests,
 *  one table per output byte order (BE/LE). */
interface Expected {
// Buffer reader method name used to read the digest back ('readUInt32BE'
// or 'readUInt32LE').
readResult: string,
// Digests of the test payload with default and 123456 seeds, in several
// output encodings.
result: number,
resultSeed: number,
resultHex: string,
resultSeedHex: string,
resultBase64: string,
resultSeedBase64: string,
// Same digests when the input string is interpreted as hex-encoded.
encInputResult: number,
encInputResultSeed: number,
encInputResultHex: string,
encInputResultSeedHex: string,
encInputResultBase64: string,
encInputResultSeedBase64: string,
// First 3 bytes of the digest, for the output-length-limited variants.
result24: number,
result24Seed: number,
encInputResult24: number,
encInputResult24Seed: number
}
// Selects the 32-bit expectation table matching this platform's byte order.
const findExpected32 = (): Expected => {
  const endian = os.endianness();
  if (endian === 'BE') return expected32BE;
  if (endian === 'LE') return expected32LE;
  throw new Error("unsupported endianness");
}
// Expected values when digests are rendered big-endian.
const expected32BE: Expected = {
readResult: 'readUInt32BE',
result: 1954665850,
resultSeed: 1336289403,
resultHex: '7481d57a',
resultSeedHex: '4fa6287b',
resultBase64: 'dIHVeg==',
resultSeedBase64: 'T6Yoew==',
encInputResult: 864439591,
encInputResultSeed: 764471894,
encInputResultHex: '33864d27',
encInputResultSeedHex: '2d90ea56',
encInputResultBase64: 'M4ZNJw==',
encInputResultSeedBase64: 'LZDqVg==',
result24: 7635413,
result24Seed: 5219880,
encInputResult24: 3376717,
encInputResult24Seed: 2986218
}
// Expected values when digests are rendered little-endian; the numeric
// `result`/`resultSeed` values are byte-order independent.
const expected32LE: Expected = {
readResult: 'readUInt32LE',
result: 1954665850,
resultSeed: 1336289403,
resultHex: '7ad58174',
resultSeedHex: '7b28a64f',
resultBase64: 'etWBdA==',
resultSeedBase64: 'eyimTw==',
encInputResult: 864439591,
encInputResultSeed: 764471894,
encInputResultHex: '274d8633',
encInputResultSeedHex: '56ea902d',
encInputResultBase64: 'J02GMw==',
encInputResultSeedBase64: 'VuqQLQ==',
result24: 8050049,
result24Seed: 8071334,
encInputResult24: 2575750,
encInputResult24Seed: 5696144
}
// Synchronous variants: default, BE, LE and platform-endian exports.
test("check arguments of murmurHash", (t) => testMurmurHash32(murmurHash, expected32BE, t));
test("check arguments of BE.murmurHash", (t) => testMurmurHash32(BE.murmurHash, expected32BE, t));
test("check arguments of LE.murmurHash", (t) => testMurmurHash32(LE.murmurHash, expected32LE, t));
test("check arguments of platform.murmurHash", (t) => testMurmurHash32(platform.murmurHash, findExpected32(), t));
test("check arguments of murmurHash32", (t) => testMurmurHash32(murmurHash32, expected32BE, t));
test("check arguments of BE.murmurHash32", (t) => testMurmurHash32(BE.murmurHash32, expected32BE, t));
test("check arguments of LE.murmurHash32", (t) => testMurmurHash32(LE.murmurHash32, expected32LE, t));
test("check arguments of platform.murmurHash32", (t) => testMurmurHash32(platform.murmurHash32, findExpected32(), t));
// Callback variants of the same functions.
test("check arguments of murmurHash w/ callback", (t) => testMurmurHash32Callback(murmurHash, expected32BE, t));
test("check arguments of BE.murmurHash w/ callback", (t) => testMurmurHash32Callback(BE.murmurHash, expected32BE, t));
test("check arguments of LE.murmurHash w/ callback", (t) => testMurmurHash32Callback(LE.murmurHash, expected32LE, t));
test("check arguments of platform.murmurHash w/ callback", (t) => testMurmurHash32Callback(platform.murmurHash, findExpected32(), t));
test("check arguments of murmurHash32 w/ callback", (t) => testMurmurHash32Callback(murmurHash32, expected32BE, t));
test("check arguments of BE.murmurHash32 w/ callback", (t) => testMurmurHash32Callback(BE.murmurHash32, expected32BE, t));
test("check arguments of LE.murmurHash32 w/ callback", (t) => testMurmurHash32Callback(LE.murmurHash32, expected32LE, t));
test("check arguments of platform.murmurHash32 w/ callback", (t) => testMurmurHash32Callback(platform.murmurHash32, findExpected32(), t));
// Promisified variants produced by promisify(Bluebird).
test("check arguments of async murmurHash", (t) => testMurmurHash32Async(asyncMMH.murmurHashAsync, expected32BE, t));
test("check arguments of async BE.murmurHash", (t) => testMurmurHash32Async(asyncMMH.BE.murmurHashAsync, expected32BE, t));
test("check arguments of async LE.murmurHash", (t) => testMurmurHash32Async(asyncMMH.LE.murmurHashAsync, expected32LE, t));
test("check arguments of async platform.murmurHash", (t) => testMurmurHash32Async(asyncMMH.platform.murmurHashAsync, findExpected32(), t));
test("check arguments of async murmurHash32", (t) => testMurmurHash32Async(asyncMMH.murmurHash32Async, expected32BE, t));
test("check arguments of async BE.murmurHash32", (t) => testMurmurHash32Async(asyncMMH.BE.murmurHash32Async, expected32BE, t));
test("check arguments of async LE.murmurHash32", (t) => testMurmurHash32Async(asyncMMH.LE.murmurHash32Async, expected32LE, t));
test("check arguments of async platform.murmurHash32", (t) => testMurmurHash32Async(asyncMMH.platform.murmurHash32Async, findExpected32(), t));
/**
 * Exercises every synchronous call signature of a 32-bit one-shot murmurHash
 * function (data / output buffer with offset and length / input encoding /
 * seed / output type, in all documented combinations) against the expected
 * table. `expected.readResult` names the Buffer reader ('readUInt32BE' or
 * 'readUInt32LE') matching the variant's output byte order.
 */
function testMurmurHash32(murmurHash: MurmurHashFnI, expected: Expected, t: Test): void {
// murmurHash(data)
t.strictEqual(murmurHash("deadbacaca"), expected.result);
t.strictEqual(murmurHash(Buffer.from("deadbacaca")), expected.result);
// murmurHash(data, output[, offset[, length]])
// The 8-byte buffer is zero-filled between calls so the assertions can also
// verify that bytes outside the written range stay untouched.
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.result);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.result);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.result);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.result);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.result24);
t.strictEqual(buf.readUIntBE(5, 3), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.result24);
t.strictEqual(buf.readUIntBE(5, 3), 0);
// murmurHash(data{String}, encoding|output_type[, seed])
t.strictEqual(murmurHash("deadbacaca", "ascii"), expected.result);
t.strictEqual(murmurHash("deadbacaca", "ascii", 123456), expected.resultSeed);
t.strictEqual(murmurHash("deadbacaca", "hex"), expected.encInputResult);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456), expected.encInputResultSeed);
t.strictEqual(murmurHash("deadbacaca", "number"), expected.result);
t.strictEqual(murmurHash("deadbacaca", "number", 123456), expected.resultSeed);
{
let res = murmurHash("deadbacaca", "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}
{
let res = murmurHash("deadbacaca", "buffer", 123456) as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}
// murmurHash(data{Buffer}, output_type[, seed])
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex"), expected.resultHex);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", 123456), expected.resultSeedHex);
{
let res = murmurHash(Buffer.from("deadbacaca"), "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}
{
let res = murmurHash(Buffer.from("deadbacaca"), "buffer", 123456) as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}
// murmurHash(data, seed)
t.strictEqual(murmurHash("deadbacaca", 123456), expected.resultSeed);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456), expected.resultSeed);
// murmurHash(data, seed, output[, offset[, length]])
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", 123456, buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.resultSeed);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.resultSeed);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", 123456, buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.resultSeed);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.resultSeed);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", 123456, buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(buf.readUIntBE(5, 3), 0);
buf.fill(0);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(buf.readUIntBE(5, 3), 0);
// murmurHash(data, seed, output_type)
t.strictEqual(murmurHash("deadbacaca", 123456, "number"), expected.resultSeed);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "number"), expected.resultSeed);
t.strictEqual(murmurHash("deadbacaca", 123456, "hex"), expected.resultSeedHex);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "hex"), expected.resultSeedHex);
t.strictEqual(murmurHash("deadbacaca", 123456, "base64"), expected.resultSeedBase64);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "base64"), expected.resultSeedBase64);
{
let res = murmurHash("deadbacaca", 123456, "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}
{
let res = murmurHash(Buffer.from("deadbacaca"), 123456, "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}
// murmurHash(data, encoding, output_type)
// An input encoding applies only to string data; for Buffer input it is
// ignored and the plain-result values are expected.
t.strictEqual(murmurHash("deadbacaca", "hex", "number"), expected.encInputResult);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "number"), expected.result); // input encoding ignored
t.strictEqual(murmurHash("deadbacaca", "hex", "hex"), expected.encInputResultHex);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "hex"), expected.resultHex); // input encoding ignored
t.strictEqual(murmurHash("deadbacaca", "hex", "base64"), expected.encInputResultBase64);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "base64"), expected.resultBase64); // input encoding ignored
{
let res = murmurHash("deadbacaca", "hex", "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResult);
}
{
let res = murmurHash(Buffer.from("deadbacaca"), "hex", "buffer") as Buffer; // input encoding ignored
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}
// murmurHash(data{string}, encoding, output[, offset[, length]])
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.encInputResult);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.encInputResult);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.encInputResult24);
t.strictEqual(buf.readUIntBE(5, 3), 0);
// murmurHash(data{string}, encoding, seed)
t.strictEqual(murmurHash("deadbacaca", "ascii", 123456), expected.resultSeed);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456), expected.encInputResultSeed);
// murmurHash(data{string}, encoding, seed, output[, offset[, length]])
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf), buf);
t.strictEqual(buf[expected.readResult](0), expected.encInputResultSeed);
t.strictEqual(buf.readUInt32BE(4), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf, 2), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf[expected.readResult](2), expected.encInputResultSeed);
t.strictEqual(buf.readUInt16BE(6), 0);
buf.fill(0);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf, 2, 3), buf);
t.strictEqual(buf.readUInt16BE(0), 0);
t.strictEqual(buf.readUIntBE(2, 3), expected.encInputResult24Seed);
t.strictEqual(buf.readUIntBE(5, 3), 0);
// murmurHash(data{string}, encoding, seed, output_type)
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "number"), expected.encInputResultSeed);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "hex"), expected.encInputResultSeedHex);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "base64"), expected.encInputResultSeedBase64);
{
let res = murmurHash("deadbacaca", "hex", 123456, "buffer") as Buffer;
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResultSeed);
}
t.end();
}
/**
 * Exercises every callback-style overload of a 32-bit murmurHash variant.
 * Each invocation passes a trailing node-style callback and must itself
 * return `undefined`; the hash value (or the filled output Buffer) is
 * delivered to the callback instead. `expected` carries the precomputed
 * hash values for each input/seed/encoding combination, and
 * `expected.readResult` names the Buffer read method used to extract the
 * 32-bit hash from an output Buffer.
 */
function testMurmurHash32Callback(murmurHash: MurmurHashFnI, expected: Expected, t: Test): void {
// Declares the exact number of assertions that must run, including those
// inside the callbacks below.
t.plan(234);
// murmurHash(data, callback)
t.strictEqual(murmurHash("deadbacaca", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.result);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.result);
}), undefined);
// murmurHash(data, output[, offset[, length]], callback)
// Output buffers are 8 bytes (hash is only 4) so the assertions can also
// verify that the bytes around the written hash are left untouched (zero).
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.result);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.result);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.result);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.result);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
// length 3 truncates the hash: only 24 bits are written at offset 2.
t.strictEqual(res.readUIntBE(2, 3), expected.result24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
// murmurHash(data{String}, encoding|output_type[, seed], callback)
t.strictEqual(murmurHash("deadbacaca", "ascii", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.result);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "ascii", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.encInputResult);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.encInputResultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.result);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "number", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "buffer", 123456, (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}), undefined);
// murmurHash(data{Buffer}, output_type[, seed], callback)
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultHex);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", 123456, (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultSeedHex);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "buffer", 123456, (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}), undefined);
// murmurHash(data, seed, callback)
t.strictEqual(murmurHash("deadbacaca", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
// murmurHash(data, seed, output[, offset[, length]], callback)
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", 123456, buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", 123456, buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.resultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.resultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", 123456, buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
// murmurHash(data, seed, output_type, callback)
t.strictEqual(murmurHash("deadbacaca", 123456, "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", 123456, "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultSeedHex);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultSeedHex);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", 123456, "base64", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultSeedBase64);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "base64", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultSeedBase64);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", 123456, "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), 123456, "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}), undefined);
// murmurHash(data, encoding, output_type, callback)
// For Buffer input the encoding argument has no effect, hence the
// plain-input expectations in the Buffer variants below.
t.strictEqual(murmurHash("deadbacaca", "hex", "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.encInputResult);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.result); // input encoding ignored
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.encInputResultHex);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultHex); // input encoding ignored
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", "base64", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.encInputResultBase64);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "base64", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.resultBase64); // input encoding ignored
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResult);
}), undefined);
t.strictEqual(murmurHash(Buffer.from("deadbacaca"), "hex", "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result); // input encoding ignored
}), undefined);
// murmurHash(data{string}, encoding, output[, offset[, length]], callback)
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.encInputResult);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.encInputResult);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.encInputResult24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
// murmurHash(data{string}, encoding, seed, callback)
t.strictEqual(murmurHash("deadbacaca", "ascii", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.resultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.encInputResultSeed);
}), undefined);
// murmurHash(data{string}, encoding, seed, output[, offset[, length]], callback)
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.encInputResultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf, 2, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.encInputResultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}), undefined);
}
{
let buf: Buffer = Buffer.alloc(8);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, buf, 2, 3, (err: Error, res: Buffer) => {
t.error(err);
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.encInputResult24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}), undefined);
}
// murmurHash(data{string}, encoding, seed, output_type, callback)
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "number", (err: Error, res: number) => {
t.error(err);
t.strictEqual(res, expected.encInputResultSeed);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "hex", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.encInputResultSeedHex);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "base64", (err: Error, res: string) => {
t.error(err);
t.strictEqual(res, expected.encInputResultSeedBase64);
}), undefined);
t.strictEqual(murmurHash("deadbacaca", "hex", 123456, "buffer", (err: Error, res: Buffer) => {
t.error(err);
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResultSeed);
}), undefined);
}
/**
 * Runs the promise-based murmurHash test suite, starting it on a resolved
 * Bluebird promise and routing any unexpected rejection to tap's `threw`
 * reporter so failures surface in the test harness.
 */
function testMurmurHash32Async(murmurHash: promisify.MurmurHashFnAsyncI, expected: Expected, t: Test): PromiseLike<void[]> {
    const runSuite = () => testMurmurHash32AsyncInternal(murmurHash, expected, t);
    return Bluebird.resolve().then(runSuite).catch(t.threw);
}
/**
 * Exercises every promise-returning overload of a 32-bit murmurHash variant.
 * Each call returns a promise; the assertion for its result is attached via
 * `.then()` and all promises are collected so the caller can await the whole
 * suite through the returned `Bluebird.all(promises)`. `expected` carries the
 * precomputed hash values per input/seed/encoding combination, and
 * `expected.readResult` names the Buffer read method used to extract the
 * 32-bit hash from an output Buffer.
 */
function testMurmurHash32AsyncInternal(murmurHash: promisify.MurmurHashFnAsyncI, expected: Expected, t: Test): PromiseLike<void[]> {
// Declares the exact number of assertions that must run across all
// promise continuations below.
t.plan(122);
let promises: PromiseLike<void>[] = [];
// murmurHash(data)
promises.push(murmurHash("deadbacaca").then((res: number) => {
t.strictEqual(res, expected.result);
}));
promises.push(murmurHash(Buffer.from("deadbacaca")).then((res: number) => {
t.strictEqual(res, expected.result);
}));
// murmurHash(data, output[, offset[, length]])
// Output buffers are 8 bytes (hash is only 4) so the assertions can also
// verify that the bytes around the written hash are left untouched (zero).
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.result);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.result);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.result);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.result);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
// length 3 truncates the hash: only 24 bits are written at offset 2.
t.strictEqual(res.readUIntBE(2, 3), expected.result24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
// murmurHash(data{String}, encoding|output_type[, seed])
promises.push(murmurHash("deadbacaca", "ascii").then((res: number) => {
t.strictEqual(res, expected.result);
}));
promises.push(murmurHash("deadbacaca", "ascii", 123456).then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash("deadbacaca", "hex").then((res: number) => {
t.strictEqual(res, expected.encInputResult);
}));
promises.push(murmurHash("deadbacaca", "hex", 123456).then((res: number) => {
t.strictEqual(res, expected.encInputResultSeed);
}));
promises.push(murmurHash("deadbacaca", "number").then((res: number) => {
t.strictEqual(res, expected.result);
}));
promises.push(murmurHash("deadbacaca", "number", 123456).then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash("deadbacaca", "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}));
promises.push(murmurHash("deadbacaca", "buffer", 123456).then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}));
// murmurHash(data{Buffer}, output_type[, seed])
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex").then((res: string) => {
t.strictEqual(res, expected.resultHex);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex", 123456).then((res: string) => {
t.strictEqual(res, expected.resultSeedHex);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "buffer", 123456).then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}));
// murmurHash(data, seed)
promises.push(murmurHash("deadbacaca", 123456).then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456).then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
// murmurHash(data, seed, output[, offset[, length]])
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", 123456, buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", 123456, buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.resultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.resultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", 123456, buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.result24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
// murmurHash(data, seed, output_type)
promises.push(murmurHash("deadbacaca", 123456, "number").then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, "number").then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash("deadbacaca", 123456, "hex").then((res: string) => {
t.strictEqual(res, expected.resultSeedHex);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, "hex").then((res: string) => {
t.strictEqual(res, expected.resultSeedHex);
}));
promises.push(murmurHash("deadbacaca", 123456, "base64").then((res: string) => {
t.strictEqual(res, expected.resultSeedBase64);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, "base64").then((res: string) => {
t.strictEqual(res, expected.resultSeedBase64);
}));
promises.push(murmurHash("deadbacaca", 123456, "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), 123456, "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.resultSeed);
}));
// murmurHash(data, encoding, output_type)
// For Buffer input the encoding argument has no effect, hence the
// plain-input expectations in the Buffer variants below.
promises.push(murmurHash("deadbacaca", "hex", "number").then((res: number) => {
t.strictEqual(res, expected.encInputResult);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex", "number").then((res: number) => {
t.strictEqual(res, expected.result); // input encoding ignored
}));
promises.push(murmurHash("deadbacaca", "hex", "hex").then((res: string) => {
t.strictEqual(res, expected.encInputResultHex);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex", "hex").then((res: string) => {
t.strictEqual(res, expected.resultHex); // input encoding ignored
}));
promises.push(murmurHash("deadbacaca", "hex", "base64").then((res: string) => {
t.strictEqual(res, expected.encInputResultBase64);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex", "base64").then((res: string) => {
t.strictEqual(res, expected.resultBase64); // input encoding ignored
}));
promises.push(murmurHash("deadbacaca", "hex", "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResult);
}));
promises.push(murmurHash(Buffer.from("deadbacaca"), "hex", "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.result); // input encoding ignored
}));
// murmurHash(data{string}, encoding, output[, offset[, length]])
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.encInputResult);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.encInputResult);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.encInputResult24);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
// murmurHash(data{string}, encoding, seed)
promises.push(murmurHash("deadbacaca", "ascii", 123456).then((res: number) => {
t.strictEqual(res, expected.resultSeed);
}));
promises.push(murmurHash("deadbacaca", "hex", 123456).then((res: number) => {
t.strictEqual(res, expected.encInputResultSeed);
}));
// murmurHash(data{string}, encoding, seed, output[, offset[, length]])
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", 123456, buf).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res[expected.readResult](0), expected.encInputResultSeed);
t.strictEqual(res.readUInt32BE(4), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", 123456, buf, 2).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res[expected.readResult](2), expected.encInputResultSeed);
t.strictEqual(res.readUInt16BE(6), 0);
}));
}
{
let buf: Buffer = Buffer.alloc(8);
promises.push(murmurHash("deadbacaca", "hex", 123456, buf, 2, 3).then((res: Buffer) => {
t.strictEqual(buf, res);
t.strictEqual(res.readUInt16BE(0), 0);
t.strictEqual(res.readUIntBE(2, 3), expected.encInputResult24Seed);
t.strictEqual(res.readUIntBE(5, 3), 0);
}));
}
// murmurHash(data{string}, encoding, seed, output_type)
promises.push(murmurHash("deadbacaca", "hex", 123456, "number").then((res: number) => {
t.strictEqual(res, expected.encInputResultSeed);
}));
promises.push(murmurHash("deadbacaca", "hex", 123456, "hex").then((res: string) => {
t.strictEqual(res, expected.encInputResultSeedHex);
}));
promises.push(murmurHash("deadbacaca", "hex", 123456, "base64").then((res: string) => {
t.strictEqual(res, expected.encInputResultSeedBase64);
}));
promises.push(murmurHash("deadbacaca", "hex", 123456, "buffer").then((res: Buffer) => {
t.type(res, Buffer);
t.strictEqual(res.length, 4);
t.strictEqual(res[expected.readResult](0), expected.encInputResultSeed);
}));
// Resolve only when every scheduled assertion has run.
return Bluebird.all(promises);
}

View File

@ -1,61 +0,0 @@
// import * as os from "os";
import { getHashes, createHash } from "../../stream";
import { test } from "tap";
// Verifies that the stream API advertises the expected set of algorithm names.
// Fixes the typo in the test title ("shoud" -> "should").
test("should have algorithms", (t) => {
    const hashes = getHashes();
    t.type(hashes, Array);
    // Sort before comparing so the assertion does not depend on registration order.
    t.deepEqual(hashes.sort(), [
        "murmurhash",
        "murmurhash128",
        "murmurhash128x64",
        "murmurhash128x86"]);
    t.end();
});
// Smoke test for the streaming hasher: empty-input digest, incremental
// writes, and flipping the endianness of the readable result.
test("should have stream and hasher methods", (t) => {
    t.plan(4);
    const hash32 = createHash("murmurhash");
    // A freshly created hasher digests the empty input to 0.
    t.strictEqual(hash32.digest('number'), 0);
    hash32.setEncoding('hex');
    hash32.write("dead");
    hash32.end("bacaca");
    hash32.once('readable', () => {
        t.strictEqual(hash32.endianness, "BE");
        hash32.endianness = "LE";
        t.strictEqual(hash32.endianness, "LE");
        t.strictEqual(hash32.read(), "7481d57a");
    });
});
// Round-trips hasher state through toJSON()/createHash(serial) and checks
// that constructor options, endianness and cloned instances all keep
// producing the same hash value.
test("should create hash with options", (t) => {
    t.plan(13);
    const seeded = createHash("murmurhash", {endianness: "LE", seed: 123456});
    t.strictEqual(seeded.endianness, "LE");
    // update() is chainable: it must return the hasher itself.
    t.strictEqual(seeded.update("dead", "hex"), seeded);
    t.strictEqual(seeded.digest("number"), 3787564060);
    t.strictEqual(seeded.digest("hex"), "1ca4c1e1");
    const serial = seeded.toJSON();
    t.type(serial, Object);
    // Restoring without options yields a big-endian hasher with the same state.
    const restoredBE = createHash(serial);
    t.strictEqual(restoredBE.endianness, "BE");
    t.strictEqual(restoredBE.digest("number"), 3787564060);
    t.strictEqual(restoredBE.digest("hex"), "e1c1a41c");
    // Explicit options passed alongside the serialized state take effect.
    const restoredLE = createHash(serial, {endianness: "LE"});
    t.strictEqual(restoredLE.endianness, "LE");
    t.strictEqual(restoredLE.digest("number"), 3787564060);
    t.strictEqual(restoredLE.digest("hex"), "1ca4c1e1");
    // Cloning from a live hasher must produce an equivalent, independent stream.
    const clone = createHash(restoredLE);
    restoredLE.setEncoding('base64');
    restoredLE.end("bacaca", "hex");
    restoredLE.once('readable', () => {
        t.strictEqual(restoredLE.read(), "VuqQLQ==");
    });
    clone.setEncoding('base64');
    clone.end("bacaca", "hex");
    clone.once('readable', () => {
        t.strictEqual(clone.read(), "VuqQLQ==");
    });
});

View File

@ -1,33 +0,0 @@
{
"compilerOptions": {
"alwaysStrict": true,
"inlineSourceMap": true,
"lib": [
"es2015"
],
"listEmittedFiles": true,
"listFiles": false,
"noImplicitAny": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"suppressImplicitAnyIndexErrors": true,
"target": "es5"
},
"exclude": [],
"typedocOptions": {
"excludeExternals": true,
"excludeNotExported": false,
"externalPattern": "**/*node*",
"exclude": ["src", "test"],
"ignoreCompilerErrors": true,
"includeDeclarations": true,
"mode": "modules",
"module": "commonjs",
"name": "murmurhash-native",
"out": "doc",
"readme": "README.md",
"target": "ES5",
"theme": "default",
"sourcefile-url-prefix": "https://github.com/royaltm/node-murmurhash-native/blob/master/"
}
}

View File

@ -1,62 +0,0 @@
"use strict";
/*
Patches tap to allow expecting an uncaught exception with t.throwsUncaughtException(wanted)
*/
var assert = require('assert');
var tap = require("tap");
if (module === require.main) {
tap.pass('ok')
return;
}
var originalThrew = tap.threw;
assert.strictEqual(typeof originalThrew, 'function', 'tap.threw should be a function');
tap.threw = threw.bind(tap);
tap.Test.prototype.threw = threw;
var uncaughtExceptionHandler;
// Dispatches an error either to the armed throwsUncaughtException() guard or,
// when no guard is active, to tap's original reporter. If the guard itself
// throws, that failure is forwarded to the original reporter instead.
function threw(error) {
    var guard = uncaughtExceptionHandler;
    if (guard == null) {
        originalThrew(error);
        return;
    }
    try {
        guard(error);
    } catch (guardError) {
        originalThrew(guardError);
    }
}
// Arranges for the NEXT uncaught exception routed through the patched threw()
// to be asserted against `wanted` (same matching contract as t.throws).
// Only one guard may be armed at a time; if the test tears down while the
// guard is still armed (no uncaught exception ever arrived), the teardown
// fails the test with `message`.
tap.Test.prototype.throwsUncaughtException = function throwsUncaughtException(wanted, message, extra) {
var t = this;
message = message || 'should throw uncaught exception';
if (uncaughtExceptionHandler != null) {
throw new Error('Only one throwsUncaughtException guard may be active at a time!');
}
// Pre-build the "never fired" failure here, capturing a stack that points at
// the call site instead of at the teardown callback.
var teardownError = new Error(message);
Error.captureStackTrace(teardownError, throwsUncaughtException);
t.teardown(function() {
if (uncaughtExceptionHandler != null) {
uncaughtExceptionHandler = null;
throw teardownError;
}
});
// Arm the guard: when threw() delivers an error, disarm first, then assert
// that the error matches `wanted` via the regular t.throws machinery.
uncaughtExceptionHandler = function(error) {
uncaughtExceptionHandler = null;
t.assertStack = error.stack;
t.throws(function() {
throw error
}, wanted, message, extra);
};
};
module.exports = tap;