
Commit c21f45e

Merge pull request #154 from gregolsky/perf
Improve load() performance + minor fixes
2 parents: 5955e68 + c13a578 · commit c21f45e
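For context, the load() path this PR optimizes is the ordinary session read path of the client. A minimal sketch of such a call (the server URL, database name, and document id are illustrative assumptions, not part of the commit):

const { DocumentStore } = require("ravendb");

// assumed local setup; adjust the URL, database and id for your environment
const store = new DocumentStore("http://localhost:8080", "Perf");
store.initialize();

(async () => {
    const session = store.openSession();
    // load() fetches a document by id and deserializes the server response;
    // the parsing pipeline behind it is what perf/loadPipeline.js below measures
    const doc = await session.load("users/1-A");
    console.log(doc);
    store.dispose();
})();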


52 files changed: +969 −474 lines changed

.travis.yml

Lines changed: 3 additions & 2 deletions
@@ -5,7 +5,8 @@ node_js:
   - "10"
 env:
   matrix:
-    - RAVENDB_TEST_SERVER_PATH=./RavenDB/Server/Raven.Server
+    - RAVENDB_SERVER_DOWNLOAD="https://hibernatingrhinos.com/downloads/RavenDB%20for%20Linux%20x64/latest?buildType=stable&version=4.0"
+      RAVENDB_TEST_SERVER_PATH=./RavenDB/Server/Raven.Server
       RAVENDB_TEST_SERVER_CERTIFICATE_PATH=./certs/server.pfx
       RAVENDB_TEST_CLIENT_CERT_PASSPHRASE=client11
       RAVENDB_TEST_CLIENT_CERT_PATH=./certs/nodejs.pem
@@ -14,7 +15,7 @@ env:
 before_install:
   - sudo apt-get -qq update
   - sudo apt-get install -y libunwind8 wget libicu52 libssl-dev curl unzip gettext libcurl4-openssl-dev zlib1g uuid-dev bzip2 openssl
-  - wget -O RavenDB.tar.bz2 https://hibernatingrhinos.com/downloads/RavenDB%20for%20Linux%20x64/latest?buildType=nightly
+  - wget -O RavenDB.tar.bz2 "$RAVENDB_SERVER_DOWNLOAD"
   - tar xjf RavenDB.tar.bz2
   - mkdir certs; pushd certs
   - openssl genrsa -out ca.key 2048

package-lock.json

Lines changed: 1 addition & 46 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 1 addition & 3 deletions
@@ -1,6 +1,6 @@
 {
   "name": "ravendb",
-  "version": "4.0.3",
+  "version": "4.0.4",
   "description": "RavenDB client for Node.js",
   "files": [
     "dist/"
@@ -76,7 +76,6 @@
     "bluebird": "^3.4.7",
     "change-case": "^3.0.2",
     "md5-hex": "^2.0.0",
-    "merge2": "^1.2.2",
     "moment": "^2.22.0",
     "pluralize": "^4.0.0",
     "qs": "^6.5.2",
@@ -86,7 +85,6 @@
     "semaphore": "^1.1.0",
     "stream-json": "^1.1.1",
     "string-builder": "^0.1.6",
-    "through2": "^2.0.3",
     "util.promisify": "^1.0.0",
     "uuid": "^3.2.1",
     "verror": "^1.10.0",

perf/common.js

Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+import { DocumentStore } from "../src";
+
+// tslint:disable-next-line:no-var-requires
+const settings = require("./settings.json");
+
+export function getStore() {
+    return new DocumentStore(settings.urls, settings.database);
+}
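A possible way a benchmark script could use this helper; the settings.json shape is inferred from the DocumentStore constructor arguments and is an assumption:

// hypothetical settings.json: { "urls": ["http://localhost:8080"], "database": "Perf" }
import { getStore } from "./common";

const store = getStore();
store.initialize();
// ... run the measured operations against `store` ...
store.dispose();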

perf/data/load_data.json

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

perf/data/simple_data.json

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+{
+    "Results": [
+        {
+            "TestKey": "TestValue",
+            "NumberKey": 1,
+            "@metadata": {
+                "@collection": "hahahah",
+                "Raven-Node-Type": "TypeName",
+                "@nested-object-types": {
+                    "start": "date"
+                },
+                "@attachments": [
+                    {
+                        "Name": "test.png",
+                        "Hash": "asdij%*(J#JSA",
+                        "ContentType": "aaaa"
+                    }
+                ]
+            }
+        }
+    ],
+    "Includes": {}
+}

perf/loadPipeline.js

Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
+const { DocumentStore } = require("../dist");
+const { GetDocumentsCommand } = require("../dist/Documents/Commands/GetDocumentsCommand");
+const { TransformKeysJsonStream } = require("../dist/Mapping/Json/Streams/TransformKeysJsonStream");
+const fs = require('fs');
+const parser = require("stream-json")
+const pick = require("stream-json/filters/Pick")
+const { ignore } = require("stream-json/filters/Ignore")
+const { replace } = require("stream-json/filters/Replace")
+const streamArray = require("stream-json/streamers/StreamArray")
+const streamObject = require("stream-json/streamers/StreamObject")
+const streamValues = require("stream-json/streamers/StreamValues")
+const StreamUtil = require("../dist/Utility/StreamUtil");
+const stream = require("readable-stream");
+const Asm = require('stream-json/Assembler');
+
+const store = new DocumentStore("http://localhost:8080", "Perf");
+store.initialize();
+
+async function bench(name, attempts, run) {
+    const benchName = `${name} x${ attempts }`;
+    console.time(benchName);
+    for (let n = 0; n < attempts; n++) {
+        await run();
+    }
+    console.timeEnd(benchName);
+}
+
+(async function main() {
+    {
+        const name = "4.0.4-load-full-pipeline";
+        await bench(name, 10, loadPipeline);
+        await bench(name, 50, loadPipeline);
+        await bench(name, 100, loadPipeline);
+    }
+
+    // {
+    //     const name = "stream-json-with-proper-casing";
+    //     // enhancedStreamJson();
+    //     await bench(name, 10, enhancedStreamJson);
+    //     await bench(name, 50, enhancedStreamJson);
+    //     await bench(name, 100, enhancedStreamJson);
+    // }
+
+    store.dispose();
+}());
+
+async function loadPipeline() {
+    const dataStream = fs.createReadStream("./data/load_data.json");
+    let body;
+    const results = await GetDocumentsCommand
+        .parseDocumentsResultResponseAsync(dataStream, store.conventions, _ => body = _);
+}
+
+async function rawStreamJson() {
+    const dataStream = fs.createReadStream("./data/load_data.json");
+    const parserStream = parser();
+    const asm = Asm.connectTo(parserStream);
+    const donePromise = new Promise(resolve => {
+        asm.on('done', asm => {
+            resolve();
+        });
+    });
+    await StreamUtil.pipelineAsync([
+        dataStream,
+        parserStream,
+        ignore({ filter: "asasas" })
+    ]);
+    await donePromise;
+}
+
+
+
+async function enhancedStreamJson() {
+    const dataStream = fs.createReadStream("./data/load_data.json");
+    const streams = [
+        dataStream,
+        parser({
+            packKeys: true,
+            packStrings: true,
+            packValues: true,
+            packNumbers: true,
+            streamNumbers: false,
+            streamValues: false,
+            streamKeys: false,
+            streamStrings: false
+        }),
+        new TransformKeysJsonStream({
+            getCurrentTransform: buildEntityKeysTransform("camel")
+        })
+    ];
+    const asm = Asm.connectTo(streams[streams.length - 1]);
+    const donePromise = new Promise(resolve => {
+        asm.on('done', asm => {
+            resolve(asm.current);
+        });
+    });
+    await StreamUtil.pipelineAsync(streams);
+    const result = await donePromise;
+    // console.log(JSON.stringify(result, null, 2));
+}
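The rawStreamJson and enhancedStreamJson experiments above rely on stream-json's Assembler to rebuild a JavaScript object from the token stream. A minimal standalone sketch of that pattern (the data file path is the one added by this commit; everything else is assumed):

const fs = require("fs");
const { parser } = require("stream-json");
const Asm = require("stream-json/Assembler");

// tokenize the raw JSON and let the Assembler rebuild the full object;
// 'done' fires once the root value has been assembled
const tokens = fs.createReadStream("./data/load_data.json").pipe(parser());
const asm = Asm.connectTo(tokens);
asm.on("done", asm => {
    console.log(Object.keys(asm.current)); // asm.current holds the assembled object
});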
