import { writeFile, readFile } from "fs";
import { promisify } from "util";
import { DatabaseInterface } from "./database.model";
import { Context } from "./context.model";
import { NaiveError, NaiveErrorCode as e } from "./error.model";
import { last } from "./util";
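// split a path like "/users/alice" into its non-empty segments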
const splitPath = (path: string): string[] => path.split("/").filter(k => k);
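// promisified fs helpers so file I/O can be awaited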
const write = promisify(writeFile);
const read = promisify(readFile);
export const DEFAULT_CTX = {
  logger: console.log,
  cachePath: `${__dirname}/db.json`,
  maxDbSizeMB: 6
};
/**
 * Implementation of a NoSQL DB that uses paths and objects.
 *
 * See DatabaseInterface for docs on the public API.
 *
 * Uses a plain object as a buffer and reads/writes to a
 * plain JSON file. A better implementation could be backed
 * by something a little nicer and not hold the buffer
 * in memory.
 */
export class Database implements DatabaseInterface {
  private buff: any = {};
  constructor(private ctx: Context = DEFAULT_CTX) {}
  async init(): Promise<void> {
    try {
      const buff = await read(this.ctx.cachePath);
      this.buff = JSON.parse(buff.toString());
    } catch (err) {
      this.ctx.logger("Failed to init database, using empty object");
      this.ctx.logger(err);
    }
  }
  // this currently runs synchronously,
  // but only because we hold the entire DB in memory
  // (which obviously becomes a bad idea at some point)
  async read(path: string): Promise<Object> {
    const pathParts = splitPath(path);
    return this.resolve(pathParts);
  }
  async write(path: string, toWrite: Object): Promise<void> {
    const pathParts = splitPath(path);
    // resolve (creating nodes as needed) the parent of the target key
    const writeTo = this.resolve(pathParts, false, 1);
    writeTo[last(pathParts)] = toWrite;
    await this.serialize();
  }
  async flush(): Promise<void> {
    this.buff = {};
    await this.serialize();
  }
  toString() {
    return JSON.stringify(this.buff);
  }
  /**
   * Resolve the object located at path.
   *
   * If isRead == true, no new nodes will
   * be created, and the function will return
   * null if a missing node is encountered on the path.
   * Otherwise, each missing node on the path is created.
   *
   * Level determines how far down the path to
   * recurse: callers interested in writing may wish
   * to stop higher up the tree (e.g. level = 1
   * resolves the parent of the final key).
   */
  private resolve(
    pathParts: string[],
    isRead: boolean = true,
    level: number = 0
  ): any {
    // start at the root of our buffer
    let node = this.buff;
    for (let i = 0; i < pathParts.length - level; i++) {
      const part: string = pathParts[i];
      // handle a missing node
      if (!node[part]) {
        // if we're reading from the object
        // we want to stop as soon
        // as we hit a missing node
        if (isRead) {
          return null;
        }
        // but if we're writing and the node is missing,
        // we should create it and continue
        else {
          node[part] = {};
        }
      }
      // traverse to the next node
      node = node[part];
    }
    // if the loop never ran (e.g. level stops at the root),
    // this is still the buffer root rather than undefined
    return node;
  }
  /**
   * Serialize the current buffer
   * into a plain JSON file.
   *
   * The output path can be changed by
   * injecting a custom Context.
   *
   * Throws OUT_OF_SPACE if the serialized
   * buffer exceeds maxDbSizeMB.
   */
  private serialize(): Promise<void> {
    if (!this.hasSpace()) throw new NaiveError(e.OUT_OF_SPACE);
    return write(this.ctx.cachePath, this.toString());
  }
  private hasSpace(): boolean {
    // maxDbSizeMB is in MB, so compare against bytes (1 MB = 1024 ** 2 B);
    // string length approximates byte size for single-byte characters
    return this.toString().length <= this.ctx.maxDbSizeMB * 1024 ** 2;
  }
}
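// Example usage (illustrative sketch; the paths and values below are
// hypothetical, not part of the API surface):
//
//   const db = new Database();
//   await db.init();
//   await db.write("/users/alice", { name: "Alice" });
//   const alice = await db.read("/users/alice"); // -> { name: "Alice" }
//   await db.flush();                            // clear and persist an empty DB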