export interface FSReadFileAsChunksOptions {
/**
* Maximum size of each chunk, in bytes.
*
* Each yielded chunk is not guaranteed to be filled to this size unless the property {@linkcode reduceChunks} is set to `true`.
* @default {1024}
*/
chunkSize?: number;
/**
* Whether to reduce the number of chunks by coalescing reads, so that every yielded chunk except possibly the last is exactly `chunkSize` bytes.
* @default {false}
*/
reduceChunks?: boolean;
}
/**
* Read the file as chunks asynchronously, emulating a {@linkcode ReadableStream}.
*
* To read the file as chunks asynchronously without these options, using a {@linkcode ReadableStream} directly is more efficient.
*
* > **🛡️ Runtime Permissions**
* >
* > - File System - Read \[Deno: `read`; NodeJS 🧪: `fs-read`\]
* > - *Resources*
* @param {string | URL} filePath Path of the file.
* @param {FSReadFileAsChunksOptions} [options={}] Options.
* @returns {AsyncGenerator<Uint8Array>} An async generator that yields the file content in chunks, emulating a `ReadableStream`.
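* @example A minimal usage sketch; the file path `./example.bin` is only illustrative:
* ```ts
* // Import specifier is illustrative; adjust to however this module is consumed.
* import { readFileAsChunks } from "./stream.ts";
* // Yield ~4 KiB chunks, coalescing short reads into full-size chunks.
* for await (const chunk of readFileAsChunks("./example.bin", { chunkSize: 4096, reduceChunks: true })) {
* 	console.log(chunk.byteLength);
* }
* ```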
*/
export async function* readFileAsChunks(filePath: string | URL, options: FSReadFileAsChunksOptions = {}): AsyncGenerator<Uint8Array> {
const {
chunkSize = 1024,
reduceChunks = false
}: FSReadFileAsChunksOptions = options;
using file: Deno.FsFile = await Deno.open(filePath);
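// Carry-over buffer used when `reduceChunks` is enabled; holds bytes not yet emitted as a full chunk.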
let chunkGlobal: Uint8Array = new Uint8Array([]);
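// Number of bytes read by the last call; `null` signals end of file.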
let state: number | null = 0;
while (state !== null) {
const chunkCurrent: Uint8Array = new Uint8Array(chunkSize);
state = await file.read(chunkCurrent);
if (
state === null ||
state === 0
) {
continue;
}
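// With `reduceChunks`, coalesce reads so that every yielded chunk except possibly the last is exactly `chunkSize` bytes; otherwise yield each read as-is.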
if (reduceChunks) {
chunkGlobal = Uint8Array.from([...chunkGlobal, ...chunkCurrent.slice(0, state)]);
while (chunkGlobal.length >= chunkSize) {
yield chunkGlobal.slice(0, chunkSize);
chunkGlobal = chunkGlobal.slice(chunkSize);
}
} else {
yield chunkCurrent.slice(0, state);
}
}
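// Flush any remaining bytes that did not fill a complete chunk.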
if (reduceChunks && chunkGlobal.length > 0) {
yield chunkGlobal;
}
}
/**
* Read the file as chunks synchronously, emulating a {@linkcode ReadableStream}.
*
* To read the file as chunks asynchronously without these options, using a {@linkcode ReadableStream} directly is more efficient.
*
* > **🛡️ Runtime Permissions**
* >
* > - File System - Read \[Deno: `read`; NodeJS 🧪: `fs-read`\]
* > - *Resources*
* @param {string | URL} filePath Path of the file.
* @param {FSReadFileAsChunksOptions} [options={}] Options.
* @returns {Generator<Uint8Array>} A generator that yields the file content in chunks, emulating a `ReadableStream`.
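* @example A minimal usage sketch; the file path `./example.bin` is only illustrative:
* ```ts
* // Import specifier is illustrative; adjust to however this module is consumed.
* import { readFileAsChunksSync } from "./stream.ts";
* for (const chunk of readFileAsChunksSync("./example.bin", { chunkSize: 4096 })) {
* 	console.log(chunk.byteLength);
* }
* ```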
*/
export function* readFileAsChunksSync(filePath: string | URL, options: FSReadFileAsChunksOptions = {}): Generator<Uint8Array> {
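// Mirrors readFileAsChunks, but uses the synchronous Deno file APIs.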
const {
chunkSize = 1024,
reduceChunks = false
}: FSReadFileAsChunksOptions = options;
using file: Deno.FsFile = Deno.openSync(filePath);
let chunkGlobal: Uint8Array = new Uint8Array([]);
let state: number | null = 0;
while (state !== null) {
const chunkCurrent: Uint8Array = new Uint8Array(chunkSize);
state = file.readSync(chunkCurrent);
if (
state === null ||
state === 0
) {
continue;
}
if (reduceChunks) {
chunkGlobal = Uint8Array.from([...chunkGlobal, ...chunkCurrent.slice(0, state)]);
while (chunkGlobal.length >= chunkSize) {
yield chunkGlobal.slice(0, chunkSize);
chunkGlobal = chunkGlobal.slice(chunkSize);
}
} else {
yield chunkCurrent.slice(0, state);
}
}
if (reduceChunks && chunkGlobal.length > 0) {
yield chunkGlobal;
}
}