This repository has been archived by the owner on Feb 12, 2024. It is now read-only.
/
get.js
158 lines (136 loc) · 4.64 KB
/
get.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
import { exporter, recursive } from 'ipfs-unixfs-exporter'
import errCode from 'err-code'
import { normalizeCidPath } from '../utils.js'
import { withTimeoutOption } from 'ipfs-core-utils/with-timeout-option'
import { CID } from 'multiformats/cid'
import { pack } from 'it-tar'
import { pipe } from 'it-pipe'
import Pako from 'pako'
import toBuffer from 'it-to-buffer'
// Default gzip compression level (balanced speed/ratio); valid levels are
// -1 (zlib default) and 0 ("no compression") through 9 ("best compression").
// https://www.gnu.org/software/gzip/manual/gzip.html
const DEFAULT_COMPRESSION_LEVEL = 6
/**
 * @typedef {object} Context
 * @property {import('ipfs-repo').IPFSRepo} repo
 * @property {import('../types').Preload} preload
 *
 * @param {Context} context
 */
export function createGet ({ repo, preload }) {
  /**
   * Export a file or directory tree from the repo, optionally packed as a
   * tar archive (`options.archive`) and/or gzipped (`options.compress`).
   *
   * @type {import('ipfs-core-types/src/root').API<{}>["get"]}
   */
  async function * get (ipfsPath, options = {}) {
    // -1 is pako's "use zlib default" sentinel, 0-9 are explicit levels
    if (options.compressionLevel != null && (options.compressionLevel < -1 || options.compressionLevel > 9)) {
      throw errCode(new Error('Compression level must be between -1 and 9'), 'ERR_INVALID_PARAMS')
    }

    if (options.preload !== false) {
      let pathComponents

      try {
        pathComponents = normalizeCidPath(ipfsPath).split('/')
      } catch (/** @type {any} */ err) {
        throw errCode(err, 'ERR_INVALID_PATH')
      }

      // preload only the root CID of the requested path
      preload(CID.parse(pathComponents[0]))
    }

    /**
     * Gzip transform shared by the file and directory branches below.
     * Pako compresses a complete buffer, so the stream is collected first.
     *
     * @param {AsyncIterable<Uint8Array>} source
     */
    async function * gzip (source) {
      const buf = await toBuffer(source)

      // `??` (not `||`) so an explicit level of 0 ("no compression") is honoured
      yield Pako.gzip(buf, {
        level: options.compressionLevel ?? DEFAULT_COMPRESSION_LEVEL
      })
    }

    const ipfsPathOrCid = CID.asCID(ipfsPath) || ipfsPath
    const file = await exporter(ipfsPathOrCid, repo.blocks, options)

    if (file.type === 'file' || file.type === 'raw') {
      /** @type {any[]} */
      const args = []

      if (!options.compress || options.archive === true) {
        // plain output or compressed archive: wrap the file in a
        // single-entry tar stream
        args.push([{
          header: {
            name: file.path,
            mode: file.type === 'file' && file.unixfs.mode,
            mtime: file.type === 'file' && file.unixfs.mtime ? new Date(file.unixfs.mtime.secs * 1000) : undefined,
            size: file.size,
            type: 'file'
          },
          body: file.content()
        }],
        pack()
        )
      } else {
        // compress-without-archive: emit the raw file bytes, no tar framing
        args.push(
          file.content
        )
      }

      if (options.compress) {
        args.push(gzip)
      }

      // @ts-expect-error cannot derive type
      yield * pipe(...args)
      return
    }

    if (file.type === 'directory') {
      /** @type {any[]} */
      const args = [
        recursive(ipfsPathOrCid, repo.blocks, options),
        /**
         * Map exported UnixFS entries onto tar import candidates
         *
         * @param {AsyncIterable<import('ipfs-unixfs-exporter').UnixFSEntry>} source
         */
        async function * (source) {
          for await (const entry of source) {
            /** @type {import('it-tar').TarImportCandidate} */
            const output = {
              header: {
                name: entry.path,
                size: entry.size
              }
            }

            if (entry.type === 'file') {
              output.header.type = 'file'
              output.header.mode = entry.unixfs.mode != null ? entry.unixfs.mode : undefined
              output.header.mtime = entry.unixfs.mtime ? new Date(entry.unixfs.mtime.secs * 1000) : undefined
              output.body = entry.content()
            } else if (entry.type === 'raw') {
              // raw nodes carry no unixfs metadata
              output.header.type = 'file'
              output.body = entry.content()
            } else if (entry.type === 'directory') {
              output.header.type = 'directory'
              output.header.mode = entry.unixfs.mode != null ? entry.unixfs.mode : undefined
              output.header.mtime = entry.unixfs.mtime ? new Date(entry.unixfs.mtime.secs * 1000) : undefined
            } else {
              throw errCode(new Error('Not a UnixFS node'), 'ERR_NOT_UNIXFS')
            }

            yield output
          }
        },
        pack()
      ]

      if (options.compress) {
        // a directory can only be compressed as a tar archive
        if (!options.archive) {
          throw errCode(new Error('file is not regular'), 'ERR_INVALID_PATH')
        }

        args.push(gzip)
      }

      // @ts-expect-error cannot derive type
      yield * pipe(...args)
      return
    }

    throw errCode(new Error('Not a UnixFS node'), 'ERR_NOT_UNIXFS')
  }

  return withTimeoutOption(get)
}