Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

fix: replace slice with subarray for increased performance #4210

Merged
merged 6 commits on Sep 23, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/ipfs-cli/package.json
Expand Up @@ -85,7 +85,6 @@
"ipfs-http-client": "^58.0.1",
"ipfs-utils": "^9.0.6",
"it-concat": "^2.0.0",
"it-map": "^1.0.6",
"it-merge": "^1.0.3",
"it-pipe": "^2.0.3",
"it-split": "^1.0.0",
Expand All @@ -110,6 +109,7 @@
"ipfs-repo": "^15.0.3",
"it-all": "^1.0.4",
"it-first": "^1.0.4",
"it-map": "^1.0.6",
"it-to-buffer": "^2.0.0",
"nanoid": "^4.0.0",
"ncp": "^2.0.0",
Expand Down
10 changes: 8 additions & 2 deletions packages/ipfs-cli/src/commands/cat.js
Expand Up @@ -6,6 +6,7 @@ import parseDuration from 'parse-duration'
* @property {string} Argv.ipfsPath
* @property {number} Argv.offset
* @property {number} Argv.length
* @property {boolean} Argv.preload
* @property {number} Argv.timeout
*/

Expand All @@ -26,14 +27,19 @@ const command = {
number: true,
describe: 'Maximum number of bytes to read'
},
preload: {
boolean: true,
default: true,
describe: 'Preload this object when adding'
},
timeout: {
string: true,
coerce: parseDuration
}
},

async handler ({ ctx: { ipfs, print }, ipfsPath, offset, length, timeout }) {
for await (const buf of ipfs.cat(ipfsPath, { offset, length, timeout })) {
async handler ({ ctx: { ipfs, print }, ipfsPath, offset, length, preload, timeout }) {
for await (const buf of ipfs.cat(ipfsPath, { offset, length, preload, timeout })) {
print.write(buf)
}
}
Expand Down
2 changes: 0 additions & 2 deletions packages/ipfs-cli/src/commands/get.js
Expand Up @@ -8,7 +8,6 @@ import {
stripControlCharacters
} from '../utils.js'
import { extract } from 'it-tar'
import map from 'it-map'

/**
* @typedef {object} Argv
Expand Down Expand Up @@ -110,7 +109,6 @@ const command = {
await fs.promises.mkdir(path.dirname(outputPath), { recursive: true })
await pipe(
body,
(source) => map(source, buf => buf.slice()),
toIterable.sink(fs.createWriteStream(outputPath))
)
} else if (header.type === 'directory') {
Expand Down
16 changes: 15 additions & 1 deletion packages/ipfs-cli/test/cat.spec.js
Expand Up @@ -9,7 +9,8 @@ import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
const defaultOptions = {
offset: undefined,
length: undefined,
timeout: undefined
timeout: undefined,
preload: true
}

describe('cat', () => {
Expand Down Expand Up @@ -81,4 +82,17 @@ describe('cat', () => {
const out = await cli(`cat ${cid} --timeout=1s`, { ipfs, raw: true })
expect(out).to.deep.equal(buf)
})

it('should cat a file without preloading', async () => {
const cid = CID.parse('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB')
const buf = uint8ArrayFromString('hello world')

ipfs.cat.withArgs(cid.toString(), {
...defaultOptions,
preload: false
}).returns([buf])

const out = await cli(`cat ${cid} --preload=false`, { ipfs, raw: true })
expect(out).to.deep.equal(buf)
})
})
7 changes: 1 addition & 6 deletions packages/ipfs-cli/test/get.spec.js
Expand Up @@ -9,7 +9,6 @@ import sinon from 'sinon'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
import { pack } from 'it-tar'
import { pipe } from 'it-pipe'
import map from 'it-map'
import toBuffer from 'it-to-buffer'
import { clean } from './utils/clean.js'
import Pako from 'pako'
Expand All @@ -27,11 +26,7 @@ const defaultOptions = {
async function * tarballed (files) {
yield * pipe(
files,
pack(),
/**
* @param {AsyncIterable<Uint8Array>} source
*/
(source) => map(source, buf => buf.slice())
pack()
)
}

Expand Down
Expand Up @@ -11,7 +11,7 @@ export async function hamtHashFn (buf) {
// Murmur3 outputs 128 bit but, accidentally, IPFS Go's
// implementation only uses the first 64, so we must do the same
// for parity..
.slice(0, 8)
.subarray(0, 8)
// Invert buffer because that's how Go impl does it
.reverse()
}
4 changes: 2 additions & 2 deletions packages/ipfs-core/src/components/files/write.js
Expand Up @@ -334,7 +334,7 @@ const limitAsyncStreamBytes = (stream, limit) => {
emitted += buf.length

if (emitted > limit) {
yield buf.slice(0, limit - emitted)
yield buf.subarray(0, limit - emitted)

return
}
Expand All @@ -353,7 +353,7 @@ const asyncZeroes = (count, chunkSize = MFS_MAX_CHUNK_SIZE) => {

async function * _asyncZeroes () {
while (true) {
yield buf.slice()
yield buf
}
}

Expand Down
13 changes: 2 additions & 11 deletions packages/ipfs-core/src/components/get.js
Expand Up @@ -6,7 +6,6 @@ import { CID } from 'multiformats/cid'
import { pack } from 'it-tar'
import { pipe } from 'it-pipe'
import Pako from 'pako'
import map from 'it-map'
import toBuffer from 'it-to-buffer'

// https://www.gnu.org/software/gzip/manual/gzip.html
Expand Down Expand Up @@ -57,11 +56,7 @@ export function createGet ({ repo, preload }) {
},
body: file.content()
}],
pack(),
/**
* @param {AsyncIterable<Uint8Array>} source
*/
(source) => map(source, buf => buf.slice())
pack()
)
} else {
args.push(
Expand Down Expand Up @@ -126,11 +121,7 @@ export function createGet ({ repo, preload }) {
yield output
}
},
pack(),
/**
* @param {AsyncIterable<Uint8Array>} source
*/
(source) => map(source, buf => buf.slice())
pack()
]

if (options.compress) {
Expand Down
Expand Up @@ -77,15 +77,15 @@ export class WebSocketMessageChannel {
return
}

const header = buf.slice(offset, HEADER_SIZE + offset)
const header = buf.subarray(offset, HEADER_SIZE + offset)
const length = header.readUInt32BE(1)
offset += HEADER_SIZE

if (buf.length < (length + offset)) {
return
}

const message = buf.slice(offset, offset + length)
const message = buf.subarray(offset, offset + length)
const deserialized = this.handler.deserialize(message)
this.source.push(deserialized)
})
Expand Down
6 changes: 1 addition & 5 deletions packages/ipfs-http-gateway/src/resources/gateway.js
Expand Up @@ -132,11 +132,7 @@ export const Gateway = {
}

const { source, contentType } = await detectContentType(ipfsPath, ipfs.cat(data.cid, catOptions))
const responseStream = toStream.readable((async function * () {
for await (const chunk of source) {
yield chunk.slice() // Convert BufferList to Buffer
}
})())
const responseStream = toStream.readable(source)

const res = h.response(responseStream).code(rangeResponse ? 206 : 200)

Expand Down
13 changes: 10 additions & 3 deletions packages/ipfs-http-response/src/utils/content-type.js
Expand Up @@ -28,11 +28,11 @@ export const detectContentType = async (path, source) => {

if (done) {
return {
source: map(stream, (buf) => buf.slice())
source: map(stream, (buf) => buf.subarray())
}
}

fileSignature = await fileTypeFromBuffer(value.slice())
fileSignature = await fileTypeFromBuffer(value.subarray())

output = (async function * () { // eslint-disable-line require-await
yield value
Expand Down Expand Up @@ -62,7 +62,14 @@ export const detectContentType = async (path, source) => {
}

if (output != null) {
return { source: map(output, (buf) => buf.slice()), contentType }
return {
source: (async function * () {
for await (const list of output) {
yield * list
}
}()),
contentType
}
}

return { source, contentType }
Expand Down