This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Commit

fix: mfs blob import for files larger than 262144b (#4251)
Turn the `Blob`/`File` object into a `ReadableStream`, then turn that into an `AsyncIterator<Uint8Array>`, the same as for the other input types.

Fixes #3601
Fixes #3576
achingbrain committed Nov 17, 2022
1 parent b722041 commit 6be5906
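Below, as a minimal illustrative sketch (not code from this commit), is how the approach described above can be expressed: turn a `Blob`/`File` into a `ReadableStream` with `Blob.stream()`, then wrap that as an `AsyncIterator<Uint8Array>` using the `browser-readablestream-to-it` package this commit adds as a dependency. The helper name `blobToAsyncIterator` is hypothetical.

```js
import browserStreamToIt from 'browser-readablestream-to-it'

/**
 * Yields the contents of a Blob or File as Uint8Array chunks.
 * Blob.stream() returns a ReadableStream<Uint8Array>, which
 * browser-readablestream-to-it exposes as an async iterable.
 *
 * @param {Blob} blob
 * @returns {AsyncIterable<Uint8Array>}
 */
async function * blobToAsyncIterator (blob) {
  yield * browserStreamToIt(blob.stream())
}

// Usage: chunk sizes are chosen by the stream, so a file larger than
// 262144 bytes is consumed the same way as a small one
// for await (const chunk of blobToAsyncIterator(new Blob(['hello world']))) {
//   console.log(chunk.byteLength)
// }
```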
Showing 4 changed files with 27 additions and 50 deletions.
packages/interface-ipfs-core/package.json (3 changes: 2 additions & 1 deletion)
@@ -112,7 +112,8 @@
"pako": "^2.0.4",
"readable-stream": "^4.0.0",
"sinon": "^14.0.0",
"uint8arrays": "^4.0.2"
"uint8arrays": "^4.0.2",
"wherearewe": "^2.0.1"
},
"browser": {
"fs": false,
packages/interface-ipfs-core/src/files/stat.js (22 changes: 22 additions & 0 deletions)
@@ -11,6 +11,7 @@ import { identity } from 'multiformats/hashes/identity'
import { randomBytes } from 'iso-random-stream'
import isShardAtPath from '../utils/is-shard-at-path.js'
import * as raw from 'multiformats/codecs/raw'
+import { isBrowser } from 'wherearewe'

/**
* @typedef {import('ipfsd-ctl').Factory} Factory
@@ -103,6 +104,27 @@ export function testStat (factory, options) {
})
})

+it('should stat a large browser File', async function () {
+  if (!isBrowser) {
+    this.skip()
+  }
+
+  const filePath = `/stat-${Math.random()}/large-file-${Math.random()}.txt`
+  const blob = new Blob([largeFile])
+
+  await ipfs.files.write(filePath, blob, {
+    create: true,
+    parents: true
+  })
+
+  await expect(ipfs.files.stat(filePath)).to.eventually.include({
+    size: largeFile.length,
+    cumulativeSize: 490800,
+    blocks: 2,
+    type: 'file'
+  })
+})
+
it('stats a raw node', async () => {
const filePath = `/stat-${Math.random()}/large-file-${Math.random()}.txt`

packages/ipfs-core/package.json (1 change: 1 addition & 0 deletions)
@@ -102,6 +102,7 @@
"any-signal": "^3.0.0",
"array-shuffle": "^3.0.0",
"blockstore-core": "^2.0.1",
"browser-readablestream-to-it": "^2.0.0",
"dag-jose": "^3.0.1",
"datastore-core": "^8.0.1",
"datastore-pubsub": "^6.0.0",
packages/ipfs-core/src/components/files/utils/to-async-iterator.js (51 changes: 2 additions & 49 deletions)
@@ -1,9 +1,7 @@
import errCode from 'err-code'
import { logger } from '@libp2p/logger'
-import {
-  MFS_MAX_CHUNK_SIZE
-} from '../../../utils.js'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
+import browserStreamToIt from 'browser-readablestream-to-it'

const log = logger('ipfs:mfs:utils:to-async-iterator')

@@ -44,52 +42,7 @@ export function toAsyncIterator (content) {
if (global.Blob && content instanceof global.Blob) {
// HTML5 Blob objects (including Files)
log('Content was an HTML5 Blob')
-
-let index = 0
-
-const iterator = {
-  next: () => {
-    if (index > content.size) {
-      return {
-        done: true
-      }
-    }
-
-    return new Promise((resolve, reject) => {
-      const chunk = content.slice(index, MFS_MAX_CHUNK_SIZE)
-      index += MFS_MAX_CHUNK_SIZE
-
-      const reader = new global.FileReader()
-
-      /**
-       * @param {{ error?: Error }} ev
-       */
-      const handleLoad = (ev) => {
-        // @ts-expect-error No overload matches this call.
-        reader.removeEventListener('loadend', handleLoad, false)
-
-        if (ev.error) {
-          return reject(ev.error)
-        }
-
-        resolve({
-          done: false,
-          value: new Uint8Array(/** @type {ArrayBuffer} */(reader.result))
-        })
-      }
-
-      // @ts-expect-error No overload matches this call.
-      reader.addEventListener('loadend', handleLoad)
-      reader.readAsArrayBuffer(chunk)
-    })
-  }
-}
-
-return {
-  [Symbol.asyncIterator]: () => {
-    return iterator
-  }
-}
+return browserStreamToIt(content.stream())
}

throw errCode(new Error(`Don't know how to convert ${content} into an async iterator`), 'ERR_INVALID_PARAMS')
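A note on the removed loop above: `Blob.slice()` takes start and end offsets rather than a start and a length, so `content.slice(index, MFS_MAX_CHUNK_SIZE)` appears to return an empty blob once `index` reaches `MFS_MAX_CHUNK_SIZE` (262144 bytes, per the commit title), which would explain why imports broke for larger files. For contrast only, here is a hypothetical sketch of correct manual chunking; the commit instead drops the loop in favour of `browserStreamToIt(content.stream())`.

```js
// Hypothetical sketch, not part of this commit: manual Blob chunking with
// correct slice() bounds, assuming MFS_MAX_CHUNK_SIZE = 262144.
const MFS_MAX_CHUNK_SIZE = 262144

/**
 * @param {Blob} blob
 * @returns {AsyncIterable<Uint8Array>}
 */
async function * blobChunks (blob) {
  for (let index = 0; index < blob.size; index += MFS_MAX_CHUNK_SIZE) {
    // slice(start, end): the end offset must advance along with the start
    const chunk = blob.slice(index, index + MFS_MAX_CHUNK_SIZE)
    yield new Uint8Array(await chunk.arrayBuffer())
  }
}
```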
