Your ID is {this.state.id}
Your IPFS version is {this.state.version}
Your IPFS protocol version is {this.state.protocol_version}
name.publish() and name.resolve() initializing...
",
"contributors": [
"Victor Bjelkholm "
],
"license": "MIT",
"dependencies": {
"ipfs-http-client": "../.."
},
"devDependencies": {
"react": "~16.6.3",
"react-dom": "~16.6.3",
"parcel-bundler": "^1.12.4"
},
"browserslist": [
"last 2 versions and not dead and > 2%"
]
}
================================================
FILE: examples/upload-file-via-browser/src/App.js
================================================
/* eslint-disable no-console */
'use strict'
const React = require('react')
const ipfsClient = require('ipfs-http-client')
// Example React component: lets the user pick a file in the browser and
// adds it to IPFS through the HTTP API client created in the constructor.
// NOTE(review): the JSX in render() appears to have been stripped by the
// text extraction that produced this file — `return ( )` below is not
// valid JavaScript as-is; the original source presumably returned a JSX
// tree. Verify against the upstream example before running.
class App extends React.Component {
constructor () {
super()
// Path/CID string of the most recently added file (null until an
// upload completes).
this.state = {
added_file_hash: null
}
// HTTP API client pointing at a local IPFS daemon.
this.ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001')
// bind methods
this.captureFile = this.captureFile.bind(this)
this.saveToIpfs = this.saveToIpfs.bind(this)
this.handleSubmit = this.handleSubmit.bind(this)
}
// File-input change handler. Routes to the wrap-with-directory variant
// when the "keepFilename" checkbox is ticked.
// NOTE(review): saveToIpfsWithFilename is not bound in the constructor,
// unlike the other handlers — confirm it is only ever invoked as
// `this.saveToIpfsWithFilename(...)`, never passed detached.
captureFile (event) {
event.stopPropagation()
event.preventDefault()
if (document.getElementById('keepFilename').checked) {
this.saveToIpfsWithFilename(event.target.files)
} else {
this.saveToIpfs(event.target.files)
}
}
// Example #1
// Add file to IPFS and return a CID
async saveToIpfs (files) {
const source = this.ipfs.add(
[...files],
{
progress: (prog) => console.log(`received: ${prog}`)
}
)
try {
for await (const file of source) {
console.log(file)
// Note: this example stores `file.path`; Example #2 stores the CID.
this.setState({ added_file_hash: file.path })
}
} catch (err) {
console.error(err)
}
}
// Example #2
// Add file to IPFS and wrap it in a directory to keep the original filename
async saveToIpfsWithFilename (files) {
const file = [...files][0]
const fileDetails = {
path: file.name,
content: file
}
const options = {
wrapWithDirectory: true,
progress: (prog) => console.log(`received: ${prog}`)
}
const source = this.ipfs.add(fileDetails, options)
try {
for await (const file of source) {
console.log(file)
this.setState({ added_file_hash: file.cid.toString() })
}
} catch (err) {
console.error(err)
}
}
// Prevent the browser's default form submission (page reload).
handleSubmit (event) {
event.preventDefault()
}
render () {
return (
)
}
}
module.exports = App
================================================
FILE: examples/upload-file-via-browser/src/index.js
================================================
/* eslint-disable no-unused-vars */
'use strict'
const React = require('react')
const ReactDOM = require('react-dom')
const App = require('./App')
// Mount the example app into the page.
// NOTE(review): the JSX element has been stripped by text extraction —
// `ReactDOM.render( , ...)` is not valid JavaScript as-is; the original
// presumably rendered <App /> into the #root element.
ReactDOM.render( , document.getElementById('root'))
================================================
FILE: greenkeeper.json
================================================
{
"groups": {
"default": {
"packages": [
"package.json"
]
}
}
}
================================================
FILE: maintainer.json
================================================
{
"repoLeadMaintainer": {
"name": "Alan Shaw",
"email": "alan.shaw@protocol.ai",
"username": "alanshaw"
},
"workingGroup": {
"name": "JS IPFS",
"entryPoint": "https://github.com/ipfs/js-core"
}
}
================================================
FILE: package.json
================================================
{
"name": "ipfs-http-client",
"version": "42.0.0",
"description": "A client library for the IPFS HTTP API",
"keywords": [
"ipfs"
],
"homepage": "https://github.com/ipfs/js-ipfs-http-client",
"bugs": "https://github.com/ipfs/js-ipfs-http-client/issues",
"license": "(Apache-2.0 OR MIT)",
"leadMaintainer": "Alan Shaw ",
"files": [
"src",
"dist"
],
"main": "src/index.js",
"browser": {
"./src/add/form-data.js": "./src/add/form-data.browser.js",
"./src/lib/buffer-to-form-data.js": "./src/lib/buffer-to-form-data.browser.js",
"ipfs-utils/src/files/glob-source": false
},
"repository": "github:ipfs/js-ipfs-http-client",
"scripts": {
"test": "aegir test",
"test:node": "aegir test -t node",
"test:browser": "aegir test -t browser",
"test:webworker": "aegir test -t webworker",
"test:electron-main": "aegir test -t electron-main",
"test:electron-renderer": "aegir test -t electron-renderer",
"test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless",
"test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless",
"lint": "aegir lint",
"build": "aegir build",
"release": "aegir release ",
"release-minor": "aegir release --type minor ",
"release-major": "aegir release --type major ",
"coverage": "npx nyc -r html npm run test:node -- --bail"
},
"dependencies": {
"abort-controller": "^3.0.0",
"bignumber.js": "^9.0.0",
"bs58": "^4.0.1",
"buffer": "^5.4.2",
"cids": "~0.7.1",
"debug": "^4.1.0",
"form-data": "^3.0.0",
"ipfs-block": "~0.8.1",
"ipfs-utils": "^0.7.1",
"ipld-dag-cbor": "^0.15.1",
"ipld-dag-pb": "^0.18.2",
"ipld-raw": "^4.0.1",
"it-tar": "^1.1.1",
"it-to-stream": "^0.1.1",
"iterable-ndjson": "^1.1.0",
"ky": "^0.15.0",
"ky-universal": "^0.3.0",
"merge-options": "^2.0.0",
"multiaddr": "^7.2.1",
"multiaddr-to-uri": "^5.1.0",
"multibase": "~0.6.0",
"multicodec": "^1.0.0",
"multihashes": "~0.4.14",
"parse-duration": "^0.1.1",
"stream-to-it": "^0.2.0"
},
"devDependencies": {
"aegir": "^20.4.1",
"async": "^3.1.0",
"browser-process-platform": "~0.1.1",
"go-ipfs-dep": "^0.4.23-3",
"interface-ipfs-core": "^0.131.7",
"ipfsd-ctl": "^3.0.0",
"it-all": "^1.0.1",
"it-concat": "^1.0.0",
"it-pipe": "^1.1.0",
"nock": "^11.7.2"
},
"engines": {
"node": ">=10.3.0",
"npm": ">=3.0.0"
},
"contributors": [
"Alan Shaw ",
"Alan Shaw ",
"Alex Mingoia ",
"Alex Potsides ",
"Antonio Tenorio-Fornés ",
"Bruno Barbieri ",
"Clemo ",
"Connor Keenan ",
"Daniel Constantin ",
"Danny ",
"David Braun ",
"David Dias ",
"Dietrich Ayala ",
"Diogo Silva ",
"Dmitriy Ryajov ",
"Dmitry Nikulin ",
"Donatas Stundys ",
"Fil ",
"Filip Š ",
"Francisco Baio Dias ",
"Friedel Ziegelmayer ",
"Gar ",
"Gavin McDermott ",
"Gopalakrishna Palem ",
"Greenkeeper ",
"Haad ",
"Harlan T Wood ",
"Harlan T Wood ",
"Henrique Dias ",
"Holodisc ",
"Hugo Dias ",
"Hugo Dias ",
"JGAntunes ",
"Jacob Heun ",
"James Halliday ",
"Jason Carver ",
"Jason Papakostas ",
"Jeff Downie ",
"Jeromy ",
"Jeromy ",
"Jim Pick ",
"Joe Turgeon ",
"Jonathan ",
"Juan Batiz-Benet ",
"Kevin Wang ",
"Kristoffer Ström ",
"Marcin Rataj ",
"Matt Bell ",
"Matt Ober ",
"Maxime Lathuilière ",
"Michael Bradley ",
"Michael Muré ",
"Michael Muré ",
"Mikeal Rogers ",
"Mitar ",
"Mithgol ",
"Mohamed Abdulaziz ",
"Nitin Patel <31539366+niinpatel@users.noreply.github.com>",
"Nuno Nogueira ",
"Níckolas Goline ",
"Oli Evans ",
"Orie Steele ",
"Paul Cowgill ",
"Pedro Santos ",
"Pedro Santos ",
"Pedro Teixeira ",
"Pete Thomas ",
"Richard Littauer ",
"Richard Schneider ",
"Roman Khafizianov ",
"SeungWon ",
"Stephen Whitmore ",
"Tara Vancil ",
"Teri Chadbourne ",
"Travis Person ",
"Travis Person ",
"Vasco Santos ",
"Vasco Santos ",
"Victor Bjelkholm ",
"Volker Mische ",
"Zhiyuan Lin ",
"dirkmc ",
"dmitriy ryajov ",
"elsehow ",
"ethers ",
"greenkeeper[bot] <23040076+greenkeeper[bot]@users.noreply.github.com>",
"greenkeeper[bot] ",
"haad ",
"kumavis ",
"leekt216 ",
"nginnever ",
"noah the goodra ",
"phillmac ",
"priecint ",
"samuli ",
"sarthak khandelwal ",
"shunkin ",
"victorbjelkholm ",
"Łukasz Magiera ",
"Łukasz Magiera "
]
}
================================================
FILE: src/add/form-data.browser.js
================================================
'use strict'
/* eslint-env browser */
const normaliseInput = require('ipfs-utils/src/files/normalise-input')
const mtimeToObject = require('../lib/mtime-to-object')
exports.toFormData = async input => {
const files = normaliseInput(input)
const formData = new FormData()
let i = 0
for await (const file of files) {
const headers = {}
if (file.mtime !== undefined && file.mtime !== null) {
const mtime = mtimeToObject(file.mtime)
if (mtime) {
headers.mtime = mtime.secs
headers['mtime-nsecs'] = mtime.nsecs
}
}
if (file.mode !== undefined && file.mode !== null) {
headers.mode = file.mode.toString(8).padStart(4, '0')
}
if (file.content) {
// In the browser there's _currently_ no streaming upload, buffer up our
// async iterator chunks and append a big Blob :(
// One day, this will be browser streams
const bufs = []
for await (const chunk of file.content) {
bufs.push(chunk)
}
formData.append(`file-${i}`, new Blob(bufs, { type: 'application/octet-stream' }), encodeURIComponent(file.path), {
header: headers
})
} else {
formData.append(`dir-${i}`, new Blob([], { type: 'application/x-directory' }), encodeURIComponent(file.path), {
header: headers
})
}
i++
}
return formData
}
================================================
FILE: src/add/form-data.js
================================================
'use strict'
const FormData = require('form-data')
const { Buffer } = require('buffer')
const toStream = require('it-to-stream')
const normaliseInput = require('ipfs-utils/src/files/normalise-input')
const { isElectronRenderer } = require('ipfs-utils/src/env')
const mtimeToObject = require('../lib/mtime-to-object')
exports.toFormData = async input => {
const files = normaliseInput(input)
const formData = new FormData()
let i = 0
for await (const file of files) {
const headers = {}
if (file.mtime !== undefined && file.mtime !== null) {
const mtime = mtimeToObject(file.mtime)
if (mtime) {
headers.mtime = mtime.secs
headers['mtime-nsecs'] = mtime.nsecs
}
}
if (file.mode !== undefined && file.mode !== null) {
headers.mode = file.mode.toString(8).padStart(4, '0')
}
if (file.content) {
// In Node.js, FormData can be passed a stream so no need to buffer
formData.append(
`file-${i}`,
// FIXME: add a `path` property to the stream so `form-data` doesn't set
// a Content-Length header that is only the sum of the size of the
// header/footer when knownLength option (below) is null.
Object.assign(
toStream.readable(file.content),
{ path: file.path || `file-${i}` }
),
{
filepath: encodeURIComponent(file.path),
contentType: 'application/octet-stream',
knownLength: file.content.length, // Send Content-Length header if known
header: headers
}
)
} else {
formData.append(`dir-${i}`, Buffer.alloc(0), {
filepath: encodeURIComponent(file.path),
contentType: 'application/x-directory',
header: headers
})
}
i++
}
return formData
}
// TODO remove this when upstream fix for ky-universal is merged
// https://github.com/sindresorhus/ky-universal/issues/9
// also this should only be necessary when nodeIntegration is false in electron renderer
if (isElectronRenderer) {
exports.toFormData = require('./form-data.browser').toFormData
}
================================================
FILE: src/add/index.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const CID = require('cids')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
const { toFormData } = require('./form-data')
const toCamel = require('../lib/object-to-camel')
// POST /api/v0/add — import files/data into IPFS.
// Returns an async generator that yields one core-interface entry per
// added file/directory. When `options.progress` is supplied, byte-progress
// messages from the response stream are routed to that callback instead of
// being yielded.
module.exports = configure(({ ky }) => {
return async function * add (input, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
// Stream results back as ndjson over a single channel.
searchParams.set('stream-channels', true)
// Map camelCase option names onto the API's kebab-case query params.
// Note the two styles of guard: truthy checks omit the param when the
// value is falsy, while `!= null` checks still transmit an explicit
// `false`/`0`.
if (options.chunker) searchParams.set('chunker', options.chunker)
if (options.cidVersion) searchParams.set('cid-version', options.cidVersion)
if (options.cidBase) searchParams.set('cid-base', options.cidBase)
if (options.enableShardingExperiment != null) searchParams.set('enable-sharding-experiment', options.enableShardingExperiment)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.onlyHash != null) searchParams.set('only-hash', options.onlyHash)
if (options.pin != null) searchParams.set('pin', options.pin)
if (options.progress) searchParams.set('progress', true)
if (options.quiet != null) searchParams.set('quiet', options.quiet)
if (options.quieter != null) searchParams.set('quieter', options.quieter)
if (options.rawLeaves != null) searchParams.set('raw-leaves', options.rawLeaves)
if (options.shardSplitThreshold) searchParams.set('shard-split-threshold', options.shardSplitThreshold)
if (options.silent) searchParams.set('silent', options.silent)
if (options.trickle != null) searchParams.set('trickle', options.trickle)
if (options.wrapWithDirectory != null) searchParams.set('wrap-with-directory', options.wrapWithDirectory)
if (options.preload != null) searchParams.set('preload', options.preload)
if (options.fileImportConcurrency != null) searchParams.set('file-import-concurrency', options.fileImportConcurrency)
if (options.blockWriteConcurrency != null) searchParams.set('block-write-concurrency', options.blockWriteConcurrency)
const res = await ky.post('add', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: await toFormData(input)
})
// Each ndjson line is either a progress update (has `Bytes`) or a final
// entry for an added file/directory.
for await (let file of ndjson(toIterable(res.body))) {
file = toCamel(file)
if (options.progress && file.bytes) {
options.progress(file.bytes)
} else {
yield toCoreInterface(file)
}
}
}
})
/**
 * Convert one camelCased `add` API response entry into the js-ipfs
 * core-interface shape.
 *
 * @param {Object} file - response entry (already camelCased)
 * @returns {{path: string, cid: CID, size: number, mode?: number, mtime?: {secs: number, nsecs: number}}}
 */
function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }) {
  const output = {
    path: name,
    cid: new CID(hash),
    // `size` arrives as a string of decimal digits — always pass the radix
    // so e.g. legacy octal-prefix parsing can never kick in.
    size: parseInt(size, 10)
  }

  if (mode != null) {
    // Mode is transmitted as an octal string (e.g. '0644').
    output.mode = parseInt(mode, 8)
  }

  if (mtime != null) {
    output.mtime = {
      secs: mtime,
      nsecs: mtimeNsecs || 0 // nanoseconds may be omitted by the API
    }
  }

  return output
}
================================================
FILE: src/bitswap/index.js
================================================
'use strict'
module.exports = config => ({
wantlist: require('./wantlist')(config),
stat: require('./stat')(config),
unwant: require('./unwant')(config)
})
================================================
FILE: src/bitswap/stat.js
================================================
'use strict'
const configure = require('../lib/configure')
const Big = require('bignumber.js')
const CID = require('cids')
module.exports = configure(({ ky }) => {
return async (options) => {
options = options || {}
const res = await ky.post('bitswap/stat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return toCoreInterface(res)
}
})
function toCoreInterface (res) {
return {
provideBufLen: res.ProvideBufLen,
wantlist: (res.Wantlist || []).map(k => new CID(k['/'])),
peers: (res.Peers || []),
blocksReceived: new Big(res.BlocksReceived),
dataReceived: new Big(res.DataReceived),
blocksSent: new Big(res.BlocksSent),
dataSent: new Big(res.DataSent),
dupBlksReceived: new Big(res.DupBlksReceived),
dupDataReceived: new Big(res.DupDataReceived)
}
}
================================================
FILE: src/bitswap/unwant.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (typeof cid === 'string') {
searchParams.set('arg', cid)
} else {
searchParams.set('arg', new CID(cid).toString())
}
const res = await ky.post('bitswap/unwant', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res
}
})
================================================
FILE: src/bitswap/wantlist.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (peerId, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (peerId) {
if (typeof peerId === 'string') {
searchParams.set('peer', peerId)
} else {
searchParams.set('peer', new CID(peerId).toString())
}
}
const res = await ky.post('bitswap/wantlist', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Keys || []).map(k => new CID(k['/']))
}
})
================================================
FILE: src/block/get.js
================================================
'use strict'
const Block = require('ipfs-block')
const CID = require('cids')
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
cid = new CID(cid)
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${cid}`)
const data = await ky.post('block/get', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).arrayBuffer()
return new Block(Buffer.from(data), cid)
}
})
================================================
FILE: src/block/index.js
================================================
'use strict'
module.exports = config => ({
get: require('./get')(config),
stat: require('./stat')(config),
put: require('./put')(config),
rm: require('./rm')(config)
})
================================================
FILE: src/block/put.js
================================================
'use strict'
const Block = require('ipfs-block')
const CID = require('cids')
const multihash = require('multihashes')
const configure = require('../lib/configure')
const toFormData = require('../lib/buffer-to-form-data')
// POST /api/v0/block/put — store a raw block.
// Accepts either an ipfs-block Block (format/mhtype/mhlen/version derived
// from its CID) or raw bytes plus either an `options.cid` or explicit
// format/mhtype options. Resolves to a Block wrapping the stored data and
// the server-reported key.
module.exports = configure(({ ky }) => {
async function put (data, options) {
options = options || {}
// Given a Block: derive the CID-related options from it and unwrap the
// raw bytes before uploading.
if (Block.isBlock(data)) {
const { name, length } = multihash.decode(data.cid.multihash)
options = {
...options,
format: data.cid.codec,
mhtype: name,
mhlen: length,
version: data.cid.version
}
data = data.data
} else if (options.cid) {
// Same derivation when the caller supplies an explicit CID option;
// the `cid` key itself is not part of the wire protocol, so drop it.
const cid = new CID(options.cid)
const { name, length } = multihash.decode(cid.multihash)
options = {
...options,
format: cid.codec,
mhtype: name,
mhlen: length,
version: cid.version
}
delete options.cid
}
const searchParams = new URLSearchParams(options.searchParams)
if (options.format) searchParams.set('format', options.format)
if (options.mhtype) searchParams.set('mhtype', options.mhtype)
if (options.mhlen) searchParams.set('mhlen', options.mhlen)
if (options.pin != null) searchParams.set('pin', options.pin)
if (options.version != null) searchParams.set('version', options.version)
let res
try {
res = await ky.post('block/put', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: toFormData(data)
}).json()
} catch (err) {
// Retry with "protobuf"/"cbor" format for go-ipfs
// TODO: remove when https://github.com/ipfs/go-cid/issues/75 resolved
// (recursive retry happens at most once per call because the retried
// format name is never 'dag-pb'/'dag-cbor').
if (options.format === 'dag-pb') {
return put(data, { ...options, format: 'protobuf' })
} else if (options.format === 'dag-cbor') {
return put(data, { ...options, format: 'cbor' })
}
throw err
}
return new Block(data, new CID(res.Key))
}
return put
})
================================================
FILE: src/block/rm.js
================================================
'use strict'
const CID = require('cids')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * rm (cid, options) {
options = options || {}
if (!Array.isArray(cid)) {
cid = [cid]
}
const searchParams = new URLSearchParams()
searchParams.set('stream-channels', true)
searchParams.set('force', options.force || false)
searchParams.set('quiet', options.quiet || false)
cid.forEach(cid => {
searchParams.append('arg', new CID(cid).toString())
})
const res = await ky.post('block/rm', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const removed of ndjson(toIterable(res.body))) {
yield toCoreInterface(removed)
}
}
})
function toCoreInterface (removed) {
const out = {
cid: new CID(removed.Hash)
}
if (removed.Error) {
out.error = new Error(removed.Error)
}
return out
}
================================================
FILE: src/block/stat.js
================================================
'use strict'
const CID = require('cids')
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
options = options || {}
if (Buffer.isBuffer(cid)) {
cid = new CID(cid)
}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${cid}`)
const res = await ky.post('block/stat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return { cid: new CID(res.Key), size: res.Size }
}
})
================================================
FILE: src/bootstrap/add.js
================================================
'use strict'
const Multiaddr = require('multiaddr')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (addr, options) => {
if (addr && typeof addr === 'object' && !Multiaddr.isMultiaddr(addr)) {
options = addr
addr = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (addr) searchParams.set('arg', `${addr}`)
if (options.default != null) searchParams.set('default', options.default)
const res = await ky.post('bootstrap/add', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res
}
})
================================================
FILE: src/bootstrap/index.js
================================================
'use strict'
module.exports = config => ({
add: require('./add')(config),
rm: require('./rm')(config),
list: require('./list')(config)
})
================================================
FILE: src/bootstrap/list.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (options) => {
options = options || {}
const res = await ky.post('bootstrap/list', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return res
}
})
================================================
FILE: src/bootstrap/rm.js
================================================
'use strict'
const Multiaddr = require('multiaddr')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (addr, options) => {
if (addr && typeof addr === 'object' && !Multiaddr.isMultiaddr(addr)) {
options = addr
addr = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (addr) searchParams.set('arg', `${addr}`)
if (options.all != null) searchParams.set('all', options.all)
const res = await ky.post('bootstrap/rm', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res
}
})
================================================
FILE: src/cat.js
================================================
'use strict'
const CID = require('cids')
const { Buffer } = require('buffer')
const configure = require('./lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * cat (path, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (typeof path === 'string') {
searchParams.set('arg', path)
} else {
searchParams.set('arg', new CID(path).toString())
}
if (options.offset) searchParams.set('offset', options.offset)
if (options.length) searchParams.set('length', options.length)
const res = await ky.post('cat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const chunk of toIterable(res.body)) {
yield Buffer.from(chunk)
}
}
})
================================================
FILE: src/commands.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.flags != null) searchParams.set('flags', options.flags)
return ky.post('commands', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
}
})
================================================
FILE: src/config/get.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (key, options) => {
if (key && typeof key === 'object') {
options = key
key = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (key) searchParams.set('arg', key)
const url = key ? 'config' : 'config/show'
const data = await ky.post(url, {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return key ? data.Value : data
}
})
================================================
FILE: src/config/index.js
================================================
'use strict'
module.exports = config => ({
get: require('./get')(config),
set: require('./set')(config),
replace: require('./replace')(config),
profiles: require('./profiles')(config)
})
================================================
FILE: src/config/profiles/apply.js
================================================
'use strict'
const configure = require('../../lib/configure')
module.exports = configure(({ ky }) => {
return async (profile, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', profile)
if (options.dryRun != null) searchParams.set('dry-run', options.dryRun)
const res = await ky.post('config/profile/apply', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return {
original: res.OldCfg, updated: res.NewCfg
}
}
})
================================================
FILE: src/config/profiles/index.js
================================================
'use strict'
module.exports = config => ({
apply: require('./apply')(config),
list: require('./list')(config)
})
================================================
FILE: src/config/profiles/list.js
================================================
'use strict'
const configure = require('../../lib/configure')
const toCamel = require('../../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (options) => {
options = options || {}
const res = await ky.post('config/profile/list', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return res.map(profile => toCamel(profile))
}
})
================================================
FILE: src/config/replace.js
================================================
'use strict'
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
const toFormData = require('../lib/buffer-to-form-data')
module.exports = configure(({ ky }) => {
return async (config, options) => {
options = options || {}
const res = await ky.post('config/replace', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams,
body: toFormData(Buffer.from(JSON.stringify(config)))
}).text()
return res
}
})
================================================
FILE: src/config/set.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (key, value, options) => {
options = options || {}
if (typeof key !== 'string') {
throw new Error('Invalid key type')
}
const searchParams = new URLSearchParams(options.searchParams)
if (typeof value === 'boolean') {
searchParams.set('bool', true)
value = value.toString()
} else if (typeof value !== 'string') {
searchParams.set('json', true)
value = JSON.stringify(value)
}
searchParams.set('arg', key)
searchParams.append('arg', value)
const res = await ky.post('config', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/dag/get.js
================================================
'use strict'
const dagPB = require('ipld-dag-pb')
const dagCBOR = require('ipld-dag-cbor')
const raw = require('ipld-raw')
const configure = require('../lib/configure')
const resolvers = {
'dag-cbor': dagCBOR.resolver,
'dag-pb': dagPB.resolver,
raw: raw.resolver
}
module.exports = config => {
const getBlock = require('../block/get')(config)
const dagResolve = require('./resolve')(config)
return configure(({ ky }) => {
return async (cid, path, options) => {
if (typeof path === 'object') {
options = path
path = null
}
options = options || {}
const resolved = await dagResolve(cid, path, options)
const block = await getBlock(resolved.cid, options)
const dagResolver = resolvers[block.cid.codec]
if (!dagResolver) {
throw Object.assign(
new Error(`Missing IPLD format "${block.cid.codec}"`),
{ missingMulticodec: cid.codec }
)
}
return dagResolver.resolve(block.data, resolved.remPath)
}
})(config)
}
================================================
FILE: src/dag/index.js
================================================
'use strict'
module.exports = config => ({
get: require('./get')(config),
put: require('./put')(config),
resolve: require('./resolve')(config)
})
================================================
FILE: src/dag/put.js
================================================
'use strict'
const dagCBOR = require('ipld-dag-cbor')
const CID = require('cids')
const multihash = require('multihashes')
const configure = require('../lib/configure')
const toFormData = require('../lib/buffer-to-form-data')
// POST /api/v0/dag/put — store a DAG node.
// Accepts either a `cid` option (from which format/hashAlg are derived)
// OR explicit `format` + `hashAlg` options; defaults to dag-cbor/sha2-256.
// Resolves to the CID of the stored node.
module.exports = configure(({ ky }) => {
return async (dagNode, options) => {
options = options || {}
// Legacy alias: `hash` -> `hashAlg`.
if (options.hash) {
options.hashAlg = options.hash
delete options.hash
}
// `cid` and `format`/`hashAlg` are mutually exclusive ways of saying
// the same thing; `format` and `hashAlg` must come as a pair.
if (options.cid && (options.format || options.hashAlg)) {
throw new Error('Failed to put DAG node. Provide either `cid` OR `format` and `hashAlg` options')
} else if ((options.format && !options.hashAlg) || (!options.format && options.hashAlg)) {
throw new Error('Failed to put DAG node. Provide `format` AND `hashAlg` options')
}
// Derive format/hashAlg from the supplied CID, then drop the non-wire
// `cid` key.
if (options.cid) {
const cid = new CID(options.cid)
options = {
...options,
format: cid.codec,
hashAlg: multihash.decode(cid.multihash).name
}
delete options.cid
}
options = {
format: 'dag-cbor',
hashAlg: 'sha2-256',
inputEnc: 'raw',
...options
}
// Serialize locally for the two formats we know how to encode.
let serialized
if (options.format === 'dag-cbor') {
serialized = dagCBOR.util.serialize(dagNode)
} else if (options.format === 'dag-pb') {
serialized = dagNode.serialize()
} else {
// FIXME Hopefully already serialized...can we use IPLD to serialise instead?
serialized = dagNode
}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('format', options.format)
searchParams.set('hash', options.hashAlg)
searchParams.set('input-enc', options.inputEnc)
if (options.pin != null) searchParams.set('pin', options.pin)
const res = await ky.post('dag/put', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: toFormData(serialized)
}).json()
return new CID(res.Cid['/'])
}
})
================================================
FILE: src/dag/resolve.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, path, options) => {
if (typeof path === 'object') {
options = path
path = null
}
options = options || {}
const cidPath = path
? [cid, path].join(path.startsWith('/') ? '' : '/')
: `${cid}`
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', cidPath)
const res = await ky.post('dag/resolve', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return { cid: new CID(res.Cid['/']), remPath: res.RemPath }
}
})
================================================
FILE: src/dht/find-peer.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const multiaddr = require('multiaddr')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function findPeer (peerId, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(peerId) ? new CID(peerId) : peerId}`)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
const res = await ky.post('dht/findpeer', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const message of ndjson(toIterable(res.body))) {
// 3 = QueryError
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
// https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L388-L389
if (message.Type === 3) {
throw new Error(message.Extra)
}
// 2 = FinalPeer
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
if (message.Type === 2 && message.Responses) {
// There will only be 1:
// https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L395-L396
for (const { ID, Addrs } of message.Responses) {
return {
id: ID,
addrs: (Addrs || []).map(a => multiaddr(a))
}
}
}
}
throw new Error('not found')
}
})
================================================
FILE: src/dht/find-provs.js
================================================
'use strict'
const CID = require('cids')
const multiaddr = require('multiaddr')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * findProvs (cid, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${new CID(cid)}`)
if (options.numProviders) searchParams.set('num-providers', options.numProviders)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
const res = await ky.post('dht/findprovs', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const message of ndjson(toIterable(res.body))) {
// 3 = QueryError
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
// https://github.com/libp2p/go-libp2p-kad-dht/blob/master/routing.go#L525-L526
if (message.Type === 3) {
throw new Error(message.Extra)
}
// 4 = Provider
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L20
if (message.Type === 4 && message.Responses) {
for (const { ID, Addrs } of message.Responses) {
yield {
id: ID,
addrs: (Addrs || []).map(a => multiaddr(a))
}
}
}
}
}
})
================================================
FILE: src/dht/get.js
================================================
'use strict'
const { Buffer } = require('buffer')
const ndjson = require('iterable-ndjson')
const toIterable = require('stream-to-it/source')
const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async function get (key, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
if (!Buffer.isBuffer(key)) {
throw new Error('invalid key')
}
const res = await ky.post(`dht/get?key=${encodeBufferURIComponent(key)}&${searchParams}`, {
timeout: options.timeout,
signal: options.signal,
headers: options.headers
})
for await (const message of ndjson(toIterable(res.body))) {
// 3 = QueryError
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
// https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473
if (message.Type === 3) {
throw new Error(message.Extra)
}
// 5 = Value
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L21
if (message.Type === 5) {
return message.Extra
}
}
throw new Error('not found')
}
})
================================================
FILE: src/dht/index.js
================================================
'use strict'
module.exports = config => ({
get: require('./get')(config),
put: require('./put')(config),
findProvs: require('./find-provs')(config),
findPeer: require('./find-peer')(config),
provide: require('./provide')(config),
// find closest peerId to given peerId
query: require('./query')(config)
})
================================================
FILE: src/dht/provide.js
================================================
'use strict'
const CID = require('cids')
const multiaddr = require('multiaddr')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async function * provide (cids, options) {
cids = Array.isArray(cids) ? cids : [cids]
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
cids.forEach(cid => searchParams.append('arg', `${new CID(cid)}`))
if (options.recursive != null) searchParams.set('recursive', options.recursive)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
const res = await ky.post('dht/provide', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (let message of ndjson(toIterable(res.body))) {
// 3 = QueryError
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
// https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L283-L284
if (message.Type === 3) {
throw new Error(message.Extra)
}
message = toCamel(message)
message.id = new CID(message.id)
if (message.responses) {
message.responses = message.responses.map(({ ID, Addrs }) => ({
id: ID,
addrs: (Addrs || []).map(a => multiaddr(a))
}))
} else {
message.responses = []
}
yield message
}
}
})
================================================
FILE: src/dht/put.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const multiaddr = require('multiaddr')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async function * put (key, value, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
key = Buffer.isBuffer(key) ? encodeBufferURIComponent(key) : encodeURIComponent(key)
value = Buffer.isBuffer(value) ? encodeBufferURIComponent(value) : encodeURIComponent(value)
const url = `dht/put?arg=${key}&arg=${value}&${searchParams}`
const res = await ky.post(url, {
timeout: options.timeout,
signal: options.signal,
headers: options.headers
})
for await (let message of ndjson(toIterable(res.body))) {
// 3 = QueryError
// https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18
// https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473
if (message.Type === 3) {
throw new Error(message.Extra)
}
message = toCamel(message)
message.id = new CID(message.id)
if (message.responses) {
message.responses = message.responses.map(({ ID, Addrs }) => ({
id: ID,
addrs: (Addrs || []).map(a => multiaddr(a))
}))
}
yield message
}
}
})
================================================
FILE: src/dht/query.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const multiaddr = require('multiaddr')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * query (peerId, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(peerId) ? new CID(peerId) : peerId}`)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
const res = await ky.post('dht/query', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (let message of ndjson(toIterable(res.body))) {
message = toCamel(message)
message.id = new CID(message.id)
message.responses = (message.responses || []).map(({ ID, Addrs }) => ({
id: ID,
addrs: (Addrs || []).map(a => multiaddr(a))
}))
yield message
}
}
})
================================================
FILE: src/diag/cmds.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.verbose != null) searchParams.set('verbose', options.verbose)
return ky.post('diag/cmds', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
}
})
================================================
FILE: src/diag/index.js
================================================
'use strict'
module.exports = config => ({
net: require('./net')(config),
sys: require('./sys')(config),
cmds: require('./cmds')(config)
})
================================================
FILE: src/diag/net.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
return ky.post('diag/net', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
}
})
================================================
FILE: src/diag/sys.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
return ky.post('diag/sys', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
}
})
================================================
FILE: src/dns.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ ky }) => {
return async (domain, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', domain)
if (options.recursive != null) searchParams.set('recursive', options.recursive)
const res = await ky.post('dns', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res.Path
}
})
================================================
FILE: src/files/chmod.js
================================================
'use strict'
const configure = require('../lib/configure')
const modeToString = require('../lib/mode-to-string')
module.exports = configure(({ ky }) => {
return function chmod (path, mode, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.append('arg', path)
searchParams.append('mode', modeToString(mode))
if (options.flush != null) searchParams.set('flush', options.flush)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.parents != null) searchParams.set('parents', options.parents)
return ky.post('files/chmod', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/cp.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
const { findSources } = require('./utils')
module.exports = configure(({ ky }) => {
return (...args) => {
const { sources, options } = findSources(args)
const searchParams = new URLSearchParams(options.searchParams)
sources.forEach(src => searchParams.append('arg', CID.isCID(src) ? `/ipfs/${src}` : src))
if (options.flush != null) searchParams.set('flush', options.flush)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.parents != null) searchParams.set('parents', options.parents)
if (options.shardSplitThreshold != null) searchParams.set('shardSplitThreshold', options.shardSplitThreshold)
return ky.post('files/cp', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/flush.js
================================================
'use strict'
const configure = require('../lib/configure')
const CID = require('cids')
module.exports = configure(({ ky }) => {
return async (path, options) => {
if (typeof path !== 'string') {
options = path
path = '/'
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', path)
const res = await ky.post('files/flush', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return new CID(res.Cid)
}
})
================================================
FILE: src/files/index.js
================================================
'use strict'
module.exports = config => ({
chmod: require('./chmod')(config),
cp: require('./cp')(config),
mkdir: require('./mkdir')(config),
flush: require('./flush')(config),
stat: require('./stat')(config),
rm: require('./rm')(config),
ls: require('./ls')(config),
read: require('./read')(config),
touch: require('./touch')(config),
write: require('./write')(config),
mv: require('./mv')(config)
})
================================================
FILE: src/files/ls.js
================================================
'use strict'
const CID = require('cids')
const ndjson = require('iterable-ndjson')
const toIterable = require('stream-to-it/source')
const configure = require('../lib/configure')
const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata')
module.exports = configure(({ ky }) => {
return async function * ls (path, options) {
if (typeof path !== 'string') {
options = path
path = '/'
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', CID.isCID(path) ? `/ipfs/${path}` : path)
searchParams.set('stream', options.stream == null ? true : options.stream)
if (options.cidBase) searchParams.set('cid-base', options.cidBase)
searchParams.set('long', options.long == null ? true : options.long)
// TODO: remove after go-ipfs 0.5 is released
searchParams.set('l', options.long == null ? true : options.long)
const res = await ky.post('files/ls', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const result of ndjson(toIterable(res.body))) {
// go-ipfs does not yet support the "stream" option
if ('Entries' in result) {
for (const entry of result.Entries || []) {
yield toCoreInterface(toCamelWithMetadata(entry))
}
} else {
yield toCoreInterface(toCamelWithMetadata(result))
}
}
}
})
// Convert a wire-format entry to the core interface shape: the string `hash`
// (when present) becomes a CID instance under `cid`, and `hash` is removed.
function toCoreInterface (entry) {
  const { hash } = entry
  if (hash) {
    entry.cid = new CID(hash)
  }
  delete entry.hash
  return entry
}
================================================
FILE: src/files/mkdir.js
================================================
'use strict'
const configure = require('../lib/configure')
const modeToString = require('../lib/mode-to-string')
const mtimeToObject = require('../lib/mtime-to-object')
module.exports = configure(({ ky }) => {
return (path, options) => {
options = options || {}
const mtime = mtimeToObject(options.mtime)
const searchParams = new URLSearchParams(options.searchParams)
searchParams.append('arg', path)
if (options.cidVersion != null) searchParams.set('cid-version', options.cidVersion)
if (options.flush != null) searchParams.set('flush', options.flush)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.parents != null) searchParams.set('parents', options.parents)
if (options.shardSplitThreshold != null) searchParams.set('shardSplitThreshold', options.shardSplitThreshold)
if (mtime) {
searchParams.set('mtime', mtime.secs)
if (mtime.nsecs != null) {
searchParams.set('mtimeNsecs', mtime.nsecs)
}
}
if (options.mode != null) searchParams.set('mode', modeToString(options.mode))
return ky.post('files/mkdir', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/mv.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
const { findSources } = require('./utils')
module.exports = configure(({ ky }) => {
return (...args) => {
const { sources, options } = findSources(args)
const searchParams = new URLSearchParams(options.searchParams)
sources.forEach(src => searchParams.append('arg', CID.isCID(src) ? `/ipfs/${src}` : src))
if (options.flush != null) searchParams.set('flush', options.flush)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.parents != null) searchParams.set('parents', options.parents)
if (options.shardSplitThreshold != null) searchParams.set('shardSplitThreshold', options.shardSplitThreshold)
return ky.post('files/mv', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/read.js
================================================
'use strict'
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * read (path, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.append('arg', `${path}`)
if (options.length != null) searchParams.set('length', options.length)
if (options.offset != null) searchParams.set('offset', options.offset)
const res = await ky.post('files/read', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const chunk of toIterable(res.body)) {
yield Buffer.from(chunk)
}
}
})
================================================
FILE: src/files/rm.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return (path, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.append('arg', path)
if (options.recursive != null) searchParams.set('recursive', options.recursive)
if (options.force != null) searchParams.set('force', options.force)
if (options.shardSplitThreshold != null) searchParams.set('shardSplitThreshold', options.shardSplitThreshold)
return ky.post('files/rm', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/stat.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata')
module.exports = configure(({ ky }) => {
return async (path, options) => {
if (typeof path !== 'string') {
options = path
path = '/'
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', path)
if (options.cidBase) searchParams.set('cid-base', options.cidBase)
if (options.hash != null) searchParams.set('hash', options.hash)
if (options.size != null) searchParams.set('size', options.size)
if (options.withLocal != null) searchParams.set('with-local', options.withLocal)
const res = await ky.post('files/stat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
res.WithLocality = res.WithLocality || false
return toCoreInterface(toCamelWithMetadata(res))
}
})
// Convert a wire-format stat entry to the core interface shape: the string
// `hash` becomes a CID instance under `cid`, and `hash` is removed.
function toCoreInterface (entry) {
  const cid = new CID(entry.hash)
  delete entry.hash
  entry.cid = cid
  return entry
}
================================================
FILE: src/files/touch.js
================================================
'use strict'
const configure = require('../lib/configure')
const mtimeToObject = require('../lib/mtime-to-object')
module.exports = configure(({ ky }) => {
return function touch (path, options) {
options = options || {}
const mtime = mtimeToObject(options.mtime)
const searchParams = new URLSearchParams(options.searchParams)
searchParams.append('arg', path)
if (mtime) {
searchParams.set('mtime', mtime.secs)
searchParams.set('mtimeNsecs', mtime.nsecs)
}
if (options.flush != null) searchParams.set('flush', options.flush)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.parents != null) searchParams.set('parents', options.parents)
return ky.post('files/touch', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/files/utils.js
================================================
'use strict'
exports.findSources = (args) => {
let options = {}
let sources = []
if (!Array.isArray(args[args.length - 1]) && typeof args[args.length - 1] === 'object') {
options = args.pop()
}
if (args.length === 1 && Array.isArray(args[0])) {
// support ipfs.files.cp([src, dest], opts)
sources = args[0]
} else {
// support ipfs.files.cp(src, dest, opts) and ipfs.files.cp(src1, src2, dest, opts)
sources = args
}
return {
sources,
options
}
}
================================================
FILE: src/files/write.js
================================================
'use strict'
const configure = require('../lib/configure')
const toFormData = require('../lib/buffer-to-form-data')
const modeToString = require('../lib/mode-to-string')
const mtimeToObject = require('../lib/mtime-to-object')
module.exports = configure(({ ky }) => {
return async (path, input, options) => {
options = options || {}
const mtime = mtimeToObject(options.mtime)
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', path)
searchParams.set('stream-channels', true)
if (options.cidVersion) searchParams.set('cid-version', options.cidVersion)
if (options.create != null) searchParams.set('create', options.create)
if (options.hashAlg) searchParams.set('hash', options.hashAlg)
if (options.length != null) searchParams.set('length', options.length)
if (options.offset != null) searchParams.set('offset', options.offset)
if (options.parents != null) searchParams.set('parents', options.parents)
if (options.rawLeaves != null) searchParams.set('raw-leaves', options.rawLeaves)
if (options.truncate != null) searchParams.set('truncate', options.truncate)
if (options.shardSplitThreshold != null) searchParams.set('shardSplitThreshold', options.shardSplitThreshold)
if (mtime) {
searchParams.set('mtime', mtime.secs)
if (mtime.nsecs != null) {
searchParams.set('mtimeNsecs', mtime.nsecs)
}
}
const res = await ky.post('files/write', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: toFormData(input, {
mode: options.mode != null ? modeToString(options.mode) : undefined,
mtime: mtime ? mtime.secs : undefined,
mtimeNsecs: mtime ? mtime.nsecs : undefined
}) // TODO: support inputs other than buffer as per spec
})
return res.text()
}
})
================================================
FILE: src/get-endpoint-config.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ apiAddr, apiPath }) => {
const url = new URL(apiAddr)
return () => ({
host: url.hostname,
port: url.port,
protocol: url.protocol.split(':')[0], // remove ":"
'api-path': apiPath
})
})
================================================
FILE: src/get.js
================================================
'use strict'
const configure = require('./lib/configure')
const Tar = require('it-tar')
const { Buffer } = require('buffer')
const CID = require('cids')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * get (path, options) {
options = options || {}
const searchParams = new URLSearchParams()
searchParams.set('arg', `${Buffer.isBuffer(path) ? new CID(path) : path}`)
if (options.compress !== undefined) {
searchParams.set('compress', options.compress)
}
if (options.compressionLevel !== undefined) {
searchParams.set('compression-level', options.compressionLevel)
}
if (options.offset) {
searchParams.set('offset', options.offset)
}
if (options.length) {
searchParams.set('length', options.length)
}
const res = await ky.post('get', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
const extractor = Tar.extract()
for await (const { header, body } of extractor(toIterable(res.body))) {
if (header.type === 'directory') {
yield {
path: header.name
}
} else {
yield {
path: header.name,
content: body
}
}
}
}
})
================================================
FILE: src/id.js
================================================
'use strict'
const configure = require('./lib/configure')
const toCamel = require('./lib/object-to-camel')
const multiaddr = require('multiaddr')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('id', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
const output = toCamel(res)
if (output.addresses) {
output.addresses = output.addresses.map(ma => multiaddr(ma))
}
return output
}
})
================================================
FILE: src/index.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const multiaddr = require('multiaddr')
const multibase = require('multibase')
const multicodec = require('multicodec')
const multihash = require('multihashes')
const globSource = require('ipfs-utils/src/files/glob-source')
const urlSource = require('ipfs-utils/src/files/url-source')
/**
 * Create an IPFS HTTP API client.
 *
 * @param {*} config - Endpoint configuration passed through to every command
 *   module (each `require(...)(config)` call below binds one API namespace).
 * @returns {Object} The client: one property per IPFS command/namespace.
 */
function ipfsClient (config) {
  return {
    add: require('./add')(config),
    bitswap: require('./bitswap')(config),
    block: require('./block')(config),
    bootstrap: require('./bootstrap')(config),
    cat: require('./cat')(config),
    commands: require('./commands')(config),
    config: require('./config')(config),
    dag: require('./dag')(config),
    dht: require('./dht')(config),
    diag: require('./diag')(config),
    dns: require('./dns')(config),
    files: require('./files')(config),
    get: require('./get')(config),
    getEndpointConfig: require('./get-endpoint-config')(config),
    id: require('./id')(config),
    key: require('./key')(config),
    log: require('./log')(config),
    ls: require('./ls')(config),
    mount: require('./mount')(config),
    name: require('./name')(config),
    object: require('./object')(config),
    pin: require('./pin')(config),
    ping: require('./ping')(config),
    pubsub: require('./pubsub')(config),
    refs: require('./refs')(config),
    repo: require('./repo')(config),
    resolve: require('./resolve')(config),
    stats: require('./stats')(config),
    // `shutdown` is an alias of `stop` — both map to the same command module.
    stop: require('./stop')(config),
    shutdown: require('./stop')(config),
    swarm: require('./swarm')(config),
    version: require('./version')(config)
  }
}
// Expose commonly-needed helper classes/utilities as static properties of the
// factory, so consumers don't have to depend on them directly.
Object.assign(ipfsClient, { Buffer, CID, multiaddr, multibase, multicodec, multihash, globSource, urlSource })
module.exports = ipfsClient
================================================
FILE: src/key/export.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return (name, password, options) => {
if (typeof password !== 'string') {
options = password
password = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', name)
if (password) searchParams.set('password', password)
return ky.post('key/export', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).text()
}
})
================================================
FILE: src/key/gen.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (name, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', name)
if (options.type) searchParams.set('type', options.type)
if (options.size != null) searchParams.set('size', options.size)
const res = await ky.post('key/gen', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/key/import.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (name, pem, password, options) => {
if (typeof password !== 'string') {
options = password
password = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', name)
searchParams.set('pem', pem)
if (password) searchParams.set('password', password)
const res = await ky.post('key/import', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/key/index.js
================================================
'use strict'
module.exports = config => ({
gen: require('./gen')(config),
list: require('./list')(config),
rename: require('./rename')(config),
rm: require('./rm')(config),
export: require('./export')(config),
import: require('./import')(config)
})
================================================
FILE: src/key/list.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('key/list', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return (res.Keys || []).map(k => toCamel(k))
}
})
================================================
FILE: src/key/rename.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (oldName, newName, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', oldName)
searchParams.append('arg', newName)
if (options.force != null) searchParams.set('force', options.force)
const res = await ky.post('key/rename', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/key/rm.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (name, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', name)
const res = await ky.post('key/rm', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res.Keys[0])
}
})
================================================
FILE: src/lib/buffer-to-form-data.browser.js
================================================
'use strict'
/* eslint-env browser */
// Browser variant: wrap a buffer in a FormData body for multipart upload.
// The payload is attached as an opaque binary Blob under the `file` field.
module.exports = buf => {
  const formData = new FormData()
  formData.append('file', new Blob([buf], { type: 'application/octet-stream' }))
  return formData
}
================================================
FILE: src/lib/buffer-to-form-data.js
================================================
'use strict'
const FormData = require('form-data')
const { isElectronRenderer } = require('ipfs-utils/src/env')
// Node.js variant: wrap a buffer in a form-data body for multipart upload.
// Optional UnixFS metadata (mode / mtime / mtime-nsecs) travels in
// per-part headers.
module.exports = (buf, { mode, mtime, mtimeNsecs } = {}) => {
  const headers = {}
  if (mode != null) {
    headers.mode = mode
  }
  if (mtime != null) {
    headers.mtime = mtime
    // nsecs are only attached when a whole-seconds mtime is present
    if (mtimeNsecs != null) {
      headers['mtime-nsecs'] = mtimeNsecs
    }
  }
  const formData = new FormData()
  formData.append('file', buf, {
    header: headers
  })
  return formData
}
// TODO remove this when upstream fix for ky-universal is merged
// https://github.com/sindresorhus/ky-universal/issues/9
// also this should only be necessary when nodeIntegration is false in electron renderer
if (isElectronRenderer) {
  module.exports = require('./buffer-to-form-data.browser')
}
================================================
FILE: src/lib/configure.js
================================================
'use strict'
/* eslint-env browser */
const ky = require('ky-universal').default
const { isBrowser, isWebWorker } = require('ipfs-utils/src/env')
const toUri = require('multiaddr-to-uri')
const errorHandler = require('./error-handler')
const mergeOptions = require('merge-options').bind({ ignoreUndefined: true })
const parseDuration = require('parse-duration')
// Set default configuration and call create function with them
// Set default configuration and call create function with them
//
// Accepts a multiaddr string ('/ip4/…/tcp/…'), a Multiaddr instance, or a
// plain config object. Normalises apiAddr/apiPath, builds a ky instance
// with defaults (base URL, timeout, headers, error handler) and hands the
// wrapped HTTP client to `create`.
module.exports = create => config => {
  config = config || {}
  if (typeof config === 'string') {
    // e.g. configure(...)('/ip4/127.0.0.1/tcp/5001')
    config = { apiAddr: config }
  } else if (config.constructor && config.constructor.isMultiaddr) {
    config = { apiAddr: config }
  } else {
    // copy so the caller's object is never mutated
    config = { ...config }
  }
  config.apiAddr = (config.apiAddr || getDefaultApiAddr(config)).toString()
  // multiaddrs start with '/' - convert them to an http(s) URI
  config.apiAddr = config.apiAddr.startsWith('/') ? toUri(config.apiAddr) : config.apiAddr
  config.apiAddr = trimEnd(config.apiAddr, '/')
  const apiAddrPath = getNonRootPath(config.apiAddr)
  // Use configured apiPath, or path on the end of apiAddr (if there is one) or default to /api/v0
  config.apiPath = config.apiPath || config['api-path'] || apiAddrPath || '/api/v0'
  config.apiPath = trimEnd(config.apiPath, '/')
  // If user passed apiAddr with a path, trim it from the end (it is now apiPath)
  config.apiAddr = apiAddrPath ? trimEnd(config.apiAddr, apiAddrPath) : config.apiAddr
  const defaults = {
    prefixUrl: config.apiAddr + config.apiPath,
    timeout: parseTimeout(config.timeout) || 60000 * 20, // default: 20 minutes
    headers: config.headers,
    hooks: {
      afterResponse: [errorHandler]
    }
  }
  const k = ky.extend(defaults)
  // Wrap ky itself plus each HTTP verb helper so per-call options are
  // merged over the defaults (undefined values ignored).
  const client = ['get', 'post', 'put', 'delete', 'patch', 'head']
    .reduce((client, key) => {
      client[key] = wrap(k[key], defaults)
      return client
    }, wrap(k, defaults))
  return create({
    ky: client,
    ...config
  })
}
// Derive an API address when none was configured.
// In a browser/web-worker context default to the current page's origin,
// otherwise fall back to the local daemon at http://localhost:5001.
function getDefaultApiAddr ({ protocol, host, port }) {
  if (!isBrowser && !isWebWorker) {
    return `${protocol || 'http'}://${host || 'localhost'}:${port || 5001}`
  }

  if (!protocol) {
    // location.protocol carries a trailing ':' (e.g. 'https:')
    protocol = location.protocol.startsWith('http')
      ? trimEnd(location.protocol, ':')
      : 'http'
  }

  const h = host || location.hostname
  const p = port || location.port
  return `${protocol}://${h}${p ? ':' + p : ''}`
}
// returns the passed function wrapped in a function that ignores
// undefined values in the passed `options` object
// returns the passed function wrapped in a function that ignores
// undefined values in the passed `options` object
//
// Fixes:
// - tolerate a missing `options` argument (previously `options.timeout`
//   threw a TypeError when the caller passed none)
// - shallow-copy `options` so the caller's object is not mutated when a
//   human-readable timeout string is normalised to milliseconds
function wrap (fn, defaults) {
  return (input, options) => {
    options = { ...(options || {}) }
    if (options.timeout) options.timeout = parseTimeout(options.timeout)
    return fn(input, mergeOptions(defaults, options))
  }
}
// Accept human readable timeouts ('2m', '30s', …) as well as raw
// millisecond numbers; strings are converted via parse-duration.
function parseTimeout (value) {
  if (typeof value === 'string') {
    return parseDuration(value)
  }
  return value
}
// Strip a single trailing occurrence of `end` from `str`, if present.
const trimEnd = (str, end) => {
  if (!str.endsWith(end)) {
    return str
  }
  return str.slice(0, -end.length)
}
// Get the path from a URL is it is not /
// Get the path from a URL if it is not /
// Returns undefined for falsy input and null for a bare-root URL.
function getNonRootPath (url) {
  if (!url) {
    return undefined
  }
  const { pathname } = new URL(url)
  if (pathname === '/') {
    return null
  }
  return pathname
}
================================================
FILE: src/lib/encode-buffer-uri-component.js
================================================
'use strict'
// https://github.com/ipfs/js-ipfs-http-client/issues/569
module.exports = function encodeBuffer (buf) {
let uriEncoded = ''
for (const byte of buf) {
// https://tools.ietf.org/html/rfc3986#page-14
// ALPHA (%41-%5A and %61-%7A), DIGIT (%30-%39), hyphen (%2D), period (%2E),
// underscore (%5F), or tilde (%7E)
if (
(byte >= 0x41 && byte <= 0x5A) ||
(byte >= 0x61 && byte <= 0x7A) ||
(byte >= 0x30 && byte <= 0x39) ||
(byte === 0x2D) ||
(byte === 0x2E) ||
(byte === 0x5F) ||
(byte === 0x7E)
) {
uriEncoded += String.fromCharCode(byte)
} else {
uriEncoded += `%${byte.toString(16).padStart(2, '0')}`
}
}
return uriEncoded
}
================================================
FILE: src/lib/error-handler.js
================================================
'use strict'
const { HTTPError } = require('ky-universal')
const log = require('debug')('ipfs-http-client:lib:error-handler')
const { isNode, isElectronMain } = require('ipfs-utils/src/env')
// True when the response declares a JSON body via its Content-Type header.
function isJsonResponse (res) {
  const contentType = res.headers.get('Content-Type') || ''
  return contentType.startsWith('application/json')
}
// ky afterResponse hook: pass successful responses through untouched,
// otherwise throw an HTTPError whose message is extracted from the error
// payload when possible.
module.exports = async function errorHandler (input, options, response) {
  if (response.ok) {
    // FIXME: remove when fixed https://github.com/sindresorhus/ky-universal/issues/8
    //
    // ky clones the response for each handler. In Node.js the response body is
    // piped to 2 PassThroughs, one becomes the real body and the other is used
    // in the clone.
    //
    // If the body in the clone is not consumed or destroyed the highwater mark
    // will be reached (for large payloads) and stop the real body from flowing.
    if (isNode || isElectronMain) response.body.destroy()
    return
  }
  let msg
  try {
    if (isJsonResponse(response)) {
      // error payloads carry `Message` (or lowercase `message`)
      const data = await response.json()
      log(data)
      msg = data.Message || data.message
    } else {
      msg = await response.text()
    }
  } catch (err) {
    log('Failed to parse error response', err)
    // Failed to extract/parse error message from response
    msg = err.message
  }
  const error = new HTTPError(response)
  // If we managed to extract a message from the response, use it
  if (msg) {
    error.message = msg
  }
  throw error
}
================================================
FILE: src/lib/mode-to-string.js
================================================
'use strict'
// Normalise a file mode for the HTTP API: strings pass through untouched,
// numbers become 4-digit zero-padded octal (e.g. 0o755 -> '0755').
// Returns undefined when no mode was given.
module.exports = (mode) => {
  if (mode == null) {
    return undefined
  }

  const isString = typeof mode === 'string' || mode instanceof String
  return isString ? mode : mode.toString(8).padStart(4, '0')
}
================================================
FILE: src/lib/mtime-to-object.js
================================================
'use strict'
// Convert any of the supported mtime representations into the
// `{ secs, nsecs }` shape expected by the HTTP API. Returns undefined
// for null/undefined input or an unrecognised shape.
module.exports = function parseMtime (mtime) {
  if (mtime == null) {
    return undefined
  }

  // Javascript Date
  if (mtime instanceof Date) {
    const ms = mtime.getTime()
    const secs = Math.floor(ms / 1000)
    const nsecs = (ms - secs * 1000) * 1000
    return { secs, nsecs }
  }

  // Already in { secs, nsecs } form
  if (Object.prototype.hasOwnProperty.call(mtime, 'secs')) {
    return { secs: mtime.secs, nsecs: mtime.nsecs }
  }

  // UnixFS TimeSpec
  if (Object.prototype.hasOwnProperty.call(mtime, 'Seconds')) {
    return { secs: mtime.Seconds, nsecs: mtime.FractionalNanoseconds }
  }

  // process.hrtime() tuple: [seconds, nanoseconds]
  if (Array.isArray(mtime)) {
    return { secs: mtime[0], nsecs: mtime[1] }
  }

  /*
  TODO: https://github.com/ipfs/aegir/issues/487

  // process.hrtime.bigint()
  if (typeof mtime === 'bigint') {
    const secs = mtime / BigInt(1e9)
    const nsecs = mtime - (secs * BigInt(1e9))

    return {
      secs: parseInt(secs),
      nsecs: parseInt(nsecs)
    }
  }
  */
}
================================================
FILE: src/lib/object-to-camel-with-metadata.js
================================================
'use strict'
const toCamel = require('./object-to-camel')
// Like object-to-camel, but additionally decodes UnixFS metadata fields:
// `mode` (octal string -> number) and `mtime`/`mtimeNsecs` -> { secs, nsecs }.
function toCamelWithMetadata (entry) {
  const file = toCamel(entry)

  if (Object.prototype.hasOwnProperty.call(file, 'mode')) {
    // the API transmits the mode as an octal string
    file.mode = parseInt(file.mode, 8)
  }

  if (Object.prototype.hasOwnProperty.call(file, 'mtime')) {
    const secs = file.mtime
    const nsecs = file.mtimeNsecs || 0
    file.mtime = { secs, nsecs }
    delete file.mtimeNsecs
  }

  return file
}
module.exports = toCamelWithMetadata
================================================
FILE: src/lib/object-to-camel.js
================================================
'use strict'
// Convert object properties to camel case.
// NOT recursive!
// e.g.
// AgentVersion => agentVersion
// ID => id
module.exports = obj => {
if (obj == null) return obj
const caps = /^[A-Z]+$/
return Object.keys(obj).reduce((camelObj, k) => {
if (caps.test(k)) { // all caps
camelObj[k.toLowerCase()] = obj[k]
} else if (caps.test(k[0])) { // pascal
camelObj[k[0].toLowerCase() + k.slice(1)] = obj[k]
} else {
camelObj[k] = obj[k]
}
return camelObj
}, {})
}
================================================
FILE: src/log/index.js
================================================
'use strict'
// Aggregates the `log/*` API commands (daemon logging control) into one object.
module.exports = config => ({
  tail: require('./tail')(config),
  ls: require('./ls')(config),
  level: require('./level')(config)
})
================================================
FILE: src/log/level.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (subsystem, level, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', subsystem)
searchParams.append('arg', level)
const res = await ky.post('log/level', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/log/ls.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('log/ls', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return res.Strings
}
})
================================================
FILE: src/log/tail.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * tail (options) {
options = options || {}
const res = await ky.post('log/tail', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
})
yield * ndjson(toIterable(res.body))
}
})
================================================
FILE: src/ls.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const ndjson = require('iterable-ndjson')
const toIterable = require('stream-to-it/source')
const configure = require('./lib/configure')
// List directory contents for Unix filesystem objects (`ls` endpoint).
// Yields one entry per link: { name, path, size, cid, type, depth }
// plus optional `mode` and `mtime` UnixFS metadata.
module.exports = configure(({ ky }) => {
  return async function * ls (path, options) {
    options = options || {}
    const searchParams = new URLSearchParams()
    // accept raw multihash buffers as well as CID instances/strings
    searchParams.set('arg', `${Buffer.isBuffer(path) ? new CID(path) : path}`)
    searchParams.set('stream', options.stream == null ? true : options.stream)
    if (options.long != null) searchParams.set('long', options.long)
    if (options.unsorted != null) searchParams.set('unsorted', options.unsorted)
    if (options.recursive != null) searchParams.set('recursive', options.recursive)
    const res = await ky.post('ls', {
      timeout: options.timeout,
      signal: options.signal,
      headers: options.headers,
      searchParams
    })
    // each ndjson chunk should carry Objects[0].Links - validate strictly
    for await (let result of ndjson(toIterable(res.body))) {
      result = result.Objects
      if (!result) {
        throw new Error('expected .Objects in results')
      }
      result = result[0]
      if (!result) {
        throw new Error('expected one array in results.Objects')
      }
      result = result.Links
      if (!Array.isArray(result)) {
        throw new Error('expected one array in results.Objects[0].Links')
      }
      for (const link of result) {
        const entry = {
          name: link.Name,
          path: path + '/' + link.Name,
          size: link.Size,
          cid: new CID(link.Hash),
          type: typeOf(link),
          depth: link.Depth || 1
        }
        if (link.Mode) {
          // mode is transmitted as an octal string
          entry.mode = parseInt(link.Mode, 8)
        }
        if (link.Mtime !== undefined && link.Mtime !== null) {
          entry.mtime = {
            secs: link.Mtime
          }
          // nsecs only included when the server sent them
          if (link.MtimeNsecs !== undefined && link.MtimeNsecs !== null) {
            entry.mtime.nsecs = link.MtimeNsecs
          }
        }
        yield entry
      }
    }
  }
})
// Map the numeric link type from the API to a human readable string.
// Types 1 and 5 both map to 'dir', 2 to 'file'; anything else is 'unknown'.
function typeOf (link) {
  if (link.Type === 1 || link.Type === 5) {
    return 'dir'
  }
  if (link.Type === 2) {
    return 'file'
  }
  return 'unknown'
}
================================================
FILE: src/mount.js
================================================
'use strict'
const configure = require('./lib/configure')
const toCamel = require('./lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.ipfsPath != null) searchParams.set('ipfs-path', options.ipfsPath)
if (options.ipnsPath != null) searchParams.set('ipns-path', options.ipnsPath)
const res = await ky.post('dns', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/name/index.js
================================================
'use strict'
// Aggregates the `name/*` API commands (IPNS publish/resolve/pubsub).
module.exports = config => ({
  publish: require('./publish')(config),
  resolve: require('./resolve')(config),
  pubsub: require('./pubsub')(config)
})
================================================
FILE: src/name/publish.js
================================================
'use strict'
const configure = require('../lib/configure')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (path, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', path)
if (options.allowOffline != null) searchParams.set('allow-offline', options.allowOffline)
if (options.key) searchParams.set('key', options.key)
if (options.lifetime) searchParams.set('lifetime', options.lifetime)
if (options.quieter != null) searchParams.set('quieter', options.quieter)
if (options.resolve != null) searchParams.set('resolve', options.resolve)
if (options.ttl) searchParams.set('ttl', options.ttl)
const res = await ky.post('name/publish', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/name/pubsub/cancel.js
================================================
'use strict'
const configure = require('../../lib/configure')
const toCamel = require('../../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async (name, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', name)
const res = await ky.post('name/pubsub/cancel', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/name/pubsub/index.js
================================================
'use strict'
// Aggregates the `name/pubsub/*` API commands (IPNS-over-pubsub control).
module.exports = config => ({
  cancel: require('./cancel')(config),
  state: require('./state')(config),
  subs: require('./subs')(config)
})
================================================
FILE: src/name/pubsub/state.js
================================================
'use strict'
const configure = require('../../lib/configure')
const toCamel = require('../../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('name/pubsub/state', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: src/name/pubsub/subs.js
================================================
'use strict'
const configure = require('../../lib/configure')
module.exports = configure(({ ky }) => {
return async (name, options) => {
options = options || {}
const res = await ky.post('name/pubsub/subs', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return res.Strings || []
}
})
================================================
FILE: src/name/resolve.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * (path, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', path)
searchParams.set('stream', options.stream == null ? true : options.stream)
if (options.dhtRecordCount != null) searchParams.set('dht-record-count', options.dhtRecordCount)
if (options.dhtTimeout != null) searchParams.set('dht-timeout', options.dhtTimeout)
if (options.noCache != null) searchParams.set('nocache', options.noCache)
if (options.recursive != null) searchParams.set('recursive', options.recursive)
const res = await ky.post('name/resolve', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const result of ndjson(toIterable(res.body))) {
yield result.Path
}
}
})
================================================
FILE: src/object/data.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async function data (cid, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
const data = await ky.post('object/data', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).arrayBuffer()
return Buffer.from(data)
}
})
================================================
FILE: src/object/get.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const { DAGNode, DAGLink } = require('ipld-dag-pb')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
searchParams.set('data-encoding', 'base64')
const res = await ky.post('object/get', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return new DAGNode(
Buffer.from(res.Data, 'base64'),
(res.Links || []).map(l => new DAGLink(l.Name, l.Size, l.Hash))
)
}
})
================================================
FILE: src/object/index.js
================================================
'use strict'
// Aggregates the `object/*` API commands (DAG node manipulation).
module.exports = config => ({
  data: require('./data')(config),
  get: require('./get')(config),
  links: require('./links')(config),
  new: require('./new')(config),
  patch: require('./patch')(config),
  put: require('./put')(config),
  stat: require('./stat')(config)
})
================================================
FILE: src/object/links.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const { DAGLink } = require('ipld-dag-pb')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
const res = await ky.post('object/links', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Links || []).map(l => new DAGLink(l.Name, l.Size, l.Hash))
}
})
================================================
FILE: src/object/new.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (template, options) => {
if (typeof template !== 'string') {
options = template
template = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (template) searchParams.set('arg', template)
const { Hash } = await ky.post('object/new', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return new CID(Hash)
}
})
================================================
FILE: src/object/patch/add-link.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, dLink, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
searchParams.append('arg', dLink.Name || dLink.name || null)
searchParams.append('arg', (dLink.Hash || dLink.cid || '').toString() || null)
const { Hash } = await ky.post('object/patch/add-link', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return new CID(Hash)
}
})
================================================
FILE: src/object/patch/append-data.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../../lib/configure')
const toFormData = require('../../lib/buffer-to-form-data')
module.exports = configure(({ ky }) => {
return async (cid, data, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
const { Hash } = await ky.post('object/patch/append-data', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: toFormData(data)
}).json()
return new CID(Hash)
}
})
================================================
FILE: src/object/patch/index.js
================================================
'use strict'
// Aggregates the `object/patch/*` API commands (incremental DAG node edits).
module.exports = config => ({
  addLink: require('./add-link')(config),
  appendData: require('./append-data')(config),
  rmLink: require('./rm-link')(config),
  setData: require('./set-data')(config)
})
================================================
FILE: src/object/patch/rm-link.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, dLink, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
searchParams.append('arg', dLink.Name || dLink.name || null)
const { Hash } = await ky.post('object/patch/rm-link', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return new CID(Hash)
}
})
================================================
FILE: src/object/patch/set-data.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../../lib/configure')
const toFormData = require('../../lib/buffer-to-form-data')
module.exports = configure(({ ky }) => {
return async (cid, data, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
const { Hash } = await ky.post('object/patch/set-data', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams,
body: toFormData(data)
}).json()
return new CID(Hash)
}
})
================================================
FILE: src/object/put.js
================================================
'use strict'
const CID = require('cids')
const { DAGNode } = require('ipld-dag-pb')
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
const toFormData = require('../lib/buffer-to-form-data')
// Store a DAG node (`object/put`). Accepts a DAGNode, a { Data, Links }
// plain object, or a Buffer (sent raw when `options.enc` is set,
// otherwise treated as the node's Data with no links).
module.exports = configure(({ ky }) => {
  return async (obj, options) => {
    options = options || {}
    let tmpObj = {
      Data: null,
      Links: []
    }
    if (Buffer.isBuffer(obj)) {
      if (!options.enc) {
        // no encoding given - buffer contents become the node's Data
        tmpObj = {
          Data: obj.toString(),
          Links: []
        }
      }
    } else if (DAGNode.isDAGNode(obj)) {
      tmpObj = {
        Data: obj.Data.toString(),
        Links: obj.Links.map(l => ({
          Name: l.Name,
          Hash: l.Hash.toString(),
          Size: l.Tsize
        }))
      }
    } else if (typeof obj === 'object') {
      tmpObj.Data = obj.Data.toString()
      tmpObj.Links = obj.Links
    } else {
      throw new Error('obj not recognized')
    }
    let buf
    if (Buffer.isBuffer(obj) && options.enc) {
      // pre-encoded buffer is uploaded as-is; `inputenc` tells the server how
      buf = obj
    } else {
      buf = Buffer.from(JSON.stringify(tmpObj))
    }
    const searchParams = new URLSearchParams(options.searchParams)
    if (options.enc) searchParams.set('inputenc', options.enc)
    if (options.pin != null) searchParams.set('pin', options.pin)
    if (options.quiet != null) searchParams.set('quiet', options.quiet)
    const { Hash } = await ky.post('object/put', {
      timeout: options.timeout,
      signal: options.signal,
      headers: options.headers,
      searchParams,
      body: toFormData(buf)
    }).json()
    return new CID(Hash)
  }
})
================================================
FILE: src/object/stat.js
================================================
'use strict'
const { Buffer } = require('buffer')
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (cid, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${Buffer.isBuffer(cid) ? new CID(cid) : cid}`)
let res
try {
res = await ky.post('object/stat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
} catch (err) {
if (err.name === 'TimeoutError') {
err.message = `failed to get block for ${Buffer.isBuffer(cid) ? new CID(cid) : cid}: context deadline exceeded`
}
throw err
}
return res
}
})
================================================
FILE: src/pin/add.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (paths, options) => {
paths = Array.isArray(paths) ? paths : [paths]
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
paths.forEach(path => searchParams.append('arg', `${path}`))
if (options.recursive != null) searchParams.set('recursive', options.recursive)
const res = await ky.post('pin/add', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Pins || []).map(cid => ({ cid: new CID(cid) }))
}
})
================================================
FILE: src/pin/index.js
================================================
'use strict'
// Aggregates the `pin/*` API commands (pin management).
module.exports = config => ({
  add: require('./add')(config),
  rm: require('./rm')(config),
  ls: require('./ls')(config)
})
================================================
FILE: src/pin/ls.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const CID = require('cids')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
// List pinned objects (`pin/ls`), optionally filtered by path(s) and type.
// Yields { cid, type } for each pin; handles both the streaming (ndjson,
// one pin per chunk) and non-streaming (single `Keys` map) response shapes.
module.exports = configure(({ ky }) => {
  return async function * ls (path, options) {
    // path may be omitted: ls({ type: 'recursive' })
    if (path && path.type) {
      options = path
      path = null
    }
    path = path || []
    path = Array.isArray(path) ? path : [path]
    options = options || {}
    const searchParams = new URLSearchParams(options.searchParams)
    searchParams.set('stream', options.stream == null ? true : options.stream)
    path.forEach(p => searchParams.append('arg', `${p}`))
    if (options.type) searchParams.set('type', options.type)
    const res = await ky.post('pin/ls', {
      timeout: options.timeout,
      signal: options.signal,
      headers: options.headers,
      searchParams
    })
    for await (const pin of ndjson(toIterable(res.body))) {
      if (pin.Keys) { // non-streaming response
        for (const cid of Object.keys(pin.Keys)) {
          yield { cid: new CID(cid), type: pin.Keys[cid].Type }
        }
        // a Keys map is the complete result - nothing more will follow
        return
      }
      yield { cid: new CID(pin.Cid), type: pin.Type }
    }
  }
})
================================================
FILE: src/pin/rm.js
================================================
'use strict'
const CID = require('cids')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (path, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${path}`)
if (options.recursive != null) searchParams.set('recursive', options.recursive)
const res = await ky.post('pin/rm', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Pins || []).map(cid => ({ cid: new CID(cid) }))
}
})
================================================
FILE: src/ping.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const configure = require('./lib/configure')
const toIterable = require('stream-to-it/source')
const toCamel = require('./lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async function * ping (peerId, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${peerId}`)
if (options.count != null) searchParams.set('count', options.count)
const res = await ky.post('ping', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const chunk of ndjson(toIterable(res.body))) {
yield toCamel(chunk)
}
}
})
================================================
FILE: src/pubsub/index.js
================================================
'use strict'
// Aggregates the `pubsub/*` API commands.
module.exports = config => ({
  ls: require('./ls')(config),
  peers: require('./peers')(config),
  publish: require('./publish')(config),
  subscribe: require('./subscribe')(config),
  unsubscribe: require('./unsubscribe')(config)
})
================================================
FILE: src/pubsub/ls.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (options) => {
options = options || {}
const { Strings } = await ky.post('pubsub/ls', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return Strings || []
}
})
================================================
FILE: src/pubsub/peers.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (topic, options) => {
if (!options && typeof topic === 'object') {
options = topic
topic = null
}
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', topic)
const { Strings } = await ky.post('pubsub/peers', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return Strings || []
}
})
================================================
FILE: src/pubsub/publish.js
================================================
'use strict'
const { Buffer } = require('buffer')
const configure = require('../lib/configure')
const encodeBuffer = require('../lib/encode-buffer-uri-component')
module.exports = configure(({ ky }) => {
return async (topic, data, options) => {
options = options || {}
data = Buffer.from(data)
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', topic)
const res = await ky.post(`pubsub/pub?${searchParams}&arg=${encodeBuffer(data)}`, {
timeout: options.timeout,
signal: options.signal,
headers: options.headers
}).text()
return res
}
})
================================================
FILE: src/pubsub/subscribe.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const bs58 = require('bs58')
const { Buffer } = require('buffer')
const log = require('debug')('ipfs-http-client:pubsub:subscribe')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
const SubscriptionTracker = require('./subscription-tracker')
module.exports = configure((config) => {
const ky = config.ky
const subsTracker = SubscriptionTracker.singleton()
const publish = require('./publish')(config)
return async (topic, handler, options) => {
options = options || {}
options.signal = subsTracker.subscribe(topic, handler, options.signal)
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', topic)
if (options.discover != null) searchParams.set('discover', options.discover)
let res
// In Firefox, the initial call to fetch does not resolve until some data
// is received. If this doesn't happen within 1 second send an empty message
// to kickstart the process.
const ffWorkaround = setTimeout(async () => {
log(`Publishing empty message to "${topic}" to resolve subscription request`)
try {
await publish(topic, Buffer.alloc(0), options)
} catch (err) {
log('Failed to publish empty message', err)
}
}, 1000)
try {
res = await ky.post('pubsub/sub', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
} catch (err) { // Initial subscribe fail, ensure we clean up
subsTracker.unsubscribe(topic, handler)
throw err
}
clearTimeout(ffWorkaround)
readMessages(ndjson(toIterable(res.body)), {
onMessage: handler,
onEnd: () => subsTracker.unsubscribe(topic, handler),
onError: options.onError
})
}
})
/**
 * Pump decoded pubsub messages from an NDJSON stream into the handler.
 * Parse failures are reported as non-fatal; stream failures (other than
 * aborts) as fatal. `onEnd` always runs when the stream finishes.
 */
async function readMessages (msgStream, { onMessage, onEnd, onError }) {
  const reportError = onError || log

  // Decode the base64 wire fields into the message shape handlers expect.
  const toPubsubMessage = (msg) => ({
    from: bs58.encode(Buffer.from(msg.from, 'base64')).toString(),
    data: Buffer.from(msg.data, 'base64'),
    seqno: Buffer.from(msg.seqno, 'base64'),
    topicIDs: msg.topicIDs
  })

  try {
    for await (const msg of msgStream) {
      try {
        onMessage(toPubsubMessage(msg))
      } catch (err) {
        err.message = `Failed to parse pubsub message: ${err.message}`
        reportError(err, false, msg) // Not fatal
      }
    }
  } catch (err) {
    // FIXME: In testing with Chrome, err.type is undefined (should not be!)
    // Temporarily use the name property instead.
    if (err.type !== 'aborted' && err.name !== 'AbortError') {
      reportError(err, true) // Fatal
    }
  } finally {
    onEnd()
  }
}
================================================
FILE: src/pubsub/subscription-tracker.js
================================================
'use strict'
const AbortController = require('abort-controller')
/**
 * Tracks live pubsub subscriptions so `unsubscribe` can abort the underlying
 * HTTP request. One entry per (topic, handler) pair.
 */
class SubscriptionTracker {
  constructor () {
    // topic (string) -> Array<{ handler, controller }>
    this._subs = new Map()
  }

  static singleton () {
    if (!SubscriptionTracker.instance) {
      SubscriptionTracker.instance = new SubscriptionTracker()
    }
    return SubscriptionTracker.instance
  }

  /**
   * Register a handler for a topic; returns a signal that aborts when the
   * pair is unsubscribed. An optional external signal is forwarded.
   */
  subscribe (topic, handler, signal) {
    const existing = this._subs.get(topic) || []
    if (existing.some(s => s.handler === handler)) {
      throw new Error(`Already subscribed to ${topic} with this handler`)
    }

    // Create controller so a call to unsubscribe can cancel the request
    const controller = new AbortController()
    this._subs.set(topic, [{ handler, controller }, ...existing])

    // If there is an external signal, forward the abort event
    if (signal) {
      signal.addEventListener('abort', () => this.unsubscribe(topic, handler))
    }

    return controller.signal
  }

  /**
   * Remove one handler from a topic, or all handlers when none is given,
   * aborting each removed subscription's request.
   */
  unsubscribe (topic, handler) {
    const subs = this._subs.get(topic) || []
    const kept = handler ? subs.filter(s => s.handler !== handler) : []
    const removed = handler ? subs.filter(s => s.handler === handler) : subs

    this._subs.set(topic, kept)
    for (const sub of removed) {
      sub.controller.abort()
    }
  }
}
module.exports = SubscriptionTracker
================================================
FILE: src/pubsub/unsubscribe.js
================================================
'use strict'
const configure = require('../lib/configure')
const SubscriptionTracker = require('./subscription-tracker')
module.exports = configure(({ ky }) => {
const subsTracker = SubscriptionTracker.singleton()
// eslint-disable-next-line require-await
return async (topic, handler) => subsTracker.unsubscribe(topic, handler)
})
================================================
FILE: src/refs/index.js
================================================
'use strict'
const configure = require('../lib/configure')
const { Buffer } = require('buffer')
const CID = require('cids')
const ndjson = require('iterable-ndjson')
const toIterable = require('stream-to-it/source')
const toCamel = require('../lib/object-to-camel')
module.exports = config => {
const refs = (configure(({ ky }) => {
return async function * refs (args, options) {
options = options || {}
const searchParams = new URLSearchParams()
if (options.format !== undefined) {
searchParams.set('format', options.format)
}
if (options.edges !== undefined) {
searchParams.set('edges', options.edges)
}
if (options.unique !== undefined) {
searchParams.set('unique', options.unique)
}
if (options.recursive !== undefined) {
searchParams.set('recursive', options.recursive)
}
if (options.maxDepth !== undefined) {
searchParams.set('max-depth', options.maxDepth)
}
if (!Array.isArray(args)) {
args = [args]
}
for (const arg of args) {
searchParams.append('arg', `${Buffer.isBuffer(arg) ? new CID(arg) : arg}`)
}
const res = await ky.post('refs', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const file of ndjson(toIterable(res.body))) {
yield toCamel(file)
}
}
}))(config)
refs.local = require('./local')(config)
return refs
}
================================================
FILE: src/refs/local.js
================================================
'use strict'
const configure = require('../lib/configure')
const ndjson = require('iterable-ndjson')
const toIterable = require('stream-to-it/source')
const toCamel = require('../lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async function * refsLocal (options) {
options = options || {}
const res = await ky.post('refs/local', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers
})
for await (const file of ndjson(toIterable(res.body))) {
yield toCamel(file)
}
}
})
================================================
FILE: src/repo/gc.js
================================================
'use strict'
const CID = require('cids')
const ndjson = require('iterable-ndjson')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * gc (peerId, options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.streamErrors) searchParams.set('stream-errors', options.streamErrors)
const res = await ky.post('repo/gc', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const gcResult of ndjson(toIterable(res.body))) {
yield {
err: gcResult.Error ? new Error(gcResult.Error) : null,
cid: (gcResult.Key || {})['/'] ? new CID(gcResult.Key['/']) : null
}
}
}
})
================================================
FILE: src/repo/index.js
================================================
'use strict'
module.exports = config => ({
gc: require('./gc')(config),
stat: require('./stat')(config),
version: require('./version')(config)
})
================================================
FILE: src/repo/stat.js
================================================
'use strict'
const Big = require('bignumber.js')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.sizeOnly) searchParams.set('size-only', options.sizeOnly)
const res = await ky.post('repo/stat', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return {
numObjects: new Big(res.NumObjects),
repoSize: new Big(res.RepoSize),
repoPath: res.RepoPath,
version: res.Version,
storageMax: new Big(res.StorageMax)
}
}
})
================================================
FILE: src/repo/version.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.sizeOnly) searchParams.set('size-only', options.sizeOnly)
const res = await ky.post('repo/version', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res.Version
}
})
================================================
FILE: src/resolve.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ ky }) => {
return async (path, options) => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
searchParams.set('arg', `${path}`)
if (options.cidBase) searchParams.set('cid-base', options.cidBase)
if (options.dhtRecordCount) searchParams.set('dht-record-count', options.dhtRecordCount)
if (options.dhtTimeout) searchParams.set('dht-timeout', options.dhtTimeout)
if (options.recursive != null) searchParams.set('recursive', options.recursive)
const res = await ky.post('resolve', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res.Path
}
})
================================================
FILE: src/stats/bw.js
================================================
'use strict'
const ndjson = require('iterable-ndjson')
const Big = require('bignumber.js')
const configure = require('../lib/configure')
const toIterable = require('stream-to-it/source')
module.exports = configure(({ ky }) => {
return async function * bw (options) {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.interval) searchParams.set('interval', options.interval)
if (options.peer) searchParams.set('peer', options.peer)
if (options.poll != null) searchParams.set('poll', options.poll)
if (options.proto) searchParams.set('proto', options.proto)
const res = await ky.post('stats/bw', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
})
for await (const stats of ndjson(toIterable(res.body))) {
yield {
totalIn: new Big(stats.TotalIn),
totalOut: new Big(stats.TotalOut),
rateIn: new Big(stats.RateIn),
rateOut: new Big(stats.RateOut)
}
}
}
})
================================================
FILE: src/stats/index.js
================================================
'use strict'
module.exports = config => ({
bitswap: require('../bitswap/stat')(config),
bw: require('./bw')(config),
repo: require('../repo/stat')(config)
})
================================================
FILE: src/stop.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
return ky.post('shutdown', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).text()
}
})
================================================
FILE: src/swarm/addrs.js
================================================
'use strict'
const multiaddr = require('multiaddr')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('swarm/addrs', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return Object.keys(res.Addrs).map(id => ({
id,
addrs: (res.Addrs[id] || []).map(a => multiaddr(a))
}))
}
})
================================================
FILE: src/swarm/connect.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (addrs, options) => {
addrs = Array.isArray(addrs) ? addrs : [addrs]
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
addrs.forEach(addr => searchParams.append('arg', addr))
const res = await ky.post('swarm/connect', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res.Strings || []
}
})
================================================
FILE: src/swarm/disconnect.js
================================================
'use strict'
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async (addrs, options) => {
addrs = Array.isArray(addrs) ? addrs : [addrs]
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
addrs.forEach(addr => searchParams.append('arg', `${addr}`))
const res = await ky.post('swarm/disconnect', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return res.Strings || []
}
})
================================================
FILE: src/swarm/index.js
================================================
'use strict'
module.exports = config => ({
addrs: require('./addrs')(config),
connect: require('./connect')(config),
disconnect: require('./disconnect')(config),
localAddrs: require('./localAddrs')(config),
peers: require('./peers')(config)
})
================================================
FILE: src/swarm/localAddrs.js
================================================
'use strict'
const multiaddr = require('multiaddr')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.id != null) searchParams.append('id', options.id)
const res = await ky.post('swarm/addrs/local', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Strings || []).map(a => multiaddr(a))
}
})
================================================
FILE: src/swarm/peers.js
================================================
'use strict'
const multiaddr = require('multiaddr')
const configure = require('../lib/configure')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const searchParams = new URLSearchParams(options.searchParams)
if (options.direction != null) searchParams.append('direction', options.direction)
if (options.latency != null) searchParams.append('latency', options.latency)
if (options.streams != null) searchParams.append('streams', options.streams)
if (options.verbose != null) searchParams.append('verbose', options.verbose)
const res = await ky.post('swarm/peers', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams
}).json()
return (res.Peers || []).map(peer => {
const info = {}
try {
info.addr = multiaddr(peer.Addr)
info.peer = peer.Peer
} catch (error) {
info.error = error
info.rawPeerInfo = peer
}
if (peer.Muxer) {
info.muxer = peer.Muxer
}
if (peer.Latency) {
info.latency = peer.Latency
}
if (peer.Streams) {
info.streams = peer.Streams
}
if (peer.Direction != null) {
info.direction = peer.Direction
}
return info
})
}
})
================================================
FILE: src/update.js
================================================
'use strict'
const configure = require('./lib/configure')
module.exports = configure(({ ky }) => {
return options => {
options = options || {}
return ky.post('update', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).text()
}
})
================================================
FILE: src/version.js
================================================
'use strict'
const configure = require('./lib/configure')
const toCamel = require('./lib/object-to-camel')
module.exports = configure(({ ky }) => {
return async options => {
options = options || {}
const res = await ky.post('version', {
timeout: options.timeout,
signal: options.signal,
headers: options.headers,
searchParams: options.searchParams
}).json()
return toCamel(res)
}
})
================================================
FILE: test/commands.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const f = require('./utils/factory')()
describe('.commands', function () {
  // Spawning a daemon can be slow on CI.
  this.timeout(60 * 1000)

  let ipfs

  before(async () => {
    const node = await f.spawn()
    ipfs = node.api
  })

  after(() => f.clean())

  it('lists commands', async () => {
    const commands = await ipfs.commands()
    expect(commands).to.exist()
  })
})
================================================
FILE: test/constructor.spec.js
================================================
/* eslint-env mocha, browser */
'use strict'
const multiaddr = require('multiaddr')
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const f = require('./utils/factory')()
const ipfsClient = require('../src/index.js')
// The client constructor accepts: nothing, an options object, a multiaddr
// (string or instance), or host/port fields. Every form must normalise to
// the same endpoint configuration, checked via expectConfig below.
describe('ipfs-http-client constructor tests', () => {
  describe('parameter permuations', () => {
    it('none', () => {
      const ipfs = ipfsClient()
      // In a browser the client defaults to the page's own host/port;
      // elsewhere expectConfig's library defaults apply.
      if (typeof self !== 'undefined') {
        const { hostname, port } = self.location
        expectConfig(ipfs, { host: hostname, port })
      } else {
        expectConfig(ipfs, {})
      }
    })

    it('opts', () => {
      const host = 'wizard.world'
      const port = '999'
      const protocol = 'https'
      const ipfs = ipfsClient({ host, port, protocol })
      expectConfig(ipfs, { host, port, protocol })
    })

    it('multiaddr dns4 string (implicit http)', () => {
      const host = 'foo.com'
      const port = '1001'
      const protocol = 'http' // default to http if not specified in multiaddr
      const addr = `/dns4/${host}/tcp/${port}`
      const ipfs = ipfsClient(addr)
      expectConfig(ipfs, { host, port, protocol })
    })

    it('multiaddr dns4 string (explicit https)', () => {
      const host = 'foo.com'
      const port = '1001'
      const protocol = 'https'
      const addr = `/dns4/${host}/tcp/${port}/${protocol}`
      const ipfs = ipfsClient(addr)
      expectConfig(ipfs, { host, port, protocol })
    })

    it('multiaddr ipv4 string (implicit http)', () => {
      const host = '101.101.101.101'
      const port = '1001'
      const protocol = 'http'
      const addr = `/ip4/${host}/tcp/${port}`
      const ipfs = ipfsClient(addr)
      expectConfig(ipfs, { host, port, protocol })
    })

    it('multiaddr ipv4 string (explicit https)', () => {
      const host = '101.101.101.101'
      const port = '1001'
      const protocol = 'https'
      const addr = `/ip4/${host}/tcp/${port}/${protocol}`
      const ipfs = ipfsClient(addr)
      expectConfig(ipfs, { host, port, protocol })
    })

    it('multiaddr instance', () => {
      const host = 'ace.place'
      const port = '1001'
      const addr = multiaddr(`/dns4/${host}/tcp/${port}`)
      const ipfs = ipfsClient(addr)
      expectConfig(ipfs, { host, port })
    })

    it('host and port strings', () => {
      const host = '1.1.1.1'
      const port = '9999'
      const ipfs = ipfsClient({ host, port })
      expectConfig(ipfs, { host, port })
    })

    it('host, port and api path', () => {
      const host = '10.100.100.255'
      const port = '9999'
      const apiPath = '/future/api/v1/'
      const ipfs = ipfsClient({ host, port, apiPath })
      // The trailing slash of a custom api path is stripped by the client.
      expectConfig(ipfs, { host, port, apiPath: apiPath.slice(0, -1) })
    })

    it('throws on invalid multiaddr', () => {
      expect(() => ipfsClient('/dns4')).to.throw('invalid address')
      expect(() => ipfsClient('/hello')).to.throw('no protocol with name')
      expect(() => ipfsClient('/dns4/ipfs.io')).to.throw()
    })
  })

  describe('integration', () => {
    let ipfsd

    before(async function () {
      this.timeout(60 * 1000) // slow CI
      ipfsd = await f.spawn()
    })

    after(() => f.clean())

    it('can connect to an ipfs http api', async () => {
      await clientWorks(ipfsClient(ipfsd.apiAddr))
    })
  })
})
// A working client can fetch its node's identity record.
async function clientWorks (client) {
  const identity = await client.id()
  expect(identity).to.have.a.property('id')
  expect(identity).to.have.a.property('publicKey')
}
// Assert the client's endpoint config matches the expected values; fields
// not supplied fall back to the library defaults asserted here.
function expectConfig (ipfs, { host, port, protocol, apiPath }) {
  const endpoint = ipfs.getEndpointConfig()
  expect(endpoint.host).to.be.oneOf([host, 'localhost', ''])
  expect(endpoint.port).to.be.oneOf([port, '5001', '80'])
  expect(endpoint.protocol).to.equal(protocol || 'http')
  expect(endpoint['api-path']).to.equal(apiPath || '/api/v0')
}
================================================
FILE: test/custom-headers.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { isNode } = require('ipfs-utils/src/env')
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const ipfsClient = require('../src')
describe('custom headers', function () {
  // do not test in browser
  if (!isNode) { return }

  let ipfs

  // initialize ipfs with custom headers; port 6001 is served by the stub
  // HTTP server created inside the test below.
  before(() => {
    ipfs = ipfsClient({
      host: 'localhost',
      port: 6001,
      protocol: 'http',
      headers: {
        authorization: 'Bearer ' + 'YOLO'
      }
    })
  })

  it('are supported', (done) => {
    // spin up a test http server to inspect the requests made by the library
    const server = require('http').createServer((req, res) => {
      req.on('data', () => {})
      req.on('end', () => {
        res.writeHead(200)
        res.write(JSON.stringify({}))
        res.end()
        // ensure custom headers are present; the assertion runs after the
        // response is sent, so a failure surfaces through done()'s error.
        expect(req.headers.authorization).to.equal('Bearer ' + 'YOLO')
        server.close()
        done()
      })
    })

    server.listen(6001, () => {
      ipfs.id((err, res) => {
        if (err) {
          throw err
        }
        // this call is used to test that headers are being sent.
      })
    })
  })
})
================================================
FILE: test/dag.spec.js
================================================
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 8] */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const { DAGNode } = require('ipld-dag-pb')
const CID = require('cids')
const f = require('./utils/factory')()
// Shared client handle for the .dag suite below.
let ipfs

describe('.dag', function () {
  this.timeout(20 * 1000)

  before(async function () {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  it('should be able to put and get a DAG node with format dag-pb', async () => {
    const data = Buffer.from('some data')
    const node = new DAGNode(data)
    // Round-trip: put as dag-pb, re-encode the CID as v0/base58btc, get it back.
    let cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' })
    cid = cid.toV0()
    expect(cid.codec).to.equal('dag-pb')
    cid = cid.toBaseEncodedString('base58btc')
    // expect(cid).to.equal('bafybeig3t3eugdchignsgkou3ly2mmy4ic4gtfor7inftnqn3yq4ws3a5u')
    expect(cid).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr')
    const result = await ipfs.dag.get(cid)
    expect(result.value.Data).to.deep.equal(data)
  })

  it('should be able to put and get a DAG node with format dag-cbor', async () => {
    const cbor = { foo: 'dag-cbor-bar' }
    let cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' })
    expect(cid.codec).to.equal('dag-cbor')
    cid = cid.toBaseEncodedString('base32')
    expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce')
    const result = await ipfs.dag.get(cid)
    expect(result.value).to.deep.equal(cbor)
  })

  it('should callback with error when missing DAG resolver for multicodec from requested CID', async () => {
    // The fixed CID uses the git-raw codec (see the expected error message),
    // for which the client registers no IPLD format, so dag.get must reject.
    const block = await ipfs.block.put(Buffer.from([0, 1, 2, 3]), {
      cid: new CID('z8mWaJ1dZ9fH5EetPuRsj8jj26pXsgpsr')
    })
    await expect(ipfs.dag.get(block.cid)).to.be.rejectedWith('Missing IPLD format "git-raw"')
  })
})
================================================
FILE: test/diag.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const platform = require('browser-process-platform')
const f = require('./utils/factory')()
describe('.diag', function () {
  this.timeout(50 * 1000)

  // go-ipfs does not support these on Windows
  if (platform === 'win32') { return }

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  describe('api API', () => {
    // Disabled in go-ipfs 0.4.10
    it.skip('.diag.net', async () => {
      const output = await ipfs.diag.net()
      expect(output).to.exist()
    })

    it('.diag.sys', async () => {
      const output = await ipfs.diag.sys()
      expect(output).to.exist()
      expect(output).to.have.a.property('memory')
      expect(output).to.have.a.property('diskinfo')
    })

    it('.diag.cmds', async () => {
      const output = await ipfs.diag.cmds()
      expect(output).to.exist()
    })
  })
})
================================================
FILE: test/endpoint-config.spec.js
================================================
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 8] */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const ipfsClient = require('../src')
describe('.getEndpointConfig', () => {
  it('should return the endpoint configuration', function () {
    const ipfs = ipfsClient('https://127.0.0.1:5501/ipfs/api/')
    const endpoint = ipfs.getEndpointConfig()
    // Host, scheme and port come straight from the URL...
    expect(endpoint.protocol).to.equal('https')
    expect(endpoint.host).to.equal('127.0.0.1')
    expect(endpoint.port).to.equal('5501')
    // ...while the api path has its trailing slash stripped.
    expect(endpoint['api-path']).to.equal('/ipfs/api')
  })
})
================================================
FILE: test/exports.spec.js
================================================
/* eslint-env mocha, browser */
'use strict'
const CID = require('cids')
const multiaddr = require('multiaddr')
const multibase = require('multibase')
const multicodec = require('multicodec')
const multihash = require('multihashes')
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const IpfsHttpClient = require('../')
describe('exports', () => {
  it('should export the expected types and utilities', () => {
    // Each re-export must be the exact same module instance (not a copy) so
    // user code can perform identity/instanceof checks on returned values.
    const expected = {
      Buffer,
      CID,
      multiaddr,
      multibase,
      multicodec,
      multihash
    }
    for (const [name, mod] of Object.entries(expected)) {
      expect(IpfsHttpClient[name]).to.equal(mod)
    }
  })
})
================================================
FILE: test/files-mfs.spec.js
================================================
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 8] */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const loadFixture = require('aegir/fixtures')
const mh = require('multihashes')
const all = require('it-all')
const pipe = require('it-pipe')
const { TimeoutError } = require('ky-universal')
const f = require('./utils/factory')()
// Fixture content shared by most of the add() tests below.
const testfile = loadFixture('test/fixtures/testfile.txt')

// TODO: Test against all algorithms Object.keys(mh.names)
// This subset is known to work with both go-ipfs and js-ipfs as of 2017-09-05
const HASH_ALGS = [
  'sha1',
  'sha2-256',
  'sha2-512',
  // 'keccak-224', // go throws
  'keccak-256',
  // 'keccak-384', // go throws
  'keccak-512'
]
describe('.files (the MFS API part)', function () {
this.timeout(20 * 1000)
let ipfs
const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
before(async () => {
ipfs = (await f.spawn()).api
})
after(() => f.clean())
it('.add file for testing', async () => {
const res = await all(ipfs.add(testfile))
expect(res).to.have.length(1)
expect(res[0].cid.toString()).to.equal(expectedMultihash)
expect(res[0].path).to.equal(expectedMultihash)
})
it('.add with Buffer module', async () => {
const { Buffer } = require('buffer')
const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX'
const file = Buffer.from('hello')
const res = await all(ipfs.add(file))
expect(res).to.have.length(1)
expect(res[0].cid.toString()).to.equal(expectedBufferMultihash)
expect(res[0].path).to.equal(expectedBufferMultihash)
})
it('.add with empty path and buffer content', async () => {
const expectedHash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX'
const content = Buffer.from('hello')
const res = await all(ipfs.add([{ path: '', content }]))
expect(res).to.have.length(1)
expect(res[0].cid.toString()).to.equal(expectedHash)
expect(res[0].path).to.equal(expectedHash)
})
it('.add with cid-version=1 and raw-leaves=false', async () => {
const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny'
const options = { cidVersion: 1, rawLeaves: false }
const res = await all(ipfs.add(testfile, options))
expect(res).to.have.length(1)
expect(res[0].cid.toString()).to.equal(expectedCid)
expect(res[0].path).to.equal(expectedCid)
})
it('.add with only-hash=true', async () => {
const content = String(Math.random() + Date.now())
const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true }))
expect(files).to.have.length(1)
// 'ipfs.object.get()' should timeout because content wasn't actually added
return expect(ipfs.object.get(files[0].cid, { timeout: 2000 }))
.to.be.rejectedWith(TimeoutError)
})
it('.add with options', async () => {
const res = await all(ipfs.add(testfile, { pin: false }))
expect(res).to.have.length(1)
expect(res[0].cid.toString()).to.equal(expectedMultihash)
expect(res[0].path).to.equal(expectedMultihash)
})
it('.add pins by default', async () => {
const newContent = Buffer.from(String(Math.random()))
const initialPins = await all(ipfs.pin.ls())
await all(ipfs.add(newContent))
const pinsAfterAdd = await all(ipfs.pin.ls())
expect(pinsAfterAdd.length).to.eql(initialPins.length + 1)
})
it('.add with pin=false', async () => {
const newContent = Buffer.from(String(Math.random()))
const initialPins = await all(ipfs.pin.ls())
await all(ipfs.add(newContent, { pin: false }))
const pinsAfterAdd = await all(ipfs.pin.ls())
expect(pinsAfterAdd.length).to.eql(initialPins.length)
})
HASH_ALGS.forEach((name) => {
it(`.add with hash=${name} and raw-leaves=false`, async () => {
const content = String(Math.random() + Date.now())
const file = {
path: content + '.txt',
content: Buffer.from(content)
}
const options = { hashAlg: name, rawLeaves: false }
const res = await all(ipfs.add([file], options))
expect(res).to.have.length(1)
const { cid } = res[0]
expect(mh.decode(cid.multihash).name).to.equal(name)
})
})
// Shared driver for the progress-option tests below. The three originals were
// copy-pasted bodies differing only in their TODO comments; this helper adds
// `input` with a progress callback and reports what the callback observed.
const addWithProgress = async (input) => {
  let progress
  let progressCount = 0
  const res = await all(ipfs.add(input, {
    progress: (p) => {
      progressCount += 1
      progress = p
    }
  }))
  expect(res).to.have.length(1)
  return { progress, progressCount }
}

it('.add file with progress option', async () => {
  const { progress, progressCount } = await addWithProgress(testfile)
  expect(progress).to.be.equal(testfile.byteLength)
  expect(progressCount).to.be.equal(1)
})

it('.add big file with progress option', async () => {
  // TODO: needs to be using a big file
  const { progress, progressCount } = await addWithProgress(testfile)
  expect(progress).to.be.equal(testfile.byteLength)
  expect(progressCount).to.be.equal(1)
})

it('.add directory with progress option', async () => {
  // TODO: needs to be using a directory
  const { progress, progressCount } = await addWithProgress(testfile)
  expect(progress).to.be.equal(testfile.byteLength)
  expect(progressCount).to.be.equal(1)
})

it('.add without progress options', async () => {
  const res = await all(ipfs.add(testfile))
  expect(res).to.have.length(1)
})
// NOTE(review): an exact byte-for-byte duplicate of the
// `.add with hash=${name} and raw-leaves=false` HASH_ALGS suite defined
// earlier in this file appeared here (same titles, same bodies). Running it
// twice added wall-clock time without adding any coverage, so the duplicate
// has been removed.
it('.add with object chunks and iterable content', async () => {
  // Piping `{ content: <iterable of Buffers> }` through add must yield the
  // same CID as adding the raw bytes directly
  const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
  const added = await pipe(
    [{ content: [Buffer.from('test')] }],
    ipfs.add,
    all
  )
  expect(added).to.have.length(1)
  added[0].cid = added[0].cid.toString()
  expect(added[0]).to.deep.equal({ path: expectedCid, cid: expectedCid, size: 12 })
})

it('.add with iterable', async () => {
  const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
  const added = await all(ipfs.add([Buffer.from('test')]))
  expect(added).to.have.length(1)
  added[0].cid = added[0].cid.toString()
  expect(added[0]).to.deep.equal({ path: expectedCid, cid: expectedCid, size: 12 })
})
it('files.mkdir', async () => {
  await ipfs.files.mkdir('/test-folder')
})

it('files.flush', async () => {
  await ipfs.files.flush('/')
})

it('files.cp', async () => {
  // Copy an immutable /ipfs path into a fresh MFS directory (array form)
  const dir = `/test-folder-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.cp([
    '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
    `${dir}/test-file-${Math.random()}`
  ])
})

it('files.cp with non-array arguments', async () => {
  // Same copy, but with source and destination as separate arguments
  const dir = `/test-folder-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.cp(
    '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
    `${dir}/test-file-${Math.random()}`
  )
})
it('files.mv', async () => {
  // Seed a file via cp, then move it within the same directory (array form)
  const dir = `/test-folder-${Math.random()}`
  const src = `${dir}/test-file-${Math.random()}`
  const dst = `${dir}/test-file-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.cp(
    '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
    src
  )
  await ipfs.files.mv([
    src,
    dst
  ])
})

it('files.mv with non-array arguments', async () => {
  // Same move, but with source and destination as separate arguments
  const dir = `/test-folder-${Math.random()}`
  const src = `${dir}/test-file-${Math.random()}`
  const dst = `${dir}/test-file-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.cp(
    '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
    src
  )
  await ipfs.files.mv(
    src,
    dst
  )
})
it('files.ls', async () => {
  // A directory containing a single written file must list exactly one entry
  const dir = `/test-folder-${Math.random()}`
  const filePath = `${dir}/test-file-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.write(filePath, Buffer.from('Hello, world'), {
    create: true
  })
  const listing = await all(ipfs.files.ls(dir))
  expect(listing.length).to.equal(1)
})

it('files.ls mfs root by default', async () => {
  // Calling ls with no path must list the MFS root
  const dirName = `test-folder-${Math.random()}`
  await ipfs.files.mkdir(`/${dirName}`)
  const listing = await all(ipfs.files.ls())
  expect(listing.find(entry => entry.name === dirName)).to.be.ok()
})

it('files.write', async () => {
  await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world'), {
    create: true
  })
  const contents = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt')))
  expect(contents.toString()).to.be.equal('hello world')
})

it('files.write without options', async () => {
  // Succeeds without `create` because the previous test created the file
  await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world'))
  const contents = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt')))
  expect(contents.toString()).to.be.equal('hello world')
})
// Stat a freshly written copy of `testfile` and assert the exact stat shape.
// The literal cid/size/cumulativeSize below are stable because the fixture
// bytes are fixed — presumably testfile.txt's 12-byte "Plz add me!" content
// loaded earlier in this file; TODO confirm against the fixture loader above.
it('files.stat', async () => {
  const folder = `/test-folder-${Math.random()}`
  const file = `${folder}/test-file-${Math.random()}`
  await ipfs.files.mkdir(folder)
  await ipfs.files.write(file, testfile, {
    create: true
  })
  const stats = await ipfs.files.stat(file)
  // Stringify the CID so deep.equal can compare against the literal below
  stats.cid = stats.cid.toString()
  expect(stats).to.deep.equal({
    cid: 'QmQhouoDPAnzhVM148yCa9CbUXK65wSEAZBtgrLGHtmdmP',
    size: 12,
    cumulativeSize: 70,
    blocks: 1,
    type: 'file',
    withLocality: false
  })
})

// Stat on a missing path must reject with the daemon's error shape
it('files.stat file that does not exist()', async () => {
  await expect(ipfs.files.stat('/test-folder/does-not-exist()')).to.be.rejectedWith({
    code: 0,
    type: 'error'
  })
})
it('files.read', async () => {
  // Round-trip: write the fixture, read it back, compare the bytes
  const dir = `/test-folder-${Math.random()}`
  const filePath = `${dir}/test-file-${Math.random()}`
  await ipfs.files.mkdir(dir)
  await ipfs.files.write(filePath, testfile, {
    create: true
  })
  const contents = Buffer.concat(await all(ipfs.files.read(filePath)))
  expect(Buffer.from(contents)).to.deep.equal(testfile)
})

it('files.rm without options', async () => {
  await ipfs.files.rm('/test-folder/test-file-2.txt')
})

it('files.rm', async () => {
  // Removing a non-empty directory requires `recursive`
  await ipfs.files.rm('/test-folder', { recursive: true })
})
})
================================================
FILE: test/fixtures/.gitattributes
================================================
* -text
================================================
FILE: test/fixtures/15mb.random
================================================
[File too large to display: 14.3 MB]
================================================
FILE: test/fixtures/r-config.json
================================================
{}
================================================
FILE: test/fixtures/ssl/cert.pem
================================================
-----BEGIN CERTIFICATE-----
MIIDOzCCAiMCCQCVqVeRIp9pFDANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJV
UzENMAsGA1UECAwEVXRhaDEOMAwGA1UEBwwFUHJvdm8xIzAhBgNVBAoMGkFDTUUg
U2lnbmluZyBBdXRob3JpdHkgSW5jMRQwEgYDVQQDDAtleGFtcGxlLmNvbTAeFw0x
ODA4MTQyMDEzNTdaFw0xOTEyMjcyMDEzNTdaMFgxCzAJBgNVBAYTAlVTMQ0wCwYD
VQQIDARVdGFoMQ4wDAYDVQQHDAVQcm92bzEWMBQGA1UECgwNQUNNRSBUZWNoIElu
YzESMBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEA6x6mTXV+rC35QW/sPutT1O1cugtnw+UsJx7EGgzyjh7EoXE3gb7sO96P
tOI5zknb0vecckbiVkesmLnAs2iNa1u9EiRr6WHdc+1MfUCxyHRfP731vRZyo0kx
bSXerE0qZ2N3M1XyndZF7VMthKDKIg0ZR0TvdjwLqyLYEHAnRBhJLRS0Oy0fC6Of
VWCO3gIuk1HkTXH+/ZMA/obqrtlisxY85mMdlRz+1PNdZBMf+NxmrXN59uq+JqUu
8/v1oQ8jH2iU9IWeqyawHDEvPW3aDorfaWGyats5Xd3cT2Ph4xF9tBLT+3PDGU8c
oBmTHWDenYn+TCkCseayo1JCO5igJQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQCr
R7eZxicHjJoRcbsPBDQpzx9uSux3uvpN93pyJrXXHiil/5SE7CYeDqv5+nV2p6HA
6KONUAmpId0iHAEi9u+0/LgPWyYQMzT3sfBhkO8RRaMYI87VuKbk5PFmlZbD843+
Qmg3Se2F7BDnTf88xA6QWR4DCejy+ZHfDRFrh3xfFl4tX1UNgqiTGfjPCzblhWx9
ygzlT+flN2j3NkAlhUEV89pnH4EQWILePMTT4wh2XOQj1VFJ+2ATojHFVUTtNWAJ
xrY/Q9cMYsZ++I8i9bHMZoyc1bSUd5CNFpQdfjVzlgMPT9Jj/fzWIQz+wq0KeRLI
dLWsa2MZr0GZnTU39YwH
-----END CERTIFICATE-----
================================================
FILE: test/fixtures/ssl/privkey.pem
================================================
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEA6x6mTXV+rC35QW/sPutT1O1cugtnw+UsJx7EGgzyjh7EoXE3
gb7sO96PtOI5zknb0vecckbiVkesmLnAs2iNa1u9EiRr6WHdc+1MfUCxyHRfP731
vRZyo0kxbSXerE0qZ2N3M1XyndZF7VMthKDKIg0ZR0TvdjwLqyLYEHAnRBhJLRS0
Oy0fC6OfVWCO3gIuk1HkTXH+/ZMA/obqrtlisxY85mMdlRz+1PNdZBMf+NxmrXN5
9uq+JqUu8/v1oQ8jH2iU9IWeqyawHDEvPW3aDorfaWGyats5Xd3cT2Ph4xF9tBLT
+3PDGU8coBmTHWDenYn+TCkCseayo1JCO5igJQIDAQABAoIBAH5fbfFqOpie6T8T
wj4bTGbA4bsZkD9JeU7ZiXubA/ABd5xyduwky2JugH0vrvRC3IVrE0qU8OiBA7Le
/EUx5/kRSPFsZBf/wwChRiB4WlYsvllLZ76nRxyepZNN7H5dx3Hkk1gjVREi71jd
ATUtGxfsRG77DV5WbcshIlLLhT9iaohsalmClAFBmwhqnRMvOXHiQyRbvB0fOX08
uVlObOqo9jLB8N5C/ux+wFEP4wi/AxVqs9ih7Ss7T7+pmOCVWhOnbYcoY2jdaJ11
iLK4F3rv/jQ82OwUpzrWsPedmZUzlOO8xdV3b8hOcPHs/BKvYed7aHSn6b5eVKKT
zT8vQoECgYEA+K9pvw9K/7+F810MHG+nZ0gtVWmXJp49jB7zQ6QMIex2sUajY2y9
bEJX8T6rdu3qd+HYU4zl3qt+MUnsVQEBNkLPAn3od0qIWXxu1SL2GF8SDV1xJWK1
Fp0YDe9blaz1JsmSgieNcSoSwqE2V97Wfd/m+EUfyhQt9HX55H5UgAUCgYEA8gkW
0xZKuHhBMYpcES2P5H5q6HN2fcEQycMuS3agAOhrFPYUT1DVNhbfNVmbOvL2NRWI
hXixo5SkuEuq2fjmEoeLPTmeKO5LM4IVqovWCYomSftKDpzw4HRn2jvKzi2+mg8J
qktIMqRqHu/O1NUIsszCIu4c5DzUdhr4N7GXOaECgYAEd1oF1Wd6al0kfsJN7G9s
Om6d/xR43BSs5I1n5JVXMqD7FBKxIW3ReOuNaJu5uhIg7wxsi7ZBJoFQr0wwRqFX
8SE4oTxAkDUcrlBrQYJ785Embkwu6LPp4Q5iia7yZDXO6YXZEo7GvoOxvSV1tInT
nubOBKfKgExG/KttQBuSZQKBgAzYOqPdLP35M8yDQTuQJXDE3LuVVRZ7Zn6uowhS
NU+XBgfIv28uJQKH2DSmmrxYJITQrbwXmaXKv6sgKOMEeIFHPDZ1llUpwEftgWTZ
ovRCpqGKenWoEoh25QQJ5Eto1hKq9aJZ+GznmNIne9yDqcCDaVIdPN9H8yaJa97Y
x+PBAoGAOiK6xAbPyJSKDSTGZzdv8+yeOdNeJjRHxKJs+4YsDchmdumrqow83DBP
7ulIJD9pcmsWj+8fntMcsTX5mvzJd5LsKc7Maa5/LtitsLsynu78QFg4Njj8sAKn
3991i8J98DZ9zqmkxJJhGwstCHG+c+Q7lA6kZ1UdbWJwYwIHjAs=
-----END RSA PRIVATE KEY-----
================================================
FILE: test/fixtures/test-folder/.hiddenTest.txt
================================================
Aha! You found me!
================================================
FILE: test/fixtures/test-folder/add
================================================
'use strict'
const ipfs = require('../src')('localhost', 5001)
const f1 = 'Hello'
const f2 = 'World'
ipfs.add([new Buffer(f1), new Buffer(f2)], function (err, res) {
if (err || !res) return console.log(err)
for (let i = 0; i < res.length; i++) {
console.log(res[i])
}
})
ipfs.add(['./files/hello.txt', './files/ipfs.txt'], function (err, res) {
if (err || !res) return console.log(err)
for (let i = 0; i < res.length; i++) {
console.log(res[i])
}
})
================================================
FILE: test/fixtures/test-folder/cat
================================================
'use strict'
const ipfs = require('../src')('localhost', 5001)
const hash = [
'QmdFyxZXsFiP4csgfM5uPu99AvFiKH62CSPDw5TP92nr7w',
'QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu'
]
ipfs.cat(hash, function (err, res) {
if (err || !res) return console.log(err)
if (res.readable) {
res.pipe(process.stdout)
} else {
console.log(res)
}
})
================================================
FILE: test/fixtures/test-folder/files/hello.txt
================================================
Hello
================================================
FILE: test/fixtures/test-folder/files/ipfs.txt
================================================
IPFS
================================================
FILE: test/fixtures/test-folder/ipfs-add
================================================
#!/usr/bin/env node
'use strict'
const ipfs = require('../src')('localhost', 5001)
const files = process.argv.slice(2)
ipfs.add(files, {recursive: true}, function (err, res) {
if (err || !res) return console.log(err)
for (let i = 0; i < res.length; i++) {
console.log('added', res[i].Hash, res[i].Name)
}
})
================================================
FILE: test/fixtures/test-folder/ls
================================================
'use strict'
const ipfs = require('../src')('localhost', 5001)
const hash = ['QmdbHK6gMiecyjjSoPnfJg6iKMF7v6E2NkoBgGpmyCoevh']
ipfs.ls(hash, function (err, res) {
if (err || !res) return console.log(err)
res.Objects.forEach(function (node) {
console.log(node.Hash)
console.log('Links [%d]', node.Links.length)
node.Links.forEach(function (link, i) {
console.log('[%d]', i, link)
})
})
})
================================================
FILE: test/fixtures/test-folder/version
================================================
'use strict'
const ipfs = require('../src')('localhost', 5001)
ipfs.commands(function (err, res) {
if (err) throw err
console.log(res)
})
================================================
FILE: test/fixtures/testconfig.json
================================================
{
"test": "beep boop",
"Addresses": {
"API": "/ip4/127.0.0.1/tcp/5001",
"Gateway": "/ip4/127.0.0.1/tcp/8080",
"Swarm": [
"/ip4/0.0.0.0/tcp/4001"
]
},
"Bootstrap": [
"/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ",
"/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z",
"/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLpPVmHKQ4XTPdz8tjDFgdeRFkpV8JgYq8JVJ69RrZm",
"/ip4/162.243.248.213/tcp/4001/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm",
"/ip4/128.199.219.111/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu",
"/ip4/104.236.76.40/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64",
"/ip4/178.62.158.247/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd",
"/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3",
"/ip4/104.236.151.122/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx"
],
"Datastore": {
"Path": "/home/krl/.ipfs/datastore",
"Type": "leveldb"
},
"Discovery": {
"MDNS": {
"Enabled": false,
"Interval": 0
}
},
"Gateway": {
"RootRedirect": "",
"Writable": false
},
"Identity": {
"PeerID": "QmTF1MqD5hFgRztwxeosess2S1PpSm6fpy2jt15gZjr39c",
"PrivKey": "CAASqxIwggknAgEAAoICAQC8RXr0/XtKFAn5ReHm0AoUQYpMxckyISsdKkDiGTXQW6h3IuU7kypoeSUNPerp+/+dJCXe5dY+GnT9jRXbZBUfVWYuEnMbCCnGHhi28wD71TYgfLuJlZjRKy1eBIz57bOtAVY8CnLM4WPLQLhcwgSZ1G83D6sRTKNOOqZMf37V/I1dxDg2kqi0JujBodlIP0WjDyBJvtPfLUSoytYznIOmJfi/K5ofPm54y5zX81lwuRGAdCXDIwCwcIwFY2EC6gWVECDLTTBnr7ZXITNtTBOWx6tIEmTF8K/hQN3KzazVEkYn7p4r0QL4zJnQCBIr09HhvOuNTK9XdqBeQJsVVm2uM+//acgdlYS8V+F8FQQHCihsaKAEYeDSGuKuzEbLohc8donWy+3k5jn/SwY8jJLaXtBJpux1sGdRG+ko5mFdXViII5e8gbiSdC1JiDKdeYmUSAm27IvuGay4FplK8wczZrComdsS3sxpSvTrS6KulNv7WTZEKPAWtMFGX+58krOpDgOkKg8IvwucmXsrYD5Q+razorP3v4QAVhouhxQNKzVpmOYZemOeJrTf5smUBjTGLIp343N36kWqZVp+bqTjl8x5uRbgn/wthDWHsyV81M6OwCCPtiKLJMKkztlArFnnT5sZ3aG7sFj4AgnxcqHHDo4nUAI+t4tOb9WxcAMR2wIDAQABAoICAG7OxetjNSkIWkZoJujeENCTMn60+hGTC/kCYWWxSMb061YTJ6/EkfUjN/dvNc+5DVzDZbamt02d7LU+UFrrsLLcZGNBYJXMXCnKlOk4ZJ/TgSPlxcrYTTTuoKjxLLf8ev+cBdEYpTCIh1+dG+UcG/Ed4scZZams3YCxbCch8tim590EG8Gi18AQFnXAeE3ZT3cE9A/zTGfSENL3btK5j5I+TwTU+MTizcoyrIE9LKr3gaGEuqT6+PDfjMmvD+3TJq3w7Bw7tf2QoVTuqYHugKkBo4Grsbv+SMXek6tFGi/drYTbICTRw1oDsZOK7Ib3CFRACLMFKz3jB8fxZlVMpQgawiyeBPx0Z4xf6+OG0yUGxGUKBcuATzON29NJWplZxPtfClCozdsm/3VCCPe7ruDUoDEWobhvKh3Ax+8Cdvt9ajoon6qNK3QgJqrm8Xc50MtjaPhZASGw1YweOfxAeQRHVYAJHrkBwblNzOnCMT8F4uKpeI4C9rUjKTOJu53qV1shwIM+zUGtmRvJUTKA15twqnntQrkhO990Dtl3tON/yx/D2ilLYCVuEPqfmW5/xiOrabgS1OlXLZ7teat85JOyqgZ1R/uxFzQljRwVsnCG6DlobkGHbnKwHq4zyV5UXZUDTM1QAgw6ksmkDWOQ9iYJrTk7UTmnNeAb0g4G2tJhAoIBAQDyT8qoPmZkONFktdnUUnjAeuNZ55b2u7pBq+1b2N4ZzhZQjCRCJwK3zxDYpoxWmH9etJcvv1RvZRaWngr9vECMzUEJmklZlCpW2fdxhvprABHaywklFt2heOo8krmfvbZIVMQ6Q9xWRz5sfiV+k3C8q2Y6RIMLuGquYzk8o2pAPG5aD7gHxIeq4JRGCXyJyovgX2AOZmtZe9ttc+dhxKm71V8bI0e88JrusKeapUnVJUm8qgP6PBg+F8MZh1U3kjHczKeB1ND3X9oFQtLP9ypsi4Ngqgvw0O3+CGY/nvJ6c6ynryiVcmURdQY66f+57CtPjjdUVsSoR/bce5S2lrHNAoIBAQDG6C5iHVuX9OplWcA70U7Tc4eiac803QRUL+zdDOC74Qw2Coxr5Nsxjz8/i+6DnOCeWkJBh/uGEaxdYYZHU90LVf/NSA3zguGt6Z0g3MMvMNT9KGgoZl2T5Y/CCgCfrt6gTGRy7PmF+uLtgHcsKQLqCzMarOSVyue35ztxuI6NeRPvlNkoJ54F/+jnY1/6MrMTPXpybHXwf40piyLPuyapqBgyWkRBwPORR9Ucdiz7
KhBnZhj7NYpwvut/KgufuU3WF9bkknfFlOJzVp8ZgefxHE8lHPuR0PnWyHwv3v5hgALYwenREcM5MUbRZ7tc18FkWBVOPlvszqSGujq3nMpHAoIBAGV2R9OfHVzF9dgH1Yh0aB+g1WYl9S6neNxa027sJkQD6ZAcvmn8z8SLrfAp/QWdoWfUkqHpqb9jQswarVuF4jmTELKmqiQaSIhJiLU+4cjAJLnK3q9rHa6pZNusTJG47ITpCamkFLUD6/2d7LFNp5043/tyCLV1qSYQYj0j6C+xnjuT7WlDP9OrairRehZwe5WeGiitdjHoDP+N0ss7gB8ov0Qrx7Qzw4xC6Et2/q2DiZa4UiYL19LYPFeKNYKpcruT7mgM5ttOhYpCaueuBVOiL4bgbVOPCLigZ8AoHDxuB1PHomTBm9RtfghZRz1gyNntIPntwzb7u0Cjdqfl/dkCggEAauDxo3jg8ZsBpCoA2GOUtpw6gnPWijJElDQYU4MK8wlvNU6fu44ClfPB6ZR4OjI+o/gd5/Z4mca/VoID1Cnk+aVhSV3xWSq3t2pzKuhU3POhTtK6fRLcL49Hmt0jDqq5J2tFAlgBkBOKglHoN0tmLHqOIERMo4yezDusvmOL/crUgoT51tDK4bBr5oGIXfmGLc14ESnkibEQGgWQVAzdLoaLUesdCDP07NirU5rQerlUjSrYO4u+cuyzv+XIzy+T+nle1/037Gwe7hjabqtWBUHP6UJUjzq6NMYPrO1mxN8zKGOyDsw7mWy3/+d8TtwEJ7YI5L0vSeSTlW1WBblzGwKCAQBeL4WoC0rH7B+MBw/+q3NMozlq4pE1vAC/7L06GhTPQi3Sklz/HHfw1g2aeYYIYpZ3S0Q+QHBs+fpspuPIBE0xTZPNJIAPTOW3WxODCya1rl521Syy8t57m9UPyVG3scSbWrJxH99BQVSPZzVMI3WO2XWGYupUIkVo+MW4WKhBkHd6a8inuJmjExDWm+bwaoQgUxNwE3c1C5OFwkAU1ofhNijE02IJX0EOaYjvNELT0Na3B0T7KZ72Ut4u33byPgEHa9SsPEurctbHy2HdpOGHdpxx4r+7ZQUtg1UZ0ZRQN8B8TyozD05AXQ9V8ybdzU1jSpQYWqbfkXl+LllJBa4E"
},
"Log": {
"MaxAgeDays": 0,
"MaxBackups": 1,
"MaxSizeMB": 250
},
"Mounts": {
"FuseAllowOther": false,
"IPFS": "/ipfs",
"IPNS": "/ipns"
},
"SupernodeRouting": {
"Servers": [
"/ip4/104.236.176.52/tcp/4002/ipfs/QmXdb7tWTxdFEQEFgWBqkuYSrZd3mXrC7HxkD4krGNYx2U",
"/ip4/104.236.179.241/tcp/4002/ipfs/QmVRqViDByUxjUMoPnjurjKvZhaEMFDtK35FJXHAM4Lkj6",
"/ip4/104.236.151.122/tcp/4002/ipfs/QmSZwGx8Tn8tmcM4PtDJaMeUQNRhNFdBLVGPzRiNaRJtFH",
"/ip4/162.243.248.213/tcp/4002/ipfs/QmbHVEEepCi7rn7VL7Exxpd2Ci9NNB6ifvqwhsrbRMgQFP",
"/ip4/128.199.219.111/tcp/4002/ipfs/Qmb3brdCYmKG1ycwqCbo6LUwWxTuo3FisnJV2yir7oN92R",
"/ip4/104.236.76.40/tcp/4002/ipfs/QmdRBCV8Cz2dGhoKLkD3YjPwVFECmqADQkx5ZteF2c6Fy4",
"/ip4/178.62.158.247/tcp/4002/ipfs/QmUdiMPci7YoEUBkyFZAh2pAbjqcPr7LezyiPD2artLw3v",
"/ip4/178.62.61.185/tcp/4002/ipfs/QmVw6fGNqBixZE4bewRLT2VXX7fAHUHs8JyidDiJ1P7RUN"
]
},
"Tour": {
"Last": ""
},
"Version": {
"AutoUpdate": "minor",
"Check": "error",
"CheckDate": "0001-01-01T00:00:00Z",
"CheckPeriod": "172800000000000",
"Current": "0.3.0"
}
}
================================================
FILE: test/fixtures/testfile.txt
================================================
Plz add me!
================================================
FILE: test/get.spec.js
================================================
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 8] */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const loadFixture = require('aegir/fixtures')
const all = require('it-all')
const concat = require('it-concat')
const f = require('./utils/factory')()
// go-ipfs-specific behaviour of `ipfs.get` not covered by interface-ipfs-core.
describe('.get (specific go-ipfs features)', function () {
  this.timeout(60 * 1000)

  // Fixtures are shared with the interface-ipfs-core suite
  function fixture (path) {
    return loadFixture(path, 'interface-ipfs-core')
  }

  const smallFile = {
    cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
    data: fixture('test/fixtures/testfile.txt')
  }

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
    await all(ipfs.add(smallFile.data))
  })

  after(() => f.clean())

  it('no compression args', async () => {
    const files = await all(ipfs.get(smallFile.cid))
    expect(files).to.be.length(1)
    const content = await concat(files[0].content)
    expect(content.toString()).to.contain(smallFile.data.toString())
  })

  it('archive true', async () => {
    const files = await all(ipfs.get(smallFile.cid, { archive: true }))
    expect(files).to.be.length(1)
    const content = await concat(files[0].content)
    expect(content.toString()).to.contain(smallFile.data.toString())
  })

  it('err with out of range compression level', async () => {
    await expect(all(ipfs.get(smallFile.cid, {
      compress: true,
      compressionLevel: 10
    }))).to.be.rejectedWith('compression level must be between 1 and 9')
  })

  // TODO Understand why this test started failing
  it.skip('with compression level', async () => {
    await all(ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 }))
  })

  it('add path containing "+"s (for testing get)', async () => {
    const filename = 'ti,c64x+mega++mod-pic.txt'
    const subdir = 'tmp/c++files'
    const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
    // FIX: the path previously interpolated the garbled literal `$(unknown)`
    // and left `filename` unused. The companion get test below expects the
    // entry `${cid}/c++files/ti,c64x+mega++mod-pic.txt`, so the path must be
    // built from `subdir` and `filename`.
    const path = `${subdir}/${filename}`
    const files = await all(ipfs.add([{
      path,
      content: Buffer.from(path)
    }]))
    // Adding one nested file yields three entries: root dir, subdir, file —
    // the file itself is last
    expect(files[2].cid.toString()).to.equal(expectedCid)
  })

  it('get path containing "+"s', async () => {
    const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
    const files = await all(ipfs.get(cid))
    expect(files).to.be.an('array').with.lengthOf(3)
    expect(files[0]).to.have.property('path', cid)
    expect(files[1]).to.have.property('path', `${cid}/c++files`)
    expect(files[2]).to.have.property('path', `${cid}/c++files/ti,c64x+mega++mod-pic.txt`)
  })
})
================================================
FILE: test/interface.spec.js
================================================
/* eslint-env mocha */
'use strict'
const tests = require('interface-ipfs-core')
const factory = require('./utils/factory')
const isWindows = process.platform && process.platform === 'win32'
/** @typedef {import("ipfsd-ctl").ControllerOptions} ControllerOptions */
// Runs the shared interface-ipfs-core conformance suite against go-ipfs via
// this HTTP client. Each `skip` entry names a spec test (by its exact title)
// that go-ipfs cannot pass yet, with a tracking reason.
describe('interface-ipfs-core tests', () => {
  const commonFactory = factory()

  tests.root(commonFactory, {
    skip: [
      {
        name: 'should add with mode as string',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should add with mode as number',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should add with mtime as Date',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should add with mtime as { nsecs, secs }',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should add with mtime as timespec',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should add with mtime as hrtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should export a chunk of a file',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should ls with metadata',
        reason: 'TODO not implemented in go-ipfs yet'
      }
    ]
  })

  tests.bitswap(commonFactory)

  tests.block(commonFactory, {
    skip: [{
      name: 'should get a block added as CIDv1 with a CIDv0',
      reason: 'go-ipfs does not support the `version` param'
    }]
  })

  tests.bootstrap(commonFactory, {
    skip: [{
      name: 'should return a list containing the bootstrap peer when called with a valid arg (ip4)',
      reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys'
    }, {
      name: 'should prevent duplicate inserts of bootstrap peers',
      reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys'
    }, {
      name: 'should return a list containing the peer removed when called with a valid arg (ip4)',
      reason: 'TODO unskip when go-ipfs switches to p2p for libp2p keys'
    }]
  })

  tests.config(commonFactory, {
    skip: [
      // config.replace
      {
        name: 'replace',
        reason: 'FIXME Waiting for fix on go-ipfs https://github.com/ipfs/js-ipfs-http-client/pull/307#discussion_r69281789 and https://github.com/ipfs/go-ipfs/issues/2927'
      },
      {
        name: 'should list config profiles',
        reason: 'TODO: Not implemented in go-ipfs'
      },
      {
        name: 'should strip private key from diff output',
        reason: 'TODO: Not implemented in go-ipfs'
      }
    ]
  })

  tests.dag(commonFactory, {
    skip: [
      // dag.tree
      {
        name: 'tree',
        reason: 'TODO vmx 2018-02-22: Currently the tree API is not exposed in go-ipfs'
      },
      // dag.get:
      {
        name: 'should get a dag-pb node local value',
        reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done'
      },
      {
        name: 'should get dag-pb value via dag-cbor node',
        reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done'
      },
      {
        name: 'should get by CID string + path',
        reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done'
      },
      {
        name: 'should get only a CID, due to resolving locally only',
        reason: 'FIXME: go-ipfs does not support localResolve option'
      }
    ]
  })

  tests.dht(commonFactory)

  // MFS: the long skip list below is dominated by mode/mtime metadata
  // support, which go-ipfs had not implemented at the time of writing
  tests.files(commonFactory, {
    skip: [
      {
        name: 'should ls directory',
        reason: 'TODO unskip when go-ipfs supports --long https://github.com/ipfs/go-ipfs/pull/6528'
      },
      {
        name: 'should list a file directly',
        reason: 'TODO unskip when go-ipfs supports --long https://github.com/ipfs/go-ipfs/pull/6528'
      },
      {
        name: 'should ls directory and include metadata',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should read from outside of mfs',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should ls from outside of mfs',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should change file mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should change directory mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should change file mode as string',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should change file mode to 0',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should update file mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should update directory mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should respect metadata when copying files',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should respect metadata when copying directories',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should respect metadata when copying from outside of mfs',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should have default mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should set mtime as Date',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should set mtime as { nsecs, secs }',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should set mtime as timespec',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should set mtime as hrtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and have default mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mode as string',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mode as number',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mtime as Date',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mtime as { nsecs, secs }',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mtime as timespec',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should make directory and specify mtime as hrtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mode as a string',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mode as a number',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mtime as Date',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mtime as { nsecs, secs }',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mtime as timespec',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should write file and specify mtime as hrtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat file with mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat file with mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat dir with mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat dir with mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat sharded dir with mode',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      {
        name: 'should stat sharded dir with mtime',
        reason: 'TODO not implemented in go-ipfs yet'
      }
    ]
  })

  tests.key(commonFactory, {
    skip: [
      // key.export
      {
        name: 'export',
        reason: 'TODO not implemented in go-ipfs yet'
      },
      // key.import
      {
        name: 'import',
        reason: 'TODO not implemented in go-ipfs yet'
      }
    ]
  })

  tests.miscellaneous(commonFactory)

  // name tests need an offline node so publish/resolve complete quickly
  tests.name(factory(
    {
      ipfsOptions: {
        offline: true
      }
    }
  ), {
    skip: [
      {
        name: 'should resolve a record from peerid as cidv1 in base32',
        reason: 'TODO not implemented in go-ipfs yet: https://github.com/ipfs/go-ipfs/issues/5287'
      }
    ]
  })

  tests.namePubsub(factory(
    {
      ipfsOptions: {
        EXPERIMENTAL: {
          ipnsPubsub: true
        }
      }
    }
  ), {
    skip: [
      // name.pubsub.cancel
      {
        name: 'should cancel a subscription correctly returning true',
        reason: 'go-ipfs is really slow for publishing and resolving ipns records, unless in offline mode'
      },
      // name.pubsub.subs
      {
        name: 'should get the list of subscriptions updated after a resolve',
        reason: 'go-ipfs is really slow for publishing and resolving ipns records, unless in offline mode'
      }
    ]
  })

  tests.object(commonFactory)

  tests.pin(commonFactory)

  tests.ping(commonFactory, {
    skip: [
      {
        name: 'should fail when pinging a peer that is not available',
        reason: 'FIXME go-ipfs return success with text: Looking up peer '
      }
    ]
  })

  // pubsub is still experimental in go-ipfs and needs an opt-in daemon flag
  tests.pubsub(factory({}, {
    go: {
      args: ['--enable-pubsub-experiment']
    }
  }), {
    skip: isWindows ? [
      // pubsub.subscribe
      {
        name: 'should send/receive 100 messages',
        reason: 'FIXME https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 and https://github.com/ipfs/go-ipfs/issues/4778'
      },
      {
        name: 'should receive multiple messages',
        reason: 'FIXME https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 and https://github.com/ipfs/go-ipfs/issues/4778'
      }
    ] : null
  })

  tests.repo(commonFactory)

  tests.stats(commonFactory)

  tests.swarm(commonFactory)
})
================================================
FILE: test/key.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const f = require('./utils/factory')()
// Tests for the key management API against a spawned go-ipfs daemon.
describe('.key', function () {
  this.timeout(50 * 1000)

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  describe('.gen', () => {
    it('create a new rsa key', async () => {
      const res = await ipfs.key.gen('foobarsa', { type: 'rsa', size: 2048 })
      expect(res).to.exist()
    })

    it('create a new ed25519 key', async () => {
      const res = await ipfs.key.gen('bazed', { type: 'ed25519' })
      expect(res).to.exist()
    })
  })

  describe('.list', () => {
    // NOTE(review): order-dependent — expects the node's 'self' key plus the
    // two keys generated by the .gen tests above; confirm suite ordering if
    // tests are ever run in isolation or shuffled.
    it('both keys show up + self', async () => {
      const res = await ipfs.key.list()
      expect(res).to.exist()
      expect(res.length).to.equal(3)
    })
  })
})
================================================
FILE: test/lib.configure.spec.js
================================================
/* eslint-env mocha, browser */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const Multiaddr = require('multiaddr')
const { isBrowser, isWebWorker } = require('ipfs-utils/src/env')
const configure = require('../src/lib/configure')
// Unit tests for src/lib/configure: every accepted config shape must be
// normalised to the same `apiAddr` string before reaching the wrapped fn.
describe('lib/configure', () => {
  it('should accept no config', () => {
    configure(config => {
      // In browser-like environments the default is the current origin
      if (isBrowser || isWebWorker) {
        expect(config.apiAddr).to.eql(location.origin)
      } else {
        expect(config.apiAddr).to.eql('http://localhost:5001')
      }
    })()
  })

  it('should accept string multiaddr', () => {
    const given = '/ip4/127.0.0.1/tcp/5001'
    configure(config => {
      expect(config.apiAddr).to.eql('http://127.0.0.1:5001')
    })(given)
  })

  it('should accept string url', () => {
    const given = 'http://127.0.0.1:5001'
    configure(config => {
      expect(config.apiAddr).to.eql('http://127.0.0.1:5001')
    })(given)
  })

  it('should accept multiaddr instance', () => {
    const given = Multiaddr('/ip4/127.0.0.1/tcp/5001')
    configure(config => {
      expect(config.apiAddr).to.eql('http://127.0.0.1:5001')
    })(given)
  })

  it('should accept object with protocol, host and port', () => {
    const given = { protocol: 'https', host: 'ipfs.io', port: 138 }
    configure(config => {
      expect(config.apiAddr).to.eql('https://ipfs.io:138')
    })(given)
  })

  it('should accept object with protocol only', () => {
    const given = { protocol: 'https' }
    configure(config => {
      // Missing fields fall back to the page location in browsers
      if (isBrowser || isWebWorker) {
        expect(config.apiAddr).to.eql(`https://${location.host}`)
      } else {
        expect(config.apiAddr).to.eql('https://localhost:5001')
      }
    })(given)
  })

  it('should accept object with host only', () => {
    const given = { host: 'ipfs.io' }
    configure(config => {
      if (isBrowser || isWebWorker) {
        expect(config.apiAddr).to.eql(`http://ipfs.io:${location.port}`)
      } else {
        expect(config.apiAddr).to.eql('http://ipfs.io:5001')
      }
    })(given)
  })

  it('should accept object with port only', () => {
    const given = { port: 138 }
    configure(config => {
      if (isBrowser || isWebWorker) {
        expect(config.apiAddr).to.eql(`http://${location.hostname}:138`)
      } else {
        expect(config.apiAddr).to.eql('http://localhost:138')
      }
    })(given)
  })
})
================================================
FILE: test/lib.error-handler.spec.js
================================================
/* eslint-env mocha */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')
const { HTTPError } = require('ky-universal')
const throwsAsync = require('./utils/throws-async')
const errorHandler = require('../src/lib/error-handler')

describe('lib/error-handler', () => {
  // Build a minimal fake fetch Response exposing only what errorHandler reads
  const fakeResponse = (contentType, body) => ({
    ok: false,
    status: 500,
    headers: { get: () => contentType },
    ...body
  })

  it('should parse json error response', async () => {
    const res = fakeResponse('application/json', {
      json: () => Promise.resolve({ Message: 'boom', Code: 0, Type: 'error' })
    })

    const err = await throwsAsync(errorHandler(null, null, res))
    expect(err instanceof HTTPError).to.be.true()
    expect(err.message).to.eql('boom')
    expect(err.response.status).to.eql(500)
  })

  it('should gracefully fail on parse json', async () => {
    // json() returning a plain string instead of a promise of an object
    const res = fakeResponse('application/json', { json: () => 'boom' }) // not valid json!

    const err = await throwsAsync(errorHandler(null, null, res))
    expect(err instanceof HTTPError).to.be.true()
  })

  it('should gracefully fail on read text', async () => {
    const res = fakeResponse('text/plain', { text: () => Promise.reject(new Error('boom')) })

    const err = await throwsAsync(errorHandler(null, null, res))
    expect(err instanceof HTTPError).to.be.true()
  })
})
================================================
FILE: test/log.spec.js
================================================
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 8] */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')
const all = require('it-all')
const f = require('./utils/factory')()

describe('.log', function () {
  this.timeout(100 * 1000)

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  it('.log.tail', async () => {
    // Periodically add data so the daemon keeps producing log output
    const i = setInterval(async () => {
      try {
        await all(ipfs.add(Buffer.from('just adding some data to generate logs')))
      } catch (_) {
        // this can error if the test has finished and we're shutting down the node
      }
    }, 1000)

    try {
      // One message is enough to prove the log stream is working
      for await (const message of ipfs.log.tail()) {
        expect(message).to.be.an('object')
        break
      }
    } finally {
      // Fix: clear the interval even if tail() throws before yielding a
      // message, otherwise the pending timer keeps firing after the test ends
      clearInterval(i)
    }
  })

  it('.log.ls', async () => {
    const res = await ipfs.log.ls()
    expect(res).to.exist()
    expect(res).to.be.an('array')
  })

  it('.log.level', async () => {
    const res = await ipfs.log.level('all', 'error')
    expect(res).to.exist()
    expect(res).to.be.an('object')
    expect(res).to.not.have.property('error')
    expect(res).to.have.property('message')
  })
})
================================================
FILE: test/node/swarm.js
================================================
/* eslint-env mocha */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')
const nock = require('nock')
const ipfsClient = require('../../src')

describe('.swarm.peers', function () {
  this.timeout(50 * 1000) // slow CI

  const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001')
  const apiUrl = 'http://127.0.0.1:5001'

  // Intercept the next POST to /api/v0/swarm/peers and reply with `response`
  const mockPeersCall = response => nock(apiUrl)
    .post('/api/v0/swarm/peers')
    .query(true)
    .reply(200, response)

  // Build a raw peer entry in the shape go-ipfs returns
  const rawPeer = (addr, peer) => ({ Addr: addr, Peer: peer, Latency: '', Muxer: '', Streams: null })

  it('handles a peer response', async () => {
    const response = { Peers: [rawPeer('/ip4/104.131.131.82/tcp/4001', 'QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ')] }
    const scope = mockPeersCall(response)

    const res = await ipfs.swarm.peers()
    expect(res).to.be.a('array')
    expect(res.length).to.equal(1)
    expect(res[0].error).to.not.exist()
    expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr)
    expect(res[0].peer.toString()).to.equal(response.Peers[0].Peer)
    expect(scope.isDone()).to.equal(true)
  })

  it('handles an ip6 quic peer', async () => {
    const response = { Peers: [rawPeer('/ip6/2001:8a0:7ac5:4201:3ac9:86ff:fe31:7095/udp/4001/quic', 'QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC')] }
    const scope = mockPeersCall(response)

    const res = await ipfs.swarm.peers()
    expect(res).to.be.a('array')
    expect(res.length).to.equal(1)
    expect(res[0].error).to.not.exist()
    expect(res[0].addr.toString()).to.equal(response.Peers[0].Addr)
    expect(res[0].peer.toString()).to.equal(response.Peers[0].Peer)
    expect(scope.isDone()).to.equal(true)
  })

  it('handles unvalidatable peer addr', async () => {
    // Address that cannot be parsed as a multiaddr - client should surface
    // an error plus the raw peer info instead of throwing
    const response = { Peers: [rawPeer('/ip4/104.131.131.82/future-tech', 'QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC')] }
    const scope = mockPeersCall(response)

    const res = await ipfs.swarm.peers()
    expect(res).to.be.a('array')
    expect(res.length).to.equal(1)
    expect(res[0].error).to.exist()
    expect(res[0].rawPeerInfo).to.deep.equal(response.Peers[0])
    expect(scope.isDone()).to.equal(true)
  })

  it('handles an error response', async () => {
    const scope = nock(apiUrl)
      .post('/api/v0/swarm/peers')
      .query(true)
      .replyWithError('something awful happened')

    await expect(ipfs.swarm.peers()).to.be.rejectedWith('something awful happened')
    expect(scope.isDone()).to.equal(true)
  })
})
================================================
FILE: test/node.js
================================================
'use strict'

// Node-only test entry point: pulls in suites that cannot run in the browser
// (the swarm tests use nock to intercept HTTP requests)
require('./node/swarm')
================================================
FILE: test/ping.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const all = require('it-all')
const f = require('./utils/factory')()
// Determine if a ping response object is a pong, or something else, like a status message
function isPong (pingResponse) {
  if (!pingResponse) {
    return false
  }
  // A pong has a truthy success flag and an empty text field
  return Boolean(pingResponse.success) && !pingResponse.text
}
describe('.ping', function () {
  this.timeout(20 * 1000)

  let ipfs
  let other
  let otherId

  before(async function () {
    this.timeout(30 * 1000) // slow CI

    ipfs = (await f.spawn()).api
    other = (await f.spawn()).api

    // Connect the two daemons so they can ping each other
    const ma = (await ipfs.id()).addresses[0]
    await other.swarm.connect(ma)

    otherId = (await other.id()).id
  })

  after(() => f.clean())

  // Shared assertions over a fully-collected ping response stream
  const verifyPingResponses = (res, expectedPongs) => {
    expect(res).to.be.an('array')
    expect(res.filter(isPong)).to.have.lengthOf(expectedPongs)
    res.forEach(packet => {
      expect(packet).to.have.keys('success', 'time', 'text')
      expect(packet.time).to.be.a('number')
    })
    expect(res.find(packet => packet.text.includes('Average latency'))).to.exist()
  }

  it('.ping with default count', async () => {
    verifyPingResponses(await all(ipfs.ping(otherId)), 10)
  })

  it('.ping with count = 2', async () => {
    verifyPingResponses(await all(ipfs.ping(otherId, { count: 2 })), 2)
  })
})
================================================
FILE: test/repo.spec.js
================================================
/* eslint-env mocha */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')
const f = require('./utils/factory')()

describe('.repo', function () {
  this.timeout(50 * 1000) // slow CI

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  it('.repo.gc', async () => {
    expect(await ipfs.repo.gc()).to.exist()
  })

  it('.repo.stat', async () => {
    const stats = await ipfs.repo.stat()
    expect(stats).to.exist()
    expect(stats).to.have.a.property('numObjects')
    expect(stats).to.have.a.property('repoSize')
  })

  it('.repo.version', async () => {
    expect(await ipfs.repo.version()).to.exist()
  })
})
================================================
FILE: test/request-api.spec.js
================================================
/* eslint-env mocha */
'use strict'
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const { isNode } = require('ipfs-utils/src/env')
const ipfsClient = require('../src/index.js')
describe('\'deal with HTTP weirdness\' tests', () => {
  it('does not crash if no content-type header is provided', async function () {
    if (!isNode) return this.skip()

    // go-ipfs always (currently) adds a content-type header, even if no content is present,
    // the standard behaviour for an http-api is to omit this header if no content is present
    const server = require('http').createServer((req, res) => {
      // Consume the entire request, before responding.
      req.on('data', () => {})
      req.on('end', () => {
        res.writeHead(200)
        res.end()
      })
    })

    await new Promise(resolve => server.listen(6001, resolve))

    try {
      await ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json')
    } finally {
      // Fix: always free port 6001 - later tests in this file reuse it, so a
      // rejected config.replace must not leave the server listening
      server.close()
    }
  })
})
describe('trailer headers', () => {
  // TODO: needs fixing https://github.com/ipfs/js-ipfs-http-client/pull/624#issuecomment-344181950
  it.skip('should deal with trailer x-stream-error correctly', (done) => {
    if (!isNode) { return done() }

    // Fake API endpoint: streams a chunked JSON body, then signals failure via
    // the X-Stream-Error trailer header (how go-ipfs reports mid-stream errors)
    const server = require('http').createServer((req, res) => {
      res.setHeader('x-chunked-output', '1')
      res.setHeader('content-type', 'application/json')
      res.setHeader('Trailer', 'X-Stream-Error')
      res.addTrailers({ 'X-Stream-Error': JSON.stringify({ Message: 'ups, something went wrong', Code: 500 }) })
      res.write(JSON.stringify({ Bytes: 1 }))
      res.end()
    })

    server.listen(6001, () => {
      const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/6001')
      /* eslint-disable */
      // NOTE(review): uses the legacy callback-style add API; skipped until
      // trailer error propagation is fixed (see TODO above)
      ipfs.add(Buffer.from('Hello there!'), (err, res) => {
        // TODO: error's are not being correctly
        // propagated with Trailer headers yet
        // expect(err).to.exist()
        expect(res).to.not.equal(0)
        server.close(done)
      })
      /* eslint-enable */
    })
  })
})
describe('error handling', () => {
  // Start a local HTTP server on port 6001 that answers every request via
  // `respond(res)`, run the async assertion `fn`, and guarantee the server is
  // closed afterwards. Fix: previously `server.close()` was unreachable when
  // the awaited expectation rejected, leaking the port for subsequent tests.
  const withServer = async (respond, fn) => {
    const server = require('http').createServer((req, res) => {
      // Consume the entire request, before responding.
      req.on('data', () => {})
      req.on('end', () => respond(res))
    })

    await new Promise(resolve => server.listen(6001, resolve))

    try {
      await fn()
    } finally {
      server.close()
    }
  }

  it('should handle plain text error response', async function () {
    if (!isNode) return this.skip()

    await withServer(res => {
      // Write a text/plain response with a 403 (forbidden) status
      res.writeHead(403, { 'Content-Type': 'text/plain' })
      res.write('ipfs method not allowed')
      res.end()
    }, () => expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json'))
      .to.eventually.be.rejectedWith('ipfs method not allowed')
      .and.to.have.nested.property('response.status').that.equals(403))
  })

  it('should handle JSON error response', async function () {
    if (!isNode) return this.skip()

    await withServer(res => {
      // Write a application/json response with a 400 (bad request) header
      res.writeHead(400, { 'Content-Type': 'application/json' })
      res.write(JSON.stringify({ Message: 'client error', Code: 1 }))
      res.end()
    }, () => expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json'))
      .to.eventually.be.rejectedWith('client error')
      .and.to.have.nested.property('response.status').that.equals(400))
  })

  it('should handle JSON error response with invalid JSON', async function () {
    if (!isNode) return this.skip()

    await withServer(res => {
      // Truncated/invalid JSON body - the client's JSON parse error surfaces
      res.writeHead(400, { 'Content-Type': 'application/json' })
      res.write('{ Message: ')
      res.end()
    }, () => expect(ipfsClient('/ip4/127.0.0.1/tcp/6001').config.replace('test/fixtures/r-config.json'))
      .to.eventually.be.rejected()
      .and.to.have.property('message').that.includes('Unexpected token M in JSON at position 2'))
  })
})
================================================
FILE: test/stats.spec.js
================================================
/* eslint-env mocha */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')
const all = require('it-all')
const f = require('./utils/factory')()

describe('stats', function () {
  this.timeout(50 * 1000) // slow CI

  let ipfs

  before(async () => {
    ipfs = (await f.spawn()).api
  })

  after(() => f.clean())

  // Assert the result exists and exposes every named property
  const expectProperties = (res, names) => {
    expect(res).to.exist()
    names.forEach(name => expect(res).to.have.a.property(name))
  }

  it('.stats.bitswap', async () => {
    expectProperties(await ipfs.stats.bitswap(), [
      'provideBufLen',
      'wantlist',
      'peers',
      'blocksReceived',
      'dataReceived',
      'blocksSent',
      'dataSent',
      'dupBlksReceived',
      'dupDataReceived'
    ])
  })

  it('.stats.bw', async () => {
    // bw() is a stream - collect it and inspect the first update
    const first = (await all(ipfs.stats.bw()))[0]
    expectProperties(first, ['totalIn', 'totalOut', 'rateIn', 'rateOut'])
  })

  it('.stats.repo', async () => {
    expectProperties(await ipfs.stats.repo(), ['numObjects', 'repoSize', 'repoPath', 'version', 'storageMax'])
  })
})
================================================
FILE: test/sub-modules.spec.js
================================================
/* eslint-env mocha */
'use strict'

const { expect } = require('interface-ipfs-core/src/utils/mocha')

describe('submodules', () => {
  // Assert that every name in `methods` is exposed as a function on `obj`
  const expectMethods = (obj, methods) => {
    methods.forEach(name => expect(obj[name]).to.be.a('function'))
  }

  it('bitswap', () => {
    expectMethods(require('../src/bitswap')(), ['wantlist', 'stat', 'unwant'])
  })

  it('block', () => {
    expectMethods(require('../src/block')(), ['get', 'stat', 'put'])
  })

  it('bootstrap', () => {
    expectMethods(require('../src/bootstrap')(), ['add', 'rm', 'list'])
  })

  it('config', () => {
    const cfg = require('../src/config')()
    expectMethods(cfg, ['get', 'set', 'replace'])
    expect(cfg).to.have.a.property('profiles')
    expectMethods(cfg.profiles, ['list', 'apply'])
  })

  it('dht', () => {
    expectMethods(require('../src/dht')(), ['get', 'put', 'findProvs', 'findPeer', 'provide', 'query'])
  })

  it('id', () => {
    expect(require('../src/id')()).to.be.a('function')
  })

  it('version', () => {
    expect(require('../src/version')()).to.be.a('function')
  })

  it('ping', () => {
    expect(require('../src')().ping).to.be.a('function')
  })

  it('log', () => {
    expectMethods(require('../src/log')(), ['ls', 'tail', 'level'])
  })

  it('key', () => {
    expectMethods(require('../src/key')(), ['gen', 'list'])
  })

  it('name', () => {
    expectMethods(require('../src/name')(), ['publish', 'resolve'])
  })

  it('pin', () => {
    expectMethods(require('../src/pin')(), ['add', 'rm', 'ls'])
  })

  it('repo', () => {
    expectMethods(require('../src/repo')(), ['gc', 'stat'])
  })

  it('stats', () => {
    expectMethods(require('../src/stats')(), ['bitswap', 'bw', 'repo'])
  })

  it('swarm', () => {
    expectMethods(require('../src/swarm')(), ['peers', 'connect', 'disconnect', 'addrs', 'localAddrs'])
  })

  it('diag', () => {
    expectMethods(require('../src/diag')(), ['net', 'sys', 'cmds'])
  })

  it('object', () => {
    const object = require('../src/object')()
    expectMethods(object, ['get', 'put', 'data', 'links', 'stat', 'new'])
    expectMethods(object.patch, ['rmLink', 'addLink', 'setData', 'appendData'])
  })

  it('pubsub', () => {
    expectMethods(require('../src/pubsub')(), ['subscribe', 'unsubscribe', 'publish', 'ls', 'peers'])
  })

  it('files regular API', () => {
    const filesRegular = require('../src')()
    expectMethods(filesRegular, ['add', 'get', 'cat', 'ls', 'refs'])
    expect(filesRegular.refs.local).to.be.a('function')
  })

  it('files MFS API', () => {
    expectMethods(require('../src/files')(), ['cp', 'ls', 'mkdir', 'stat', 'rm', 'read', 'write', 'mv'])
  })

  it('commands', () => {
    expect(require('../src/commands')()).to.be.a('function')
  })

  it('mount', () => {
    expect(require('../src/mount')()).to.be.a('function')
  })
})
================================================
FILE: test/utils/factory.js
================================================
'use strict'

const { createFactory } = require('ipfsd-ctl')
const merge = require('merge-options')
const { isNode } = require('ipfs-utils/src/env')

// Defaults shared by every spawned daemon: go-ipfs driven via this HTTP client
const commonOptions = {
  test: 'true',
  type: 'go',
  ipfsHttpModule: require('../../src')
}

// A local go-ipfs binary can only be resolved under Node; in browsers the
// ipfsBin override is left undefined
const commonOverrides = {
  go: {
    ipfsBin: isNode ? require('go-ipfs-dep').path() : undefined
  }
}

/**
 * Create an ipfsd-ctl factory, layering caller-supplied options/overrides
 * on top of the shared defaults.
 */
const factory = (options = {}, overrides = {}) => {
  const mergedOptions = merge(commonOptions, options)
  const mergedOverrides = merge(commonOverrides, overrides)
  return createFactory(mergedOptions, mergedOverrides)
}

module.exports = factory
================================================
FILE: test/utils/throws-async.js
================================================
'use strict'
module.exports = async fnOrPromise => {
try {
await (fnOrPromise.then ? fnOrPromise : fnOrPromise())
} catch (err) {
return err
}
throw new Error('did not throw')
}