if (_what === 'all' || _what === 'directory') {
  if (utils.fs.exists(this.paths.conf)) {
    throw new Error('A pando repository already exists in the current working directory')
  }
  // Build the on-disk repository layout and seed it with an empty index and an undefined head
  await utils.fs.mkdir(this.paths.pando)
  await utils.fs.mkdir(this.paths.tmp)
  await utils.fs.mkdir(this.paths.refs)
  await utils.json.write(this.paths.conf, configuration)
  await utils.json.write(this.paths.index, {})
  await utils.json.write(this.paths.head, 'undefined')
  console.log('DIRECTORY BUILD')
}

if (_what === 'all' || _what === 'ipfs') {
  // Start a js-ipfs node backed by the repository's own repo path, then stop it once it is ready
  this.ipfs = new IPFS({ repo: this.paths.ipfs })
  eventify(this.ipfs, 'error').catch(err => { throw err })
  await eventify(this.ipfs, 'ready')
  this.satellizer = new Satellizer(this.ipfs)
  await this.ipfs.stop()
  console.log('IPFS IS READY')
}

if (_what === 'all' || _what === 'dao') {
  console.log('DOING THE DAO STUFF')
  this.dao = await this.pando.dao.create()
}
document.addEventListener('DOMContentLoaded', async () => {
  // IPFS node setup
  const node = await IPFS.create({ repo: String(Math.random() + Date.now()) })

  // UI elements
  const status = document.getElementById('status')
  const output = document.getElementById('output')

  output.textContent = ''

  function log (txt) {
    console.info(txt)
    output.textContent += `${txt.trim()}\n`
  }

  status.innerText = 'Connected to IPFS :)'

  const version = await node.version()
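  // The excerpt ends here. An assumed continuation (not from the original source): surface the
  // version and peer id through the `log` helper defined above, then close the handler.
  log(`The node's js-ipfs version is ${version.version}`)
  const { id } = await node.id()
  log(`The node's peer id is ${id}`)
})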
async function main () {
  // Initialize our IPFS node with the custom repo options
  const node = await IPFS.create({
    repo: new Repo('/tmp/custom-repo/.ipfs', customRepositoryOptions),
    // This just means we don't try to connect to the network, which isn't necessary
    // to demonstrate custom repos
    config: {
      Bootstrap: []
    }
  })

  // Test the new repo by adding and fetching some data
  console.log('Ready')

  const { version } = await node.version()
  console.log('Version:', version)

  // Once we have the version, let's add a file to IPFS
  // (the original excerpt is truncated here; the path and content below are illustrative placeholders)
  const filesAdded = await node.add({
    path: 'test-data.txt',
    content: 'Some data to exercise the custom repo'
  })
  console.log('Added file:', filesAdded)
}
  const dirCid = err.cid
  console.log('resolver.directory', dirCid.toBaseEncodedString())

  const data = await resolver.directory(ipfs, url, dirCid)
  console.log('resolver.directory', Array.isArray(data) ? data : `returned '${typeof data}'`)

  // TODO: redirect so a directory URL always ends with `/`
  if (typeof data === 'string') {
    // return HTML with the directory listing
    return {
      content: textBuffer(data),
      contentType: 'text/html',
      contentEncoding: 'utf-8'
    }
  } else if (Array.isArray(data)) {
    console.log('resolver.directory.indexes', data)
    // return the first index file
    path = PathUtils.joinURLParts(path, data[0].name)
    return getResponse(ipfs, url, path)
  }

  throw new Error('Invalid output of resolver.directory')
} else if (err.parentDagNode && err.missingLinkName) {
  // It may be a legitimate error, but it could also be part of a hamt-sharded-directory
  // (example: ipns://tr.wikipedia-on-ipfs.org/wiki/Anasayfa.html)
  // which is not supported by resolver.cid from ipfs-http-response at this time.
  // Until sharding support for ipfs.resolve is added upstream, we use the fallback below.
  // TODO remove this after ipfs-http-response switches to ipfs.resolve
  // or sharding is supported by some other means
  try {
    const matchingLink = (await ipfs.ls(err.parentDagNode, { resolveType: false })).find(item => item.name === err.missingLinkName)
    if (matchingLink) {
      console.log('resolver.cid.err.matchingLink', matchingLink)
      path = path.replace(matchingLink.path, matchingLink.hash)
      console.log('resolver.cid.err.path.after.matchingLink', path)
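      // The excerpt cuts off here. Since the directory branch above returns
      // getResponse(ipfs, url, path) after adjusting the path, the assumed (not original)
      // next step is to retry resolution the same way with the rewritten path:
      return getResponse(ipfs, url, path)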
      // a dag-pb node with no links is a leaf
      isLeaf: !source.Links.length,
      length: (await ipfs.block.get(cid)).data.length,
      unixfsData
    }
  } catch (err) {
    // dag-pb, but not a unixfs node
    console.log(err)
  }
  for (let i = 0; i < source.Links.length; i++) {
    await this._getGraphNodes(source.Links[i].Hash.toString(), nodeMap)
  }

  if (!source.Links.length) classes.push('leaf')
  if (nodeData) classes.push('unixfs', nodeData.unixfsData.type)
} else if (Buffer.isBuffer(source)) {
  classes.push('raw')
  nodeData = { type: 'raw', isLeaf: true, length: source.length }
} else {
  // TODO: What kind of IPLD node is this? How do we extract the links?
  classes.push('leaf')
  nodeData = { type: 'unknown', isLeaf: true }
}
nodeMap.set(cid, {
  group: 'nodes',
  data: { id: cid, ...nodeData },
  classes
})
;(source.Links || []).forEach(link => {
  nodeMap.set(cid + '->' + link.Hash, {
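    // The excerpt is cut off above. Judging from the cytoscape-style element written for the
    // node entry ({ group, data, classes }), the edge entry presumably completes along these
    // lines (a reconstruction, not the original code):
    group: 'edges',
    data: { source: cid, target: link.Hash.toString() }
  })
})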
import async from "async-es";
// import Metascraper from "metascraper";
// import isIPFS from "is-ipfs";
import Home from "./Home";
import Header from "./Header";
import Post from "./Post";
import Profile from "./Profile";
import Stats from "./Stats";
import ProfileCard from "./ProfileCard";
import { IMAGE_TYPES, AUDIO_TYPES } from "./Editor/constants";
import IPFS from "ipfs";
const node = new IPFS({
  EXPERIMENTAL: {
    pubsub: true,
    dht: true
  },
  start: true
});
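// The node above enables the experimental pubsub (and DHT) subsystems, and the component below
// binds a `handleMessage` method, which suggests posts are exchanged over pubsub. A hypothetical
// sketch of that wiring with this pre-async/await js-ipfs API (the topic name and handler are
// assumptions, not taken from the original app):
const TOPIC = "example-posts"; // assumed topic name
node.on("ready", async () => {
  await node.pubsub.subscribe(TOPIC, msg =>
    console.log("pubsub message from", msg.from, msg.data.toString())
  );
  console.log("subscribed to", TOPIC);
});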
export default class App extends Component {
  constructor(props) {
    super(props);

    this.publish = this.publish.bind(this);
    this.handleMessage = this.handleMessage.bind(this);
    this.toggleEditor = this.toggleEditor.bind(this);
    this.setIcon = this.setIcon.bind(this);
    this.setBackground = this.setBackground.bind(this);
  },
  getAll() {
    return _seeds
  },
  remove(id) {
    delete _seeds[id]
    write()
  }
}
})()
let ipfs_connected = false

const ipfs = new IPFS({
  repo: '../database',
  EXPERIMENTAL: {
    pubsub: true
  },
  config: {
    Addresses: {
      Swarm: [
        '/ip4/46.101.244.101/tcp/9090/ws/p2p-websocket-star/',
        '/ip4/146.185.173.84/tcp/9090/ws/p2p-websocket-star/',
        // '/dns4/ws-star.discovery.libp2p.io/tcp/443/wss/p2p-websocket-star'
      ]
    }
  }
})

ipfs.on('ready', () => {
  ipfs_connected = true
return () => {
  const VERSION_STRING = type === 'js'
    ? `js-ipfs version: ${require('ipfs/package.json').version}`
    : 'ipfs version 0.4.13'

  describe('daemon spawning', () => {
    it('prints the version', function (done) {
      if (!isNode || type === 'proc') {
        this.skip()
      }

      df.version({ exec }, (err, version) => {
        expect(err).to.not.exist()
        expect(version).to.be.eql(VERSION_STRING)
        done()
      })
    })

    describe('spawn a bare node', function () {
      this.ipfsd = null
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
const { isNode } = require('ipfs-utils/src/env')
const hat = require('hat')
const IPFSFactory = require('../src')
const JSIPFS = require('ipfs')
const { repoExists } = require('./../src/utils/repo/nodejs')
const tests = [
  { type: 'go', bits: 1024 },
  { type: 'js', bits: 512 },
  { type: 'proc', exec: JSIPFS, bits: 512 }
]

const jsVersion = require('ipfs/package.json').version
const versions = {
  js: `js-ipfs version: ${jsVersion}`,
  go: `ipfs version ${require('go-ipfs-dep/package.json').version}`,
  proc: jsVersion
}
describe('Spawn options', function () {
  this.timeout(60 * 1000)

  tests.forEach((fOpts) => describe(`${fOpts.type}`, () => {
    const VERSION_STRING = versions[fOpts.type]
    let f

    before(() => {
      f = IPFSFactory.create(fOpts)
    })
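    // The excerpt cuts off after the factory is created. A sketch of the kind of spec that
    // presumably follows, modeled on the 'prints the version' test earlier in this listing
    // (the spec wording and the bare f.version(cb) call are assumptions):
    it('prints the version', function (done) {
      f.version((err, version) => {
        expect(err).to.not.exist()
        expect(version).to.be.eql(VERSION_STRING)
        done()
      })
    })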
import SetDB from 'set-db';
import {multihash} from 'is-ipfs';
import IPFS from 'ipfs';
const dbHashKey = 'the.index.db';
const dbTopic = process.env.NODE_ENV === 'development' ?
'the.index.development' : 'the.index.production';
export const FILE = 'FILE';
export const COMMENT = 'COMMENT';
var dbHash = localStorage.getItem(dbHashKey);
const node = new IPFS({
  EXPERIMENTAL: {
    pubsub: true
  },
  config: {
    Addresses: {
      Swarm: [
        '/dns4/ws-star.discovery.libp2p.io/tcp/443/wss/p2p-websocket-star'
      ]
    }
  }
});

export const connect = new Promise((resolve, reject) => {
  node.on('ready', () => {
    node.id().then(info => console.log('node id is', info.id));

    const db = new SetDB(dbTopic, {