Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
add command works, fixed init
  • Loading branch information
nginnever committed Apr 26, 2016
commit b66665ea00f240ddc6e1d526f76a76ba4623d558
108 changes: 10 additions & 98 deletions src/cli/commands/files/add.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,66 +10,7 @@ const streamifier = require('streamifier')
const fs = require('fs')
const async = require('async')
const pathj = require('path')
const glob = require("glob")

let rootPath
let filePath
let i

// Forward a {path, stream} pair to the module-level importer stream `i`.
// NOTE(review): `i` is a module-level variable assigned elsewhere in this
// file — this assumes it has been initialised before any add happens;
// verify against the callers.
function addStream (pair) {
i.add(pair)
}


// Recursively add the contents of a directory to the importer stream `i`.
//
// `path` is a path segment appended once to the module-level `rootPath`
// (the on-disk location being read) and `filePath` (the relative path
// recorded in the importer). Each regular file is read fully into memory
// and pushed to `i` as a {path, stream} pair; sub-directories are walked
// depth-first. Errors from the traversal are thrown.
//
// Bug fix: the previous version mutated the shared `rootPath`/`filePath`
// on every recursive call and never restored them, so any sibling entry
// processed after a sub-directory was joined against the sub-directory's
// paths and got a corrupted name. The recursion now threads explicit
// local paths through a helper instead of mutating module state.
function addDir (path) {
  // Advance the module-level paths once for the directory being added,
  // matching the original entry behaviour for top-level callers.
  rootPath = pathj.join(rootPath, path)
  filePath = pathj.join(filePath, path)
  walkDirectory(rootPath, filePath)
}

// Walk `diskPath` on disk, emitting every file found to `i` with its
// importer path rewritten relative to `storePath`. Private helper for
// addDir — does not touch the module-level path variables.
function walkDirectory (diskPath, storePath) {
  const entries = fs.readdirSync(diskPath)
  async.forEachSeries(entries, (entry, callback) => {
    const nestedDiskPath = pathj.join(diskPath, entry)
    const nestedStorePath = pathj.join(storePath, entry)
    const stat = fs.statSync(nestedDiskPath)
    if (stat.isFile()) {
      // Files are buffered whole before streaming — fine for init docs,
      // but NOTE(review): large files would be better streamed directly.
      const buffered = fs.readFileSync(nestedDiskPath)
      const r = streamifier.createReadStream(buffered)
      i.add({path: nestedStorePath, stream: r})
    }
    if (stat.isDirectory()) {
      // TODO check if the directory is empty, add sentinel empty dir
      walkDirectory(nestedDiskPath, nestedStorePath)
    }
    callback()
  }, (err) => {
    if (err) {
      throw err
    }
  })
}

// Dispatch on the stat of `path`: add a single file directly to the
// importer `i`, or seed the module-level `rootPath`/`filePath` and walk
// the directory via addDir. `recursive` is accepted for interface
// compatibility with callers but is not consulted here — the caller is
// expected to have already rejected non-recursive directory adds.
function choosePath (recursive, path, stats) {
  if (stats.isFile()) {
    const buffered = fs.readFileSync(path)
    const r = streamifier.createReadStream(buffered)
    // pathj.basename replaces the old substring-after-last-'/' trick;
    // equivalent on POSIX paths and also correct on Windows separators.
    i.add({path: pathj.basename(path), stream: r})
  } else if (stats.isDirectory()) {
    console.log(path)
    rootPath = path
    filePath = pathj.basename(path)
    addDir('')
  }
}
const glob = require('glob')

module.exports = Command.extend({
desc: 'Add a file to IPFS using the UnixFS data format',
Expand All @@ -88,13 +29,13 @@ module.exports = Command.extend({
if (!path) {
throw new Error('Error: Argument \'path\' is required')
}

s = fs.statSync(path)

if (s.isDirectory() && recursive == false) {
if (s.isDirectory() && recursive === false) {
throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you use backticks for these error messages, like this:

throw new Error(`Error: ${process.cwd()} is a directory, use the '-r' flag to specify directories`)

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also I think the Error: in the front is not needed, in go-ipfs that's just the log level

}
if(path === '.' && recursive === true) {
}
if (path === '.' && recursive === true) {
path = process.cwd()
s = fs.statSync(process.cwd())
} else if (path === '.' && recursive === false) {
Expand All @@ -104,7 +45,10 @@ module.exports = Command.extend({
}
}

glob(pathj.join(path,'/**/*'), (err, res) => {
glob(pathj.join(path, '/**/*'), (err, res) => {
if (err) {
throw err
}
if (res.length === 0) {
res = pathj.join(process.cwd(), path)
}
Expand All @@ -114,14 +58,6 @@ module.exports = Command.extend({
}
if (utils.isDaemonOn()) {
throw new Error('daemon running is not supported yet')
// TODO create files.add js-ipfs-api
/*return ipfs.add(pair.stream, (err, res) => {
if (err) {
log.error(err)
throw err
}
console.log('added', res[0].Hash)
})*/
}
const i = ipfs.files.add()
i.on('data', (file) => {
Expand All @@ -131,10 +67,7 @@ module.exports = Command.extend({
const index = path.lastIndexOf('/')
async.eachLimit(res, 10, (element, callback) => {
const addPath = element.substring(index + 1, element.length)
//console.log(element)
if (fs.statSync(element).isDirectory()) {
const filePair = {path: addPath}
i.write(filePair)
callback()
} else {
const buffered = fs.readFileSync(element)
Expand All @@ -157,28 +90,7 @@ module.exports = Command.extend({
i.write(filePair)
i.end()
}
})
})
})

// console.log(utils.isDaemonOn())
// utils.getIPFS((err, ipfs) => {
// if (err) {
// throw err
// }
// //console.log(ipfs)
// if (path.charAt(0) !== '/') {
// path = process.cwd() + '/' + path
// }
// ipfs.files.add(path, {
// recursive: recursive
// }, (err, stats) => {
// if (err) {
// return console.log(err)
// }
// if (stats) {
// console.log('added', bs58.encode(stats.Hash).toString(), stats.Name)
// }
// })
// })
}
})
2 changes: 2 additions & 0 deletions src/cli/commands/files/cat.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const IPFS = require('../../../core')
Expand Down
12 changes: 9 additions & 3 deletions src/cli/commands/files/get.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const IPFS = require('../../../core')
Expand All @@ -11,13 +13,17 @@ module.exports = Command.extend({
options: {},

run: (path, options) => {
let dir
let filepath
let ws

var node = new IPFS()
if (!path) {
throw new Error("Argument 'path' is required")
}
if (!options) {
options = {}
var dir = process.cwd()
dir = process.cwd()
} else {
if (options.slice(-1) !== '/') {
options += '/'
Expand All @@ -30,9 +36,9 @@ module.exports = Command.extend({
}
data.on('file', (data) => {
if (data.path.lastIndexOf('/') === -1) {
var filepath = data.path
filepath = data.path
if (data.dir === false) {
var ws = fs.createWriteStream(dir + data.path)
ws = fs.createWriteStream(dir + data.path)
data.stream.pipe(ws)
} else {
try {
Expand Down
20 changes: 7 additions & 13 deletions src/core/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,17 +10,12 @@ const DAGService = mDAG.DAGService
const peerId = require('peer-id')
const PeerInfo = require('peer-info')
const multiaddr = require('multiaddr')
const importer = require('ipfs-unixfs-engine').importer
const exporter = require('ipfs-unixfs-engine').exporter
const Importer = require('ipfs-unixfs-engine').importer
const Exporter = require('ipfs-unixfs-engine').exporter
const libp2p = require('libp2p-ipfs')
const init = require('./init')
const IPFSRepo = require('ipfs-repo')
const UnixFS = require('ipfs-unixfs')
const glob = require("glob")
const path = require('path')
const fs = require('fs')
const streamifier = require('streamifier')
const async = require('async')

exports = module.exports = IPFS

Expand Down Expand Up @@ -400,13 +395,13 @@ function IPFS (repo) {
arr = undefined
}
if (callback === undefined) {
callback = function noop() {}
callback = function noop () {}
}
if (arr === undefined) {
return new importer(dagS)
return new Importer(dagS)
}

const i = new importer(dagS)
const i = new Importer(dagS)
const res = []

i.on('data', (info) => {
Expand All @@ -422,7 +417,6 @@ function IPFS (repo) {
})

i.end()

},
cat: (hash, callback) => {
dagS.get(hash, (err, fetchedNode) => {
Expand All @@ -433,13 +427,13 @@ function IPFS (repo) {
if (data.type === 'directory') {
callback('This dag node is a directory', null)
} else {
const exportEvent = exporter(hash, dagS)
const exportEvent = Exporter(hash, dagS)
callback(null, exportEvent)
}
})
},
get: (hash, callback) => {
var exportFile = exporter(hash, dagS)
var exportFile = Exporter(hash, dagS)
callback(null, exportFile)
}
}
Expand Down
38 changes: 34 additions & 4 deletions src/core/init.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ const peerId = require('peer-id')
const IpfsBlocks = require('ipfs-blocks').BlockService
const IpfsDagService = require('ipfs-merkle-dag').DAGService
const path = require('path')
const glob = require("glob")
const async = require('async')
const streamifier = require('streamifier')
const fs = require('fs')

module.exports = (repo, opts, callback) => {
opts = opts || {}
Expand Down Expand Up @@ -69,11 +73,37 @@ module.exports = (repo, opts, callback) => {

const initDocsPath = path.join(__dirname, '../init-files/init-docs')

importer.import(initDocsPath, dag, {
recursive: true
}, doneImport)
const i = new importer(dag)
i.on('data', (file) => {
})
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why an empty listener?


glob(path.join(initDocsPath,'/**/*'), (err, res) => {
const index = __dirname.lastIndexOf('/')
async.eachLimit(res, 10, (element, callback) => {
const addPath = element.substring(index + 1, element.length)
if (fs.statSync(element).isDirectory()) {
callback()
} else {
const buffered = fs.readFileSync(element)
const r = streamifier.createReadStream(buffered)
const filePair = {path: addPath, stream: r}
i.write(filePair)
callback()
}
}, (err) => {
if (err) {
throw err
}
i.end()
return
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

no need for return

})
})

i.on('end', () => {
doneImport(null)
})

function doneImport (err, stat) {
function doneImport (err) {
if (err) { return callback(err) }

// All finished!
Expand Down
1 change: 1 addition & 0 deletions test/core-tests/test-files.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
/* eslint-env mocha */
'use strict'

const bl = require('bl')
const expect = require('chai').expect
Expand Down
14 changes: 7 additions & 7 deletions test/core-tests/test-init-node.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,14 @@ describe('init (Node.js specific)', function () {
it('init docs are written', (done) => {
ipfs.init({ bits: 64 }, (err) => {
expect(err).to.not.exist

// Check for default assets
var multihash = new Buffer('12205e7c3ce237f936c76faf625e90f7751a9f5eeb048f59873303c215e9cce87599', 'hex')
ipfs.object.get(multihash, {}, (err, node) => {
expect(err).to.not.exist
expect(node.links).to.exist
done()
})
setTimeout(() => {
ipfs.object.get(multihash, {}, (err, node) => {
expect(err).to.not.exist
expect(node.links).to.exist
done()
})
}, 1000)
})
})

Expand Down