This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit dea13f3

chore: fix linting and update to latest aegir

1 parent 8b13957

12 files changed (+63 lines, -55 lines)

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -1,3 +1,5 @@
+docs
+yarn.lock
 **/node_modules/
 **/*.log
 test/repo-tests*

package.json

Lines changed: 21 additions & 21 deletions
@@ -7,16 +7,16 @@
     "fs": false
   },
   "scripts": {
-    "lint": "aegir-lint",
-    "build": "aegir-build",
-    "test": "aegir-test",
-    "test:node": "aegir-test --env node",
-    "test:browser": "aegir-test --env browser",
-    "release": "aegir-release",
-    "release-minor": "aegir-release --type minor",
-    "release-major": "aegir-release --type major",
-    "coverage": "aegir-coverage",
-    "coverage-publish": "aegir-coverage publish"
+    "lint": "aegir lint",
+    "build": "aegir build",
+    "test": "aegir test",
+    "test:node": "aegir test --target node",
+    "test:browser": "aegir test --target browser",
+    "release": "aegir release",
+    "release-minor": "aegir release --type minor",
+    "release-major": "aegir release --type major",
+    "coverage": "aegir coverage",
+    "coverage-publish": "aegir coverage --provider coveralls"
   },
   "pre-commit": [
     "lint",
@@ -40,36 +40,36 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
-    "aegir": "^11.0.2",
+    "aegir": "^12.1.3",
     "chai": "^4.1.2",
     "dirty-chai": "^2.0.1",
-    "ipfs": "^0.26.0",
-    "ipfs-block-service": "^0.12.0",
-    "ipfs-repo": "^0.17.0",
+    "ipfs": "~0.26.0",
+    "ipfs-block-service": "~0.13.0",
+    "ipfs-repo": "~0.18.2",
     "ncp": "^2.0.0",
     "pre-commit": "^1.2.2",
     "pull-generate": "^2.2.0",
     "pull-zip": "^2.0.1",
     "rimraf": "^2.6.2",
-    "sinon": "^4.0.1",
+    "sinon": "^4.1.1",
     "split": "^1.0.1"
   },
   "dependencies": {
-    "async": "^2.5.0",
+    "async": "^2.6.0",
     "bs58": "^4.0.1",
     "cids": "~0.5.2",
-    "deep-extend": "^0.5.0",
-    "ipfs-unixfs": "^0.1.13",
-    "ipld-dag-pb": "^0.11.2",
-    "ipld-resolver": "^0.13.4",
+    "deep-extend": "~0.5.0",
+    "ipfs-unixfs": "~0.1.14",
+    "ipld-dag-pb": "~0.11.3",
+    "ipld-resolver": "~0.14.1",
     "left-pad": "^1.1.3",
     "lodash": "^4.17.4",
     "multihashes": "~0.4.12",
     "multihashing-async": "~0.4.7",
     "pull-batch": "^1.0.0",
     "pull-block": "1.2.0",
     "pull-cat": "^1.1.11",
-    "pull-defer": "^0.2.2",
+    "pull-defer": "~0.2.2",
     "pull-pair": "^1.1.0",
     "pull-paramap": "^1.2.2",
     "pull-pause": "0.0.1",

src/builder/builder.js

Lines changed: 1 addition & 1 deletion
@@ -104,7 +104,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
       if (options.progress && typeof options.progress === 'function') {
         options.progress(chunk.byteLength)
       }
-      return new Buffer(chunk)
+      return Buffer.from(chunk)
     }),
     pull.map(buffer => new UnixFS('file', buffer)),
     pull.asyncMap((fileNode, callback) => {
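
The Buffer change here (and in the files below) follows the Node.js migration away from the deprecated new Buffer() constructor, whose behaviour depends on the argument type and which returned uninitialised memory when given a number on older Node.js releases. A minimal sketch of the replacement APIs:

// Buffer.from copies bytes from an existing chunk (TypedArray, array, Buffer).
const copied = Buffer.from(Uint8Array.of(1, 2, 3))

// Buffer.from also encodes strings (utf8 by default).
const encoded = Buffer.from('hey')

// Buffer.alloc returns n zero-filled bytes, the safe stand-in for new Buffer(n).
const zeroed = Buffer.alloc(4)

console.log(copied, encoded, zeroed)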

src/exporter/file.js

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ module.exports = (node, name, pathRest, ipldResolver) => {
   function getData (node) {
     try {
       const file = UnixFS.unmarshal(node.data)
-      return file.data || new Buffer(0)
+      return file.data || Buffer.alloc(0)
     } catch (err) {
       throw new Error('Failed to unmarshal node')
     }

src/hamt/consumable-buffer.js

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ module.exports = class ConsumableBuffer {
     this._currentBitPos -= taking
     if (this._currentBitPos < 0) {
       this._currentBitPos = 7
-      this._currentBytePos --
+      this._currentBytePos--
     }
   }

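
The change here is pure whitespace (the linter rejects a space before the decrement operator), but the touched lines implement the class's bit-cursor wraparound. A hypothetical standalone version of that step, for illustration only:

// Bits are consumed from the high bit of each byte downward; when the bit
// position underflows, restart at bit 7 of the previous byte.
function advance (state, taking) {
  state.currentBitPos -= taking
  if (state.currentBitPos < 0) {
    state.currentBitPos = 7
    state.currentBytePos--
  }
}

const state = { currentBitPos: 2, currentBytePos: 5 }
advance(state, 4)
console.log(state) // { currentBitPos: 7, currentBytePos: 4 }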

src/importer/dir-sharded.js

Lines changed: 2 additions & 2 deletions
@@ -24,7 +24,7 @@ const hashFn = function (value, callback) {
   // for parity..
   const justHash = hash.slice(2, 10)
   const length = justHash.length
-  const result = new Buffer(length)
+  const result = Buffer.alloc(length)
   // TODO: invert buffer because that's how Go impl does it
   for (let i = 0; i < length; i++) {
     result[length - i - 1] = justHash[i]
@@ -138,7 +138,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
   function haveLinks (links) {
     // go-ipfs uses little endian, that's why we have to
     // reverse the bit field before storing it
-    const data = new Buffer(children.bitField().reverse())
+    const data = Buffer.from(children.bitField().reverse())
     const dir = new UnixFS('hamt-sharded-directory', data)
     dir.fanout = bucket.tableSize()
     dir.hashType = options.hashFn.code
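
Both hunks sit in code that exists for parity with go-ipfs, which stores the HAMT bit field little-endian; the byte order is reversed before the bytes are wrapped in a Buffer. A hedged illustration with made-up values:

// Hypothetical two-byte bit field as bitField() might return it; reversing
// the array flips the byte order before Buffer.from copies it.
const bitField = [0b00000001, 0b10000000]
const data = Buffer.from(bitField.reverse())
console.log(data) // <Buffer 80 01>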

test/test-dag-api.js

Lines changed: 3 additions & 1 deletion
@@ -158,7 +158,9 @@ describe('with dag-api', () => {

     const expected = extend({}, defaultResults, strategies[strategy])

-    describe(strategy + ' importer', () => {
+    describe(strategy + ' importer', function () {
+      this.timeout(20 * 1000)
+
       let node

       const options = {
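
The arrow function has to become a regular function because Mocha exposes per-suite controls such as timeout() on the `this` it binds to the callback, and arrow functions ignore that binding. A minimal sketch:

describe('slow suite', function () {
  this.timeout(20 * 1000) // works: `this` is Mocha's suite context

  it('gets the extended timeout', function (done) {
    setTimeout(done, 50)
  })
})

// With an arrow, `this` would be the enclosing module scope instead:
// describe('broken', () => { this.timeout(20 * 1000) }) // throws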

test/test-dirbuilder-sharding.js

Lines changed: 7 additions & 5 deletions
@@ -17,7 +17,9 @@ const setImmediate = require('async/setImmediate')
 const leftPad = require('left-pad')

 module.exports = (repo) => {
-  describe('dirbuilder sharding', () => {
+  describe('dirbuilder sharding', function () {
+    this.timeout(20 * 1000)
+
     let ipldResolver

     before(() => {
@@ -37,7 +39,7 @@ module.exports = (repo) => {
       pull.values([
         {
           path: 'a/b',
-          content: pull.values([new Buffer('i have the best bytes')])
+          content: pull.values([Buffer.from('i have the best bytes')])
         }
       ]),
       importer(ipldResolver, options),
@@ -62,7 +64,7 @@ module.exports = (repo) => {
       pull.values([
         {
           path: 'a/b',
-          content: pull.values([new Buffer('i have the best bytes')])
+          content: pull.values([Buffer.from('i have the best bytes')])
         }
       ]),
       importer(ipldResolver, options),
@@ -161,7 +163,7 @@ module.exports = (repo) => {
         i++
         const pushable = {
           path: 'big/' + leftPad(i.toString(), 4, '0'),
-          content: pull.values([new Buffer(i.toString())])
+          content: pull.values([Buffer.from(i.toString())])
         }
         push.push(pushable)
         setImmediate(callback)
@@ -257,7 +259,7 @@ module.exports = (repo) => {
       }
       const pushed = {
         path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'),
-        content: pull.values([new Buffer(i.toString())])
+        content: pull.values([Buffer.from(i.toString())])
       }
       push.push(pushed)
       pending--
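
All of these tests share one pull-stream shape: a source of { path, content } entries piped through the importer into a collector. A hedged sketch of that pattern (the require path and the ipldResolver/options setup are assumed, mirroring the tests):

const pull = require('pull-stream')
const importer = require('./src').importer

pull(
  pull.values([
    { path: 'a/b', content: pull.values([Buffer.from('i have the best bytes')]) }
  ]),
  importer(ipldResolver, options), // resolver and options as in the suite's setup
  pull.collect((err, nodes) => {
    if (err) throw err
    console.log(nodes.map((node) => node.path)) // entries for 'a/b' and for 'a'
  })
)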

test/test-hamt.js

Lines changed: 14 additions & 14 deletions
@@ -43,7 +43,7 @@ describe('HAMT', () => {
     it('can get that value', (callback) => {
       bucket.get('key', (err, result) => {
         expect(err).to.not.exist()
-        expect(result).to.be.eql('value')
+        expect(result).to.eql('value')
         callback()
       })
     })
@@ -55,7 +55,7 @@ describe('HAMT', () => {
     it('can get that value', (callback) => {
       bucket.get('key', (err, result) => {
         expect(err).to.not.exist()
-        expect(result).to.be.eql('a different value')
+        expect(result).to.eql('a different value')
         callback()
       })
     })
@@ -78,36 +78,40 @@ describe('HAMT', () => {
   })

   describe('many keys', () => {
-    let bucket, keys, masterHead
+    let bucket
+    let keys
+    let masterHead

     it('can create an empty one', () => {
       bucket = HAMT(options)
     })

-    it('accepts putting many keys', (callback) => {
+    it('accepts putting many keys', (done) => {
      const max = 400
      keys = new Array(max)
      for (let i = 1; i <= max; i++) {
        keys[i - 1] = i.toString()
      }

-      each(keys, (key, callback) => bucket.put(key, key, callback), callback)
+      each(keys, (key, callback) => bucket.put(key, key, callback), done)
     })

-    it('can remove all the keys and still find remaining', (callback) => {
+    it('can remove all the keys and still find remaining', function (done) {
+      this.timeout(10 * 1000)
+
       masterHead = keys.pop()
       iterate()

       function iterate () {
         const head = keys.shift()
         if (!head) {
-          callback()
+          done()
           return // early
         }

         bucket.get(head, (err, value) => {
           expect(err).to.not.exist()
-          expect(value).to.be.eql(head)
+          expect(value).to.eql(head)
           bucket.del(head, afterDel)
         })

@@ -120,18 +124,14 @@ describe('HAMT', () => {
         expect(err).to.not.exist()
         expect(value).to.be.undefined()

-        each(
-          keys,
-          onEachKey,
-          reiterate
-        )
+        each(keys, onEachKey, reiterate)
       }
     }

     function onEachKey (key, callback) {
       bucket.get(key, (err, value) => {
         expect(err).to.not.exist()
-        expect(value).to.be.eql(key)
+        expect(value).to.eql(key)
         callback()
       })
     }
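
Besides swapping chai's to.be.eql for the terser to.eql and splitting the combined let, these hunks show the bucket's callback API end to end. A hedged usage sketch, with the require path and options assumed to match the suite:

const HAMT = require('./src/hamt')

const bucket = HAMT(options) // `options` carries the hash function, as in the suite

bucket.put('key', 'value', (err) => {
  if (err) throw err
  bucket.get('key', (err, result) => {
    if (err) throw err
    console.log(result) // 'value'
    bucket.del('key', (err) => {
      if (err) throw err
    })
  })
})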

test/test-importer-flush.js

Lines changed: 3 additions & 3 deletions
@@ -39,7 +39,7 @@ module.exports = (repo) => {

       source.push({
         path: 'a',
-        content: pull.values([new Buffer('hey')])
+        content: pull.values([Buffer.from('hey')])
       })

       importer.flush((err, hash) => {
@@ -74,7 +74,7 @@ module.exports = (repo) => {

       source.push({
         path: 'b/c',
-        content: pull.values([new Buffer('hey')])
+        content: pull.values([Buffer.from('hey')])
       })

       importer.flush((err, hash) => {
@@ -129,7 +129,7 @@ module.exports = (repo) => {
       const filePath = dirPath + '/filename'
       const file = {
         path: filePath,
-        content: pull.values([new Buffer('file with path ' + filePath)])
+        content: pull.values([Buffer.from('file with path ' + filePath)])
       }
       source.push(file)
       if (currentDir.depth === 0 || childCount + 1 === maxEntriesPerDir) {
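
Unlike the fixed pull.values() sources elsewhere, the flush tests push entries into a live source while the import is running. A sketch of that pattern, assuming the pull-pushable package these tests pair with pull-stream:

const pull = require('pull-stream')
const pushable = require('pull-pushable')

const source = pushable()
source.push({ path: 'a', content: pull.values([Buffer.from('hey')]) })
// ...push more entries as they become available, then:
source.end()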

test/test-importer.js

Lines changed: 4 additions & 2 deletions
@@ -108,7 +108,7 @@ const strategyOverrides = {
 }

 module.exports = (repo) => {
-  strategies.forEach(strategy => {
+  strategies.forEach((strategy) => {
     const baseFiles = strategyBaseFiles[strategy]
     const defaultResults = extend({}, baseFiles, {
       'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], {
@@ -160,7 +160,9 @@ module.exports = (repo) => {

     const expected = extend({}, defaultResults, strategies[strategy])

-    describe(strategy + ' importer', () => {
+    describe(strategy + ' importer', function () {
+      this.timeout(20 * 1000)
+
       let ipldResolver

       const options = {

test/test-nested-dir-import-export.js

Lines changed: 4 additions & 4 deletions
@@ -25,10 +25,10 @@ module.exports = (repo) => {
     it('imports', (done) => {
       pull(
         pull.values([
-          { path: 'a/b/c/d/e', content: pull.values([new Buffer('banana')]) },
-          { path: 'a/b/c/d/f', content: pull.values([new Buffer('strawberry')]) },
-          { path: 'a/b/g', content: pull.values([new Buffer('ice')]) },
-          { path: 'a/b/h', content: pull.values([new Buffer('cream')]) }
+          { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) },
+          { path: 'a/b/c/d/f', content: pull.values([Buffer.from('strawberry')]) },
+          { path: 'a/b/g', content: pull.values([Buffer.from('ice')]) },
+          { path: 'a/b/h', content: pull.values([Buffer.from('cream')]) }
         ]),
         unixFSEngine.importer(ipldResolver),
         pull.collect((err, files) => {
