diff --git a/.aegir.js b/.aegir.cjs
similarity index 100%
rename from .aegir.js
rename to .aegir.cjs
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..6c241b6
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,69 @@
+name: ci
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: npm install
+      - run: npx aegir lint
+      - uses: gozala/typescript-error-reporter-action@v1.0.8
+      - run: npx aegir build
+      - run: npx aegir dep-check
+      - uses: ipfs/aegir/actions/bundle-size@master
+        name: size
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+  test-node:
+    needs: check
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [windows-latest, ubuntu-latest, macos-latest]
+        node: [14, 16]
+      fail-fast: true
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node }}
+      - run: npm install
+      - run: npx aegir test -t node --cov --bail
+      - uses: codecov/codecov-action@v1
+  test-chrome:
+    needs: check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: npm install
+      - run: npx aegir test -t browser -t webworker --bail
+  test-firefox:
+    needs: check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: npm install
+      - run: npx aegir test -t browser -t webworker --bail -- --browsers FirefoxHeadless
+  test-electron-main:
+    needs: check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: npm install
+      - run: npm run pretest
+      - run: npx xvfb-maybe aegir test -t electron-main --bail -f dist/cjs/node-test/*js
+  test-electron-renderer:
+    needs: check
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: npm install
+      - run: npm run pretest
+      - run: npx xvfb-maybe aegir test -t electron-renderer --bail -f dist/cjs/browser-test/*js
diff --git a/.gitignore b/.gitignore
index 9691fab..35d852c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,3 +36,4 @@ node_modules
 lib
 dist
 docs
+types
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 6a473b1..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-language: node_js
-cache: npm
-dist: bionic
-
-branches:
-  only:
-    - master
-    - /^release\/.*$/
-
-stages:
-  - check
-  - test
-  - cov
-
-node_js:
-  - 'lts/*'
-  - 'node'
-
-os:
-  - linux
-  - osx
-  - windows
-
-script: npx aegir test -t node --cov --bail
-after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov
-
-jobs:
-  include:
-    - stage: check
-      script:
-        - npx aegir dep-check
-        - cd examples/full-s3-repo && npm i && cd ../../
-        - npm run lint
-
-    - stage: test
-      name: chrome
-      addons:
-        chrome: stable
-      script: npx aegir test -t browser
-
-    - stage: test
-      name: firefox
-      addons:
-        firefox: latest
-      script: npx aegir test -t browser -- --browser firefox
-
-notifications:
-  email: false
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..14478a3
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,5 @@
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
diff --git a/LICENSE b/LICENSE-MIT
similarity index 88%
rename from LICENSE
rename to LICENSE-MIT
index 15ede75..749aa1e 100644
--- a/LICENSE
+++ b/LICENSE-MIT
@@ -1,6 +1,4 @@
-MIT License
-
-Copyright (c) 2017 IPFS
+The MIT License (MIT)
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -9,13 +7,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
\ No newline at end of file
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index 60658b3..7eeb08e 100644
--- a/README.md
+++ b/README.md
@@ -31,10 +31,11 @@ $ npm install datastore-s3
 If the flag `createIfMissing` is not set or is false, then the bucket must be created prior to using datastore-s3. Please see the AWS docs for information on how to configure the S3 instance. A bucket name is required to be set at the s3 instance level, see the below example.
 
 ```js
-const S3 = require('aws-sdk').S3
+import S3 from 'aws-sdk/clients/s3.js'
+import { S3Datastore } from 'datastore-s3'
+
 const s3Instance = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-const S3Store = require('datastore-s3')
-const store = new S3Store('.ipfs/datastore', {
+const store = new S3Datastore('.ipfs/datastore', {
   s3: s3Instance,
   createIfMissing: false
 })
@@ -55,4 +56,4 @@ Small note: If editing the Readme, please conform to the [standard-readme](https://github.com/RichardLitt/standard-readme) specification.
 
 ## License
 
-MIT 2018 © IPFS
+[Apache-2.0](LICENSE-APACHE) OR [MIT](LICENSE-MIT)
diff --git a/examples/full-s3-repo/create-s3-repo.js b/examples/full-s3-repo/create-s3-repo.js
index 811ea73..6d00261 100644
--- a/examples/full-s3-repo/create-s3-repo.js
+++ b/examples/full-s3-repo/create-s3-repo.js
@@ -1,13 +1,16 @@
-'use strict'
+import { S3Datastore } from 'datastore-s3'
+import { createRepo } from 'ipfs-repo'
+import { BlockstoreDatastoreAdapter } from 'blockstore-datastore-adapter'
+import { ShardingDatastore } from 'datastore-core/sharding'
+import { NextToLast } from 'datastore-core/shard'
+import * as raw from 'multiformats/codecs/raw'
+import * as json from 'multiformats/codecs/json'
+import * as dagPb from '@ipld/dag-pb'
+import * as dagCbor from '@ipld/dag-cbor'
 
-
-const DatastoreS3 = require('datastore-s3')
-const { createRepo } = require('ipfs-repo')
-const BlockstoreDatastoreAdapter = require('blockstore-datastore-adapter')
-const { ShardingDatastore, shard: { NextToLast } } = require('datastore-core')
-const { codecs: { raw, json } } = require('multiformats/basics')
-const dagPb = require('@ipld/dag-pb')
-const dagCbor = require('@ipld/dag-cbor')
+/**
+ * @typedef {import('multiformats/codecs/interface').BlockCodec<any, any>} BlockCodec
+ */
 
 /**
  * A convenience method for creating an S3 backed IPFS repo
@@ -16,13 +19,17 @@ const dagCbor = require('@ipld/dag-cbor')
  * @param {import('aws-sdk/clients/s3')} s3
  * @param {import('ipfs-repo').RepoLock} repoLock
  */
-const createS3Repo = (path, s3, repoLock) => {
+export const createS3Repo = (path, s3, repoLock) => {
   const storeConfig = {
     s3,
     createIfMissing: true
   }
 
-  // These are the codecs we want to support, you may wish to add others
+  /**
+   * These are the codecs we want to support, you may wish to add others
+   *
+   * @type {Record<string | number, BlockCodec>}
+   */
   const codecs = {
     [raw.code]: raw,
     [raw.name]: raw,
@@ -47,30 +54,28 @@ const createS3Repo = (path, s3, repoLock) => {
 
   return createRepo(path, loadCodec, {
     root: new ShardingDatastore(
-      new DatastoreS3(path, storeConfig),
+      new S3Datastore(path, storeConfig),
       new NextToLast(2)
     ),
     blocks: new BlockstoreDatastoreAdapter(
       new ShardingDatastore(
-        new DatastoreS3(`${path}blocks`, storeConfig),
+        new S3Datastore(`${path}blocks`, storeConfig),
         new NextToLast(2)
       )
     ),
     datastore: new ShardingDatastore(
-      new DatastoreS3(`${path}datastore`, storeConfig),
+      new S3Datastore(`${path}datastore`, storeConfig),
       new NextToLast(2)
     ),
     keys: new ShardingDatastore(
-      new DatastoreS3(`${path}keys`, storeConfig),
+      new S3Datastore(`${path}keys`, storeConfig),
       new NextToLast(2)
     ),
     pins: new ShardingDatastore(
-      new DatastoreS3(`${path}pins`, storeConfig),
+      new S3Datastore(`${path}pins`, storeConfig),
      new NextToLast(2)
    )
  }, {
    repoLock
  })
}
-
-module.exports = createS3Repo
diff --git a/examples/full-s3-repo/index.js b/examples/full-s3-repo/index.js
index 6cb5c96..1434e2d 100644
--- a/examples/full-s3-repo/index.js
+++ b/examples/full-s3-repo/index.js
@@ -1,10 +1,8 @@
-'use strict'
-
-const IPFS = require('ipfs')
-const toBuffer = require('it-to-buffer')
-const createRepo = require('./create-s3-repo')
-const S3 = require('aws-sdk/clients/s3')
-const S3Lock = require('./s3-lock')
+import IPFS from 'ipfs-core'
+import toBuffer from 'it-to-buffer'
+import { createS3Repo } from './create-s3-repo.js'
+import S3 from 'aws-sdk/clients/s3.js'
+import { S3Lock } from './s3-lock.js'
 
 async function main () {
   // Configure S3 as normal
@@ -22,7 +20,7 @@ async function main () {
   const repoLock = new S3Lock(s3)
 
   // Create the repo
-  const s3Repo = createRepo('/', s3, repoLock)
+  const s3Repo = createS3Repo('/', s3, repoLock)
 
   // Create a new IPFS node with our S3 backed Repo
   console.log('Start ipfs')
diff --git a/examples/full-s3-repo/package.json b/examples/full-s3-repo/package.json
index 3448df6..f7df748 100644
--- a/examples/full-s3-repo/package.json
+++ b/examples/full-s3-repo/package.json
@@ -13,11 +13,11 @@
     "@ipld/dag-cbor": "^6.0.9",
     "@ipld/dag-pb": "^2.1.9",
     "aws-sdk": "^2.885.0",
-    "blockstore-datastore-adapter": "^1.0.0",
-    "datastore-core": "^5.0.1",
+    "blockstore-datastore-adapter": "^2.0.1",
+    "datastore-core": "^6.0.6",
     "datastore-s3": "../../",
-    "ipfs": "^0.58.1",
-    "ipfs-repo": "^11.0.1",
+    "ipfs-core": "^0.10.6",
+    "ipfs-repo": "^12.0.0",
     "it-to-buffer": "^2.0.0",
     "multiformats": "^9.4.5"
   }
diff --git a/examples/full-s3-repo/s3-lock.js b/examples/full-s3-repo/s3-lock.js
index e7b4421..4e37e65 100644
--- a/examples/full-s3-repo/s3-lock.js
+++ b/examples/full-s3-repo/s3-lock.js
@@ -1,7 +1,7 @@
 'use strict'
 
-const PATH = require('path')
-const { fromString: uint8ArrayFromString } = require('uint8arrays')
+import PATH from 'path'
+import { fromString as uint8ArrayFromString } from 'uint8arrays'
 
 /**
  * Uses an object in an S3 bucket as a lock to signal that an IPFS repo is in use.
@@ -13,7 +13,7 @@
  * @typedef {import('ipfs-repo').LockCloser} LockCloser
  */
 
-class S3Lock {
+export class S3Lock {
   /**
    * @param {import('aws-sdk/clients/s3')} s3
    */
@@ -153,5 +153,3 @@
     return true
   }
 }
-
-module.exports = S3Lock
diff --git a/package.json b/package.json
index 8959dde..d89c5cd 100644
--- a/package.json
+++ b/package.json
@@ -4,20 +4,28 @@
   "description": "IPFS datastore implementation backed by s3",
   "leadMaintainer": "Jacob Heun",
   "main": "src/index.js",
-  "types": "dist/src/index.d.ts",
+  "type": "module",
+  "types": "types/src/index.d.ts",
   "files": [
-    "dist",
-    "src"
+    "*",
+    "!**/*.tsbuildinfo"
   ],
+  "exports": {
+    ".": {
+      "import": "./src/index.js"
+    }
+  },
   "scripts": {
+    "clean": "rimraf dist types",
+    "prepare": "aegir build --no-bundle && cp -R types dist",
     "lint": "aegir ts -p check && aegir lint",
+    "build": "aegir build",
+    "release": "aegir release --target node",
+    "release-minor": "aegir release --type minor --target node",
+    "release-major": "aegir release --type major --target node",
+    "pretest": "aegir build --esm-tests",
     "test": "aegir test",
-    "test:node": "aegir test --target node",
-    "release": "aegir release --target node --docs",
-    "release-minor": "aegir release --type minor --target node --docs",
-    "release-major": "aegir release --type major --target node --docs",
-    "coverage": "nyc --reporter=text --reporter=lcov npm run test:node",
-    "docs": "aegir docs"
+    "dep-check": "aegir dep-check -i rimraf"
   },
   "repository": {
     "type": "git",
@@ -30,15 +38,21 @@
     "datastore",
     "s3"
   ],
-  "license": "MIT",
+  "license": "(Apache-2.0 OR MIT)",
   "bugs": {
     "url": "https://github.com/ipfs/js-datastore-s3/issues"
   },
   "homepage": "https://github.com/ipfs/js-datastore-s3#readme",
"eslintConfig": { + "extends": "ipfs", + "parserOptions": { + "sourceType": "module" + } + }, "dependencies": { "buffer": "^6.0.3", - "datastore-core": "^5.0.0", - "interface-datastore": "^5.1.1", + "datastore-core": "^6.0.5", + "interface-datastore": "^6.0.2", "it-filter": "^1.0.2", "it-to-buffer": "^2.0.0", "uint8arrays": "^3.0.0" @@ -47,7 +61,7 @@ "aegir": "^35.0.2", "assert": "^2.0.0", "aws-sdk": "^2.579.0", - "interface-datastore-tests": "^1.0.0", + "interface-datastore-tests": "^2.0.3", "sinon": "^11.1.2", "util": "^0.12.3" }, diff --git a/src/index.js b/src/index.js index 89fae56..1c92312 100644 --- a/src/index.js +++ b/src/index.js @@ -1,14 +1,10 @@ -'use strict' - -const { Buffer } = require('buffer') -const filter = require('it-filter') -const { - Adapter, - Key, - Errors -} = require('interface-datastore') -const { fromString: unint8arrayFromString } = require('uint8arrays') -const toBuffer = require('it-to-buffer') +import { Buffer } from 'buffer' +import filter from 'it-filter' +import { Key } from 'interface-datastore' +import { BaseDatastore } from 'datastore-core/base' +import * as Errors from 'datastore-core/errors' +import { fromString as unint8arrayFromString } from 'uint8arrays' +import toBuffer from 'it-to-buffer' /** * @typedef {import('interface-datastore').Pair} Pair @@ -24,7 +20,7 @@ const toBuffer = require('it-to-buffer') * Keys need to be sanitized before use, as they are written * to the file system as is. */ -class S3Datastore extends Adapter { +export class S3Datastore extends BaseDatastore { /** * @param {string} path * @param {import('./types').S3DatastoreOptions} opts @@ -80,7 +76,7 @@ class S3Datastore extends Adapter { Key: this._getFullKey(key), Body: Buffer.from(val, val.byteOffset, val.byteLength) }).promise() - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code === 'NoSuchBucket' && this.createIfMissing) { await this.opts.s3.createBucket({ Bucket: this.bucket @@ -125,7 +121,7 @@ class S3Datastore extends Adapter { // @ts-ignore s3 types define their own Blob as an empty interface return await toBuffer(data.Body) - } catch (err) { + } catch (/** @type {any} */ err) { if (err.statusCode === 404) { throw Errors.notFoundError(err) } @@ -145,7 +141,7 @@ class S3Datastore extends Adapter { Key: this._getFullKey(key) }).promise() return true - } catch (err) { + } catch (/** @type {any} */ err) { if (err.code === 'NotFound') { return false } @@ -164,7 +160,7 @@ class S3Datastore extends Adapter { Bucket: this.bucket, Key: this._getFullKey(key) }).promise() - } catch (err) { + } catch (/** @type {any} */ err) { throw Errors.dbDeleteFailedError(err) } } @@ -233,7 +229,7 @@ class S3Datastore extends Adapter { // recursively fetch keys yield * this._listKeys(params) } - } catch (err) { + } catch (/** @type {any} */ err) { throw new Error(err.code) } } @@ -252,7 +248,7 @@ class S3Datastore extends Adapter { } yield res - } catch (err) { + } catch (/** @type {any} */ err) { // key was deleted while we are iterating over the results if (err.statusCode !== 404) { throw err @@ -290,7 +286,7 @@ class S3Datastore extends Adapter { Bucket: this.bucket, Key: this.path }).promise() - } catch (err) { + } catch (/** @type {any} */ err) { if (err.statusCode !== 404) { throw Errors.dbOpenFailedError(err) } @@ -299,5 +295,3 @@ class S3Datastore extends Adapter { async close () {} } - -module.exports = S3Datastore diff --git a/test/index.spec.js b/test/index.spec.js index 61c5b66..f4f5b6b 100644 --- a/test/index.spec.js +++ b/test/index.spec.js @@ -1,33 +1,31 @@ 
 /* eslint-env mocha */
-'use strict'
 
-const { expect } = require('aegir/utils/chai')
+import { expect } from 'aegir/utils/chai.js'
+import { Buffer } from 'buffer'
+import sinon from 'sinon'
+import { Key } from 'interface-datastore'
+import S3 from 'aws-sdk/clients/s3.js'
+import { interfaceDatastoreTests } from 'interface-datastore-tests'
 
-const { Buffer } = require('buffer')
-const sinon = require('sinon')
-const Key = require('interface-datastore').Key
-const S3 = require('aws-sdk').S3
-
-const S3Mock = require('./utils/s3-mock')
-const { s3Resolve, s3Reject, S3Error } = S3Mock
-const S3Store = require('../src')
+import { s3Mock, s3Resolve, s3Reject, S3Error } from './utils/s3-mock.js'
+import { S3Datastore } from '../src/index.js'
 
 describe('S3Datastore', () => {
   describe('construction', () => {
     it('requires a bucket', () => {
       const s3 = new S3({ params: { Bucket: null } })
       expect(
-        () => new S3Store('.ipfs/datastore', { s3 })
+        () => new S3Datastore('.ipfs/datastore', { s3 })
       ).to.throw()
     })
 
     it('createIfMissing defaults to false', () => {
       const s3 = new S3({ params: { Bucket: 'test' } })
-      const store = new S3Store('.ipfs', { s3 })
+      const store = new S3Datastore('.ipfs', { s3 })
       expect(store.createIfMissing).to.equal(false)
     })
 
     it('createIfMissing can be set to true', () => {
       const s3 = new S3({ params: { Bucket: 'test' } })
-      const store = new S3Store('.ipfs', { s3, createIfMissing: true })
+      const store = new S3Datastore('.ipfs', { s3, createIfMissing: true })
       expect(store.createIfMissing).to.equal(true)
     })
   })
@@ -35,7 +33,7 @@ describe('S3Datastore', () => {
   describe('put', () => {
     it('should include the path in the key', () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'upload', (params) => {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
@@ -47,7 +45,7 @@
 
     it('should turn Uint8Arrays into Buffers', () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'upload', (params) => {
         expect(Buffer.isBuffer(params.Body)).to.be.true()
@@ -59,7 +57,7 @@
 
     it('should create the bucket when missing if createIfMissing is true', () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3, createIfMissing: true })
+      const store = new S3Datastore('.ipfs/datastore', { s3, createIfMissing: true })
 
       // 1. On the first call upload will fail with a NoSuchBucket error
      // 2. This should result in the `createBucket` standin being called
@@ -83,7 +81,7 @@
 
     it('should not create the bucket when missing if createIfMissing is false', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3, createIfMissing: false })
+      const store = new S3Datastore('.ipfs/datastore', { s3, createIfMissing: false })
 
       let bucketCreated = false
       sinon.replace(s3, 'upload', (params) => {
@@ -100,7 +98,7 @@
 
       try {
         await store.put(new Key('/z/key'), Buffer.from('test data'))
-      } catch (err) {
+      } catch (/** @type {any} */ err) {
         expect(bucketCreated).to.equal(false)
         expect(err).to.have.property('code', 'ERR_DB_WRITE_FAILED')
       }
@@ -108,7 +106,7 @@
 
     it('should return a standard error when the put fails', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'upload', (params) => {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
@@ -117,7 +115,7 @@
 
       try {
         await store.put(new Key('/z/key'), Buffer.from('test data'))
-      } catch (err) {
+      } catch (/** @type {any} */ err) {
         expect(err.code).to.equal('ERR_DB_WRITE_FAILED')
       }
     })
@@ -126,7 +124,7 @@
   describe('get', () => {
     it('should include the path in the fetch key', () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'getObject', (params) => {
         expect(params).to.have.property('Key', '.ipfs/datastore/z/key')
@@ -138,7 +136,7 @@
 
     it('should return a standard not found error code if the key isn\'t found', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'getObject', (params) => {
         expect(params).to.have.property('Key', '.ipfs/datastore/z/key')
@@ -147,7 +145,7 @@
 
       try {
         await store.get(new Key('/z/key'))
-      } catch (err) {
+      } catch (/** @type {any} */ err) {
         expect(err.code).to.equal('ERR_NOT_FOUND')
       }
     })
@@ -156,7 +154,7 @@
   describe('delete', () => {
     it('should return a standard delete error if deletion fails', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'deleteObject', (params) => {
         expect(params).to.have.property('Key', '.ipfs/datastore/z/key')
@@ -165,7 +163,7 @@
 
       try {
         await store.delete(new Key('/z/key'))
-      } catch (err) {
+      } catch (/** @type {any} */ err) {
         expect(err.code).to.equal('ERR_DB_DELETE_FAILED')
       }
     })
@@ -174,7 +172,7 @@
   describe('open', () => {
     it('should return a standard open error if the head request fails with an unknown error', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
-      const store = new S3Store('.ipfs/datastore', { s3 })
+      const store = new S3Datastore('.ipfs/datastore', { s3 })
 
       sinon.replace(s3, 'headObject', (_) => {
         return s3Reject(new Error('unknown'))
@@ -182,20 +180,20 @@
 
       try {
         await store.open()
-      } catch (err) {
+      } catch (/** @type {any} */ err) {
         expect(err.code).to.equal('ERR_DB_OPEN_FAILED')
       }
     })
   })
 
   describe('interface-datastore', () => {
-    require('interface-datastore-tests')({
+    interfaceDatastoreTests({
       setup () {
         const s3 = new S3({
           params: { Bucket: 'my-ipfs-bucket' }
         })
 
-        S3Mock(s3)
-        return new S3Store('.ipfs/datastore', { s3 })
+        s3Mock(s3)
+        return new S3Datastore('.ipfs/datastore', { s3 })
       },
       teardown () { }
diff --git a/test/utils/s3-mock.js b/test/utils/s3-mock.js
index 5eac029..a774f6c 100644
--- a/test/utils/s3-mock.js
+++ b/test/utils/s3-mock.js
@@ -1,11 +1,9 @@
-'use strict'
+import { expect } from 'aegir/utils/chai.js'
+import sinon from 'sinon'
+import { Buffer } from 'buffer'
+import AWS from 'aws-sdk'
 
-const { expect } = require('aegir/utils/chai')
-const sinon = require('sinon')
-const { Buffer } = require('buffer')
-const AWS = require('aws-sdk')
-
-class S3Error extends Error {
+export class S3Error extends Error {
   /**
    * @param {string} message
    * @param {number} [code]
@@ -21,7 +19,7 @@
  * @template T
  * @param {T} [res]
  */
-const s3Resolve = (res) => {
+export const s3Resolve = (res) => {
   const request = new AWS.Request(new AWS.Service(), 'op')
 
   sinon.replace(request, 'promise', () => {
@@ -35,7 +33,7 @@
  * @template T
  * @param {T} err
  */
-const s3Reject = (err) => {
+export const s3Reject = (err) => {
   const request = new AWS.Request(new AWS.Service(), 'op')
 
   sinon.replace(request, 'promise', () => {
@@ -51,7 +49,7 @@
  * @param {import('aws-sdk/clients/s3')} s3
  * @returns {void}
  */
-module.exports = function (s3) {
+export function s3Mock (s3) {
   const mocks = {}
   /** @type {Record<string, any>} */
   const storage = {}
@@ -158,7 +156,3 @@
     return s3Resolve({})
   })
 }
-
-module.exports.S3Error = S3Error
-module.exports.s3Resolve = s3Resolve
-module.exports.s3Reject = s3Reject
diff --git a/tsconfig.json b/tsconfig.json
index 493f8c3..376ad55 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,11 +1,10 @@
 {
   "extends": "aegir/src/config/tsconfig.aegir.json",
   "compilerOptions": {
-    "outDir": "dist"
+    "outDir": "types"
   },
   "include": [
     "src",
-    "test",
-    "examples"
+    "test"
   ]
 }