diff --git a/.eslintrc b/.eslintrc
deleted file mode 100644
index 5641670c..00000000
--- a/.eslintrc
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "parser": "@babel/eslint-parser",
-  "parserOptions": {
-    "sourceType": "script",
-    "ecmaVersion": 2020,
-    "requireConfigFile": false
-  },
-  "env": {
-    "node": true,
-    "es6": true
-  },
-  "plugins": [
-    "no-only-tests"
-  ],
-  "rules": {
-    "curly": [2, "multi-line"],
-    "consistent-return": 0,
-    "quotes": [2, "single", { "avoidEscape": true, "allowTemplateLiterals": true }],
-    "semi": [2, "always"],
-    "strict": ["error", "safe"],
-    "no-const-assign": "error",
-    "no-undef": 2,
-    "no-underscore-dangle": 0,
-    "no-use-before-define": [2, "nofunc"],
-    "no-unused-vars": [2, { "vars": "all", "args": "none", "ignoreRestSiblings": true }],
-    "no-shadow": 2,
-    "keyword-spacing": "error",
-    "eol-last": "error",
-    "prefer-const": "error",
-    "no-only-tests/no-only-tests": "error",
-    "no-trailing-spaces": "error",
-    "space-before-blocks": "error",
-    "space-in-parens": "error"
-  }
-}
diff --git a/.eslintrc.js b/.eslintrc.js
new file mode 100644
index 00000000..05b30cf2
--- /dev/null
+++ b/.eslintrc.js
@@ -0,0 +1,66 @@
+'use strict';
+
+const eslintConfig = {
+  overrides: [],
+  'parser': '@babel/eslint-parser',
+  'parserOptions': {
+    'sourceType': 'script',
+    'ecmaVersion': 2020,
+    'requireConfigFile': false
+  },
+  'env': {
+    'node': true,
+    'es6': true
+  },
+  'plugins': [
+    'no-only-tests'
+  ],
+  'rules': {
+    'curly': [2, 'multi-line'],
+    'consistent-return': 0,
+    'quotes': [2, 'single', { 'avoidEscape': true, 'allowTemplateLiterals': true }],
+    'semi': [2, 'always'],
+    'strict': ['error', 'safe'],
+    'no-const-assign': 'error',
+    'no-undef': 2,
+    'no-underscore-dangle': 0,
+    'no-use-before-define': [2, 'nofunc'],
+    'no-unused-vars': [2, { 'vars': 'all', 'args': 'none', 'ignoreRestSiblings': true }],
+    'no-shadow': 2,
+    'keyword-spacing': 'error',
+    'eol-last': 'error',
+    'prefer-const': 'error',
+    'no-only-tests/no-only-tests': 'error',
+    'no-trailing-spaces': 'error',
+    'space-before-blocks': 'error',
+    'space-in-parens': 'error'
+  }
+};
+
+const tslintConfig = {
+  extends: [
+    'plugin:@typescript-eslint/recommended',
+  ],
+  parser: '@typescript-eslint/parser',
+  files: ['*.ts'],
+  plugins: [
+    '@typescript-eslint',
+    'no-only-tests',
+  ],
+  rules: {
+    ...eslintConfig.rules,
+    '@typescript-eslint/no-var-requires': 0,
+    '@typescript-eslint/no-use-before-define': ['error'],
+    strict: 0,
+    '@typescript-eslint/ban-ts-comment': ['warn'],
+    'no-shadow': 'off',
+    '@typescript-eslint/no-shadow': ['warn'],
+    'no-unused-vars': 'off',
+    '@typescript-eslint/no-unused-vars': 'warn',
+    '@typescript-eslint/no-explicit-any': 'off',
+  },
+};
+
+eslintConfig.overrides.push(tslintConfig);
+
+module.exports = eslintConfig;
diff --git a/.gitignore b/.gitignore
index 346d2c4c..f9ad7fac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,8 @@ test/models/photo.js*
 src/decorators.js*
 src/data_types.js*
 src/raw.js*
+
+# Logs
+logs
+*.log
+dist/
\ No newline at end of file
diff --git a/.husky/pre-commit b/.husky/pre-commit
new file mode 100755
index 00000000..7e154687
--- /dev/null
+++ b/.husky/pre-commit
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/husky.sh"
+
+npm run lint-staged
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 00000000..baf37623
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,35 @@
+{
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "type": "node",
+      "request": "launch",
+      "name": "Mocha Current File",
+      "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
+      "args": [
+        "--timeout",
+        "999999",
+        "--colors",
+        "${file}"
+      ],
+      "console": "integratedTerminal",
+      "internalConsoleOptions": "neverOpen",
+      "preLaunchTask": "npm: pretest",
+    },
+    {
+      "type": "node",
+      "request": "launch",
+      "name": "Mocha All",
+      "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
+      "args": [
+        "--timeout",
+        "999999",
+        "--colors",
+        "${workspaceFolder}/test"
+      ],
+      "console": "integratedTerminal",
+      "internalConsoleOptions": "neverOpen",
+      "preLaunchTask": "npm: pretest",
+    }
+  ]
+}
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 00000000..bca147be
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,16 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "type": "npm",
+      "script": "pretest",
+      "problemMatcher": [],
+      "label": "npm: pretest",
+      "detail": "tsc && ./test/prepare.sh",
+      "group": {
+        "kind": "build",
+        "isDefault": true
+      }
+    },
+  ]
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 28bbd67e..517abada 100644
--- a/package.json
+++ b/package.json
@@ -3,16 +3,22 @@
   "version": "2.10.3",
   "description": "JavaScript Object-relational mapping alchemy",
   "main": "index.js",
+  "browser": "dist/browser.js",
   "types": "index.d.ts",
   "files": [
+    "dist",
     "src",
     "index.js",
     "index.d.ts"
   ],
   "scripts": {
     "jsdoc": "rm -rf docs/api && jsdoc -c .jsdoc.json -d docs/api -t node_modules/@cara/minami",
+    "clean": "tsc -b --clean",
+    "lint-staged": "lint-staged",
     "prepack": "tsc",
-    "pretest": "tsc && ./test/prepare.sh",
+    "prepack:browser": "rm -rf dist && tsc -p tsconfig.browser.json",
+    "prepublishOnly": "npm run prepack && npm run prepack:browser",
+    "pretest": "npm run prepack && ./test/prepare.sh",
     "test": "./test/start.sh",
     "test:local": "./test/start.sh",
     "test:unit": "./test/start.sh unit",
@@ -21,12 +27,14 @@
     "test:mysql2": "./test/start.sh test/integration/mysql2.test.js",
     "test:postgres": "./test/start.sh test/integration/postgres.test.js",
     "test:sqlite": "./test/start.sh test/integration/sqlite.test.js",
+    "test:sqljs": "./test/start.sh test/integration/sqljs.test.js",
     "test:custom": "./test/start.sh test/integration/custom.test.js",
     "test:sqlcipher": "./test/start.sh test/integration/sqlcipher.test.js",
     "test:dts": "./test/start.sh dts",
     "test:coverage": "nyc ./test/start.sh && nyc report --reporter=lcov",
     "lint": "eslint ./",
-    "lint:fix": "eslint . --fix"
+    "lint:fix": "eslint . --fix",
+    "prepare": "husky install"
   },
   "repository": {
     "type": "git",
@@ -48,6 +56,11 @@
   "engines": {
     "node": ">= 12.0.0"
   },
+  "lint-staged": {
+    "*.{js,ts}": [
+      "eslint --no-ignore --fix"
+    ]
+  },
   "dependencies": {
     "dayjs": "^1.10.3",
     "debug": "^3.1.0",
@@ -56,12 +69,14 @@
     "pluralize": "^7.0.0",
     "reflect-metadata": "^0.1.13",
     "sqlstring": "^2.3.0",
+    "tslib": "^2.5.0",
     "validator": "^13.5.2"
   },
   "peerDependencies": {
     "mysql": "^2.17.1",
     "mysql2": "^2.3.0",
     "pg": "^8.5.1",
+    "sql.js": "^1.8.0",
     "sqlite3": "^5.0.2"
   },
   "peerDependenciesMeta": {
@@ -76,6 +91,9 @@
     },
     "sqlite3": {
       "optional": true
+    },
+    "sql.js": {
+      "optional": true
     }
   },
   "devDependencies": {
@@ -85,16 +103,22 @@
     "@journeyapps/sqlcipher": "^5.2.0",
     "@types/mocha": "^9.0.0",
     "@types/node": "^16.10.1",
+    "@types/sql.js": "^1.4.4",
+    "@typescript-eslint/eslint-plugin": "^5.59.2",
+    "@typescript-eslint/parser": "^5.59.2",
     "eslint": "^7.20.0",
     "eslint-plugin-no-only-tests": "^3.0.0",
     "expect.js": "^0.3.1",
+    "husky": "^8.0.3",
     "jsdoc": "^3.6.3",
+    "lint-staged": "^13.2.2",
     "mocha": "^8.2.1",
     "mysql": "^2.17.1",
     "mysql2": "^2.3.0",
     "nyc": "^15.1.0",
     "pg": "^8.5.1",
     "sinon": "^10.0.0",
+    "sql.js": "^1.8.0",
     "sqlite3": "^5.0.2",
     "typescript": "^4.6.2"
   }
diff --git a/src/bone.js b/src/bone.js
index 7e5944af..c0b08a4a 100644
--- a/src/bone.js
+++ b/src/bone.js
@@ -1495,7 +1495,7 @@ class Bone {
       }
     } else if (unset && affectedRows === instances.length) {
       // otherwise, use last insert id to generate bulk inserted ids
-      if (driver.type === 'sqlite') {
+      if (['sqlite', 'sqljs'].includes(driver.type)) {
         for (let i = instances.length - 1; i >= 0; i--) {
           instances[i][primaryKey] = insertId--;
         }
diff --git a/src/browser.ts b/src/browser.ts
new file mode 100644
index 00000000..daaa3a48
--- /dev/null
+++ b/src/browser.ts
@@ -0,0 +1,60 @@
+const Logger = require('./drivers/abstract/logger');
+const Spell = require('./spell');
+const Bone = require('./bone');
+const Collection = require('./collection');
+const { invokable: DataTypes, LENGTH_VARIANTS } = require('./data_types');
+const sequelize = require('./adapters/sequelize');
+const { heresql } = require('./utils/string');
+
+const Realm = require('./realm/base');
+const AbstractDriver = require('./drivers/abstract');
+const { isBone } = require('./utils');
+
+/**
+ * @typedef {Object} RawSql
+ * @property {boolean} __raw
+ * @property {string} value
+ * @property {string} type
+ */
+
+
+/**
+ * Connect models to database. Need to provide both connect options and models.
+ * @alias module:index.connect
+ * @param {Object} opts
+ * @param {string} opts.client - client name
+ * @param {string|Bone[]} opts.models - an array of models
+ * @returns {Pool} the connection pool in case we need to perform raw query
+ */
+export const connect = async function connect(opts) {
+  opts = { Bone, ...opts };
+  if (opts.Bone.driver) throw new Error('connected already');
+  const realm = new Realm(opts);
+  await realm.connect();
+  return realm;
+};
+
+export const disconnect = async function disconnect(realm, ...args) {
+  if (realm instanceof Realm && realm.connected) {
+    return await realm.disconnect(...args);
+  }
+};
+
+Object.assign(Realm.prototype, { DataTypes });
+export default Realm;
+
+export { Bone };
+export { Collection };
+export { DataTypes };
+export { Logger };
+export { Spell };
+export { sequelize };
+export { heresql };
+export * from './hint';
+export * from './decorators';
+export { AbstractDriver };
+export { default as Raw } from './raw';
+export { LENGTH_VARIANTS };
+export { isBone };
+
+// TODO: missing migrations and MYSQL, PG, SQLITE drivers
diff --git a/src/drivers/abstract/spellbook.js b/src/drivers/abstract/spellbook.js
index 521c97bf..5b2bc0c0 100644
--- a/src/drivers/abstract/spellbook.js
+++ b/src/drivers/abstract/spellbook.js
@@ -52,7 +52,7 @@ function formatSelectExpr(spell, values) {
     const list = map[qualifier];
     if (list) {
       for (const selectExpr of list) selects.add(selectExpr);
-    } else if (groups.length === 0 && Model.driver.type !== 'sqlite' && !isAggregate) {
+    } else if (groups.length === 0 && !['sqlite', 'sqljs'].includes(Model.driver.type) && !isAggregate) {
       selects.add(`${escapeId(qualifier)}.*`);
     }
   }
diff --git a/src/drivers/sqljs/index.ts b/src/drivers/sqljs/index.ts
new file mode 100644
index 00000000..212a90de
--- /dev/null
+++ b/src/drivers/sqljs/index.ts
@@ -0,0 +1,73 @@
+import dayjs from 'dayjs';
+import { performance } from 'perf_hooks';
+
+import SqliteDriver from '../sqlite';
+import { SqljsConnectionOptions, SqljsQueryQuery, SqljsQueryValues } from './interface';
+import { SqljsConnection } from './sqljs-connection';
+
+import { calculateDuration } from '../../utils';
+import { SpellMeta } from '../../spell';
+
+interface DriverOptions extends Omit<SqljsConnectionOptions, 'name'> {
+  database: string;
+}
+
+export default class SqljsDriver extends SqliteDriver {
+  constructor(opts: DriverOptions) {
+    super(opts);
+    this.type = 'sqljs';
+  }
+
+  type: string;
+  /**
+   * @override
+   */
+  // @ts-ignore
+  pool: SqljsConnection;
+
+  /**
+   * @override
+   */
+  // @ts-ignore
+  createPool(opts: DriverOptions) {
+    const { database, ...restOpts } = opts;
+    return new SqljsConnection({
+      ...restOpts,
+      name: database,
+    });
+  }
+
+  async getConnection() {
+    return await this.pool.getConnection();
+  }
+
+  async query(query: SqljsQueryQuery, values?: SqljsQueryValues, spell?: SpellMeta) {
+    const connection = await this.getConnection();
+
+    // sql.js does not support Date as parameterized value
+    if (Array.isArray(values)) {
+      values = values.map((entry) => {
+        if (entry instanceof Date) {
+          return dayjs(entry).format('YYYY-MM-DD HH:mm:ss.SSS Z');
+        }
+        return entry;
+      });
+    }
+
+    // @ts-ignore
+    const { logger } = this;
+    const logOpts = { ...spell, query };
+    const sql = logger.format(query, values, spell);
+    const start = performance.now();
+    let result;
+
+    try {
+      result = await connection.query(query, values, spell);
+    } catch (err) {
+      logger.logQueryError(err, sql, calculateDuration(start), logOpts);
+      throw err;
+    }
+    logger.tryLogQuery(sql, calculateDuration(start), logOpts, result);
+    return result;
+  }
+}
diff --git a/src/drivers/sqljs/interface.ts b/src/drivers/sqljs/interface.ts
new file mode 100644
index 00000000..05669e1e
--- /dev/null
+++ b/src/drivers/sqljs/interface.ts
@@ -0,0 +1,20 @@
+import type { Database } from 'sql.js';
+
+export type SqljsQueryQuery = Parameters<Database['exec']>[0];
+export type SqljsQueryValues = Parameters<Database['exec']>[1];
+
+export interface SqljsConnectionQueryResult {
+  fields: string[];
+  rows: any[];
+}
+
+export interface BaseConnectionOptions {
+  name: string;
+  version?: number;
+  logger: any;
+}
+
+export interface SqljsConnectionOptions extends BaseConnectionOptions {
+  data?: ArrayLike<number> | Buffer | null;
+  initSqlJs?: (options: Omit<SqljsConnectionOptions, 'initSqlJs'>) => Promise<Database>;
+}
diff --git a/src/drivers/sqljs/sqljs-connection.ts b/src/drivers/sqljs/sqljs-connection.ts
new file mode 100644
index 00000000..da19e9f0
--- /dev/null
+++ b/src/drivers/sqljs/sqljs-connection.ts
@@ -0,0 +1,144 @@
+import type { Database, QueryExecResult } from 'sql.js';
+
+import type { SqljsConnectionOptions, SqljsConnectionQueryResult, SqljsQueryQuery, SqljsQueryValues } from './interface';
+import type { SpellMeta } from '../../spell';
+
+/**
+ * assemble and convert the query result
+ */
+function dataConvert(result: QueryExecResult) {
+  if (!result) return result;
+
+  const { columns, values } = result;
+
+  return {
+    fields: columns,
+    rows: values.map((val) => {
+      return columns.reduce((prev, col, index) => {
+        prev[col] = val[index];
+        return prev;
+      }, {});
+    }),
+  };
+}
+
+function normalizeResult(res: QueryExecResult[]): SqljsConnectionQueryResult {
+  if (res?.[0]) {
+    const hydratedData = dataConvert(res[0]);
+    return hydratedData;
+  }
+  // empty result
+  return {
+    rows: [],
+    fields: [],
+  };
+}
+
+// SELECT users.id AS "users:id", ...
+// => [ { users: { id, ... } } ]
+function nest(rows, fields, spell) {
+  const { Model } = spell;
+  const { tableAlias } = Model;
+  const results: any[] = [];
+
+  for (const row of rows) {
+    const result = {};
+    const qualified = Object.keys(row).some(entry => entry.includes(':'));
+    for (const key in row) {
+      const parts = key.split(':');
+      const [qualifier, column] = qualified
+        ? (parts.length > 1 ? parts : ['', key])
+        : [Model.attributeMap.hasOwnProperty(key) ? tableAlias : '', key];
+      const obj = result[qualifier] || (result[qualifier] = {});
+      obj[column] = row[key];
+    }
+    results.push(result);
+  }
+
+  return { rows: results, fields };
+}
+
+async function defaultInitSqlJs(options: SqljsConnectionOptions): Promise<Database> {
+  const { default: initSqlJs } = await import('sql.js');
+  const SQL = await initSqlJs();
+
+  const { data = null } = options;
+  const database = new SQL.Database(data);
+
+  return database;
+}
+
+export class SqljsConnection {
+  constructor(private options: SqljsConnectionOptions) {}
+
+  private database: Database | undefined = undefined;
+
+  async getConnection() {
+    if (this.database) {
+      return this;
+    }
+
+    const { initSqlJs = defaultInitSqlJs } = this.options;
+
+    // Create a database
+    this.database = await initSqlJs(this.options);
+
+    return this;
+  }
+
+  release() {
+    // noop
+  }
+
+  async close() {
+    if (!this.database) {
+      console.warn('close: database is null');
+      return true;
+    }
+
+    this.database = undefined;
+  }
+
+  async query(query: string | { sql: string, nestTables?: boolean}, values?: SqljsQueryValues, spell?: SpellMeta) {
+    const { sql, nestTables } = typeof query === 'string'
+      ? { sql: query, nestTables: false }
+      : query;
+
+    if (/^(?:pragma|select)/i.test(sql)) {
+      const result = await this._executeSQL(sql, values);
+      if (nestTables) return nest(result.rows, result.fields, spell);
+      return result;
+    }
+
+    return await this._runSQL(sql, values);
+  }
+
+  async _runSQL(query: SqljsQueryQuery, values?: SqljsQueryValues) {
+    if (!this.database) {
+      throw new Error('database not opened!');
+    }
+
+    this.database.run(query, values);
+
+    const affectedRows = this.database.getRowsModified();
+
+    // mimic the behavior of node-sqlite3
+    const lastInsertRowRet = await this._executeSQL(
+      'SELECT last_insert_rowid() as lastId;',
+    );
+    const lastId = lastInsertRowRet?.rows?.[0]?.lastId;
+    return {
+      insertId: lastId,
+      affectedRows,
+    };
+  }
+
+  async _executeSQL(query: SqljsQueryQuery, values?: SqljsQueryValues) {
+    if (!this.database) {
+      throw new Error('database not opened!');
+    }
+
+    const res = this.database.exec(query, values);
+    return normalizeResult(res);
+  }
+}
diff --git a/src/realm.js b/src/realm/base.js
similarity index 80%
rename from src/realm.js
rename to src/realm/base.js
index d7fb11d9..c00a5bb1 100644
--- a/src/realm.js
+++ b/src/realm/base.js
@@ -1,15 +1,12 @@
 'use strict';
 
-const fs = require('fs').promises;
-const path = require('path');
-
-const Bone = require('./bone');
-const { findDriver, AbstractDriver } = require('./drivers');
-const { camelCase } = require('./utils/string');
-const { isBone } = require('./utils');
-const sequelize = require('./adapters/sequelize');
-const Raw = require('./raw').default;
-const { LEGACY_TIMESTAMP_MAP } = require('./constants');
+const Bone = require('../bone');
+const AbstractDriver = require('../drivers/abstract');
+const { camelCase } = require('../utils/string');
+const { isBone } = require('../utils');
+const sequelize = require('../adapters/sequelize');
+const Raw = require('../raw').default;
+const { LEGACY_TIMESTAMP_MAP } = require('../constants');
 
 const SequelizeBone = sequelize(Bone);
 
@@ -20,30 +17,6 @@
  * @property {Array} fields
  */
 
-/**
- * find models in directory
- * @param {string} dir
- * @returns {Array.<Bone>}
- */
-async function findModels(dir) {
-  if (!dir || typeof dir !== 'string') {
-    throw new Error(`Unexpected models dir (${dir})`);
-  }
-  const entries = await fs.readdir(dir, { withFileTypes: true });
-  const models = [];
-
-  for (const entry of entries) {
-    const extname = path.extname(entry.name);
-    if (entry.isFile() && ['.js', '.mjs'].includes(extname)) {
-      const exports = require(path.join(dir, entry.name));
-      const model = exports.__esModule ? exports.default : exports;
-      if (isBone(model)) models.push(model);
-    }
-  }
-
-  return models;
-}
-
 /**
  * construct model attributes entirely from column definitions
  * @param {Bone} model
@@ -105,7 +78,7 @@
 
 const rReplacementKey = /\s:(\w+)\b/g;
 
-class Realm {
+class BaseRealm {
   constructor(opts = {}) {
     const {
       dialect = 'mysql',
@@ -122,7 +95,8 @@
      for (const model of opts.models) models[model.name] = model;
     }
 
-    const DriverClass = CustomDriver && CustomDriver.prototype instanceof AbstractDriver? CustomDriver : findDriver(dialect);
+    const DriverClass = this.getDriverClass(CustomDriver, dialect);
+
     const driver = new DriverClass({
       client,
       database,
@@ -133,7 +107,7 @@
       client,
       dialect: driver.dialect,
       database,
-      driver: CustomDriver,
+      driver: DriverClass,
       ...restOpts,
       define: { underscored: true, ...opts.define },
     };
@@ -144,6 +118,13 @@
     this.options = Spine.options = options;
   }
 
+  getDriverClass(CustomDriver, dialect) {
+    if (CustomDriver && CustomDriver.prototype instanceof AbstractDriver) {
+      return CustomDriver;
+    }
+    throw new Error('DriverClass must be a subclass of AbstractDriver');
+  }
+
   define(name, attributes, opts = {}, descriptors = {}) {
     const Model = class extends this.Bone {
       static name = name;
@@ -153,15 +134,12 @@
     return Model;
   }
 
-  async connect() {
-    const { models: dir } = this.options;
+  async getModels() {
+    return Object.values(this.models);
+  }
 
-    let models;
-    if (dir) {
-      models = Array.isArray(dir) ? dir : (await findModels(dir));
-    } else {
-      models = Object.values(this.models);
-    }
+  async connect() {
+    let models = await this.getModels();
 
     for (const model of models) this.Bone.models[model.name] = model;
     // models could be connected already if cached
@@ -261,7 +239,7 @@
 
   // instance.raw
   raw(sql) {
-    return Realm.raw(sql);
+    return BaseRealm.raw(sql);
   }
 
   /**
@@ -277,4 +255,4 @@
   static SequelizeBone = SequelizeBone;
 }
 
-module.exports = Realm;
+module.exports = BaseRealm;
diff --git a/src/realm/index.js b/src/realm/index.js
new file mode 100644
index 00000000..75886a86
--- /dev/null
+++ b/src/realm/index.js
@@ -0,0 +1,67 @@
+'use strict';
+
+const fs = require('fs').promises;
+const path = require('path');
+
+const { findDriver, AbstractDriver } = require('../drivers');
+const { isBone } = require('../utils');
+
+const BaseRealm = require('./base');
+
+/**
+ *
+ * @typedef {Object} QueryResult
+ * @property {Array} rows
+ * @property {Array} fields
+ */
+
+/**
+ * find models in directory
+ * @param {string} dir
+ * @returns {Array.<Bone>}
+ */
+async function findModels(dir) {
+  if (!dir || typeof dir !== 'string') {
+    throw new Error(`Unexpected models dir (${dir})`);
+  }
+  const entries = await fs.readdir(dir, { withFileTypes: true });
+  const models = [];
+
+  for (const entry of entries) {
+    const extname = path.extname(entry.name);
+    if (entry.isFile() && ['.js', '.mjs'].includes(extname)) {
+      const model = require(path.join(dir, entry.name));
+      if (isBone(model)) models.push(model);
+    }
+  }
+
+  return models;
+}
+
+class Realm extends BaseRealm {
+  /**
+   * @override
+   */
+  getDriverClass(CustomDriver, dialect) {
+    return CustomDriver && CustomDriver.prototype instanceof AbstractDriver
+      ? CustomDriver
+      : findDriver(dialect);
+  }
+
+  /**
+   * @override
+   */
+  async getModels() {
+    const { models: dir } = this.options;
+
+    let models;
+    if (dir) {
+      models = Array.isArray(dir) ? dir : (await findModels(dir));
+    } else {
+      models = await super.getModels();
+    }
+    return models;
+  }
+}
+
+module.exports = Realm;
diff --git a/test/integration/sqljs.test.js b/test/integration/sqljs.test.js
new file mode 100644
index 00000000..2ddbf025
--- /dev/null
+++ b/test/integration/sqljs.test.js
@@ -0,0 +1,139 @@
+'use strict';
+
+const assert = require('assert').strict;
+const path = require('path');
+const fs = require('fs').promises;
+const sinon = require('sinon');
+
+const { raw, Bone } = require('../..');
+const Realm = require('../../src/realm/base');
+const { checkDefinitions } = require('./helpers');
+const { default: SqljsDriver } = require('../../src/drivers/sqljs');
+
+async function migrate(dbDriver) {
+  let content = await fs.readFile(path.resolve(__dirname, '../dumpfile.sql'), 'utf-8');
+  content = content
+    .replace(/bigint\(\d+\) AUTO_INCREMENT/ig, 'INTEGER')
+    .replace(/tinyint\(1\) DEFAULT 0/ig, 'boolean DEFAULT false');
+  await dbDriver.query(content);
+}
+
+describe('integration tests for sqljs', () => {
+  before(async function () {
+    const modelDir = path.resolve(__dirname, '../models');
+    const files = await fs.readdir(modelDir);
+    const models = files
+      .filter(file => path.extname(file) === '.js')
+      .map(file => {
+        const model = require(path.resolve(modelDir, file));
+        return model?.default || model;
+      });
+
+    const realm = new Realm({
+      database: 'sqljs',
+      driver: SqljsDriver,
+      models,
+    });
+    // migrate
+    await migrate(realm.driver);
+    await realm.connect();
+  });
+
+  describe('=> Table definitions (sqljs)', () => {
+    beforeEach(async () => {
+      await Bone.driver.dropTable('notes');
+    });
+
+    after(async () => {
+      await Bone.driver.dropTable('notes');
+    });
+
+    it('should be able to create table with INTEGER PRIMARY KEY', async () => {
+      const { INTEGER } = Bone.DataTypes;
+      class Note extends Bone { }
+      Note.init({
+        id: { type: INTEGER, primaryKey: true },
+        public: { type: INTEGER },
+      });
+
+      await Note.sync();
+      await checkDefinitions('notes', {
+        id: { dataType: 'integer', primaryKey: true },
+        public: { dataType: 'integer', primaryKey: false },
+      });
+    });
+
+    it('should be able to create table with BIGINT(actual: INTEGER) PRIMARY KEY', async () => {
+      const { BIGINT, INTEGER } = Bone.DataTypes;
+      class Note extends Bone { }
+      Note.init({
+        id: { type: BIGINT, primaryKey: true },
+        public: { type: INTEGER },
+      });
+
+      await Note.sync();
+      await checkDefinitions('notes', {
+        id: { dataType: 'integer', primaryKey: true },
+        public: { dataType: 'integer', primaryKey: false },
+      });
+    });
+  });
+
+  describe('=> upsert (sqljs)', function () {
+    const Post = require('../models/post');
+    const User = require('../models/user');
+
+    it('upsert', function () {
+      assert.equal(
+        new Post({ id: 1, title: 'New Post', createdAt: raw('CURRENT_TIMESTAMP()'), updatedAt: raw('CURRENT_TIMESTAMP()') }).upsert().toString(),
+        `INSERT INTO "articles" ("id", "title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES (1, 'New Post', false, 0, CURRENT_TIMESTAMP(), CURRENT_TIMESTAMP()) ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
+      );
+      const date = new Date(2017, 11, 12);
+      const fakeDate = date.getTime();
+      sinon.useFakeTimers(fakeDate);
+      assert.equal(
+        new Post({ id: 1, title: 'New Post', createdAt: date, updatedAt: date }).upsert().toString(),
+        `INSERT INTO "articles" ("id", "title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES (1, 'New Post', false, 0, '2017-12-12 00:00:00.000', '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
      );
+      assert.equal(
+        new Post({ title: 'New Post', createdAt: date, updatedAt: date }).upsert().toString(),
+        `INSERT INTO "articles" ("title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES ('New Post', false, 0, '2017-12-12 00:00:00.000', '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
+      );
+      // default set createdAt
+      assert.equal(
+        new Post({ id: 1, title: 'New Post' }).upsert().toString(),
+        `INSERT INTO "articles" ("id", "title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES (1, 'New Post', false, 0, '2017-12-12 00:00:00.000', '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
+      );
+
+      assert.equal(
+        Post.upsert({ title: 'New Post' }).toSqlString(),
+        `INSERT INTO "articles" ("title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES ('New Post', false, 0, '2017-12-12 00:00:00.000', '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
+      );
+
+      assert.equal(
+        Post.upsert({ title: 'New Post', id: 1 }).toSqlString(),
+        `INSERT INTO "articles" ("id", "title", "is_private", "word_count", "gmt_create", "gmt_modified") VALUES (1, 'New Post', false, 0, '2017-12-12 00:00:00.000', '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "title"=EXCLUDED."title", "is_private"=EXCLUDED."is_private", "word_count"=EXCLUDED."word_count", "gmt_modified"=EXCLUDED."gmt_modified"`
+      );
+
+      assert.equal(
+        User.upsert({ email: 'dk@souls.com', nickname: 'Yhorm' }).toSqlString(),
+        `INSERT INTO "users" ("email", "nickname", "status", "level", "gmt_create") VALUES ('dk@souls.com', 'Yhorm', 1, 1, '2017-12-12 00:00:00.000') ON CONFLICT ("email") DO UPDATE SET "email"=EXCLUDED."email", "nickname"=EXCLUDED."nickname", "status"=EXCLUDED."status", "level"=EXCLUDED."level"`
+      );
+
+      assert.equal(
+        User.upsert({ email: 'dk@souls.com', nickname: 'Yhorm', id: 1 }).toSqlString(),
+        `INSERT INTO "users" ("id", "email", "nickname", "status", "level", "gmt_create") VALUES (1, 'dk@souls.com', 'Yhorm', 1, 1, '2017-12-12 00:00:00.000') ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "email"=EXCLUDED."email", "nickname"=EXCLUDED."nickname", "status"=EXCLUDED."status", "level"=EXCLUDED."level"`
+      );
+    });
+  });
+
+  describe('=> suites for (sqljs)', () => {
+    // ordering issue: string.test must run first
+    require('./suite/string.test');
+    require('./suite/querying.test');
+    require('./suite/associations.test');
+    require('./suite/data_types.test');
+    require('./suite/definitions.test');
+    require('./suite/migrations.test');
+  });
+});
diff --git a/test/integration/suite/data_types.test.js b/test/integration/suite/data_types.test.js
index 16e921c4..ef9e9451 100644
--- a/test/integration/suite/data_types.test.js
+++ b/test/integration/suite/data_types.test.js
@@ -232,7 +232,7 @@ describe('=> Data Types - INTEGER', function() {
         return true;
       }, /invalid integer/i);
     }
-    if (Bone.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Bone.driver.type)) {
       await assert.doesNotReject(async () => {
         const note1 = await Note.create({ word_count: 'foo' });
         assert.equal(note1.word_count, 'foo');
@@ -274,7 +274,7 @@ describe('=> Data types - DATE', function() {
       assert.equal(result.length, 1);
     });
 
-    if (Bone.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Bone.driver.type)) {
       await assert.doesNotReject(async function() {
         await Note.where({ createdAt: 'invalid date' });
         return true;
@@ -331,7 +331,7 @@ describe('=> Data types - DATEONLY', function() {
       assert.equal(result.length, 1);
     });
 
-    if (Bone.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Bone.driver.type)) {
       await assert.doesNotReject(async function() {
         await Note.where({ createdAt: 'invalid date' });
         return true;
diff --git a/test/integration/suite/definitions.test.js b/test/integration/suite/definitions.test.js
index ae0d2a78..d18ba2e5 100644
--- a/test/integration/suite/definitions.test.js
+++ b/test/integration/suite/definitions.test.js
@@ -31,7 +31,7 @@ describe('=> Table definitions', () => {
 
   it('should be able to create table with unique column', async () => {
     // sqlite PRAGMA table_info can't get columns' constraint type(unique or not)
-    if (Bone.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Bone.driver.type)) {
       const querySpy = sinon.spy(Bone.driver, 'query');
       await Bone.driver.createTable('notes', {
         title: { type: STRING, allowNull: false },
diff --git a/test/integration/suite/string.test.js b/test/integration/suite/string.test.js
index dabbc62a..32f4fac6 100644
--- a/test/integration/suite/string.test.js
+++ b/test/integration/suite/string.test.js
@@ -20,7 +20,7 @@ describe('=> Concat', function() {
 
   it('concat single', async function() {
     let result;
-    if (Book.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Book.driver.type)) {
       result = await Book.select('printf("%s%s", isbn, name) as fullname').where('price=?', 21);
     } else {
       result = await Book.select('CONCAT(isbn ,name) as fullname').where('price=?', 21);
@@ -31,7 +31,7 @@ describe('=> Concat', function() {
 
   it('concat more than one', async function() {
     let result;
-    if (Book.driver.type === 'sqlite') {
+    if (['sqlite', 'sqljs'].includes(Book.driver.type)) {
       // sqlite ||
       result = await Book.select('printf("%s%s", isbn, name) as fullname').order('price');
     } else {
diff --git a/test/start.sh b/test/start.sh
index 68f35ae3..c5cff924 100755
--- a/test/start.sh
+++ b/test/start.sh
@@ -8,8 +8,8 @@ function run {
     args=("${args[@]:1}");
   fi
   echo "";
-  printf '"%s" ' "${args[@]}" | xargs echo "> DEBUG=leoric mocha --exit --timeout 5000 ${file}";
-  printf '"%s" ' "${args[@]}" | DEBUG=leoric NODE_OPTIONS=--enable-source-maps xargs mocha --exit --timeout 5000 ${file} || exit $?;
+  printf '"%s" ' "${args[@]}" | xargs echo "> DEBUG=leoric mocha -R dot --exit --timeout 5000 ${file}";
+  printf '"%s" ' "${args[@]}" | DEBUG=leoric NODE_OPTIONS=--enable-source-maps xargs mocha -R dot --exit --timeout 5000 ${file} || exit $?;
 }
 
 ##
diff --git a/test/types/basics.test.ts b/test/types/basics.test.ts
index e743503a..f06ecd8a 100644
--- a/test/types/basics.test.ts
+++ b/test/types/basics.test.ts
@@ -436,7 +436,7 @@ describe('=> Basics (TypeScript)', function() {
   });
 
   it('spell.delete()', async function() {
-    const [ post, post2 ] = await Post.bulkCreate([
+    await Post.bulkCreate([
       { title: 'Leah', wordCount: 20 },
       { title: 'Cain', wordCount: 10 },
     ]);
diff --git a/test/types/custom_driver.test.ts b/test/types/custom_driver.test.ts
index 7f10fd70..6662950c 100644
--- a/test/types/custom_driver.test.ts
+++ b/test/types/custom_driver.test.ts
@@ -38,7 +38,7 @@ class MySpellbook extends SqliteDriver.Spellbook {
   }
 
   formatUpdate(spell: SpellMeta): SpellBookFormatResult {
-    const a = super.formatDelete(spell);
+    // const a = super.formatDelete(spell);
     const { Model, sets, whereConditions } = spell;
     const { shardingKey } = Model;
 
diff --git a/test/types/sequelize.test.ts b/test/types/sequelize.test.ts
index 53073feb..be7765a0 100644
--- a/test/types/sequelize.test.ts
+++ b/test/types/sequelize.test.ts
@@ -1393,7 +1393,7 @@ describe('=> sequelize (TypeScript)', function() {
   });
 
   it('spell.delete()', async function() {
-    const [ post, post2 ] = await Post.bulkCreate([
+    await Post.bulkCreate([
       { title: 'Leah', wordCount: 20 },
       { title: 'Cain', wordCount: 10 },
     ]);
diff --git a/test/unit/drivers/sqljs/index.test.js b/test/unit/drivers/sqljs/index.test.js
new file mode 100644
index 00000000..9af59f86
--- /dev/null
+++ b/test/unit/drivers/sqljs/index.test.js
@@ -0,0 +1,214 @@
+'use strict';
+
+const assert = require('assert').strict;
+const path = require('path');
+const fs = require('fs').promises;
+const dayjs = require('dayjs');
+
+const { heresql } = require('../../../../src/utils/string');
+const { default: SqlJSDriver } = require('../../../../src/drivers/sqljs/index.js');
+
+const { INTEGER, BIGINT, STRING, DATE, BOOLEAN, JSONB } = SqlJSDriver.DataTypes;
+
+const options = {
+  database: '/tmp/leoric.sqljs',
+  logger: console,
+};
+
+const driver = new SqlJSDriver(options);
+
+async function migrate(dbDriver) {
+  let content = await fs.readFile(path.resolve(__dirname, '../../../dumpfile.sql'), 'utf-8');
+  content = content
+    .replace(/bigint\(\d+\) AUTO_INCREMENT/ig, 'INTEGER')
+    .replace(/tinyint\(1\) DEFAULT 0/ig, 'boolean DEFAULT false');
+  await dbDriver.query(content);
+}
+
+describe('=> sql.js driver', () => {
+  before(async () => {
+    await migrate(driver);
+  });
+
+  it('dialect', () => {
+    assert.equal(driver.dialect, 'sqljs');
+  });
+
+  it('driver.logger.logQuery', async () => {
+    const result = [];
+    const driver2 = new SqlJSDriver({
+      ...options,
+      logger(sql, duration, opts, res) {
+        result.push([ sql, duration, opts, res ]);
+      },
+    });
+    await migrate(driver2);
+    await driver2.query('SELECT ?, ? FROM users WHERE email = ? AND status = ?', ['id', 'nickname', 'yhorm@giant.com', 1]);
+    // index 0 is the migration query
+    const [ sql, duration, opts, res ] = result[1];
+    assert.equal(sql, "SELECT 'id', 'nickname' FROM users WHERE email = 'yhorm@giant.com' AND status = 1");
+    assert.ok(duration >= 0);
+    assert.ok(res);
+    assert.ok(opts);
+    assert.equal(opts.query, 'SELECT ?, ? FROM users WHERE email = ? AND status = ?');
+  });
+
+  it('driver.logger.logQueryError', async () => {
+    const result = [];
+    const driver2 = new SqlJSDriver({
+      ...options,
+      logger: {
+        logQueryError(err, sql, duration, opts) {
+          result.push([ err, sql, duration, opts ]);
+        },
+      },
+    });
+    await migrate(driver2);
+    await assert.rejects(async () => await driver2.query('SELECT x, ? FROM users WHERE email = ? AND status = ?', ['nickname', 'yhorm@giant.com', 1]));
+    const [ err, sql, duration, opts ] = result[0];
+    assert.equal(sql, "SELECT x, 'nickname' FROM users WHERE email = 'yhorm@giant.com' AND status = 1");
+    assert.ok(duration >= 0);
+    assert.ok(err);
+    assert(/no such column/.test(err.message));
+    assert.ok(opts);
+    assert.equal(opts.query, 'SELECT x, ? FROM users WHERE email = ? AND status = ?');
+  });
+
+  it('driver.querySchemaInfo()', async () => {
+    const schemaInfo = await driver.querySchemaInfo(null, 'articles');
+    assert.ok(schemaInfo.articles);
+    const columns = schemaInfo.articles;
+    const props = [
+      'columnName', 'columnType', 'dataType',
+      'defaultValue',
+      'allowNull',
+    ];
+    for (const column of columns) {
+      for (const prop of props) assert.ok(column.hasOwnProperty(prop));
+    }
+
+    const columnMap = columns.reduce((result, column) => {
+      result[column.columnName] = column;
+      return result;
+    }, {});
+    assert.equal(columnMap.title.columnType, 'varchar(1000)');
+    assert.equal(columnMap.is_private.columnType, 'boolean');
+
+    assert.equal(columnMap.gmt_create.datetimePrecision, 3);
+    assert.equal(columnMap.gmt_modified.datetimePrecision, 3);
+  });
+
+  it('driver.createTable(table, definitions)', async () => {
+    await driver.dropTable('notes');
+    await driver.createTable('notes', {
+      id: { type: BIGINT, primaryKey: true, autoIncrement: true },
+      public: { type: INTEGER },
+      has_image: { type: BOOLEAN, defaultValue: false },
+    });
+  });
+
+  it('driver.truncateTable(table)', async () => {
+    await driver.dropTable('notes');
+    await driver.createTable('notes', {
+      id: { type: BIGINT, primaryKey: true, autoIncrement: true },
+      title: { type: STRING, allowNull: false },
+    });
+    await driver.query(`INSERT INTO notes (id, title) VALUES (42, 'Untitled')`);
+    assert.equal((await driver.query('SELECT count(*) AS count FROM notes')).rows[0].count, 1);
+    await driver.truncateTable('notes');
+    assert.equal((await driver.query('SELECT count(*) AS count FROM notes')).rows[0].count, 0);
+  });
+
+  it('driver.alterTable(table, changes)', async function() {
+    await driver.dropTable('notes');
+    await driver.createTable('notes', {
+      id: { type: BIGINT, primaryKey: true, autoIncrement: true },
+      title: { type: STRING, allowNull: false },
+    });
+    await driver.alterTable('notes', {
+      params: { type: JSONB },
+    });
+    const result = await driver.describeTable('notes');
+    assert.deepEqual(result.params, {
+      columnName: 'params',
+      columnType: 'json',
+      dataType: 'json',
+      allowNull: true,
+      defaultValue: null,
+      primaryKey: false,
+      datetimePrecision: null,
+    });
+  });
+
+  it('driver.alterTable(table, changes) should not break table', async function() {
+    await driver.dropTable('notes');
+    await driver.createTable('notes', {
+      id: { type: BIGINT, primaryKey: true, autoIncrement: true },
+      title: { type: new STRING(255) },
+    });
+    await driver.query('INSERT INTO notes (title) VALUES (NULL)');
+    await assert.rejects(async function() {
+      await driver.alterTable('notes', {
+        title: { type: new STRING(127), allowNull: false, modify: true },
+      });
+    }, /NOT NULL/);
+    // should rollback if failed to alter table
+    const tableInfo = await driver.describeTable('notes');
+    assert.deepEqual(tableInfo.title, {
+      columnName: 'title',
+      columnType: 'varchar(255)',
+      dataType: 'varchar',
+      allowNull: true,
+      defaultValue: null,
+      primaryKey: false,
+      datetimePrecision: null,
+    });
+    const result = await driver.query('SELECT * FROM notes');
+    assert.equal(result.rows.length, 1);
+  });
+});
+
+describe('=> sqljs driver.query()', () => {
+  beforeEach(async () => {
+    await driver.dropTable('notes');
+  });
+
+  it('should handle timestamp correctly', async () => {
+    await driver.createTable('notes', { title: STRING, createdAt: DATE });
+    const createdAt = new Date();
+    await driver.query('INSERT INTO notes (title, created_at) VALUES (?, ?)', [
+      'Leah', createdAt,
+    ]);
+    const {
+      rows: [
+        { created_at }
+      ]
+    } = await driver.query(heresql(`
+      SELECT datetime(created_at, 'localtime') AS created_at FROM notes
+    `));
+    assert.equal(created_at, dayjs(createdAt).format('YYYY-MM-DD HH:mm:ss'));
+  });
+
+  it('should handle boolean correctly', async () => {
+    await driver.createTable('notes', { title: STRING, isPrivate: BOOLEAN });
+    await driver.query('INSERT INTO notes (title, is_private) VALUES (?, ?)', [
+      'Leah', true,
+    ]);
+    const {
+      rows: [
+        { is_private }
+      ]
+    } = await driver.query('SELECT is_private FROM notes');
+    assert.equal(is_private, 1);
+  });
+
+  it('should support async stack trace', async function() {
+    await assert.rejects(async function() {
+      await driver.query('SELECT * FROM missing');
+    }, function(err) {
+      assert(err instanceof Error);
+      assert(/no such table/i.test(err.message));
+      return err.stack.includes(path.basename(__filename));
+    });
+  });
+});
diff --git a/tsconfig.browser.json b/tsconfig.browser.json
new file mode 100644
index 00000000..dd87ad3d
--- /dev/null
+++ b/tsconfig.browser.json
@@ -0,0 +1,13 @@
+{
+  "extends": "./tsconfig.json",
+  "compilerOptions": {
+    "target": "ES5",
+    "module": "ESNext",
+    "outDir": "dist",
+    "declaration": true,
+    "allowJs": true,
+  },
+  "include": [
+    "./src",
+  ]
+}
\ No newline at end of file
diff --git a/tsconfig.json b/tsconfig.json
index 808ffb85..c5e0b2ef 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,12 +1,21 @@
 {
   "compilerOptions": {
     "target": "es2018",
-    "moduleResolution": "Node",
     "module": "CommonJS",
+    "moduleResolution": "Node",
     "experimentalDecorators": true,
     "emitDecoratorMetadata": true,
     "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
     "sourceMap": true,
-    "strictNullChecks": true
-  }
+    "noUnusedLocals": true,
+    "strictNullChecks": true,
+    "declaration": false,
+    "downlevelIteration": true,
+    "importHelpers": true,
+  },
+  "exclude": [
+    "dist",
+    "Readme.md"
+  ]
 }