diff --git a/package-lock.json b/package-lock.json
index e86c024..25b90f2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,15 +1,18 @@
 {
   "name": "taibun",
-  "version": "0.0.1",
+  "version": "1.0.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "taibun",
-      "version": "0.0.1",
+      "version": "1.0.0",
       "license": "MIT",
       "devDependencies": {
         "jest": "^29.7.0"
+      },
+      "engines": {
+        "node": ">=18"
       }
     },
     "node_modules/@ampproject/remapping": {
@@ -1694,20 +1697,6 @@
       "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
       "dev": true
     },
-    "node_modules/fsevents": {
-      "version": "2.3.3",
-      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
-      "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
-      "dev": true,
-      "hasInstallScript": true,
-      "optional": true,
-      "os": [
-        "darwin"
-      ],
-      "engines": {
-        "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
-      }
-    },
     "node_modules/function-bind": {
       "version": "1.1.2",
       "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
diff --git a/package.json b/package.json
index b2ab00a..b713197 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "taibun",
-  "version": "0.0.1",
+  "version": "1.0.0",
   "description": "Taiwanese Hokkien Transliterator and Tokeniser",
   "main": "taibun/index.js",
   "scripts": {
@@ -10,8 +10,21 @@
     "type": "git",
     "url": "git+https://github.com/andreihar/taibun.js.git"
   },
+  "files": [
+    "taibun/**/*",
+    "LICENSE",
+    "README.md"
+  ],
   "keywords": [
-    "hokkien"
+    "taiwan",
+    "taiwanese",
+    "taigi",
+    "hokkien",
+    "romanization",
+    "transliteration",
+    "transliterator",
+    "tokenization",
+    "tokenizer"
   ],
   "author": "Andrei Harbachov",
   "license": "MIT",
diff --git a/taibun/index.js b/taibun/index.js
index 51d3026..469ab39 100644
--- a/taibun/index.js
+++ b/taibun/index.js
@@ -1,8 +1,17 @@
-const fs = require('fs');
-const path = require('path');
+let wordDict, tradDict;
+
+if (typeof window === 'undefined') {
+	// Node.js: read the JSON dictionaries from disk at load time
+	const fs = require('fs');
+	const path = require('path');
+	wordDict = JSON.parse(fs.readFileSync(path.join(__dirname, 'data/words.json'), 'utf8'));
+	tradDict = JSON.parse(fs.readFileSync(path.join(__dirname, 'data/simplified.json'), 'utf8'));
+} else {
+	// Browser: the bundler resolves these JSON requires at build time
+	wordDict = require('./data/words.json');
+	tradDict = require('./data/simplified.json');
+}
 
-const wordDict = JSON.parse(fs.readFileSync(path.join(__dirname, 'data/words.json'), 'utf8'));
-const tradDict = JSON.parse(fs.readFileSync(path.join(__dirname, 'data/simplified.json'), 'utf8'));
 const simplifiedDict = Object.entries(tradDict).reduce((acc, [k, v]) => ({ ...acc, [v]: k }), { '臺': '台' });
 
 // Helper to check if the character is a Chinese character
@@ -66,6 +75,9 @@
 
 	// Convert tokenised text into specified transliteration system
 	get(input) {
+		if (!input.trim()) {
+			return "";
+		}
 		let converted = new (require('./index.js').Tokeniser)().tokenise(toTraditional(input));
 		converted = this.toneSandhiPosition(converted).map(i => this.convertTokenised(i).trim()).join(' ').trim();
 		if (this.punctuation === 'format') {
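
Reviewer note: a minimal usage sketch of the new empty-input guard in `Converter.get`. It assumes the package entry point (`main: taibun/index.js`) exports `Converter`, which this diff does not show; the commented outputs are illustrative rather than taken from the test suite.

```js
// Hypothetical consumer code exercising the new guard in get().
const { Converter } = require('taibun');

const c = new Converter();
console.log(c.get(''));     // "" (empty input now returns early instead of tokenising)
console.log(c.get('   '));  // "" (whitespace-only input hits the same guard)
console.log(c.get('台灣')); // non-empty input follows the normal transliteration path
```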