author | Joel Kronqvist <joel.h.kronqvist@gmail.com> | 2022-03-05 19:02:27 +0200
---|---|---
committer | Joel Kronqvist <joel.h.kronqvist@gmail.com> | 2022-03-05 19:02:27 +0200
commit | 5d309ff52cd399a6b71968a6b9a70c8ac0b98981 (patch) |
tree | 360f7eb50f956e2367ef38fa1fc6ac7ac5258042 /node_modules/mysql2/lib/parsers |
parent | b500a50f1b97d93c98b36ed9a980f8188d648147 (diff) |
download | LYLLRuoka-5d309ff52cd399a6b71968a6b9a70c8ac0b98981.tar.gz, LYLLRuoka-5d309ff52cd399a6b71968a6b9a70c8ac0b98981.zip |
Added node_modules for the updating to work properly.
Diffstat (limited to 'node_modules/mysql2/lib/parsers')
-rw-r--r-- | node_modules/mysql2/lib/parsers/binary_parser.js | 186
-rw-r--r-- | node_modules/mysql2/lib/parsers/parser_cache.js | 53
-rw-r--r-- | node_modules/mysql2/lib/parsers/string.js | 29
-rw-r--r-- | node_modules/mysql2/lib/parsers/text_parser.js | 202
4 files changed, 470 insertions, 0 deletions
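The four files below make up mysql2's row-parser layer: text_parser.js and binary_parser.js compile a per-result-set row class with generate-function, parser_cache.js memoises those compiled classes in an LRU cache keyed by column metadata and query options, and string.js converts between Buffers and strings, falling back to iconv-lite for charsets Buffer does not support. As a rough sketch of the cache contract shown in parser_cache.js below (the column definition and the no-op compiler here are made-up placeholders, not real mysql2 internals), a second lookup with identical inputs returns the already-compiled parser:

```js
// Sketch only — drives parser_cache.js the same way getTextParser/getBinaryParser do.
// The field object and the trivial compiler are hypothetical stand-ins; in mysql2 the
// compiler argument is the compile() function of text_parser.js or binary_parser.js.
const parserCache = require('mysql2/lib/parsers/parser_cache.js');

const fields = [
  { name: 'id', columnType: 3, length: 11, schema: 'db', table: 't', flags: 0, characterSet: 63 }
];
const options = { nestTables: false, rowsAsArray: false };
const config = { supportBigNumbers: false, bigNumberStrings: false, timezone: 'local' };

// compile() runs only on a cache miss; the result is stored under a key built from the
// options above plus each field's name, type, length, schema, table, flags and charset.
const compile = () => class Row { next() { return {}; } };

const first = parserCache.getParser('text', fields, options, config, compile);
const second = parserCache.getParser('text', fields, options, config, compile);
console.log(first === second); // true — the second call is served from the LRU cache
```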
diff --git a/node_modules/mysql2/lib/parsers/binary_parser.js b/node_modules/mysql2/lib/parsers/binary_parser.js
new file mode 100644
index 0000000..bbd2959
--- /dev/null
+++ b/node_modules/mysql2/lib/parsers/binary_parser.js
@@ -0,0 +1,186 @@
+'use strict';
+
+const FieldFlags = require('../constants/field_flags.js');
+const Charsets = require('../constants/charsets.js');
+const Types = require('../constants/types.js');
+const helpers = require('../helpers');
+const genFunc = require('generate-function');
+const parserCache = require('./parser_cache.js');
+const typeNames = [];
+for (const t in Types) {
+  typeNames[Types[t]] = t;
+}
+
+function readCodeFor(field, config, options, fieldNum) {
+  const supportBigNumbers =
+    options.supportBigNumbers || config.supportBigNumbers;
+  const bigNumberStrings = options.bigNumberStrings || config.bigNumberStrings;
+  const timezone = options.timezone || config.timezone;
+  const dateStrings = options.dateStrings || config.dateStrings;
+  const unsigned = field.flags & FieldFlags.UNSIGNED;
+  switch (field.columnType) {
+    case Types.TINY:
+      return unsigned ? 'packet.readInt8();' : 'packet.readSInt8();';
+    case Types.SHORT:
+      return unsigned ? 'packet.readInt16();' : 'packet.readSInt16();';
+    case Types.LONG:
+    case Types.INT24: // in binary protocol int24 is encoded in 4 bytes int32
+      return unsigned ? 'packet.readInt32();' : 'packet.readSInt32();';
+    case Types.YEAR:
+      return 'packet.readInt16()';
+    case Types.FLOAT:
+      return 'packet.readFloat();';
+    case Types.DOUBLE:
+      return 'packet.readDouble();';
+    case Types.NULL:
+      return 'null;';
+    case Types.DATE:
+    case Types.DATETIME:
+    case Types.TIMESTAMP:
+    case Types.NEWDATE:
+      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
+        return `packet.readDateTimeString(${field.decimals});`;
+      }
+      return `packet.readDateTime('${timezone}');`;
+    case Types.TIME:
+      return 'packet.readTimeString()';
+    case Types.DECIMAL:
+    case Types.NEWDECIMAL:
+      if (config.decimalNumbers) {
+        return 'packet.parseLengthCodedFloat();';
+      }
+      return 'packet.readLengthCodedString("ascii");';
+    case Types.GEOMETRY:
+      return 'packet.parseGeometryValue();';
+    case Types.JSON:
+      // Since for JSON columns mysql always returns charset 63 (BINARY),
+      // we have to handle it according to JSON specs and use "utf8",
+      // see https://github.com/sidorares/node-mysql2/issues/409
+      return 'JSON.parse(packet.readLengthCodedString("utf8"));';
+    case Types.LONGLONG:
+      if (!supportBigNumbers) {
+        return unsigned
+          ? 'packet.readInt64JSNumber();'
+          : 'packet.readSInt64JSNumber();';
+      }
+      if (bigNumberStrings) {
+        return unsigned
+          ? 'packet.readInt64String();'
+          : 'packet.readSInt64String();';
+      }
+      return unsigned ? 'packet.readInt64();' : 'packet.readSInt64();';
+
+    default:
+      if (field.characterSet === Charsets.BINARY) {
+        return 'packet.readLengthCodedBuffer();';
+      }
+      return `packet.readLengthCodedString(fields[${fieldNum}].encoding)`;
+  }
+}
+
+function compile(fields, options, config) {
+  const parserFn = genFunc();
+  let i = 0;
+  const nullBitmapLength = Math.floor((fields.length + 7 + 2) / 8);
+
+  /* eslint-disable no-trailing-spaces */
+  /* eslint-disable no-spaced-func */
+  /* eslint-disable no-unexpected-multiline */
+
+  parserFn('(function(){');
+  parserFn('return class BinaryRow {');
+  parserFn('constructor() {');
+  parserFn('}');
+
+  parserFn('next(packet, fields, options) {');
+  if (options.rowsAsArray) {
+    parserFn(`const result = new Array(${fields.length});`);
+  } else {
+    parserFn("const result = {};");
+  }
+
+  const resultTables = {};
+  let resultTablesArray = [];
+
+  if (options.nestTables === true) {
+    for (i = 0; i < fields.length; i++) {
+      resultTables[fields[i].table] = 1;
+    }
+    resultTablesArray = Object.keys(resultTables);
+    for (i = 0; i < resultTablesArray.length; i++) {
+      parserFn(`result[${helpers.srcEscape(resultTablesArray[i])}] = {};`);
+    }
+  }
+
+  parserFn('packet.readInt8();'); // status byte
+  for (i = 0; i < nullBitmapLength; ++i) {
+    parserFn(`const nullBitmaskByte${i} = packet.readInt8();`);
+  }
+
+  let lvalue = '';
+  let currentFieldNullBit = 4;
+  let nullByteIndex = 0;
+  let fieldName = '';
+  let tableName = '';
+
+  for (i = 0; i < fields.length; i++) {
+    fieldName = helpers.srcEscape(fields[i].name);
+    parserFn(`// ${fieldName}: ${typeNames[fields[i].columnType]}`);
+
+    if (typeof options.nestTables === 'string') {
+      tableName = helpers.srcEscape(fields[i].table);
+      lvalue = `result[${helpers.srcEscape(
+        fields[i].table + options.nestTables + fields[i].name
+      )}]`;
+    } else if (options.nestTables === true) {
+      tableName = helpers.srcEscape(fields[i].table);
+      lvalue = `result[${tableName}][${fieldName}]`;
+    } else if (options.rowsAsArray) {
+      lvalue = `result[${i.toString(10)}]`;
+    } else {
+      lvalue = `result[${helpers.srcEscape(fields[i].name)}]`;
+    }
+
+    // TODO: this used to be an optimisation ( if column marked as NOT_NULL don't include code to check null
+    // bitmap at all, but it seems that we can't rely on this flag, see #178
+    // TODO: benchmark performance difference
+    //
+    // if (fields[i].flags & FieldFlags.NOT_NULL) { // don't need to check null bitmap if field can't be null.
+    //   result.push(lvalue + ' = ' + readCodeFor(fields[i], config));
+    // } else if (fields[i].columnType == Types.NULL) {
+    //   result.push(lvalue + ' = null;');
+    // } else {
+    parserFn(`if (nullBitmaskByte${nullByteIndex} & ${currentFieldNullBit})`);
+    parserFn(`${lvalue} = null;`);
+    parserFn('else');
+    parserFn(`${lvalue} = ${readCodeFor(fields[i], config, options, i)}`);
+    // }
+    currentFieldNullBit *= 2;
+    if (currentFieldNullBit === 0x100) {
+      currentFieldNullBit = 1;
+      nullByteIndex++;
+    }
+  }
+
+  parserFn('return result;');
+  parserFn('}');
+  parserFn('};')('})()');
+
+  /* eslint-enable no-trailing-spaces */
+  /* eslint-enable no-spaced-func */
+  /* eslint-enable no-unexpected-multiline */
+
+  if (config.debug) {
+    helpers.printDebugWithCode(
+      'Compiled binary protocol row parser',
+      parserFn.toString()
+    );
+  }
+  return parserFn.toFunction();
+}
+
+function getBinaryParser(fields, options, config) {
+  return parserCache.getParser('binary', fields, options, config, compile);
+}
+
+module.exports = getBinaryParser;
diff --git a/node_modules/mysql2/lib/parsers/parser_cache.js b/node_modules/mysql2/lib/parsers/parser_cache.js
new file mode 100644
index 0000000..509b5c6
--- /dev/null
+++ b/node_modules/mysql2/lib/parsers/parser_cache.js
@@ -0,0 +1,53 @@
+'use strict';
+
+const LRU = require('lru-cache');
+
+const parserCache = new LRU({
+  max: 15000
+});
+
+function keyFromFields(type, fields, options, config) {
+  let res =
+    `${type}` +
+    `/${typeof options.nestTables}` +
+    `/${options.nestTables}` +
+    `/${options.rowsAsArray}` +
+    `/${options.supportBigNumbers || config.supportBigNumbers}` +
+    `/${options.bigNumberStrings || config.bigNumberStrings}` +
+    `/${typeof options.typeCast}` +
+    `/${options.timezone || config.timezone}` +
+    `/${options.decimalNumbers}` +
+    `/${options.dateStrings}`;
+  for (let i = 0; i < fields.length; ++i) {
+    const field = fields[i];
+    res += `/${field.name}:${field.columnType}:${field.length}:${field.schema}:${field.table}:${field.flags}:${field.characterSet}`;
+  }
+  return res;
+}
+
+function getParser(type, fields, options, config, compiler) {
+  const key = keyFromFields(type, fields, options, config);
+  let parser = parserCache.get(key);
+
+  if (parser) {
+    return parser;
+  }
+
+  parser = compiler(fields, options, config);
+  parserCache.set(key, parser);
+  return parser;
+}
+
+function setMaxCache(max) {
+  parserCache.max = max;
+}
+
+function clearCache() {
+  parserCache.reset();
+}
+
+module.exports = {
+  getParser: getParser,
+  setMaxCache: setMaxCache,
+  clearCache: clearCache
+};
diff --git a/node_modules/mysql2/lib/parsers/string.js b/node_modules/mysql2/lib/parsers/string.js
new file mode 100644
index 0000000..5523fb2
--- /dev/null
+++ b/node_modules/mysql2/lib/parsers/string.js
@@ -0,0 +1,29 @@
+'use strict';
+
+const Iconv = require('iconv-lite');
+
+exports.decode = function(buffer, encoding, start, end, options) {
+  if (Buffer.isEncoding(encoding)) {
+    return buffer.toString(encoding, start, end);
+  }
+
+  const decoder = Iconv.getDecoder(encoding, options || {});
+
+  const res = decoder.write(buffer.slice(start, end));
+  const trail = decoder.end();
+
+  return trail ? res + trail : res;
+};
+
+exports.encode = function(string, encoding, options) {
+  if (Buffer.isEncoding(encoding)) {
+    return Buffer.from(string, encoding);
+  }
+
+  const encoder = Iconv.getEncoder(encoding, options || {});
+
+  const res = encoder.write(string);
+  const trail = encoder.end();
+
+  return trail && trail.length > 0 ? Buffer.concat([res, trail]) : res;
+};
diff --git a/node_modules/mysql2/lib/parsers/text_parser.js b/node_modules/mysql2/lib/parsers/text_parser.js
new file mode 100644
index 0000000..0af4a7a
--- /dev/null
+++ b/node_modules/mysql2/lib/parsers/text_parser.js
@@ -0,0 +1,202 @@
+'use strict';
+
+const Types = require('../constants/types.js');
+const Charsets = require('../constants/charsets.js');
+const helpers = require('../helpers');
+const genFunc = require('generate-function');
+const parserCache = require('./parser_cache.js');
+
+const typeNames = [];
+for (const t in Types) {
+  typeNames[Types[t]] = t;
+}
+
+function readCodeFor(type, charset, encodingExpr, config, options) {
+  const supportBigNumbers =
+    options.supportBigNumbers || config.supportBigNumbers;
+  const bigNumberStrings = options.bigNumberStrings || config.bigNumberStrings;
+  const timezone = options.timezone || config.timezone;
+  const dateStrings = options.dateStrings || config.dateStrings;
+
+  switch (type) {
+    case Types.TINY:
+    case Types.SHORT:
+    case Types.LONG:
+    case Types.INT24:
+    case Types.YEAR:
+      return 'packet.parseLengthCodedIntNoBigCheck()';
+    case Types.LONGLONG:
+      if (supportBigNumbers && bigNumberStrings) {
+        return 'packet.parseLengthCodedIntString()';
+      }
+      return `packet.parseLengthCodedInt(${supportBigNumbers})`;
+    case Types.FLOAT:
+    case Types.DOUBLE:
+      return 'packet.parseLengthCodedFloat()';
+    case Types.NULL:
+      return 'packet.readLengthCodedNumber()';
+    case Types.DECIMAL:
+    case Types.NEWDECIMAL:
+      if (config.decimalNumbers) {
+        return 'packet.parseLengthCodedFloat()';
+      }
+      return 'packet.readLengthCodedString("ascii")';
+    case Types.DATE:
+      if (helpers.typeMatch(type, dateStrings, Types)) {
+        return 'packet.readLengthCodedString("ascii")';
+      }
+      return `packet.parseDate('${timezone}')`;
+    case Types.DATETIME:
+    case Types.TIMESTAMP:
+      if (helpers.typeMatch(type, dateStrings, Types)) {
+        return 'packet.readLengthCodedString("ascii")';
+      }
+      return `packet.parseDateTime('${timezone}')`;
+    case Types.TIME:
+      return 'packet.readLengthCodedString("ascii")';
+    case Types.GEOMETRY:
+      return 'packet.parseGeometryValue()';
+    case Types.JSON:
+      // Since for JSON columns mysql always returns charset 63 (BINARY),
+      // we have to handle it according to JSON specs and use "utf8",
+      // see https://github.com/sidorares/node-mysql2/issues/409
+      return 'JSON.parse(packet.readLengthCodedString("utf8"))';
+    default:
+      if (charset === Charsets.BINARY) {
+        return 'packet.readLengthCodedBuffer()';
+      }
+      return `packet.readLengthCodedString(${encodingExpr})`;
+  }
+}
+
+function compile(fields, options, config) {
+  // use global typeCast if current query doesn't specify one
+  if (
+    typeof config.typeCast === 'function' &&
+    typeof options.typeCast !== 'function'
+  ) {
+    options.typeCast = config.typeCast;
+  }
+
+  function wrap(field, _this) {
+    return {
+      type: typeNames[field.columnType],
+      length: field.columnLength,
+      db: field.schema,
+      table: field.table,
+      name: field.name,
+      string: function() {
+        return _this.packet.readLengthCodedString(field.encoding);
+      },
+      buffer: function() {
+        return _this.packet.readLengthCodedBuffer();
+      },
+      geometry: function() {
+        return _this.packet.parseGeometryValue();
+      }
+    };
+  }
+
+  const parserFn = genFunc();
+
+  /* eslint-disable no-trailing-spaces */
+  /* eslint-disable no-spaced-func */
+  /* eslint-disable no-unexpected-multiline */
+  parserFn('(function () {')(
+    'return class TextRow {'
+  );
+
+  // constructor method
+  parserFn('constructor(fields) {');
+  // node-mysql typeCast compatibility wrapper
+  // see https://github.com/mysqljs/mysql/blob/96fdd0566b654436624e2375c7b6604b1f50f825/lib/protocol/packets/Field.js
+  if (typeof options.typeCast === 'function') {
+    parserFn('const _this = this;');
+    parserFn('for(let i=0; i<fields.length; ++i) {');
+    parserFn('this[`wrap${i}`] = wrap(fields[i], _this);');
+    parserFn('}');
+  }
+  parserFn('}');
+
+  // next method
+  parserFn('next(packet, fields, options) {');
+  parserFn("this.packet = packet;");
+  if (options.rowsAsArray) {
+    parserFn(`const result = new Array(${fields.length});`);
+  } else {
+    parserFn("const result = {};");
+  }
+
+  const resultTables = {};
+  let resultTablesArray = [];
+
+  if (options.nestTables === true) {
+    for (let i=0; i < fields.length; i++) {
+      resultTables[fields[i].table] = 1;
+    }
+    resultTablesArray = Object.keys(resultTables);
+    for (let i=0; i < resultTablesArray.length; i++) {
+      parserFn(`result[${helpers.srcEscape(resultTablesArray[i])}] = {};`);
+    }
+  }
+
+  let lvalue = '';
+  let fieldName = '';
+  for (let i = 0; i < fields.length; i++) {
+    fieldName = helpers.srcEscape(fields[i].name);
+    parserFn(`// ${fieldName}: ${typeNames[fields[i].columnType]}`);
+    if (typeof options.nestTables === 'string') {
+      lvalue = `result[${helpers.srcEscape(
+        fields[i].table + options.nestTables + fields[i].name
+      )}]`;
+    } else if (options.nestTables === true) {
+      lvalue = `result[${helpers.srcEscape(fields[i].table)}][${fieldName}]`;
+    } else if (options.rowsAsArray) {
+      lvalue = `result[${i.toString(10)}]`;
+    } else {
+      lvalue = `result[${fieldName}]`;
+    }
+    if (options.typeCast === false) {
+      parserFn(`${lvalue} = packet.readLengthCodedBuffer();`);
+    } else {
+      const encodingExpr = `fields[${i}].encoding`;
+      const readCode = readCodeFor(
+        fields[i].columnType,
+        fields[i].characterSet,
+        encodingExpr,
+        config,
+        options
+      );
+      if (typeof options.typeCast === 'function') {
+        parserFn(`${lvalue} = options.typeCast(this.wrap${i}, function() { return ${readCode} });`);
+      } else {
+        parserFn(`${lvalue} = ${readCode};`);
+      }
+    }
+  }
+
+  parserFn('return result;');
+  parserFn('}');
+  parserFn('};')('})()');
+
+  /* eslint-enable no-trailing-spaces */
+  /* eslint-enable no-spaced-func */
+  /* eslint-enable no-unexpected-multiline */
+
+  if (config.debug) {
+    helpers.printDebugWithCode(
+      'Compiled text protocol row parser',
+      parserFn.toString()
+    );
+  }
+  if (typeof options.typeCast === 'function') {
+    return parserFn.toFunction({wrap});
+  }
+  return parserFn.toFunction();
+}
+
+function getTextParser(fields, options, config) {
+  return parserCache.getParser('text', fields, options, config, compile);
+}
+
+module.exports = getTextParser;
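For completeness, a small usage sketch of the string.js helpers added above. The encodings are arbitrary examples chosen so that one takes the native Buffer path and the other falls through to iconv-lite:

```js
// Sketch only — exercises the decode/encode exports of string.js.
const str = require('mysql2/lib/parsers/string.js');

// 'latin1' is a Buffer encoding, so decode() is a plain buffer.toString() over the slice.
const buf = Buffer.from('hej', 'latin1');
console.log(str.decode(buf, 'latin1', 0, buf.length)); // "hej"

// 'cp1251' is unknown to Buffer, so encode/decode go through iconv-lite's
// getEncoder/getDecoder streaming API.
const encoded = str.encode('привет', 'cp1251');
console.log(str.decode(encoded, 'cp1251', 0, encoded.length)); // "привет"
```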