diff --git a/consuming.html b/consuming.html
new file mode 100644
index 0000000..f356f4e
--- /dev/null
+++ b/consuming.html
@@ -0,0 +1,13 @@
+
+
+
L(A/m)&&T("overflow"),h*=m}var v=t.length+1;i=Q(s-u,v,0==u),L(s/v)>A-o&&T("overflow"),o+=L(s/v),s%=v,t.splice(s++,0,o)}return String.fromCodePoint.apply(String,t)},c=function(e){var r=[],t=(e=q(e)).length,a=128,s=0,o=72,i=!0,n=!1,l=void 0;try{for(var c,u=e[Symbol.iterator]();!(i=(c=u.next()).done);i=!0){var h=c.value;h<128&&r.push(z(h))}}catch(e){n=!0,l=e}finally{try{!i&&u.return&&u.return()}finally{if(n)throw l}}var d=r.length,f=d;for(d&&r.push("-");fL((A-s)/w)&&T("overflow"),s+=(p-a)*w,a=p;var b=!0,S=!1,_=void 0;try{for(var F,x=e[Symbol.iterator]();!(b=(F=x.next()).done);b=!0){var R=F.value;if(RA&&T("overflow"),R==a){for(var $=s,D=36;;D+=36){var j=D<=o?1:o+26<=D?26:D-o;if($>6|192).toString(16).toUpperCase()+"%"+(63&r|128).toString(16).toUpperCase():"%"+(r>>12|224).toString(16).toUpperCase()+"%"+(r>>6&63|128).toString(16).toUpperCase()+"%"+(63&r|128).toString(16).toUpperCase()}function f(e){for(var r="",t=0,a=e.length;tA-Z\\x5E-\\x7E]",'[\\"\\\\]'),Z=new RegExp(K,"g"),G=new RegExp(B,"g"),Y=new RegExp(C("[^]","[A-Za-z0-9\\!\\$\\%\\'\\*\\+\\-\\^\\_\\`\\{\\|\\}\\~]","[\\.]",'[\\"]',J),"g"),W=new RegExp(C("[^]",K,"[\\!\\$\\'\\(\\)\\*\\+\\,\\;\\:\\@]"),"g"),X=W;function ee(e){var r=f(e);return r.match(Z)?r:e}var re={scheme:"mailto",parse:function(e,r){var t=e,a=t.to=t.path?t.path.split(","):[];if(t.path=void 0,t.query){for(var s=!1,o={},i=t.query.split("&"),n=0,l=i.length;n); + + message: string; + errors: Array; + ajv: true; + validation: true; + } + + class MissingRefError extends Error { + constructor(baseId: string, ref: string, message?: string); + static message: (baseId: string, ref: string) => string; + + message: string; + missingRef: string; + missingSchema: string; + } +} + +declare namespace ajv { + type ValidationError = AjvErrors.ValidationError; + + type MissingRefError = AjvErrors.MissingRefError; + + interface Ajv { + /** + * Validate data using schema + * Schema will be compiled and cached (using serialized JSON as key, [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize by default). + * @param {string|object|Boolean} schemaKeyRef key, ref or schema object + * @param {Any} data to be validated + * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`). + */ + validate(schemaKeyRef: object | string | boolean, data: any): boolean | PromiseLike; + /** + * Create validating function for passed schema. + * @param {object|Boolean} schema schema object + * @return {Function} validating function + */ + compile(schema: object | boolean): ValidateFunction; + /** + * Creates validating function for passed schema with asynchronous loading of missing schemas. + * `loadSchema` option should be a function that accepts schema uri and node-style callback. + * @this Ajv + * @param {object|Boolean} schema schema object + * @param {Boolean} meta optional true to compile meta-schema; this parameter can be skipped + * @param {Function} callback optional node-style callback, it is always called with 2 parameters: error (or null) and validating function. + * @return {PromiseLike} validating function + */ + compileAsync(schema: object | boolean, meta?: Boolean, callback?: (err: Error, validate: ValidateFunction) => any): PromiseLike; + /** + * Adds schema to the instance. + * @param {object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored. + * @param {string} key Optional schema key. 
Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`. + * @return {Ajv} this for method chaining + */ + addSchema(schema: Array | object, key?: string): Ajv; + /** + * Add schema that will be used to validate other schemas + * options in META_IGNORE_OPTIONS are alway set to false + * @param {object} schema schema object + * @param {string} key optional schema key + * @return {Ajv} this for method chaining + */ + addMetaSchema(schema: object, key?: string): Ajv; + /** + * Validate schema + * @param {object|Boolean} schema schema to validate + * @return {Boolean} true if schema is valid + */ + validateSchema(schema: object | boolean): boolean; + /** + * Get compiled schema from the instance by `key` or `ref`. + * @param {string} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id). + * @return {Function} schema validating function (with property `schema`). + */ + getSchema(keyRef: string): ValidateFunction; + /** + * Remove cached schema(s). + * If no parameter is passed all schemas but meta-schemas are removed. + * If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed. + * Even if schema is referenced by other schemas it still can be removed as other schemas have local references. + * @param {string|object|RegExp|Boolean} schemaKeyRef key, ref, pattern to match key/ref or schema object + * @return {Ajv} this for method chaining + */ + removeSchema(schemaKeyRef?: object | string | RegExp | boolean): Ajv; + /** + * Add custom format + * @param {string} name format name + * @param {string|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid) + * @return {Ajv} this for method chaining + */ + addFormat(name: string, format: FormatValidator | FormatDefinition): Ajv; + /** + * Define custom keyword + * @this Ajv + * @param {string} keyword custom keyword, should be a valid identifier, should be different from all standard, custom and macro keywords. + * @param {object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`. + * @return {Ajv} this for method chaining + */ + addKeyword(keyword: string, definition: KeywordDefinition): Ajv; + /** + * Get keyword definition + * @this Ajv + * @param {string} keyword pre-defined or custom keyword. + * @return {object|Boolean} custom keyword definition, `true` if it is a predefined keyword, `false` otherwise. + */ + getKeyword(keyword: string): object | boolean; + /** + * Remove keyword + * @this Ajv + * @param {string} keyword pre-defined or custom keyword. + * @return {Ajv} this for method chaining + */ + removeKeyword(keyword: string): Ajv; + /** + * Validate keyword + * @this Ajv + * @param {object} definition keyword definition object + * @param {boolean} throwError true to throw exception if definition is invalid + * @return {boolean} validation result + */ + validateKeyword(definition: KeywordDefinition, throwError: boolean): boolean; + /** + * Convert array of error message objects to string + * @param {Array} errors optional array of validation errors, if not passed errors from the instance are used. + * @param {object} options optional options with properties `separator` and `dataVar`. 
+ * @return {string} human readable string with all errors descriptions + */ + errorsText(errors?: Array | null, options?: ErrorsTextOptions): string; + errors?: Array | null; + } + + interface CustomLogger { + log(...args: any[]): any; + warn(...args: any[]): any; + error(...args: any[]): any; + } + + interface ValidateFunction { + ( + data: any, + dataPath?: string, + parentData?: object | Array, + parentDataProperty?: string | number, + rootData?: object | Array + ): boolean | PromiseLike; + schema?: object | boolean; + errors?: null | Array; + refs?: object; + refVal?: Array; + root?: ValidateFunction | object; + $async?: true; + source?: object; + } + + interface Options { + $data?: boolean; + allErrors?: boolean; + verbose?: boolean; + jsonPointers?: boolean; + uniqueItems?: boolean; + unicode?: boolean; + format?: string; + formats?: object; + unknownFormats?: true | string[] | 'ignore'; + schemas?: Array | object; + schemaId?: '$id' | 'id' | 'auto'; + missingRefs?: true | 'ignore' | 'fail'; + extendRefs?: true | 'ignore' | 'fail'; + loadSchema?: (uri: string, cb?: (err: Error, schema: object) => void) => PromiseLike; + removeAdditional?: boolean | 'all' | 'failing'; + useDefaults?: boolean | 'empty' | 'shared'; + coerceTypes?: boolean | 'array'; + strictDefaults?: boolean | 'log'; + strictKeywords?: boolean | 'log'; + async?: boolean | string; + transpile?: string | ((code: string) => string); + meta?: boolean | object; + validateSchema?: boolean | 'log'; + addUsedSchema?: boolean; + inlineRefs?: boolean | number; + passContext?: boolean; + loopRequired?: number; + ownProperties?: boolean; + multipleOfPrecision?: boolean | number; + errorDataPath?: string, + messages?: boolean; + sourceCode?: boolean; + processCode?: (code: string) => string; + cache?: object; + logger?: CustomLogger | false; + nullable?: boolean; + serialize?: ((schema: object | boolean) => any) | false; + } + + type FormatValidator = string | RegExp | ((data: string) => boolean | PromiseLike); + type NumberFormatValidator = ((data: number) => boolean | PromiseLike); + + interface NumberFormatDefinition { + type: "number", + validate: NumberFormatValidator; + compare?: (data1: number, data2: number) => number; + async?: boolean; + } + + interface StringFormatDefinition { + type?: "string", + validate: FormatValidator; + compare?: (data1: string, data2: string) => number; + async?: boolean; + } + + type FormatDefinition = NumberFormatDefinition | StringFormatDefinition; + + interface KeywordDefinition { + type?: string | Array; + async?: boolean; + $data?: boolean; + errors?: boolean | string; + metaSchema?: object; + // schema: false makes validate not to expect schema (ValidateFunction) + schema?: boolean; + statements?: boolean; + dependencies?: Array; + modifying?: boolean; + valid?: boolean; + // one and only one of the following properties should be present + validate?: SchemaValidateFunction | ValidateFunction; + compile?: (schema: any, parentSchema: object, it: CompilationContext) => ValidateFunction; + macro?: (schema: any, parentSchema: object, it: CompilationContext) => object | boolean; + inline?: (it: CompilationContext, keyword: string, schema: any, parentSchema: object) => string; + } + + interface CompilationContext { + level: number; + dataLevel: number; + schema: any; + schemaPath: string; + baseId: string; + async: boolean; + opts: Options; + formats: { + [index: string]: FormatDefinition | undefined; + }; + compositeRule: boolean; + validate: (schema: object) => boolean; + util: { + copy(obj: 
any, target?: any): any; + toHash(source: string[]): { [index: string]: true | undefined }; + equal(obj: any, target: any): boolean; + getProperty(str: string): string; + schemaHasRules(schema: object, rules: any): string; + escapeQuotes(str: string): string; + toQuotedString(str: string): string; + getData(jsonPointer: string, dataLevel: number, paths: string[]): string; + escapeJsonPointer(str: string): string; + unescapeJsonPointer(str: string): string; + escapeFragment(str: string): string; + unescapeFragment(str: string): string; + }; + self: Ajv; + } + + interface SchemaValidateFunction { + ( + schema: any, + data: any, + parentSchema?: object, + dataPath?: string, + parentData?: object | Array, + parentDataProperty?: string | number, + rootData?: object | Array + ): boolean | PromiseLike; + errors?: Array; + } + + interface ErrorsTextOptions { + separator?: string; + dataVar?: string; + } + + interface ErrorObject { + keyword: string; + dataPath: string; + schemaPath: string; + params: ErrorParameters; + // Added to validation errors of propertyNames keyword schema + propertyName?: string; + // Excluded if messages set to false. + message?: string; + // These are added with the `verbose` option. + schema?: any; + parentSchema?: object; + data?: any; + } + + type ErrorParameters = RefParams | LimitParams | AdditionalPropertiesParams | + DependenciesParams | FormatParams | ComparisonParams | + MultipleOfParams | PatternParams | RequiredParams | + TypeParams | UniqueItemsParams | CustomParams | + PatternRequiredParams | PropertyNamesParams | + IfParams | SwitchParams | NoParams | EnumParams; + + interface RefParams { + ref: string; + } + + interface LimitParams { + limit: number; + } + + interface AdditionalPropertiesParams { + additionalProperty: string; + } + + interface DependenciesParams { + property: string; + missingProperty: string; + depsCount: number; + deps: string; + } + + interface FormatParams { + format: string + } + + interface ComparisonParams { + comparison: string; + limit: number | string; + exclusive: boolean; + } + + interface MultipleOfParams { + multipleOf: number; + } + + interface PatternParams { + pattern: string; + } + + interface RequiredParams { + missingProperty: string; + } + + interface TypeParams { + type: string; + } + + interface UniqueItemsParams { + i: number; + j: number; + } + + interface CustomParams { + keyword: string; + } + + interface PatternRequiredParams { + missingPattern: string; + } + + interface PropertyNamesParams { + propertyName: string; + } + + interface IfParams { + failingKeyword: string; + } + + interface SwitchParams { + caseIndex: number; + } + + interface NoParams { } + + interface EnumParams { + allowedValues: Array; + } +} + +export = ajv; diff --git a/node_modules/ajv/lib/ajv.js b/node_modules/ajv/lib/ajv.js new file mode 100644 index 0000000..611b938 --- /dev/null +++ b/node_modules/ajv/lib/ajv.js @@ -0,0 +1,497 @@ +'use strict'; + +var compileSchema = require('./compile') + , resolve = require('./compile/resolve') + , Cache = require('./cache') + , SchemaObject = require('./compile/schema_obj') + , stableStringify = require('fast-json-stable-stringify') + , formats = require('./compile/formats') + , rules = require('./compile/rules') + , $dataMetaSchema = require('./data') + , util = require('./compile/util'); + +module.exports = Ajv; + +Ajv.prototype.validate = validate; +Ajv.prototype.compile = compile; +Ajv.prototype.addSchema = addSchema; +Ajv.prototype.addMetaSchema = addMetaSchema; +Ajv.prototype.validateSchema = 
validateSchema; +Ajv.prototype.getSchema = getSchema; +Ajv.prototype.removeSchema = removeSchema; +Ajv.prototype.addFormat = addFormat; +Ajv.prototype.errorsText = errorsText; + +Ajv.prototype._addSchema = _addSchema; +Ajv.prototype._compile = _compile; + +Ajv.prototype.compileAsync = require('./compile/async'); +var customKeyword = require('./keyword'); +Ajv.prototype.addKeyword = customKeyword.add; +Ajv.prototype.getKeyword = customKeyword.get; +Ajv.prototype.removeKeyword = customKeyword.remove; +Ajv.prototype.validateKeyword = customKeyword.validate; + +var errorClasses = require('./compile/error_classes'); +Ajv.ValidationError = errorClasses.Validation; +Ajv.MissingRefError = errorClasses.MissingRef; +Ajv.$dataMetaSchema = $dataMetaSchema; + +var META_SCHEMA_ID = 'http://json-schema.org/draft-07/schema'; + +var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes', 'strictDefaults' ]; +var META_SUPPORT_DATA = ['/properties']; + +/** + * Creates validator instance. + * Usage: `Ajv(opts)` + * @param {Object} opts optional options + * @return {Object} ajv instance + */ +function Ajv(opts) { + if (!(this instanceof Ajv)) return new Ajv(opts); + opts = this._opts = util.copy(opts) || {}; + setLogger(this); + this._schemas = {}; + this._refs = {}; + this._fragments = {}; + this._formats = formats(opts.format); + + this._cache = opts.cache || new Cache; + this._loadingSchemas = {}; + this._compilations = []; + this.RULES = rules(); + this._getId = chooseGetId(opts); + + opts.loopRequired = opts.loopRequired || Infinity; + if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true; + if (opts.serialize === undefined) opts.serialize = stableStringify; + this._metaOpts = getMetaSchemaOptions(this); + + if (opts.formats) addInitialFormats(this); + addDefaultMetaSchema(this); + if (typeof opts.meta == 'object') this.addMetaSchema(opts.meta); + if (opts.nullable) this.addKeyword('nullable', {metaSchema: {type: 'boolean'}}); + addInitialSchemas(this); +} + + + +/** + * Validate data using schema + * Schema will be compiled and cached (using serialized JSON as key. [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize. + * @this Ajv + * @param {String|Object} schemaKeyRef key, ref or schema object + * @param {Any} data to be validated + * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`). + */ +function validate(schemaKeyRef, data) { + var v; + if (typeof schemaKeyRef == 'string') { + v = this.getSchema(schemaKeyRef); + if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"'); + } else { + var schemaObj = this._addSchema(schemaKeyRef); + v = schemaObj.validate || this._compile(schemaObj); + } + + var valid = v(data); + if (v.$async !== true) this.errors = v.errors; + return valid; +} + + +/** + * Create validating function for passed schema. + * @this Ajv + * @param {Object} schema schema object + * @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords. + * @return {Function} validating function + */ +function compile(schema, _meta) { + var schemaObj = this._addSchema(schema, undefined, _meta); + return schemaObj.validate || this._compile(schemaObj); +} + + +/** + * Adds schema to the instance. + * @this Ajv + * @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored. 
+ * @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`. + * @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead. + * @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead. + * @return {Ajv} this for method chaining + */ +function addSchema(schema, key, _skipValidation, _meta) { + if (Array.isArray(schema)){ + for (var i=0; i} errors optional array of validation errors, if not passed errors from the instance are used. + * @param {Object} options optional options with properties `separator` and `dataVar`. + * @return {String} human readable string with all errors descriptions + */ +function errorsText(errors, options) { + errors = errors || this.errors; + if (!errors) return 'No errors'; + options = options || {}; + var separator = options.separator === undefined ? ', ' : options.separator; + var dataVar = options.dataVar === undefined ? 'data' : options.dataVar; + + var text = ''; + for (var i=0; i%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i; +// For the source: https://gist.github.com/dperini/729294 +// For test cases: https://mathiasbynens.be/demo/url-regex +// @todo Delete current URL in favour of the commented out URL rule when this issue is fixed https://github.com/eslint/eslint/issues/7983. +// var URL = /^(?:(?:https?|ftp):\/\/)(?:\S+(?::\S*)?@)?(?:(?!10(?:\.\d{1,3}){3})(?!127(?:\.\d{1,3}){3})(?!169\.254(?:\.\d{1,3}){2})(?!192\.168(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u{00a1}-\u{ffff}0-9]+-?)*[a-z\u{00a1}-\u{ffff}0-9]+)(?:\.(?:[a-z\u{00a1}-\u{ffff}0-9]+-?)*[a-z\u{00a1}-\u{ffff}0-9]+)*(?:\.(?:[a-z\u{00a1}-\u{ffff}]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/iu; +var URL = 
/^(?:(?:http[s\u017F]?|ftp):\/\/)(?:(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+(?::(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?@)?(?:(?!10(?:\.[0-9]{1,3}){3})(?!127(?:\.[0-9]{1,3}){3})(?!169\.254(?:\.[0-9]{1,3}){2})(?!192\.168(?:\.[0-9]{1,3}){2})(?!172\.(?:1[6-9]|2[0-9]|3[01])(?:\.[0-9]{1,3}){2})(?:[1-9][0-9]?|1[0-9][0-9]|2[01][0-9]|22[0-3])(?:\.(?:1?[0-9]{1,2}|2[0-4][0-9]|25[0-5])){2}(?:\.(?:[1-9][0-9]?|1[0-9][0-9]|2[0-4][0-9]|25[0-4]))|(?:(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)(?:\.(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)*(?:\.(?:(?:[KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]){2,})))(?::[0-9]{2,5})?(?:\/(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?$/i; +var UUID = /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i; +var JSON_POINTER = /^(?:\/(?:[^~/]|~0|~1)*)*$/; +var JSON_POINTER_URI_FRAGMENT = /^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i; +var RELATIVE_JSON_POINTER = /^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/; + + +module.exports = formats; + +function formats(mode) { + mode = mode == 'full' ? 
'full' : 'fast'; + return util.copy(formats[mode]); +} + + +formats.fast = { + // date: http://tools.ietf.org/html/rfc3339#section-5.6 + date: /^\d\d\d\d-[0-1]\d-[0-3]\d$/, + // date-time: http://tools.ietf.org/html/rfc3339#section-5.6 + time: /^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d:\d\d)?$/i, + 'date-time': /^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d:\d\d)$/i, + // uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js + uri: /^(?:[a-z][a-z0-9+-.]*:)(?:\/?\/)?[^\s]*$/i, + 'uri-reference': /^(?:(?:[a-z][a-z0-9+-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i, + 'uri-template': URITEMPLATE, + url: URL, + // email (sources from jsen validator): + // http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363 + // http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'willful violation') + email: /^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i, + hostname: HOSTNAME, + // optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html + ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/, + // optimized http://stackoverflow.com/questions/53497/regular-expression-that-matches-valid-ipv6-addresses + ipv6: /^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i, + regex: regex, + // uuid: http://tools.ietf.org/html/rfc4122 + uuid: UUID, + // JSON-pointer: https://tools.ietf.org/html/rfc6901 + // uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A + 'json-pointer': JSON_POINTER, + 'json-pointer-uri-fragment': JSON_POINTER_URI_FRAGMENT, + // relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00 + 'relative-json-pointer': RELATIVE_JSON_POINTER +}; + + +formats.full = { + date: date, + time: time, + 'date-time': date_time, + uri: uri, + 'uri-reference': URIREF, + 'uri-template': URITEMPLATE, + url: URL, + email: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i, + hostname: hostname, + ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/, + ipv6: 
/^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i, + regex: regex, + uuid: UUID, + 'json-pointer': JSON_POINTER, + 'json-pointer-uri-fragment': JSON_POINTER_URI_FRAGMENT, + 'relative-json-pointer': RELATIVE_JSON_POINTER +}; + + +function isLeapYear(year) { + // https://tools.ietf.org/html/rfc3339#appendix-C + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +} + + +function date(str) { + // full-date from http://tools.ietf.org/html/rfc3339#section-5.6 + var matches = str.match(DATE); + if (!matches) return false; + + var year = +matches[1]; + var month = +matches[2]; + var day = +matches[3]; + + return month >= 1 && month <= 12 && day >= 1 && + day <= (month == 2 && isLeapYear(year) ? 29 : DAYS[month]); +} + + +function time(str, full) { + var matches = str.match(TIME); + if (!matches) return false; + + var hour = matches[1]; + var minute = matches[2]; + var second = matches[3]; + var timeZone = matches[5]; + return ((hour <= 23 && minute <= 59 && second <= 59) || + (hour == 23 && minute == 59 && second == 60)) && + (!full || timeZone); +} + + +var DATE_TIME_SEPARATOR = /t|\s/i; +function date_time(str) { + // http://tools.ietf.org/html/rfc3339#section-5.6 + var dateTime = str.split(DATE_TIME_SEPARATOR); + return dateTime.length == 2 && date(dateTime[0]) && time(dateTime[1], true); +} + + +function hostname(str) { + // https://tools.ietf.org/html/rfc1034#section-3.5 + // https://tools.ietf.org/html/rfc1123#section-2 + return str.length <= 255 && HOSTNAME.test(str); +} + + +var NOT_URI_FRAGMENT = /\/|:/; +function uri(str) { + // http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "." 
+ return NOT_URI_FRAGMENT.test(str) && URI.test(str); +} + + +var Z_ANCHOR = /[^\\]\\Z/; +function regex(str) { + if (Z_ANCHOR.test(str)) return false; + try { + new RegExp(str); + return true; + } catch(e) { + return false; + } +} diff --git a/node_modules/ajv/lib/compile/index.js b/node_modules/ajv/lib/compile/index.js new file mode 100644 index 0000000..f4d3f0d --- /dev/null +++ b/node_modules/ajv/lib/compile/index.js @@ -0,0 +1,387 @@ +'use strict'; + +var resolve = require('./resolve') + , util = require('./util') + , errorClasses = require('./error_classes') + , stableStringify = require('fast-json-stable-stringify'); + +var validateGenerator = require('../dotjs/validate'); + +/** + * Functions below are used inside compiled validations function + */ + +var ucs2length = util.ucs2length; +var equal = require('fast-deep-equal'); + +// this error is thrown by async schemas to return validation errors via exception +var ValidationError = errorClasses.Validation; + +module.exports = compile; + + +/** + * Compiles schema to validation function + * @this Ajv + * @param {Object} schema schema object + * @param {Object} root object with information about the root schema for this schema + * @param {Object} localRefs the hash of local references inside the schema (created by resolve.id), used for inline resolution + * @param {String} baseId base ID for IDs in the schema + * @return {Function} validation function + */ +function compile(schema, root, localRefs, baseId) { + /* jshint validthis: true, evil: true */ + /* eslint no-shadow: 0 */ + var self = this + , opts = this._opts + , refVal = [ undefined ] + , refs = {} + , patterns = [] + , patternsHash = {} + , defaults = [] + , defaultsHash = {} + , customRules = []; + + root = root || { schema: schema, refVal: refVal, refs: refs }; + + var c = checkCompiling.call(this, schema, root, baseId); + var compilation = this._compilations[c.index]; + if (c.compiling) return (compilation.callValidate = callValidate); + + var formats = this._formats; + var RULES = this.RULES; + + try { + var v = localCompile(schema, root, localRefs, baseId); + compilation.validate = v; + var cv = compilation.callValidate; + if (cv) { + cv.schema = v.schema; + cv.errors = null; + cv.refs = v.refs; + cv.refVal = v.refVal; + cv.root = v.root; + cv.$async = v.$async; + if (opts.sourceCode) cv.source = v.source; + } + return v; + } finally { + endCompiling.call(this, schema, root, baseId); + } + + /* @this {*} - custom context, see passContext option */ + function callValidate() { + /* jshint validthis: true */ + var validate = compilation.validate; + var result = validate.apply(this, arguments); + callValidate.errors = validate.errors; + return result; + } + + function localCompile(_schema, _root, localRefs, baseId) { + var isRoot = !_root || (_root && _root.schema == _schema); + if (_root.schema != root.schema) + return compile.call(self, _schema, _root, localRefs, baseId); + + var $async = _schema.$async === true; + + var sourceCode = validateGenerator({ + isTop: true, + schema: _schema, + isRoot: isRoot, + baseId: baseId, + root: _root, + schemaPath: '', + errSchemaPath: '#', + errorPath: '""', + MissingRefError: errorClasses.MissingRef, + RULES: RULES, + validate: validateGenerator, + util: util, + resolve: resolve, + resolveRef: resolveRef, + usePattern: usePattern, + useDefault: useDefault, + useCustomRule: useCustomRule, + opts: opts, + formats: formats, + logger: self.logger, + self: self + }); + + sourceCode = vars(refVal, refValCode) + vars(patterns, patternCode) 
+ + vars(defaults, defaultCode) + vars(customRules, customRuleCode) + + sourceCode; + + if (opts.processCode) sourceCode = opts.processCode(sourceCode); + // console.log('\n\n\n *** \n', JSON.stringify(sourceCode)); + var validate; + try { + var makeValidate = new Function( + 'self', + 'RULES', + 'formats', + 'root', + 'refVal', + 'defaults', + 'customRules', + 'equal', + 'ucs2length', + 'ValidationError', + sourceCode + ); + + validate = makeValidate( + self, + RULES, + formats, + root, + refVal, + defaults, + customRules, + equal, + ucs2length, + ValidationError + ); + + refVal[0] = validate; + } catch(e) { + self.logger.error('Error compiling schema, function code:', sourceCode); + throw e; + } + + validate.schema = _schema; + validate.errors = null; + validate.refs = refs; + validate.refVal = refVal; + validate.root = isRoot ? validate : _root; + if ($async) validate.$async = true; + if (opts.sourceCode === true) { + validate.source = { + code: sourceCode, + patterns: patterns, + defaults: defaults + }; + } + + return validate; + } + + function resolveRef(baseId, ref, isRoot) { + ref = resolve.url(baseId, ref); + var refIndex = refs[ref]; + var _refVal, refCode; + if (refIndex !== undefined) { + _refVal = refVal[refIndex]; + refCode = 'refVal[' + refIndex + ']'; + return resolvedRef(_refVal, refCode); + } + if (!isRoot && root.refs) { + var rootRefId = root.refs[ref]; + if (rootRefId !== undefined) { + _refVal = root.refVal[rootRefId]; + refCode = addLocalRef(ref, _refVal); + return resolvedRef(_refVal, refCode); + } + } + + refCode = addLocalRef(ref); + var v = resolve.call(self, localCompile, root, ref); + if (v === undefined) { + var localSchema = localRefs && localRefs[ref]; + if (localSchema) { + v = resolve.inlineRef(localSchema, opts.inlineRefs) + ? localSchema + : compile.call(self, localSchema, root, localRefs, baseId); + } + } + + if (v === undefined) { + removeLocalRef(ref); + } else { + replaceLocalRef(ref, v); + return resolvedRef(v, refCode); + } + } + + function addLocalRef(ref, v) { + var refId = refVal.length; + refVal[refId] = v; + refs[ref] = refId; + return 'refVal' + refId; + } + + function removeLocalRef(ref) { + delete refs[ref]; + } + + function replaceLocalRef(ref, v) { + var refId = refs[ref]; + refVal[refId] = v; + } + + function resolvedRef(refVal, code) { + return typeof refVal == 'object' || typeof refVal == 'boolean' + ? 
{ code: code, schema: refVal, inline: true } + : { code: code, $async: refVal && !!refVal.$async }; + } + + function usePattern(regexStr) { + var index = patternsHash[regexStr]; + if (index === undefined) { + index = patternsHash[regexStr] = patterns.length; + patterns[index] = regexStr; + } + return 'pattern' + index; + } + + function useDefault(value) { + switch (typeof value) { + case 'boolean': + case 'number': + return '' + value; + case 'string': + return util.toQuotedString(value); + case 'object': + if (value === null) return 'null'; + var valueStr = stableStringify(value); + var index = defaultsHash[valueStr]; + if (index === undefined) { + index = defaultsHash[valueStr] = defaults.length; + defaults[index] = value; + } + return 'default' + index; + } + } + + function useCustomRule(rule, schema, parentSchema, it) { + if (self._opts.validateSchema !== false) { + var deps = rule.definition.dependencies; + if (deps && !deps.every(function(keyword) { + return Object.prototype.hasOwnProperty.call(parentSchema, keyword); + })) + throw new Error('parent schema must have all required keywords: ' + deps.join(',')); + + var validateSchema = rule.definition.validateSchema; + if (validateSchema) { + var valid = validateSchema(schema); + if (!valid) { + var message = 'keyword schema is invalid: ' + self.errorsText(validateSchema.errors); + if (self._opts.validateSchema == 'log') self.logger.error(message); + else throw new Error(message); + } + } + } + + var compile = rule.definition.compile + , inline = rule.definition.inline + , macro = rule.definition.macro; + + var validate; + if (compile) { + validate = compile.call(self, schema, parentSchema, it); + } else if (macro) { + validate = macro.call(self, schema, parentSchema, it); + if (opts.validateSchema !== false) self.validateSchema(validate, true); + } else if (inline) { + validate = inline.call(self, it, rule.keyword, schema, parentSchema); + } else { + validate = rule.definition.validate; + if (!validate) return; + } + + if (validate === undefined) + throw new Error('custom keyword "' + rule.keyword + '"failed to compile'); + + var index = customRules.length; + customRules[index] = validate; + + return { + code: 'customRule' + index, + validate: validate + }; + } +} + + +/** + * Checks if the schema is currently compiled + * @this Ajv + * @param {Object} schema schema to compile + * @param {Object} root root object + * @param {String} baseId base schema ID + * @return {Object} object with properties "index" (compilation index) and "compiling" (boolean) + */ +function checkCompiling(schema, root, baseId) { + /* jshint validthis: true */ + var index = compIndex.call(this, schema, root, baseId); + if (index >= 0) return { index: index, compiling: true }; + index = this._compilations.length; + this._compilations[index] = { + schema: schema, + root: root, + baseId: baseId + }; + return { index: index, compiling: false }; +} + + +/** + * Removes the schema from the currently compiled list + * @this Ajv + * @param {Object} schema schema to compile + * @param {Object} root root object + * @param {String} baseId base schema ID + */ +function endCompiling(schema, root, baseId) { + /* jshint validthis: true */ + var i = compIndex.call(this, schema, root, baseId); + if (i >= 0) this._compilations.splice(i, 1); +} + + +/** + * Index of schema compilation in the currently compiled list + * @this Ajv + * @param {Object} schema schema to compile + * @param {Object} root root object + * @param {String} baseId base schema ID + * @return {Integer} 
compilation index + */ +function compIndex(schema, root, baseId) { + /* jshint validthis: true */ + for (var i=0; i= 0xD800 && value <= 0xDBFF && pos < len) { + // high surrogate, and there is a next character + value = str.charCodeAt(pos); + if ((value & 0xFC00) == 0xDC00) pos++; // low surrogate + } + } + return length; +}; diff --git a/node_modules/ajv/lib/compile/util.js b/node_modules/ajv/lib/compile/util.js new file mode 100644 index 0000000..0efa001 --- /dev/null +++ b/node_modules/ajv/lib/compile/util.js @@ -0,0 +1,274 @@ +'use strict'; + + +module.exports = { + copy: copy, + checkDataType: checkDataType, + checkDataTypes: checkDataTypes, + coerceToTypes: coerceToTypes, + toHash: toHash, + getProperty: getProperty, + escapeQuotes: escapeQuotes, + equal: require('fast-deep-equal'), + ucs2length: require('./ucs2length'), + varOccurences: varOccurences, + varReplace: varReplace, + cleanUpCode: cleanUpCode, + finalCleanUpCode: finalCleanUpCode, + schemaHasRules: schemaHasRules, + schemaHasRulesExcept: schemaHasRulesExcept, + schemaUnknownRules: schemaUnknownRules, + toQuotedString: toQuotedString, + getPathExpr: getPathExpr, + getPath: getPath, + getData: getData, + unescapeFragment: unescapeFragment, + unescapeJsonPointer: unescapeJsonPointer, + escapeFragment: escapeFragment, + escapeJsonPointer: escapeJsonPointer +}; + + +function copy(o, to) { + to = to || {}; + for (var key in o) to[key] = o[key]; + return to; +} + + +function checkDataType(dataType, data, negate) { + var EQUAL = negate ? ' !== ' : ' === ' + , AND = negate ? ' || ' : ' && ' + , OK = negate ? '!' : '' + , NOT = negate ? '' : '!'; + switch (dataType) { + case 'null': return data + EQUAL + 'null'; + case 'array': return OK + 'Array.isArray(' + data + ')'; + case 'object': return '(' + OK + data + AND + + 'typeof ' + data + EQUAL + '"object"' + AND + + NOT + 'Array.isArray(' + data + '))'; + case 'integer': return '(typeof ' + data + EQUAL + '"number"' + AND + + NOT + '(' + data + ' % 1)' + + AND + data + EQUAL + data + ')'; + default: return 'typeof ' + data + EQUAL + '"' + dataType + '"'; + } +} + + +function checkDataTypes(dataTypes, data) { + switch (dataTypes.length) { + case 1: return checkDataType(dataTypes[0], data, true); + default: + var code = ''; + var types = toHash(dataTypes); + if (types.array && types.object) { + code = types.null ? '(': '(!' + data + ' || '; + code += 'typeof ' + data + ' !== "object")'; + delete types.null; + delete types.array; + delete types.object; + } + if (types.number) delete types.integer; + for (var t in types) + code += (code ? ' && ' : '' ) + checkDataType(t, data, true); + + return code; + } +} + + +var COERCE_TO_TYPES = toHash([ 'string', 'number', 'integer', 'boolean', 'null' ]); +function coerceToTypes(optionCoerceTypes, dataTypes) { + if (Array.isArray(dataTypes)) { + var types = []; + for (var i=0; i= lvl) throw new Error('Cannot access property/index ' + up + ' levels up, current level is ' + lvl); + return paths[lvl - up]; + } + + if (up > lvl) throw new Error('Cannot access data ' + up + ' levels up, current level is ' + lvl); + data = 'data' + ((lvl - up) || ''); + if (!jsonPointer) return data; + } + + var expr = data; + var segments = jsonPointer.split('/'); + for (var i=0; i' + , $notOp = $isMax ? '>' : '<' + , $errorKeyword = undefined; +}} + +{{? 
$isDataExcl }} + {{ + var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr) + , $exclusive = 'exclusive' + $lvl + , $exclType = 'exclType' + $lvl + , $exclIsNumber = 'exclIsNumber' + $lvl + , $opExpr = 'op' + $lvl + , $opStr = '\' + ' + $opExpr + ' + \''; + }} + var schemaExcl{{=$lvl}} = {{=$schemaValueExcl}}; + {{ $schemaValueExcl = 'schemaExcl' + $lvl; }} + + var {{=$exclusive}}; + var {{=$exclType}} = typeof {{=$schemaValueExcl}}; + if ({{=$exclType}} != 'boolean' && {{=$exclType}} != 'undefined' && {{=$exclType}} != 'number') { + {{ var $errorKeyword = $exclusiveKeyword; }} + {{# def.error:'_exclusiveLimit' }} + } else if ({{# def.$dataNotType:'number' }} + {{=$exclType}} == 'number' + ? ( + ({{=$exclusive}} = {{=$schemaValue}} === undefined || {{=$schemaValueExcl}} {{=$op}}= {{=$schemaValue}}) + ? {{=$data}} {{=$notOp}}= {{=$schemaValueExcl}} + : {{=$data}} {{=$notOp}} {{=$schemaValue}} + ) + : ( + ({{=$exclusive}} = {{=$schemaValueExcl}} === true) + ? {{=$data}} {{=$notOp}}= {{=$schemaValue}} + : {{=$data}} {{=$notOp}} {{=$schemaValue}} + ) + || {{=$data}} !== {{=$data}}) { + var op{{=$lvl}} = {{=$exclusive}} ? '{{=$op}}' : '{{=$op}}='; + {{ + if ($schema === undefined) { + $errorKeyword = $exclusiveKeyword; + $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword; + $schemaValue = $schemaValueExcl; + $isData = $isDataExcl; + } + }} +{{??}} + {{ + var $exclIsNumber = typeof $schemaExcl == 'number' + , $opStr = $op; /*used in error*/ + }} + + {{? $exclIsNumber && $isData }} + {{ var $opExpr = '\'' + $opStr + '\''; /*used in error*/ }} + if ({{# def.$dataNotType:'number' }} + ( {{=$schemaValue}} === undefined + || {{=$schemaExcl}} {{=$op}}= {{=$schemaValue}} + ? {{=$data}} {{=$notOp}}= {{=$schemaExcl}} + : {{=$data}} {{=$notOp}} {{=$schemaValue}} ) + || {{=$data}} !== {{=$data}}) { + {{??}} + {{ + if ($exclIsNumber && $schema === undefined) { + {{# def.setExclusiveLimit }} + $schemaValue = $schemaExcl; + $notOp += '='; + } else { + if ($exclIsNumber) + $schemaValue = Math[$isMax ? 'min' : 'max']($schemaExcl, $schema); + + if ($schemaExcl === ($exclIsNumber ? $schemaValue : true)) { + {{# def.setExclusiveLimit }} + $notOp += '='; + } else { + $exclusive = false; + $opStr += '='; + } + } + + var $opExpr = '\'' + $opStr + '\''; /*used in error*/ + }} + + if ({{# def.$dataNotType:'number' }} + {{=$data}} {{=$notOp}} {{=$schemaValue}} + || {{=$data}} !== {{=$data}}) { + {{?}} +{{?}} + {{ $errorKeyword = $errorKeyword || $keyword; }} + {{# def.error:'_limit' }} + } {{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/_limitItems.jst b/node_modules/ajv/lib/dot/_limitItems.jst new file mode 100644 index 0000000..a3e078e --- /dev/null +++ b/node_modules/ajv/lib/dot/_limitItems.jst @@ -0,0 +1,10 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ var $op = $keyword == 'maxItems' ? '>' : '<'; }} +if ({{# def.$dataNotType:'number' }} {{=$data}}.length {{=$op}} {{=$schemaValue}}) { + {{ var $errorKeyword = $keyword; }} + {{# def.error:'_limitItems' }} +} {{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/_limitLength.jst b/node_modules/ajv/lib/dot/_limitLength.jst new file mode 100644 index 0000000..cfc8dbb --- /dev/null +++ b/node_modules/ajv/lib/dot/_limitLength.jst @@ -0,0 +1,10 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ var $op = $keyword == 'maxLength' ? 
'>' : '<'; }} +if ({{# def.$dataNotType:'number' }} {{# def.strLength }} {{=$op}} {{=$schemaValue}}) { + {{ var $errorKeyword = $keyword; }} + {{# def.error:'_limitLength' }} +} {{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/_limitProperties.jst b/node_modules/ajv/lib/dot/_limitProperties.jst new file mode 100644 index 0000000..da7ea77 --- /dev/null +++ b/node_modules/ajv/lib/dot/_limitProperties.jst @@ -0,0 +1,10 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ var $op = $keyword == 'maxProperties' ? '>' : '<'; }} +if ({{# def.$dataNotType:'number' }} Object.keys({{=$data}}).length {{=$op}} {{=$schemaValue}}) { + {{ var $errorKeyword = $keyword; }} + {{# def.error:'_limitProperties' }} +} {{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/allOf.jst b/node_modules/ajv/lib/dot/allOf.jst new file mode 100644 index 0000000..4c28363 --- /dev/null +++ b/node_modules/ajv/lib/dot/allOf.jst @@ -0,0 +1,34 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + +{{ + var $currentBaseId = $it.baseId + , $allSchemasEmpty = true; +}} + +{{~ $schema:$sch:$i }} + {{? {{# def.nonEmptySchema:$sch }} }} + {{ + $allSchemasEmpty = false; + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + }} + + {{# def.insertSubschemaCode }} + + {{# def.ifResultValid }} + {{?}} +{{~}} + +{{? $breakOnError }} + {{? $allSchemasEmpty }} + if (true) { + {{??}} + {{= $closingBraces.slice(0,-1) }} + {{?}} +{{?}} + +{{# def.cleanUp }} diff --git a/node_modules/ajv/lib/dot/anyOf.jst b/node_modules/ajv/lib/dot/anyOf.jst new file mode 100644 index 0000000..086cf2b --- /dev/null +++ b/node_modules/ajv/lib/dot/anyOf.jst @@ -0,0 +1,48 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + +{{ + var $noEmptySchema = $schema.every(function($sch) { + return {{# def.nonEmptySchema:$sch }}; + }); +}} +{{? $noEmptySchema }} + {{ var $currentBaseId = $it.baseId; }} + var {{=$errs}} = errors; + var {{=$valid}} = false; + + {{# def.setCompositeRule }} + + {{~ $schema:$sch:$i }} + {{ + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + }} + + {{# def.insertSubschemaCode }} + + {{=$valid}} = {{=$valid}} || {{=$nextValid}}; + + if (!{{=$valid}}) { + {{ $closingBraces += '}'; }} + {{~}} + + {{# def.resetCompositeRule }} + + {{= $closingBraces }} + + if (!{{=$valid}}) { + {{# def.extraError:'anyOf' }} + } else { + {{# def.resetErrors }} + {{? it.opts.allErrors }} } {{?}} + + {{# def.cleanUp }} +{{??}} + {{? $breakOnError }} + if (true) { + {{?}} +{{?}} diff --git a/node_modules/ajv/lib/dot/coerce.def b/node_modules/ajv/lib/dot/coerce.def new file mode 100644 index 0000000..86e0e18 --- /dev/null +++ b/node_modules/ajv/lib/dot/coerce.def @@ -0,0 +1,61 @@ +{{## def.coerceType: + {{ + var $dataType = 'dataType' + $lvl + , $coerced = 'coerced' + $lvl; + }} + var {{=$dataType}} = typeof {{=$data}}; + {{? it.opts.coerceTypes == 'array'}} + if ({{=$dataType}} == 'object' && Array.isArray({{=$data}})) {{=$dataType}} = 'array'; + {{?}} + + var {{=$coerced}} = undefined; + + {{ var $bracesCoercion = ''; }} + {{~ $coerceToTypes:$type:$i }} + {{? $i }} + if ({{=$coerced}} === undefined) { + {{ $bracesCoercion += '}'; }} + {{?}} + + {{? 
it.opts.coerceTypes == 'array' && $type != 'array' }} + if ({{=$dataType}} == 'array' && {{=$data}}.length == 1) { + {{=$coerced}} = {{=$data}} = {{=$data}}[0]; + {{=$dataType}} = typeof {{=$data}}; + /*if ({{=$dataType}} == 'object' && Array.isArray({{=$data}})) {{=$dataType}} = 'array';*/ + } + {{?}} + + {{? $type == 'string' }} + if ({{=$dataType}} == 'number' || {{=$dataType}} == 'boolean') + {{=$coerced}} = '' + {{=$data}}; + else if ({{=$data}} === null) {{=$coerced}} = ''; + {{?? $type == 'number' || $type == 'integer' }} + if ({{=$dataType}} == 'boolean' || {{=$data}} === null + || ({{=$dataType}} == 'string' && {{=$data}} && {{=$data}} == +{{=$data}} + {{? $type == 'integer' }} && !({{=$data}} % 1){{?}})) + {{=$coerced}} = +{{=$data}}; + {{?? $type == 'boolean' }} + if ({{=$data}} === 'false' || {{=$data}} === 0 || {{=$data}} === null) + {{=$coerced}} = false; + else if ({{=$data}} === 'true' || {{=$data}} === 1) + {{=$coerced}} = true; + {{?? $type == 'null' }} + if ({{=$data}} === '' || {{=$data}} === 0 || {{=$data}} === false) + {{=$coerced}} = null; + {{?? it.opts.coerceTypes == 'array' && $type == 'array' }} + if ({{=$dataType}} == 'string' || {{=$dataType}} == 'number' || {{=$dataType}} == 'boolean' || {{=$data}} == null) + {{=$coerced}} = [{{=$data}}]; + {{?}} + {{~}} + + {{= $bracesCoercion }} + + if ({{=$coerced}} === undefined) { + {{# def.error:'type' }} + } else { + {{# def.setParentData }} + {{=$data}} = {{=$coerced}}; + {{? !$dataLvl }}if ({{=$parentData}} !== undefined){{?}} + {{=$parentData}}[{{=$parentDataProperty}}] = {{=$coerced}}; + } +#}} diff --git a/node_modules/ajv/lib/dot/comment.jst b/node_modules/ajv/lib/dot/comment.jst new file mode 100644 index 0000000..f959150 --- /dev/null +++ b/node_modules/ajv/lib/dot/comment.jst @@ -0,0 +1,9 @@ +{{# def.definitions }} +{{# def.setupKeyword }} + +{{ var $comment = it.util.toQuotedString($schema); }} +{{? it.opts.$comment === true }} + console.log({{=$comment}}); +{{?? typeof it.opts.$comment == 'function' }} + self._opts.$comment({{=$comment}}, {{=it.util.toQuotedString($errSchemaPath)}}, validate.root.schema); +{{?}} diff --git a/node_modules/ajv/lib/dot/const.jst b/node_modules/ajv/lib/dot/const.jst new file mode 100644 index 0000000..2aa2298 --- /dev/null +++ b/node_modules/ajv/lib/dot/const.jst @@ -0,0 +1,11 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{? !$isData }} + var schema{{=$lvl}} = validate.schema{{=$schemaPath}}; +{{?}} +var {{=$valid}} = equal({{=$data}}, schema{{=$lvl}}); +{{# def.checkError:'const' }} +{{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/contains.jst b/node_modules/ajv/lib/dot/contains.jst new file mode 100644 index 0000000..925d2c8 --- /dev/null +++ b/node_modules/ajv/lib/dot/contains.jst @@ -0,0 +1,57 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + + +{{ + var $idx = 'i' + $lvl + , $dataNxt = $it.dataLevel = it.dataLevel + 1 + , $nextData = 'data' + $dataNxt + , $currentBaseId = it.baseId + , $nonEmptySchema = {{# def.nonEmptySchema:$schema }}; +}} + +var {{=$errs}} = errors; +var {{=$valid}}; + +{{? 
$nonEmptySchema }} + {{# def.setCompositeRule }} + + {{ + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + }} + + var {{=$nextValid}} = false; + + for (var {{=$idx}} = 0; {{=$idx}} < {{=$data}}.length; {{=$idx}}++) { + {{ + $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true); + var $passData = $data + '[' + $idx + ']'; + $it.dataPathArr[$dataNxt] = $idx; + }} + + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} + + if ({{=$nextValid}}) break; + } + + {{# def.resetCompositeRule }} + {{= $closingBraces }} + + if (!{{=$nextValid}}) { +{{??}} + if ({{=$data}}.length == 0) { +{{?}} + + {{# def.error:'contains' }} + } else { + {{? $nonEmptySchema }} + {{# def.resetErrors }} + {{?}} + {{? it.opts.allErrors }} } {{?}} + +{{# def.cleanUp }} diff --git a/node_modules/ajv/lib/dot/custom.jst b/node_modules/ajv/lib/dot/custom.jst new file mode 100644 index 0000000..d30588f --- /dev/null +++ b/node_modules/ajv/lib/dot/custom.jst @@ -0,0 +1,191 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ + var $rule = this + , $definition = 'definition' + $lvl + , $rDef = $rule.definition + , $closingBraces = ''; + var $validate = $rDef.validate; + var $compile, $inline, $macro, $ruleValidate, $validateCode; +}} + +{{? $isData && $rDef.$data }} + {{ + $validateCode = 'keywordValidate' + $lvl; + var $validateSchema = $rDef.validateSchema; + }} + var {{=$definition}} = RULES.custom['{{=$keyword}}'].definition; + var {{=$validateCode}} = {{=$definition}}.validate; +{{??}} + {{ + $ruleValidate = it.useCustomRule($rule, $schema, it.schema, it); + if (!$ruleValidate) return; + $schemaValue = 'validate.schema' + $schemaPath; + $validateCode = $ruleValidate.code; + $compile = $rDef.compile; + $inline = $rDef.inline; + $macro = $rDef.macro; + }} +{{?}} + +{{ + var $ruleErrs = $validateCode + '.errors' + , $i = 'i' + $lvl + , $ruleErr = 'ruleErr' + $lvl + , $asyncKeyword = $rDef.async; + + if ($asyncKeyword && !it.async) + throw new Error('async keyword in sync schema'); +}} + + +{{? !($inline || $macro) }}{{=$ruleErrs}} = null;{{?}} +var {{=$errs}} = errors; +var {{=$valid}}; + +{{## def.callRuleValidate: + {{=$validateCode}}.call( + {{? it.opts.passContext }}this{{??}}self{{?}} + {{? $compile || $rDef.schema === false }} + , {{=$data}} + {{??}} + , {{=$schemaValue}} + , {{=$data}} + , validate.schema{{=it.schemaPath}} + {{?}} + , {{# def.dataPath }} + {{# def.passParentData }} + , rootData + ) +#}} + +{{## def.extendErrors:_inline: + for (var {{=$i}}={{=$errs}}; {{=$i}} 0 + : it.util.schemaHasRules(_schema, it.RULES.all)) +#}} + + +{{## def.strLength: + {{? it.opts.unicode === false }} + {{=$data}}.length + {{??}} + ucs2length({{=$data}}) + {{?}} +#}} + + +{{## def.willOptimize: + it.util.varOccurences($code, $nextData) < 2 +#}} + + +{{## def.generateSubschemaCode: + {{ + var $code = it.validate($it); + $it.baseId = $currentBaseId; + }} +#}} + + +{{## def.insertSubschemaCode: + {{= it.validate($it) }} + {{ $it.baseId = $currentBaseId; }} +#}} + + +{{## def._optimizeValidate: + it.util.varReplace($code, $nextData, $passData) +#}} + + +{{## def.optimizeValidate: + {{? 
{{# def.willOptimize}} }} + {{= {{# def._optimizeValidate }} }} + {{??}} + var {{=$nextData}} = {{=$passData}}; + {{= $code }} + {{?}} +#}} + + +{{## def.cleanUp: {{ out = it.util.cleanUpCode(out); }} #}} + + +{{## def.finalCleanUp: {{ out = it.util.finalCleanUpCode(out, $async); }} #}} + + +{{## def.$data: + {{ + var $isData = it.opts.$data && $schema && $schema.$data + , $schemaValue; + }} + {{? $isData }} + var schema{{=$lvl}} = {{= it.util.getData($schema.$data, $dataLvl, it.dataPathArr) }}; + {{ $schemaValue = 'schema' + $lvl; }} + {{??}} + {{ $schemaValue = $schema; }} + {{?}} +#}} + + +{{## def.$dataNotType:_type: + {{?$isData}} ({{=$schemaValue}} !== undefined && typeof {{=$schemaValue}} != _type) || {{?}} +#}} + + +{{## def.check$dataIsArray: + if (schema{{=$lvl}} === undefined) {{=$valid}} = true; + else if (!Array.isArray(schema{{=$lvl}})) {{=$valid}} = false; + else { +#}} + + +{{## def.beginDefOut: + {{ + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; + }} +#}} + + +{{## def.storeDefOut:_variable: + {{ + var _variable = out; + out = $$outStack.pop(); + }} +#}} + + +{{## def.dataPath:(dataPath || ''){{? it.errorPath != '""'}} + {{= it.errorPath }}{{?}}#}} + +{{## def.setParentData: + {{ + var $parentData = $dataLvl ? 'data' + (($dataLvl-1)||'') : 'parentData' + , $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty'; + }} +#}} + +{{## def.passParentData: + {{# def.setParentData }} + , {{= $parentData }} + , {{= $parentDataProperty }} +#}} + + +{{## def.iterateProperties: + {{? $ownProperties }} + {{=$dataProperties}} = {{=$dataProperties}} || Object.keys({{=$data}}); + for (var {{=$idx}}=0; {{=$idx}}<{{=$dataProperties}}.length; {{=$idx}}++) { + var {{=$key}} = {{=$dataProperties}}[{{=$idx}}]; + {{??}} + for (var {{=$key}} in {{=$data}}) { + {{?}} +#}} + + +{{## def.noPropertyInData: + {{=$useData}} === undefined + {{? $ownProperties }} + || !{{# def.isOwnProperty }} + {{?}} +#}} + + +{{## def.isOwnProperty: + Object.prototype.hasOwnProperty.call({{=$data}}, '{{=it.util.escapeQuotes($propertyKey)}}') +#}} diff --git a/node_modules/ajv/lib/dot/dependencies.jst b/node_modules/ajv/lib/dot/dependencies.jst new file mode 100644 index 0000000..c41f334 --- /dev/null +++ b/node_modules/ajv/lib/dot/dependencies.jst @@ -0,0 +1,80 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.missing }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + + +{{## def.propertyInData: + {{=$data}}{{= it.util.getProperty($property) }} !== undefined + {{? $ownProperties }} + && Object.prototype.hasOwnProperty.call({{=$data}}, '{{=it.util.escapeQuotes($property)}}') + {{?}} +#}} + + +{{ + var $schemaDeps = {} + , $propertyDeps = {} + , $ownProperties = it.opts.ownProperties; + + for ($property in $schema) { + var $sch = $schema[$property]; + var $deps = Array.isArray($sch) ? $propertyDeps : $schemaDeps; + $deps[$property] = $sch; + } +}} + +var {{=$errs}} = errors; + +{{ var $currentErrorPath = it.errorPath; }} + +var missing{{=$lvl}}; +{{ for (var $property in $propertyDeps) { }} + {{ $deps = $propertyDeps[$property]; }} + {{? $deps.length }} + if ({{# def.propertyInData }} + {{? 
$breakOnError }} + && ({{# def.checkMissingProperty:$deps }})) { + {{# def.errorMissingProperty:'dependencies' }} + {{??}} + ) { + {{~ $deps:$propertyKey }} + {{# def.allErrorsMissingProperty:'dependencies' }} + {{~}} + {{?}} + } {{# def.elseIfValid }} + {{?}} +{{ } }} + +{{ + it.errorPath = $currentErrorPath; + var $currentBaseId = $it.baseId; +}} + + +{{ for (var $property in $schemaDeps) { }} + {{ var $sch = $schemaDeps[$property]; }} + {{? {{# def.nonEmptySchema:$sch }} }} + {{=$nextValid}} = true; + + if ({{# def.propertyInData }}) { + {{ + $it.schema = $sch; + $it.schemaPath = $schemaPath + it.util.getProperty($property); + $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($property); + }} + + {{# def.insertSubschemaCode }} + } + + {{# def.ifResultValid }} + {{?}} +{{ } }} + +{{? $breakOnError }} + {{= $closingBraces }} + if ({{=$errs}} == errors) { +{{?}} + +{{# def.cleanUp }} diff --git a/node_modules/ajv/lib/dot/enum.jst b/node_modules/ajv/lib/dot/enum.jst new file mode 100644 index 0000000..357c2e8 --- /dev/null +++ b/node_modules/ajv/lib/dot/enum.jst @@ -0,0 +1,30 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ + var $i = 'i' + $lvl + , $vSchema = 'schema' + $lvl; +}} + +{{? !$isData }} + var {{=$vSchema}} = validate.schema{{=$schemaPath}}; +{{?}} +var {{=$valid}}; + +{{?$isData}}{{# def.check$dataIsArray }}{{?}} + +{{=$valid}} = false; + +for (var {{=$i}}=0; {{=$i}}<{{=$vSchema}}.length; {{=$i}}++) + if (equal({{=$data}}, {{=$vSchema}}[{{=$i}}])) { + {{=$valid}} = true; + break; + } + +{{? $isData }} } {{?}} + +{{# def.checkError:'enum' }} + +{{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/errors.def b/node_modules/ajv/lib/dot/errors.def new file mode 100644 index 0000000..5c5752c --- /dev/null +++ b/node_modules/ajv/lib/dot/errors.def @@ -0,0 +1,194 @@ +{{# def.definitions }} + +{{## def._error:_rule: + {{ 'istanbul ignore else'; }} + {{? it.createErrors !== false }} + { + keyword: '{{= $errorKeyword || _rule }}' + , dataPath: (dataPath || '') + {{= it.errorPath }} + , schemaPath: {{=it.util.toQuotedString($errSchemaPath)}} + , params: {{# def._errorParams[_rule] }} + {{? it.opts.messages !== false }} + , message: {{# def._errorMessages[_rule] }} + {{?}} + {{? it.opts.verbose }} + , schema: {{# def._errorSchemas[_rule] }} + , parentSchema: validate.schema{{=it.schemaPath}} + , data: {{=$data}} + {{?}} + } + {{??}} + {} + {{?}} +#}} + + +{{## def._addError:_rule: + if (vErrors === null) vErrors = [err]; + else vErrors.push(err); + errors++; +#}} + + +{{## def.addError:_rule: + var err = {{# def._error:_rule }}; + {{# def._addError:_rule }} +#}} + + +{{## def.error:_rule: + {{# def.beginDefOut}} + {{# def._error:_rule }} + {{# def.storeDefOut:__err }} + + {{? !it.compositeRule && $breakOnError }} + {{ 'istanbul ignore if'; }} + {{? it.async }} + throw new ValidationError([{{=__err}}]); + {{??}} + validate.errors = [{{=__err}}]; + return false; + {{?}} + {{??}} + var err = {{=__err}}; + {{# def._addError:_rule }} + {{?}} +#}} + + +{{## def.extraError:_rule: + {{# def.addError:_rule}} + {{? !it.compositeRule && $breakOnError }} + {{ 'istanbul ignore if'; }} + {{? 
it.async }} + throw new ValidationError(vErrors); + {{??}} + validate.errors = vErrors; + return false; + {{?}} + {{?}} +#}} + + +{{## def.checkError:_rule: + if (!{{=$valid}}) { + {{# def.error:_rule }} + } +#}} + + +{{## def.resetErrors: + errors = {{=$errs}}; + if (vErrors !== null) { + if ({{=$errs}}) vErrors.length = {{=$errs}}; + else vErrors = null; + } +#}} + + +{{## def.concatSchema:{{?$isData}}' + {{=$schemaValue}} + '{{??}}{{=$schema}}{{?}}#}} +{{## def.appendSchema:{{?$isData}}' + {{=$schemaValue}}{{??}}{{=$schemaValue}}'{{?}}#}} +{{## def.concatSchemaEQ:{{?$isData}}' + {{=$schemaValue}} + '{{??}}{{=it.util.escapeQuotes($schema)}}{{?}}#}} + +{{## def._errorMessages = { + 'false schema': "'boolean schema is false'", + $ref: "'can\\\'t resolve reference {{=it.util.escapeQuotes($schema)}}'", + additionalItems: "'should NOT have more than {{=$schema.length}} items'", + additionalProperties: "'{{? it.opts._errorDataPathProperty }}is an invalid additional property{{??}}should NOT have additional properties{{?}}'", + anyOf: "'should match some schema in anyOf'", + const: "'should be equal to constant'", + contains: "'should contain a valid item'", + dependencies: "'should have {{? $deps.length == 1 }}property {{= it.util.escapeQuotes($deps[0]) }}{{??}}properties {{= it.util.escapeQuotes($deps.join(\", \")) }}{{?}} when property {{= it.util.escapeQuotes($property) }} is present'", + 'enum': "'should be equal to one of the allowed values'", + format: "'should match format \"{{#def.concatSchemaEQ}}\"'", + 'if': "'should match \"' + {{=$ifClause}} + '\" schema'", + _limit: "'should be {{=$opStr}} {{#def.appendSchema}}", + _exclusiveLimit: "'{{=$exclusiveKeyword}} should be boolean'", + _limitItems: "'should NOT have {{?$keyword=='maxItems'}}more{{??}}fewer{{?}} than {{#def.concatSchema}} items'", + _limitLength: "'should NOT be {{?$keyword=='maxLength'}}longer{{??}}shorter{{?}} than {{#def.concatSchema}} characters'", + _limitProperties:"'should NOT have {{?$keyword=='maxProperties'}}more{{??}}fewer{{?}} than {{#def.concatSchema}} properties'", + multipleOf: "'should be multiple of {{#def.appendSchema}}", + not: "'should NOT be valid'", + oneOf: "'should match exactly one schema in oneOf'", + pattern: "'should match pattern \"{{#def.concatSchemaEQ}}\"'", + propertyNames: "'property name \\'{{=$invalidName}}\\' is invalid'", + required: "'{{? it.opts._errorDataPathProperty }}is a required property{{??}}should have required property \\'{{=$missingProperty}}\\'{{?}}'", + type: "'should be {{? 
$typeIsArray }}{{= $typeSchema.join(\",\") }}{{??}}{{=$typeSchema}}{{?}}'", + uniqueItems: "'should NOT have duplicate items (items ## ' + j + ' and ' + i + ' are identical)'", + custom: "'should pass \"{{=$rule.keyword}}\" keyword validation'", + patternRequired: "'should have property matching pattern \\'{{=$missingPattern}}\\''", + switch: "'should pass \"switch\" keyword validation'", + _formatLimit: "'should be {{=$opStr}} \"{{#def.concatSchemaEQ}}\"'", + _formatExclusiveLimit: "'{{=$exclusiveKeyword}} should be boolean'" +} #}} + + +{{## def.schemaRefOrVal: {{?$isData}}validate.schema{{=$schemaPath}}{{??}}{{=$schema}}{{?}} #}} +{{## def.schemaRefOrQS: {{?$isData}}validate.schema{{=$schemaPath}}{{??}}{{=it.util.toQuotedString($schema)}}{{?}} #}} + +{{## def._errorSchemas = { + 'false schema': "false", + $ref: "{{=it.util.toQuotedString($schema)}}", + additionalItems: "false", + additionalProperties: "false", + anyOf: "validate.schema{{=$schemaPath}}", + const: "validate.schema{{=$schemaPath}}", + contains: "validate.schema{{=$schemaPath}}", + dependencies: "validate.schema{{=$schemaPath}}", + 'enum': "validate.schema{{=$schemaPath}}", + format: "{{#def.schemaRefOrQS}}", + 'if': "validate.schema{{=$schemaPath}}", + _limit: "{{#def.schemaRefOrVal}}", + _exclusiveLimit: "validate.schema{{=$schemaPath}}", + _limitItems: "{{#def.schemaRefOrVal}}", + _limitLength: "{{#def.schemaRefOrVal}}", + _limitProperties:"{{#def.schemaRefOrVal}}", + multipleOf: "{{#def.schemaRefOrVal}}", + not: "validate.schema{{=$schemaPath}}", + oneOf: "validate.schema{{=$schemaPath}}", + pattern: "{{#def.schemaRefOrQS}}", + propertyNames: "validate.schema{{=$schemaPath}}", + required: "validate.schema{{=$schemaPath}}", + type: "validate.schema{{=$schemaPath}}", + uniqueItems: "{{#def.schemaRefOrVal}}", + custom: "validate.schema{{=$schemaPath}}", + patternRequired: "validate.schema{{=$schemaPath}}", + switch: "validate.schema{{=$schemaPath}}", + _formatLimit: "{{#def.schemaRefOrQS}}", + _formatExclusiveLimit: "validate.schema{{=$schemaPath}}" +} #}} + + +{{## def.schemaValueQS: {{?$isData}}{{=$schemaValue}}{{??}}{{=it.util.toQuotedString($schema)}}{{?}} #}} + +{{## def._errorParams = { + 'false schema': "{}", + $ref: "{ ref: '{{=it.util.escapeQuotes($schema)}}' }", + additionalItems: "{ limit: {{=$schema.length}} }", + additionalProperties: "{ additionalProperty: '{{=$additionalProperty}}' }", + anyOf: "{}", + const: "{ allowedValue: schema{{=$lvl}} }", + contains: "{}", + dependencies: "{ property: '{{= it.util.escapeQuotes($property) }}', missingProperty: '{{=$missingProperty}}', depsCount: {{=$deps.length}}, deps: '{{= it.util.escapeQuotes($deps.length==1 ? $deps[0] : $deps.join(\", \")) }}' }", + 'enum': "{ allowedValues: schema{{=$lvl}} }", + format: "{ format: {{#def.schemaValueQS}} }", + 'if': "{ failingKeyword: {{=$ifClause}} }", + _limit: "{ comparison: {{=$opExpr}}, limit: {{=$schemaValue}}, exclusive: {{=$exclusive}} }", + _exclusiveLimit: "{}", + _limitItems: "{ limit: {{=$schemaValue}} }", + _limitLength: "{ limit: {{=$schemaValue}} }", + _limitProperties:"{ limit: {{=$schemaValue}} }", + multipleOf: "{ multipleOf: {{=$schemaValue}} }", + not: "{}", + oneOf: "{ passingSchemas: {{=$passingSchemas}} }", + pattern: "{ pattern: {{#def.schemaValueQS}} }", + propertyNames: "{ propertyName: '{{=$invalidName}}' }", + required: "{ missingProperty: '{{=$missingProperty}}' }", + type: "{ type: '{{? 
$typeIsArray }}{{= $typeSchema.join(\",\") }}{{??}}{{=$typeSchema}}{{?}}' }", + uniqueItems: "{ i: i, j: j }", + custom: "{ keyword: '{{=$rule.keyword}}' }", + patternRequired: "{ missingPattern: '{{=$missingPattern}}' }", + switch: "{ caseIndex: {{=$caseIndex}} }", + _formatLimit: "{ comparison: {{=$opExpr}}, limit: {{#def.schemaValueQS}}, exclusive: {{=$exclusive}} }", + _formatExclusiveLimit: "{}" +} #}} diff --git a/node_modules/ajv/lib/dot/format.jst b/node_modules/ajv/lib/dot/format.jst new file mode 100644 index 0000000..37f14da --- /dev/null +++ b/node_modules/ajv/lib/dot/format.jst @@ -0,0 +1,106 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} + +{{## def.skipFormat: + {{? $breakOnError }} if (true) { {{?}} + {{ return out; }} +#}} + +{{? it.opts.format === false }}{{# def.skipFormat }}{{?}} + + +{{# def.$data }} + + +{{## def.$dataCheckFormat: + {{# def.$dataNotType:'string' }} + ({{? $unknownFormats != 'ignore' }} + ({{=$schemaValue}} && !{{=$format}} + {{? $allowUnknown }} + && self._opts.unknownFormats.indexOf({{=$schemaValue}}) == -1 + {{?}}) || + {{?}} + ({{=$format}} && {{=$formatType}} == '{{=$ruleType}}' + && !(typeof {{=$format}} == 'function' + ? {{? it.async}} + (async{{=$lvl}} ? await {{=$format}}({{=$data}}) : {{=$format}}({{=$data}})) + {{??}} + {{=$format}}({{=$data}}) + {{?}} + : {{=$format}}.test({{=$data}})))) +#}} + +{{## def.checkFormat: + {{ + var $formatRef = 'formats' + it.util.getProperty($schema); + if ($isObject) $formatRef += '.validate'; + }} + {{? typeof $format == 'function' }} + {{=$formatRef}}({{=$data}}) + {{??}} + {{=$formatRef}}.test({{=$data}}) + {{?}} +#}} + + +{{ + var $unknownFormats = it.opts.unknownFormats + , $allowUnknown = Array.isArray($unknownFormats); +}} + +{{? $isData }} + {{ + var $format = 'format' + $lvl + , $isObject = 'isObject' + $lvl + , $formatType = 'formatType' + $lvl; + }} + var {{=$format}} = formats[{{=$schemaValue}}]; + var {{=$isObject}} = typeof {{=$format}} == 'object' + && !({{=$format}} instanceof RegExp) + && {{=$format}}.validate; + var {{=$formatType}} = {{=$isObject}} && {{=$format}}.type || 'string'; + if ({{=$isObject}}) { + {{? it.async}} + var async{{=$lvl}} = {{=$format}}.async; + {{?}} + {{=$format}} = {{=$format}}.validate; + } + if ({{# def.$dataCheckFormat }}) { +{{??}} + {{ var $format = it.formats[$schema]; }} + {{? !$format }} + {{? $unknownFormats == 'ignore' }} + {{ it.logger.warn('unknown format "' + $schema + '" ignored in schema at path "' + it.errSchemaPath + '"'); }} + {{# def.skipFormat }} + {{?? $allowUnknown && $unknownFormats.indexOf($schema) >= 0 }} + {{# def.skipFormat }} + {{??}} + {{ throw new Error('unknown format "' + $schema + '" is used in schema at path "' + it.errSchemaPath + '"'); }} + {{?}} + {{?}} + {{ + var $isObject = typeof $format == 'object' + && !($format instanceof RegExp) + && $format.validate; + var $formatType = $isObject && $format.type || 'string'; + if ($isObject) { + var $async = $format.async === true; + $format = $format.validate; + } + }} + {{? $formatType != $ruleType }} + {{# def.skipFormat }} + {{?}} + {{? $async }} + {{ + if (!it.async) throw new Error('async format in sync schema'); + var $formatRef = 'formats' + it.util.getProperty($schema) + '.validate'; + }} + if (!(await {{=$formatRef}}({{=$data}}))) { + {{??}} + if (!{{# def.checkFormat }}) { + {{?}} +{{?}} + {{# def.error:'format' }} + } {{? 
$breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/if.jst b/node_modules/ajv/lib/dot/if.jst new file mode 100644 index 0000000..7ccc9b7 --- /dev/null +++ b/node_modules/ajv/lib/dot/if.jst @@ -0,0 +1,75 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + + +{{## def.validateIfClause:_clause: + {{ + $it.schema = it.schema['_clause']; + $it.schemaPath = it.schemaPath + '._clause'; + $it.errSchemaPath = it.errSchemaPath + '/_clause'; + }} + {{# def.insertSubschemaCode }} + {{=$valid}} = {{=$nextValid}}; + {{? $thenPresent && $elsePresent }} + {{ $ifClause = 'ifClause' + $lvl; }} + var {{=$ifClause}} = '_clause'; + {{??}} + {{ $ifClause = '\'_clause\''; }} + {{?}} +#}} + +{{ + var $thenSch = it.schema['then'] + , $elseSch = it.schema['else'] + , $thenPresent = $thenSch !== undefined && {{# def.nonEmptySchema:$thenSch }} + , $elsePresent = $elseSch !== undefined && {{# def.nonEmptySchema:$elseSch }} + , $currentBaseId = $it.baseId; +}} + +{{? $thenPresent || $elsePresent }} + {{ + var $ifClause; + $it.createErrors = false; + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + }} + var {{=$errs}} = errors; + var {{=$valid}} = true; + + {{# def.setCompositeRule }} + {{# def.insertSubschemaCode }} + {{ $it.createErrors = true; }} + {{# def.resetErrors }} + {{# def.resetCompositeRule }} + + {{? $thenPresent }} + if ({{=$nextValid}}) { + {{# def.validateIfClause:then }} + } + {{? $elsePresent }} + else { + {{?}} + {{??}} + if (!{{=$nextValid}}) { + {{?}} + + {{? $elsePresent }} + {{# def.validateIfClause:else }} + } + {{?}} + + if (!{{=$valid}}) { + {{# def.extraError:'if' }} + } + {{? $breakOnError }} else { {{?}} + + {{# def.cleanUp }} +{{??}} + {{? $breakOnError }} + if (true) { + {{?}} +{{?}} + diff --git a/node_modules/ajv/lib/dot/items.jst b/node_modules/ajv/lib/dot/items.jst new file mode 100644 index 0000000..8c0f5ac --- /dev/null +++ b/node_modules/ajv/lib/dot/items.jst @@ -0,0 +1,100 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + + +{{## def.validateItems:startFrom: + for (var {{=$idx}} = {{=startFrom}}; {{=$idx}} < {{=$data}}.length; {{=$idx}}++) { + {{ + $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true); + var $passData = $data + '[' + $idx + ']'; + $it.dataPathArr[$dataNxt] = $idx; + }} + + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} + + {{? $breakOnError }} + if (!{{=$nextValid}}) break; + {{?}} + } +#}} + +{{ + var $idx = 'i' + $lvl + , $dataNxt = $it.dataLevel = it.dataLevel + 1 + , $nextData = 'data' + $dataNxt + , $currentBaseId = it.baseId; +}} + +var {{=$errs}} = errors; +var {{=$valid}}; + +{{? Array.isArray($schema) }} + {{ /* 'items' is an array of schemas */}} + {{ var $additionalItems = it.schema.additionalItems; }} + {{? $additionalItems === false }} + {{=$valid}} = {{=$data}}.length <= {{= $schema.length }}; + {{ + var $currErrSchemaPath = $errSchemaPath; + $errSchemaPath = it.errSchemaPath + '/additionalItems'; + }} + {{# def.checkError:'additionalItems' }} + {{ $errSchemaPath = $currErrSchemaPath; }} + {{# def.elseIfValid}} + {{?}} + + {{~ $schema:$sch:$i }} + {{? 
{{# def.nonEmptySchema:$sch }} }} + {{=$nextValid}} = true; + + if ({{=$data}}.length > {{=$i}}) { + {{ + var $passData = $data + '[' + $i + ']'; + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + $it.errorPath = it.util.getPathExpr(it.errorPath, $i, it.opts.jsonPointers, true); + $it.dataPathArr[$dataNxt] = $i; + }} + + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} + } + + {{# def.ifResultValid }} + {{?}} + {{~}} + + {{? typeof $additionalItems == 'object' && {{# def.nonEmptySchema:$additionalItems }} }} + {{ + $it.schema = $additionalItems; + $it.schemaPath = it.schemaPath + '.additionalItems'; + $it.errSchemaPath = it.errSchemaPath + '/additionalItems'; + }} + {{=$nextValid}} = true; + + if ({{=$data}}.length > {{= $schema.length }}) { + {{# def.validateItems: $schema.length }} + } + + {{# def.ifResultValid }} + {{?}} + +{{?? {{# def.nonEmptySchema:$schema }} }} + {{ /* 'items' is a single schema */}} + {{ + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + }} + {{# def.validateItems: 0 }} +{{?}} + +{{? $breakOnError }} + {{= $closingBraces }} + if ({{=$errs}} == errors) { +{{?}} + +{{# def.cleanUp }} diff --git a/node_modules/ajv/lib/dot/missing.def b/node_modules/ajv/lib/dot/missing.def new file mode 100644 index 0000000..a73b9f9 --- /dev/null +++ b/node_modules/ajv/lib/dot/missing.def @@ -0,0 +1,39 @@ +{{## def.checkMissingProperty:_properties: + {{~ _properties:$propertyKey:$i }} + {{?$i}} || {{?}} + {{ + var $prop = it.util.getProperty($propertyKey) + , $useData = $data + $prop; + }} + ( ({{# def.noPropertyInData }}) && (missing{{=$lvl}} = {{= it.util.toQuotedString(it.opts.jsonPointers ? $propertyKey : $prop) }}) ) + {{~}} +#}} + + +{{## def.errorMissingProperty:_error: + {{ + var $propertyPath = 'missing' + $lvl + , $missingProperty = '\' + ' + $propertyPath + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.opts.jsonPointers + ? it.util.getPathExpr($currentErrorPath, $propertyPath, true) + : $currentErrorPath + ' + ' + $propertyPath; + } + }} + {{# def.error:_error }} +#}} + + +{{## def.allErrorsMissingProperty:_error: + {{ + var $prop = it.util.getProperty($propertyKey) + , $missingProperty = it.util.escapeQuotes($propertyKey) + , $useData = $data + $prop; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers); + } + }} + if ({{# def.noPropertyInData }}) { + {{# def.addError:_error }} + } +#}} diff --git a/node_modules/ajv/lib/dot/multipleOf.jst b/node_modules/ajv/lib/dot/multipleOf.jst new file mode 100644 index 0000000..5f8dd33 --- /dev/null +++ b/node_modules/ajv/lib/dot/multipleOf.jst @@ -0,0 +1,20 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +var division{{=$lvl}}; +if ({{?$isData}} + {{=$schemaValue}} !== undefined && ( + typeof {{=$schemaValue}} != 'number' || + {{?}} + (division{{=$lvl}} = {{=$data}} / {{=$schemaValue}}, + {{? it.opts.multipleOfPrecision }} + Math.abs(Math.round(division{{=$lvl}}) - division{{=$lvl}}) > 1e-{{=it.opts.multipleOfPrecision}} + {{??}} + division{{=$lvl}} !== parseInt(division{{=$lvl}}) + {{?}} + ) + {{?$isData}} ) {{?}} ) { + {{# def.error:'multipleOf' }} +} {{? 
$breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/not.jst b/node_modules/ajv/lib/dot/not.jst new file mode 100644 index 0000000..e03185a --- /dev/null +++ b/node_modules/ajv/lib/dot/not.jst @@ -0,0 +1,43 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + +{{? {{# def.nonEmptySchema:$schema }} }} + {{ + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + }} + + var {{=$errs}} = errors; + + {{# def.setCompositeRule }} + + {{ + $it.createErrors = false; + var $allErrorsOption; + if ($it.opts.allErrors) { + $allErrorsOption = $it.opts.allErrors; + $it.opts.allErrors = false; + } + }} + {{= it.validate($it) }} + {{ + $it.createErrors = true; + if ($allErrorsOption) $it.opts.allErrors = $allErrorsOption; + }} + + {{# def.resetCompositeRule }} + + if ({{=$nextValid}}) { + {{# def.error:'not' }} + } else { + {{# def.resetErrors }} + {{? it.opts.allErrors }} } {{?}} +{{??}} + {{# def.addError:'not' }} + {{? $breakOnError}} + if (false) { + {{?}} +{{?}} diff --git a/node_modules/ajv/lib/dot/oneOf.jst b/node_modules/ajv/lib/dot/oneOf.jst new file mode 100644 index 0000000..bcce2c6 --- /dev/null +++ b/node_modules/ajv/lib/dot/oneOf.jst @@ -0,0 +1,54 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + +{{ + var $currentBaseId = $it.baseId + , $prevValid = 'prevValid' + $lvl + , $passingSchemas = 'passingSchemas' + $lvl; +}} + +var {{=$errs}} = errors + , {{=$prevValid}} = false + , {{=$valid}} = false + , {{=$passingSchemas}} = null; + +{{# def.setCompositeRule }} + +{{~ $schema:$sch:$i }} + {{? {{# def.nonEmptySchema:$sch }} }} + {{ + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + }} + + {{# def.insertSubschemaCode }} + {{??}} + var {{=$nextValid}} = true; + {{?}} + + {{? $i }} + if ({{=$nextValid}} && {{=$prevValid}}) { + {{=$valid}} = false; + {{=$passingSchemas}} = [{{=$passingSchemas}}, {{=$i}}]; + } else { + {{ $closingBraces += '}'; }} + {{?}} + + if ({{=$nextValid}}) { + {{=$valid}} = {{=$prevValid}} = true; + {{=$passingSchemas}} = {{=$i}}; + } +{{~}} + +{{# def.resetCompositeRule }} + +{{= $closingBraces }} + +if (!{{=$valid}}) { + {{# def.extraError:'oneOf' }} +} else { + {{# def.resetErrors }} +{{? it.opts.allErrors }} } {{?}} diff --git a/node_modules/ajv/lib/dot/pattern.jst b/node_modules/ajv/lib/dot/pattern.jst new file mode 100644 index 0000000..3a37ef6 --- /dev/null +++ b/node_modules/ajv/lib/dot/pattern.jst @@ -0,0 +1,14 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + +{{ + var $regexp = $isData + ? '(new RegExp(' + $schemaValue + '))' + : it.usePattern($schema); +}} + +if ({{# def.$dataNotType:'string' }} !{{=$regexp}}.test({{=$data}}) ) { + {{# def.error:'pattern' }} +} {{? $breakOnError }} else { {{?}} diff --git a/node_modules/ajv/lib/dot/properties.jst b/node_modules/ajv/lib/dot/properties.jst new file mode 100644 index 0000000..862067e --- /dev/null +++ b/node_modules/ajv/lib/dot/properties.jst @@ -0,0 +1,244 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + + +{{## def.validateAdditional: + {{ /* additionalProperties is schema */ + $it.schema = $aProperties; + $it.schemaPath = it.schemaPath + '.additionalProperties'; + $it.errSchemaPath = it.errSchemaPath + '/additionalProperties'; + $it.errorPath = it.opts._errorDataPathProperty + ? 
it.errorPath + : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + var $passData = $data + '[' + $key + ']'; + $it.dataPathArr[$dataNxt] = $key; + }} + + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} +#}} + + +{{ + var $key = 'key' + $lvl + , $idx = 'idx' + $lvl + , $dataNxt = $it.dataLevel = it.dataLevel + 1 + , $nextData = 'data' + $dataNxt + , $dataProperties = 'dataProperties' + $lvl; + + var $schemaKeys = Object.keys($schema || {}) + , $pProperties = it.schema.patternProperties || {} + , $pPropertyKeys = Object.keys($pProperties) + , $aProperties = it.schema.additionalProperties + , $someProperties = $schemaKeys.length || $pPropertyKeys.length + , $noAdditional = $aProperties === false + , $additionalIsSchema = typeof $aProperties == 'object' + && Object.keys($aProperties).length + , $removeAdditional = it.opts.removeAdditional + , $checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional + , $ownProperties = it.opts.ownProperties + , $currentBaseId = it.baseId; + + var $required = it.schema.required; + if ($required && !(it.opts.$data && $required.$data) && $required.length < it.opts.loopRequired) + var $requiredHash = it.util.toHash($required); +}} + + +var {{=$errs}} = errors; +var {{=$nextValid}} = true; +{{? $ownProperties }} + var {{=$dataProperties}} = undefined; +{{?}} + +{{? $checkAdditional }} + {{# def.iterateProperties }} + {{? $someProperties }} + var isAdditional{{=$lvl}} = !(false + {{? $schemaKeys.length }} + {{? $schemaKeys.length > 8 }} + || validate.schema{{=$schemaPath}}.hasOwnProperty({{=$key}}) + {{??}} + {{~ $schemaKeys:$propertyKey }} + || {{=$key}} == {{= it.util.toQuotedString($propertyKey) }} + {{~}} + {{?}} + {{?}} + {{? $pPropertyKeys.length }} + {{~ $pPropertyKeys:$pProperty:$i }} + || {{= it.usePattern($pProperty) }}.test({{=$key}}) + {{~}} + {{?}} + ); + + if (isAdditional{{=$lvl}}) { + {{?}} + {{? $removeAdditional == 'all' }} + delete {{=$data}}[{{=$key}}]; + {{??}} + {{ + var $currentErrorPath = it.errorPath; + var $additionalProperty = '\' + ' + $key + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + } + }} + {{? $noAdditional }} + {{? $removeAdditional }} + delete {{=$data}}[{{=$key}}]; + {{??}} + {{=$nextValid}} = false; + {{ + var $currErrSchemaPath = $errSchemaPath; + $errSchemaPath = it.errSchemaPath + '/additionalProperties'; + }} + {{# def.error:'additionalProperties' }} + {{ $errSchemaPath = $currErrSchemaPath; }} + {{? $breakOnError }} break; {{?}} + {{?}} + {{?? $additionalIsSchema }} + {{? $removeAdditional == 'failing' }} + var {{=$errs}} = errors; + {{# def.setCompositeRule }} + + {{# def.validateAdditional }} + + if (!{{=$nextValid}}) { + errors = {{=$errs}}; + if (validate.errors !== null) { + if (errors) validate.errors.length = errors; + else validate.errors = null; + } + delete {{=$data}}[{{=$key}}]; + } + + {{# def.resetCompositeRule }} + {{??}} + {{# def.validateAdditional }} + {{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}} + {{?}} + {{?}} + {{ it.errorPath = $currentErrorPath; }} + {{?}} + {{? $someProperties }} + } + {{?}} + } + + {{# def.ifResultValid }} +{{?}} + +{{ var $useDefaults = it.opts.useDefaults && !it.compositeRule; }} + +{{? $schemaKeys.length }} + {{~ $schemaKeys:$propertyKey }} + {{ var $sch = $schema[$propertyKey]; }} + + {{? 
{{# def.nonEmptySchema:$sch}} }} + {{ + var $prop = it.util.getProperty($propertyKey) + , $passData = $data + $prop + , $hasDefault = $useDefaults && $sch.default !== undefined; + $it.schema = $sch; + $it.schemaPath = $schemaPath + $prop; + $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey); + $it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers); + $it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey); + }} + + {{# def.generateSubschemaCode }} + + {{? {{# def.willOptimize }} }} + {{ + $code = {{# def._optimizeValidate }}; + var $useData = $passData; + }} + {{??}} + {{ var $useData = $nextData; }} + var {{=$nextData}} = {{=$passData}}; + {{?}} + + {{? $hasDefault }} + {{= $code }} + {{??}} + {{? $requiredHash && $requiredHash[$propertyKey] }} + if ({{# def.noPropertyInData }}) { + {{=$nextValid}} = false; + {{ + var $currentErrorPath = it.errorPath + , $currErrSchemaPath = $errSchemaPath + , $missingProperty = it.util.escapeQuotes($propertyKey); + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers); + } + $errSchemaPath = it.errSchemaPath + '/required'; + }} + {{# def.error:'required' }} + {{ $errSchemaPath = $currErrSchemaPath; }} + {{ it.errorPath = $currentErrorPath; }} + } else { + {{??}} + {{? $breakOnError }} + if ({{# def.noPropertyInData }}) { + {{=$nextValid}} = true; + } else { + {{??}} + if ({{=$useData}} !== undefined + {{? $ownProperties }} + && {{# def.isOwnProperty }} + {{?}} + ) { + {{?}} + {{?}} + + {{= $code }} + } + {{?}} {{ /* $hasDefault */ }} + {{?}} {{ /* def.nonEmptySchema */ }} + + {{# def.ifResultValid }} + {{~}} +{{?}} + +{{? $pPropertyKeys.length }} + {{~ $pPropertyKeys:$pProperty }} + {{ var $sch = $pProperties[$pProperty]; }} + + {{? {{# def.nonEmptySchema:$sch}} }} + {{ + $it.schema = $sch; + $it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty); + $it.errSchemaPath = it.errSchemaPath + '/patternProperties/' + + it.util.escapeFragment($pProperty); + }} + + {{# def.iterateProperties }} + if ({{= it.usePattern($pProperty) }}.test({{=$key}})) { + {{ + $it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + var $passData = $data + '[' + $key + ']'; + $it.dataPathArr[$dataNxt] = $key; + }} + + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} + + {{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}} + } + {{? $breakOnError }} else {{=$nextValid}} = true; {{?}} + } + + {{# def.ifResultValid }} + {{?}} {{ /* def.nonEmptySchema */ }} + {{~}} +{{?}} + + +{{? $breakOnError }} + {{= $closingBraces }} + if ({{=$errs}} == errors) { +{{?}} + +{{# def.cleanUp }} diff --git a/node_modules/ajv/lib/dot/propertyNames.jst b/node_modules/ajv/lib/dot/propertyNames.jst new file mode 100644 index 0000000..ee52b21 --- /dev/null +++ b/node_modules/ajv/lib/dot/propertyNames.jst @@ -0,0 +1,54 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.setupNextLevel }} + +var {{=$errs}} = errors; + +{{? 
{{# def.nonEmptySchema:$schema }} }} + {{ + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + }} + + {{ + var $key = 'key' + $lvl + , $idx = 'idx' + $lvl + , $i = 'i' + $lvl + , $invalidName = '\' + ' + $key + ' + \'' + , $dataNxt = $it.dataLevel = it.dataLevel + 1 + , $nextData = 'data' + $dataNxt + , $dataProperties = 'dataProperties' + $lvl + , $ownProperties = it.opts.ownProperties + , $currentBaseId = it.baseId; + }} + + {{? $ownProperties }} + var {{=$dataProperties}} = undefined; + {{?}} + {{# def.iterateProperties }} + var startErrs{{=$lvl}} = errors; + + {{ var $passData = $key; }} + {{# def.setCompositeRule }} + {{# def.generateSubschemaCode }} + {{# def.optimizeValidate }} + {{# def.resetCompositeRule }} + + if (!{{=$nextValid}}) { + for (var {{=$i}}=startErrs{{=$lvl}}; {{=$i}}= it.opts.loopRequired + , $ownProperties = it.opts.ownProperties; + }} + + {{? $breakOnError }} + var missing{{=$lvl}}; + {{? $loopRequired }} + {{# def.setupLoop }} + var {{=$valid}} = true; + + {{?$isData}}{{# def.check$dataIsArray }}{{?}} + + for (var {{=$i}} = 0; {{=$i}} < {{=$vSchema}}.length; {{=$i}}++) { + {{=$valid}} = {{=$data}}[{{=$vSchema}}[{{=$i}}]] !== undefined + {{? $ownProperties }} + && {{# def.isRequiredOwnProperty }} + {{?}}; + if (!{{=$valid}}) break; + } + + {{? $isData }} } {{?}} + + {{# def.checkError:'required' }} + else { + {{??}} + if ({{# def.checkMissingProperty:$required }}) { + {{# def.errorMissingProperty:'required' }} + } else { + {{?}} + {{??}} + {{? $loopRequired }} + {{# def.setupLoop }} + {{? $isData }} + if ({{=$vSchema}} && !Array.isArray({{=$vSchema}})) { + {{# def.addError:'required' }} + } else if ({{=$vSchema}} !== undefined) { + {{?}} + + for (var {{=$i}} = 0; {{=$i}} < {{=$vSchema}}.length; {{=$i}}++) { + if ({{=$data}}[{{=$vSchema}}[{{=$i}}]] === undefined + {{? $ownProperties }} + || !{{# def.isRequiredOwnProperty }} + {{?}}) { + {{# def.addError:'required' }} + } + } + + {{? $isData }} } {{?}} + {{??}} + {{~ $required:$propertyKey }} + {{# def.allErrorsMissingProperty:'required' }} + {{~}} + {{?}} + {{?}} + + {{ it.errorPath = $currentErrorPath; }} + +{{?? $breakOnError }} + if (true) { +{{?}} diff --git a/node_modules/ajv/lib/dot/uniqueItems.jst b/node_modules/ajv/lib/dot/uniqueItems.jst new file mode 100644 index 0000000..22f82f9 --- /dev/null +++ b/node_modules/ajv/lib/dot/uniqueItems.jst @@ -0,0 +1,62 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.setupKeyword }} +{{# def.$data }} + + +{{? ($schema || $isData) && it.opts.uniqueItems !== false }} + {{? $isData }} + var {{=$valid}}; + if ({{=$schemaValue}} === false || {{=$schemaValue}} === undefined) + {{=$valid}} = true; + else if (typeof {{=$schemaValue}} != 'boolean') + {{=$valid}} = false; + else { + {{?}} + + var i = {{=$data}}.length + , {{=$valid}} = true + , j; + if (i > 1) { + {{ + var $itemType = it.schema.items && it.schema.items.type + , $typeIsArray = Array.isArray($itemType); + }} + {{? !$itemType || $itemType == 'object' || $itemType == 'array' || + ($typeIsArray && ($itemType.indexOf('object') >= 0 || $itemType.indexOf('array') >= 0)) }} + outer: + for (;i--;) { + for (j = i; j--;) { + if (equal({{=$data}}[i], {{=$data}}[j])) { + {{=$valid}} = false; + break outer; + } + } + } + {{??}} + var itemIndices = {}, item; + for (;i--;) { + var item = {{=$data}}[i]; + {{ var $method = 'checkDataType' + ($typeIsArray ? 's' : ''); }} + if ({{= it.util[$method]($itemType, 'item', true) }}) continue; + {{? 
$typeIsArray}} + if (typeof item == 'string') item = '"' + item; + {{?}} + if (typeof itemIndices[item] == 'number') { + {{=$valid}} = false; + j = itemIndices[item]; + break; + } + itemIndices[item] = i; + } + {{?}} + } + + {{? $isData }} } {{?}} + + if (!{{=$valid}}) { + {{# def.error:'uniqueItems' }} + } {{? $breakOnError }} else { {{?}} +{{??}} + {{? $breakOnError }} if (true) { {{?}} +{{?}} diff --git a/node_modules/ajv/lib/dot/validate.jst b/node_modules/ajv/lib/dot/validate.jst new file mode 100644 index 0000000..f8a1edf --- /dev/null +++ b/node_modules/ajv/lib/dot/validate.jst @@ -0,0 +1,282 @@ +{{# def.definitions }} +{{# def.errors }} +{{# def.defaults }} +{{# def.coerce }} + +{{ /** + * schema compilation (render) time: + * it = { schema, RULES, _validate, opts } + * it.validate - this template function, + * it is used recursively to generate code for subschemas + * + * runtime: + * "validate" is a variable name to which this function will be assigned + * validateRef etc. are defined in the parent scope in index.js + */ }} + +{{ + var $async = it.schema.$async === true + , $refKeywords = it.util.schemaHasRulesExcept(it.schema, it.RULES.all, '$ref') + , $id = it.self._getId(it.schema); +}} + +{{ + if (it.opts.strictKeywords) { + var $unknownKwd = it.util.schemaUnknownRules(it.schema, it.RULES.keywords); + if ($unknownKwd) { + var $keywordsMsg = 'unknown keyword: ' + $unknownKwd; + if (it.opts.strictKeywords === 'log') it.logger.warn($keywordsMsg); + else throw new Error($keywordsMsg); + } + } +}} + +{{? it.isTop }} + var validate = {{?$async}}{{it.async = true;}}async {{?}}function(data, dataPath, parentData, parentDataProperty, rootData) { + 'use strict'; + {{? $id && (it.opts.sourceCode || it.opts.processCode) }} + {{= '/\*# sourceURL=' + $id + ' */' }} + {{?}} +{{?}} + +{{? typeof it.schema == 'boolean' || !($refKeywords || it.schema.$ref) }} + {{ var $keyword = 'false schema'; }} + {{# def.setupKeyword }} + {{? it.schema === false}} + {{? it.isTop}} + {{ $breakOnError = true; }} + {{??}} + var {{=$valid}} = false; + {{?}} + {{# def.error:'false schema' }} + {{??}} + {{? it.isTop}} + {{? $async }} + return data; + {{??}} + validate.errors = null; + return true; + {{?}} + {{??}} + var {{=$valid}} = true; + {{?}} + {{?}} + + {{? it.isTop}} + }; + return validate; + {{?}} + + {{ return out; }} +{{?}} + + +{{? 
it.isTop }} + {{ + var $top = it.isTop + , $lvl = it.level = 0 + , $dataLvl = it.dataLevel = 0 + , $data = 'data'; + it.rootId = it.resolve.fullPath(it.self._getId(it.root.schema)); + it.baseId = it.baseId || it.rootId; + delete it.isTop; + + it.dataPathArr = [undefined]; + + if (it.schema.default !== undefined && it.opts.useDefaults && it.opts.strictDefaults) { + var $defaultMsg = 'default is ignored in the schema root'; + if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg); + else throw new Error($defaultMsg); + } + }} + + var vErrors = null; {{ /* don't edit, used in replace */ }} + var errors = 0; {{ /* don't edit, used in replace */ }} + if (rootData === undefined) rootData = data; {{ /* don't edit, used in replace */ }} +{{??}} + {{ + var $lvl = it.level + , $dataLvl = it.dataLevel + , $data = 'data' + ($dataLvl || ''); + + if ($id) it.baseId = it.resolve.url(it.baseId, $id); + + if ($async && !it.async) throw new Error('async schema in sync schema'); + }} + + var errs_{{=$lvl}} = errors; +{{?}} + +{{ + var $valid = 'valid' + $lvl + , $breakOnError = !it.opts.allErrors + , $closingBraces1 = '' + , $closingBraces2 = ''; + + var $errorKeyword; + var $typeSchema = it.schema.type + , $typeIsArray = Array.isArray($typeSchema); + + if ($typeSchema && it.opts.nullable && it.schema.nullable === true) { + if ($typeIsArray) { + if ($typeSchema.indexOf('null') == -1) + $typeSchema = $typeSchema.concat('null'); + } else if ($typeSchema != 'null') { + $typeSchema = [$typeSchema, 'null']; + $typeIsArray = true; + } + } + + if ($typeIsArray && $typeSchema.length == 1) { + $typeSchema = $typeSchema[0]; + $typeIsArray = false; + } +}} + +{{## def.checkType: + {{ + var $schemaPath = it.schemaPath + '.type' + , $errSchemaPath = it.errSchemaPath + '/type' + , $method = $typeIsArray ? 'checkDataTypes' : 'checkDataType'; + }} + + if ({{= it.util[$method]($typeSchema, $data, true) }}) { +#}} + +{{? it.schema.$ref && $refKeywords }} + {{? it.opts.extendRefs == 'fail' }} + {{ throw new Error('$ref: validation keywords used in schema at path "' + it.errSchemaPath + '" (see option extendRefs)'); }} + {{?? it.opts.extendRefs !== true }} + {{ + $refKeywords = false; + it.logger.warn('$ref: keywords ignored in schema at path "' + it.errSchemaPath + '"'); + }} + {{?}} +{{?}} + +{{? it.schema.$comment && it.opts.$comment }} + {{= it.RULES.all.$comment.code(it, '$comment') }} +{{?}} + +{{? $typeSchema }} + {{? it.opts.coerceTypes }} + {{ var $coerceToTypes = it.util.coerceToTypes(it.opts.coerceTypes, $typeSchema); }} + {{?}} + + {{ var $rulesGroup = it.RULES.types[$typeSchema]; }} + {{? $coerceToTypes || $typeIsArray || $rulesGroup === true || + ($rulesGroup && !$shouldUseGroup($rulesGroup)) }} + {{ + var $schemaPath = it.schemaPath + '.type' + , $errSchemaPath = it.errSchemaPath + '/type'; + }} + {{# def.checkType }} + {{? $coerceToTypes }} + {{# def.coerceType }} + {{??}} + {{# def.error:'type' }} + {{?}} + } + {{?}} +{{?}} + + +{{? it.schema.$ref && !$refKeywords }} + {{= it.RULES.all.$ref.code(it, '$ref') }} + {{? $breakOnError }} + } + if (errors === {{?$top}}0{{??}}errs_{{=$lvl}}{{?}}) { + {{ $closingBraces2 += '}'; }} + {{?}} +{{??}} + {{~ it.RULES:$rulesGroup }} + {{? $shouldUseGroup($rulesGroup) }} + {{? $rulesGroup.type }} + if ({{= it.util.checkDataType($rulesGroup.type, $data) }}) { + {{?}} + {{? it.opts.useDefaults }} + {{? $rulesGroup.type == 'object' && it.schema.properties }} + {{# def.defaultProperties }} + {{?? 
$rulesGroup.type == 'array' && Array.isArray(it.schema.items) }} + {{# def.defaultItems }} + {{?}} + {{?}} + {{~ $rulesGroup.rules:$rule }} + {{? $shouldUseRule($rule) }} + {{ var $code = $rule.code(it, $rule.keyword, $rulesGroup.type); }} + {{? $code }} + {{= $code }} + {{? $breakOnError }} + {{ $closingBraces1 += '}'; }} + {{?}} + {{?}} + {{?}} + {{~}} + {{? $breakOnError }} + {{= $closingBraces1 }} + {{ $closingBraces1 = ''; }} + {{?}} + {{? $rulesGroup.type }} + } + {{? $typeSchema && $typeSchema === $rulesGroup.type && !$coerceToTypes }} + else { + {{ + var $schemaPath = it.schemaPath + '.type' + , $errSchemaPath = it.errSchemaPath + '/type'; + }} + {{# def.error:'type' }} + } + {{?}} + {{?}} + + {{? $breakOnError }} + if (errors === {{?$top}}0{{??}}errs_{{=$lvl}}{{?}}) { + {{ $closingBraces2 += '}'; }} + {{?}} + {{?}} + {{~}} +{{?}} + +{{? $breakOnError }} {{= $closingBraces2 }} {{?}} + +{{? $top }} + {{? $async }} + if (errors === 0) return data; {{ /* don't edit, used in replace */ }} + else throw new ValidationError(vErrors); {{ /* don't edit, used in replace */ }} + {{??}} + validate.errors = vErrors; {{ /* don't edit, used in replace */ }} + return errors === 0; {{ /* don't edit, used in replace */ }} + {{?}} + }; + + return validate; +{{??}} + var {{=$valid}} = errors === errs_{{=$lvl}}; +{{?}} + +{{# def.cleanUp }} + +{{? $top }} + {{# def.finalCleanUp }} +{{?}} + +{{ + function $shouldUseGroup($rulesGroup) { + var rules = $rulesGroup.rules; + for (var i=0; i < rules.length; i++) + if ($shouldUseRule(rules[i])) + return true; + } + + function $shouldUseRule($rule) { + return it.schema[$rule.keyword] !== undefined || + ($rule.implements && $ruleImplementsSomeKeyword($rule)); + } + + function $ruleImplementsSomeKeyword($rule) { + var impl = $rule.implements; + for (var i=0; i < impl.length; i++) + if (it.schema[impl[i]] !== undefined) + return true; + } +}} diff --git a/node_modules/ajv/lib/dotjs/README.md b/node_modules/ajv/lib/dotjs/README.md new file mode 100644 index 0000000..4d99484 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/README.md @@ -0,0 +1,3 @@ +These files are compiled dot templates from dot folder. + +Do NOT edit them directly, edit the templates and run `npm run build` from main ajv folder. diff --git a/node_modules/ajv/lib/dotjs/_limit.js b/node_modules/ajv/lib/dotjs/_limit.js new file mode 100644 index 0000000..f02a760 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/_limit.js @@ -0,0 +1,157 @@ +'use strict'; +module.exports = function generate__limit(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $isMax = $keyword == 'maximum', + $exclusiveKeyword = $isMax ? 'exclusiveMaximum' : 'exclusiveMinimum', + $schemaExcl = it.schema[$exclusiveKeyword], + $isDataExcl = it.opts.$data && $schemaExcl && $schemaExcl.$data, + $op = $isMax ? '<' : '>', + $notOp = $isMax ? 
'>' : '<', + $errorKeyword = undefined; + if ($isDataExcl) { + var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr), + $exclusive = 'exclusive' + $lvl, + $exclType = 'exclType' + $lvl, + $exclIsNumber = 'exclIsNumber' + $lvl, + $opExpr = 'op' + $lvl, + $opStr = '\' + ' + $opExpr + ' + \''; + out += ' var schemaExcl' + ($lvl) + ' = ' + ($schemaValueExcl) + '; '; + $schemaValueExcl = 'schemaExcl' + $lvl; + out += ' var ' + ($exclusive) + '; var ' + ($exclType) + ' = typeof ' + ($schemaValueExcl) + '; if (' + ($exclType) + ' != \'boolean\' && ' + ($exclType) + ' != \'undefined\' && ' + ($exclType) + ' != \'number\') { '; + var $errorKeyword = $exclusiveKeyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || '_exclusiveLimit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'' + ($exclusiveKeyword) + ' should be boolean\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + out += ' ' + ($exclType) + ' == \'number\' ? ( (' + ($exclusive) + ' = ' + ($schemaValue) + ' === undefined || ' + ($schemaValueExcl) + ' ' + ($op) + '= ' + ($schemaValue) + ') ? ' + ($data) + ' ' + ($notOp) + '= ' + ($schemaValueExcl) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) : ( (' + ($exclusive) + ' = ' + ($schemaValueExcl) + ' === true) ? ' + ($data) + ' ' + ($notOp) + '= ' + ($schemaValue) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) || ' + ($data) + ' !== ' + ($data) + ') { var op' + ($lvl) + ' = ' + ($exclusive) + ' ? \'' + ($op) + '\' : \'' + ($op) + '=\'; '; + if ($schema === undefined) { + $errorKeyword = $exclusiveKeyword; + $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword; + $schemaValue = $schemaValueExcl; + $isData = $isDataExcl; + } + } else { + var $exclIsNumber = typeof $schemaExcl == 'number', + $opStr = $op; + if ($exclIsNumber && $isData) { + var $opExpr = '\'' + $opStr + '\''; + out += ' if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + out += ' ( ' + ($schemaValue) + ' === undefined || ' + ($schemaExcl) + ' ' + ($op) + '= ' + ($schemaValue) + ' ? 
' + ($data) + ' ' + ($notOp) + '= ' + ($schemaExcl) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) || ' + ($data) + ' !== ' + ($data) + ') { '; + } else { + if ($exclIsNumber && $schema === undefined) { + $exclusive = true; + $errorKeyword = $exclusiveKeyword; + $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword; + $schemaValue = $schemaExcl; + $notOp += '='; + } else { + if ($exclIsNumber) $schemaValue = Math[$isMax ? 'min' : 'max']($schemaExcl, $schema); + if ($schemaExcl === ($exclIsNumber ? $schemaValue : true)) { + $exclusive = true; + $errorKeyword = $exclusiveKeyword; + $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword; + $notOp += '='; + } else { + $exclusive = false; + $opStr += '='; + } + } + var $opExpr = '\'' + $opStr + '\''; + out += ' if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + out += ' ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' || ' + ($data) + ' !== ' + ($data) + ') { '; + } + } + $errorKeyword = $errorKeyword || $keyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || '_limit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { comparison: ' + ($opExpr) + ', limit: ' + ($schemaValue) + ', exclusive: ' + ($exclusive) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be ' + ($opStr) + ' '; + if ($isData) { + out += '\' + ' + ($schemaValue); + } else { + out += '' + ($schemaValue) + '\''; + } + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/_limitItems.js b/node_modules/ajv/lib/dotjs/_limitItems.js new file mode 100644 index 0000000..a27d118 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/_limitItems.js @@ -0,0 +1,77 @@ +'use strict'; +module.exports = function generate__limitItems(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $op = $keyword == 'maxItems' ? 
'>' : '<'; + out += 'if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + out += ' ' + ($data) + '.length ' + ($op) + ' ' + ($schemaValue) + ') { '; + var $errorKeyword = $keyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || '_limitItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT have '; + if ($keyword == 'maxItems') { + out += 'more'; + } else { + out += 'fewer'; + } + out += ' than '; + if ($isData) { + out += '\' + ' + ($schemaValue) + ' + \''; + } else { + out += '' + ($schema); + } + out += ' items\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += '} '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/_limitLength.js b/node_modules/ajv/lib/dotjs/_limitLength.js new file mode 100644 index 0000000..789f374 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/_limitLength.js @@ -0,0 +1,82 @@ +'use strict'; +module.exports = function generate__limitLength(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $op = $keyword == 'maxLength' ? 
'>' : '<'; + out += 'if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + if (it.opts.unicode === false) { + out += ' ' + ($data) + '.length '; + } else { + out += ' ucs2length(' + ($data) + ') '; + } + out += ' ' + ($op) + ' ' + ($schemaValue) + ') { '; + var $errorKeyword = $keyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || '_limitLength') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT be '; + if ($keyword == 'maxLength') { + out += 'longer'; + } else { + out += 'shorter'; + } + out += ' than '; + if ($isData) { + out += '\' + ' + ($schemaValue) + ' + \''; + } else { + out += '' + ($schema); + } + out += ' characters\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += '} '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/_limitProperties.js b/node_modules/ajv/lib/dotjs/_limitProperties.js new file mode 100644 index 0000000..11dc939 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/_limitProperties.js @@ -0,0 +1,77 @@ +'use strict'; +module.exports = function generate__limitProperties(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $op = $keyword == 'maxProperties' ? 
'>' : '<'; + out += 'if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || '; + } + out += ' Object.keys(' + ($data) + ').length ' + ($op) + ' ' + ($schemaValue) + ') { '; + var $errorKeyword = $keyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || '_limitProperties') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT have '; + if ($keyword == 'maxProperties') { + out += 'more'; + } else { + out += 'fewer'; + } + out += ' than '; + if ($isData) { + out += '\' + ' + ($schemaValue) + ' + \''; + } else { + out += '' + ($schema); + } + out += ' properties\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += '} '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/allOf.js b/node_modules/ajv/lib/dotjs/allOf.js new file mode 100644 index 0000000..4bad914 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/allOf.js @@ -0,0 +1,43 @@ +'use strict'; +module.exports = function generate_allOf(it, $keyword, $ruleType) { + var out = ' '; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $currentBaseId = $it.baseId, + $allSchemasEmpty = true; + var arr1 = $schema; + if (arr1) { + var $sch, $i = -1, + l1 = arr1.length - 1; + while ($i < l1) { + $sch = arr1[$i += 1]; + if ((it.opts.strictKeywords ? 
typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + $allSchemasEmpty = false; + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } + } + if ($breakOnError) { + if ($allSchemasEmpty) { + out += ' if (true) { '; + } else { + out += ' ' + ($closingBraces.slice(0, -1)) + ' '; + } + } + out = it.util.cleanUpCode(out); + return out; +} diff --git a/node_modules/ajv/lib/dotjs/anyOf.js b/node_modules/ajv/lib/dotjs/anyOf.js new file mode 100644 index 0000000..01551d5 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/anyOf.js @@ -0,0 +1,74 @@ +'use strict'; +module.exports = function generate_anyOf(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $noEmptySchema = $schema.every(function($sch) { + return (it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all)); + }); + if ($noEmptySchema) { + var $currentBaseId = $it.baseId; + out += ' var ' + ($errs) + ' = errors; var ' + ($valid) + ' = false; '; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + var arr1 = $schema; + if (arr1) { + var $sch, $i = -1, + l1 = arr1.length - 1; + while ($i < l1) { + $sch = arr1[$i += 1]; + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + out += ' ' + ($valid) + ' = ' + ($valid) + ' || ' + ($nextValid) + '; if (!' + ($valid) + ') { '; + $closingBraces += '}'; + } + } + it.compositeRule = $it.compositeRule = $wasComposite; + out += ' ' + ($closingBraces) + ' if (!' 
+ ($valid) + ') { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('anyOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'should match some schema in anyOf\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError(vErrors); '; + } else { + out += ' validate.errors = vErrors; return false; '; + } + } + out += ' } else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } '; + if (it.opts.allErrors) { + out += ' } '; + } + out = it.util.cleanUpCode(out); + } else { + if ($breakOnError) { + out += ' if (true) { '; + } + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/comment.js b/node_modules/ajv/lib/dotjs/comment.js new file mode 100644 index 0000000..dd66bb8 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/comment.js @@ -0,0 +1,14 @@ +'use strict'; +module.exports = function generate_comment(it, $keyword, $ruleType) { + var out = ' '; + var $schema = it.schema[$keyword]; + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $comment = it.util.toQuotedString($schema); + if (it.opts.$comment === true) { + out += ' console.log(' + ($comment) + ');'; + } else if (typeof it.opts.$comment == 'function') { + out += ' self._opts.$comment(' + ($comment) + ', ' + (it.util.toQuotedString($errSchemaPath)) + ', validate.root.schema);'; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/const.js b/node_modules/ajv/lib/dotjs/const.js new file mode 100644 index 0000000..15b7c61 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/const.js @@ -0,0 +1,56 @@ +'use strict'; +module.exports = function generate_const(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + if (!$isData) { + out += ' var schema' + ($lvl) + ' = validate.schema' + ($schemaPath) + ';'; + } + out += 'var ' + ($valid) + ' = equal(' + ($data) + ', schema' + ($lvl) + '); if (!' 
+ ($valid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('const') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { allowedValue: schema' + ($lvl) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be equal to constant\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' }'; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/contains.js b/node_modules/ajv/lib/dotjs/contains.js new file mode 100644 index 0000000..cd4dfab --- /dev/null +++ b/node_modules/ajv/lib/dotjs/contains.js @@ -0,0 +1,82 @@ +'use strict'; +module.exports = function generate_contains(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $idx = 'i' + $lvl, + $dataNxt = $it.dataLevel = it.dataLevel + 1, + $nextData = 'data' + $dataNxt, + $currentBaseId = it.baseId, + $nonEmptySchema = (it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all)); + out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';'; + if ($nonEmptySchema) { + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + out += ' var ' + ($nextValid) + ' = false; for (var ' + ($idx) + ' = 0; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { '; + $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true); + var $passData = $data + '[' + $idx + ']'; + $it.dataPathArr[$dataNxt] = $idx; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + out += ' if (' + ($nextValid) + ') break; } '; + it.compositeRule = $it.compositeRule = $wasComposite; + out += ' ' + ($closingBraces) + ' if (!' 
+ ($nextValid) + ') {'; + } else { + out += ' if (' + ($data) + '.length == 0) {'; + } + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('contains') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'should contain a valid item\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else { '; + if ($nonEmptySchema) { + out += ' errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } '; + } + if (it.opts.allErrors) { + out += ' } '; + } + out = it.util.cleanUpCode(out); + return out; +} diff --git a/node_modules/ajv/lib/dotjs/custom.js b/node_modules/ajv/lib/dotjs/custom.js new file mode 100644 index 0000000..f3e641e --- /dev/null +++ b/node_modules/ajv/lib/dotjs/custom.js @@ -0,0 +1,228 @@ +'use strict'; +module.exports = function generate_custom(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $rule = this, + $definition = 'definition' + $lvl, + $rDef = $rule.definition, + $closingBraces = ''; + var $compile, $inline, $macro, $ruleValidate, $validateCode; + if ($isData && $rDef.$data) { + $validateCode = 'keywordValidate' + $lvl; + var $validateSchema = $rDef.validateSchema; + out += ' var ' + ($definition) + ' = RULES.custom[\'' + ($keyword) + '\'].definition; var ' + ($validateCode) + ' = ' + ($definition) + '.validate;'; + } else { + $ruleValidate = it.useCustomRule($rule, $schema, it.schema, it); + if (!$ruleValidate) return; + $schemaValue = 'validate.schema' + $schemaPath; + $validateCode = $ruleValidate.code; + $compile = $rDef.compile; + $inline = $rDef.inline; + $macro = $rDef.macro; + } + var $ruleErrs = $validateCode + '.errors', + $i = 'i' + $lvl, + $ruleErr = 'ruleErr' + $lvl, + $asyncKeyword = $rDef.async; + if ($asyncKeyword && !it.async) throw new Error('async keyword in sync schema'); + if (!($inline || $macro)) { + out += '' + ($ruleErrs) + ' = null;'; + } + out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';'; + if ($isData && $rDef.$data) { + $closingBraces += '}'; + out += ' if (' + ($schemaValue) + ' === 
undefined) { ' + ($valid) + ' = true; } else { '; + if ($validateSchema) { + $closingBraces += '}'; + out += ' ' + ($valid) + ' = ' + ($definition) + '.validateSchema(' + ($schemaValue) + '); if (' + ($valid) + ') { '; + } + } + if ($inline) { + if ($rDef.statements) { + out += ' ' + ($ruleValidate.validate) + ' '; + } else { + out += ' ' + ($valid) + ' = ' + ($ruleValidate.validate) + '; '; + } + } else if ($macro) { + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + $it.schema = $ruleValidate.validate; + $it.schemaPath = ''; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + var $code = it.validate($it).replace(/validate\.schema/g, $validateCode); + it.compositeRule = $it.compositeRule = $wasComposite; + out += ' ' + ($code); + } else { + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; + out += ' ' + ($validateCode) + '.call( '; + if (it.opts.passContext) { + out += 'this'; + } else { + out += 'self'; + } + if ($compile || $rDef.schema === false) { + out += ' , ' + ($data) + ' '; + } else { + out += ' , ' + ($schemaValue) + ' , ' + ($data) + ' , validate.schema' + (it.schemaPath) + ' '; + } + out += ' , (dataPath || \'\')'; + if (it.errorPath != '""') { + out += ' + ' + (it.errorPath); + } + var $parentData = $dataLvl ? 'data' + (($dataLvl - 1) || '') : 'parentData', + $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty'; + out += ' , ' + ($parentData) + ' , ' + ($parentDataProperty) + ' , rootData ) '; + var def_callRuleValidate = out; + out = $$outStack.pop(); + if ($rDef.errors === false) { + out += ' ' + ($valid) + ' = '; + if ($asyncKeyword) { + out += 'await '; + } + out += '' + (def_callRuleValidate) + '; '; + } else { + if ($asyncKeyword) { + $ruleErrs = 'customErrors' + $lvl; + out += ' var ' + ($ruleErrs) + ' = null; try { ' + ($valid) + ' = await ' + (def_callRuleValidate) + '; } catch (e) { ' + ($valid) + ' = false; if (e instanceof ValidationError) ' + ($ruleErrs) + ' = e.errors; else throw e; } '; + } else { + out += ' ' + ($ruleErrs) + ' = null; ' + ($valid) + ' = ' + (def_callRuleValidate) + '; '; + } + } + } + if ($rDef.modifying) { + out += ' if (' + ($parentData) + ') ' + ($data) + ' = ' + ($parentData) + '[' + ($parentDataProperty) + '];'; + } + out += '' + ($closingBraces); + if ($rDef.valid) { + if ($breakOnError) { + out += ' if (true) { '; + } + } else { + out += ' if ( '; + if ($rDef.valid === undefined) { + out += ' !'; + if ($macro) { + out += '' + ($nextValid); + } else { + out += '' + ($valid); + } + } else { + out += ' ' + (!$rDef.valid) + ' '; + } + out += ') { '; + $errorKeyword = $rule.keyword; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || 'custom') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { keyword: \'' + ($rule.keyword) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should pass "' + ($rule.keyword) + '" keyword validation\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = 
$$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + var def_customError = out; + out = $$outStack.pop(); + if ($inline) { + if ($rDef.errors) { + if ($rDef.errors != 'full') { + out += ' for (var ' + ($i) + '=' + ($errs) + '; ' + ($i) + ' 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + out += ' ' + ($nextValid) + ' = true; if ( ' + ($data) + (it.util.getProperty($property)) + ' !== undefined '; + if ($ownProperties) { + out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($property)) + '\') '; + } + out += ') { '; + $it.schema = $sch; + $it.schemaPath = $schemaPath + it.util.getProperty($property); + $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($property); + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + out += ' } '; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } + if ($breakOnError) { + out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {'; + } + out = it.util.cleanUpCode(out); + return out; +} diff --git a/node_modules/ajv/lib/dotjs/enum.js b/node_modules/ajv/lib/dotjs/enum.js new file mode 100644 index 0000000..90580b9 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/enum.js @@ -0,0 +1,66 @@ +'use strict'; +module.exports = function generate_enum(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $i = 'i' + $lvl, + $vSchema = 'schema' + $lvl; + if (!$isData) { + out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + ';'; + } + out += 'var ' + ($valid) + ';'; + if ($isData) { + out += ' if (schema' + ($lvl) + ' === undefined) ' + ($valid) + ' = true; else if (!Array.isArray(schema' + ($lvl) + ')) ' + ($valid) + ' = false; else {'; + } + out += '' + ($valid) + ' = false;for (var ' + ($i) + '=0; ' + ($i) + '<' + ($vSchema) + '.length; ' + ($i) + '++) if (equal(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + '])) { ' + ($valid) + ' = true; break; }'; + if ($isData) { + out += ' } '; + } + out += ' if (!' 
+ ($valid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('enum') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { allowedValues: schema' + ($lvl) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be equal to one of the allowed values\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' }'; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/format.js b/node_modules/ajv/lib/dotjs/format.js new file mode 100644 index 0000000..cd9a569 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/format.js @@ -0,0 +1,150 @@ +'use strict'; +module.exports = function generate_format(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + if (it.opts.format === false) { + if ($breakOnError) { + out += ' if (true) { '; + } + return out; + } + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $unknownFormats = it.opts.unknownFormats, + $allowUnknown = Array.isArray($unknownFormats); + if ($isData) { + var $format = 'format' + $lvl, + $isObject = 'isObject' + $lvl, + $formatType = 'formatType' + $lvl; + out += ' var ' + ($format) + ' = formats[' + ($schemaValue) + ']; var ' + ($isObject) + ' = typeof ' + ($format) + ' == \'object\' && !(' + ($format) + ' instanceof RegExp) && ' + ($format) + '.validate; var ' + ($formatType) + ' = ' + ($isObject) + ' && ' + ($format) + '.type || \'string\'; if (' + ($isObject) + ') { '; + if (it.async) { + out += ' var async' + ($lvl) + ' = ' + ($format) + '.async; '; + } + out += ' ' + ($format) + ' = ' + ($format) + '.validate; } if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'string\') || '; + } + out += ' ('; + if ($unknownFormats != 'ignore') { + out += ' (' + ($schemaValue) + ' && !' + ($format) + ' '; + if ($allowUnknown) { + out += ' && self._opts.unknownFormats.indexOf(' + ($schemaValue) + ') == -1 '; + } + out += ') || '; + } + out += ' (' + ($format) + ' && ' + ($formatType) + ' == \'' + ($ruleType) + '\' && !(typeof ' + ($format) + ' == \'function\' ? '; + if (it.async) { + out += ' (async' + ($lvl) + ' ? 
await ' + ($format) + '(' + ($data) + ') : ' + ($format) + '(' + ($data) + ')) '; + } else { + out += ' ' + ($format) + '(' + ($data) + ') '; + } + out += ' : ' + ($format) + '.test(' + ($data) + '))))) {'; + } else { + var $format = it.formats[$schema]; + if (!$format) { + if ($unknownFormats == 'ignore') { + it.logger.warn('unknown format "' + $schema + '" ignored in schema at path "' + it.errSchemaPath + '"'); + if ($breakOnError) { + out += ' if (true) { '; + } + return out; + } else if ($allowUnknown && $unknownFormats.indexOf($schema) >= 0) { + if ($breakOnError) { + out += ' if (true) { '; + } + return out; + } else { + throw new Error('unknown format "' + $schema + '" is used in schema at path "' + it.errSchemaPath + '"'); + } + } + var $isObject = typeof $format == 'object' && !($format instanceof RegExp) && $format.validate; + var $formatType = $isObject && $format.type || 'string'; + if ($isObject) { + var $async = $format.async === true; + $format = $format.validate; + } + if ($formatType != $ruleType) { + if ($breakOnError) { + out += ' if (true) { '; + } + return out; + } + if ($async) { + if (!it.async) throw new Error('async format in sync schema'); + var $formatRef = 'formats' + it.util.getProperty($schema) + '.validate'; + out += ' if (!(await ' + ($formatRef) + '(' + ($data) + '))) { '; + } else { + out += ' if (! '; + var $formatRef = 'formats' + it.util.getProperty($schema); + if ($isObject) $formatRef += '.validate'; + if (typeof $format == 'function') { + out += ' ' + ($formatRef) + '(' + ($data) + ') '; + } else { + out += ' ' + ($formatRef) + '.test(' + ($data) + ') '; + } + out += ') { '; + } + } + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('format') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { format: '; + if ($isData) { + out += '' + ($schemaValue); + } else { + out += '' + (it.util.toQuotedString($schema)); + } + out += ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should match format "'; + if ($isData) { + out += '\' + ' + ($schemaValue) + ' + \''; + } else { + out += '' + (it.util.escapeQuotes($schema)); + } + out += '"\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + (it.util.toQuotedString($schema)); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/if.js b/node_modules/ajv/lib/dotjs/if.js new file mode 100644 index 0000000..019f61a --- /dev/null +++ b/node_modules/ajv/lib/dotjs/if.js @@ -0,0 +1,104 @@ +'use strict'; +module.exports = function generate_if(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + 
it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + $it.level++; + var $nextValid = 'valid' + $it.level; + var $thenSch = it.schema['then'], + $elseSch = it.schema['else'], + $thenPresent = $thenSch !== undefined && (it.opts.strictKeywords ? typeof $thenSch == 'object' && Object.keys($thenSch).length > 0 : it.util.schemaHasRules($thenSch, it.RULES.all)), + $elsePresent = $elseSch !== undefined && (it.opts.strictKeywords ? typeof $elseSch == 'object' && Object.keys($elseSch).length > 0 : it.util.schemaHasRules($elseSch, it.RULES.all)), + $currentBaseId = $it.baseId; + if ($thenPresent || $elsePresent) { + var $ifClause; + $it.createErrors = false; + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + out += ' var ' + ($errs) + ' = errors; var ' + ($valid) + ' = true; '; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + $it.createErrors = true; + out += ' errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } '; + it.compositeRule = $it.compositeRule = $wasComposite; + if ($thenPresent) { + out += ' if (' + ($nextValid) + ') { '; + $it.schema = it.schema['then']; + $it.schemaPath = it.schemaPath + '.then'; + $it.errSchemaPath = it.errSchemaPath + '/then'; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + out += ' ' + ($valid) + ' = ' + ($nextValid) + '; '; + if ($thenPresent && $elsePresent) { + $ifClause = 'ifClause' + $lvl; + out += ' var ' + ($ifClause) + ' = \'then\'; '; + } else { + $ifClause = '\'then\''; + } + out += ' } '; + if ($elsePresent) { + out += ' else { '; + } + } else { + out += ' if (!' + ($nextValid) + ') { '; + } + if ($elsePresent) { + $it.schema = it.schema['else']; + $it.schemaPath = it.schemaPath + '.else'; + $it.errSchemaPath = it.errSchemaPath + '/else'; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + out += ' ' + ($valid) + ' = ' + ($nextValid) + '; '; + if ($thenPresent && $elsePresent) { + $ifClause = 'ifClause' + $lvl; + out += ' var ' + ($ifClause) + ' = \'else\'; '; + } else { + $ifClause = '\'else\''; + } + out += ' } '; + } + out += ' if (!' 
+ ($valid) + ') { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('if') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { failingKeyword: ' + ($ifClause) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should match "\' + ' + ($ifClause) + ' + \'" schema\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError(vErrors); '; + } else { + out += ' validate.errors = vErrors; return false; '; + } + } + out += ' } '; + if ($breakOnError) { + out += ' else { '; + } + out = it.util.cleanUpCode(out); + } else { + if ($breakOnError) { + out += ' if (true) { '; + } + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/index.js b/node_modules/ajv/lib/dotjs/index.js new file mode 100644 index 0000000..2fb1b00 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/index.js @@ -0,0 +1,33 @@ +'use strict'; + +//all requires must be explicit because browserify won't work with dynamic requires +module.exports = { + '$ref': require('./ref'), + allOf: require('./allOf'), + anyOf: require('./anyOf'), + '$comment': require('./comment'), + const: require('./const'), + contains: require('./contains'), + dependencies: require('./dependencies'), + 'enum': require('./enum'), + format: require('./format'), + 'if': require('./if'), + items: require('./items'), + maximum: require('./_limit'), + minimum: require('./_limit'), + maxItems: require('./_limitItems'), + minItems: require('./_limitItems'), + maxLength: require('./_limitLength'), + minLength: require('./_limitLength'), + maxProperties: require('./_limitProperties'), + minProperties: require('./_limitProperties'), + multipleOf: require('./multipleOf'), + not: require('./not'), + oneOf: require('./oneOf'), + pattern: require('./pattern'), + properties: require('./properties'), + propertyNames: require('./propertyNames'), + required: require('./required'), + uniqueItems: require('./uniqueItems'), + validate: require('./validate') +}; diff --git a/node_modules/ajv/lib/dotjs/items.js b/node_modules/ajv/lib/dotjs/items.js new file mode 100644 index 0000000..d5532f0 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/items.js @@ -0,0 +1,141 @@ +'use strict'; +module.exports = function generate_items(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $idx = 'i' + $lvl, + $dataNxt = $it.dataLevel = it.dataLevel + 1, + $nextData = 'data' + $dataNxt, + $currentBaseId = it.baseId; + out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';'; + if (Array.isArray($schema)) { + var $additionalItems = it.schema.additionalItems; + if ($additionalItems === false) { + out += ' ' + 
($valid) + ' = ' + ($data) + '.length <= ' + ($schema.length) + '; '; + var $currErrSchemaPath = $errSchemaPath; + $errSchemaPath = it.errSchemaPath + '/additionalItems'; + out += ' if (!' + ($valid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('additionalItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schema.length) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT have more than ' + ($schema.length) + ' items\' '; + } + if (it.opts.verbose) { + out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } '; + $errSchemaPath = $currErrSchemaPath; + if ($breakOnError) { + $closingBraces += '}'; + out += ' else { '; + } + } + var arr1 = $schema; + if (arr1) { + var $sch, $i = -1, + l1 = arr1.length - 1; + while ($i < l1) { + $sch = arr1[$i += 1]; + if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + out += ' ' + ($nextValid) + ' = true; if (' + ($data) + '.length > ' + ($i) + ') { '; + var $passData = $data + '[' + $i + ']'; + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + $it.errorPath = it.util.getPathExpr(it.errorPath, $i, it.opts.jsonPointers, true); + $it.dataPathArr[$dataNxt] = $i; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + out += ' } '; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } + } + if (typeof $additionalItems == 'object' && (it.opts.strictKeywords ? typeof $additionalItems == 'object' && Object.keys($additionalItems).length > 0 : it.util.schemaHasRules($additionalItems, it.RULES.all))) { + $it.schema = $additionalItems; + $it.schemaPath = it.schemaPath + '.additionalItems'; + $it.errSchemaPath = it.errSchemaPath + '/additionalItems'; + out += ' ' + ($nextValid) + ' = true; if (' + ($data) + '.length > ' + ($schema.length) + ') { for (var ' + ($idx) + ' = ' + ($schema.length) + '; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { '; + $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true); + var $passData = $data + '[' + $idx + ']'; + $it.dataPathArr[$dataNxt] = $idx; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + if ($breakOnError) { + out += ' if (!' 
+ ($nextValid) + ') break; '; + } + out += ' } } '; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } else if ((it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) { + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + out += ' for (var ' + ($idx) + ' = ' + (0) + '; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { '; + $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true); + var $passData = $data + '[' + $idx + ']'; + $it.dataPathArr[$dataNxt] = $idx; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + if ($breakOnError) { + out += ' if (!' + ($nextValid) + ') break; '; + } + out += ' }'; + } + if ($breakOnError) { + out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {'; + } + out = it.util.cleanUpCode(out); + return out; +} diff --git a/node_modules/ajv/lib/dotjs/multipleOf.js b/node_modules/ajv/lib/dotjs/multipleOf.js new file mode 100644 index 0000000..af087d2 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/multipleOf.js @@ -0,0 +1,77 @@ +'use strict'; +module.exports = function generate_multipleOf(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + out += 'var division' + ($lvl) + ';if ('; + if ($isData) { + out += ' ' + ($schemaValue) + ' !== undefined && ( typeof ' + ($schemaValue) + ' != \'number\' || '; + } + out += ' (division' + ($lvl) + ' = ' + ($data) + ' / ' + ($schemaValue) + ', '; + if (it.opts.multipleOfPrecision) { + out += ' Math.abs(Math.round(division' + ($lvl) + ') - division' + ($lvl) + ') > 1e-' + (it.opts.multipleOfPrecision) + ' '; + } else { + out += ' division' + ($lvl) + ' !== parseInt(division' + ($lvl) + ') '; + } + out += ' ) '; + if ($isData) { + out += ' ) '; + } + out += ' ) { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('multipleOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { multipleOf: ' + ($schemaValue) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be multiple of '; + if ($isData) { + out += '\' + ' + ($schemaValue); + } else { + out += '' + ($schemaValue) + '\''; + } + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = 
$$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += '} '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/not.js b/node_modules/ajv/lib/dotjs/not.js new file mode 100644 index 0000000..6aea659 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/not.js @@ -0,0 +1,84 @@ +'use strict'; +module.exports = function generate_not(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + $it.level++; + var $nextValid = 'valid' + $it.level; + if ((it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) { + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + out += ' var ' + ($errs) + ' = errors; '; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + $it.createErrors = false; + var $allErrorsOption; + if ($it.opts.allErrors) { + $allErrorsOption = $it.opts.allErrors; + $it.opts.allErrors = false; + } + out += ' ' + (it.validate($it)) + ' '; + $it.createErrors = true; + if ($allErrorsOption) $it.opts.allErrors = $allErrorsOption; + it.compositeRule = $it.compositeRule = $wasComposite; + out += ' if (' + ($nextValid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('not') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT be valid\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } '; + if (it.opts.allErrors) { + out += ' } '; + } + } else { + out += ' var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('not') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT be valid\' '; + } + if (it.opts.verbose) { + out += ' , schema: 
validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + if ($breakOnError) { + out += ' if (false) { '; + } + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/oneOf.js b/node_modules/ajv/lib/dotjs/oneOf.js new file mode 100644 index 0000000..30988d5 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/oneOf.js @@ -0,0 +1,73 @@ +'use strict'; +module.exports = function generate_oneOf(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $currentBaseId = $it.baseId, + $prevValid = 'prevValid' + $lvl, + $passingSchemas = 'passingSchemas' + $lvl; + out += 'var ' + ($errs) + ' = errors , ' + ($prevValid) + ' = false , ' + ($valid) + ' = false , ' + ($passingSchemas) + ' = null; '; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + var arr1 = $schema; + if (arr1) { + var $sch, $i = -1, + l1 = arr1.length - 1; + while ($i < l1) { + $sch = arr1[$i += 1]; + if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + $it.schema = $sch; + $it.schemaPath = $schemaPath + '[' + $i + ']'; + $it.errSchemaPath = $errSchemaPath + '/' + $i; + out += ' ' + (it.validate($it)) + ' '; + $it.baseId = $currentBaseId; + } else { + out += ' var ' + ($nextValid) + ' = true; '; + } + if ($i) { + out += ' if (' + ($nextValid) + ' && ' + ($prevValid) + ') { ' + ($valid) + ' = false; ' + ($passingSchemas) + ' = [' + ($passingSchemas) + ', ' + ($i) + ']; } else { '; + $closingBraces += '}'; + } + out += ' if (' + ($nextValid) + ') { ' + ($valid) + ' = ' + ($prevValid) + ' = true; ' + ($passingSchemas) + ' = ' + ($i) + '; }'; + } + } + it.compositeRule = $it.compositeRule = $wasComposite; + out += '' + ($closingBraces) + 'if (!' 
+ ($valid) + ') { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('oneOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { passingSchemas: ' + ($passingSchemas) + ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should match exactly one schema in oneOf\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError(vErrors); '; + } else { + out += ' validate.errors = vErrors; return false; '; + } + } + out += '} else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; }'; + if (it.opts.allErrors) { + out += ' } '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/pattern.js b/node_modules/ajv/lib/dotjs/pattern.js new file mode 100644 index 0000000..1d74d6b --- /dev/null +++ b/node_modules/ajv/lib/dotjs/pattern.js @@ -0,0 +1,75 @@ +'use strict'; +module.exports = function generate_pattern(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + $schemaValue = $schema; + } + var $regexp = $isData ? '(new RegExp(' + $schemaValue + '))' : it.usePattern($schema); + out += 'if ( '; + if ($isData) { + out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'string\') || '; + } + out += ' !' 
+ ($regexp) + '.test(' + ($data) + ') ) { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('pattern') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { pattern: '; + if ($isData) { + out += '' + ($schemaValue); + } else { + out += '' + (it.util.toQuotedString($schema)); + } + out += ' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should match pattern "'; + if ($isData) { + out += '\' + ' + ($schemaValue) + ' + \''; + } else { + out += '' + (it.util.escapeQuotes($schema)); + } + out += '"\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + (it.util.toQuotedString($schema)); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += '} '; + if ($breakOnError) { + out += ' else { '; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/properties.js b/node_modules/ajv/lib/dotjs/properties.js new file mode 100644 index 0000000..34a82c6 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/properties.js @@ -0,0 +1,330 @@ +'use strict'; +module.exports = function generate_properties(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + var $key = 'key' + $lvl, + $idx = 'idx' + $lvl, + $dataNxt = $it.dataLevel = it.dataLevel + 1, + $nextData = 'data' + $dataNxt, + $dataProperties = 'dataProperties' + $lvl; + var $schemaKeys = Object.keys($schema || {}), + $pProperties = it.schema.patternProperties || {}, + $pPropertyKeys = Object.keys($pProperties), + $aProperties = it.schema.additionalProperties, + $someProperties = $schemaKeys.length || $pPropertyKeys.length, + $noAdditional = $aProperties === false, + $additionalIsSchema = typeof $aProperties == 'object' && Object.keys($aProperties).length, + $removeAdditional = it.opts.removeAdditional, + $checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional, + $ownProperties = it.opts.ownProperties, + $currentBaseId = it.baseId; + var $required = it.schema.required; + if ($required && !(it.opts.$data && $required.$data) && $required.length < it.opts.loopRequired) var $requiredHash = it.util.toHash($required); + out += 'var ' + ($errs) + ' = errors;var ' + ($nextValid) + ' = true;'; + if ($ownProperties) { + out += ' var ' + ($dataProperties) + ' = undefined;'; + } + if ($checkAdditional) { + if ($ownProperties) { + out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + 
'); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; '; + } else { + out += ' for (var ' + ($key) + ' in ' + ($data) + ') { '; + } + if ($someProperties) { + out += ' var isAdditional' + ($lvl) + ' = !(false '; + if ($schemaKeys.length) { + if ($schemaKeys.length > 8) { + out += ' || validate.schema' + ($schemaPath) + '.hasOwnProperty(' + ($key) + ') '; + } else { + var arr1 = $schemaKeys; + if (arr1) { + var $propertyKey, i1 = -1, + l1 = arr1.length - 1; + while (i1 < l1) { + $propertyKey = arr1[i1 += 1]; + out += ' || ' + ($key) + ' == ' + (it.util.toQuotedString($propertyKey)) + ' '; + } + } + } + } + if ($pPropertyKeys.length) { + var arr2 = $pPropertyKeys; + if (arr2) { + var $pProperty, $i = -1, + l2 = arr2.length - 1; + while ($i < l2) { + $pProperty = arr2[$i += 1]; + out += ' || ' + (it.usePattern($pProperty)) + '.test(' + ($key) + ') '; + } + } + } + out += ' ); if (isAdditional' + ($lvl) + ') { '; + } + if ($removeAdditional == 'all') { + out += ' delete ' + ($data) + '[' + ($key) + ']; '; + } else { + var $currentErrorPath = it.errorPath; + var $additionalProperty = '\' + ' + $key + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + } + if ($noAdditional) { + if ($removeAdditional) { + out += ' delete ' + ($data) + '[' + ($key) + ']; '; + } else { + out += ' ' + ($nextValid) + ' = false; '; + var $currErrSchemaPath = $errSchemaPath; + $errSchemaPath = it.errSchemaPath + '/additionalProperties'; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('additionalProperties') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { additionalProperty: \'' + ($additionalProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is an invalid additional property'; + } else { + out += 'should NOT have additional properties'; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + $errSchemaPath = $currErrSchemaPath; + if ($breakOnError) { + out += ' break; '; + } + } + } else if ($additionalIsSchema) { + if ($removeAdditional == 'failing') { + out += ' var ' + ($errs) + ' = errors; '; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + $it.schema = $aProperties; + $it.schemaPath = it.schemaPath + '.additionalProperties'; + $it.errSchemaPath = it.errSchemaPath + '/additionalProperties'; + $it.errorPath = it.opts._errorDataPathProperty ? 
it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + var $passData = $data + '[' + $key + ']'; + $it.dataPathArr[$dataNxt] = $key; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + out += ' if (!' + ($nextValid) + ') { errors = ' + ($errs) + '; if (validate.errors !== null) { if (errors) validate.errors.length = errors; else validate.errors = null; } delete ' + ($data) + '[' + ($key) + ']; } '; + it.compositeRule = $it.compositeRule = $wasComposite; + } else { + $it.schema = $aProperties; + $it.schemaPath = it.schemaPath + '.additionalProperties'; + $it.errSchemaPath = it.errSchemaPath + '/additionalProperties'; + $it.errorPath = it.opts._errorDataPathProperty ? it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + var $passData = $data + '[' + $key + ']'; + $it.dataPathArr[$dataNxt] = $key; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + if ($breakOnError) { + out += ' if (!' + ($nextValid) + ') break; '; + } + } + } + it.errorPath = $currentErrorPath; + } + if ($someProperties) { + out += ' } '; + } + out += ' } '; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + var $useDefaults = it.opts.useDefaults && !it.compositeRule; + if ($schemaKeys.length) { + var arr3 = $schemaKeys; + if (arr3) { + var $propertyKey, i3 = -1, + l3 = arr3.length - 1; + while (i3 < l3) { + $propertyKey = arr3[i3 += 1]; + var $sch = $schema[$propertyKey]; + if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + var $prop = it.util.getProperty($propertyKey), + $passData = $data + $prop, + $hasDefault = $useDefaults && $sch.default !== undefined; + $it.schema = $sch; + $it.schemaPath = $schemaPath + $prop; + $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey); + $it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers); + $it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey); + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + $code = it.util.varReplace($code, $nextData, $passData); + var $useData = $passData; + } else { + var $useData = $nextData; + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; '; + } + if ($hasDefault) { + out += ' ' + ($code) + ' '; + } else { + if ($requiredHash && $requiredHash[$propertyKey]) { + out += ' if ( ' + ($useData) + ' === undefined '; + if ($ownProperties) { + out += ' || ! 
Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') '; + } + out += ') { ' + ($nextValid) + ' = false; '; + var $currentErrorPath = it.errorPath, + $currErrSchemaPath = $errSchemaPath, + $missingProperty = it.util.escapeQuotes($propertyKey); + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers); + } + $errSchemaPath = it.errSchemaPath + '/required'; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + $errSchemaPath = $currErrSchemaPath; + it.errorPath = $currentErrorPath; + out += ' } else { '; + } else { + if ($breakOnError) { + out += ' if ( ' + ($useData) + ' === undefined '; + if ($ownProperties) { + out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') '; + } + out += ') { ' + ($nextValid) + ' = true; } else { '; + } else { + out += ' if (' + ($useData) + ' !== undefined '; + if ($ownProperties) { + out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') '; + } + out += ' ) { '; + } + } + out += ' ' + ($code) + ' } '; + } + } + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } + } + if ($pPropertyKeys.length) { + var arr4 = $pPropertyKeys; + if (arr4) { + var $pProperty, i4 = -1, + l4 = arr4.length - 1; + while (i4 < l4) { + $pProperty = arr4[i4 += 1]; + var $sch = $pProperties[$pProperty]; + if ((it.opts.strictKeywords ? 
typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) { + $it.schema = $sch; + $it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty); + $it.errSchemaPath = it.errSchemaPath + '/patternProperties/' + it.util.escapeFragment($pProperty); + if ($ownProperties) { + out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + '); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; '; + } else { + out += ' for (var ' + ($key) + ' in ' + ($data) + ') { '; + } + out += ' if (' + (it.usePattern($pProperty)) + '.test(' + ($key) + ')) { '; + $it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers); + var $passData = $data + '[' + $key + ']'; + $it.dataPathArr[$dataNxt] = $key; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + if ($breakOnError) { + out += ' if (!' + ($nextValid) + ') break; '; + } + out += ' } '; + if ($breakOnError) { + out += ' else ' + ($nextValid) + ' = true; '; + } + out += ' } '; + if ($breakOnError) { + out += ' if (' + ($nextValid) + ') { '; + $closingBraces += '}'; + } + } + } + } + } + if ($breakOnError) { + out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {'; + } + out = it.util.cleanUpCode(out); + return out; +} diff --git a/node_modules/ajv/lib/dotjs/propertyNames.js b/node_modules/ajv/lib/dotjs/propertyNames.js new file mode 100644 index 0000000..b2bf295 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/propertyNames.js @@ -0,0 +1,82 @@ +'use strict'; +module.exports = function generate_propertyNames(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $errs = 'errs__' + $lvl; + var $it = it.util.copy(it); + var $closingBraces = ''; + $it.level++; + var $nextValid = 'valid' + $it.level; + out += 'var ' + ($errs) + ' = errors;'; + if ((it.opts.strictKeywords ? 
typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) { + $it.schema = $schema; + $it.schemaPath = $schemaPath; + $it.errSchemaPath = $errSchemaPath; + var $key = 'key' + $lvl, + $idx = 'idx' + $lvl, + $i = 'i' + $lvl, + $invalidName = '\' + ' + $key + ' + \'', + $dataNxt = $it.dataLevel = it.dataLevel + 1, + $nextData = 'data' + $dataNxt, + $dataProperties = 'dataProperties' + $lvl, + $ownProperties = it.opts.ownProperties, + $currentBaseId = it.baseId; + if ($ownProperties) { + out += ' var ' + ($dataProperties) + ' = undefined; '; + } + if ($ownProperties) { + out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + '); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; '; + } else { + out += ' for (var ' + ($key) + ' in ' + ($data) + ') { '; + } + out += ' var startErrs' + ($lvl) + ' = errors; '; + var $passData = $key; + var $wasComposite = it.compositeRule; + it.compositeRule = $it.compositeRule = true; + var $code = it.validate($it); + $it.baseId = $currentBaseId; + if (it.util.varOccurences($code, $nextData) < 2) { + out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' '; + } else { + out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' '; + } + it.compositeRule = $it.compositeRule = $wasComposite; + out += ' if (!' + ($nextValid) + ') { for (var ' + ($i) + '=startErrs' + ($lvl) + '; ' + ($i) + ' 0 : it.util.schemaHasRules($propertySch, it.RULES.all)))) { + $required[$required.length] = $property; + } + } + } + } else { + var $required = $schema; + } + } + if ($isData || $required.length) { + var $currentErrorPath = it.errorPath, + $loopRequired = $isData || $required.length >= it.opts.loopRequired, + $ownProperties = it.opts.ownProperties; + if ($breakOnError) { + out += ' var missing' + ($lvl) + '; '; + if ($loopRequired) { + if (!$isData) { + out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + '; '; + } + var $i = 'i' + $lvl, + $propertyPath = 'schema' + $lvl + '[' + $i + ']', + $missingProperty = '\' + ' + $propertyPath + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPathExpr($currentErrorPath, $propertyPath, it.opts.jsonPointers); + } + out += ' var ' + ($valid) + ' = true; '; + if ($isData) { + out += ' if (schema' + ($lvl) + ' === undefined) ' + ($valid) + ' = true; else if (!Array.isArray(schema' + ($lvl) + ')) ' + ($valid) + ' = false; else {'; + } + out += ' for (var ' + ($i) + ' = 0; ' + ($i) + ' < ' + ($vSchema) + '.length; ' + ($i) + '++) { ' + ($valid) + ' = ' + ($data) + '[' + ($vSchema) + '[' + ($i) + ']] !== undefined '; + if ($ownProperties) { + out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + ']) '; + } + out += '; if (!' + ($valid) + ') break; } '; + if ($isData) { + out += ' } '; + } + out += ' if (!' 
+ ($valid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else { '; + } else { + out += ' if ( '; + var arr2 = $required; + if (arr2) { + var $propertyKey, $i = -1, + l2 = arr2.length - 1; + while ($i < l2) { + $propertyKey = arr2[$i += 1]; + if ($i) { + out += ' || '; + } + var $prop = it.util.getProperty($propertyKey), + $useData = $data + $prop; + out += ' ( ( ' + ($useData) + ' === undefined '; + if ($ownProperties) { + out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') '; + } + out += ') && (missing' + ($lvl) + ' = ' + (it.util.toQuotedString(it.opts.jsonPointers ? $propertyKey : $prop)) + ') ) '; + } + } + out += ') { '; + var $propertyPath = 'missing' + $lvl, + $missingProperty = '\' + ' + $propertyPath + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.opts.jsonPointers ? 
it.util.getPathExpr($currentErrorPath, $propertyPath, true) : $currentErrorPath + ' + ' + $propertyPath; + } + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else { '; + } + } else { + if ($loopRequired) { + if (!$isData) { + out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + '; '; + } + var $i = 'i' + $lvl, + $propertyPath = 'schema' + $lvl + '[' + $i + ']', + $missingProperty = '\' + ' + $propertyPath + ' + \''; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPathExpr($currentErrorPath, $propertyPath, it.opts.jsonPointers); + } + if ($isData) { + out += ' if (' + ($vSchema) + ' && !Array.isArray(' + ($vSchema) + ')) { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } else if (' + ($vSchema) + ' !== undefined) { '; + } + out += ' for (var ' + ($i) + ' = 0; ' + ($i) + ' < ' + ($vSchema) + '.length; ' + ($i) + '++) { if (' + ($data) + '[' + ($vSchema) + '[' + ($i) + ']] === undefined '; + if ($ownProperties) { + out += ' || ! 
Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + ']) '; + } + out += ') { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } } '; + if ($isData) { + out += ' } '; + } + } else { + var arr3 = $required; + if (arr3) { + var $propertyKey, i3 = -1, + l3 = arr3.length - 1; + while (i3 < l3) { + $propertyKey = arr3[i3 += 1]; + var $prop = it.util.getProperty($propertyKey), + $missingProperty = it.util.escapeQuotes($propertyKey), + $useData = $data + $prop; + if (it.opts._errorDataPathProperty) { + it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers); + } + out += ' if ( ' + ($useData) + ' === undefined '; + if ($ownProperties) { + out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') '; + } + out += ') { var err = '; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \''; + if (it.opts._errorDataPathProperty) { + out += 'is a required property'; + } else { + out += 'should have required property \\\'' + ($missingProperty) + '\\\''; + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } '; + } + } + } + } + it.errorPath = $currentErrorPath; + } else if ($breakOnError) { + out += ' if (true) {'; + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/uniqueItems.js b/node_modules/ajv/lib/dotjs/uniqueItems.js new file mode 100644 index 0000000..c4f6536 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/uniqueItems.js @@ -0,0 +1,86 @@ +'use strict'; +module.exports = function generate_uniqueItems(it, $keyword, $ruleType) { + var out = ' '; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + var $isData = it.opts.$data && $schema && $schema.$data, + $schemaValue; + if ($isData) { + out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; '; + $schemaValue = 'schema' + $lvl; + } else { + 
$schemaValue = $schema; + } + if (($schema || $isData) && it.opts.uniqueItems !== false) { + if ($isData) { + out += ' var ' + ($valid) + '; if (' + ($schemaValue) + ' === false || ' + ($schemaValue) + ' === undefined) ' + ($valid) + ' = true; else if (typeof ' + ($schemaValue) + ' != \'boolean\') ' + ($valid) + ' = false; else { '; + } + out += ' var i = ' + ($data) + '.length , ' + ($valid) + ' = true , j; if (i > 1) { '; + var $itemType = it.schema.items && it.schema.items.type, + $typeIsArray = Array.isArray($itemType); + if (!$itemType || $itemType == 'object' || $itemType == 'array' || ($typeIsArray && ($itemType.indexOf('object') >= 0 || $itemType.indexOf('array') >= 0))) { + out += ' outer: for (;i--;) { for (j = i; j--;) { if (equal(' + ($data) + '[i], ' + ($data) + '[j])) { ' + ($valid) + ' = false; break outer; } } } '; + } else { + out += ' var itemIndices = {}, item; for (;i--;) { var item = ' + ($data) + '[i]; '; + var $method = 'checkDataType' + ($typeIsArray ? 's' : ''); + out += ' if (' + (it.util[$method]($itemType, 'item', true)) + ') continue; '; + if ($typeIsArray) { + out += ' if (typeof item == \'string\') item = \'"\' + item; '; + } + out += ' if (typeof itemIndices[item] == \'number\') { ' + ($valid) + ' = false; j = itemIndices[item]; break; } itemIndices[item] = i; } '; + } + out += ' } '; + if ($isData) { + out += ' } '; + } + out += ' if (!' + ($valid) + ') { '; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ('uniqueItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { i: i, j: j } '; + if (it.opts.messages !== false) { + out += ' , message: \'should NOT have duplicate items (items ## \' + j + \' and \' + i + \' are identical)\' '; + } + if (it.opts.verbose) { + out += ' , schema: '; + if ($isData) { + out += 'validate.schema' + ($schemaPath); + } else { + out += '' + ($schema); + } + out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } '; + if ($breakOnError) { + out += ' else { '; + } + } else { + if ($breakOnError) { + out += ' if (true) { '; + } + } + return out; +} diff --git a/node_modules/ajv/lib/dotjs/validate.js b/node_modules/ajv/lib/dotjs/validate.js new file mode 100644 index 0000000..cd0efc8 --- /dev/null +++ b/node_modules/ajv/lib/dotjs/validate.js @@ -0,0 +1,494 @@ +'use strict'; +module.exports = function generate_validate(it, $keyword, $ruleType) { + var out = ''; + var $async = it.schema.$async === true, + $refKeywords = it.util.schemaHasRulesExcept(it.schema, it.RULES.all, '$ref'), + $id = it.self._getId(it.schema); + if (it.opts.strictKeywords) { + var $unknownKwd = it.util.schemaUnknownRules(it.schema, it.RULES.keywords); + if ($unknownKwd) { + var $keywordsMsg = 'unknown keyword: ' + $unknownKwd; + if (it.opts.strictKeywords === 'log') it.logger.warn($keywordsMsg); + else throw new Error($keywordsMsg); + } + } + if (it.isTop) { + out += ' 
var validate = '; + if ($async) { + it.async = true; + out += 'async '; + } + out += 'function(data, dataPath, parentData, parentDataProperty, rootData) { \'use strict\'; '; + if ($id && (it.opts.sourceCode || it.opts.processCode)) { + out += ' ' + ('/\*# sourceURL=' + $id + ' */') + ' '; + } + } + if (typeof it.schema == 'boolean' || !($refKeywords || it.schema.$ref)) { + var $keyword = 'false schema'; + var $lvl = it.level; + var $dataLvl = it.dataLevel; + var $schema = it.schema[$keyword]; + var $schemaPath = it.schemaPath + it.util.getProperty($keyword); + var $errSchemaPath = it.errSchemaPath + '/' + $keyword; + var $breakOnError = !it.opts.allErrors; + var $errorKeyword; + var $data = 'data' + ($dataLvl || ''); + var $valid = 'valid' + $lvl; + if (it.schema === false) { + if (it.isTop) { + $breakOnError = true; + } else { + out += ' var ' + ($valid) + ' = false; '; + } + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || 'false schema') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} '; + if (it.opts.messages !== false) { + out += ' , message: \'boolean schema is false\' '; + } + if (it.opts.verbose) { + out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + } else { + if (it.isTop) { + if ($async) { + out += ' return data; '; + } else { + out += ' validate.errors = null; return true; '; + } + } else { + out += ' var ' + ($valid) + ' = true; '; + } + } + if (it.isTop) { + out += ' }; return validate; '; + } + return out; + } + if (it.isTop) { + var $top = it.isTop, + $lvl = it.level = 0, + $dataLvl = it.dataLevel = 0, + $data = 'data'; + it.rootId = it.resolve.fullPath(it.self._getId(it.root.schema)); + it.baseId = it.baseId || it.rootId; + delete it.isTop; + it.dataPathArr = [undefined]; + if (it.schema.default !== undefined && it.opts.useDefaults && it.opts.strictDefaults) { + var $defaultMsg = 'default is ignored in the schema root'; + if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg); + else throw new Error($defaultMsg); + } + out += ' var vErrors = null; '; + out += ' var errors = 0; '; + out += ' if (rootData === undefined) rootData = data; '; + } else { + var $lvl = it.level, + $dataLvl = it.dataLevel, + $data = 'data' + ($dataLvl || ''); + if ($id) it.baseId = it.resolve.url(it.baseId, $id); + if ($async && !it.async) throw new Error('async schema in sync schema'); + out += ' var errs_' + ($lvl) + ' = errors;'; + } + var $valid = 'valid' + $lvl, + $breakOnError = !it.opts.allErrors, + $closingBraces1 = '', + $closingBraces2 = ''; + var $errorKeyword; + var $typeSchema = it.schema.type, + $typeIsArray = Array.isArray($typeSchema); + if ($typeSchema && it.opts.nullable && it.schema.nullable === true) { + if ($typeIsArray) { + if ($typeSchema.indexOf('null') == -1) $typeSchema = $typeSchema.concat('null'); + } else if ($typeSchema != 'null') { + 
$typeSchema = [$typeSchema, 'null']; + $typeIsArray = true; + } + } + if ($typeIsArray && $typeSchema.length == 1) { + $typeSchema = $typeSchema[0]; + $typeIsArray = false; + } + if (it.schema.$ref && $refKeywords) { + if (it.opts.extendRefs == 'fail') { + throw new Error('$ref: validation keywords used in schema at path "' + it.errSchemaPath + '" (see option extendRefs)'); + } else if (it.opts.extendRefs !== true) { + $refKeywords = false; + it.logger.warn('$ref: keywords ignored in schema at path "' + it.errSchemaPath + '"'); + } + } + if (it.schema.$comment && it.opts.$comment) { + out += ' ' + (it.RULES.all.$comment.code(it, '$comment')); + } + if ($typeSchema) { + if (it.opts.coerceTypes) { + var $coerceToTypes = it.util.coerceToTypes(it.opts.coerceTypes, $typeSchema); + } + var $rulesGroup = it.RULES.types[$typeSchema]; + if ($coerceToTypes || $typeIsArray || $rulesGroup === true || ($rulesGroup && !$shouldUseGroup($rulesGroup))) { + var $schemaPath = it.schemaPath + '.type', + $errSchemaPath = it.errSchemaPath + '/type'; + var $schemaPath = it.schemaPath + '.type', + $errSchemaPath = it.errSchemaPath + '/type', + $method = $typeIsArray ? 'checkDataTypes' : 'checkDataType'; + out += ' if (' + (it.util[$method]($typeSchema, $data, true)) + ') { '; + if ($coerceToTypes) { + var $dataType = 'dataType' + $lvl, + $coerced = 'coerced' + $lvl; + out += ' var ' + ($dataType) + ' = typeof ' + ($data) + '; '; + if (it.opts.coerceTypes == 'array') { + out += ' if (' + ($dataType) + ' == \'object\' && Array.isArray(' + ($data) + ')) ' + ($dataType) + ' = \'array\'; '; + } + out += ' var ' + ($coerced) + ' = undefined; '; + var $bracesCoercion = ''; + var arr1 = $coerceToTypes; + if (arr1) { + var $type, $i = -1, + l1 = arr1.length - 1; + while ($i < l1) { + $type = arr1[$i += 1]; + if ($i) { + out += ' if (' + ($coerced) + ' === undefined) { '; + $bracesCoercion += '}'; + } + if (it.opts.coerceTypes == 'array' && $type != 'array') { + out += ' if (' + ($dataType) + ' == \'array\' && ' + ($data) + '.length == 1) { ' + ($coerced) + ' = ' + ($data) + ' = ' + ($data) + '[0]; ' + ($dataType) + ' = typeof ' + ($data) + '; } '; + } + if ($type == 'string') { + out += ' if (' + ($dataType) + ' == \'number\' || ' + ($dataType) + ' == \'boolean\') ' + ($coerced) + ' = \'\' + ' + ($data) + '; else if (' + ($data) + ' === null) ' + ($coerced) + ' = \'\'; '; + } else if ($type == 'number' || $type == 'integer') { + out += ' if (' + ($dataType) + ' == \'boolean\' || ' + ($data) + ' === null || (' + ($dataType) + ' == \'string\' && ' + ($data) + ' && ' + ($data) + ' == +' + ($data) + ' '; + if ($type == 'integer') { + out += ' && !(' + ($data) + ' % 1)'; + } + out += ')) ' + ($coerced) + ' = +' + ($data) + '; '; + } else if ($type == 'boolean') { + out += ' if (' + ($data) + ' === \'false\' || ' + ($data) + ' === 0 || ' + ($data) + ' === null) ' + ($coerced) + ' = false; else if (' + ($data) + ' === \'true\' || ' + ($data) + ' === 1) ' + ($coerced) + ' = true; '; + } else if ($type == 'null') { + out += ' if (' + ($data) + ' === \'\' || ' + ($data) + ' === 0 || ' + ($data) + ' === false) ' + ($coerced) + ' = null; '; + } else if (it.opts.coerceTypes == 'array' && $type == 'array') { + out += ' if (' + ($dataType) + ' == \'string\' || ' + ($dataType) + ' == \'number\' || ' + ($dataType) + ' == \'boolean\' || ' + ($data) + ' == null) ' + ($coerced) + ' = [' + ($data) + ']; '; + } + } + } + out += ' ' + ($bracesCoercion) + ' if (' + ($coerced) + ' === undefined) { '; + var $$outStack = $$outStack || []; + 
$$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \''; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be '; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } else { '; + var $parentData = $dataLvl ? 'data' + (($dataLvl - 1) || '') : 'parentData', + $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty'; + out += ' ' + ($data) + ' = ' + ($coerced) + '; '; + if (!$dataLvl) { + out += 'if (' + ($parentData) + ' !== undefined)'; + } + out += ' ' + ($parentData) + '[' + ($parentDataProperty) + '] = ' + ($coerced) + '; } '; + } else { + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \''; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be '; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' {} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + } + out += ' } '; + } + } + if (it.schema.$ref && !$refKeywords) { + out += ' ' + (it.RULES.all.$ref.code(it, '$ref')) + ' '; + if ($breakOnError) { + out += ' } if (errors === '; + if ($top) { + out += '0'; + } else { + out += 'errs_' + ($lvl); + } + out += ') { '; + $closingBraces2 += '}'; + } + } else { + var arr2 = it.RULES; + if (arr2) { + var $rulesGroup, i2 = -1, + l2 = arr2.length - 1; + while (i2 < l2) { + $rulesGroup = arr2[i2 += 1]; + if ($shouldUseGroup($rulesGroup)) { + if ($rulesGroup.type) { + out += ' if (' + (it.util.checkDataType($rulesGroup.type, $data)) + ') { '; + } + if 
(it.opts.useDefaults) { + if ($rulesGroup.type == 'object' && it.schema.properties) { + var $schema = it.schema.properties, + $schemaKeys = Object.keys($schema); + var arr3 = $schemaKeys; + if (arr3) { + var $propertyKey, i3 = -1, + l3 = arr3.length - 1; + while (i3 < l3) { + $propertyKey = arr3[i3 += 1]; + var $sch = $schema[$propertyKey]; + if ($sch.default !== undefined) { + var $passData = $data + it.util.getProperty($propertyKey); + if (it.compositeRule) { + if (it.opts.strictDefaults) { + var $defaultMsg = 'default is ignored for: ' + $passData; + if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg); + else throw new Error($defaultMsg); + } + } else { + out += ' if (' + ($passData) + ' === undefined '; + if (it.opts.useDefaults == 'empty') { + out += ' || ' + ($passData) + ' === null || ' + ($passData) + ' === \'\' '; + } + out += ' ) ' + ($passData) + ' = '; + if (it.opts.useDefaults == 'shared') { + out += ' ' + (it.useDefault($sch.default)) + ' '; + } else { + out += ' ' + (JSON.stringify($sch.default)) + ' '; + } + out += '; '; + } + } + } + } + } else if ($rulesGroup.type == 'array' && Array.isArray(it.schema.items)) { + var arr4 = it.schema.items; + if (arr4) { + var $sch, $i = -1, + l4 = arr4.length - 1; + while ($i < l4) { + $sch = arr4[$i += 1]; + if ($sch.default !== undefined) { + var $passData = $data + '[' + $i + ']'; + if (it.compositeRule) { + if (it.opts.strictDefaults) { + var $defaultMsg = 'default is ignored for: ' + $passData; + if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg); + else throw new Error($defaultMsg); + } + } else { + out += ' if (' + ($passData) + ' === undefined '; + if (it.opts.useDefaults == 'empty') { + out += ' || ' + ($passData) + ' === null || ' + ($passData) + ' === \'\' '; + } + out += ' ) ' + ($passData) + ' = '; + if (it.opts.useDefaults == 'shared') { + out += ' ' + (it.useDefault($sch.default)) + ' '; + } else { + out += ' ' + (JSON.stringify($sch.default)) + ' '; + } + out += '; '; + } + } + } + } + } + } + var arr5 = $rulesGroup.rules; + if (arr5) { + var $rule, i5 = -1, + l5 = arr5.length - 1; + while (i5 < l5) { + $rule = arr5[i5 += 1]; + if ($shouldUseRule($rule)) { + var $code = $rule.code(it, $rule.keyword, $rulesGroup.type); + if ($code) { + out += ' ' + ($code) + ' '; + if ($breakOnError) { + $closingBraces1 += '}'; + } + } + } + } + } + if ($breakOnError) { + out += ' ' + ($closingBraces1) + ' '; + $closingBraces1 = ''; + } + if ($rulesGroup.type) { + out += ' } '; + if ($typeSchema && $typeSchema === $rulesGroup.type && !$coerceToTypes) { + out += ' else { '; + var $schemaPath = it.schemaPath + '.type', + $errSchemaPath = it.errSchemaPath + '/type'; + var $$outStack = $$outStack || []; + $$outStack.push(out); + out = ''; /* istanbul ignore else */ + if (it.createErrors !== false) { + out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \''; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' } '; + if (it.opts.messages !== false) { + out += ' , message: \'should be '; + if ($typeIsArray) { + out += '' + ($typeSchema.join(",")); + } else { + out += '' + ($typeSchema); + } + out += '\' '; + } + if (it.opts.verbose) { + out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' '; + } + out += ' } '; + } else { + out += ' 
{} '; + } + var __err = out; + out = $$outStack.pop(); + if (!it.compositeRule && $breakOnError) { + /* istanbul ignore if */ + if (it.async) { + out += ' throw new ValidationError([' + (__err) + ']); '; + } else { + out += ' validate.errors = [' + (__err) + ']; return false; '; + } + } else { + out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; '; + } + out += ' } '; + } + } + if ($breakOnError) { + out += ' if (errors === '; + if ($top) { + out += '0'; + } else { + out += 'errs_' + ($lvl); + } + out += ') { '; + $closingBraces2 += '}'; + } + } + } + } + } + if ($breakOnError) { + out += ' ' + ($closingBraces2) + ' '; + } + if ($top) { + if ($async) { + out += ' if (errors === 0) return data; '; + out += ' else throw new ValidationError(vErrors); '; + } else { + out += ' validate.errors = vErrors; '; + out += ' return errors === 0; '; + } + out += ' }; return validate;'; + } else { + out += ' var ' + ($valid) + ' = errors === errs_' + ($lvl) + ';'; + } + out = it.util.cleanUpCode(out); + if ($top) { + out = it.util.finalCleanUpCode(out, $async); + } + + function $shouldUseGroup($rulesGroup) { + var rules = $rulesGroup.rules; + for (var i = 0; i < rules.length; i++) + if ($shouldUseRule(rules[i])) return true; + } + + function $shouldUseRule($rule) { + return it.schema[$rule.keyword] !== undefined || ($rule.implements && $ruleImplementsSomeKeyword($rule)); + } + + function $ruleImplementsSomeKeyword($rule) { + var impl = $rule.implements; + for (var i = 0; i < impl.length; i++) + if (it.schema[impl[i]] !== undefined) return true; + } + return out; +} diff --git a/node_modules/ajv/lib/keyword.js b/node_modules/ajv/lib/keyword.js new file mode 100644 index 0000000..5fec19a --- /dev/null +++ b/node_modules/ajv/lib/keyword.js @@ -0,0 +1,146 @@ +'use strict'; + +var IDENTIFIER = /^[a-z_$][a-z0-9_$-]*$/i; +var customRuleCode = require('./dotjs/custom'); +var definitionSchema = require('./definition_schema'); + +module.exports = { + add: addKeyword, + get: getKeyword, + remove: removeKeyword, + validate: validateKeyword +}; + + +/** + * Define custom keyword + * @this Ajv + * @param {String} keyword custom keyword, should be unique (including different from all standard, custom and macro keywords). + * @param {Object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`. + * @return {Ajv} this for method chaining + */ +function addKeyword(keyword, definition) { + /* jshint validthis: true */ + /* eslint no-shadow: 0 */ + var RULES = this.RULES; + if (RULES.keywords[keyword]) + throw new Error('Keyword ' + keyword + ' is already defined'); + + if (!IDENTIFIER.test(keyword)) + throw new Error('Keyword ' + keyword + ' is not a valid identifier'); + + if (definition) { + this.validateKeyword(definition, true); + + var dataType = definition.type; + if (Array.isArray(dataType)) { + for (var i=0; i ../ajv-dist/bower.json + cd ../ajv-dist + + if [[ `git status --porcelain` ]]; then + echo "Changes detected. Updating master branch..." + git add -A + git commit -m "updated by travis build #$TRAVIS_BUILD_NUMBER" + git push --quiet origin master > /dev/null 2>&1 + fi + + echo "Publishing tag..." 
+ + git tag $TRAVIS_TAG + git push --tags > /dev/null 2>&1 + + echo "Done" +fi diff --git a/node_modules/ajv/scripts/travis-gh-pages b/node_modules/ajv/scripts/travis-gh-pages new file mode 100755 index 0000000..46ded16 --- /dev/null +++ b/node_modules/ajv/scripts/travis-gh-pages @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -e + +if [[ "$TRAVIS_BRANCH" == "master" && "$TRAVIS_PULL_REQUEST" == "false" && $TRAVIS_JOB_NUMBER =~ ".3" ]]; then + git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qE '\.md$|^LICENSE$|travis-gh-pages$' && { + rm -rf ../gh-pages + git clone -b gh-pages --single-branch https://${GITHUB_TOKEN}@github.com/epoberezkin/ajv.git ../gh-pages + mkdir -p ../gh-pages/_source + cp *.md ../gh-pages/_source + cp LICENSE ../gh-pages/_source + currentDir=$(pwd) + cd ../gh-pages + $currentDir/node_modules/.bin/gh-pages-generator + # remove logo from README + sed -i -E "s/]+ajv_logo[^>]+>//" index.md + git config user.email "$GIT_USER_EMAIL" + git config user.name "$GIT_USER_NAME" + git add . + git commit -am "updated by travis build #$TRAVIS_BUILD_NUMBER" + git push --quiet origin gh-pages > /dev/null 2>&1 + } +fi diff --git a/node_modules/ansi-align/CHANGELOG.md b/node_modules/ansi-align/CHANGELOG.md new file mode 100644 index 0000000..0db57f7 --- /dev/null +++ b/node_modules/ansi-align/CHANGELOG.md @@ -0,0 +1,51 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +# [3.0.0](https://github.com/nexdrew/ansi-align/compare/v2.0.0...v3.0.0) (2018-12-17) + + +### Bug Fixes + +* **package:** update string-width to version 3.0.0 ([#50](https://github.com/nexdrew/ansi-align/issues/50)) ([67f0d8f](https://github.com/nexdrew/ansi-align/commit/67f0d8f)) + + +### BREAKING CHANGES + +* **package:** Node 4 no longer supported, please update to Node 6+ or use ansi-align@2.0.0 + + + + +# [2.0.0](https://github.com/nexdrew/ansi-align/compare/v1.1.0...v2.0.0) (2017-05-01) + + +### Features + +* ES2015ify, dropping support for Node <4 ([#30](https://github.com/nexdrew/ansi-align/issues/30)) ([7b43f48](https://github.com/nexdrew/ansi-align/commit/7b43f48)) + + +### BREAKING CHANGES + +* Node 0.10 or 0.12 no longer supported, please update to Node 4+ or use ansi-align@1.1.0 + + + + +# [1.1.0](https://github.com/nexdrew/ansi-align/compare/v1.0.0...v1.1.0) (2016-06-06) + + +### Features + +* support left-alignment as no-op ([#3](https://github.com/nexdrew/ansi-align/issues/3)) ([e581db6](https://github.com/nexdrew/ansi-align/commit/e581db6)) + + + + +# 1.0.0 (2016-04-30) + + +### Features + +* initial commit ([1914d90](https://github.com/nexdrew/ansi-align/commit/1914d90)) diff --git a/node_modules/ansi-align/LICENSE b/node_modules/ansi-align/LICENSE new file mode 100644 index 0000000..ab601b6 --- /dev/null +++ b/node_modules/ansi-align/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. diff --git a/node_modules/ansi-align/README.md b/node_modules/ansi-align/README.md new file mode 100644 index 0000000..5e51682 --- /dev/null +++ b/node_modules/ansi-align/README.md @@ -0,0 +1,80 @@ +# ansi-align + +> align-text with ANSI support for CLIs + +[](https://travis-ci.org/nexdrew/ansi-align) +[](https://coveralls.io/github/nexdrew/ansi-align?branch=master) +[](https://github.com/conventional-changelog/standard-version) +[](https://greenkeeper.io/) + +Easily center- or right- align a block of text, carefully ignoring ANSI escape codes. + +E.g. turn this: + + + +Into this: + + + +## Install + +```sh +npm install --save ansi-align +``` + +```js +var ansiAlign = require('ansi-align') +``` + +## API + +### `ansiAlign(text, [opts])` + +Align the given text per the line with the greatest [`string-width`](https://github.com/sindresorhus/string-width), returning a new string (or array). + +#### Arguments + +- `text`: required, string or array + + The text to align. If a string is given, it will be split using either the `opts.split` value or `'\n'` by default. If an array is given, a different array of modified strings will be returned. + +- `opts`: optional, object + + Options to change behavior, see below. + +#### Options + +- `opts.align`: string, default `'center'` + + The alignment mode. Use `'center'` for center-alignment, `'right'` for right-alignment, or `'left'` for left-alignment. Note that the given `text` is assumed to be left-aligned already, so specifying `align: 'left'` just returns the `text` as is (no-op). + +- `opts.split`: string or RegExp, default `'\n'` + + The separator to use when splitting the text. Only used if text is given as a string. + +- `opts.pad`: string, default `' '` + + The value used to left-pad (prepend to) lines of lesser width. Will be repeated as necessary to adjust alignment to the line with the greatest width. + +### `ansiAlign.center(text)` + +Alias for `ansiAlign(text, { align: 'center' })`. + +### `ansiAlign.right(text)` + +Alias for `ansiAlign(text, { align: 'right' })`. + +### `ansiAlign.left(text)` + +Alias for `ansiAlign(text, { align: 'left' })`, which is a no-op. + +## Similar Packages + +- [`center-align`](https://github.com/jonschlinkert/center-align): Very close to this package, except it doesn't support ANSI codes. +- [`left-pad`](https://github.com/camwest/left-pad): Great for left-padding but does not support center alignment or ANSI codes. +- Pretty much anything by the [chalk](https://github.com/chalk) team + +## License + +ISC © Contributors diff --git a/node_modules/ansi-align/index.js b/node_modules/ansi-align/index.js new file mode 100644 index 0000000..67fa826 --- /dev/null +++ b/node_modules/ansi-align/index.js @@ -0,0 +1,61 @@ +'use strict' + +const stringWidth = require('string-width') + +function ansiAlign (text, opts) { + if (!text) return text + + opts = opts || {} + const align = opts.align || 'center' + + // short-circuit `align: 'left'` as no-op + if (align === 'left') return text + + const split = opts.split || '\n' + const pad = opts.pad || ' ' + const widthDiffFn = align !== 'right' ? 
halfDiff : fullDiff + + let returnString = false + if (!Array.isArray(text)) { + returnString = true + text = String(text).split(split) + } + + let width + let maxWidth = 0 + text = text.map(function (str) { + str = String(str) + width = stringWidth(str) + maxWidth = Math.max(width, maxWidth) + return { + str, + width + } + }).map(function (obj) { + return new Array(widthDiffFn(maxWidth, obj.width) + 1).join(pad) + obj.str + }) + + return returnString ? text.join(split) : text +} + +ansiAlign.left = function left (text) { + return ansiAlign(text, { align: 'left' }) +} + +ansiAlign.center = function center (text) { + return ansiAlign(text, { align: 'center' }) +} + +ansiAlign.right = function right (text) { + return ansiAlign(text, { align: 'right' }) +} + +module.exports = ansiAlign + +function halfDiff (maxWidth, curWidth) { + return Math.floor((maxWidth - curWidth) / 2) +} + +function fullDiff (maxWidth, curWidth) { + return maxWidth - curWidth +} diff --git a/node_modules/ansi-align/package.json b/node_modules/ansi-align/package.json new file mode 100644 index 0000000..47dab59 --- /dev/null +++ b/node_modules/ansi-align/package.json @@ -0,0 +1,70 @@ +{ + "_from": "ansi-align@^3.0.0", + "_id": "ansi-align@3.0.0", + "_inBundle": false, + "_integrity": "sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==", + "_location": "/ansi-align", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ansi-align@^3.0.0", + "name": "ansi-align", + "escapedName": "ansi-align", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/boxen" + ], + "_resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.0.tgz", + "_shasum": "b536b371cf687caaef236c18d3e21fe3797467cb", + "_spec": "ansi-align@^3.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/boxen", + "author": { + "name": "nexdrew" + }, + "bugs": { + "url": "https://github.com/nexdrew/ansi-align/issues" + }, + "bundleDependencies": false, + "dependencies": { + "string-width": "^3.0.0" + }, + "deprecated": false, + "description": "align-text with ANSI support for CLIs", + "devDependencies": { + "ava": "^1.0.1", + "chalk": "^2.4.1", + "coveralls": "^3.0.2", + "nyc": "^13.1.0", + "standard": "^12.0.1", + "standard-version": "^4.4.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/nexdrew/ansi-align#readme", + "keywords": [ + "ansi", + "align", + "cli", + "center", + "pad" + ], + "license": "ISC", + "main": "index.js", + "name": "ansi-align", + "repository": { + "type": "git", + "url": "git+https://github.com/nexdrew/ansi-align.git" + }, + "scripts": { + "coverage": "nyc report --reporter=text-lcov | coveralls", + "pretest": "standard", + "release": "standard-version", + "test": "nyc ava" + }, + "version": "3.0.0" +} diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js new file mode 100644 index 0000000..c254480 --- /dev/null +++ b/node_modules/ansi-regex/index.js @@ -0,0 +1,14 @@ +'use strict'; + +module.exports = options => { + options = Object.assign({ + onlyFirst: false + }, options); + + const pattern = [ + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + ].join('|'); + + return new RegExp(pattern, options.onlyFirst ? 
undefined : 'g'); +}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json new file mode 100644 index 0000000..703c4d2 --- /dev/null +++ b/node_modules/ansi-regex/package.json @@ -0,0 +1,85 @@ +{ + "_from": "ansi-regex@^4.1.0", + "_id": "ansi-regex@4.1.0", + "_inBundle": false, + "_integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "_location": "/ansi-regex", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ansi-regex@^4.1.0", + "name": "ansi-regex", + "escapedName": "ansi-regex", + "rawSpec": "^4.1.0", + "saveSpec": null, + "fetchSpec": "^4.1.0" + }, + "_requiredBy": [ + "/strip-ansi" + ], + "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "_shasum": "8b9f8f08cf1acb843756a839ca8c7e3168c51997", + "_spec": "ansi-regex@^4.1.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/ansi-regex/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Regular expression for matching ANSI escape codes", + "devDependencies": { + "ava": "^0.25.0", + "xo": "^0.23.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/chalk/ansi-regex#readme", + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "license": "MIT", + "name": "ansi-regex", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/ansi-regex.git" + }, + "scripts": { + "test": "xo && ava", + "view-supported": "node fixtures/view-codes.js" + }, + "version": "4.1.0" +} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md new file mode 100644 index 0000000..d19c446 --- /dev/null +++ b/node_modules/ansi-regex/readme.md @@ -0,0 +1,87 @@ +# 
ansi-regex [](https://travis-ci.org/chalk/ansi-regex) + +> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + +--- + + + + Get professional support for this package with a Tidelift subscription + + + + Tidelift helps make open source sustainable for maintainers while giving companiesassurances about security, maintenance, and licensing for their dependencies. + + + +--- + + +## Install + +``` +$ npm install ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] + +'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); +//=> ['\u001B[4m'] + +'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); +//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] +``` + + +## API + +### ansiRegex([options]) + +Returns a regex for matching ANSI escape codes. + +#### options + +##### onlyFirst + +Type: `boolean` +Default: `false` *(Matches any ANSI escape codes in a string)* + +Match only the first ANSI escape. + + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. + + +## Security + +To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. 
+ + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..90a871c --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,165 @@ +'use strict'; +const colorConvert = require('color-convert'); + +const wrapAnsi16 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => function () { + const rgb = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39], + + // Bright color + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Fix humans + styles.color.grey = styles.color.gray; + + for (const groupName of Object.keys(styles)) { + const group = styles[groupName]; + + for (const styleName of Object.keys(group)) { + const style = group[styleName]; + + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + } + + const ansi2ansi = n => n; + const rgb2rgb = (r, g, b) => [r, g, b]; + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = { + ansi: wrapAnsi16(ansi2ansi, 0) + }; + styles.color.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 0) + }; + styles.color.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 0) + }; + + styles.bgColor.ansi = { + ansi: wrapAnsi16(ansi2ansi, 10) + }; + styles.bgColor.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 10) + }; + styles.bgColor.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 10) + }; + + for (let key of Object.keys(colorConvert)) { + if (typeof colorConvert[key] !== 'object') { + continue; + } + + const suite = colorConvert[key]; + + if (key === 'ansi16') { + key = 'ansi'; + } + + if ('ansi16' in suite) { + styles.color.ansi[key] = wrapAnsi16(suite.ansi16, 0); + styles.bgColor.ansi[key] = wrapAnsi16(suite.ansi16, 10); + } + + if ('ansi256' in suite) { + styles.color.ansi256[key] = wrapAnsi256(suite.ansi256, 
0); + styles.bgColor.ansi256[key] = wrapAnsi256(suite.ansi256, 10); + } + + if ('rgb' in suite) { + styles.color.ansi16m[key] = wrapAnsi16m(suite.rgb, 0); + styles.bgColor.ansi16m[key] = wrapAnsi16m(suite.rgb, 10); + } + } + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..f231ff9 --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,89 @@ +{ + "_from": "ansi-styles@^3.2.1", + "_id": "ansi-styles@3.2.1", + "_inBundle": false, + "_integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "_location": "/ansi-styles", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ansi-styles@^3.2.1", + "name": "ansi-styles", + "escapedName": "ansi-styles", + "rawSpec": "^3.2.1", + "saveSpec": null, + "fetchSpec": "^3.2.1" + }, + "_requiredBy": [ + "/chalk", + "/wrap-ansi" + ], + "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "_shasum": "41fbb20243e50b12be0f04b8dedbf07520ce841d", + "_spec": "ansi-styles@^3.2.1", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/chalk", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "ava": { + "require": "babel-polyfill" + }, + "bugs": { + "url": "https://github.com/chalk/ansi-styles/issues" + }, + "bundleDependencies": false, + "dependencies": { + "color-convert": "^1.9.0" + }, + "deprecated": false, + "description": "ANSI escape codes for styling strings in the terminal", + "devDependencies": { + "ava": "*", + "babel-polyfill": "^6.23.0", + "svg-term-cli": "^2.1.1", + "xo": "*" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/chalk/ansi-styles#readme", + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + 
], + "license": "MIT", + "name": "ansi-styles", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/ansi-styles.git" + }, + "scripts": { + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor", + "test": "xo && ava" + }, + "version": "3.2.1" +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..3158e2d --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,147 @@ +# ansi-styles [](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + + +## Install + +``` +$ npm install ansi-styles +``` + + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#ABCDEF') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. + + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` ("bright black") +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. 
+ +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 0000000..983fbe8 --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 0000000..91fa5b6 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. 
+ +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 0000000..089117b --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. + * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. 
+ * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 0000000..768b7e5 --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,64 @@ +{ + "_from": "array-flatten@1.1.1", + "_id": "array-flatten@1.1.1", + "_inBundle": false, + "_integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=", + "_location": "/array-flatten", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "array-flatten@1.1.1", + "name": "array-flatten", + "escapedName": "array-flatten", + "rawSpec": "1.1.1", + "saveSpec": null, + "fetchSpec": "1.1.1" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "_shasum": "9a5f699051b1e7073328f2a008968b64ea2955d2", + "_spec": "array-flatten@1.1.1", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/express", + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Flatten an array of nested arrays into a single flat array", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + }, + "files": [ + "array-flatten.js", + "LICENSE" + ], + "homepage": "https://github.com/blakeembrey/array-flatten", + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "license": "MIT", + "main": "array-flatten.js", + "name": "array-flatten", + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "version": "1.1.1" +} diff --git a/node_modules/asn1/LICENSE b/node_modules/asn1/LICENSE new file mode 100644 index 0000000..9b5dcdb --- /dev/null +++ b/node_modules/asn1/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011 Mark Cavage, All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/asn1/README.md b/node_modules/asn1/README.md new file mode 100644 index 0000000..2208210 --- /dev/null +++ b/node_modules/asn1/README.md @@ -0,0 +1,50 @@ +node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS. +Currently BER encoding is supported; at some point I'll likely have to do DER. + +## Usage + +Mostly, if you're *actually* needing to read and write ASN.1, you probably don't +need this readme to explain what and why. If you have no idea what ASN.1 is, +see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +The source is pretty much self-explanatory, and has read/write methods for the +common types out there. + +### Decoding + +The following reads an ASN.1 sequence with a boolean. + + var Ber = require('asn1').Ber; + + var reader = new Ber.Reader(Buffer.from([0x30, 0x03, 0x01, 0x01, 0xff])); + + reader.readSequence(); + console.log('Sequence len: ' + reader.length); + if (reader.peek() === Ber.Boolean) + console.log(reader.readBoolean()); + +### Encoding + +The following generates the same payload as above. + + var Ber = require('asn1').Ber; + + var writer = new Ber.Writer(); + + writer.startSequence(); + writer.writeBoolean(true); + writer.endSequence(); + + console.log(writer.buffer); + +## Installation + + npm install asn1 + +## License + +MIT. + +## Bugs + +See . diff --git a/node_modules/asn1/lib/ber/errors.js b/node_modules/asn1/lib/ber/errors.js new file mode 100644 index 0000000..4557b8a --- /dev/null +++ b/node_modules/asn1/lib/ber/errors.js @@ -0,0 +1,13 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + + newInvalidAsn1Error: function (msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } + +}; diff --git a/node_modules/asn1/lib/ber/index.js b/node_modules/asn1/lib/ber/index.js new file mode 100644 index 0000000..387d132 --- /dev/null +++ b/node_modules/asn1/lib/ber/index.js @@ -0,0 +1,27 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var errors = require('./errors'); +var types = require('./types'); + +var Reader = require('./reader'); +var Writer = require('./writer'); + + +// --- Exports + +module.exports = { + + Reader: Reader, + + Writer: Writer + +}; + +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; +} +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; +} diff --git a/node_modules/asn1/lib/ber/reader.js b/node_modules/asn1/lib/ber/reader.js new file mode 100644 index 0000000..8a7e4ca --- /dev/null +++ b/node_modules/asn1/lib/ber/reader.js @@ -0,0 +1,262 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
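+
+// Note: Reader wraps a Buffer of BER-encoded bytes and keeps a cursor
+// (_offset) plus the length parsed by the most recent readLength() call
+// (_len, exposed via the `length` getter). The read* methods below advance
+// the cursor, return null when not enough data remains, and throw an
+// InvalidAsn1Error on malformed input.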
+ +var assert = require('assert'); +var Buffer = require('safer-buffer').Buffer; + +var ASN1 = require('./types'); +var errors = require('./errors'); + + +// --- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + + + +// --- API + +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); + + this._buf = data; + this._size = data.length; + + // These hold the "current" state + this._len = 0; + this._offset = 0; +} + +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); + +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); + + +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. + */ +Reader.prototype.readByte = function (peek) { + if (this._size - this._offset < 1) + return null; + + var b = this._buf[this._offset] & 0xff; + + if (!peek) + this._offset += 1; + + return b; +}; + + +Reader.prototype.peek = function () { + return this.readByte(true); +}; + + +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function (offset) { + if (offset === undefined) + offset = this._offset; + + if (offset >= this._size) + return null; + + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; + + if ((lenB & 0x80) === 0x80) { + lenB &= 0x7f; + + if (lenB === 0) + throw newInvalidAsn1Error('Indefinite length not supported'); + + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); + + if (this._size - offset < lenB) + return null; + + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); + + } else { + // Wasn't a variable length + this._len = lenB; + } + + return offset; +}; + + +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. + */ +Reader.prototype.readSequence = function (tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + this._offset = o; + return seq; +}; + + +Reader.prototype.readInt = function () { + return this._readTag(ASN1.Integer); +}; + + +Reader.prototype.readBoolean = function () { + return (this._readTag(ASN1.Boolean) === 0 ? 
false : true); +}; + + +Reader.prototype.readEnumeration = function () { + return this._readTag(ASN1.Enumeration); +}; + + +Reader.prototype.readString = function (tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; + + var b = this.peek(); + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + + if (o === null) + return null; + + if (this.length > this._size - o) + return null; + + this._offset = o; + + if (this.length === 0) + return retbuf ? Buffer.alloc(0) : ''; + + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; + + return retbuf ? str : str.toString('utf8'); +}; + +Reader.prototype.readOID = function (tag) { + if (!tag) + tag = ASN1.OID; + + var b = this.readString(tag, true); + if (b === null) + return null; + + var values = []; + var value = 0; + + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; + + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) === 0) { + values.push(value); + value = 0; + } + } + + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); + + return values.join('.'); +}; + + +Reader.prototype._readTag = function (tag) { + assert.ok(tag !== undefined); + + var b = this.peek(); + + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + + if (this.length > this._size - o) + return null; + this._offset = o; + + var fb = this._buf[this._offset]; + var value = 0; + + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); + } + + if ((fb & 0x80) === 0x80 && i !== 4) + value -= (1 << (i * 8)); + + return value >> 0; +}; + + + +// --- Exported API + +module.exports = Reader; diff --git a/node_modules/asn1/lib/ber/types.js b/node_modules/asn1/lib/ber/types.js new file mode 100644 index 0000000..8aea000 --- /dev/null +++ b/node_modules/asn1/lib/ber/types.js @@ -0,0 +1,36 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; diff --git a/node_modules/asn1/lib/ber/writer.js b/node_modules/asn1/lib/ber/writer.js new file mode 100644 index 0000000..3515acf --- /dev/null +++ b/node_modules/asn1/lib/ber/writer.js @@ -0,0 +1,317 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
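+
+// Note: Writer builds a BER-encoded Buffer incrementally. The write* methods
+// append tag/length/value bytes at _offset, _ensure() grows the backing
+// buffer (by options.growthFactor) when it runs out of room, and
+// startSequence()/endSequence() reserve three length bytes and back-fill
+// them once the sequence body has been written.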
+ +var assert = require('assert'); +var Buffer = require('safer-buffer').Buffer; +var ASN1 = require('./types'); +var errors = require('./errors'); + + +// --- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 +}; + + +// --- Helpers + +function merge(from, to) { + assert.ok(from); + assert.equal(typeof (from), 'object'); + assert.ok(to); + assert.equal(typeof (to), 'object'); + + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function (key) { + if (to[key]) + return; + + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); + + return to; +} + + + +// --- API + +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); + + this._buf = Buffer.alloc(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; + + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. + this._seq = []; +} + +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); + + return (this._buf.slice(0, this._offset)); + } +}); + +Writer.prototype.writeByte = function (b) { + if (typeof (b) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(1); + this._buf[this._offset++] = b; +}; + + +Writer.prototype.writeInt = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Integer; + + var sz = 4; + + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; + } + + if (sz > 4) + throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); + + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; + + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; + } + +}; + + +Writer.prototype.writeNull = function () { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; + + +Writer.prototype.writeEnumeration = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Enumeration; + + return this.writeInt(i, tag); +}; + + +Writer.prototype.writeBoolean = function (b, tag) { + if (typeof (b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof (tag) !== 'number') + tag = ASN1.Boolean; + + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 
0xff : 0x00; +}; + + +Writer.prototype.writeString = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); + if (typeof (tag) !== 'number') + tag = ASN1.OctetString; + + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; + + +Writer.prototype.writeBuffer = function (buf, tag) { + if (typeof (tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; + + +Writer.prototype.writeStringArray = function (strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + + var self = this; + strings.forEach(function (s) { + self.writeString(s); + }); +}; + +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof (tag) !== 'number') + tag = ASN1.OID; + + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); + + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) | 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } + } + + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function (b) { + encodeOctet(bytes, parseInt(b, 10)); + }); + + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function (b) { + self.writeByte(b); + }); +}; + + +Writer.prototype.writeLength = function (len) { + if (typeof (len) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(4); + + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else { + throw newInvalidAsn1Error('Length too long (> 4 bytes)'); + } +}; + +Writer.prototype.startSequence = function (tag) { + if (typeof (tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; + + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; + + +Writer.prototype.endSequence = function () { + var seq = this._seq.pop(); + var start = 
seq + 3; + var len = this._offset - start; + + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; + } else { + throw newInvalidAsn1Error('Sequence too long'); + } +}; + + +Writer.prototype._shift = function (start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); + + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; + +Writer.prototype._ensure = function (len) { + assert.ok(len); + + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; + + var buf = Buffer.alloc(sz); + + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; + } +}; + + + +// --- Exported API + +module.exports = Writer; diff --git a/node_modules/asn1/lib/index.js b/node_modules/asn1/lib/index.js new file mode 100644 index 0000000..ede3ab2 --- /dev/null +++ b/node_modules/asn1/lib/index.js @@ -0,0 +1,20 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +var Ber = require('./ber/index'); + + + +// --- Exported API + +module.exports = { + + Ber: Ber, + + BerReader: Ber.Reader, + + BerWriter: Ber.Writer + +}; diff --git a/node_modules/asn1/package.json b/node_modules/asn1/package.json new file mode 100644 index 0000000..19d03e8 --- /dev/null +++ b/node_modules/asn1/package.json @@ -0,0 +1,75 @@ +{ + "_from": "asn1@~0.2.3", + "_id": "asn1@0.2.4", + "_inBundle": false, + "_integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "_location": "/asn1", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "asn1@~0.2.3", + "name": "asn1", + "escapedName": "asn1", + "rawSpec": "~0.2.3", + "saveSpec": null, + "fetchSpec": "~0.2.3" + }, + "_requiredBy": [ + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "_shasum": "8d2475dfab553bb33e77b54e59e880bb8ce23136", + "_spec": "asn1@~0.2.3", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/sshpk", + "author": { + "name": "Joyent", + "url": "joyent.com" + }, + "bugs": { + "url": "https://github.com/joyent/node-asn1/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "David Gwynne", + "email": "loki@animata.net" + }, + { + "name": "Yunong Xiao", + "email": "yunong@joyent.com" + }, + { + "name": "Alex Wilson", + "email": "alex.wilson@joyent.com" + } + ], + "dependencies": { + "safer-buffer": "~2.1.0" + }, + "deprecated": false, + "description": "Contains parsers and serializers for ASN.1 (currently BER only)", + "devDependencies": { + "eslint": "2.13.1", + "eslint-plugin-joyent": "~1.3.0", + "faucet": "0.0.1", + "istanbul": "^0.3.6", + "tape": "^3.5.0" + }, + "homepage": "https://github.com/joyent/node-asn1#readme", + "license": "MIT", + "main": "lib/index.js", + "name": "asn1", + "repository": { + "type": "git", + 
"url": "git://github.com/joyent/node-asn1.git" + }, + "scripts": { + "test": "tape ./test/ber/*.test.js" + }, + "version": "0.2.4" +} diff --git a/node_modules/assert-plus/AUTHORS b/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/assert-plus/CHANGES.md b/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..57d92bf --- /dev/null +++ b/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,14 @@ +# assert-plus Changelog + +## 1.0.0 + +- *BREAKING* assert.number (and derivatives) now accept Infinity as valid input +- Add assert.finite check. Previous assert.number callers should use this if + they expect Infinity inputs to throw. + +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/assert-plus/README.md b/node_modules/assert-plus/README.md new file mode 100644 index 0000000..ec200d1 --- /dev/null +++ b/node_modules/assert-plus/README.md @@ -0,0 +1,162 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. 
+ +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.finite +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regexp +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfFinite +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfRegexp +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalFinite +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalRegexp +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfFinite +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfRegexp +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . diff --git a/node_modules/assert-plus/assert.js b/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..26f944e --- /dev/null +++ b/node_modules/assert-plus/assert.js @@ -0,0 +1,211 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? 
typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? +} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 
'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/assert-plus/package.json b/node_modules/assert-plus/package.json new file mode 100644 index 0000000..0628250 --- /dev/null +++ b/node_modules/assert-plus/package.json @@ -0,0 +1,87 @@ +{ + "_from": "assert-plus@^1.0.0", + "_id": "assert-plus@1.0.0", + "_inBundle": false, + "_integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "_location": "/assert-plus", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "assert-plus@^1.0.0", + "name": "assert-plus", + "escapedName": "assert-plus", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/dashdash", + "/getpass", + "/http-signature", + "/jsprim", + "/sshpk", + "/verror" + ], + "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "_shasum": "f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525", + "_spec": "assert-plus@^1.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/http-signature", + "author": { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + "bugs": { + "url": "https://github.com/mcavage/node-assert-plus/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Dave Eddy", + "email": "dave@daveeddy.com" + }, + { + "name": "Fred Kuo", + "email": "fred.kuo@joyent.com" + }, + { + "name": "Lars-Magnus Skog", + "email": "ralphtheninja@riseup.net" + }, + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "Patrick Mooney", + "email": "pmooney@pfmooney.com" + }, + { + "name": "Rob Gulewich", + "email": "robert.gulewich@joyent.com" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "Extra assertions on top of node's assert module", + "devDependencies": { + "faucet": "0.0.1", + "tape": "4.2.2" + }, + "engines": { + "node": ">=0.8" + }, + "homepage": "https://github.com/mcavage/node-assert-plus#readme", + "license": "MIT", + "main": "./assert.js", + "name": "assert-plus", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git+https://github.com/mcavage/node-assert-plus.git" + }, + "scripts": { + "test": "tape tests/*.js | ./node_modules/.bin/faucet" + }, + "version": "1.0.0" +} diff --git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE new file mode 100644 index 0000000..c9eca5d --- /dev/null +++ b/node_modules/asynckit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Alex Indigo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md new file mode 100644 index 0000000..ddcc7e6 --- /dev/null +++ b/node_modules/asynckit/README.md @@ -0,0 +1,233 @@ +# asynckit [](https://www.npmjs.com/package/asynckit) + +Minimal async jobs utility library, with streams support. + +[](https://travis-ci.org/alexindigo/asynckit) +[](https://travis-ci.org/alexindigo/asynckit) +[](https://ci.appveyor.com/project/alexindigo/asynckit) + +[](https://coveralls.io/github/alexindigo/asynckit?branch=master) +[](https://david-dm.org/alexindigo/asynckit) +[](https://www.bithound.io/github/alexindigo/asynckit) + + + +AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. +Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. + +It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. + +| compression | size | +| :----------------- | -------: | +| asynckit.js | 12.34 kB | +| asynckit.min.js | 4.11 kB | +| asynckit.min.js.gz | 1.47 kB | + + +## Install + +```sh +$ npm install --save asynckit +``` + +## Examples + +### Parallel Jobs + +Runs iterator over provided array in parallel. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will terminate rest of the active jobs (if abort function is provided) +and return error along with salvaged data to the main callback function. + +#### Input Array + +```javascript +var parallel = require('asynckit').parallel + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , target = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// async job accepts one element from the array +// and a callback function +function asyncJob(item, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. 
+ +```javascript +var parallel = require('asynckit/parallel') + , assert = require('assert') + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] + , target = [] + , keys = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); + assert.deepEqual(keys, expectedKeys); +}); + +// supports full value, key, callback (shortcut) interface +function asyncJob(item, key, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + keys.push(key); + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). + +### Serial Jobs + +Runs iterator over provided array sequentially. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will not proceed to the rest of the items in the list +and return error along with salvaged data to the main callback function. + +#### Input Array + +```javascript +var serial = require('asynckit/serial') + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// extended interface (item, key, callback) +// also supported for arrays +function asyncJob(item, key, cb) +{ + target.push(key); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var serial = require('asynckit').serial + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , target = [] + ; + + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// shortcut interface (item, callback) +// works for object as well as for the arrays +function asyncJob(item, cb) +{ + target.push(item); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). 
+ +_Note: Since _object_ is an _unordered_ collection of properties, +it may produce unexpected results with sequential iterations. +Whenever order of the jobs' execution is important please use `serialOrdered` method._ + +### Ordered Serial Iterations + +TBD + +For example [compare-property](compare-property) package. + +### Streaming interface + +TBD + +## Want to Know More? + +More examples can be found in [test folder](test/). + +Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. + +## License + +AsyncKit is licensed under the MIT license. diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js new file mode 100644 index 0000000..c612f1a --- /dev/null +++ b/node_modules/asynckit/bench.js @@ -0,0 +1,76 @@ +/* eslint no-console: "off" */ + +var asynckit = require('./') + , async = require('async') + , assert = require('assert') + , expected = 0 + ; + +var Benchmark = require('benchmark'); +var suite = new Benchmark.Suite; + +var source = []; +for (var z = 1; z < 100; z++) +{ + source.push(z); + expected += z; +} + +suite +// add tests + +.add('async.map', function(deferred) +{ + var total = 0; + + async.map(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +.add('asynckit.parallel', function(deferred) +{ + var total = 0; + + asynckit.parallel(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +// add listeners +.on('cycle', function(ev) +{ + console.log(String(ev.target)); +}) +.on('complete', function() +{ + console.log('Fastest is ' + this.filter('fastest').map('name')); +}) +// run async +.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js new file mode 100644 index 0000000..455f945 --- /dev/null +++ b/node_modules/asynckit/index.js @@ -0,0 +1,6 @@ +module.exports = +{ + parallel : require('./parallel.js'), + serial : require('./serial.js'), + serialOrdered : require('./serialOrdered.js') +}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js new file mode 100644 index 0000000..114367e --- /dev/null +++ b/node_modules/asynckit/lib/abort.js @@ -0,0 +1,29 @@ +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js new file mode 100644 index 0000000..7f1288a --- /dev/null +++ b/node_modules/asynckit/lib/async.js @@ -0,0 +1,34 @@ +var defer = require('./defer.js'); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ 
+function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js new file mode 100644 index 0000000..b67110c --- /dev/null +++ b/node_modules/asynckit/lib/defer.js @@ -0,0 +1,26 @@ +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js new file mode 100644 index 0000000..5d2839a --- /dev/null +++ b/node_modules/asynckit/lib/iterate.js @@ -0,0 +1,75 @@ +var async = require('./async.js') + , abort = require('./abort.js') + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js new file mode 100644 index 0000000..78ad240 --- /dev/null +++ b/node_modules/asynckit/lib/readable_asynckit.js @@ -0,0 +1,91 @@ +var streamify = require('./streamify.js') + , defer = require('./defer.js') + ; + +// API +module.exports = ReadableAsyncKit; + +/** + * Base constructor for all streams + * used to hold properties/methods + */ +function ReadableAsyncKit() +{ + ReadableAsyncKit.super_.apply(this, arguments); + + // list of active jobs + this.jobs = {}; + + // add stream methods + this.destroy = destroy; + 
this._start = _start; + this._read = _read; +} + +/** + * Destroys readable stream, + * by aborting outstanding jobs + * + * @returns {void} + */ +function destroy() +{ + if (this.destroyed) + { + return; + } + + this.destroyed = true; + + if (typeof this.terminator == 'function') + { + this.terminator(); + } +} + +/** + * Starts provided jobs in async manner + * + * @private + */ +function _start() +{ + // first argument – runner function + var runner = arguments[0] + // take away first argument + , args = Array.prototype.slice.call(arguments, 1) + // second argument - input data + , input = args[0] + // last argument - result callback + , endCb = streamify.callback.call(this, args[args.length - 1]) + ; + + args[args.length - 1] = endCb; + // third argument - iterator + args[1] = streamify.iterator.call(this, args[1]); + + // allow time for proper setup + defer(function() + { + if (!this.destroyed) + { + this.terminator = runner.apply(null, args); + } + else + { + endCb(null, Array.isArray(input) ? [] : {}); + } + }.bind(this)); +} + + +/** + * Implement _read to comply with Readable streams + * Doesn't really make sense for flowing object mode + * + * @private + */ +function _read() +{ + +} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js new file mode 100644 index 0000000..5d2929f --- /dev/null +++ b/node_modules/asynckit/lib/readable_parallel.js @@ -0,0 +1,25 @@ +var parallel = require('../parallel.js'); + +// API +module.exports = ReadableParallel; + +/** + * Streaming wrapper to `asynckit.parallel` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableParallel(list, iterator, callback) +{ + if (!(this instanceof ReadableParallel)) + { + return new ReadableParallel(list, iterator, callback); + } + + // turn on object mode + ReadableParallel.super_.call(this, {objectMode: true}); + + this._start(parallel, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js new file mode 100644 index 0000000..7822698 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial.js @@ -0,0 +1,25 @@ +var serial = require('../serial.js'); + +// API +module.exports = ReadableSerial; + +/** + * Streaming wrapper to `asynckit.serial` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerial(list, iterator, callback) +{ + if (!(this instanceof ReadableSerial)) + { + return new ReadableSerial(list, iterator, callback); + } + + // turn on object mode + ReadableSerial.super_.call(this, {objectMode: true}); + + this._start(serial, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js new file mode 100644 index 0000000..3de89c4 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial_ordered.js @@ -0,0 +1,29 @@ +var serialOrdered = require('../serialOrdered.js'); + +// API +module.exports = ReadableSerialOrdered; +// expose sort helpers +module.exports.ascending = serialOrdered.ascending; +module.exports.descending = serialOrdered.descending; + +/** + * Streaming wrapper to 
`asynckit.serialOrdered` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerialOrdered(list, iterator, sortMethod, callback) +{ + if (!(this instanceof ReadableSerialOrdered)) + { + return new ReadableSerialOrdered(list, iterator, sortMethod, callback); + } + + // turn on object mode + ReadableSerialOrdered.super_.call(this, {objectMode: true}); + + this._start(serialOrdered, list, iterator, sortMethod, callback); +} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js new file mode 100644 index 0000000..cbea7ad --- /dev/null +++ b/node_modules/asynckit/lib/state.js @@ -0,0 +1,37 @@ +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js new file mode 100644 index 0000000..f56a1c9 --- /dev/null +++ b/node_modules/asynckit/lib/streamify.js @@ -0,0 +1,141 @@ +var async = require('./async.js'); + +// API +module.exports = { + iterator: wrapIterator, + callback: wrapCallback +}; + +/** + * Wraps iterators with long signature + * + * @this ReadableAsyncKit# + * @param {function} iterator - function to wrap + * @returns {function} - wrapped function + */ +function wrapIterator(iterator) +{ + var stream = this; + + return function(item, key, cb) + { + var aborter + , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) + ; + + stream.jobs[key] = wrappedCb; + + // it's either shortcut (item, cb) + if (iterator.length == 2) + { + aborter = iterator(item, wrappedCb); + } + // or long format (item, key, cb) + else + { + aborter = iterator(item, key, wrappedCb); + } + + return aborter; + }; +} + +/** + * Wraps provided callback function + * allowing to execute snitch function before + * real callback + * + * @this ReadableAsyncKit# + * @param {function} callback - function to wrap + * @returns {function} - wrapped function + */ +function wrapCallback(callback) +{ + var stream = this; + + var wrapped = function(error, result) + { + return finisher.call(stream, error, result, callback); + }; + + return wrapped; +} + +/** + * Wraps provided iterator callback function + * makes sure snitch only called once, + * but passes secondary calls to the original callback + * + * @this ReadableAsyncKit# + * @param {function} callback - callback to wrap + * @param {number|string} key - iteration key + * @returns {function} wrapped callback + */ +function wrapIteratorCallback(callback, key) +{ + var stream = this; + + return 
function(error, output) + { + // don't repeat yourself + if (!(key in stream.jobs)) + { + callback(error, output); + return; + } + + // clean up jobs + delete stream.jobs[key]; + + return streamer.call(stream, error, {key: key, value: output}, callback); + }; +} + +/** + * Stream wrapper for iterator callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects iterator results + */ +function streamer(error, output, callback) +{ + if (error && !this.error) + { + this.error = error; + this.pause(); + this.emit('error', error); + // send back value only, as expected + callback(error, output && output.value); + return; + } + + // stream stuff + this.push(output); + + // back to original track + // send back value only, as expected + callback(error, output && output.value); +} + +/** + * Stream wrapper for finishing callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects final results + */ +function finisher(error, output, callback) +{ + // signal end of the stream + // only for successfully finished streams + if (!error) + { + this.push(null); + } + + // back to original track + callback(error, output); +} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js new file mode 100644 index 0000000..d6eb992 --- /dev/null +++ b/node_modules/asynckit/lib/terminator.js @@ -0,0 +1,29 @@ +var abort = require('./abort.js') + , async = require('./async.js') + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json new file mode 100644 index 0000000..80a7595 --- /dev/null +++ b/node_modules/asynckit/package.json @@ -0,0 +1,91 @@ +{ + "_from": "asynckit@^0.4.0", + "_id": "asynckit@0.4.0", + "_inBundle": false, + "_integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "_location": "/asynckit", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "asynckit@^0.4.0", + "name": "asynckit", + "escapedName": "asynckit", + "rawSpec": "^0.4.0", + "saveSpec": null, + "fetchSpec": "^0.4.0" + }, + "_requiredBy": [ + "/form-data" + ], + "_resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "_shasum": "c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79", + "_spec": "asynckit@^0.4.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/form-data", + "author": { + "name": "Alex Indigo", + "email": "iam@alexindigo.com" + }, + "bugs": { + "url": "https://github.com/alexindigo/asynckit/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Minimal async jobs utility library, with streams support", + "devDependencies": { + "browserify": "^13.0.0", + "browserify-istanbul": "^2.0.0", + "coveralls": "^2.11.9", + "eslint": "^2.9.0", + "istanbul": "^0.4.3", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.7", + "pre-commit": 
"^1.1.3", + "reamde": "^1.1.0", + "rimraf": "^2.5.2", + "size-table": "^0.2.0", + "tap-spec": "^4.1.1", + "tape": "^4.5.1" + }, + "homepage": "https://github.com/alexindigo/asynckit#readme", + "keywords": [ + "async", + "jobs", + "parallel", + "serial", + "iterator", + "array", + "object", + "stream", + "destroy", + "terminate", + "abort" + ], + "license": "MIT", + "main": "index.js", + "name": "asynckit", + "pre-commit": [ + "clean", + "lint", + "test", + "browser", + "report", + "size" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/alexindigo/asynckit.git" + }, + "scripts": { + "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", + "clean": "rimraf coverage", + "debug": "tape test/test-*.js", + "lint": "eslint *.js lib/*.js test/*.js", + "report": "istanbul report", + "size": "browserify index.js | size-table asynckit", + "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", + "win-test": "tape test/test-*.js" + }, + "version": "0.4.0" +} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js new file mode 100644 index 0000000..3c50344 --- /dev/null +++ b/node_modules/asynckit/parallel.js @@ -0,0 +1,43 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js new file mode 100644 index 0000000..6cd949a --- /dev/null +++ b/node_modules/asynckit/serial.js @@ -0,0 +1,17 @@ +var serialOrdered = require('./serialOrdered.js'); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js new file mode 100644 index 0000000..607eafe --- /dev/null +++ b/node_modules/asynckit/serialOrdered.js @@ -0,0 +1,75 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - 
array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js new file mode 100644 index 0000000..d43465f --- /dev/null +++ b/node_modules/asynckit/stream.js @@ -0,0 +1,21 @@ +var inherits = require('util').inherits + , Readable = require('stream').Readable + , ReadableAsyncKit = require('./lib/readable_asynckit.js') + , ReadableParallel = require('./lib/readable_parallel.js') + , ReadableSerial = require('./lib/readable_serial.js') + , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') + ; + +// API +module.exports = +{ + parallel : ReadableParallel, + serial : ReadableSerial, + serialOrdered : ReadableSerialOrdered, +}; + +inherits(ReadableAsyncKit, Readable); + +inherits(ReadableParallel, ReadableAsyncKit); +inherits(ReadableSerial, ReadableAsyncKit); +inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/aws-sign2/LICENSE b/node_modules/aws-sign2/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/aws-sign2/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
+ +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/aws-sign2/README.md b/node_modules/aws-sign2/README.md new file mode 100644 index 0000000..763564e --- /dev/null +++ b/node_modules/aws-sign2/README.md @@ -0,0 +1,4 @@ +aws-sign +======== + +AWS signing. Originally pulled from LearnBoost/knox, maintained as vendor in request, now a standalone module. diff --git a/node_modules/aws-sign2/index.js b/node_modules/aws-sign2/index.js new file mode 100644 index 0000000..fb35f6d --- /dev/null +++ b/node_modules/aws-sign2/index.js @@ -0,0 +1,212 @@ + +/*! + * Copyright 2010 LearnBoost + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Module dependencies. + */ + +var crypto = require('crypto') + , parse = require('url').parse + ; + +/** + * Valid keys. 
+ */ + +var keys = + [ 'acl' + , 'location' + , 'logging' + , 'notification' + , 'partNumber' + , 'policy' + , 'requestPayment' + , 'torrent' + , 'uploadId' + , 'uploads' + , 'versionId' + , 'versioning' + , 'versions' + , 'website' + ] + +/** + * Return an "Authorization" header value with the given `options` + * in the form of "AWS :" + * + * @param {Object} options + * @return {String} + * @api private + */ + +function authorization (options) { + return 'AWS ' + options.key + ':' + sign(options) +} + +module.exports = authorization +module.exports.authorization = authorization + +/** + * Simple HMAC-SHA1 Wrapper + * + * @param {Object} options + * @return {String} + * @api private + */ + +function hmacSha1 (options) { + return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64') +} + +module.exports.hmacSha1 = hmacSha1 + +/** + * Create a base64 sha1 HMAC for `options`. + * + * @param {Object} options + * @return {String} + * @api private + */ + +function sign (options) { + options.message = stringToSign(options) + return hmacSha1(options) +} +module.exports.sign = sign + +/** + * Create a base64 sha1 HMAC for `options`. + * + * Specifically to be used with S3 presigned URLs + * + * @param {Object} options + * @return {String} + * @api private + */ + +function signQuery (options) { + options.message = queryStringToSign(options) + return hmacSha1(options) +} +module.exports.signQuery= signQuery + +/** + * Return a string for sign() with the given `options`. + * + * Spec: + * + * \n + * \n + * \n + * \n + * [headers\n] + * + * + * @param {Object} options + * @return {String} + * @api private + */ + +function stringToSign (options) { + var headers = options.amazonHeaders || '' + if (headers) headers += '\n' + var r = + [ options.verb + , options.md5 + , options.contentType + , options.date ? options.date.toUTCString() : '' + , headers + options.resource + ] + return r.join('\n') +} +module.exports.stringToSign = stringToSign + +/** + * Return a string for sign() with the given `options`, but is meant exclusively + * for S3 presigned URLs + * + * Spec: + * + * \n + * + * + * @param {Object} options + * @return {String} + * @api private + */ + +function queryStringToSign (options){ + return 'GET\n\n\n' + options.date + '\n' + options.resource +} +module.exports.queryStringToSign = queryStringToSign + +/** + * Perform the following: + * + * - ignore non-amazon headers + * - lowercase fields + * - sort lexicographically + * - trim whitespace between ":" + * - join with newline + * + * @param {Object} headers + * @return {String} + * @api private + */ + +function canonicalizeHeaders (headers) { + var buf = [] + , fields = Object.keys(headers) + ; + for (var i = 0, len = fields.length; i < len; ++i) { + var field = fields[i] + , val = headers[field] + , field = field.toLowerCase() + ; + if (0 !== field.indexOf('x-amz')) continue + buf.push(field + ':' + val) + } + return buf.sort().join('\n') +} +module.exports.canonicalizeHeaders = canonicalizeHeaders + +/** + * Perform the following: + * + * - ignore non sub-resources + * - sort lexicographically + * + * @param {String} resource + * @return {String} + * @api private + */ + +function canonicalizeResource (resource) { + var url = parse(resource, true) + , path = url.pathname + , buf = [] + ; + + Object.keys(url.query).forEach(function(key){ + if (!~keys.indexOf(key)) return + var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key]) + buf.push(key + val) + }) + + return path + (buf.length ? '?' 
+ buf.sort().join('&') : '') +} +module.exports.canonicalizeResource = canonicalizeResource diff --git a/node_modules/aws-sign2/package.json b/node_modules/aws-sign2/package.json new file mode 100644 index 0000000..8d1d1ae --- /dev/null +++ b/node_modules/aws-sign2/package.json @@ -0,0 +1,50 @@ +{ + "_from": "aws-sign2@~0.7.0", + "_id": "aws-sign2@0.7.0", + "_inBundle": false, + "_integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "_location": "/aws-sign2", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aws-sign2@~0.7.0", + "name": "aws-sign2", + "escapedName": "aws-sign2", + "rawSpec": "~0.7.0", + "saveSpec": null, + "fetchSpec": "~0.7.0" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "_shasum": "b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8", + "_spec": "aws-sign2@~0.7.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/request", + "author": { + "name": "Mikeal Rogers", + "email": "mikeal.rogers@gmail.com", + "url": "http://www.futurealoof.com" + }, + "bugs": { + "url": "https://github.com/mikeal/aws-sign/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "AWS signing. Originally pulled from LearnBoost/knox, maintained as vendor in request, now a standalone module.", + "devDependencies": {}, + "engines": { + "node": "*" + }, + "homepage": "https://github.com/mikeal/aws-sign#readme", + "license": "Apache-2.0", + "main": "index.js", + "name": "aws-sign2", + "optionalDependencies": {}, + "repository": { + "url": "git+https://github.com/mikeal/aws-sign.git" + }, + "version": "0.7.0" +} diff --git a/node_modules/aws4/.travis.yml b/node_modules/aws4/.travis.yml new file mode 100644 index 0000000..61d0634 --- /dev/null +++ b/node_modules/aws4/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - "0.10" + - "0.12" + - "4.2" diff --git a/node_modules/aws4/LICENSE b/node_modules/aws4/LICENSE new file mode 100644 index 0000000..4f321e5 --- /dev/null +++ b/node_modules/aws4/LICENSE @@ -0,0 +1,19 @@ +Copyright 2013 Michael Hart (michael.hart.au@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
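To make the aws-sign2 helpers shown above easier to follow, here is a minimal, hypothetical sketch of building a Signature Version 2 `Authorization` header with the module's exported `authorization()` and `canonicalizeResource()` functions. The key, secret, bucket, and object names are placeholders, not values taken from this repository.

```javascript
// Illustrative only: credentials, bucket, and object are placeholder values.
var aws2 = require('aws-sign2')

var header = aws2.authorization({
  key: 'AKIDEXAMPLE',        // placeholder access key id
  secret: 'EXAMPLE-SECRET',  // placeholder secret access key
  verb: 'GET',
  md5: '',
  contentType: '',
  date: new Date(),
  // canonicalizeResource() keeps only the sub-resource keys it recognises (acl, uploads, ...)
  resource: aws2.canonicalizeResource('/my-bucket/my-object?acl')
})

// header is of the form "AWS AKIDEXAMPLE:<base64 HMAC-SHA1 signature>"
```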
diff --git a/node_modules/aws4/README.md b/node_modules/aws4/README.md new file mode 100644 index 0000000..6b002d0 --- /dev/null +++ b/node_modules/aws4/README.md @@ -0,0 +1,523 @@ +aws4 +---- + +[](http://travis-ci.org/mhart/aws4) + +A small utility to sign vanilla node.js http(s) request options using Amazon's +[AWS Signature Version 4](http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html). + +Can also be used [in the browser](./browser). + +This signature is supported by nearly all Amazon services, including +[S3](http://docs.aws.amazon.com/AmazonS3/latest/API/), +[EC2](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/), +[DynamoDB](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/API.html), +[Kinesis](http://docs.aws.amazon.com/kinesis/latest/APIReference/), +[Lambda](http://docs.aws.amazon.com/lambda/latest/dg/API_Reference.html), +[SQS](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/), +[SNS](http://docs.aws.amazon.com/sns/latest/api/), +[IAM](http://docs.aws.amazon.com/IAM/latest/APIReference/), +[STS](http://docs.aws.amazon.com/STS/latest/APIReference/), +[RDS](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/), +[CloudWatch](http://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/), +[CloudWatch Logs](http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/), +[CodeDeploy](http://docs.aws.amazon.com/codedeploy/latest/APIReference/), +[CloudFront](http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/), +[CloudTrail](http://docs.aws.amazon.com/awscloudtrail/latest/APIReference/), +[ElastiCache](http://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/), +[EMR](http://docs.aws.amazon.com/ElasticMapReduce/latest/API/), +[Glacier](http://docs.aws.amazon.com/amazonglacier/latest/dev/amazon-glacier-api.html), +[CloudSearch](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/APIReq.html), +[Elastic Load Balancing](http://docs.aws.amazon.com/ElasticLoadBalancing/latest/APIReference/), +[Elastic Transcoder](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/api-reference.html), +[CloudFormation](http://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/), +[Elastic Beanstalk](http://docs.aws.amazon.com/elasticbeanstalk/latest/api/), +[Storage Gateway](http://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html), +[Data Pipeline](http://docs.aws.amazon.com/datapipeline/latest/APIReference/), +[Direct Connect](http://docs.aws.amazon.com/directconnect/latest/APIReference/), +[Redshift](http://docs.aws.amazon.com/redshift/latest/APIReference/), +[OpsWorks](http://docs.aws.amazon.com/opsworks/latest/APIReference/), +[SES](http://docs.aws.amazon.com/ses/latest/APIReference/), +[SWF](http://docs.aws.amazon.com/amazonswf/latest/apireference/), +[AutoScaling](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/), +[Mobile Analytics](http://docs.aws.amazon.com/mobileanalytics/latest/ug/server-reference.html), +[Cognito Identity](http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/), +[Cognito Sync](http://docs.aws.amazon.com/cognitosync/latest/APIReference/), +[Container Service](http://docs.aws.amazon.com/AmazonECS/latest/APIReference/), +[AppStream](http://docs.aws.amazon.com/appstream/latest/developerguide/appstream-api-rest.html), +[Key Management Service](http://docs.aws.amazon.com/kms/latest/APIReference/), +[Config](http://docs.aws.amazon.com/config/latest/APIReference/), 
+[CloudHSM](http://docs.aws.amazon.com/cloudhsm/latest/dg/api-ref.html), +[Route53](http://docs.aws.amazon.com/Route53/latest/APIReference/requests-rest.html) and +[Route53 Domains](http://docs.aws.amazon.com/Route53/latest/APIReference/requests-rpc.html). + +Indeed, the only AWS services that *don't* support v4 as of 2014-12-30 are +[Import/Export](http://docs.aws.amazon.com/AWSImportExport/latest/DG/api-reference.html) and +[SimpleDB](http://docs.aws.amazon.com/AmazonSimpleDB/latest/DeveloperGuide/SDB_API.html) +(they only support [AWS Signature Version 2](https://github.com/mhart/aws2)). + +It also provides defaults for a number of core AWS headers and +request parameters, making it very easy to query AWS services, or +build out a fully-featured AWS library. + +Example +------- + +```javascript +var http = require('http'), + https = require('https'), + aws4 = require('aws4') + +// given an options object you could pass to http.request +var opts = {host: 'sqs.us-east-1.amazonaws.com', path: '/?Action=ListQueues'} + +// alternatively (as aws4 can infer the host): +opts = {service: 'sqs', region: 'us-east-1', path: '/?Action=ListQueues'} + +// alternatively (as us-east-1 is default): +opts = {service: 'sqs', path: '/?Action=ListQueues'} + +aws4.sign(opts) // assumes AWS credentials are available in process.env + +console.log(opts) +/* +{ + host: 'sqs.us-east-1.amazonaws.com', + path: '/?Action=ListQueues', + headers: { + Host: 'sqs.us-east-1.amazonaws.com', + 'X-Amz-Date': '20121226T061030Z', + Authorization: 'AWS4-HMAC-SHA256 Credential=ABCDEF/20121226/us-east-1/sqs/aws4_request, ...' + } +} +*/ + +// we can now use this to query AWS using the standard node.js http API +http.request(opts, function(res) { res.pipe(process.stdout) }).end() +/* + + +... +*/ +``` + +More options +------------ + +```javascript +// you can also pass AWS credentials in explicitly (otherwise taken from process.env) +aws4.sign(opts, {accessKeyId: '', secretAccessKey: ''}) + +// can also add the signature to query strings +aws4.sign({service: 's3', path: '/my-bucket?X-Amz-Expires=12345', signQuery: true}) + +// create a utility function to pipe to stdout (with https this time) +function request(o) { https.request(o, function(res) { res.pipe(process.stdout) }).end(o.body || '') } + +// aws4 can infer the HTTP method if a body is passed in +// method will be POST and Content-Type: 'application/x-www-form-urlencoded; charset=utf-8' +request(aws4.sign({service: 'iam', body: 'Action=ListGroups&Version=2010-05-08'})) +/* + +... +*/ + +// can specify any custom option or header as per usual +request(aws4.sign({ + service: 'dynamodb', + region: 'ap-southeast-2', + method: 'POST', + path: '/', + headers: { + 'Content-Type': 'application/x-amz-json-1.0', + 'X-Amz-Target': 'DynamoDB_20120810.ListTables' + }, + body: '{}' +})) +/* +{"TableNames":[]} +... +*/ + +// works with all other services that support Signature Version 4 + +request(aws4.sign({service: 's3', path: '/', signQuery: true})) +/* + +... +*/ + +request(aws4.sign({service: 'ec2', path: '/?Action=DescribeRegions&Version=2014-06-15'})) +/* + +... +*/ + +request(aws4.sign({service: 'sns', path: '/?Action=ListTopics&Version=2010-03-31'})) +/* + +... +*/ + +request(aws4.sign({service: 'sts', path: '/?Action=GetSessionToken&Version=2011-06-15'})) +/* + +... +*/ + +request(aws4.sign({service: 'cloudsearch', path: '/?Action=ListDomainNames&Version=2013-01-01'})) +/* + +... 
+*/ + +request(aws4.sign({service: 'ses', path: '/?Action=ListIdentities&Version=2010-12-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'autoscaling', path: '/?Action=DescribeAutoScalingInstances&Version=2011-01-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'elasticloadbalancing', path: '/?Action=DescribeLoadBalancers&Version=2012-06-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'cloudformation', path: '/?Action=ListStacks&Version=2010-05-15'})) +/* + +... +*/ + +request(aws4.sign({service: 'elasticbeanstalk', path: '/?Action=ListAvailableSolutionStacks&Version=2010-12-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'rds', path: '/?Action=DescribeDBInstances&Version=2012-09-17'})) +/* + +... +*/ + +request(aws4.sign({service: 'monitoring', path: '/?Action=ListMetrics&Version=2010-08-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'redshift', path: '/?Action=DescribeClusters&Version=2012-12-01'})) +/* + +... +*/ + +request(aws4.sign({service: 'cloudfront', path: '/2014-05-31/distribution'})) +/* + +... +*/ + +request(aws4.sign({service: 'elasticache', path: '/?Action=DescribeCacheClusters&Version=2014-07-15'})) +/* + +... +*/ + +request(aws4.sign({service: 'elasticmapreduce', path: '/?Action=DescribeJobFlows&Version=2009-03-31'})) +/* + +... +*/ + +request(aws4.sign({service: 'route53', path: '/2013-04-01/hostedzone'})) +/* + +... +*/ + +request(aws4.sign({service: 'appstream', path: '/applications'})) +/* +{"_links":{"curie":[{"href":"http://docs.aws.amazon.com/appstream/latest/... +... +*/ + +request(aws4.sign({service: 'cognito-sync', path: '/identitypools'})) +/* +{"Count":0,"IdentityPoolUsages":[],"MaxResults":16,"NextToken":null} +... +*/ + +request(aws4.sign({service: 'elastictranscoder', path: '/2012-09-25/pipelines'})) +/* +{"NextPageToken":null,"Pipelines":[]} +... +*/ + +request(aws4.sign({service: 'lambda', path: '/2014-11-13/functions/'})) +/* +{"Functions":[],"NextMarker":null} +... +*/ + +request(aws4.sign({service: 'ecs', path: '/?Action=ListClusters&Version=2014-11-13'})) +/* + +... +*/ + +request(aws4.sign({service: 'glacier', path: '/-/vaults', headers: {'X-Amz-Glacier-Version': '2012-06-01'}})) +/* +{"Marker":null,"VaultList":[]} +... +*/ + +request(aws4.sign({service: 'storagegateway', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'StorageGateway_20120630.ListGateways' +}})) +/* +{"Gateways":[]} +... +*/ + +request(aws4.sign({service: 'datapipeline', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'DataPipeline.ListPipelines' +}})) +/* +{"hasMoreResults":false,"pipelineIdList":[]} +... +*/ + +request(aws4.sign({service: 'opsworks', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'OpsWorks_20130218.DescribeStacks' +}})) +/* +{"Stacks":[]} +... +*/ + +request(aws4.sign({service: 'route53domains', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'Route53Domains_v20140515.ListDomains' +}})) +/* +{"Domains":[]} +... +*/ + +request(aws4.sign({service: 'kinesis', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'Kinesis_20131202.ListStreams' +}})) +/* +{"HasMoreStreams":false,"StreamNames":[]} +... +*/ + +request(aws4.sign({service: 'cloudtrail', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'CloudTrail_20131101.DescribeTrails' +}})) +/* +{"trailList":[]} +... 
+*/ + +request(aws4.sign({service: 'logs', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'Logs_20140328.DescribeLogGroups' +}})) +/* +{"logGroups":[]} +... +*/ + +request(aws4.sign({service: 'codedeploy', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'CodeDeploy_20141006.ListApplications' +}})) +/* +{"applications":[]} +... +*/ + +request(aws4.sign({service: 'directconnect', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'OvertureService.DescribeConnections' +}})) +/* +{"connections":[]} +... +*/ + +request(aws4.sign({service: 'kms', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'TrentService.ListKeys' +}})) +/* +{"Keys":[],"Truncated":false} +... +*/ + +request(aws4.sign({service: 'config', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'StarlingDoveService.DescribeDeliveryChannels' +}})) +/* +{"DeliveryChannels":[]} +... +*/ + +request(aws4.sign({service: 'cloudhsm', body: '{}', headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'CloudHsmFrontendService.ListAvailableZones' +}})) +/* +{"AZList":["us-east-1a","us-east-1b","us-east-1c"]} +... +*/ + +request(aws4.sign({ + service: 'swf', + body: '{"registrationStatus":"REGISTERED"}', + headers: { + 'Content-Type': 'application/x-amz-json-1.0', + 'X-Amz-Target': 'SimpleWorkflowService.ListDomains' + } +})) +/* +{"domainInfos":[]} +... +*/ + +request(aws4.sign({ + service: 'cognito-identity', + body: '{"MaxResults": 1}', + headers: { + 'Content-Type': 'application/x-amz-json-1.1', + 'X-Amz-Target': 'AWSCognitoIdentityService.ListIdentityPools' + } +})) +/* +{"IdentityPools":[]} +... +*/ + +request(aws4.sign({ + service: 'mobileanalytics', + path: '/2014-06-05/events', + body: JSON.stringify({events:[{ + eventType: 'a', + timestamp: new Date().toISOString(), + session: {}, + }]}), + headers: { + 'Content-Type': 'application/json', + 'X-Amz-Client-Context': JSON.stringify({ + client: {client_id: 'a', app_title: 'a'}, + custom: {}, + env: {platform: 'a'}, + services: {}, + }), + } +})) +/* +(HTTP 202, empty response) +*/ + +// Generate CodeCommit Git access password +var signer = new aws4.RequestSigner({ + service: 'codecommit', + host: 'git-codecommit.us-east-1.amazonaws.com', + method: 'GIT', + path: '/v1/repos/MyAwesomeRepo', +}) +var password = signer.getDateTime() + 'Z' + signer.signature() +``` + +API +--- + +### aws4.sign(requestOptions, [credentials]) + +This calculates and populates the `Authorization` header of +`requestOptions`, and any other necessary AWS headers and/or request +options. Returns `requestOptions` as a convenience for chaining. + +`requestOptions` is an object holding the same options that the node.js +[http.request](http://nodejs.org/docs/latest/api/http.html#http_http_request_options_callback) +function takes. 
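Because `sign()` mutates and returns the same options object it is given, the call can be inlined directly into `https.request`. A minimal sketch, assuming AWS credentials are available in `process.env` as in the examples above:

```javascript
// Minimal chaining sketch: sign() returns the options object it was passed,
// so the signed options can go straight into https.request.
var https = require('https')
var aws4 = require('aws4')

https.request(
  aws4.sign({ service: 'sqs', path: '/?Action=ListQueues' }),
  function (res) { res.pipe(process.stdout) }
).end()
```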
+ +The following properties of `requestOptions` are used in the signing or +populated if they don't already exist: + +- `hostname` or `host` (will be determined from `service` and `region` if not given) +- `method` (will use `'GET'` if not given or `'POST'` if there is a `body`) +- `path` (will use `'/'` if not given) +- `body` (will use `''` if not given) +- `service` (will be calculated from `hostname` or `host` if not given) +- `region` (will be calculated from `hostname` or `host` or use `'us-east-1'` if not given) +- `headers['Host']` (will use `hostname` or `host` or be calculated if not given) +- `headers['Content-Type']` (will use `'application/x-www-form-urlencoded; charset=utf-8'` + if not given and there is a `body`) +- `headers['Date']` (used to calculate the signature date if given, otherwise `new Date` is used) + +Your AWS credentials (which can be found in your +[AWS console](https://portal.aws.amazon.com/gp/aws/securityCredentials)) +can be specified in one of two ways: + +- As the second argument, like this: + +```javascript +aws4.sign(requestOptions, { + secretAccessKey: "", + accessKeyId: "", + sessionToken: "" +}) +``` + +- From `process.env`, such as this: + +``` +export AWS_SECRET_ACCESS_KEY="" +export AWS_ACCESS_KEY_ID="" +export AWS_SESSION_TOKEN="" +``` + +(will also use `AWS_ACCESS_KEY` and `AWS_SECRET_KEY` if available) + +The `sessionToken` property and `AWS_SESSION_TOKEN` environment variable are optional for signing +with [IAM STS temporary credentials](http://docs.aws.amazon.com/STS/latest/UsingSTS/using-temp-creds.html). + +Installation +------------ + +With [npm](http://npmjs.org/) do: + +``` +npm install aws4 +``` + +Can also be used [in the browser](./browser). + +Thanks +------ + +Thanks to [@jed](https://github.com/jed) for his +[dynamo-client](https://github.com/jed/dynamo-client) lib where I first +committed and subsequently extracted this code. + +Also thanks to the +[official node.js AWS SDK](https://github.com/aws/aws-sdk-js) for giving +me a start on implementing the v4 signature. 
+ diff --git a/node_modules/aws4/aws4.js b/node_modules/aws4/aws4.js new file mode 100644 index 0000000..124cd7a --- /dev/null +++ b/node_modules/aws4/aws4.js @@ -0,0 +1,332 @@ +var aws4 = exports, + url = require('url'), + querystring = require('querystring'), + crypto = require('crypto'), + lru = require('./lru'), + credentialsCache = lru(1000) + +// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html + +function hmac(key, string, encoding) { + return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding) +} + +function hash(string, encoding) { + return crypto.createHash('sha256').update(string, 'utf8').digest(encoding) +} + +// This function assumes the string has already been percent encoded +function encodeRfc3986(urlEncodedString) { + return urlEncodedString.replace(/[!'()*]/g, function(c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} + +// request: { path | body, [host], [method], [headers], [service], [region] } +// credentials: { accessKeyId, secretAccessKey, [sessionToken] } +function RequestSigner(request, credentials) { + + if (typeof request === 'string') request = url.parse(request) + + var headers = request.headers = (request.headers || {}), + hostParts = this.matchHost(request.hostname || request.host || headers.Host || headers.host) + + this.request = request + this.credentials = credentials || this.defaultCredentials() + + this.service = request.service || hostParts[0] || '' + this.region = request.region || hostParts[1] || 'us-east-1' + + // SES uses a different domain from the service name + if (this.service === 'email') this.service = 'ses' + + if (!request.method && request.body) + request.method = 'POST' + + if (!headers.Host && !headers.host) { + headers.Host = request.hostname || request.host || this.createHost() + + // If a port is specified explicitly, use it as is + if (request.port) + headers.Host += ':' + request.port + } + if (!request.hostname && !request.host) + request.hostname = headers.Host || headers.host + + this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' +} + +RequestSigner.prototype.matchHost = function(host) { + var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com(\.cn)?$/) + var hostParts = (match || []).slice(1, 3) + + // ES's hostParts are sometimes the other way round, if the value that is expected + // to be region equals ‘es’ switch them back + // e.g. search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com + if (hostParts[1] === 'es') + hostParts = hostParts.reverse() + + return hostParts +} + +// http://docs.aws.amazon.com/general/latest/gr/rande.html +RequestSigner.prototype.isSingleRegion = function() { + // Special case for S3 and SimpleDB in us-east-1 + if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true + + return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts'] + .indexOf(this.service) >= 0 +} + +RequestSigner.prototype.createHost = function() { + var region = this.isSingleRegion() ? '' : + (this.service === 's3' && this.region !== 'us-east-1' ? '-' : '.') + this.region, + service = this.service === 'ses' ? 
'email' : this.service + return service + region + '.amazonaws.com' +} + +RequestSigner.prototype.prepareRequest = function() { + this.parsePath() + + var request = this.request, headers = request.headers, query + + if (request.signQuery) { + + this.parsedPath.query = query = this.parsedPath.query || {} + + if (this.credentials.sessionToken) + query['X-Amz-Security-Token'] = this.credentials.sessionToken + + if (this.service === 's3' && !query['X-Amz-Expires']) + query['X-Amz-Expires'] = 86400 + + if (query['X-Amz-Date']) + this.datetime = query['X-Amz-Date'] + else + query['X-Amz-Date'] = this.getDateTime() + + query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256' + query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString() + query['X-Amz-SignedHeaders'] = this.signedHeaders() + + } else { + + if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { + if (request.body && !headers['Content-Type'] && !headers['content-type']) + headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' + + if (request.body && !headers['Content-Length'] && !headers['content-length']) + headers['Content-Length'] = Buffer.byteLength(request.body) + + if (this.credentials.sessionToken && !headers['X-Amz-Security-Token'] && !headers['x-amz-security-token']) + headers['X-Amz-Security-Token'] = this.credentials.sessionToken + + if (this.service === 's3' && !headers['X-Amz-Content-Sha256'] && !headers['x-amz-content-sha256']) + headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex') + + if (headers['X-Amz-Date'] || headers['x-amz-date']) + this.datetime = headers['X-Amz-Date'] || headers['x-amz-date'] + else + headers['X-Amz-Date'] = this.getDateTime() + } + + delete headers.Authorization + delete headers.authorization + } +} + +RequestSigner.prototype.sign = function() { + if (!this.parsedPath) this.prepareRequest() + + if (this.request.signQuery) { + this.parsedPath.query['X-Amz-Signature'] = this.signature() + } else { + this.request.headers.Authorization = this.authHeader() + } + + this.request.path = this.formatPath() + + return this.request +} + +RequestSigner.prototype.getDateTime = function() { + if (!this.datetime) { + var headers = this.request.headers, + date = new Date(headers.Date || headers.date || new Date) + + this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') + + // Remove the trailing 'Z' on the timestamp string for CodeCommit git access + if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) + } + return this.datetime +} + +RequestSigner.prototype.getDate = function() { + return this.getDateTime().substr(0, 8) +} + +RequestSigner.prototype.authHeader = function() { + return [ + 'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(), + 'SignedHeaders=' + this.signedHeaders(), + 'Signature=' + this.signature(), + ].join(', ') +} + +RequestSigner.prototype.signature = function() { + var date = this.getDate(), + cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(), + kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey) + if (!kCredentials) { + kDate = hmac('AWS4' + this.credentials.secretAccessKey, date) + kRegion = hmac(kDate, this.region) + kService = hmac(kRegion, this.service) + kCredentials = hmac(kService, 'aws4_request') + credentialsCache.set(cacheKey, kCredentials) + } + return hmac(kCredentials, this.stringToSign(), 'hex') +} + +RequestSigner.prototype.stringToSign = function() { + return [ + 'AWS4-HMAC-SHA256', + 
this.getDateTime(), + this.credentialString(), + hash(this.canonicalString(), 'hex'), + ].join('\n') +} + +RequestSigner.prototype.canonicalString = function() { + if (!this.parsedPath) this.prepareRequest() + + var pathStr = this.parsedPath.path, + query = this.parsedPath.query, + headers = this.request.headers, + queryStr = '', + normalizePath = this.service !== 's3', + decodePath = this.service === 's3' || this.request.doNotEncodePath, + decodeSlashesInPath = this.service === 's3', + firstValOnly = this.service === 's3', + bodyHash + + if (this.service === 's3' && this.request.signQuery) { + bodyHash = 'UNSIGNED-PAYLOAD' + } else if (this.isCodeCommitGit) { + bodyHash = '' + } else { + bodyHash = headers['X-Amz-Content-Sha256'] || headers['x-amz-content-sha256'] || + hash(this.request.body || '', 'hex') + } + + if (query) { + queryStr = encodeRfc3986(querystring.stringify(Object.keys(query).sort().reduce(function(obj, key) { + if (!key) return obj + obj[key] = !Array.isArray(query[key]) ? query[key] : + (firstValOnly ? query[key][0] : query[key].slice().sort()) + return obj + }, {}))) + } + if (pathStr !== '/') { + if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/') + pathStr = pathStr.split('/').reduce(function(path, piece) { + if (normalizePath && piece === '..') { + path.pop() + } else if (!normalizePath || piece !== '.') { + if (decodePath) piece = decodeURIComponent(piece) + path.push(encodeRfc3986(encodeURIComponent(piece))) + } + return path + }, []).join('/') + if (pathStr[0] !== '/') pathStr = '/' + pathStr + if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/') + } + + return [ + this.request.method || 'GET', + pathStr, + queryStr, + this.canonicalHeaders() + '\n', + this.signedHeaders(), + bodyHash, + ].join('\n') +} + +RequestSigner.prototype.canonicalHeaders = function() { + var headers = this.request.headers + function trimAll(header) { + return header.toString().trim().replace(/\s+/g, ' ') + } + return Object.keys(headers) + .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? 
-1 : 1 }) + .map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) }) + .join('\n') +} + +RequestSigner.prototype.signedHeaders = function() { + return Object.keys(this.request.headers) + .map(function(key) { return key.toLowerCase() }) + .sort() + .join(';') +} + +RequestSigner.prototype.credentialString = function() { + return [ + this.getDate(), + this.region, + this.service, + 'aws4_request', + ].join('/') +} + +RequestSigner.prototype.defaultCredentials = function() { + var env = process.env + return { + accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY, + secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY, + sessionToken: env.AWS_SESSION_TOKEN, + } +} + +RequestSigner.prototype.parsePath = function() { + var path = this.request.path || '/', + queryIx = path.indexOf('?'), + query = null + + if (queryIx >= 0) { + query = querystring.parse(path.slice(queryIx + 1)) + path = path.slice(0, queryIx) + } + + // S3 doesn't always encode characters > 127 correctly and + // all services don't encode characters > 255 correctly + // So if there are non-reserved chars (and it's not already all % encoded), just encode them all + if (/[^0-9A-Za-z!'()*\-._~%/]/.test(path)) { + path = path.split('/').map(function(piece) { + return encodeURIComponent(decodeURIComponent(piece)) + }).join('/') + } + + this.parsedPath = { + path: path, + query: query, + } +} + +RequestSigner.prototype.formatPath = function() { + var path = this.parsedPath.path, + query = this.parsedPath.query + + if (!query) return path + + // Services don't support empty query string keys + if (query[''] != null) delete query[''] + + return path + '?' + encodeRfc3986(querystring.stringify(query)) +} + +aws4.RequestSigner = RequestSigner + +aws4.sign = function(request, credentials) { + return new RequestSigner(request, credentials).sign() +} diff --git a/node_modules/aws4/lru.js b/node_modules/aws4/lru.js new file mode 100644 index 0000000..333f66a --- /dev/null +++ b/node_modules/aws4/lru.js @@ -0,0 +1,96 @@ +module.exports = function(size) { + return new LruCache(size) +} + +function LruCache(size) { + this.capacity = size | 0 + this.map = Object.create(null) + this.list = new DoublyLinkedList() +} + +LruCache.prototype.get = function(key) { + var node = this.map[key] + if (node == null) return undefined + this.used(node) + return node.val +} + +LruCache.prototype.set = function(key, val) { + var node = this.map[key] + if (node != null) { + node.val = val + } else { + if (!this.capacity) this.prune() + if (!this.capacity) return false + node = new DoublyLinkedNode(key, val) + this.map[key] = node + this.capacity-- + } + this.used(node) + return true +} + +LruCache.prototype.used = function(node) { + this.list.moveToFront(node) +} + +LruCache.prototype.prune = function() { + var node = this.list.pop() + if (node != null) { + delete this.map[node.key] + this.capacity++ + } +} + + +function DoublyLinkedList() { + this.firstNode = null + this.lastNode = null +} + +DoublyLinkedList.prototype.moveToFront = function(node) { + if (this.firstNode == node) return + + this.remove(node) + + if (this.firstNode == null) { + this.firstNode = node + this.lastNode = node + node.prev = null + node.next = null + } else { + node.prev = null + node.next = this.firstNode + node.next.prev = node + this.firstNode = node + } +} + +DoublyLinkedList.prototype.pop = function() { + var lastNode = this.lastNode + if (lastNode != null) { + this.remove(lastNode) + } + return lastNode +} + +DoublyLinkedList.prototype.remove = 
function(node) { + if (this.firstNode == node) { + this.firstNode = node.next + } else if (node.prev != null) { + node.prev.next = node.next + } + if (this.lastNode == node) { + this.lastNode = node.prev + } else if (node.next != null) { + node.next.prev = node.prev + } +} + + +function DoublyLinkedNode(key, val) { + this.key = key + this.val = val + this.prev = null + this.next = null +} diff --git a/node_modules/aws4/package.json b/node_modules/aws4/package.json new file mode 100644 index 0000000..533a63d --- /dev/null +++ b/node_modules/aws4/package.json @@ -0,0 +1,104 @@ +{ + "_from": "aws4@^1.8.0", + "_id": "aws4@1.8.0", + "_inBundle": false, + "_integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", + "_location": "/aws4", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aws4@^1.8.0", + "name": "aws4", + "escapedName": "aws4", + "rawSpec": "^1.8.0", + "saveSpec": null, + "fetchSpec": "^1.8.0" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", + "_shasum": "f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f", + "_spec": "aws4@^1.8.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/request", + "author": { + "name": "Michael Hart", + "email": "michael.hart.au@gmail.com", + "url": "http://github.com/mhart" + }, + "bugs": { + "url": "https://github.com/mhart/aws4/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Signs and prepares requests using AWS Signature Version 4", + "devDependencies": { + "mocha": "^2.4.5", + "should": "^8.2.2" + }, + "homepage": "https://github.com/mhart/aws4#readme", + "keywords": [ + "amazon", + "aws", + "signature", + "s3", + "ec2", + "autoscaling", + "cloudformation", + "elasticloadbalancing", + "elb", + "elasticbeanstalk", + "cloudsearch", + "dynamodb", + "kinesis", + "lambda", + "glacier", + "sqs", + "sns", + "iam", + "sts", + "ses", + "swf", + "storagegateway", + "datapipeline", + "directconnect", + "redshift", + "opsworks", + "rds", + "monitoring", + "cloudtrail", + "cloudfront", + "codedeploy", + "elasticache", + "elasticmapreduce", + "elastictranscoder", + "emr", + "cloudwatch", + "mobileanalytics", + "cognitoidentity", + "cognitosync", + "cognito", + "containerservice", + "ecs", + "appstream", + "keymanagementservice", + "kms", + "config", + "cloudhsm", + "route53", + "route53domains", + "logs" + ], + "license": "MIT", + "main": "aws4.js", + "name": "aws4", + "repository": { + "type": "git", + "url": "git+https://github.com/mhart/aws4.git" + }, + "scripts": { + "test": "mocha ./test/fast.js ./test/slow.js -b -t 100s -R list" + }, + "version": "1.8.0" +} diff --git a/node_modules/basic-auth/HISTORY.md b/node_modules/basic-auth/HISTORY.md new file mode 100644 index 0000000..2c44a01 --- /dev/null +++ b/node_modules/basic-auth/HISTORY.md @@ -0,0 +1,52 @@ +2.0.1 / 2018-09-19 +================== + + * deps: safe-buffer@5.1.2 + +2.0.0 / 2017-09-12 +================== + + * Drop support for Node.js below 0.8 + * Remove `auth(ctx)` signature -- pass in header or `auth(ctx.req)` + * Use `safe-buffer` for improved Buffer API + +1.1.0 / 2016-11-18 +================== + + * Add `auth.parse` for low-level string parsing + +1.0.4 / 2016-05-10 +================== + + * Improve error message when `req` argument is not an object + * Improve error message when `req` missing `headers` property + +1.0.3 / 2015-07-01 +================== + + * Fix regression 
accepting a Koa context + +1.0.2 / 2015-06-12 +================== + + * Improve error message when `req` argument missing + * perf: enable strict mode + * perf: hoist regular expression + * perf: parse with regular expressions + * perf: remove argument reassignment + +1.0.1 / 2015-05-04 +================== + + * Update readme + +1.0.0 / 2014-07-01 +================== + + * Support empty password + * Support empty username + +0.0.1 / 2013-11-30 +================== + + * Initial release diff --git a/node_modules/basic-auth/LICENSE b/node_modules/basic-auth/LICENSE new file mode 100644 index 0000000..89041f6 --- /dev/null +++ b/node_modules/basic-auth/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2013 TJ Holowaychuk +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/basic-auth/README.md b/node_modules/basic-auth/README.md new file mode 100644 index 0000000..5f3d758 --- /dev/null +++ b/node_modules/basic-auth/README.md @@ -0,0 +1,113 @@ +# basic-auth + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Generic basic auth Authorization header field parser for whatever. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +``` +$ npm install basic-auth +``` + +## API + + + +```js +var auth = require('basic-auth') +``` + +### auth(req) + +Get the basic auth credentials from the given request. The `Authorization` +header is parsed and if the header is invalid, `undefined` is returned, +otherwise an object with `name` and `pass` properties. + +### auth.parse(string) + +Parse a basic auth authorization header string. This will return an object +with `name` and `pass` properties, or `undefined` if the string is invalid. + +## Example + +Pass a Node.js request object to the module export. If parsing fails +`undefined` is returned, otherwise an object with `.name` and `.pass`. 
+ + + +```js +var auth = require('basic-auth') +var user = auth(req) +// => { name: 'something', pass: 'whatever' } +``` + +A header string from any other location can also be parsed with +`auth.parse`, for example a `Proxy-Authorization` header: + + + +```js +var auth = require('basic-auth') +var user = auth.parse(req.getHeader('Proxy-Authorization')) +``` + +### With vanilla node.js http server + +```js +var http = require('http') +var auth = require('basic-auth') +var compare = require('tsscmp') + +// Create server +var server = http.createServer(function (req, res) { + var credentials = auth(req) + + // Check credentials + // The "check" function will typically be against your user store + if (!credentials || !check(credentials.name, credentials.pass)) { + res.statusCode = 401 + res.setHeader('WWW-Authenticate', 'Basic realm="example"') + res.end('Access denied') + } else { + res.end('Access granted') + } +}) + +// Basic function to validate credentials for example +function check (name, pass) { + var valid = true + + // Simple method to prevent short-circut and use timing-safe compare + valid = compare(name, 'john') && valid + valid = compare(pass, 'secret') && valid + + return valid +} + +// Listen +server.listen(3000) +``` + +# License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/basic-auth/master +[coveralls-url]: https://coveralls.io/r/jshttp/basic-auth?branch=master +[downloads-image]: https://badgen.net/npm/dm/basic-auth +[downloads-url]: https://npmjs.org/package/basic-auth +[node-version-image]: https://badgen.net/npm/node/basic-auth +[node-version-url]: https://nodejs.org/en/download +[npm-image]: https://badgen.net/npm/v/basic-auth +[npm-url]: https://npmjs.org/package/basic-auth +[travis-image]: https://badgen.net/travis/jshttp/basic-auth/master +[travis-url]: https://travis-ci.org/jshttp/basic-auth diff --git a/node_modules/basic-auth/index.js b/node_modules/basic-auth/index.js new file mode 100644 index 0000000..9106e64 --- /dev/null +++ b/node_modules/basic-auth/index.js @@ -0,0 +1,133 @@ +/*! + * basic-auth + * Copyright(c) 2013 TJ Holowaychuk + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Buffer = require('safe-buffer').Buffer + +/** + * Module exports. + * @public + */ + +module.exports = auth +module.exports.parse = parse + +/** + * RegExp for basic auth credentials + * + * credentials = auth-scheme 1*SP token68 + * auth-scheme = "Basic" ; case insensitive + * token68 = 1*( ALPHA / DIGIT / "-" / "." / "_" / "~" / "+" / "/" ) *"=" + * @private + */ + +var CREDENTIALS_REGEXP = /^ *(?:[Bb][Aa][Ss][Ii][Cc]) +([A-Za-z0-9._~+/-]+=*) *$/ + +/** + * RegExp for basic auth user/pass + * + * user-pass = userid ":" password + * userid = * + * password = *TEXT + * @private + */ + +var USER_PASS_REGEXP = /^([^:]*):(.*)$/ + +/** + * Parse the Authorization header field of a request. + * + * @param {object} req + * @return {object} with .name and .pass + * @public + */ + +function auth (req) { + if (!req) { + throw new TypeError('argument req is required') + } + + if (typeof req !== 'object') { + throw new TypeError('argument req is required to be an object') + } + + // get header + var header = getAuthorization(req) + + // parse header + return parse(header) +} + +/** + * Decode base64 string. 
+ * @private + */ + +function decodeBase64 (str) { + return Buffer.from(str, 'base64').toString() +} + +/** + * Get the Authorization header from request object. + * @private + */ + +function getAuthorization (req) { + if (!req.headers || typeof req.headers !== 'object') { + throw new TypeError('argument req is required to have headers property') + } + + return req.headers.authorization +} + +/** + * Parse basic auth to object. + * + * @param {string} string + * @return {object} + * @public + */ + +function parse (string) { + if (typeof string !== 'string') { + return undefined + } + + // parse header + var match = CREDENTIALS_REGEXP.exec(string) + + if (!match) { + return undefined + } + + // decode user pass + var userPass = USER_PASS_REGEXP.exec(decodeBase64(match[1])) + + if (!userPass) { + return undefined + } + + // return credentials object + return new Credentials(userPass[1], userPass[2]) +} + +/** + * Object to represent user credentials. + * @private + */ + +function Credentials (name, pass) { + this.name = name + this.pass = pass +} diff --git a/node_modules/basic-auth/package.json b/node_modules/basic-auth/package.json new file mode 100644 index 0000000..baf116a --- /dev/null +++ b/node_modules/basic-auth/package.json @@ -0,0 +1,73 @@ +{ + "_from": "basic-auth@~2.0.0", + "_id": "basic-auth@2.0.1", + "_inBundle": false, + "_integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "_location": "/basic-auth", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "basic-auth@~2.0.0", + "name": "basic-auth", + "escapedName": "basic-auth", + "rawSpec": "~2.0.0", + "saveSpec": null, + "fetchSpec": "~2.0.0" + }, + "_requiredBy": [ + "/morgan" + ], + "_resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "_shasum": "b998279bf47ce38344b4f3cf916d4679bbf51e3a", + "_spec": "basic-auth@~2.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/morgan", + "bugs": { + "url": "https://github.com/jshttp/basic-auth/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "5.1.2" + }, + "deprecated": false, + "description": "node.js basic auth parser", + "devDependencies": { + "eslint": "5.6.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.14.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "7.0.1", + "eslint-plugin-promise": "4.0.1", + "eslint-plugin-standard": "4.0.0", + "istanbul": "0.4.5", + "mocha": "5.2.0" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "homepage": "https://github.com/jshttp/basic-auth#readme", + "keywords": [ + "basic", + "auth", + "authorization", + "basicauth" + ], + "license": "MIT", + "name": "basic-auth", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/basic-auth.git" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --check-leaks --reporter spec --bail", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "version": "2.0.1" +} diff --git a/node_modules/bcrypt-pbkdf/CONTRIBUTING.md b/node_modules/bcrypt-pbkdf/CONTRIBUTING.md new file mode 100644 index 0000000..401d34e --- /dev/null +++ b/node_modules/bcrypt-pbkdf/CONTRIBUTING.md @@ -0,0 +1,13 @@ +# 
Contributing + +This repository uses [cr.joyent.us](https://cr.joyent.us) (Gerrit) for new +changes. Anyone can submit changes. To get started, see the [cr.joyent.us user +guide](https://github.com/joyent/joyent-gerrit/blob/master/docs/user/README.md). +This repo does not use GitHub pull requests. + +See the [Joyent Engineering +Guidelines](https://github.com/joyent/eng/blob/master/docs/index.md) for general +best practices expected in this repository. + +If you're changing something non-trivial or user-facing, you may want to submit +an issue first. diff --git a/node_modules/bcrypt-pbkdf/LICENSE b/node_modules/bcrypt-pbkdf/LICENSE new file mode 100644 index 0000000..fc58d2a --- /dev/null +++ b/node_modules/bcrypt-pbkdf/LICENSE @@ -0,0 +1,66 @@ +The Blowfish portions are under the following license: + +Blowfish block cipher for OpenBSD +Copyright 1997 Niels Provos +All rights reserved. + +Implementation advice by David Mazieres . + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +The bcrypt_pbkdf portions are under the following license: + +Copyright (c) 2013 Ted Unangst + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + + +Performance improvements (Javascript-specific): + +Copyright 2016, Joyent Inc +Author: Alex Wilson + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/bcrypt-pbkdf/README.md b/node_modules/bcrypt-pbkdf/README.md new file mode 100644 index 0000000..7551f33 --- /dev/null +++ b/node_modules/bcrypt-pbkdf/README.md @@ -0,0 +1,45 @@ +Port of the OpenBSD `bcrypt_pbkdf` function to pure Javascript. `npm`-ified +version of [Devi Mandiri's port](https://github.com/devi/tmp/blob/master/js/bcrypt_pbkdf.js), +with some minor performance improvements. The code is copied verbatim (and +un-styled) from Devi's work. + +This product includes software developed by Niels Provos. + +## API + +### `bcrypt_pbkdf.pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds)` + +Derive a cryptographic key of arbitrary length from a given password and salt, +using the OpenBSD `bcrypt_pbkdf` function. This is a combination of Blowfish and +SHA-512. + +See [this article](http://www.tedunangst.com/flak/post/bcrypt-pbkdf) for +further information. + +Parameters: + + * `pass`, a Uint8Array of length `passlen` + * `passlen`, an integer Number + * `salt`, a Uint8Array of length `saltlen` + * `saltlen`, an integer Number + * `key`, a Uint8Array of length `keylen`, will be filled with output + * `keylen`, an integer Number + * `rounds`, an integer Number, number of rounds of the PBKDF to run + +### `bcrypt_pbkdf.hash(sha2pass, sha2salt, out)` + +Calculate a Blowfish hash, given SHA2-512 output of a password and salt. Used as +part of the inner round function in the PBKDF. + +Parameters: + + * `sha2pass`, a Uint8Array of length 64 + * `sha2salt`, a Uint8Array of length 64 + * `out`, a Uint8Array of length 32, will be filled with output + +## License + +This source form is a 1:1 port from the OpenBSD `blowfish.c` and `bcrypt_pbkdf.c`. +As a result, it retains the original copyright and license. The two files are +under slightly different (but compatible) licenses, and are here combined in +one file. For each of the full license texts see `LICENSE`. diff --git a/node_modules/bcrypt-pbkdf/index.js b/node_modules/bcrypt-pbkdf/index.js new file mode 100644 index 0000000..b1b5ad4 --- /dev/null +++ b/node_modules/bcrypt-pbkdf/index.js @@ -0,0 +1,556 @@ +'use strict'; + +var crypto_hash_sha512 = require('tweetnacl').lowlevel.crypto_hash; + +/* + * This file is a 1:1 port from the OpenBSD blowfish.c and bcrypt_pbkdf.c. As a + * result, it retains the original copyright and license. The two files are + * under slightly different (but compatible) licenses, and are here combined in + * one file. + * + * Credit for the actual porting work goes to: + * Devi Mandiri + */ + +/* + * The Blowfish portions are under the following license: + * + * Blowfish block cipher for OpenBSD + * Copyright 1997 Niels Provos + * All rights reserved. + * + * Implementation advice by David Mazieres . + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * The bcrypt_pbkdf portions are under the following license: + * + * Copyright (c) 2013 Ted Unangst + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Performance improvements (Javascript-specific): + * + * Copyright 2016, Joyent Inc + * Author: Alex Wilson + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ */ + +// Ported from OpenBSD bcrypt_pbkdf.c v1.9 + +var BLF_J = 0; + +var Blowfish = function() { + this.S = [ + new Uint32Array([ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]), + new Uint32Array([ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 
+ 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]), + new Uint32Array([ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 
0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]), + new Uint32Array([ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 
0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]) + ]; + this.P = new Uint32Array([ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b]); +}; + +function F(S, x8, i) { + return (((S[0][x8[i+3]] + + S[1][x8[i+2]]) ^ + S[2][x8[i+1]]) + + S[3][x8[i]]); +}; + +Blowfish.prototype.encipher = function(x, x8) { + if (x8 === undefined) { + x8 = new Uint8Array(x.buffer); + if (x.byteOffset !== 0) + x8 = x8.subarray(x.byteOffset); + } + x[0] ^= this.P[0]; + for (var i = 1; i < 16; i += 2) { + x[1] ^= F(this.S, x8, 0) ^ this.P[i]; + x[0] ^= F(this.S, x8, 4) ^ this.P[i+1]; + } + var t = x[0]; + x[0] = x[1] ^ this.P[17]; + x[1] = t; +}; + +Blowfish.prototype.decipher = function(x) { + var x8 = new Uint8Array(x.buffer); + if (x.byteOffset !== 0) + x8 = x8.subarray(x.byteOffset); + x[0] ^= this.P[17]; + for (var i = 16; i > 0; i -= 2) { + x[1] ^= F(this.S, x8, 0) ^ this.P[i]; + x[0] ^= F(this.S, x8, 4) ^ this.P[i-1]; + } + var t = x[0]; + x[0] = x[1] ^ this.P[0]; + x[1] = t; +}; + +function stream2word(data, databytes){ + var i, temp = 0; + for (i = 0; i < 4; i++, BLF_J++) { + if 
(BLF_J >= databytes) BLF_J = 0; + temp = (temp << 8) | data[BLF_J]; + } + return temp; +}; + +Blowfish.prototype.expand0state = function(key, keybytes) { + var d = new Uint32Array(2), i, k; + var d8 = new Uint8Array(d.buffer); + + for (i = 0, BLF_J = 0; i < 18; i++) { + this.P[i] ^= stream2word(key, keybytes); + } + BLF_J = 0; + + for (i = 0; i < 18; i += 2) { + this.encipher(d, d8); + this.P[i] = d[0]; + this.P[i+1] = d[1]; + } + + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + this.encipher(d, d8); + this.S[i][k] = d[0]; + this.S[i][k+1] = d[1]; + } + } +}; + +Blowfish.prototype.expandstate = function(data, databytes, key, keybytes) { + var d = new Uint32Array(2), i, k; + + for (i = 0, BLF_J = 0; i < 18; i++) { + this.P[i] ^= stream2word(key, keybytes); + } + + for (i = 0, BLF_J = 0; i < 18; i += 2) { + d[0] ^= stream2word(data, databytes); + d[1] ^= stream2word(data, databytes); + this.encipher(d); + this.P[i] = d[0]; + this.P[i+1] = d[1]; + } + + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + d[0] ^= stream2word(data, databytes); + d[1] ^= stream2word(data, databytes); + this.encipher(d); + this.S[i][k] = d[0]; + this.S[i][k+1] = d[1]; + } + } + BLF_J = 0; +}; + +Blowfish.prototype.enc = function(data, blocks) { + for (var i = 0; i < blocks; i++) { + this.encipher(data.subarray(i*2)); + } +}; + +Blowfish.prototype.dec = function(data, blocks) { + for (var i = 0; i < blocks; i++) { + this.decipher(data.subarray(i*2)); + } +}; + +var BCRYPT_BLOCKS = 8, + BCRYPT_HASHSIZE = 32; + +function bcrypt_hash(sha2pass, sha2salt, out) { + var state = new Blowfish(), + cdata = new Uint32Array(BCRYPT_BLOCKS), i, + ciphertext = new Uint8Array([79,120,121,99,104,114,111,109,97,116,105, + 99,66,108,111,119,102,105,115,104,83,119,97,116,68,121,110,97,109, + 105,116,101]); //"OxychromaticBlowfishSwatDynamite" + + state.expandstate(sha2salt, 64, sha2pass, 64); + for (i = 0; i < 64; i++) { + state.expand0state(sha2salt, 64); + state.expand0state(sha2pass, 64); + } + + for (i = 0; i < BCRYPT_BLOCKS; i++) + cdata[i] = stream2word(ciphertext, ciphertext.byteLength); + for (i = 0; i < 64; i++) + state.enc(cdata, cdata.byteLength / 8); + + for (i = 0; i < BCRYPT_BLOCKS; i++) { + out[4*i+3] = cdata[i] >>> 24; + out[4*i+2] = cdata[i] >>> 16; + out[4*i+1] = cdata[i] >>> 8; + out[4*i+0] = cdata[i]; + } +}; + +function bcrypt_pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds) { + var sha2pass = new Uint8Array(64), + sha2salt = new Uint8Array(64), + out = new Uint8Array(BCRYPT_HASHSIZE), + tmpout = new Uint8Array(BCRYPT_HASHSIZE), + countsalt = new Uint8Array(saltlen+4), + i, j, amt, stride, dest, count, + origkeylen = keylen; + + if (rounds < 1) + return -1; + if (passlen === 0 || saltlen === 0 || keylen === 0 || + keylen > (out.byteLength * out.byteLength) || saltlen > (1<<20)) + return -1; + + stride = Math.floor((keylen + out.byteLength - 1) / out.byteLength); + amt = Math.floor((keylen + stride - 1) / stride); + + for (i = 0; i < saltlen; i++) + countsalt[i] = salt[i]; + + crypto_hash_sha512(sha2pass, pass, passlen); + + for (count = 1; keylen > 0; count++) { + countsalt[saltlen+0] = count >>> 24; + countsalt[saltlen+1] = count >>> 16; + countsalt[saltlen+2] = count >>> 8; + countsalt[saltlen+3] = count; + + crypto_hash_sha512(sha2salt, countsalt, saltlen + 4); + bcrypt_hash(sha2pass, sha2salt, tmpout); + for (i = out.byteLength; i--;) + out[i] = tmpout[i]; + + for (i = 1; i < rounds; i++) { + crypto_hash_sha512(sha2salt, tmpout, tmpout.byteLength); + bcrypt_hash(sha2pass, 
sha2salt, tmpout); + for (j = 0; j < out.byteLength; j++) + out[j] ^= tmpout[j]; + } + + amt = Math.min(amt, keylen); + for (i = 0; i < amt; i++) { + dest = i * stride + (count - 1); + if (dest >= origkeylen) + break; + key[dest] = out[i]; + } + keylen -= i; + } + + return 0; +}; + +module.exports = { + BLOCKS: BCRYPT_BLOCKS, + HASHSIZE: BCRYPT_HASHSIZE, + hash: bcrypt_hash, + pbkdf: bcrypt_pbkdf +}; diff --git a/node_modules/bcrypt-pbkdf/package.json b/node_modules/bcrypt-pbkdf/package.json new file mode 100644 index 0000000..33e0f8c --- /dev/null +++ b/node_modules/bcrypt-pbkdf/package.json @@ -0,0 +1,44 @@ +{ + "_from": "bcrypt-pbkdf@^1.0.0", + "_id": "bcrypt-pbkdf@1.0.2", + "_inBundle": false, + "_integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "_location": "/bcrypt-pbkdf", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "bcrypt-pbkdf@^1.0.0", + "name": "bcrypt-pbkdf", + "escapedName": "bcrypt-pbkdf", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "_shasum": "a4301d389b6a43f9b67ff3ca11a3f6637e360e9e", + "_spec": "bcrypt-pbkdf@^1.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/sshpk", + "bugs": { + "url": "https://github.com/joyent/node-bcrypt-pbkdf/issues" + }, + "bundleDependencies": false, + "dependencies": { + "tweetnacl": "^0.14.3" + }, + "deprecated": false, + "description": "Port of the OpenBSD bcrypt_pbkdf function to pure JS", + "devDependencies": {}, + "homepage": "https://github.com/joyent/node-bcrypt-pbkdf#readme", + "license": "BSD-3-Clause", + "main": "index.js", + "name": "bcrypt-pbkdf", + "repository": { + "type": "git", + "url": "git://github.com/joyent/node-bcrypt-pbkdf.git" + }, + "version": "1.0.2" +} diff --git a/node_modules/body-parser/HISTORY.md b/node_modules/body-parser/HISTORY.md new file mode 100644 index 0000000..a1d3fbf --- /dev/null +++ b/node_modules/body-parser/HISTORY.md @@ -0,0 +1,609 @@ +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: 
qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 / 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: 
iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 +=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 +=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity 
foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + - empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 +================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - 
Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 +================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/node_modules/body-parser/LICENSE b/node_modules/body-parser/LICENSE new file mode 100644 index 0000000..386b7b6 --- /dev/null +++ b/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to 
+permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/body-parser/README.md b/node_modules/body-parser/README.md new file mode 100644 index 0000000..aba6297 --- /dev/null +++ b/node_modules/body-parser/README.md @@ -0,0 +1,443 @@ +# body-parser + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + + + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. + +The various errors returned by this module are described in the +[errors section](#errors). + +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. 
This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). + +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. +If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). 
If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). 
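
As a rough illustration of the difference (a minimal sketch using the underlying `qs` and `querystring` libraries directly, not body-parser itself):

```js
var qs = require('qs')
var querystring = require('querystring')

// extended: true uses qs, which expands bracket notation into nested objects
qs.parse('person[name]=bob')
// => { person: { name: 'bob' } }

// extended: false uses querystring, which keeps the bracketed key as a flat string
querystring.parse('person[name]=bob')
// => { 'person[name]': 'bob' }
```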
+ +Defaults to `true`, but using the default has been deprecated. Please +research into the difference between `qs` and `querystring` and choose the +appropriate setting. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +## Errors + +The middlewares provided by this module create errors depending on the error +condition during parsing. The errors will typically have a `status`/`statusCode` +property that contains the suggested HTTP response code, an `expose` property +to determine if the `message` property should be displayed to the client, a +`type` property to determine the type of error without matching against the +`message`, and a `body` property containing the read body, if available. + +The following are the common errors emitted, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`. The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `charset` property will be set to the +encoding that is unsupported. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and `type` property is set to `'request.aborted'`. + +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. 
This typically occurs when the request is malformed, +typically when the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. + +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported. + +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. + +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/body-parser.svg +[npm-url]: https://npmjs.org/package/body-parser +[travis-image]: https://img.shields.io/travis/expressjs/body-parser/master.svg +[travis-url]: https://travis-ci.org/expressjs/body-parser +[coveralls-image]: https://img.shields.io/coveralls/expressjs/body-parser/master.svg +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[downloads-image]: https://img.shields.io/npm/dm/body-parser.svg +[downloads-url]: https://npmjs.org/package/body-parser diff --git a/node_modules/body-parser/index.js b/node_modules/body-parser/index.js new file mode 100644 index 0000000..93c3a1f --- /dev/null +++ b/node_modules/body-parser/index.js @@ -0,0 +1,157 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. + * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. + * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + var opts = {} + + // exclude type option + if (options) { + for (var prop in options) { + if (prop !== 'type') { + opts[prop] = options[prop] + } + } + } + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. 
+ * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js new file mode 100644 index 0000000..c102609 --- /dev/null +++ b/node_modules/body-parser/lib/read.js @@ -0,0 +1,181 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var createError = require('http-errors') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. + * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // read off entire request + stream.resume() + onFinished(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. 
+ * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} diff --git a/node_modules/body-parser/lib/types/json.js b/node_modules/body-parser/lib/types/json.js new file mode 100644 index 0000000..2971dc1 --- /dev/null +++ b/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,230 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. + * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*(.)/ // eslint-disable-line no-control-regex + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.substr(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. + * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = str.substring(0, index) + '#' + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace('#', char), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + return FIRST_CHAR_REGEXP.exec(str)[1] +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. + * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/raw.js b/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 0000000..f5d1b67 --- /dev/null +++ b/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! 
+ * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/text.js b/node_modules/body-parser/lib/types/text.js new file mode 100644 index 0000000..083a009 --- /dev/null +++ b/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/urlencoded.js b/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 0000000..b2ca8f1 --- /dev/null +++ b/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,284 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? 
queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: Infinity, + parameterLimit: parameterLimit + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. + * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? 
options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/package.json b/node_modules/body-parser/package.json new file mode 100644 index 0000000..fca32b0 --- /dev/null +++ b/node_modules/body-parser/package.json @@ -0,0 +1,92 @@ +{ + "_from": "body-parser@^1.19.0", + "_id": "body-parser@1.19.0", + "_inBundle": false, + "_integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "_location": "/body-parser", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "body-parser@^1.19.0", + "name": "body-parser", + "escapedName": "body-parser", + "rawSpec": "^1.19.0", + "saveSpec": null, + "fetchSpec": "^1.19.0" + }, + "_requiredBy": [ + "/express", + "/json-server" + ], + "_resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "_shasum": "96b2709e57c9c4e09a6fd66a8fd979844f69f08a", + "_spec": "body-parser@^1.19.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/json-server", + "bugs": { + "url": "https://github.com/expressjs/body-parser/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + } + ], + "dependencies": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "deprecated": false, + "description": "Node.js body parsing middleware", + "devDependencies": { + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "istanbul": "0.4.5", + "methods": "1.1.2", + "mocha": "6.1.4", + "safe-buffer": "5.1.2", + "supertest": "4.0.2" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "index.js" + ], + "homepage": "https://github.com/expressjs/body-parser#readme", + "license": "MIT", + "name": "body-parser", + "repository": { + "type": "git", + "url": "git+https://github.com/expressjs/body-parser.git" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --require test/support/env --reporter dot --check-leaks test/", + "test-travis": "istanbul cover 
node_modules/mocha/bin/_mocha --report lcovonly -- --require test/support/env --reporter spec --check-leaks test/" + }, + "version": "1.19.0" +} diff --git a/node_modules/boxen/index.d.ts b/node_modules/boxen/index.d.ts new file mode 100644 index 0000000..1f54661 --- /dev/null +++ b/node_modules/boxen/index.d.ts @@ -0,0 +1,178 @@ +import {LiteralUnion} from 'type-fest'; +import cliBoxes, {BoxStyle} from 'cli-boxes'; + +declare namespace boxen { + /** + Characters used for custom border. + + @example + ``` + // affffb + // e e + // dffffc + + const border: CustomBorderStyle = { + topLeft: 'a', + topRight: 'b', + bottomRight: 'c', + bottomLeft: 'd', + vertical: 'e', + horizontal: 'f' + }; + ``` + */ + interface CustomBorderStyle extends BoxStyle {} + + /** + Spacing used for `padding` and `margin`. + */ + interface Spacing { + readonly top: number; + readonly right: number; + readonly bottom: number; + readonly left: number; + } + + interface Options { + /** + Color of the box border. + */ + readonly borderColor?: LiteralUnion< + | 'black' + | 'red' + | 'green' + | 'yellow' + | 'blue' + | 'magenta' + | 'cyan' + | 'white' + | 'gray' + | 'grey' + | 'blackBright' + | 'redBright' + | 'greenBright' + | 'yellowBright' + | 'blueBright' + | 'magentaBright' + | 'cyanBright' + | 'whiteBright', + string + >; + + /** + Style of the box border. + + @default BorderStyle.Single + */ + readonly borderStyle?: BorderStyle | CustomBorderStyle; + + /** + Reduce opacity of the border. + + @default false + */ + readonly dimBorder?: boolean; + + /** + Space between the text and box border. + + @default 0 + */ + readonly padding?: number | Spacing; + + /** + Space around the box. + + @default 0 + */ + readonly margin?: number | Spacing; + + /** + Float the box on the available terminal screen space. + + @default 'left' + */ + readonly float?: 'left' | 'right' | 'center'; + + /** + Color of the background. + */ + readonly backgroundColor?: LiteralUnion< + | 'black' + | 'red' + | 'green' + | 'yellow' + | 'blue' + | 'magenta' + | 'cyan' + | 'white' + | 'blackBright' + | 'redBright' + | 'greenBright' + | 'yellowBright' + | 'blueBright' + | 'magentaBright' + | 'cyanBright' + | 'whiteBright', + string + >; + + /** + Align the text in the box based on the widest line. + + @default 'left' + */ + readonly align?: 'left' | 'right' | 'center'; + } +} + +declare const enum BorderStyle { + Single = 'single', + Double = 'double', + Round = 'round', + Bold = 'bold', + SingleDouble = 'singleDouble', + DoubleSingle = 'doubleSingle', + Classic = 'classic' +} + +declare const boxen: { + /** + Creates a box in the terminal. + + @param text - The text inside the box. + @returns The box. + + @example + ``` + import boxen = require('boxen'); + + console.log(boxen('unicorn', {padding: 1})); + // ┌─────────────┐ + // │ │ + // │ unicorn │ + // │ │ + // └─────────────┘ + + console.log(boxen('unicorn', {padding: 1, margin: 1, borderStyle: 'double'})); + // + // ╔═════════════╗ + // ║ ║ + // ║ unicorn ║ + // ║ ║ + // ╚═════════════╝ + // + ``` + */ + (text: string, options?: boxen.Options): string; + + /** + Border styles from [`cli-boxes`](https://github.com/sindresorhus/cli-boxes). 
+ */ + BorderStyle: typeof BorderStyle; + + // TODO: Remove this for the next major release + default: typeof boxen; +}; + +export = boxen; diff --git a/node_modules/boxen/index.js b/node_modules/boxen/index.js new file mode 100644 index 0000000..198a83f --- /dev/null +++ b/node_modules/boxen/index.js @@ -0,0 +1,141 @@ +'use strict'; +const stringWidth = require('string-width'); +const chalk = require('chalk'); +const widestLine = require('widest-line'); +const cliBoxes = require('cli-boxes'); +const camelCase = require('camelcase'); +const ansiAlign = require('ansi-align'); +const termSize = require('term-size'); + +const getObject = detail => { + let obj; + + if (typeof detail === 'number') { + obj = { + top: detail, + right: detail * 3, + bottom: detail, + left: detail * 3 + }; + } else { + obj = Object.assign({ + top: 0, + right: 0, + bottom: 0, + left: 0 + }, detail); + } + + return obj; +}; + +const getBorderChars = borderStyle => { + const sides = [ + 'topLeft', + 'topRight', + 'bottomRight', + 'bottomLeft', + 'vertical', + 'horizontal' + ]; + + let chars; + + if (typeof borderStyle === 'string') { + chars = cliBoxes[borderStyle]; + + if (!chars) { + throw new TypeError(`Invalid border style: ${borderStyle}`); + } + } else { + sides.forEach(key => { + if (!borderStyle[key] || typeof borderStyle[key] !== 'string') { + throw new TypeError(`Invalid border style: ${key}`); + } + }); + + chars = borderStyle; + } + + return chars; +}; + +const isHex = color => color.match(/^#[0-f]{3}(?:[0-f]{3})?$/i); +const isColorValid = color => typeof color === 'string' && ((chalk[color]) || isHex(color)); +const getColorFn = color => isHex(color) ? chalk.hex(color) : chalk[color]; +const getBGColorFn = color => isHex(color) ? chalk.bgHex(color) : chalk[camelCase(['bg', color])]; + +const boxen = (text, opts) => { + opts = Object.assign({ + padding: 0, + borderStyle: 'single', + dimBorder: false, + align: 'left', + float: 'left' + }, opts); + + if (opts.borderColor && !isColorValid(opts.borderColor)) { + throw new Error(`${opts.borderColor} is not a valid borderColor`); + } + + if (opts.backgroundColor && !isColorValid(opts.backgroundColor)) { + throw new Error(`${opts.backgroundColor} is not a valid backgroundColor`); + } + + const chars = getBorderChars(opts.borderStyle); + const padding = getObject(opts.padding); + const margin = getObject(opts.margin); + + const colorizeBorder = x => { + const ret = opts.borderColor ? getColorFn(opts.borderColor)(x) : x; + return opts.dimBorder ? chalk.dim(ret) : ret; + }; + + const colorizeContent = x => opts.backgroundColor ? 
getBGColorFn(opts.backgroundColor)(x) : x; + + text = ansiAlign(text, {align: opts.align}); + + const NL = '\n'; + const PAD = ' '; + + let lines = text.split(NL); + + if (padding.top > 0) { + lines = new Array(padding.top).fill('').concat(lines); + } + + if (padding.bottom > 0) { + lines = lines.concat(new Array(padding.bottom).fill('')); + } + + const contentWidth = widestLine(text) + padding.left + padding.right; + const paddingLeft = PAD.repeat(padding.left); + const {columns} = termSize(); + let marginLeft = PAD.repeat(margin.left); + + if (opts.float === 'center') { + const padWidth = Math.max((columns - contentWidth) / 2, 0); + marginLeft = PAD.repeat(padWidth); + } else if (opts.float === 'right') { + const padWidth = Math.max(columns - contentWidth - margin.right - 2, 0); + marginLeft = PAD.repeat(padWidth); + } + + const horizontal = chars.horizontal.repeat(contentWidth); + const top = colorizeBorder(NL.repeat(margin.top) + marginLeft + chars.topLeft + horizontal + chars.topRight); + const bottom = colorizeBorder(marginLeft + chars.bottomLeft + horizontal + chars.bottomRight + NL.repeat(margin.bottom)); + const side = colorizeBorder(chars.vertical); + + const middle = lines.map(line => { + const paddingRight = PAD.repeat(contentWidth - stringWidth(line) - padding.left); + return marginLeft + side + colorizeContent(paddingLeft + line + paddingRight) + side; + }).join(NL); + + return top + NL + middle + NL + bottom; +}; + +module.exports = boxen; +// TODO: Remove this for the next major release +module.exports.default = boxen; + +module.exports._borderStyles = cliBoxes; diff --git a/node_modules/boxen/license b/node_modules/boxen/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/boxen/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
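
Two details of the `boxen/index.js` implementation above worth calling out: `getObject` expands a numeric `padding`/`margin` to three times the value on the left/right, and `getBorderChars` throws a `TypeError` for an unrecognized border style. A minimal sketch:

```js
const boxen = require('boxen');

// padding: 1 is expanded by getObject to {top: 1, right: 3, bottom: 1, left: 3}
console.log(boxen('unicorn', {padding: 1}));

// an unrecognized style name is rejected by getBorderChars
// => TypeError: Invalid border style: not-a-style
boxen('unicorn', {borderStyle: 'not-a-style'});
```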
diff --git a/node_modules/boxen/package.json b/node_modules/boxen/package.json new file mode 100644 index 0000000..1ea33f2 --- /dev/null +++ b/node_modules/boxen/package.json @@ -0,0 +1,82 @@ +{ + "_from": "boxen@^3.0.0", + "_id": "boxen@3.2.0", + "_inBundle": false, + "_integrity": "sha512-cU4J/+NodM3IHdSL2yN8bqYqnmlBTidDR4RC7nJs61ZmtGz8VZzM3HLQX0zY5mrSmPtR3xWwsq2jOUQqFZN8+A==", + "_location": "/boxen", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "boxen@^3.0.0", + "name": "boxen", + "escapedName": "boxen", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/update-notifier" + ], + "_resolved": "https://registry.npmjs.org/boxen/-/boxen-3.2.0.tgz", + "_shasum": "fbdff0de93636ab4450886b6ff45b92d098f45eb", + "_spec": "boxen@^3.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/update-notifier", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/boxen/issues" + }, + "bundleDependencies": false, + "dependencies": { + "ansi-align": "^3.0.0", + "camelcase": "^5.3.1", + "chalk": "^2.4.2", + "cli-boxes": "^2.2.0", + "string-width": "^3.0.0", + "term-size": "^1.2.0", + "type-fest": "^0.3.0", + "widest-line": "^2.0.0" + }, + "deprecated": false, + "description": "Create boxes in the terminal", + "devDependencies": { + "ava": "^1.4.1", + "nyc": "^13.3.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/boxen#readme", + "keywords": [ + "cli", + "box", + "boxes", + "terminal", + "term", + "console", + "ascii", + "unicode", + "border", + "text" + ], + "license": "MIT", + "name": "boxen", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/boxen.git" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "version": "3.2.0" +} diff --git a/node_modules/boxen/readme.md b/node_modules/boxen/readme.md new file mode 100644 index 0000000..988c700 --- /dev/null +++ b/node_modules/boxen/readme.md @@ -0,0 +1,188 @@ +# boxen [](https://travis-ci.org/sindresorhus/boxen) + +> Create boxes in the terminal + + + + +## Install + +``` +$ npm install boxen +``` + + + + + + +## Usage + +```js +const boxen = require('boxen'); + +console.log(boxen('unicorn', {padding: 1})); +/* +┌─────────────┐ +│ │ +│ unicorn │ +│ │ +└─────────────┘ +*/ + +console.log(boxen('unicorn', {padding: 1, margin: 1, borderStyle: 'double'})); +/* + + ╔═════════════╗ + ║ ║ + ║ unicorn ║ + ║ ║ + ╚═════════════╝ + +*/ +``` + + +## API + +### boxen(text, [options]) + +#### text + +Type: `string` + +Text inside the box. + +#### options + +Type: `Object` + +##### borderColor + +Type: `string` +Values: `black` `red` `green` `yellow` `blue` `magenta` `cyan` `white` `gray` or a hex value like `#ff0000` + +Color of the box border. 
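
For example, both a named color and a hex value work (a sketch):

```js
const boxen = require('boxen');

console.log(boxen('unicorn', {padding: 1, borderColor: 'yellow'}));
console.log(boxen('unicorn', {padding: 1, borderColor: '#ff0000'}));
```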
+ +##### borderStyle + +Type: `string | object` +Default: `single` +Values: +- `single` +``` +┌───┐ +│foo│ +└───┘ +``` +- `double` +``` +╔═══╗ +║foo║ +╚═══╝ +``` +- `round` (`single` sides with round corners) +``` +╭───╮ +│foo│ +╰───╯ +``` +- `bold` +``` +┏━━━┓ +┃foo┃ +┗━━━┛ +``` +- `single-double` (`single` on top and bottom, `double` on right and left) +``` +╓───╖ +║foo║ +╙───╜ +``` +- `double-single` (`double` on top and bottom, `single` on right and left) +``` +╒═══╕ +│foo│ +╘═══╛ +``` +- `classic` +``` ++---+ +|foo| ++---+ +``` + +Style of the box border. + +Can be any of the above predefined styles or an object with the following keys: + +```js +{ + topLeft: '+', + topRight: '+', + bottomLeft: '+', + bottomRight: '+', + horizontal: '-', + vertical: '|' +} +``` + +##### dimBorder + +Type: `boolean` +Default: `false` + +Reduce opacity of the border. + +##### padding + +Type: `number | object` +Default: `0` + +Space between the text and box border. + +Accepts a number or an object with any of the `top`, `right`, `bottom`, `left` properties. When a number is specified, the left/right padding is 3 times the top/bottom to make it look nice. + +##### margin + +Type: `number | object` +Default: `0` + +Space around the box. + +Accepts a number or an object with any of the `top`, `right`, `bottom`, `left` properties. When a number is specified, the left/right margin is 3 times the top/bottom to make it look nice. + +##### float + +Type: `string` +Values: `right` `center` `left` +Default: `left` + +Float the box on the available terminal screen space. + +##### backgroundColor + +Type: `string` +Values: `black` `red` `green` `yellow` `blue` `magenta` `cyan` `white` `gray` or a hex value like `#ff0000` + +Color of the background. + +##### align + +Type: `string` +Default: `left` +Values: `left` `center` `right` + +Align the text in the box based on the widest line. 
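+
+As a quick recap, the snippet below combines several of the options documented above into a single call. It is only an illustrative sketch; the text and option values are arbitrary and not taken from the package documentation.
+
+```js
+const boxen = require('boxen');
+
+// Illustrative values only: a custom borderStyle object plus the padding,
+// margin, float, align and borderColor options described above.
+console.log(boxen('hello\nworld', {
+	padding: {top: 0, right: 2, bottom: 0, left: 2},
+	margin: 1,
+	float: 'center',
+	align: 'center',
+	borderColor: 'green',
+	borderStyle: {
+		topLeft: '+',
+		topRight: '+',
+		bottomLeft: '+',
+		bottomRight: '+',
+		horizontal: '-',
+		vertical: '|'
+	}
+}));
+```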
+ + +## Related + +- [boxen-cli](https://github.com/sindresorhus/boxen-cli) - CLI for this module +- [cli-boxes](https://github.com/sindresorhus/cli-boxes) - Boxes for use in the terminal +- [ink-box](https://github.com/sindresorhus/ink-box) - Box component for Ink that uses this package + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bytes/History.md b/node_modules/bytes/History.md new file mode 100644 index 0000000..cf6a5bb --- /dev/null +++ b/node_modules/bytes/History.md @@ -0,0 +1,87 @@ +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add negative support. fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/node_modules/bytes/LICENSE b/node_modules/bytes/LICENSE new file mode 100644 index 0000000..63e95a9 --- /dev/null +++ b/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md new file mode 100644 index 0000000..6ad1ec6 --- /dev/null +++ b/node_modules/bytes/Readme.md @@ -0,0 +1,126 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `.`... Default value to `''`. | +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes(1000); +// output: '1000B' + +bytes(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes(1024, {unitSeparator: ' '}); +// output: '1 KB' + +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. If no unit is given, or `value` +is a number, it is assumed the value is in bytes. + +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. + +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. 
| + +**Example** + +```js +bytes('1KB'); +// output: 1024 + +bytes('1024'); +// output: 1024 + +bytes(1024); +// output: 1KB +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/node/bytes +[npm-url]: https://npmjs.org/package/bytes +[travis-image]: https://badgen.net/travis/visionmedia/bytes.js/master +[travis-url]: https://travis-ci.org/visionmedia/bytes.js diff --git a/node_modules/bytes/index.js b/node_modules/bytes/index.js new file mode 100644 index 0000000..4975bfb --- /dev/null +++ b/node_modules/bytes/index.js @@ -0,0 +1,162 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. + * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. + * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. + * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. + * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? 
options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.replace(formatThousandsRegExp, thousandsSeparator); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. + * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json new file mode 100644 index 0000000..66c1bd6 --- /dev/null +++ b/node_modules/bytes/package.json @@ -0,0 +1,84 @@ +{ + "_from": "bytes@3.1.0", + "_id": "bytes@3.1.0", + "_inBundle": false, + "_integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", + "_location": "/bytes", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "bytes@3.1.0", + "name": "bytes", + "escapedName": "bytes", + "rawSpec": "3.1.0", + "saveSpec": null, + "fetchSpec": "3.1.0" + }, + "_requiredBy": [ + "/body-parser", + "/raw-body" + ], + "_resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "_shasum": "f6cf7933a360e0588fa9fde85651cdc7f805d1f6", + "_spec": "bytes@3.1.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/body-parser", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bugs": { + "url": "https://github.com/visionmedia/bytes.js/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jed Watson", + "email": "jed.watson@me.com" + }, + { + "name": "Théo FIDRY", + "email": "theo.fidry@gmail.com" + } + ], + "deprecated": false, + "description": "Utility to parse a string bytes to bytes and vice-versa", + "devDependencies": { + "eslint": "5.12.1", + "mocha": "5.2.0", + "nyc": "13.1.0" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "homepage": "https://github.com/visionmedia/bytes.js#readme", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "license": "MIT", + "name": "bytes", + "repository": { + "type": "git", + "url": "git+https://github.com/visionmedia/bytes.js.git" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=text npm 
test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "version": "3.1.0" +} diff --git a/node_modules/cacheable-request/LICENSE b/node_modules/cacheable-request/LICENSE new file mode 100644 index 0000000..f27ee9b --- /dev/null +++ b/node_modules/cacheable-request/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Luke Childs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/cacheable-request/README.md b/node_modules/cacheable-request/README.md new file mode 100644 index 0000000..725e7e0 --- /dev/null +++ b/node_modules/cacheable-request/README.md @@ -0,0 +1,206 @@ +# cacheable-request + +> Wrap native HTTP requests with RFC compliant cache support + +[](https://travis-ci.org/lukechilds/cacheable-request) +[](https://coveralls.io/github/lukechilds/cacheable-request?branch=master) +[](https://www.npmjs.com/package/cacheable-request) +[](https://www.npmjs.com/package/cacheable-request) + +[RFC 7234](http://httpwg.org/specs/rfc7234.html) compliant HTTP caching for native Node.js HTTP/HTTPS requests. Caching works out of the box in memory or is easily pluggable with a wide range of storage adapters. + +**Note:** This is a low level wrapper around the core HTTP modules, it's not a high level request library. 
+ +## Features + +- Only stores cacheable responses as defined by RFC 7234 +- Fresh cache entries are served directly from cache +- Stale cache entries are revalidated with `If-None-Match`/`If-Modified-Since` headers +- 304 responses from revalidation requests use cached body +- Updates `Age` header on cached responses +- Can completely bypass cache on a per request basis +- In memory cache by default +- Official support for Redis, MongoDB, SQLite, PostgreSQL and MySQL storage adapters +- Easily plug in your own or third-party storage adapters +- If DB connection fails, cache is automatically bypassed ([disabled by default](#optsautomaticfailover)) +- Adds cache support to any existing HTTP code with minimal changes +- Uses [http-cache-semantics](https://github.com/pornel/http-cache-semantics) internally for HTTP RFC 7234 compliance + +## Install + +```shell +npm install cacheable-request +``` + +## Usage + +```js +const http = require('http'); +const CacheableRequest = require('cacheable-request'); + +// Then instead of +const req = http.request('http://example.com', cb); +req.end(); + +// You can do +const cacheableRequest = new CacheableRequest(http.request); +const cacheReq = cacheableRequest('http://example.com', cb); +cacheReq.on('request', req => req.end()); +// Future requests to 'example.com' will be returned from cache if still valid + +// You pass in any other http.request API compatible method to be wrapped with cache support: +const cacheableRequest = new CacheableRequest(https.request); +const cacheableRequest = new CacheableRequest(electron.net); +``` + +## Storage Adapters + +`cacheable-request` uses [Keyv](https://github.com/lukechilds/keyv) to support a wide range of storage adapters. + +For example, to use Redis as a cache backend, you just need to install the official Redis Keyv storage adapter: + +``` +npm install @keyv/redis +``` + +And then you can pass `CacheableRequest` your connection string: + +```js +const cacheableRequest = new CacheableRequest(http.request, 'redis://user:pass@localhost:6379'); +``` + +[View all official Keyv storage adapters.](https://github.com/lukechilds/keyv#official-storage-adapters) + +Keyv also supports anything that follows the Map API so it's easy to write your own storage adapter or use a third-party solution. + +e.g The following are all valid storage adapters + +```js +const storageAdapter = new Map(); +// or +const storageAdapter = require('./my-storage-adapter'); +// or +const QuickLRU = require('quick-lru'); +const storageAdapter = new QuickLRU({ maxSize: 1000 }); + +const cacheableRequest = new CacheableRequest(http.request, storageAdapter); +``` + +View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters. + +## API + +### new cacheableRequest(request, [storageAdapter]) + +Returns the provided request function wrapped with cache support. + +#### request + +Type: `function` + +Request function to wrap with cache support. Should be [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) or a similar API compatible request function. + +#### storageAdapter + +Type: `Keyv storage adapter` +Default: `new Map()` + +A [Keyv](https://github.com/lukechilds/keyv) storage adapter instance, or connection string if using with an official Keyv storage adapter. + +### Instance + +#### cacheableRequest(opts, [cb]) + +Returns an event emitter. + +##### opts + +Type: `object`, `string` + +- Any of the default request functions options. 
+- Any [`http-cache-semantics`](https://github.com/kornelski/http-cache-semantics#constructor-options) options. +- Any of the following: + +###### opts.cache + +Type: `boolean` +Default: `true` + +If the cache should be used. Setting this to false will completely bypass the cache for the current request. + +###### opts.strictTtl + +Type: `boolean` +Default: `false` + +If set to `true` once a cached resource has expired it is deleted and will have to be re-requested. + +If set to `false` (default), after a cached resource's TTL expires it is kept in the cache and will be revalidated on the next request with `If-None-Match`/`If-Modified-Since` headers. + +###### opts.maxTtl + +Type: `number` +Default: `undefined` + +Limits TTL. The `number` represents milliseconds. + +###### opts.automaticFailover + +Type: `boolean` +Default: `false` + +When set to `true`, if the DB connection fails we will automatically fallback to a network request. DB errors will still be emitted to notify you of the problem even though the request callback may succeed. + +###### opts.forceRefresh + +Type: `boolean` +Default: `false` + +Forces refreshing the cache. If the response could be retrieved from the cache, it will perform a new request and override the cache instead. + +##### cb + +Type: `function` + +The callback function which will receive the response as an argument. + +The response can be either a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) or a [responselike object](https://github.com/lukechilds/responselike). The response will also have a `fromCache` property set with a boolean value. + +##### .on('request', request) + +`request` event to get the request object of the request. + +**Note:** This event will only fire if an HTTP request is actually made, not when a response is retrieved from cache. However, you should always handle the `request` event to end the request and handle any potential request errors. + +##### .on('response', response) + +`response` event to get the response object from the HTTP request or cache. + +##### .on('error', error) + +`error` event emitted in case of an error with the cache. + +Errors emitted here will be an instance of `CacheableRequest.RequestError` or `CacheableRequest.CacheError`. You will only ever receive a `RequestError` if the request function throws (normally caused by invalid user input). Normal request errors should be handled inside the `request` event. + +To properly handle all error scenarios you should use the following pattern: + +```js +cacheableRequest('example.com', cb) + .on('error', err => { + if (err instanceof CacheableRequest.CacheError) { + handleCacheError(err); // Cache error + } else if (err instanceof CacheableRequest.RequestError) { + handleRequestError(err); // Request function thrown + } + }) + .on('request', req => { + req.on('error', handleRequestError); // Request error emitted + req.end(); + }); +``` + +**Note:** Database connection errors are emitted here, however `cacheable-request` will attempt to re-request the resource and bypass the cache on a connection error. Therefore a database connection error doesn't necessarily mean the request won't be fulfilled. 
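+
+Putting the per-request options above together, here is a minimal sketch. It is illustrative only: the host name, path and TTL value are placeholders, not part of the package documentation.
+
+```js
+const http = require('http');
+const CacheableRequest = require('cacheable-request');
+
+const cacheableRequest = new CacheableRequest(http.request);
+
+// Placeholder request options; maxTtl / automaticFailover / forceRefresh
+// are the cache options documented above.
+const cacheReq = cacheableRequest({
+	hostname: 'example.com',
+	path: '/',
+	maxTtl: 60 * 1000,        // cap cached entries at one minute
+	automaticFailover: true,  // fall back to the network if the cache store errors
+	forceRefresh: false       // allow responses to be served from cache
+}, response => {
+	console.log('from cache:', response.fromCache);
+});
+
+cacheReq.on('request', req => {
+	req.on('error', console.error); // request errors are emitted on the request object
+	req.end();
+});
+cacheReq.on('error', console.error); // cache errors (CacheableRequest.CacheError)
+```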
+ +## License + +MIT © Luke Childs diff --git a/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js new file mode 100644 index 0000000..2dd7574 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,52 @@ +'use strict'; +const {PassThrough: PassThroughStream} = require('stream'); + +module.exports = options => { + options = {...options}; + + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + + if (isBuffer) { + encoding = null; + } + + const stream = new PassThroughStream({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + let length = 0; + const chunks = []; + + stream.on('data', chunk => { + chunks.push(chunk); + + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; +}; diff --git a/node_modules/cacheable-request/node_modules/get-stream/index.d.ts b/node_modules/cacheable-request/node_modules/get-stream/index.d.ts new file mode 100644 index 0000000..8961df3 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/index.d.ts @@ -0,0 +1,108 @@ +/// +import {Stream} from 'stream'; + +declare class MaxBufferErrorClass extends Error { + readonly name: 'MaxBufferError'; + constructor(); +} + +declare namespace getStream { + interface Options { + /** + Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error. + + @default Infinity + */ + readonly maxBuffer?: number; + } + + interface OptionsWithEncoding extends Options { + /** + [Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + + @default 'utf8' + */ + readonly encoding?: EncodingType; + } + + type MaxBufferError = MaxBufferErrorClass; +} + +declare const getStream: { + /** + Get the `stream` as a string. + + @returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + + @example + ``` + import * as fs from 'fs'; + import getStream = require('get-stream'); + + (async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + // ,,))))))));, + // __)))))))))))))), + // \|/ -\(((((''''((((((((. + // -*-==//////(('' . `)))))), + // /|\ ))| o ;-. '((((( ,(, + // ( `| / ) ;))))' ,_))^;(~ + // | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + // ; ''''```` `: `:::|\,__,%% );`'; ~ + // | _ ) / `:|`----' `-' + // ______/\/~ | / / + // /~;;.____/;;' / ___--,-( `;;;/ + // / // _;______;'------~~~~~ /;;/\ / + // // | | / ; \;;,\ + // (<_ | ; /',/-----' _> + // \_| ||_ //~;~~~~~~~~~ + // `\_| (,~~ + // \~\ + // ~~ + })(); + ``` + */ + (stream: Stream, options?: getStream.OptionsWithEncoding): Promise; + + /** + Get the `stream` as a buffer. + + It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. 
+ */ + buffer( + stream: Stream, + options?: getStream.OptionsWithEncoding + ): Promise; + + /** + Get the `stream` as an array of values. + + It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + + - When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + - When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + - When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + */ + array( + stream: Stream, + options?: getStream.Options + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding<'buffer'> + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding + ): Promise; + + MaxBufferError: typeof MaxBufferErrorClass; + + // TODO: Remove this for the next major release + default: typeof getStream; +}; + +export = getStream; diff --git a/node_modules/cacheable-request/node_modules/get-stream/index.js b/node_modules/cacheable-request/node_modules/get-stream/index.js new file mode 100644 index 0000000..340ea7d --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/index.js @@ -0,0 +1,58 @@ +'use strict'; +const pump = require('pump'); +const bufferStream = require('./buffer-stream'); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + + let stream; + await new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +// TODO: Remove this for the next major release +module.exports.default = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/cacheable-request/node_modules/get-stream/license b/node_modules/cacheable-request/node_modules/get-stream/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit 
persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/cacheable-request/node_modules/get-stream/package.json b/node_modules/cacheable-request/node_modules/get-stream/package.json new file mode 100644 index 0000000..5c6bfca --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "get-stream@^5.1.0", + "_id": "get-stream@5.1.0", + "_inBundle": false, + "_integrity": "sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==", + "_location": "/cacheable-request/get-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "get-stream@^5.1.0", + "name": "get-stream", + "escapedName": "get-stream", + "rawSpec": "^5.1.0", + "saveSpec": null, + "fetchSpec": "^5.1.0" + }, + "_requiredBy": [ + "/cacheable-request" + ], + "_resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.1.0.tgz", + "_shasum": "01203cdc92597f9b909067c3e656cc1f4d3c4dc9", + "_spec": "get-stream@^5.1.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/cacheable-request", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/get-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "pump": "^3.0.0" + }, + "deprecated": false, + "description": "Get a stream as a string, buffer, or array", + "devDependencies": { + "@types/node": "^11.13.0", + "ava": "^1.4.1", + "into-stream": "^5.0.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=8" + }, + "files": [ + "index.js", + "index.d.ts", + "buffer-stream.js" + ], + "homepage": "https://github.com/sindresorhus/get-stream#readme", + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object" + ], + "license": "MIT", + "name": "get-stream", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/get-stream.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "5.1.0" +} diff --git a/node_modules/cacheable-request/node_modules/get-stream/readme.md b/node_modules/cacheable-request/node_modules/get-stream/readme.md new file mode 100644 index 0000000..b87a4d3 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/readme.md @@ -0,0 +1,123 @@ +# get-stream [](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); + +(async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + /* + ,,))))))));, + 
__)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +})(); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +Type: `Object` + +##### encoding + +Type: `string` +Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number` +Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +(async () => { + try { + await getStream(streamThatErrorsAtTheEnd('unicorn')); + } catch (error) { + console.log(error.bufferedData); + //=> 'unicorn' + } +})() +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. + + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/index.d.ts b/node_modules/cacheable-request/node_modules/lowercase-keys/index.d.ts new file mode 100644 index 0000000..dc90a75 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/index.d.ts @@ -0,0 +1,16 @@ +/** +Lowercase the keys of an object. + +@returns A new object with the keys lowercased. 
+ +@example +``` +import lowercaseKeys = require('lowercase-keys'); + +lowercaseKeys({FOO: true, bAr: false}); +//=> {foo: true, bar: false} +``` +*/ +declare function lowercaseKeys(object: {[key: string]: T}): {[key: string]: T}; + +export = lowercaseKeys; diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/index.js b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js new file mode 100644 index 0000000..357fb8f --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js @@ -0,0 +1,10 @@ +'use strict'; +module.exports = object => { + const result = {}; + + for (const [key, value] of Object.entries(object)) { + result[key.toLowerCase()] = value; + } + + return result; +}; diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/license b/node_modules/cacheable-request/node_modules/lowercase-keys/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/package.json b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json new file mode 100644 index 0000000..0f13964 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json @@ -0,0 +1,70 @@ +{ + "_from": "lowercase-keys@^2.0.0", + "_id": "lowercase-keys@2.0.0", + "_inBundle": false, + "_integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "_location": "/cacheable-request/lowercase-keys", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "lowercase-keys@^2.0.0", + "name": "lowercase-keys", + "escapedName": "lowercase-keys", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/cacheable-request" + ], + "_resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "_shasum": "2603e78b7b4b0006cbca2fbcc8a3202558ac9479", + "_spec": "lowercase-keys@^2.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/cacheable-request", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/lowercase-keys/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Lowercase the keys of an object", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=8" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/lowercase-keys#readme", + "keywords": [ + "object", + "assign", + "extend", + "properties", + "lowercase", + "lower-case", + "case", + "keys", + "key" + ], + "license": "MIT", + "name": "lowercase-keys", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/lowercase-keys.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "2.0.0" +} diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md new file mode 100644 index 0000000..b1ed061 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md @@ -0,0 +1,32 @@ +# lowercase-keys [](https://travis-ci.org/sindresorhus/lowercase-keys) + +> Lowercase the keys of an object + + +## Install + +``` +$ npm install lowercase-keys +``` + + +## Usage + +```js +const lowercaseKeys = require('lowercase-keys'); + +lowercaseKeys({FOO: true, bAr: false}); +//=> {foo: true, bar: false} +``` + + +## API + +### lowercaseKeys(object) + +Returns a new object with the keys lowercased. 
+ + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/cacheable-request/package.json b/node_modules/cacheable-request/package.json new file mode 100644 index 0000000..a1672f7 --- /dev/null +++ b/node_modules/cacheable-request/package.json @@ -0,0 +1,94 @@ +{ + "_from": "cacheable-request@^6.0.0", + "_id": "cacheable-request@6.1.0", + "_inBundle": false, + "_integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "_location": "/cacheable-request", + "_phantomChildren": { + "pump": "3.0.0" + }, + "_requested": { + "type": "range", + "registry": true, + "raw": "cacheable-request@^6.0.0", + "name": "cacheable-request", + "escapedName": "cacheable-request", + "rawSpec": "^6.0.0", + "saveSpec": null, + "fetchSpec": "^6.0.0" + }, + "_requiredBy": [ + "/got" + ], + "_resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "_shasum": "20ffb8bd162ba4be11e9567d823db651052ca912", + "_spec": "cacheable-request@^6.0.0", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/got", + "author": { + "name": "Luke Childs", + "email": "lukechilds123@gmail.com", + "url": "http://lukechilds.co.uk" + }, + "bugs": { + "url": "https://github.com/lukechilds/cacheable-request/issues" + }, + "bundleDependencies": false, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "deprecated": false, + "description": "Wrap native HTTP requests with RFC compliant cache support", + "devDependencies": { + "@keyv/sqlite": "^2.0.0", + "ava": "^1.1.0", + "coveralls": "^3.0.0", + "create-test-server": "3.0.0", + "delay": "^4.0.0", + "eslint-config-xo-lukechilds": "^1.0.0", + "nyc": "^14.1.1", + "pify": "^4.0.0", + "sqlite3": "^4.0.2", + "this": "^1.0.2", + "xo": "^0.23.0" + }, + "engines": { + "node": ">=8" + }, + "files": [ + "src" + ], + "homepage": "https://github.com/lukechilds/cacheable-request#readme", + "keywords": [ + "HTTP", + "HTTPS", + "cache", + "caching", + "layer", + "cacheable", + "RFC 7234", + "RFC", + "7234", + "compliant" + ], + "license": "MIT", + "main": "src/index.js", + "name": "cacheable-request", + "repository": { + "type": "git", + "url": "git+https://github.com/lukechilds/cacheable-request.git" + }, + "scripts": { + "coverage": "nyc report --reporter=text-lcov | coveralls", + "test": "xo && nyc ava" + }, + "version": "6.1.0", + "xo": { + "extends": "xo-lukechilds" + } +} diff --git a/node_modules/cacheable-request/src/index.js b/node_modules/cacheable-request/src/index.js new file mode 100644 index 0000000..3fcea3f --- /dev/null +++ b/node_modules/cacheable-request/src/index.js @@ -0,0 +1,251 @@ +'use strict'; + +const EventEmitter = require('events'); +const urlLib = require('url'); +const normalizeUrl = require('normalize-url'); +const getStream = require('get-stream'); +const CachePolicy = require('http-cache-semantics'); +const Response = require('responselike'); +const lowercaseKeys = require('lowercase-keys'); +const cloneResponse = require('clone-response'); +const Keyv = require('keyv'); + +class CacheableRequest { + constructor(request, cacheAdapter) { + if (typeof request !== 'function') { + throw new TypeError('Parameter `request` must be a function'); + } + + this.cache = new Keyv({ + uri: typeof cacheAdapter === 'string' && cacheAdapter, + store: typeof cacheAdapter !== 
'string' && cacheAdapter, + namespace: 'cacheable-request' + }); + + return this.createCacheableRequest(request); + } + + createCacheableRequest(request) { + return (opts, cb) => { + let url; + if (typeof opts === 'string') { + url = normalizeUrlObject(urlLib.parse(opts)); + opts = {}; + } else if (opts instanceof urlLib.URL) { + url = normalizeUrlObject(urlLib.parse(opts.toString())); + opts = {}; + } else { + const [pathname, ...searchParts] = (opts.path || '').split('?'); + const search = searchParts.length > 0 ? + `?${searchParts.join('?')}` : + ''; + url = normalizeUrlObject({ ...opts, pathname, search }); + } + + opts = { + headers: {}, + method: 'GET', + cache: true, + strictTtl: false, + automaticFailover: false, + ...opts, + ...urlObjectToRequestOptions(url) + }; + opts.headers = lowercaseKeys(opts.headers); + + const ee = new EventEmitter(); + const normalizedUrlString = normalizeUrl( + urlLib.format(url), + { + stripWWW: false, + removeTrailingSlash: false, + stripAuthentication: false + } + ); + const key = `${opts.method}:${normalizedUrlString}`; + let revalidate = false; + let madeRequest = false; + + const makeRequest = opts => { + madeRequest = true; + let requestErrored = false; + let requestErrorCallback; + + const requestErrorPromise = new Promise(resolve => { + requestErrorCallback = () => { + if (!requestErrored) { + requestErrored = true; + resolve(); + } + }; + }); + + const handler = response => { + if (revalidate && !opts.forceRefresh) { + response.status = response.statusCode; + const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); + if (!revalidatedPolicy.modified) { + const headers = revalidatedPolicy.policy.responseHeaders(); + response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url); + response.cachePolicy = revalidatedPolicy.policy; + response.fromCache = true; + } + } + + if (!response.fromCache) { + response.cachePolicy = new CachePolicy(opts, response, opts); + response.fromCache = false; + } + + let clonedResponse; + if (opts.cache && response.cachePolicy.storable()) { + clonedResponse = cloneResponse(response); + + (async () => { + try { + const bodyPromise = getStream.buffer(response); + + await Promise.race([ + requestErrorPromise, + new Promise(resolve => response.once('end', resolve)) + ]); + + if (requestErrored) { + return; + } + + const body = await bodyPromise; + + const value = { + cachePolicy: response.cachePolicy.toObject(), + url: response.url, + statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, + body + }; + + let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; + if (opts.maxTtl) { + ttl = ttl ? 
Math.min(ttl, opts.maxTtl) : opts.maxTtl; + } + + await this.cache.set(key, value, ttl); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } else if (opts.cache && revalidate) { + (async () => { + try { + await this.cache.delete(key); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } + + ee.emit('response', clonedResponse || response); + if (typeof cb === 'function') { + cb(clonedResponse || response); + } + }; + + try { + const req = request(opts, handler); + req.once('error', requestErrorCallback); + req.once('abort', requestErrorCallback); + ee.emit('request', req); + } catch (error) { + ee.emit('error', new CacheableRequest.RequestError(error)); + } + }; + + (async () => { + const get = async opts => { + await Promise.resolve(); + + const cacheEntry = opts.cache ? await this.cache.get(key) : undefined; + if (typeof cacheEntry === 'undefined') { + return makeRequest(opts); + } + + const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); + if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) { + const headers = policy.responseHeaders(); + const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); + response.cachePolicy = policy; + response.fromCache = true; + + ee.emit('response', response); + if (typeof cb === 'function') { + cb(response); + } + } else { + revalidate = cacheEntry; + opts.headers = policy.revalidationHeaders(opts); + makeRequest(opts); + } + }; + + const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error)); + this.cache.once('error', errorHandler); + ee.on('response', () => this.cache.removeListener('error', errorHandler)); + + try { + await get(opts); + } catch (error) { + if (opts.automaticFailover && !madeRequest) { + makeRequest(opts); + } + + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + + return ee; + }; + } +} + +function urlObjectToRequestOptions(url) { + const options = { ...url }; + options.path = `${url.pathname || '/'}${url.search || ''}`; + delete options.pathname; + delete options.search; + return options; +} + +function normalizeUrlObject(url) { + // If url was parsed by url.parse or new URL: + // - hostname will be set + // - host will be hostname[:port] + // - port will be set if it was explicit in the parsed string + // Otherwise, url was from request options: + // - hostname or host may be set + // - host shall not have port encoded + return { + protocol: url.protocol, + auth: url.auth, + hostname: url.hostname || url.host || 'localhost', + port: url.port, + pathname: url.pathname, + search: url.search + }; +} + +CacheableRequest.RequestError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'RequestError'; + Object.assign(this, error); + } +}; + +CacheableRequest.CacheError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'CacheError'; + Object.assign(this, error); + } +}; + +module.exports = CacheableRequest; diff --git a/node_modules/camelcase/index.d.ts b/node_modules/camelcase/index.d.ts new file mode 100644 index 0000000..58f2069 --- /dev/null +++ b/node_modules/camelcase/index.d.ts @@ -0,0 +1,63 @@ +declare namespace camelcase { + interface Options { + /** + Uppercase the first character: `foo-bar` → `FooBar`. 
+ + @default false + */ + readonly pascalCase?: boolean; + } +} + +declare const camelcase: { + /** + Convert a dash/dot/underscore/space separated string to camelCase or PascalCase: `foo-bar` → `fooBar`. + + @param input - String to convert to camel case. + + @example + ``` + import camelCase = require('camelcase'); + + camelCase('foo-bar'); + //=> 'fooBar' + + camelCase('foo_bar'); + //=> 'fooBar' + + camelCase('Foo-Bar'); + //=> 'fooBar' + + camelCase('Foo-Bar', {pascalCase: true}); + //=> 'FooBar' + + camelCase('--foo.bar', {pascalCase: false}); + //=> 'fooBar' + + camelCase('foo bar'); + //=> 'fooBar' + + console.log(process.argv[3]); + //=> '--foo-bar' + camelCase(process.argv[3]); + //=> 'fooBar' + + camelCase(['foo', 'bar']); + //=> 'fooBar' + + camelCase(['__foo__', '--bar'], {pascalCase: true}); + //=> 'FooBar' + ``` + */ + (input: string | ReadonlyArray, options?: camelcase.Options): string; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function camelcase( + // input: string | ReadonlyArray, + // options?: camelcase.Options + // ): string; + // export = camelcase; + default: typeof camelcase; +}; + +export = camelcase; diff --git a/node_modules/camelcase/index.js b/node_modules/camelcase/index.js new file mode 100644 index 0000000..579f99b --- /dev/null +++ b/node_modules/camelcase/index.js @@ -0,0 +1,76 @@ +'use strict'; + +const preserveCamelCase = string => { + let isLastCharLower = false; + let isLastCharUpper = false; + let isLastLastCharUpper = false; + + for (let i = 0; i < string.length; i++) { + const character = string[i]; + + if (isLastCharLower && /[a-zA-Z]/.test(character) && character.toUpperCase() === character) { + string = string.slice(0, i) + '-' + string.slice(i); + isLastCharLower = false; + isLastLastCharUpper = isLastCharUpper; + isLastCharUpper = true; + i++; + } else if (isLastCharUpper && isLastLastCharUpper && /[a-zA-Z]/.test(character) && character.toLowerCase() === character) { + string = string.slice(0, i - 1) + '-' + string.slice(i - 1); + isLastLastCharUpper = isLastCharUpper; + isLastCharUpper = false; + isLastCharLower = true; + } else { + isLastCharLower = character.toLowerCase() === character && character.toUpperCase() !== character; + isLastLastCharUpper = isLastCharUpper; + isLastCharUpper = character.toUpperCase() === character && character.toLowerCase() !== character; + } + } + + return string; +}; + +const camelCase = (input, options) => { + if (!(typeof input === 'string' || Array.isArray(input))) { + throw new TypeError('Expected the input to be `string | string[]`'); + } + + options = Object.assign({ + pascalCase: false + }, options); + + const postProcess = x => options.pascalCase ? x.charAt(0).toUpperCase() + x.slice(1) : x; + + if (Array.isArray(input)) { + input = input.map(x => x.trim()) + .filter(x => x.length) + .join('-'); + } else { + input = input.trim(); + } + + if (input.length === 0) { + return ''; + } + + if (input.length === 1) { + return options.pascalCase ? 
input.toUpperCase() : input.toLowerCase(); + } + + const hasUpperCase = input !== input.toLowerCase(); + + if (hasUpperCase) { + input = preserveCamelCase(input); + } + + input = input + .replace(/^[_.\- ]+/, '') + .toLowerCase() + .replace(/[_.\- ]+(\w|$)/g, (_, p1) => p1.toUpperCase()) + .replace(/\d+(\w|$)/g, m => m.toUpperCase()); + + return postProcess(input); +}; + +module.exports = camelCase; +// TODO: Remove this for the next major release +module.exports.default = camelCase; diff --git a/node_modules/camelcase/license b/node_modules/camelcase/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/camelcase/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/camelcase/package.json b/node_modules/camelcase/package.json new file mode 100644 index 0000000..23edbf9 --- /dev/null +++ b/node_modules/camelcase/package.json @@ -0,0 +1,76 @@ +{ + "_from": "camelcase@^5.3.1", + "_id": "camelcase@5.3.1", + "_inBundle": false, + "_integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "_location": "/camelcase", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "camelcase@^5.3.1", + "name": "camelcase", + "escapedName": "camelcase", + "rawSpec": "^5.3.1", + "saveSpec": null, + "fetchSpec": "^5.3.1" + }, + "_requiredBy": [ + "/boxen", + "/yargs-parser" + ], + "_resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "_shasum": "e3c9b31569e106811df242f715725a1f4c494320", + "_spec": "camelcase@^5.3.1", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/boxen", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/camelcase/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Convert a dash/dot/underscore/space separated string to camelCase or PascalCase: `foo-bar` → `fooBar`", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/camelcase#readme", + "keywords": [ + "camelcase", + "camel-case", + "camel", + "case", + "dash", + "hyphen", + "dot", + "underscore", + "separator", + "string", + "text", + "convert", + "pascalcase", + "pascal-case" + ], + 
"license": "MIT", + "name": "camelcase", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/camelcase.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "5.3.1" +} diff --git a/node_modules/camelcase/readme.md b/node_modules/camelcase/readme.md new file mode 100644 index 0000000..fde2726 --- /dev/null +++ b/node_modules/camelcase/readme.md @@ -0,0 +1,99 @@ +# camelcase [](https://travis-ci.org/sindresorhus/camelcase) + +> Convert a dash/dot/underscore/space separated string to camelCase or PascalCase: `foo-bar` → `fooBar` + +--- + + + + Get professional support for 'camelcase' with a Tidelift subscription + + + + Tidelift helps make open source sustainable for maintainers while giving companiesassurances about security, maintenance, and licensing for their dependencies. + + + +--- + +## Install + +``` +$ npm install camelcase +``` + + +## Usage + +```js +const camelCase = require('camelcase'); + +camelCase('foo-bar'); +//=> 'fooBar' + +camelCase('foo_bar'); +//=> 'fooBar' + +camelCase('Foo-Bar'); +//=> 'fooBar' + +camelCase('Foo-Bar', {pascalCase: true}); +//=> 'FooBar' + +camelCase('--foo.bar', {pascalCase: false}); +//=> 'fooBar' + +camelCase('foo bar'); +//=> 'fooBar' + +console.log(process.argv[3]); +//=> '--foo-bar' +camelCase(process.argv[3]); +//=> 'fooBar' + +camelCase(['foo', 'bar']); +//=> 'fooBar' + +camelCase(['__foo__', '--bar'], {pascalCase: true}); +//=> 'FooBar' +``` + + +## API + +### camelCase(input, [options]) + +#### input + +Type: `string` `string[]` + +String to convert to camel case. + +#### options + +Type: `Object` + +##### pascalCase + +Type: `boolean` +Default: `false` + +Uppercase the first character: `foo-bar` → `FooBar` + + +## Security + +To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. + + +## Related + +- [decamelize](https://github.com/sindresorhus/decamelize) - The inverse of this module +- [uppercamelcase](https://github.com/SamVerschueren/uppercamelcase) - Like this module, but to PascalCase instead of camelCase +- [titleize](https://github.com/sindresorhus/titleize) - Capitalize every word in string +- [humanize-string](https://github.com/sindresorhus/humanize-string) - Convert a camelized/dasherized/underscored string into a humanized one + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/caseless/LICENSE b/node_modules/caseless/LICENSE new file mode 100644 index 0000000..61789f4 --- /dev/null +++ b/node_modules/caseless/LICENSE @@ -0,0 +1,28 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +1. Definitions. +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: +You must give any other recipients of the Work or Derivative Works a copy of this License; and +You must cause any modified files to carry prominent notices stating that You changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/caseless/README.md b/node_modules/caseless/README.md new file mode 100644 index 0000000..e5077a2 --- /dev/null +++ b/node_modules/caseless/README.md @@ -0,0 +1,45 @@ +## Caseless -- wrap an object to set and get properties with caseless semantics but also preserve casing. + +This library is incredibly useful when working with HTTP headers. It allows you to get/set/check for headers in a caseless manner while also preserving the casing of headers the first time they are set. + +## Usage + +```javascript +var headers = {} + , c = caseless(headers) + ; +c.set('a-Header', 'asdf') +c.get('a-header') === 'asdf' +``` + +## has(key) + +Has takes a name and, if it finds a matching header, will return that header name with the preserved casing it was set with. + +```javascript +c.has('a-header') === 'a-Header' +``` + +## set(key, value[, clobber=true]) + +Set is fairly straightforward except that if the header already exists and clobber is disabled it will append `','+value` to the existing header. + +```javascript +c.set('a-Header', 'fdas') +c.set('a-HEADER', 'more', false) +c.get('a-header') === 'fdas,more' +``` + +## swap(key) + +Swaps the casing of a header with the new one that is passed in. 
+ +```javascript +var headers = {} + , c = caseless(headers) + ; +c.set('a-Header', 'fdas') +c.swap('a-HEADER') +c.has('a-header') === 'a-HEADER' +headers === {'a-HEADER': 'fdas'} +``` diff --git a/node_modules/caseless/index.js b/node_modules/caseless/index.js new file mode 100644 index 0000000..b194734 --- /dev/null +++ b/node_modules/caseless/index.js @@ -0,0 +1,67 @@ +function Caseless (dict) { + this.dict = dict || {} +} +Caseless.prototype.set = function (name, value, clobber) { + if (typeof name === 'object') { + for (var i in name) { + this.set(i, name[i], value) + } + } else { + if (typeof clobber === 'undefined') clobber = true + var has = this.has(name) + + if (!clobber && has) this.dict[has] = this.dict[has] + ',' + value + else this.dict[has || name] = value + return has + } +} +Caseless.prototype.has = function (name) { + var keys = Object.keys(this.dict) + , name = name.toLowerCase() + ; + for (var i=0;i 0; +} + +function Chalk(options) { + // We check for this.template here since calling `chalk.constructor()` + // by itself will have a `this` of a previously constructed chalk object + if (!this || !(this instanceof Chalk) || this.template) { + const chalk = {}; + applyOptions(chalk, options); + + chalk.template = function () { + const args = [].slice.call(arguments); + return chalkTag.apply(null, [chalk.template].concat(args)); + }; + + Object.setPrototypeOf(chalk, Chalk.prototype); + Object.setPrototypeOf(chalk.template, chalk); + + chalk.template.constructor = Chalk; + + return chalk.template; + } + + applyOptions(this, options); +} + +// Use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001B[94m'; +} + +for (const key of Object.keys(ansiStyles)) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + styles[key] = { + get() { + const codes = ansiStyles[key]; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key); + } + }; +} + +styles.visible = { + get() { + return build.call(this, this._styles || [], true, 'visible'); + } +}; + +ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g'); +for (const model of Object.keys(ansiStyles.color.ansi)) { + if (skipModels.has(model)) { + continue; + } + + styles[model] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.color.close, + closeRe: ansiStyles.color.closeRe + }; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g'); +for (const model of Object.keys(ansiStyles.bgColor.ansi)) { + if (skipModels.has(model)) { + continue; + } + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.bgColor.close, + closeRe: ansiStyles.bgColor.closeRe + }; + return build.call(this, this._styles ? 
this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +const proto = Object.defineProperties(() => {}, styles); + +function build(_styles, _empty, key) { + const builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder._empty = _empty; + + const self = this; + + Object.defineProperty(builder, 'level', { + enumerable: true, + get() { + return self.level; + }, + set(level) { + self.level = level; + } + }); + + Object.defineProperty(builder, 'enabled', { + enumerable: true, + get() { + return self.enabled; + }, + set(enabled) { + self.enabled = enabled; + } + }); + + // See below for fix regarding invisible grey/dim combination on Windows + builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey'; + + // `__proto__` is used because we must return a function, but there is + // no way to create a function with a different prototype + builder.__proto__ = proto; // eslint-disable-line no-proto + + return builder; +} + +function applyStyle() { + // Support varags, but simply cast to string in case there's only one arg + const args = arguments; + const argsLen = args.length; + let str = String(arguments[0]); + + if (argsLen === 0) { + return ''; + } + + if (argsLen > 1) { + // Don't slice `arguments`, it prevents V8 optimizations + for (let a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || this.level <= 0 || !str) { + return this._empty ? '' : str; + } + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + const originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && this.hasGrey) { + ansiStyles.dim.open = ''; + } + + for (const code of this._styles.slice().reverse()) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + + // Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS + // https://github.com/chalk/chalk/pull/92 + str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`); + } + + // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue + ansiStyles.dim.open = originalDim; + + return str; +} + +function chalkTag(chalk, strings) { + if (!Array.isArray(strings)) { + // If chalk() was called by itself or with a string, + // return the string itself as a string. 
+ return [].slice.call(arguments, 1).join(' '); + } + + const args = [].slice.call(arguments, 2); + const parts = [strings.raw[0]]; + + for (let i = 1; i < strings.length; i++) { + parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&')); + parts.push(String(strings.raw[i])); + } + + return template(chalk, parts.join('')); +} + +Object.defineProperties(Chalk.prototype, styles); + +module.exports = Chalk(); // eslint-disable-line new-cap +module.exports.supportsColor = stdoutColor; +module.exports.default = module.exports; // For TypeScript diff --git a/node_modules/chalk/index.js.flow b/node_modules/chalk/index.js.flow new file mode 100644 index 0000000..622caaa --- /dev/null +++ b/node_modules/chalk/index.js.flow @@ -0,0 +1,93 @@ +// @flow strict + +type TemplateStringsArray = $ReadOnlyArray; + +export type Level = $Values<{ + None: 0, + Basic: 1, + Ansi256: 2, + TrueColor: 3 +}>; + +export type ChalkOptions = {| + enabled?: boolean, + level?: Level +|}; + +export type ColorSupport = {| + level: Level, + hasBasic: boolean, + has256: boolean, + has16m: boolean +|}; + +export interface Chalk { + (...text: string[]): string, + (text: TemplateStringsArray, ...placeholders: string[]): string, + constructor(options?: ChalkOptions): Chalk, + enabled: boolean, + level: Level, + rgb(r: number, g: number, b: number): Chalk, + hsl(h: number, s: number, l: number): Chalk, + hsv(h: number, s: number, v: number): Chalk, + hwb(h: number, w: number, b: number): Chalk, + bgHex(color: string): Chalk, + bgKeyword(color: string): Chalk, + bgRgb(r: number, g: number, b: number): Chalk, + bgHsl(h: number, s: number, l: number): Chalk, + bgHsv(h: number, s: number, v: number): Chalk, + bgHwb(h: number, w: number, b: number): Chalk, + hex(color: string): Chalk, + keyword(color: string): Chalk, + + +reset: Chalk, + +bold: Chalk, + +dim: Chalk, + +italic: Chalk, + +underline: Chalk, + +inverse: Chalk, + +hidden: Chalk, + +strikethrough: Chalk, + + +visible: Chalk, + + +black: Chalk, + +red: Chalk, + +green: Chalk, + +yellow: Chalk, + +blue: Chalk, + +magenta: Chalk, + +cyan: Chalk, + +white: Chalk, + +gray: Chalk, + +grey: Chalk, + +blackBright: Chalk, + +redBright: Chalk, + +greenBright: Chalk, + +yellowBright: Chalk, + +blueBright: Chalk, + +magentaBright: Chalk, + +cyanBright: Chalk, + +whiteBright: Chalk, + + +bgBlack: Chalk, + +bgRed: Chalk, + +bgGreen: Chalk, + +bgYellow: Chalk, + +bgBlue: Chalk, + +bgMagenta: Chalk, + +bgCyan: Chalk, + +bgWhite: Chalk, + +bgBlackBright: Chalk, + +bgRedBright: Chalk, + +bgGreenBright: Chalk, + +bgYellowBright: Chalk, + +bgBlueBright: Chalk, + +bgMagentaBright: Chalk, + +bgCyanBright: Chalk, + +bgWhiteBrigh: Chalk, + + supportsColor: ColorSupport +}; + +declare module.exports: Chalk; diff --git a/node_modules/chalk/license b/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json new file mode 100644 index 0000000..a04eed1 --- /dev/null +++ b/node_modules/chalk/package.json @@ -0,0 +1,105 @@ +{ + "_from": "chalk@^2.4.2", + "_id": "chalk@2.4.2", + "_inBundle": false, + "_integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "_location": "/chalk", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "chalk@^2.4.2", + "name": "chalk", + "escapedName": "chalk", + "rawSpec": "^2.4.2", + "saveSpec": null, + "fetchSpec": "^2.4.2" + }, + "_requiredBy": [ + "/boxen", + "/json-server", + "/update-notifier" + ], + "_resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "_shasum": "cd42541677a54333cf541a49108c1432b44c9424", + "_spec": "chalk@^2.4.2", + "_where": "/Users/nate/code/pluralsight/async-programming-promises/node_modules/json-server", + "bugs": { + "url": "https://github.com/chalk/chalk/issues" + }, + "bundleDependencies": false, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "deprecated": false, + "description": "Terminal string styling done right", + "devDependencies": { + "ava": "*", + "coveralls": "^3.0.0", + "execa": "^0.9.0", + "flow-bin": "^0.68.0", + "import-fresh": "^2.0.0", + "matcha": "^0.7.0", + "nyc": "^11.0.2", + "resolve-from": "^4.0.0", + "typescript": "^2.5.3", + "xo": "*" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js", + "templates.js", + "types/index.d.ts", + "index.js.flow" + ], + "homepage": "https://github.com/chalk/chalk#readme", + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "license": "MIT", + "name": "chalk", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/chalk.git" + }, + "scripts": { + "bench": "matcha benchmark.js", + "coveralls": "nyc report --reporter=text-lcov | coveralls", + "test": "xo && tsc --project types && flow --max-warnings=0 && nyc ava" + }, + "types": "types/index.d.ts", + "version": "2.4.2", + "xo": { + "envs": [ + "node", + "mocha" + ], + "ignores": [ + "test/_flow.js" + ] + } +} diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md new file mode 100644 index 0000000..d298e2c --- /dev/null +++ b/node_modules/chalk/readme.md @@ -0,0 +1,314 @@ + + + + + + + + + +> Terminal string styling done right + +[](https://travis-ci.org/chalk/chalk) [](https://coveralls.io/github/chalk/chalk?branch=master) [](https://www.youtube.com/watch?v=9auOCbH5Ns4) [](https://github.com/xojs/xo) [](https://github.com/sindresorhus/awesome-nodejs) + +### [See what's new in Chalk 2](https://github.com/chalk/chalk/releases/tag/v2.0.0) + + + + +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color 
support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~23,000 packages](https://www.npmjs.com/browse/depended/chalk) as of December 31, 2017 + + +## Install + +```console +$ npm install chalk +``` + + + + + + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + + +## API + +### chalk.` + + + + {title} + {statusCode} {error} + {stack} + + +