
Merge pull request #20 from pmarsceill/v0.1.6

v0.1.6 release
Patrick Marsceill 2018-11-16 12:01:17 -05:00 committed by GitHub
commit fa40528acd
4135 changed files with 2859 additions and 399324 deletions

.gitignore vendored

@@ -3,3 +3,4 @@
 .sass-cache
 _site
 Gemfile.lock
+node_modules


@@ -1,6 +1,7 @@
 {
   "ignoreFiles" : [
-    "assets/css/just-the-docs.scss"
+    "assets/css/just-the-docs.scss",
+    "_sass/vendor/**/*.scss"
   ],
   "extends": [
     "stylelint-config-primer"


@@ -18,14 +18,12 @@ description: A Jekyll theme for documentation
 baseurl: "/just-the-docs/" # the subpath of your site, e.g. /blog
 # url: "" # the base hostname & protocol for your site, e.g. http://example.com
-sass:
-  # Load dependancies
-  load_paths:
-    - node_modules/
 permalink: pretty
 exclude: ["node_modules/", "*.gemspec", "*.gem", "Gemfile", "Gemfile.lock", "package.json", "script/", "LICENSE.txt", "lib/", "bin/", "README.md", "Rakefile"]
+# Enable or disable the site search
+search_enabled: true
 # Aux links for the upper right navigation
 aux_links:
   "Just the Docs on GitHub":


@@ -4,7 +4,9 @@
   <title>{{ page.title }} - {{ site.title }}</title>
   <link rel="stylesheet" href="{{ "/assets/css/just-the-docs.css" | absolute_url }}">
+  {% if site.search_enabled != nil %}
   <script type="text/javascript" src="{{ "/assets/js/vendor/lunr.min.js" | absolute_url }}"></script>
+  {% endif %}
   <script type="text/javascript" src="{{ "/assets/js/just-the-docs.js" | absolute_url }}"></script>
   <meta name="viewport" content="width=device-width, initial-scale=1">


@@ -16,7 +16,8 @@
   </div>
   <div class="main-content-wrap">
     <div class="page-header">
-      <div class="main-content pb-0">
+      <div class="main-content">
+        {% if site.search_enabled != nil %}
         <div class="search js-search">
           <div class="search-input-wrap">
             <input type="text" class="js-search-input search-input" placeholder="Search {{ site.title }}" aria-label="Search {{ site.title }}" autocomplete="off">
@@ -24,8 +25,9 @@
           </div>
           <div class="js-search-results search-results-wrap"></div>
         </div>
+        {% endif %}
         {% if site.aux_links != nil %}
-        <ul class="list-style-none text-small mt-md-2 pb-4 pb-md-0 js-aux-nav aux-nav">
+        <ul class="list-style-none text-small mt-md-1 mb-md-1 pb-4 pb-md-0 js-aux-nav aux-nav">
           {% for link in site.aux_links %}
             <li class="d-inline-block my-0{% unless forloop.last %} mr-2{% endunless %}"><a href="{{ link.last }}">{{ link.first }}</a></li>
           {% endfor %}


@@ -1,7 +1,7 @@
 //
 // Base element style overrides
 //
-// stylelint-disable selector-no-type
+// stylelint-disable selector-no-type, selector-max-type

 * {
   box-sizing: border-box;


@@ -1,7 +1,7 @@
 //
 // Code and syntax highlighting
 //
-// stylelint-disable selector-no-qualifying-type, declaration-block-semicolon-newline-after,declaration-block-single-line-max-declarations, selector-no-type
+// stylelint-disable selector-no-qualifying-type, declaration-block-semicolon-newline-after,declaration-block-single-line-max-declarations, selector-no-type, selector-max-type

 code {
   padding: 0.2em 0.15em;


@@ -1,7 +1,7 @@
 //
 // Styles for rendered markdown in the .main-content container
 //
-// stylelint-disable selector-no-type, max-nesting-depth, selector-max-compound-selectors
+// stylelint-disable selector-no-type, max-nesting-depth, selector-max-compound-selectors, selector-max-type

 .page-content {
   ul,
@@ -56,6 +56,23 @@
   }
 }

+.task-list {
+  padding-left: 0;
+}
+
+.task-list-item {
+  display: flex;
+  align-items: center;
+
+  &::before {
+    content: "";
+  }
+}
+
+.task-list-item-checkbox {
+  margin-right: 0.6em;
+}
+
 hr + * {
   margin-top: 0;
 }


@@ -79,8 +79,6 @@
   @include mq(md) {
     background-color: $white;
-    border-bottom: $border $border-color;
   }

   .main-content {
@@ -88,8 +86,11 @@
     @include mq(md) {
       display: flex;
-      justify-content: space-between;
+      justify-content: flex-end;
+      height: 60px;
       padding-top: $sp-4;
+      padding-bottom: $sp-4;
+      border-bottom: $border $border-color;
     }
   }
 }


@@ -14,7 +14,7 @@
   z-index: 101;
   height: 60px;
   padding-top: $sp-4;
-  box-shadow: inset 0 -1px 0 $border-color;
+  border-bottom: $border $border-color;
   }
 }
@@ -38,6 +38,8 @@
 }

 .navigation-list-item {
+  position: relative;
+
   &::before {
     position: absolute;
     margin-top: 0.3em;


@@ -17,7 +17,7 @@
   display: block;
   padding-top: $sp-1;
   padding-right: 0;
-  padding-bottom: $sp-4;
+  padding-bottom: 0;
   padding-left: 0;
   margin-bottom: 0;
   background-color: transparent;


@@ -1,7 +1,7 @@
 //
 // Tables
 //
-// stylelint-disable max-nesting-depth, selector-no-type
+// stylelint-disable max-nesting-depth, selector-no-type, selector-max-type

 table {
   display: block;


@@ -1,7 +1,7 @@
 //
 // Typography
 //
-// stylelint-disable primer/selector-no-utility, selector-no-type
+// stylelint-disable primer/selector-no-utility, selector-no-type, selector-max-type

 h1,
 .text-alpha {


@@ -7,7 +7,7 @@
 // Import dependancies
 //
-@import "./normalize.scss/normalize.scss";
+@import "./vendor/normalize.scss/normalize.scss";

 //
 // Import Just the docs scss


@@ -155,8 +155,10 @@ function initSearch() {
 function ready(){
   toggleNav();
+  if (typeof lunr !== 'undefined') {
     initSearch();
+  }
 }

 // in case the document is already rendered
 if (document.readyState!='loading') ready();

docs/configuration.md Normal file

@ -0,0 +1,25 @@
---
layout: default
title: Configuration
nav_order: 2
---
# Configuration
Just the Docs has some specific configuration parameters that can be defined in your Jekyll site's `_config.yml` file.
## Search enabled
```yml
# Enable or disable the site search
search_enabled: true
```
## Aux links
```yml
# Aux links for the upper right navigation
aux_links:
"Just the Docs on GitHub":
- "//github.com/pmarsceill/just-the-docs"
```


@@ -1,7 +1,7 @@
 ---
 layout: default
 title: Customization
-nav_order: 5
+nav_order: 6
 ---

 # Customization


@@ -94,6 +94,12 @@ end
 - level 2 item
 - level 1 item

+### And a task list
+
+- [ ] Hello, this is a TODO item
+- [ ] Hello, this is another TODO item
+- [x] Goodbye, this item is done
+
 ### Small image

 ![](https://assets-cdn.github.com/images/icons/emoji/octocat.png)


@@ -1,7 +1,7 @@
 ---
 layout: default
 title: Navigation Structure
-nav_order: 4
+nav_order: 5
 ---

 # Navigation Structure


@@ -1,7 +1,7 @@
 ---
 layout: default
 title: Search
-nav_order: 6
+nav_order: 7
 ---

 # Search
@@ -14,6 +14,8 @@ Just the docs uses [lunr.js](http://lunrjs.com) to add a client-side search inte
 ## Set up search

+### 1. Generate search index
+
 Before you can use search, you must initialize the feature by running this
 rake command that comes with the `just-the-docs`
@@ -41,3 +43,12 @@
 ```

 _Note: If you don't run this rake command or create this file manually, search will not work (or it will use the search index data from this docs site, not your site's content)._
+
+### 2. Enable search in configuration
+
+In your site's `_config.yml` enable search:
+
+```yml
+# Enable or disable the site search
+search_enabled: true
+```


@@ -2,7 +2,7 @@
 layout: default
 title: Code
 parent: UI Components
-nav_order: 5
+nav_order: 6
 ---

 # Code


@ -0,0 +1,98 @@
---
layout: default
title: Lists
parent: UI Components
nav_order: 5
---
# Lists
{:.no_toc}
## Table of contents
{: .no_toc .text-delta }
1. TOC
{:toc}
---
Most lists can be rendered with pure markdown...
## Unordered list
<div class="code-example" markdown="1">
- Item 1
- Item 2
- Item 3
_or_
* Item 1
* Item 2
* Item 3
</div>
```markdown
- Item 1
- Item 2
- Item 3
_or_
* Item 1
* Item 2
* Item 3
```
## Ordered list
<div class="code-example" markdown="1">
1. Item 1
1. Item 2
1. Item 3
</div>
```markdown
1. Item 1
1. Item 2
1. Item 3
```
## Task list
<div class="code-example" markdown="1">
- [ ] hello, this is a todo item
- [ ] hello, this is another todo item
- [x] goodbye, this item is done
</div>
```markdown
- [ ] hello, this is a todo item
- [ ] hello, this is another todo item
- [x] goodbye, this item is done
```
## Definition list
Definition lists require HTML syntax and aren't supported with the GitHub flavored markdown compiler.
<div class="code-example" markdown="1">
<dl>
<dt>Name</dt>
<dd>Godzilla</dd>
<dt>Born</dt>
<dd>1952</dd>
<dt>Birthplace</dt>
<dd>Japan</dd>
<dt>Color</dt>
<dd>Green</dd>
</dl>
</div>
```html
<dl>
<dt>Name</dt>
<dd>Godzilla</dd>
<dt>Born</dt>
<dd>1952</dd>
<dt>Birthplace</dt>
<dd>Japan</dd>
<dt>Color</dt>
<dd>Green</dd>
</dl>
```


@@ -1,7 +1,7 @@
 ---
 layout: default
 title: UI Components
-nav_order: 2
+nav_order: 3
 has_children: true
 parent: UI Components
 permalink: /ui-components


@@ -1,7 +1,7 @@
 ---
 layout: default
 title: Utilities
-nav_order: 3
+nav_order: 4
 parent: Utilities
 has_children: true
 permalink: /utilities


@@ -2,7 +2,7 @@
 Gem::Specification.new do |spec|
   spec.name     = "just-the-docs"
-  spec.version  = "0.1.5"
+  spec.version  = "0.1.6"
   spec.authors  = ["Patrick Marsceill"]
   spec.email    = ["patrick.marsceill@gmail.com"]

node_modules/.bin/JSONStream generated vendored

@ -1 +0,0 @@
../JSONStream/index.js

node_modules/.bin/browserslist generated vendored

@ -1 +0,0 @@
../browserslist/cli.js

node_modules/.bin/colorguard generated vendored

@ -1 +0,0 @@
../colorguard/bin/colorguard

node_modules/.bin/css-rule-stream generated vendored

@ -1 +0,0 @@
../css-rule-stream/index.js

node_modules/.bin/doiuse generated vendored

@ -1 +0,0 @@
../doiuse/cli.js

node_modules/.bin/esparse generated vendored

@ -1 +0,0 @@
../esprima/bin/esparse.js

node_modules/.bin/esvalidate generated vendored

@ -1 +0,0 @@
../esprima/bin/esvalidate.js

node_modules/.bin/js-yaml generated vendored

@ -1 +0,0 @@
../js-yaml/bin/js-yaml.js

node_modules/.bin/jsonfilter generated vendored

@ -1 +0,0 @@
../jsonfilter/cli.js

node_modules/.bin/semver generated vendored

@ -1 +0,0 @@
../semver/bin/semver

node_modules/.bin/specificity generated vendored

@ -1 +0,0 @@
../specificity/bin/specificity

node_modules/.bin/strip-indent generated vendored

@ -1 +0,0 @@
../strip-indent/cli.js

node_modules/.bin/stylehacks generated vendored

@ -1 +0,0 @@
../stylehacks/dist/cli.js

node_modules/.bin/stylelint generated vendored

@ -1 +0,0 @@
../stylelint/bin/stylelint.js

node_modules/.bin/window-size generated vendored

@ -1 +0,0 @@
../window-size/cli.js

node_modules/JSONStream/.npmignore generated vendored

@ -1,2 +0,0 @@
node_modules/*
node_modules


@ -1,4 +0,0 @@
language: node_js
node_js:
- "0.8"
- "0.10"


@ -1,15 +0,0 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

node_modules/JSONStream/LICENSE.MIT generated vendored

@ -1,24 +0,0 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@ -1,13 +0,0 @@
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
console.error(data)
return data
})
req.pipe(parser)
parser.pipe(logger)

node_modules/JSONStream/index.js generated vendored

@ -1,192 +0,0 @@
#! /usr/bin/env node
var Parser = require('jsonparse')
, through = require('through')
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder that mine,
so i'll forgive him.
*/
exports.parse = function (path, map) {
var parser = new Parser()
var stream = through(function (chunk) {
if('string' === typeof chunk)
chunk = new Buffer(chunk)
parser.write(chunk)
},
function (data) {
if(data)
stream.write(data)
stream.queue(null)
})
if('string' === typeof path)
path = path.split('.').map(function (e) {
if (e === '*')
return true
else if (e === '') // '..'.split('.') returns an empty string
return {recurse: true}
else
return e
})
var count = 0, _key
if(!path || !path.length)
path = null
parser.onValue = function () {
if (!this.root && this.stack.length == 1)
stream.root = this.value
if(! path) return
var i = 0 // iterates on path
var j = 0 // iterates on stack
while (i < path.length) {
var key = path[i]
var c
j++
if (key && !key.recurse) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (! check(key, c.key)) return
i++
} else {
i++
var nextKey = path[i]
if (! nextKey) return
while (true) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (check(nextKey, c.key)) { i++; break}
j++
}
}
}
if (j !== this.stack.length) return
count ++
var data = this.value[this.key]
if(null != data)
if(null != (data = map ? map(data) : data))
stream.queue(data)
delete this.value[this.key]
}
parser._onToken = parser.onToken;
parser.onToken = function (token, value) {
parser._onToken(token, value);
if (this.stack.length === 0) {
if (stream.root) {
if(!path)
stream.queue(stream.root)
stream.emit('root', stream.root, count)
count = 0;
stream.root = null;
}
}
}
parser.onError = function (err) {
stream.emit('error', err)
}
return stream
}
function check (x, y) {
if ('string' === typeof x)
return y == x
else if (x && 'function' === typeof x.exec)
return x.exec(y)
else if ('boolean' === typeof x)
return x
else if ('function' === typeof x)
return x(y)
return false
}
exports.stringify = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
//else, what ever you like
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
var json = JSON.stringify(data, null, indent)
if(first) { first = false ; stream.queue(op + json)}
else stream.queue(sep + json)
},
function (data) {
if(!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
exports.stringifyObject = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
//else, what ever you like
var first = true
, anyData = false
stream = through(function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
if(first) { first = false ; this.queue(op + json)}
else this.queue(sep + json)
},
function (data) {
if(!anyData) this.queue(op)
this.queue(cl)
this.queue(null)
})
return stream
}
if(!module.parent && process.title !== 'browser') {
process.stdin
.pipe(exports.parse(process.argv[2]))
.pipe(exports.stringify('[', ',\n', ']\n', 2))
.pipe(process.stdout)
}

node_modules/JSONStream/package.json generated vendored

@ -1,93 +0,0 @@
{
"_args": [
[
"JSONStream@^0.8.4",
"/Users/pmarsceill/_projects/just-the-docs/node_modules/jsonfilter"
]
],
"_from": "JSONStream@>=0.8.4 <0.9.0",
"_id": "JSONStream@0.8.4",
"_inCache": true,
"_installable": true,
"_location": "/JSONStream",
"_npmUser": {
"email": "dominic.tarr@gmail.com",
"name": "dominictarr"
},
"_npmVersion": "1.4.9",
"_phantomChildren": {},
"_requested": {
"name": "JSONStream",
"raw": "JSONStream@^0.8.4",
"rawSpec": "^0.8.4",
"scope": null,
"spec": ">=0.8.4 <0.9.0",
"type": "range"
},
"_requiredBy": [
"/jsonfilter"
],
"_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-0.8.4.tgz",
"_shasum": "91657dfe6ff857483066132b4618b62e8f4887bd",
"_shrinkwrap": null,
"_spec": "JSONStream@^0.8.4",
"_where": "/Users/pmarsceill/_projects/just-the-docs/node_modules/jsonfilter",
"author": {
"email": "dominic.tarr@gmail.com",
"name": "Dominic Tarr",
"url": "http://bit.ly/dominictarr"
},
"bin": {
"JSONStream": "./index.js"
},
"bugs": {
"url": "https://github.com/dominictarr/JSONStream/issues"
},
"dependencies": {
"jsonparse": "0.0.5",
"through": ">=2.2.7 <3"
},
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"devDependencies": {
"assertions": "~2.2.2",
"event-stream": "~0.7.0",
"it-is": "~1",
"render": "~0.1.1",
"tape": "~2.12.3",
"trees": "~0.0.3"
},
"directories": {},
"dist": {
"shasum": "91657dfe6ff857483066132b4618b62e8f4887bd",
"tarball": "https://registry.npmjs.org/JSONStream/-/JSONStream-0.8.4.tgz"
},
"engines": {
"node": "*"
},
"homepage": "http://github.com/dominictarr/JSONStream",
"keywords": [
"async",
"json",
"parser",
"parsing",
"stream",
"streaming"
],
"maintainers": [
{
"name": "dominictarr",
"email": "dominic.tarr@gmail.com"
}
],
"name": "JSONStream",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"scripts": {
"test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
},
"version": "0.8.4"
}


@ -1,178 +0,0 @@
# JSONStream
streaming JSON.parse and stringify
<img src=https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master>
## example
``` js
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
.pipe(JSONStream.parse('rows.*'))
.pipe(es.mapSync(function (data) {
console.error(data)
return data
}))
```
## JSONStream.parse(path)
parse stream of values that match a path
``` js
JSONStream.parse('rows.*.doc')
```
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
If your keys have keys that include `.` or `*` etc, use an array instead.
`['row', true, /^doc/]`.
If you use an array, it may contain strings, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
If `path` is empty or null, no 'data' events are emitted.
### Examples
query a couchdb view:
``` bash
curl -sS localhost:5984/tests/_all_docs&include_docs=true
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
we are probably most interested in the `rows.*.docs`
create a `Stream` that parses the documents from the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
stream.on('root', function(root, count) {
if (!count) {
console.log('no matches found:', root);
}
});
```
awesome!
### recursive patterns (..)
`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.
```js
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}
```
## JSONStream.parse(pattern, map)
provide a function that can be used to map or filter
the json output. `map` is passed the value at that node of the pattern,
if `map` returns non-nullish (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.
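
For illustration (this snippet is an addition, not from the original README; it mirrors the module's `test/map.js`, which is also removed in this commit):

``` js
var JSONStream = require('JSONStream')

// double every even number in a top-level JSON array and drop the odd ones;
// returning null (or undefined) from the map function filters the value out
var stream = JSONStream.parse([true], function (n) {
  return n % 2 === 0 ? n * 2 : null
})

stream.on('data', function (v) { console.log(v) }) // 4, 8, 12

stream.write(JSON.stringify([1, 2, 3, 4, 5, 6]))
stream.end()
```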
## JSONStream.stringify(open, sep, close)
Create a writable stream.
you may pass in custom `open`, `close`, and `separator` strings.
But, by default, `JSONStream.stringify()` will create an array,
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
If you call `JSONStream.stringify(false)`
the elements will only be separated by a newline.
If you only write one item this will be valid JSON.
If you write many items,
you can use a `RegExp` to split it into valid chunks.
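
As a minimal sketch (an addition for illustration, not part of the original README), piping objects through `stringify()` into a file looks like this:

``` js
var fs = require('fs')
var JSONStream = require('JSONStream')

// collect every object written to the stream into one JSON array on disk,
// using the default open/sep/close strings '[\n', '\n,\n', '\n]\n'
var out = JSONStream.stringify()
out.pipe(fs.createWriteStream('docs.json'))

out.write({ id: 1, title: 'first' })
out.write({ id: 2, title: 'second' })
out.end()
```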
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
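
Again as an illustrative sketch (an addition, not original README content), each write supplies a `[ key, data ]` pair:

``` js
var JSONStream = require('JSONStream')

var out = JSONStream.stringifyObject() // defaults: '{\n', '\n,\n', '\n}\n'
out.pipe(process.stdout)

out.write([ 'name', 'Godzilla' ]) // emitted as "name":"Godzilla"
out.write([ 'born', 1952 ])       // emitted as "born":1952
out.end()
```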
## unix tool
query npm to see all the modules that browserify has ever depended on.
``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```
## numbers
There are occasional problems parsing and unparsing very precise numbers.
I have opened an issue here:
https://github.com/creationix/jsonparse/issues/2
+1
## Acknowledgements
this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
MIT / APACHE2

node_modules/JSONStream/test/bool.js generated vendored

@ -1,41 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([true]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)


@ -1,18 +0,0 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
test('basic parsing', function (t) {
t.plan(2)
var parsed = JSONStream.parse("rows.*")
var parsedKeys = {}
parsed.on('data', function(match) {
parsedKeys[Object.keys(match)[0]] = true
})
parsed.on('end', function() {
t.equal(!!parsedKeys['hello'], true)
t.equal(!!parsedKeys['foo'], true)
})
parsed.write(testData)
parsed.end()
})


@ -1,27 +0,0 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');
var server = net.createServer(function(client) {
var parser = JSONStream.parse([]);
parser.on('end', function() {
console.log('close')
console.error('PASSED');
server.close();
});
client.pipe(parser);
var n = 4
client.on('data', function () {
if(--n) return
client.end();
})
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});


@ -1,29 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse('rows..rev')
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
for (var i = 0 ; i < expected.rows.length ; i++)
it(parsed[i]).deepEqual(expected.rows[i].value.rev)
console.error('PASSED')
})


@ -1,29 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(5)
for (var i = 0 ; i < 5 ; i++)
it(parsed[i]).deepEqual(i)
console.error('PASSED')
})


@ -1,44 +0,0 @@
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()

File diff suppressed because it is too large.


@ -1,15 +0,0 @@
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}

node_modules/JSONStream/test/fn.js generated vendored

@ -1,39 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

node_modules/JSONStream/test/gen.js generated vendored

@ -1,135 +0,0 @@
return // dont run this test for now since tape is weird and broken on 0.10
var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
tape('out of mem', function (t) {
t.plan(1)
//////////////////////////////////////////////////////
// Produces a random number between arg1 and arg2
//////////////////////////////////////////////////////
var randomNumber = function (min, max) {
var number = Math.floor(Math.random() * (max - min + 1) + min);
return number;
};
//////////////////////////////////////////////////////
// Produces a random string of a length between arg1 and arg2
//////////////////////////////////////////////////////
var randomString = function (min, max) {
// add several spaces to increase chanses of creating 'words'
var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var result = '';
var randomLength = randomNumber(min, max);
for (var i = randomLength; i > 0; --i) {
result += chars[Math.round(Math.random() * (chars.length - 1))];
}
return result;
};
//////////////////////////////////////////////////////
// Produces a random JSON document, as a string
//////////////////////////////////////////////////////
var randomJsonDoc = function () {
var doc = {
"CrashOccurenceID": randomNumber(10000, 50000),
"CrashID": randomNumber(1000, 10000),
"SiteName": randomString(10, 25),
"MachineName": randomString(10, 25),
"Date": randomString(26, 26),
"ProcessDuration": randomString(18, 18),
"ThreadIdentityName": null,
"WindowsIdentityName": randomString(15, 40),
"OperatingSystemName": randomString(35, 65),
"DetailedExceptionInformation": randomString(100, 800)
};
doc = JSON.stringify(doc);
doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
return doc;
};
//////////////////////////////////////////////////////
// generates test data
//////////////////////////////////////////////////////
var generateTestData = function (cb) {
console.log('generating large data file...');
var stream = fs.createWriteStream(file, {
encoding: 'utf8'
});
var i = 0;
var max = size;
var writing = false
var split = ',\n';
var doc = randomJsonDoc();
stream.write('[');
function write () {
if(writing) return
writing = true
while(++i < max) {
if(Math.random() < 0.001)
console.log('generate..', i + ' / ' + size)
if(!stream.write(doc + split)) {
writing = false
return stream.once('drain', write)
}
}
stream.write(doc + ']')
stream.end();
console.log('END')
}
write()
stream.on('close', cb)
};
//////////////////////////////////////////////////////
// Shows that parsing 100000 instances using JSONStream fails
//
// After several seconds, you will get this crash
// FATAL ERROR: JS Allocation failed - process out of memory
//////////////////////////////////////////////////////
var testJSONStreamParse_causesOutOfMem = function (done) {
var items = 0
console.log('parsing data files using JSONStream...');
var parser = JSONStream.parse([true]);
var stream = fs.createReadStream(file);
stream.pipe(parser);
parser.on('data', function (data) {
items++
if(Math.random() < 0.01) console.log(items, '...')
});
parser.on('end', function () {
t.equal(items, size)
});
};
//////////////////////////////////////////////////////
// main
//////////////////////////////////////////////////////
fs.stat(file, function (err, stat) {
console.log(stat)
if(err)
generateTestData(testJSONStreamParse_causesOutOfMem);
else
testJSONStreamParse_causesOutOfMem()
})
})
// }

node_modules/JSONStream/test/map.js generated vendored

@ -1,40 +0,0 @@
var test = require('tape')
var JSONStream = require('../')
test('map function', function (t) {
var actual = []
stream = JSONStream.parse([true], function (e) { return e*10 })
stream.on('data', function (v) { actual.push(v)})
stream.on('end', function () {
t.deepEqual(actual, [10,20,30,40,50,60])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})
test('filter function', function (t) {
var actual = []
stream = JSONStream
.parse([true], function (e) { return e%2 ? e : null})
.on('data', function (v) { actual.push(v)})
.on('end', function () {
t.deepEqual(actual, [1,3,5])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})


@ -1,42 +0,0 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var datas = {}
var server = net.createServer(function(client) {
var root_calls = 0;
var data_calls = 0;
var parser = JSONStream.parse(['rows', true, 'key']);
parser.on('root', function(root, count) {
++ root_calls;
});
parser.on('data', function(data) {
++ data_calls;
datas[data] = (datas[data] || 0) + 1
it(data).typeof('string')
});
parser.on('end', function() {
console.log('END')
var min = Infinity
for (var d in datas)
min = min > datas[d] ? datas[d] : min
it(root_calls).equal(3);
it(min).equal(3);
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + ' ' + str + '\n\n' + str
client.end(msgs);
});


@ -1,35 +0,0 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var server = net.createServer(function(client) {
var root_calls = 0;
var data_calls = 0;
var parser = JSONStream.parse();
parser.on('root', function(root, count) {
++ root_calls;
it(root_calls).notEqual(2);
});
parser.on('error', function(err) {
console.log(err);
server.close();
});
parser.on('end', function() {
console.log('END');
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + '}';
client.end(msgs);
});

node_modules/JSONStream/test/null.js generated vendored

@ -1,28 +0,0 @@
var JSONStream = require('../')
var data = [
{ID: 1, optional: null},
{ID: 2, optional: null},
{ID: 3, optional: 20},
{ID: 4, optional: null},
{ID: 5, optional: 'hello'},
{ID: 6, optional: null}
]
var test = require('tape')
test ('null properties', function (t) {
var actual = []
var stream =
JSONStream.parse('*.optional')
.on('data', function (v) { actual.push(v) })
.on('end', function () {
t.deepEqual(actual, [20, 'hello'])
t.end()
})
stream.write(JSON.stringify(data, null, 2))
stream.end()
})


@ -1,28 +0,0 @@
/*
sometimes jsonparse changes numbers slightly.
*/
var r = Math.random()
, Parser = require('jsonparse')
, p = new Parser()
, assert = require('assert')
, times = 20
while (times --) {
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
p.onValue = function (v) {
console.error('parsed', v)
assert.equal(
String(v).slice(0,12),
String(r).slice(0,12)
)
}
console.error('correct', r)
p.write (new Buffer(JSON.stringify([r])))
}


@ -1,41 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
//JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
console.error('PASSED')
})
)


@ -1,47 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
, es = require('event-stream')
, pending = 10
, passed = true
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
for (var ix = 0; ix < pending; ix++) (function (count) {
var expected = {}
, stringify = JSONStream.stringifyObject()
es.connect(
stringify,
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
if (--pending === 0) {
console.error('PASSED')
}
})
)
while (count --) {
var key = Math.random().toString(16).slice(2)
expected[key] = randomObj()
stringify.write([ key, expected[key] ])
}
process.nextTick(function () {
stringify.end()
})
})(ix)

node_modules/JSONStream/test/test.js generated vendored

@ -1,35 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})


@ -1,29 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, '..','package.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse([])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it(data).deepEqual(expected)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(1)
console.error('PASSED')
})


@ -1,41 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

node_modules/ajv-keywords/LICENSE generated vendored

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2016 Evgeny Poberezkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/ajv-keywords/README.md generated vendored

@ -1,443 +0,0 @@
# ajv-keywords
Custom JSON-Schema keywords for [ajv](https://github.com/epoberezkin/ajv) validator
[![Build Status](https://travis-ci.org/epoberezkin/ajv-keywords.svg?branch=master)](https://travis-ci.org/epoberezkin/ajv-keywords)
[![npm version](https://badge.fury.io/js/ajv-keywords.svg)](https://www.npmjs.com/package/ajv-keywords)
[![npm downloads](https://img.shields.io/npm/dm/ajv-keywords.svg)](https://www.npmjs.com/package/ajv-keywords)
[![Coverage Status](https://coveralls.io/repos/github/epoberezkin/ajv-keywords/badge.svg?branch=master)](https://coveralls.io/github/epoberezkin/ajv-keywords?branch=master)
## Contents
- [Install](#install)
- [Usage](#usage)
- [Keywords](#keywords)
- [typeof](#typeof)
- [instanceof](#instanceof)
- [range and exclusiveRange](#range-and-exclusiverange)
- [propertyNames](#propertynames)
- [if/then/else](#ifthenelse)
- [prohibited](#prohibited)
- [deepProperties](#deepproperties)
- [deepRequired](#deeprequired)
- [regexp](#regexp)
- [dynamicDefaults](#dynamicdefaults)
- [License](#license)
## Install
```
npm install ajv-keywords
```
## Usage
To add all available keywords:
```javascript
var Ajv = require('ajv');
var ajv = new Ajv;
require('ajv-keywords')(ajv);
ajv.validate({ instanceof: 'RegExp' }, /.*/); // true
ajv.validate({ instanceof: 'RegExp' }, '.*'); // false
```
To add a single keyword:
```javascript
require('ajv-keywords')(ajv, 'instanceof');
```
To add multiple keywords:
```javascript
require('ajv-keywords')(ajv, ['typeof', 'instanceof']);
```
To add a single keyword in browser (to avoid adding unused code):
```javascript
require('ajv-keywords/keywords/instanceof')(ajv);
```
## Keywords
### `typeof`
Based on JavaScript `typeof` operation.
The value of the keyword should be a string (`"undefined"`, `"string"`, `"number"`, `"object"`, `"function"`, `"boolean"` or `"symbol"`) or array of strings.
To pass validation the result of `typeof` operation on the value should be equal to the string (or one of the strings in the array).
```
ajv.validate({ typeof: 'undefined' }, undefined); // true
ajv.validate({ typeof: 'undefined' }, null); // false
ajv.validate({ typeof: ['undefined', 'object'] }, null); // true
```
### `instanceof`
Based on JavaScript `instanceof` operation.
The value of the keyword should be a string (`"Object"`, `"Array"`, `"Function"`, `"Number"`, `"String"`, `"Date"`, `"RegExp"` or `"Buffer"`) or array of strings.
To pass validation the result of `data instanceof ...` operation on the value should be true:
```
ajv.validate({ instanceof: 'Array' }, []); // true
ajv.validate({ instanceof: 'Array' }, {}); // false
ajv.validate({ instanceof: ['Array', 'Function'] }, function(){}); // true
```
You can add your own constructor function to be recognised by this keyword:
```javascript
function MyClass() {}
var instanceofDefinition = require('ajv-keywords').get('instanceof').definition;
// or require('ajv-keywords/keywords/instanceof').definition;
instanceofDefinition.CONSTRUCTORS.MyClass = MyClass;
ajv.validate({ instanceof: 'MyClass' }, new MyClass); // true
```
### `range` and `exclusiveRange`
Syntax sugar for the combination of minimum and maximum keywords, also fails schema compilation if there are no numbers in the range.
The value of this keyword must be the array consisting of two numbers, the second must be greater or equal than the first one.
If the validated value is not a number the validation passes, otherwise to pass validation the value should be greater (or equal) than the first number and smaller (or equal) than the second number in the array. If `exclusiveRange` keyword is present in the same schema and its value is true, the validated value must not be equal to the range boundaries.
```javascript
var schema = { range: [1, 3] };
ajv.validate(schema, 1); // true
ajv.validate(schema, 2); // true
ajv.validate(schema, 3); // true
ajv.validate(schema, 0.99); // false
ajv.validate(schema, 3.01); // false
var schema = { range: [1, 3], exclusiveRange: true };
ajv.validate(schema, 1.01); // true
ajv.validate(schema, 2); // true
ajv.validate(schema, 2.99); // true
ajv.validate(schema, 1); // false
ajv.validate(schema, 3); // false
```
### `propertyNames`
This keyword allows to define the schema for the property names of the object. The value of this keyword should be a valid JSON schema (v5 schemas are supported with Ajv option `{v5: true}`).
```javascript
var schema = {
type: 'object',
propertyNames: {
anyOf: [
{ format: 'ipv4' },
{ format: 'hostname' }
]
}
};
var validData = {
'192.128.0.1': {},
'test.example.com': {}
};
var invalidData = {
'1.2.3': {}
};
ajv.validate(schema, validData); // true
ajv.validate(schema, invalidData); // false
```
__Please note__: This keyword will be added to the next version of the JSON-Schema standard (draft-6); after it is published, the keyword will be included in Ajv as a standard validation keyword.
### `if`/`then`/`else`
These keywords allow to implement conditional validation. Their values should be valid JSON-schemas. At the moment it requires using Ajv with v5 option.
If the data is valid according to the sub-schema in `if` keyword, then the result is equal to the result of data validation against the sub-schema in `then` keyword, otherwise - in `else` keyword (if `else` is absent, the validation succeeds).
```javascript
require('ajv-keywords')(ajv, 'if');
var schema = {
type: 'array',
items: {
type: 'integer',
minimum: 1,
if: { maximum: 10 },
then: { multipleOf: 2 },
else: { multipleOf: 5 }
}
};
var validItems = [ 2, 4, 6, 8, 10, 15, 20, 25 ]; // etc.
var invalidItems = [ 1, 3, 5, 11, 12 ]; // etc.
ajv.validate(schema, validItems); // true
ajv.validate(schema, invalidItems); // false
```
This keyword is [proposed](https://github.com/json-schema-org/json-schema-spec/issues/180) for the future version of JSON-Schema standard.
### `prohibited`
This keyword lets you require that none of the properties in the list is present in the object.
This keyword applies only to objects. If the data is not an object, the validation succeeds.
The value of this keyword should be an array of strings, each string being a property name. For data object to be valid none of the properties in this array should be present in the object.
```
var schema = { prohibited: ['foo', 'bar']};
var validData = { baz: 1 };
var alsoValidData = {};
var invalidDataList = [
{ foo: 1 },
{ bar: 2 },
{ foo: 1, bar: 2}
];
```
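
For completeness, validating the data above would look like this (this snippet is an addition mirroring the style of the other keyword sections; it assumes the `ajv` instance created in the Usage section):

```javascript
require('ajv-keywords')(ajv, 'prohibited');

ajv.validate(schema, validData);     // true
ajv.validate(schema, alsoValidData); // true
invalidDataList.map(function (data) {
  return ajv.validate(schema, data); // false for every item
});
```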
### `deepRequired`
This keyword allows to check that some deep properties (identified by JSON pointers) are available. The value should be an array of JSON pointers to the data, starting from the current position in data.
```javascript
var schema = {
type: 'object',
deepRequired: ["/users/1/role"]
};
var validData = {
users: [
{},
{
id: 123,
role: 'admin'
}
]
};
var invalidData = {
users: [
{},
{
id: 123
}
]
};
```
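
Similarly (again an added illustration, assuming the `ajv` instance from the Usage section):

```javascript
require('ajv-keywords')(ajv, 'deepRequired');

ajv.validate(schema, validData);   // true
ajv.validate(schema, invalidData); // false
```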
See [json-schema-org/json-schema-spec#203](https://github.com/json-schema-org/json-schema-spec/issues/203#issue-197211916) for an example of the equivalent schema without `deepRequired` keyword.
### `deepProperties`
This keyword allows to validate deep properties (identified by JSON pointers). The value should be an object, where keys are JSON pointers to the data, starting from the current position in data, and the values are corresponding schemas.
```javascript
var schema = {
type: 'object',
deepProperties: {
"/users/1/role": { "enum": ["admin"] }
}
};
var validData = {
users: [
{},
{
id: 123,
role: 'admin'
}
]
};
var alsoValidData = {
users: {
"1": {
id: 123,
role: 'admin'
}
}
};
var invalidData = {
users: [
{},
{
id: 123,
role: 'user'
}
]
};
var alsoInvalidData = {
users: {
"1": {
id: 123,
role: 'user'
}
}
};
```
### `regexp`
This keyword allows to use regular expressions with flags in schemas (the standard `pattern` keyword does not support flags). The value of this keyword can be either a string (the result of `regexp.toString()`) or an object with the properties `pattern` and `flags` (the same strings that should be passed to RegExp constructor).
```javascript
var schema = {
type: 'object',
properties: {
foo: { regexp: '/foo/i' },
bar: { regexp: { pattern: 'bar', flags: 'i' } }
}
};
var validData = {
foo: 'Food',
bar: 'Barmen'
};
var invalidData = {
foo: 'fog',
bar: 'bad'
};
```
### `dynamicDefaults`
This keyword allows to assign dynamic defaults to properties, such as timestamps, unique IDs etc.
This keyword only works if the `useDefaults` option is used and not inside `anyOf` keywords etc., in the same way as [default keyword treated by Ajv](https://github.com/epoberezkin/ajv#assigning-defaults).
The keyword should be added on the object level. Its value should be an object with each property corresponding to a property name, in the same way as in standard `properties` keyword. The value of each property can be:
- an identifier of default function (a string)
- an object with properties `func` (an identifier) and `args` (an object with parameters that will be passed to this function during schema compilation - see examples).
The properties used in `dynamicDefaults` should not be added to `required` keyword (or validation will fail), because unlike `default` this keyword is processed after validation.
There are several predefined dynamic default functions:
- `"timestamp"` - current timestamp in milliseconds
- `"datetime"` - current date and time as string (ISO, valid according to `date-time` format)
- `"date"` - current date as string (ISO, valid according to `date` format)
- `"time"` - current time as string (ISO, valid according to `time` format)
- `"random"` - pseudo-random number in [0, 1) interval
- `"randomint"` - pseudo-random integer number. If string is used as a property value, the function will randomly return 0 or 1. If object `{func: 'randomint', max: N}` is used then the default will be an integer number in [0, N) interval.
- `"seq"` - sequential integer number starting from 0. If string is used as a property value, the default sequence will be used. If object `{func: 'seq', name: 'foo'}` is used then the sequence with name `"foo"` will be used. Sequences are global, even if different ajv instances are used.
```javascript
var schema = {
type: 'object',
dynamicDefaults: {
ts: 'datetime',
r: { func: 'randomint', max: 100 },
id: { func: 'seq', name: 'id' }
},
properties: {
ts: {
type: 'string',
format: 'datetime'
},
r: {
type: 'integer',
minimum: 0,
maximum: 100,
exclusiveMaximum: true
},
id: {
type: 'integer',
minimum: 0
}
}
};
var data = {};
ajv.validate(data); // true
data; // { ts: '2016-12-01T22:07:28.829Z', r: 25, id: 0 }
var data1 = {};
ajv.validate(data1); // true
data1; // { ts: '2016-12-01T22:07:29.832Z', r: 68, id: 1 }
ajv.validate(data1); // true
data1; // didn't change, as all properties were defined
```
You can add your own dynamic default function to be recognised by this keyword:
```javascript
var uuid = require('uuid');
function uuidV4() { return uuid.v4(); }
var definition = require('ajv-keywords').get('dynamicDefaults').definition;
// or require('ajv-keywords/keywords/dynamicDefaults').definition;
definition.DEFAULTS.uuid = uuidV4;
var schema = {
dynamicDefaults: { id: 'uuid' },
properties: { id: { type: 'string', format: 'uuid' } }
};
var data = {};
ajv.validate(schema, data); // true
data; // { id: 'a1183fbe-697b-4030-9bcc-cfeb282a9150' };
var data1 = {};
ajv.validate(schema, data1); // true
data1; // { id: '5b008de7-1669-467a-a5c6-70fa244d7209' }
```
You also can define dynamic default that accepts parameters, e.g. version of uuid:
```javascript
var uuid = require('uuid');
function getUuid(args) {
var version = 'v' + (args && args.v || 4);
return function() {
return uuid[version]();
};
}
var definition = require('ajv-keywords').get('dynamicDefaults').definition;
definition.DEFAULTS.uuid = getUuid;
var schema = {
dynamicDefaults: {
id1: 'uuid', // v4
id2: { func: 'uuid', v: 4 }, // v4
id3: { func: 'uuid', v: 1 } // v1
}
};
```
## License
[MIT](https://github.com/JSONScript/ajv-keywords/blob/master/LICENSE)

node_modules/ajv-keywords/index.js generated vendored

@ -1,35 +0,0 @@
'use strict';
var KEYWORDS = require('./keywords');
module.exports = defineKeywords;
/**
* Defines one or several keywords in ajv instance
* @param {Ajv} ajv validator instance
* @param {String|Array<String>|undefined} keyword keyword(s) to define
* @return {Ajv} ajv instance (for chaining)
*/
function defineKeywords(ajv, keyword) {
if (Array.isArray(keyword)) {
for (var i=0; i<keyword.length; i++)
get(keyword[i])(ajv);
return ajv;
}
if (keyword) {
get(keyword)(ajv);
return ajv;
}
for (keyword in KEYWORDS) get(keyword)(ajv);
return ajv;
}
defineKeywords.get = get;
function get(keyword) {
var defFunc = KEYWORDS[keyword];
if (!defFunc) throw new Error('Unknown keyword ' + keyword);
return defFunc;
}


@ -1,92 +0,0 @@
'use strict';
var TIME = /^(\d\d):(\d\d):(\d\d)(\.\d+)?(z|[+-]\d\d:\d\d)?$/i;
var DATE_TIME_SEPARATOR = /t|\s/i;
var COMPARE_FORMATS = {
date: compareDate,
time: compareTime,
'date-time': compareDateTime
};
module.exports = function (minMax) {
var keyword = 'format' + minMax;
return function defFunc(ajv) {
if (ajv.RULES.keywords[keyword])
return console.warn('Keyword', keyword, 'is already defined');
defFunc.definition = {
type: 'string',
inline: require('./dotjs/_formatLimit'),
statements: true,
errors: 'full',
metaSchema: {
anyOf: [
{ type: 'string' },
{
type: 'object',
required: [ '$data' ],
properties: {
$data: {
type: 'string',
anyOf: [
{ format: 'relative-json-pointer' },
{ format: 'json-pointer' }
]
}
},
additionalProperties: false
}
]
}
};
ajv.addKeyword(keyword, defFunc.definition);
ajv.addKeyword('formatExclusive' + minMax);
extendFormats(ajv);
return ajv;
};
};
function extendFormats(ajv) {
var formats = ajv._formats;
for (var name in COMPARE_FORMATS) {
var format = formats[name];
if (typeof format != 'object')
format = formats[name] = { validate: format };
if (!format.compare)
format.compare = COMPARE_FORMATS[name];
}
}
function compareDate(d1, d2) {
if (!(d1 && d2)) return;
if (d1 > d2) return 1;
if (d1 < d2) return -1;
if (d1 === d2) return 0;
}
function compareTime(t1, t2) {
if (!(t1 && t2)) return;
t1 = t1.match(TIME);
t2 = t2.match(TIME);
if (!(t1 && t2)) return;
t1 = t1[1] + t1[2] + t1[3] + (t1[4]||'');
t2 = t2[1] + t2[2] + t2[3] + (t2[4]||'');
if (t1 > t2) return 1;
if (t1 < t2) return -1;
if (t1 === t2) return 0;
}
function compareDateTime(dt1, dt2) {
if (!(dt1 && dt2)) return;
dt1 = dt1.split(DATE_TIME_SEPARATOR);
dt2 = dt2.split(DATE_TIME_SEPARATOR);
var res = compareDate(dt1[0], dt2[0]);
if (res === undefined) return;
return res || compareTime(dt1[1], dt2[1]);
}

View File

@ -1,55 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'object',
macro: function (schema) {
var schemas = [];
for (var pointer in schema)
schemas.push(getSchema(pointer, schema[pointer]));
return { 'allOf': schemas };
},
metaSchema: {
type: 'object',
patternProperties: {
'^(\\/([^~\\/]|~0|~1)*)*(\\/)?$': {
$ref: ajv._opts.v5
? 'https://raw.githubusercontent.com/epoberezkin/ajv/master/lib/refs/json-schema-v5.json#'
: 'http://json-schema.org/draft-04/schema#'
}
},
additionalProperties: false
}
};
ajv.addKeyword('deepProperties', defFunc.definition);
return ajv;
};
function getSchema(jsonPointer, schema) {
var segments = jsonPointer.split('/');
var rootSchema = {};
var pointerSchema = rootSchema;
for (var i=1; i<segments.length; i++) {
var segment = segments[i];
var isLast = i == segments.length - 1;
segment = unescapeJsonPointer(segment);
var properties = pointerSchema.properties = {};
var items = undefined;
if (/[0-9]+/.test(segment)) {
var count = +segment;
items = pointerSchema.items = [];
while (count--) items.push({});
}
pointerSchema = isLast ? schema : {};
properties[segment] = pointerSchema;
if (items) items.push(pointerSchema);
}
return rootSchema;
}
function unescapeJsonPointer(str) {
return str.replace(/~1/g, '/').replace(/~0/g, '~');
}

View File

@ -1,57 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'object',
inline: function (it, keyword, schema) {
var expr = '';
for (var i=0; i<schema.length; i++) {
if (i) expr += ' && ';
expr += '(' + getData(schema[i], it.dataLevel) + ' !== undefined)';
}
return expr;
},
metaSchema: {
type: 'array',
items: {
type: 'string',
format: 'json-pointer'
}
}
};
ajv.addKeyword('deepRequired', defFunc.definition);
return ajv;
};
function getData(jsonPointer, lvl) {
var data = 'data' + (lvl || '');
if (!jsonPointer) return data;
var expr = data;
var segments = jsonPointer.split('/');
for (var i=1; i<segments.length; i++) {
var segment = segments[i];
data += getProperty(unescapeJsonPointer(segment));
expr += ' && ' + data;
}
return expr;
}
var IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i;
var INTEGER = /^[0-9]+$/;
var SINGLE_QUOTE = /'|\\/g;
function getProperty(key) {
return INTEGER.test(key)
? '[' + key + ']'
: IDENTIFIER.test(key)
? '.' + key
: "['" + key.replace(SINGLE_QUOTE, '\\$&') + "']";
}
function unescapeJsonPointer(str) {
return str.replace(/~1/g, '/').replace(/~0/g, '~');
}

View File

@ -1,116 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
var {{=$valid}} = undefined;
{{## def.skipFormatLimit:
{{=$valid}} = true;
{{ return out; }}
#}}
{{## def.compareFormat:
{{? $isData }}
if ({{=$schemaValue}} === undefined) {{=$valid}} = true;
else if (typeof {{=$schemaValue}} != 'string') {{=$valid}} = false;
else {
{{ $closingBraces += '}'; }}
{{?}}
{{? $isDataFormat }}
if (!{{=$compare}}) {{=$valid}} = true;
else {
{{ $closingBraces += '}'; }}
{{?}}
var {{=$result}} = {{=$compare}}({{=$data}}, {{# def.schemaValueQS }});
if ({{=$result}} === undefined) {{=$valid}} = false;
#}}
{{? it.opts.format === false }}{{# def.skipFormatLimit }}{{?}}
{{
var $schemaFormat = it.schema.format
, $isDataFormat = it.opts.v5 && $schemaFormat.$data
, $closingBraces = '';
}}
{{? $isDataFormat }}
{{
var $schemaValueFormat = it.util.getData($schemaFormat.$data, $dataLvl, it.dataPathArr)
, $format = 'format' + $lvl
, $compare = 'compare' + $lvl;
}}
var {{=$format}} = formats[{{=$schemaValueFormat}}]
, {{=$compare}} = {{=$format}} && {{=$format}}.compare;
{{??}}
{{ var $format = it.formats[$schemaFormat]; }}
{{? !($format && $format.compare) }}
{{# def.skipFormatLimit }}
{{?}}
{{ var $compare = 'formats' + it.util.getProperty($schemaFormat) + '.compare'; }}
{{?}}
{{
var $isMax = $keyword == 'formatMaximum'
, $exclusiveKeyword = 'formatExclusive' + ($isMax ? 'Maximum' : 'Minimum')
, $schemaExcl = it.schema[$exclusiveKeyword]
, $isDataExcl = it.opts.v5 && $schemaExcl && $schemaExcl.$data
, $op = $isMax ? '<' : '>'
, $result = 'result' + $lvl;
}}
{{# def.$data }}
{{? $isDataExcl }}
{{
var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr)
, $exclusive = 'exclusive' + $lvl
, $opExpr = 'op' + $lvl
, $opStr = '\' + ' + $opExpr + ' + \'';
}}
var schemaExcl{{=$lvl}} = {{=$schemaValueExcl}};
{{ $schemaValueExcl = 'schemaExcl' + $lvl; }}
if (typeof {{=$schemaValueExcl}} != 'boolean' && {{=$schemaValueExcl}} !== undefined) {
{{=$valid}} = false;
{{ var $errorKeyword = $exclusiveKeyword; }}
{{# def.error:'_formatExclusiveLimit' }}
}
{{# def.elseIfValid }}
{{# def.compareFormat }}
var {{=$exclusive}} = {{=$schemaValueExcl}} === true;
if ({{=$valid}} === undefined) {
{{=$valid}} = {{=$exclusive}}
? {{=$result}} {{=$op}} 0
: {{=$result}} {{=$op}}= 0;
}
if (!{{=$valid}}) var op{{=$lvl}} = {{=$exclusive}} ? '{{=$op}}' : '{{=$op}}=';
{{??}}
{{
var $exclusive = $schemaExcl === true
, $opStr = $op; /*used in error*/
if (!$exclusive) $opStr += '=';
var $opExpr = '\'' + $opStr + '\''; /*used in error*/
}}
{{# def.compareFormat }}
if ({{=$valid}} === undefined)
{{=$valid}} = {{=$result}} {{=$op}}{{?!$exclusive}}={{?}} 0;
{{?}}
{{= $closingBraces }}
if (!{{=$valid}}) {
{{ var $errorKeyword = $keyword; }}
{{# def.error:'_formatLimit' }}
}

View File

@ -1,28 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{
var $key = 'key' + $lvl
, $matched = 'patternMatched' + $lvl
, $closingBraces = ''
, $ownProperties = it.opts.ownProperties;
}}
var {{=$valid}} = true;
{{~ $schema:$pProperty }}
var {{=$matched}} = false;
for (var {{=$key}} in {{=$data}}) {
{{# def.checkOwnProperty }}
{{=$matched}} = {{= it.usePattern($pProperty) }}.test({{=$key}});
if ({{=$matched}}) break;
}
{{ var $missingPattern = it.util.escapeQuotes($pProperty); }}
if (!{{=$matched}}) {
{{=$valid}} = false;
{{# def.addError:'patternRequired' }}
} {{# def.elseIfValid }}
{{~}}
{{= $closingBraces }}

View File

@ -1,73 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}
{{## def.validateIf:
{{# def.setCompositeRule }}
{{ $it.createErrors = false; }}
{{# def._validateSwitchRule:if }}
{{ $it.createErrors = true; }}
{{# def.resetCompositeRule }}
{{=$ifPassed}} = valid{{=$it.level}};
#}}
{{## def.validateThen:
{{? typeof $sch.then == 'boolean' }}
{{? $sch.then === false }}
{{# def.error:'switch' }}
{{?}}
var valid{{=$it.level}} = {{= $sch.then }};
{{??}}
{{# def._validateSwitchRule:then }}
{{?}}
#}}
{{## def._validateSwitchRule:_clause:
{{
$it.schema = $sch._clause;
$it.schemaPath = $schemaPath + '[' + $caseIndex + ']._clause';
$it.errSchemaPath = $errSchemaPath + '/' + $caseIndex + '/_clause';
}}
{{# def.insertSubschemaCode }}
#}}
{{## def.switchCase:
{{? $sch.if && {{# def.nonEmptySchema:$sch.if }} }}
var {{=$errs}} = errors;
{{# def.validateIf }}
if ({{=$ifPassed}}) {
{{# def.validateThen }}
} else {
{{# def.resetErrors }}
}
{{??}}
{{=$ifPassed}} = true;
{{# def.validateThen }}
{{?}}
#}}
{{
var $ifPassed = 'ifPassed' + it.level
, $currentBaseId = $it.baseId
, $shouldContinue;
}}
var {{=$ifPassed}};
{{~ $schema:$sch:$caseIndex }}
{{? $caseIndex && !$shouldContinue }}
if (!{{=$ifPassed}}) {
{{ $closingBraces+= '}'; }}
{{?}}
{{# def.switchCase }}
{{ $shouldContinue = $sch.continue }}
{{~}}
{{= $closingBraces }}
var {{=$valid}} = valid{{=$it.level}};
{{# def.cleanUp }}

View File

@ -1,3 +0,0 @@
These files are compiled dot templates from dot folder.
Do NOT edit them directly, edit the templates and run `npm run build` from main ajv-keywords folder.

View File

@ -1,176 +0,0 @@
'use strict';
module.exports = function generate__formatLimit(it, $keyword) {
var out = ' ';
var $lvl = it.level;
var $dataLvl = it.dataLevel;
var $schema = it.schema[$keyword];
var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
var $breakOnError = !it.opts.allErrors;
var $errorKeyword;
var $data = 'data' + ($dataLvl || '');
var $valid = 'valid' + $lvl;
out += 'var ' + ($valid) + ' = undefined;';
if (it.opts.format === false) {
out += ' ' + ($valid) + ' = true; ';
return out;
}
var $schemaFormat = it.schema.format,
$isDataFormat = it.opts.v5 && $schemaFormat.$data,
$closingBraces = '';
if ($isDataFormat) {
var $schemaValueFormat = it.util.getData($schemaFormat.$data, $dataLvl, it.dataPathArr),
$format = 'format' + $lvl,
$compare = 'compare' + $lvl;
out += ' var ' + ($format) + ' = formats[' + ($schemaValueFormat) + '] , ' + ($compare) + ' = ' + ($format) + ' && ' + ($format) + '.compare;';
} else {
var $format = it.formats[$schemaFormat];
if (!($format && $format.compare)) {
out += ' ' + ($valid) + ' = true; ';
return out;
}
var $compare = 'formats' + it.util.getProperty($schemaFormat) + '.compare';
}
var $isMax = $keyword == 'formatMaximum',
$exclusiveKeyword = 'formatExclusive' + ($isMax ? 'Maximum' : 'Minimum'),
$schemaExcl = it.schema[$exclusiveKeyword],
$isDataExcl = it.opts.v5 && $schemaExcl && $schemaExcl.$data,
$op = $isMax ? '<' : '>',
$result = 'result' + $lvl;
var $isData = it.opts.$data && $schema && $schema.$data,
$schemaValue;
if ($isData) {
out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
$schemaValue = 'schema' + $lvl;
} else {
$schemaValue = $schema;
}
if ($isDataExcl) {
var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr),
$exclusive = 'exclusive' + $lvl,
$opExpr = 'op' + $lvl,
$opStr = '\' + ' + $opExpr + ' + \'';
out += ' var schemaExcl' + ($lvl) + ' = ' + ($schemaValueExcl) + '; ';
$schemaValueExcl = 'schemaExcl' + $lvl;
out += ' if (typeof ' + ($schemaValueExcl) + ' != \'boolean\' && ' + ($schemaValueExcl) + ' !== undefined) { ' + ($valid) + ' = false; ';
var $errorKeyword = $exclusiveKeyword;
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ($errorKeyword || '_formatExclusiveLimit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
if (it.opts.messages !== false) {
out += ' , message: \'' + ($exclusiveKeyword) + ' should be boolean\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
out += ' } ';
if ($breakOnError) {
$closingBraces += '}';
out += ' else { ';
}
if ($isData) {
out += ' if (' + ($schemaValue) + ' === undefined) ' + ($valid) + ' = true; else if (typeof ' + ($schemaValue) + ' != \'string\') ' + ($valid) + ' = false; else { ';
$closingBraces += '}';
}
if ($isDataFormat) {
out += ' if (!' + ($compare) + ') ' + ($valid) + ' = true; else { ';
$closingBraces += '}';
}
out += ' var ' + ($result) + ' = ' + ($compare) + '(' + ($data) + ', ';
if ($isData) {
out += '' + ($schemaValue);
} else {
out += '' + (it.util.toQuotedString($schema));
}
out += ' ); if (' + ($result) + ' === undefined) ' + ($valid) + ' = false; var ' + ($exclusive) + ' = ' + ($schemaValueExcl) + ' === true; if (' + ($valid) + ' === undefined) { ' + ($valid) + ' = ' + ($exclusive) + ' ? ' + ($result) + ' ' + ($op) + ' 0 : ' + ($result) + ' ' + ($op) + '= 0; } if (!' + ($valid) + ') var op' + ($lvl) + ' = ' + ($exclusive) + ' ? \'' + ($op) + '\' : \'' + ($op) + '=\';';
} else {
var $exclusive = $schemaExcl === true,
$opStr = $op;
if (!$exclusive) $opStr += '=';
var $opExpr = '\'' + $opStr + '\'';
if ($isData) {
out += ' if (' + ($schemaValue) + ' === undefined) ' + ($valid) + ' = true; else if (typeof ' + ($schemaValue) + ' != \'string\') ' + ($valid) + ' = false; else { ';
$closingBraces += '}';
}
if ($isDataFormat) {
out += ' if (!' + ($compare) + ') ' + ($valid) + ' = true; else { ';
$closingBraces += '}';
}
out += ' var ' + ($result) + ' = ' + ($compare) + '(' + ($data) + ', ';
if ($isData) {
out += '' + ($schemaValue);
} else {
out += '' + (it.util.toQuotedString($schema));
}
out += ' ); if (' + ($result) + ' === undefined) ' + ($valid) + ' = false; if (' + ($valid) + ' === undefined) ' + ($valid) + ' = ' + ($result) + ' ' + ($op);
if (!$exclusive) {
out += '=';
}
out += ' 0;';
}
out += '' + ($closingBraces) + 'if (!' + ($valid) + ') { ';
var $errorKeyword = $keyword;
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ($errorKeyword || '_formatLimit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { comparison: ' + ($opExpr) + ', limit: ';
if ($isData) {
out += '' + ($schemaValue);
} else {
out += '' + (it.util.toQuotedString($schema));
}
out += ' , exclusive: ' + ($exclusive) + ' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should be ' + ($opStr) + ' "';
if ($isData) {
out += '\' + ' + ($schemaValue) + ' + \'';
} else {
out += '' + (it.util.escapeQuotes($schema));
}
out += '"\' ';
}
if (it.opts.verbose) {
out += ' , schema: ';
if ($isData) {
out += 'validate.schema' + ($schemaPath);
} else {
out += '' + (it.util.toQuotedString($schema));
}
out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
out += '}';
return out;
}

View File

@ -1,52 +0,0 @@
'use strict';
module.exports = function generate_patternRequired(it, $keyword) {
var out = ' ';
var $lvl = it.level;
var $dataLvl = it.dataLevel;
var $schema = it.schema[$keyword];
var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
var $breakOnError = !it.opts.allErrors;
var $errorKeyword;
var $data = 'data' + ($dataLvl || '');
var $valid = 'valid' + $lvl;
var $key = 'key' + $lvl,
$matched = 'patternMatched' + $lvl,
$closingBraces = '',
$ownProperties = it.opts.ownProperties;
out += 'var ' + ($valid) + ' = true;';
var arr1 = $schema;
if (arr1) {
var $pProperty, i1 = -1,
l1 = arr1.length - 1;
while (i1 < l1) {
$pProperty = arr1[i1 += 1];
out += ' var ' + ($matched) + ' = false; for (var ' + ($key) + ' in ' + ($data) + ') { ';
if ($ownProperties) {
out += ' if (!Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($key) + ')) continue; ';
}
out += ' ' + ($matched) + ' = ' + (it.usePattern($pProperty)) + '.test(' + ($key) + '); if (' + ($matched) + ') break; } ';
var $missingPattern = it.util.escapeQuotes($pProperty);
out += ' if (!' + ($matched) + ') { ' + ($valid) + ' = false; var err = '; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ($errorKeyword || 'patternRequired') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingPattern: \'' + ($missingPattern) + '\' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should have property matching pattern \\\'' + ($missingPattern) + '\\\'\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } ';
if ($breakOnError) {
$closingBraces += '}';
out += ' else { ';
}
}
}
out += '' + ($closingBraces);
return out;
}

View File

@ -1,129 +0,0 @@
'use strict';
module.exports = function generate_switch(it, $keyword) {
var out = ' ';
var $lvl = it.level;
var $dataLvl = it.dataLevel;
var $schema = it.schema[$keyword];
var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
var $breakOnError = !it.opts.allErrors;
var $errorKeyword;
var $data = 'data' + ($dataLvl || '');
var $valid = 'valid' + $lvl;
var $errs = 'errs__' + $lvl;
var $it = it.util.copy(it);
var $closingBraces = '';
$it.level++;
var $nextValid = 'valid' + $it.level;
var $ifPassed = 'ifPassed' + it.level,
$currentBaseId = $it.baseId,
$shouldContinue;
out += 'var ' + ($ifPassed) + ';';
var arr1 = $schema;
if (arr1) {
var $sch, $caseIndex = -1,
l1 = arr1.length - 1;
while ($caseIndex < l1) {
$sch = arr1[$caseIndex += 1];
if ($caseIndex && !$shouldContinue) {
out += ' if (!' + ($ifPassed) + ') { ';
$closingBraces += '}';
}
if ($sch.if && it.util.schemaHasRules($sch.if, it.RULES.all)) {
out += ' var ' + ($errs) + ' = errors; ';
var $wasComposite = it.compositeRule;
it.compositeRule = $it.compositeRule = true;
$it.createErrors = false;
$it.schema = $sch.if;
$it.schemaPath = $schemaPath + '[' + $caseIndex + '].if';
$it.errSchemaPath = $errSchemaPath + '/' + $caseIndex + '/if';
out += ' ' + (it.validate($it)) + ' ';
$it.baseId = $currentBaseId;
$it.createErrors = true;
it.compositeRule = $it.compositeRule = $wasComposite;
out += ' ' + ($ifPassed) + ' = valid' + ($it.level) + '; if (' + ($ifPassed) + ') { ';
if (typeof $sch.then == 'boolean') {
if ($sch.then === false) {
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ($errorKeyword || 'switch') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { caseIndex: ' + ($caseIndex) + ' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should pass "switch" keyword validation\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
}
out += ' var valid' + ($it.level) + ' = ' + ($sch.then) + '; ';
} else {
$it.schema = $sch.then;
$it.schemaPath = $schemaPath + '[' + $caseIndex + '].then';
$it.errSchemaPath = $errSchemaPath + '/' + $caseIndex + '/then';
out += ' ' + (it.validate($it)) + ' ';
$it.baseId = $currentBaseId;
}
out += ' } else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } } ';
} else {
out += ' ' + ($ifPassed) + ' = true; ';
if (typeof $sch.then == 'boolean') {
if ($sch.then === false) {
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ($errorKeyword || 'switch') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { caseIndex: ' + ($caseIndex) + ' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should pass "switch" keyword validation\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
}
out += ' var valid' + ($it.level) + ' = ' + ($sch.then) + '; ';
} else {
$it.schema = $sch.then;
$it.schemaPath = $schemaPath + '[' + $caseIndex + '].then';
$it.errSchemaPath = $errSchemaPath + '/' + $caseIndex + '/then';
out += ' ' + (it.validate($it)) + ' ';
$it.baseId = $currentBaseId;
}
}
$shouldContinue = $sch.continue
}
}
out += '' + ($closingBraces) + 'var ' + ($valid) + ' = valid' + ($it.level) + '; ';
out = it.util.cleanUpCode(out);
return out;
}

View File

@ -1,68 +0,0 @@
'use strict';
var sequences = {};
var DEFAULTS = {
timestamp: function() { return Date.now(); },
datetime: function() { return (new Date).toISOString(); },
date: function() { return (new Date).toISOString().slice(0, 10); },
time: function() { return (new Date).toISOString().slice(11); },
random: function() { return Math.random(); },
randomint: function (args) {
var limit = args && args.max || 2;
return function() { return Math.floor(Math.random() * limit); };
},
seq: function (args) {
var name = args && args.name || '';
sequences[name] = sequences[name] || 0;
return function() { return sequences[name]++; };
}
};
module.exports = function defFunc(ajv) {
defFunc.definition = {
compile: function (schema, parentSchema, it) {
var funcs = {};
for (var key in schema) {
var d = schema[key];
var func = getDefault(typeof d == 'string' ? d : d.func);
funcs[key] = func.length ? func(d.args) : func;
}
return it.opts.useDefaults && !it.compositeRule
? assignDefaults
: noop;
function assignDefaults(data) {
for (var prop in schema)
if (data[prop] === undefined) data[prop] = funcs[prop]();
return true;
}
function noop() { return true; }
},
DEFAULTS: DEFAULTS,
metaSchema: {
type: 'object',
additionalProperties: {
type: ['string', 'object'],
additionalProperties: false,
required: ['func', 'args'],
properties: {
func: { type: 'string' },
args: { type: 'object' }
}
}
}
};
ajv.addKeyword('dynamicDefaults', defFunc.definition);
return ajv;
function getDefault(d) {
var def = DEFAULTS[d];
if (def) return def;
throw new Error('invalid "dynamicDefaults" keyword property value: ' + d);
}
};

View File

@ -1,3 +0,0 @@
'use strict';
module.exports = require('./_formatLimit')('Maximum');

View File

@ -1,3 +0,0 @@
'use strict';
module.exports = require('./_formatLimit')('Minimum');

View File

@ -1,21 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
if (!ajv._opts.v5) console.warn('keywords if/then/else require v5 option');
defFunc.definition = {
macro: function (schema, parentSchema) {
if (parentSchema.then === undefined)
throw new Error('keyword "then" is absent');
var cases = [ { 'if': schema, 'then': parentSchema.then } ];
if (parentSchema.else !== undefined)
cases[1] = { 'then': parentSchema.else };
return { switch: cases };
}
};
ajv.addKeyword('if', defFunc.definition);
ajv.addKeyword('then');
ajv.addKeyword('else');
return ajv;
};

View File

@ -1,18 +0,0 @@
'use strict';
module.exports = {
'instanceof': require('./instanceof'),
propertyNames: require('./propertyNames'),
range: require('./range'),
regexp: require('./regexp'),
'typeof': require('./typeof'),
dynamicDefaults: require('./dynamicDefaults'),
'if': require('./if'),
prohibited: require('./prohibited'),
deepProperties: require('./deepProperties'),
deepRequired: require('./deepRequired')
// formatMinimum: require('./formatMinimum'),
// formatMaximum: require('./formatMaximum'),
// patternRequired: require('./patternRequired'),
// 'switch': require('./switch')
};

View File

@ -1,54 +0,0 @@
'use strict';
var CONSTRUCTORS = {
Object: Object,
Array: Array,
Function: Function,
Number: Number,
String: String,
Date: Date,
RegExp: RegExp
};
module.exports = function defFunc(ajv) {
/* istanbul ignore else */
if (typeof Buffer != 'undefined')
CONSTRUCTORS.Buffer = Buffer;
defFunc.definition = {
compile: function (schema) {
if (typeof schema == 'string') {
var Constructor = getConstructor(schema);
return function (data) {
return data instanceof Constructor;
};
}
var constructors = schema.map(getConstructor);
return function (data) {
for (var i=0; i<constructors.length; i++)
if (data instanceof constructors[i]) return true;
return false;
};
},
CONSTRUCTORS: CONSTRUCTORS,
metaSchema: {
anyOf: [
{ type: 'string' },
{
type: 'array',
items: { type: 'string' }
}
]
}
};
ajv.addKeyword('instanceof', defFunc.definition);
return ajv;
function getConstructor(c) {
var Constructor = CONSTRUCTORS[c];
if (Constructor) return Constructor;
throw new Error('invalid "instanceof" keyword value ' + c);
}
};

View File

@ -1,24 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
if (ajv.RULES.keywords.patternRequired)
return console.warn('Keyword patternRequired is already defined');
defFunc.definition = {
type: 'object',
inline: require('./dotjs/patternRequired'),
statements: true,
errors: 'full',
metaSchema: {
type: 'array',
items: {
type: 'string',
format: 'regex'
},
uniqueItems: true
}
};
ajv.addKeyword('patternRequired', defFunc.definition);
return ajv;
};

View File

@ -1,25 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'object',
macro: function (schema) {
if (schema.length == 0) return {};
if (schema.length == 1) return { not: { required: schema } };
var schemas = schema.map(function (prop) {
return { required: [prop] };
});
return { not: { anyOf: schemas } };
},
metaSchema: {
type: 'array',
items: {
type: 'string'
}
}
};
ajv.addKeyword('prohibited', defFunc.definition);
return ajv;
};

View File

@ -1,51 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'object',
compile: function(schema) {
var validate = ajv.compile(schema);
return ajv._opts.allErrors ? vAllErrors : vBreakOnError;
function vBreakOnError(data) {
for (var prop in data) {
if (!validate(prop)) {
vBreakOnError.errors = validate.errors;
addPropertyNameError(vBreakOnError.errors, prop);
return false;
}
}
return true;
}
function vAllErrors(data) {
var errors = [];
for (var prop in data) {
if (!validate(prop)) {
errors = errors.concat(validate.errors);
addPropertyNameError(errors, prop);
}
}
if (errors.length) vAllErrors.errors = errors;
return errors.length == 0;
}
function addPropertyNameError(errors, propName) {
errors.push({
keyword: 'propertyNames',
params: { propertyName: propName },
message: 'should have valid property name of "' + propName + '"'
});
}
},
metaSchema: {
$ref: ajv._opts.v5
? 'https://raw.githubusercontent.com/epoberezkin/ajv/master/lib/refs/json-schema-v5.json#'
: 'http://json-schema.org/draft-04/schema#'
},
errors: true
};
ajv.addKeyword('propertyNames', defFunc.definition);
return ajv;
};

View File

@ -1,39 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'number',
macro: function (schema, parentSchema) {
var min = schema[0]
, max = schema[1]
, exclusive = parentSchema.exclusiveRange;
validateRangeSchema(min, max, exclusive);
return {
minimum: min,
exclusiveMinimum: exclusive,
maximum: max,
exclusiveMaximum: exclusive
};
},
metaSchema: {
type: 'array',
minItems: 2,
maxItems: 2,
items: { type: 'number' }
}
};
ajv.addKeyword('range', defFunc.definition);
ajv.addKeyword('exclusiveRange');
return ajv;
function validateRangeSchema(min, max, exclusive) {
if (exclusive !== undefined && typeof exclusive != 'boolean')
throw new Error('Invalid schema for exclusiveRange keyword, should be boolean');
if (min > max || (exclusive && min == max))
throw new Error('There are no numbers in range');
}
};

View File

@ -1,36 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
defFunc.definition = {
type: 'string',
inline: function (it, keyword, schema) {
return getRegExp() + '.test(data' + (it.dataLevel || '') + ')';
function getRegExp() {
try {
if (typeof schema == 'object')
return new RegExp(schema.pattern, schema.flags);
var rx = schema.match(/^\/(.*)\/([gimy]*)$/);
if (rx) return new RegExp(rx[1], rx[2]);
throw new Error('cannot parse string into RegExp');
} catch(e) {
console.error('regular expression', schema, 'is invalid');
throw e;
}
}
},
metaSchema: {
type: ['string', 'object'],
properties: {
pattern: { type: 'string' },
flags: { type: 'string' }
},
required: ['pattern'],
additionalProperties: false
}
};
ajv.addKeyword('regexp', defFunc.definition);
return ajv;
};

View File

@ -1,39 +0,0 @@
'use strict';
module.exports = function defFunc(ajv) {
if (ajv.RULES.keywords.switch)
return console.warn('Keyword switch is already defined');
var metaSchemaUri = ajv._opts.v5
? 'https://raw.githubusercontent.com/epoberezkin/ajv/master/lib/refs/json-schema-v5.json#'
: 'http://json-schema.org/draft-04/schema#';
defFunc.definition = {
inline: require('./dotjs/switch'),
statements: true,
errors: 'full',
metaSchema: {
type: 'array',
items: {
required: [ 'then' ],
properties: {
'if': { $ref: metaSchemaUri },
'then': {
anyOf: [
{ type: 'boolean' },
{ $ref: metaSchemaUri }
]
},
'continue': { type: 'boolean' }
},
additionalProperties: false,
dependencies: {
'continue': [ 'if' ]
}
}
}
};
ajv.addKeyword('switch', defFunc.definition);
return ajv;
};

View File

@ -1,32 +0,0 @@
'use strict';
var KNOWN_TYPES = ['undefined', 'string', 'number', 'object', 'function', 'boolean', 'symbol'];
module.exports = function defFunc(ajv) {
defFunc.definition = {
inline: function (it, keyword, schema) {
var data = 'data' + (it.dataLevel || '');
if (typeof schema == 'string') return 'typeof ' + data + ' == "' + schema + '"';
schema = 'validate.schema' + it.schemaPath + '.' + keyword;
return schema + '.indexOf(typeof ' + data + ') >= 0';
},
metaSchema: {
anyOf: [
{
type: 'string',
enum: KNOWN_TYPES
},
{
type: 'array',
items: {
type: 'string',
enum: KNOWN_TYPES
}
}
]
}
};
ajv.addKeyword('typeof', defFunc.definition);
return ajv;
};

View File

@ -1,106 +0,0 @@
{
"_args": [
[
"ajv-keywords@^1.0.0",
"/Users/pmarsceill/_projects/just-the-docs/node_modules/table"
]
],
"_from": "ajv-keywords@>=1.0.0 <2.0.0",
"_id": "ajv-keywords@1.5.1",
"_inCache": true,
"_installable": true,
"_location": "/ajv-keywords",
"_nodeVersion": "4.6.1",
"_npmOperationalInternal": {
"host": "packages-18-east.internal.npmjs.com",
"tmp": "tmp/ajv-keywords-1.5.1.tgz_1485107517951_0.29220994655042887"
},
"_npmUser": {
"email": "e.poberezkin@me.com",
"name": "esp"
},
"_npmVersion": "2.15.9",
"_phantomChildren": {},
"_requested": {
"name": "ajv-keywords",
"raw": "ajv-keywords@^1.0.0",
"rawSpec": "^1.0.0",
"scope": null,
"spec": ">=1.0.0 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/table"
],
"_resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-1.5.1.tgz",
"_shasum": "314dd0a4b3368fad3dfcdc54ede6171b886daf3c",
"_shrinkwrap": null,
"_spec": "ajv-keywords@^1.0.0",
"_where": "/Users/pmarsceill/_projects/just-the-docs/node_modules/table",
"author": {
"name": "Evgeny Poberezkin"
},
"bugs": {
"url": "https://github.com/epoberezkin/ajv-keywords/issues"
},
"dependencies": {},
"description": "Custom JSON-Schema keywords for ajv validator",
"devDependencies": {
"ajv": "^4.10.0",
"ajv-pack": "^0.2.0",
"chai": "^3.5.0",
"coveralls": "^2.11.9",
"dot": "^1.1.1",
"eslint": "^3.6.0",
"glob": "^7.1.1",
"istanbul": "^0.4.3",
"js-beautify": "^1.6.4",
"json-schema-test": "^1.2.1",
"mocha": "^3.0.2",
"pre-commit": "^1.1.3",
"uuid": "^3.0.1"
},
"directories": {},
"dist": {
"shasum": "314dd0a4b3368fad3dfcdc54ede6171b886daf3c",
"tarball": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-1.5.1.tgz"
},
"files": [
"index.js",
"keywords"
],
"gitHead": "33c43a2b190c9929fe9e3e9a32a38dace146abf4",
"homepage": "https://github.com/epoberezkin/ajv-keywords#readme",
"keywords": [
"JSON-Schema",
"ajv",
"keywords"
],
"license": "MIT",
"main": "index.js",
"maintainers": [
{
"name": "esp",
"email": "e.poberezkin@me.com"
}
],
"name": "ajv-keywords",
"optionalDependencies": {},
"peerDependencies": {
"ajv": ">=4.10.0"
},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/epoberezkin/ajv-keywords.git"
},
"scripts": {
"build": "node node_modules/ajv/scripts/compile-dots.js node_modules/ajv/lib keywords",
"eslint": "eslint index.js keywords/*.js",
"prepublish": "npm run build",
"test": "npm run build && npm run eslint && npm run test-cov",
"test-cov": "istanbul cover -x 'spec/**' node_modules/mocha/bin/_mocha -- spec/*.spec.js -R spec",
"test-spec": "mocha spec/*.spec.js -R spec"
},
"version": "1.5.1"
}

20
node_modules/ajv/.tonic_example.js generated vendored
View File

@ -1,20 +0,0 @@
var Ajv = require('ajv');
var ajv = Ajv({allErrors: true});
var schema = {
"properties": {
"foo": { "type": "string" },
"bar": { "type": "number", "maximum": 3 }
}
};
var validate = ajv.compile(schema);
test({"foo": "abc", "bar": 2});
test({"foo": 2, "bar": 4});
function test(data) {
var valid = validate(data);
if (valid) console.log('Valid!');
else console.log('Invalid: ' + ajv.errorsText(validate.errors));
}

22
node_modules/ajv/LICENSE generated vendored
View File

@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Evgeny Poberezkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Some files were not shown because too many files have changed in this diff.