0.2.8 Adding Grunt/Grunt SASS task

Holger Koenemann 2015-02-06 09:40:32 +01:00
parent b0846efaae
commit 6e61f36d14
765 changed files with 243,579 additions and 1 deletion

2
.gitignore vendored

@ -14,3 +14,5 @@
.idea/watcherTasks.xml
.idea/watcherTasks.xml
.sass-cache

7
css/theme.css.map Normal file

File diff suppressed because one or more lines are too long

36
gruntfile.js Normal file

@ -0,0 +1,36 @@
module.exports = function(grunt) {
  // 2. Project configuration.
  grunt.initConfig({
    // Define what the Sass compile should do.
    sass: { // Task
      dist: { // Target
        options: { // Target options
          style: 'compressed' // minified CSS output
        },
        files: [{
          src: ['sass/theme.scss'],
          dest: 'css/theme.css',
          ext: '.css'
        }]
      }
    },
    // Recompile whenever anything under sass/ changes.
    watch: {
      css: {
        files: 'sass/*',
        tasks: ['sass']
      }
    }
  });
  // 1. Load the task libraries.
  grunt.loadNpmTasks('grunt-contrib-sass');
  grunt.loadNpmTasks('grunt-contrib-watch');
  // 3. Register a named task ('process') that runs the Sass compile.
  grunt.registerTask('process', ['sass']);
};
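For context, this Gruntfile is driven from the command line with grunt-cli; the sketch below shows one plausible local setup and the two invocations the config above supports. The install commands are illustrative assumptions, not part of this commit — grunt-contrib-sass also needs Ruby Sass on the PATH, as its README further down notes.

```shell
# Sketch only — assumed setup, not part of the committed files.
npm install -g grunt-cli                                              # provides the `grunt` command
npm install --save-dev grunt grunt-contrib-sass grunt-contrib-watch   # task runner + plugins
gem install sass                                                      # Ruby Sass, required by grunt-contrib-sass

grunt process   # one-off compile: sass/theme.scss -> css/theme.css (compressed)
grunt watch     # recompile whenever anything under sass/ changes
```

Keeping the one-off compile (`process`) separate from the long-running `watch` task makes a single clean build easy while still allowing a recompile-on-save workflow during development.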

13
node_modules/grunt-contrib-sass/.jshintrc generated vendored Normal file

@ -0,0 +1,13 @@
{
"curly": true,
"eqeqeq": true,
"immed": true,
"latedef": true,
"newcap": true,
"noarg": true,
"sub": true,
"undef": true,
"boss": true,
"eqnull": true,
"node": true
}

1
node_modules/grunt-contrib-sass/AUTHORS generated vendored Normal file

@ -0,0 +1 @@
Sindre Sorhus (http://github.com/sindresorhus)

88
node_modules/grunt-contrib-sass/CHANGELOG generated vendored Normal file

@ -0,0 +1,88 @@
v0.8.1:
date: 2014-08-24
changes:
- Fix `check` option.
v0.8.0:
date: 2014-08-21
changes:
- Support Sass 3.4 Source Map option.
- Add `update` option.
v0.7.4:
date: 2014-08-09
changes:
- Fix bundleExec option.
- Fix `os.cpus()` issue.
- Log `sass` command when `--verbose` flag is set.
v0.7.3:
date: 2014-03-06
changes:
- Only create empty dest files when they don't already exist.
v0.7.2:
date: 2014-02-02
changes:
- Fix error reporting when Sass is not available.
v0.7.1:
date: 2014-01-28
changes:
- Fix regression of Bundler support.
v0.7.0:
date: 2014-01-26
changes:
- Improve Windows support.
v0.6.0:
date: 2013-12-10
changes:
- Ignore files where filename have leading underscore.
v0.5.0:
date: 2013-08-21
changes:
- Add banner option.
v0.4.1:
date: 2013-07-06
changes:
- Use file.orig.src if file.src does not exist and return early to avoid passing non-existent files to sass binary.
v0.4.0:
date: 2013-06-30
changes:
- Rewrite task to be able to support Source Maps.
- Compile Sass files in parallel for better performance.
v0.3.0:
date: 2013-03-26
changes:
- Add support for `bundle exec`. Make sure `.css` files are compiled with SCSS.
v0.2.2:
date: 2013-02-15
changes:
- First official release for Grunt 0.4.0.
v0.2.2rc7:
date: 2013-01-25
changes:
- Updating grunt/gruntplugin dependencies to rc7.
- Changing in-development grunt/gruntplugin dependency versions from tilde version ranges to specific versions.
v0.2.2rc5:
date: 2013-01-09
changes:
- Updating to work with grunt v0.4.0rc5.
- Switching to this.files api.
- Add separator option.
v0.2.0:
date: 2012-11-05
changes:
- Grunt 0.4 compatibility.
- Improve error message when Sass binary couldn't be found
v0.1.3:
date: 2012-10-12
changes:
- Rename grunt-contrib-lib dep to grunt-lib-contrib.
v0.1.2:
date: 2012-10-08
changes:
- Fix regression for darwin.
v0.1.1:
date: 2012-10-05
changes:
- Windows support.
v0.1.0:
date: 2012-09-24
changes:
- Initial release.

1
node_modules/grunt-contrib-sass/CONTRIBUTING.md generated vendored Normal file

@ -0,0 +1 @@
Please see the [Contributing to grunt](http://gruntjs.com/contributing) guide for information on contributing to this project.

92
node_modules/grunt-contrib-sass/Gruntfile.js generated vendored Normal file

@ -0,0 +1,92 @@
/*
* grunt-contrib-sass
* http://gruntjs.com/
*
* Copyright (c) 2012 Sindre Sorhus, contributors
* Licensed under the MIT license.
*/
'use strict';
module.exports = function (grunt) {
grunt.initConfig({
pkg: {
name: 'grunt-contrib-sass'
},
jshint: {
options: {
jshintrc: '.jshintrc'
},
all: [
'Gruntfile.js',
'tasks/*.js',
'<%= nodeunit.tests %>'
]
},
clean: {
test: [
'test/tmp',
'.sass-cache'
]
},
nodeunit: {
tests: ['test/*_test.js']
},
sass: {
options: {
sourcemap: 'none'
},
compile: {
files: {
'test/tmp/scss.css': ['test/fixtures/compile.scss'],
'test/tmp/sass.css': ['test/fixtures/compile.sass'],
'test/tmp/css.css': ['test/fixtures/compile.css']
}
},
compileBanner: {
options: {
banner: '/* <%= pkg.name %> banner */'
},
files: {
'test/tmp/scss-banner.css': ['test/fixtures/banner.scss'],
'test/tmp/sass-banner.css': ['test/fixtures/banner.sass'],
'test/tmp/css-banner.css': ['test/fixtures/banner.css']
}
},
ignorePartials: {
cwd: 'test/fixtures/partials',
src: '*.scss',
dest: 'test/tmp',
expand: true,
ext: '.css'
},
updateTrue: {
options: {
update: true
},
files: [{
expand: true,
cwd: 'test/fixtures',
src: ['updatetrue.scss', 'updatetrue.sass', 'updatetrue.css'],
dest: 'test/tmp',
ext: '.css'
}]
}
}
});
grunt.loadTasks('tasks');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-nodeunit');
grunt.loadNpmTasks('grunt-contrib-internal');
grunt.registerTask('mkdir', grunt.file.mkdir);
grunt.registerTask('test', [
'clean',
'mkdir:tmp',
'sass',
'nodeunit',
'clean'
]);
grunt.registerTask('default', ['jshint', 'test', 'build-contrib']);
};

22
node_modules/grunt-contrib-sass/LICENSE-MIT generated vendored Normal file

@ -0,0 +1,22 @@
Copyright (c) 2012 Sindre Sorhus, contributors
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

286
node_modules/grunt-contrib-sass/README.md generated vendored Normal file

@ -0,0 +1,286 @@
# grunt-contrib-sass v0.8.1 [![Build Status: Linux](https://travis-ci.org/gruntjs/grunt-contrib-sass.png?branch=master)](https://travis-ci.org/gruntjs/grunt-contrib-sass)
> Compile Sass to CSS
## Getting Started
This plugin requires Grunt `>=0.4.0`
If you haven't used [Grunt](http://gruntjs.com/) before, be sure to check out the [Getting Started](http://gruntjs.com/getting-started) guide, as it explains how to create a [Gruntfile](http://gruntjs.com/sample-gruntfile) as well as install and use Grunt plugins. Once you're familiar with that process, you may install this plugin with this command:
```shell
npm install grunt-contrib-sass --save-dev
```
Once the plugin has been installed, it may be enabled inside your Gruntfile with this line of JavaScript:
```js
grunt.loadNpmTasks('grunt-contrib-sass');
```
## Sass task
_Run this task with the `grunt sass` command._
[Sass](http://sass-lang.com) is a preprocessor that adds nested rules, variables, mixins and functions, selector inheritance, and more to CSS. Sass files compile into well-formatted, standard CSS to use in your site or application.
This task requires you to have [Ruby](http://www.ruby-lang.org/en/downloads/) and [Sass](http://sass-lang.com/download.html) installed. If you're on OS X or Linux you probably already have Ruby installed; test with `ruby -v` in your terminal. When you've confirmed you have Ruby installed, run `gem install sass` to install Sass.
Note: Files that begin with "_" are ignored even if they match the globbing pattern. This is done to match the expected [Sass partial behaviour](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#partials).
### Options
#### sourcemap
Type: `String`
Default: `auto`
Values:
- `auto` - relative paths where possible, file URIs elsewhere
- `file` - always absolute file URIs
- `inline` - include the source text in the sourcemap
- `none` - no sourcemaps
**Requires Sass 3.4.0, which can be installed with `gem install sass`**
#### trace
Type: `Boolean`
Default: `false`
Show a full traceback on error.
#### unixNewlines
Type: `Boolean`
Default: `false` on Windows, otherwise `true`
Force Unix newlines in written files.
#### check
Type: `Boolean`
Default: `false`
Just check the Sass syntax, does not evaluate and write the output.
#### style
Type: `String`
Default: `nested`
Output style. Can be `nested`, `compact`, `compressed`, `expanded`.
#### precision
Type: `Number`
Default: `5`
How many digits of precision to use when outputting decimal numbers.
#### quiet
Type: `Boolean`
Default: `false`
Silence warnings and status messages during compilation.
#### compass
Type: `Boolean`
Default: `false`
Make Compass imports available and load project configuration (`config.rb` located close to the `Gruntfile.js`).
#### debugInfo
Type: `Boolean`
Default: `false`
Emit extra information in the generated CSS that can be used by the FireSass Firebug plugin.
#### lineNumbers
Type: `Boolean`
Default: `false`
Emit comments in the generated CSS indicating the corresponding source line.
#### loadPath
Type: `String|Array`
Add a (or multiple) Sass import path.
#### require
Type: `String|Array`
Require a (or multiple) Ruby library before running Sass.
#### cacheLocation
Type: `String`
Default: `.sass-cache`
The path to put cached Sass files.
#### noCache
Type: `Boolean`
Default: `false`
Don't cache to sassc files.
#### bundleExec
Type: `Boolean`
Default: `false`
Run `sass` with [bundle exec](http://gembundler.com/man/bundle-exec.1.html): `bundle exec sass`.
#### banner
Type: `String`
Prepend the specified string to the output file. Useful for licensing information.
*Can't be used if you use the `sourcemap` option.*
#### update
Type: `Boolean`
Default: `false`
Only compile changed files.
### Examples
#### Example config
```javascript
grunt.initConfig({
sass: { // Task
dist: { // Target
options: { // Target options
style: 'expanded'
},
files: { // Dictionary of files
'main.css': 'main.scss', // 'destination': 'source'
'widgets.css': 'widgets.scss'
}
}
}
});
grunt.loadNpmTasks('grunt-contrib-sass');
grunt.registerTask('default', ['sass']);
```
#### Compile
```javascript
grunt.initConfig({
sass: {
dist: {
files: {
'main.css': 'main.scss'
}
}
}
});
```
#### Concat and compile
Instead of concatenating the files, just `@import` them into another `.sass` file eg. `main.scss`.
#### Compile multiple files
You can specify multiple `destination: source` items in `files`.
```javascript
grunt.initConfig({
sass: {
dist: {
files: {
'main.css': 'main.scss',
'widgets.css': 'widgets.scss'
}
}
}
});
```
#### Compile files in a directory
Instead of naming all files you want to compile, you can use the `expand` property allowing you to specify a directory. More information available in the [grunt docs](http://gruntjs.com/configuring-tasks) - `Building the files object dynamically`.
```javascript
grunt.initConfig({
sass: {
dist: {
files: [{
expand: true,
cwd: 'styles',
src: ['*.scss'],
dest: '../public',
ext: '.css'
}]
}
}
});
```
## Release History
* 2014-08-24   v0.8.1   Fix `check` option.
* 2014-08-21   v0.8.0   Support Sass 3.4 Source Map option. Add `update` option.
* 2014-08-09   v0.7.4   Fix bundleExec option. Fix `os.cpus()` issue. Log `sass` command when `--verbose` flag is set.
* 2014-03-06   v0.7.3   Only create empty dest files when they don't already exist.
* 2014-02-02   v0.7.2   Fix error reporting when Sass is not available.
* 2014-01-28   v0.7.1   Fix regression of Bundler support.
* 2014-01-26   v0.7.0   Improve Windows support.
* 2013-12-10   v0.6.0   Ignore files where filename have leading underscore.
* 2013-08-21   v0.5.0   Add banner option.
* 2013-07-06   v0.4.1   Use file.orig.src if file.src does not exist and return early to avoid passing non-existent files to sass binary.
* 2013-06-30   v0.4.0   Rewrite task to be able to support Source Maps. Compile Sass files in parallel for better performance.
* 2013-03-26   v0.3.0   Add support for `bundle exec`. Make sure `.css` files are compiled with SCSS.
* 2013-02-15   v0.2.2   First official release for Grunt 0.4.0.
* 2013-01-25   v0.2.2rc7   Updating grunt/gruntplugin dependencies to rc7. Changing in-development grunt/gruntplugin dependency versions from tilde version ranges to specific versions.
* 2013-01-09   v0.2.2rc5   Updating to work with grunt v0.4.0rc5. Switching to this.files api. Add separator option.
* 2012-11-05   v0.2.0   Grunt 0.4 compatibility. Improve error message when Sass binary couldn't be found
* 2012-10-12   v0.1.3   Rename grunt-contrib-lib dep to grunt-lib-contrib.
* 2012-10-08   v0.1.2   Fix regression for darwin.
* 2012-10-05   v0.1.1   Windows support.
* 2012-09-24   v0.1.0   Initial release.
---
Task submitted by [Sindre Sorhus](http://github.com/sindresorhus)
*This file was generated on Sun Aug 24 2014 16:51:41.*

79
node_modules/grunt-contrib-sass/docs/sass-examples.md generated vendored Normal file

@ -0,0 +1,79 @@
# Examples
## Example config
```javascript
grunt.initConfig({
sass: { // Task
dist: { // Target
options: { // Target options
style: 'expanded'
},
files: { // Dictionary of files
'main.css': 'main.scss', // 'destination': 'source'
'widgets.css': 'widgets.scss'
}
}
}
});
grunt.loadNpmTasks('grunt-contrib-sass');
grunt.registerTask('default', ['sass']);
```
## Compile
```javascript
grunt.initConfig({
sass: {
dist: {
files: {
'main.css': 'main.scss'
}
}
}
});
```
## Concat and compile
Instead of concatenating the files, just `@import` them into another `.sass` file eg. `main.scss`.
## Compile multiple files
You can specify multiple `destination: source` items in `files`.
```javascript
grunt.initConfig({
sass: {
dist: {
files: {
'main.css': 'main.scss',
'widgets.css': 'widgets.scss'
}
}
}
});
```
## Compile files in a directory
Instead of naming all files you want to compile, you can use the `expand` property allowing you to specify a directory. More information available in the [grunt docs](http://gruntjs.com/configuring-tasks) - `Building the files object dynamically`.
```javascript
grunt.initConfig({
sass: {
dist: {
files: [{
expand: true,
cwd: 'styles',
src: ['*.scss'],
dest: '../public',
ext: '.css'
}]
}
}
});
```

143
node_modules/grunt-contrib-sass/docs/sass-options.md generated vendored Normal file

@ -0,0 +1,143 @@
# Options
## sourcemap
Type: `String`
Default: `auto`
Values:
- `auto` - relative paths where possible, file URIs elsewhere
- `file` - always absolute file URIs
- `inline` - include the source text in the sourcemap
- `none` - no sourcemaps
**Requires Sass 3.4.0, which can be installed with `gem install sass`**
## trace
Type: `Boolean`
Default: `false`
Show a full traceback on error.
## unixNewlines
Type: `Boolean`
Default: `false` on Windows, otherwise `true`
Force Unix newlines in written files.
## check
Type: `Boolean`
Default: `false`
Just check the Sass syntax, does not evaluate and write the output.
## style
Type: `String`
Default: `nested`
Output style. Can be `nested`, `compact`, `compressed`, `expanded`.
## precision
Type: `Number`
Default: `5`
How many digits of precision to use when outputting decimal numbers.
## quiet
Type: `Boolean`
Default: `false`
Silence warnings and status messages during compilation.
## compass
Type: `Boolean`
Default: `false`
Make Compass imports available and load project configuration (`config.rb` located close to the `Gruntfile.js`).
## debugInfo
Type: `Boolean`
Default: `false`
Emit extra information in the generated CSS that can be used by the FireSass Firebug plugin.
## lineNumbers
Type: `Boolean`
Default: `false`
Emit comments in the generated CSS indicating the corresponding source line.
## loadPath
Type: `String|Array`
Add a (or multiple) Sass import path.
## require
Type: `String|Array`
Require a (or multiple) Ruby library before running Sass.
## cacheLocation
Type: `String`
Default: `.sass-cache`
The path to put cached Sass files.
## noCache
Type: `Boolean`
Default: `false`
Don't cache to sassc files.
## bundleExec
Type: `Boolean`
Default: `false`
Run `sass` with [bundle exec](http://gembundler.com/man/bundle-exec.1.html): `bundle exec sass`.
## banner
Type: `String`
Prepend the specified string to the output file. Useful for licensing information.
*Can't be used if you use the `sourcemap` option.*
## update
Type: `Boolean`
Default: `false`
Only compile changed files.


@ -0,0 +1,5 @@
[Sass](http://sass-lang.com) is a preprocessor that adds nested rules, variables, mixins and functions, selector inheritance, and more to CSS. Sass files compile into well-formatted, standard CSS to use in your site or application.
This task requires you to have [Ruby](http://www.ruby-lang.org/en/downloads/) and [Sass](http://sass-lang.com/download.html) installed. If you're on OS X or Linux you probably already have Ruby installed; test with `ruby -v` in your terminal. When you've confirmed you have Ruby installed, run `gem install sass` to install Sass.
Note: Files that begin with "_" are ignored even if they match the globbing pattern. This is done to match the expected [Sass partial behaviour](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#partials).

1
node_modules/grunt-contrib-sass/node_modules/.bin/which generated vendored Symbolic link

@ -0,0 +1 @@
../which/bin/which


@ -0,0 +1 @@
../win-spawn/bin/win-spawn


@ -0,0 +1,3 @@
language: node_js
node_js:
- "0.10"


@ -0,0 +1,19 @@
Copyright (c) 2010-2014 Caolan McMahon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

File diff suppressed because it is too large


@ -0,0 +1,11 @@
{
"name": "async",
"repo": "caolan/async",
"description": "Higher-order functions and common patterns for asynchronous code",
"version": "0.1.23",
"keywords": [],
"dependencies": {},
"development": {},
"main": "lib/async.js",
"scripts": [ "lib/async.js" ]
}

1123
node_modules/grunt-contrib-sass/node_modules/async/lib/async.js generated vendored Executable file

File diff suppressed because it is too large


@ -0,0 +1,60 @@
{
"name": "async",
"description": "Higher-order functions and common patterns for asynchronous code",
"main": "./lib/async",
"author": {
"name": "Caolan McMahon"
},
"version": "0.9.0",
"repository": {
"type": "git",
"url": "https://github.com/caolan/async.git"
},
"bugs": {
"url": "https://github.com/caolan/async/issues"
},
"licenses": [
{
"type": "MIT",
"url": "https://github.com/caolan/async/raw/master/LICENSE"
}
],
"devDependencies": {
"nodeunit": ">0.0.0",
"uglify-js": "1.2.x",
"nodelint": ">0.0.0"
},
"jam": {
"main": "lib/async.js",
"include": [
"lib/async.js",
"README.md",
"LICENSE"
]
},
"scripts": {
"test": "nodeunit test/test-async.js"
},
"homepage": "https://github.com/caolan/async",
"_id": "async@0.9.0",
"dist": {
"shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
"tarball": "http://registry.npmjs.org/async/-/async-0.9.0.tgz"
},
"_from": "async@^0.9.0",
"_npmVersion": "1.4.3",
"_npmUser": {
"name": "caolan",
"email": "caolan.mcmahon@gmail.com"
},
"maintainers": [
{
"name": "caolan",
"email": "caolan@caolanmcmahon.com"
}
],
"directories": {},
"_shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
"_resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,95 @@
'use strict';
var escapeStringRegexp = require('escape-string-regexp');
var ansiStyles = require('ansi-styles');
var stripAnsi = require('strip-ansi');
var hasAnsi = require('has-ansi');
var supportsColor = require('supports-color');
var defineProps = Object.defineProperties;
var chalk = module.exports;
function build(_styles) {
var builder = function builder() {
return applyStyle.apply(builder, arguments);
};
builder._styles = _styles;
// __proto__ is used because we must return a function, but there is
// no way to create a function with a different prototype.
builder.__proto__ = proto;
return builder;
}
var styles = (function () {
var ret = {};
ansiStyles.grey = ansiStyles.gray;
Object.keys(ansiStyles).forEach(function (key) {
ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g');
ret[key] = {
get: function () {
return build(this._styles.concat(key));
}
};
});
return ret;
})();
var proto = defineProps(function chalk() {}, styles);
function applyStyle() {
// support varags, but simply cast to string in case there's only one arg
var args = arguments;
var argsLen = args.length;
var str = argsLen !== 0 && String(arguments[0]);
if (argsLen > 1) {
// don't slice `arguments`, it prevents v8 optimizations
for (var a = 1; a < argsLen; a++) {
str += ' ' + args[a];
}
}
if (!chalk.enabled || !str) {
return str;
}
/*jshint validthis: true*/
var nestedStyles = this._styles;
for (var i = 0; i < nestedStyles.length; i++) {
var code = ansiStyles[nestedStyles[i]];
// Replace any instances already present with a re-opening code
// otherwise only the part of the string until said closing code
// will be colored, and the rest will simply be 'plain'.
str = code.open + str.replace(code.closeRe, code.open) + code.close;
}
return str;
}
function init() {
var ret = {};
Object.keys(styles).forEach(function (name) {
ret[name] = {
get: function () {
return build([name]);
}
};
});
return ret;
}
defineProps(chalk, init());
chalk.styles = ansiStyles;
chalk.hasColor = hasAnsi;
chalk.stripColor = stripAnsi;
chalk.supportsColor = supportsColor;
// detect mode if not set manually
if (chalk.enabled === undefined) {
chalk.enabled = chalk.supportsColor;
}


@ -0,0 +1 @@
../has-ansi/cli.js


@ -0,0 +1 @@
../strip-ansi/cli.js


@ -0,0 +1 @@
../supports-color/cli.js


@ -0,0 +1,40 @@
'use strict';
var styles = module.exports;
var codes = {
reset: [0, 0],
bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
dim: [2, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
hidden: [8, 28],
strikethrough: [9, 29],
black: [30, 39],
red: [31, 39],
green: [32, 39],
yellow: [33, 39],
blue: [34, 39],
magenta: [35, 39],
cyan: [36, 39],
white: [37, 39],
gray: [90, 39],
bgBlack: [40, 49],
bgRed: [41, 49],
bgGreen: [42, 49],
bgYellow: [43, 49],
bgBlue: [44, 49],
bgMagenta: [45, 49],
bgCyan: [46, 49],
bgWhite: [47, 49]
};
Object.keys(codes).forEach(function (key) {
var val = codes[key];
var style = styles[key] = {};
style.open = '\u001b[' + val[0] + 'm';
style.close = '\u001b[' + val[1] + 'm';
});


@ -0,0 +1,74 @@
{
"name": "ansi-styles",
"version": "1.1.0",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/ansi-styles"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/ansi-styles/issues"
},
"homepage": "https://github.com/sindresorhus/ansi-styles",
"_id": "ansi-styles@1.1.0",
"_shasum": "eaecbf66cd706882760b2f4691582b8f55d7a7de",
"_from": "ansi-styles@^1.1.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "eaecbf66cd706882760b2f4691582b8f55d7a7de",
"tarball": "http://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,70 @@
# ansi-styles [![Build Status](https://travis-ci.org/sindresorhus/ansi-styles.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-styles)
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
You probably want the higher-level [chalk](https://github.com/sindresorhus/chalk) module for styling your strings.
![screenshot](screenshot.png)
## Install
```sh
$ npm install --save ansi-styles
```
## Usage
```js
var ansi = require('ansi-styles');
console.log(ansi.green.open + 'Hello world!' + ansi.green.close);
```
## API
Each style has an `open` and `close` property.
## Styles
### General
- `reset`
- `bold`
- `dim`
- `italic` *(not widely supported)*
- `underline`
- `inverse`
- `hidden`
- `strikethrough` *(not widely supported)*
### Text colors
- `black`
- `red`
- `green`
- `yellow`
- `blue`
- `magenta`
- `cyan`
- `white`
- `gray`
### Background colors
- `bgBlack`
- `bgRed`
- `bgGreen`
- `bgYellow`
- `bgBlue`
- `bgMagenta`
- `bgCyan`
- `bgWhite`
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,11 @@
'use strict';
var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;
module.exports = function (str) {
if (typeof str !== 'string') {
throw new TypeError('Expected a string');
}
return str.replace(matchOperatorsRe, '\\$&');
};


@ -0,0 +1,69 @@
{
"name": "escape-string-regexp",
"version": "1.0.2",
"description": "Escape RegExp special characters",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/sindresorhus/escape-string-regexp"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.8.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js"
],
"keywords": [
"regex",
"regexp",
"re",
"regular",
"expression",
"escape",
"string",
"str",
"special",
"characters"
],
"devDependencies": {
"mocha": "*"
},
"gitHead": "0587ee0ee03ea3fcbfa3c15cf67b47f214e20987",
"bugs": {
"url": "https://github.com/sindresorhus/escape-string-regexp/issues"
},
"homepage": "https://github.com/sindresorhus/escape-string-regexp",
"_id": "escape-string-regexp@1.0.2",
"_shasum": "4dbc2fe674e71949caf3fb2695ce7f2dc1d9a8d1",
"_from": "escape-string-regexp@^1.0.0",
"_npmVersion": "1.4.23",
"_npmUser": {
"name": "jbnicolai",
"email": "jappelman@xebia.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
{
"name": "jbnicolai",
"email": "jappelman@xebia.com"
}
],
"dist": {
"shasum": "4dbc2fe674e71949caf3fb2695ce7f2dc1d9a8d1",
"tarball": "http://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.2.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.2.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,27 @@
# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp)
> Escape RegExp special characters
## Install
```sh
$ npm install --save escape-string-regexp
```
## Usage
```js
var escapeStringRegexp = require('escape-string-regexp');
var escapedString = escapeStringRegexp('how much $ for a unicorn?');
//=> how much \$ for a unicorn\?
new RegExp(escapedString);
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,53 @@
#!/usr/bin/env node
'use strict';
var pkg = require('./package.json');
var hasAnsi = require('./');
var input = process.argv[2];
function stdin(cb) {
var ret = '';
process.stdin.setEncoding('utf8');
process.stdin.on('data', function (data) {
ret += data;
});
process.stdin.on('end', function () {
cb(ret);
});
}
function help() {
console.log([
pkg.description,
'',
'Usage',
' $ has-ansi <string>',
' $ echo <string> | has-ansi',
'',
'Exits with code 0 if input has ANSI escape codes and 1 if not'
].join('\n'));
}
function init(data) {
process.exit(hasAnsi(data) ? 0 : 1);
}
if (process.argv.indexOf('--help') !== -1) {
help();
return;
}
if (process.argv.indexOf('--version') !== -1) {
console.log(pkg.version);
return;
}
if (process.stdin.isTTY) {
if (!input) {
help();
return;
}
init(input);
} else {
stdin(init);
}


@ -0,0 +1,4 @@
'use strict';
var ansiRegex = require('ansi-regex');
var re = new RegExp(ansiRegex().source); // remove the `g` flag
module.exports = re.test.bind(re);


@ -0,0 +1,4 @@
'use strict';
module.exports = function () {
return /\u001b\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]/g;
};


@ -0,0 +1,79 @@
{
"name": "ansi-regex",
"version": "0.2.1",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/ansi-regex"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/ansi-regex/issues"
},
"homepage": "https://github.com/sindresorhus/ansi-regex",
"_id": "ansi-regex@0.2.1",
"_shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
"_from": "ansi-regex@^0.2.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
"tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,33 @@
# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```sh
$ npm install --save ansi-regex
```
## Usage
```js
var ansiRegex = require('ansi-regex');
ansiRegex().test('\u001b[4mcake\u001b[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
//=> ['\u001b[4m', '\u001b[0m']
```
*It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.*
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,85 @@
{
"name": "has-ansi",
"version": "0.1.0",
"description": "Check if a string has ANSI escape codes",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/has-ansi"
},
"bin": {
"has-ansi": "cli.js"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js",
"cli.js"
],
"keywords": [
"cli",
"bin",
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"string",
"tty",
"escape",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern",
"has"
],
"dependencies": {
"ansi-regex": "^0.2.0"
},
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/has-ansi/issues"
},
"homepage": "https://github.com/sindresorhus/has-ansi",
"_id": "has-ansi@0.1.0",
"_shasum": "84f265aae8c0e6a88a12d7022894b7568894c62e",
"_from": "has-ansi@^0.1.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "84f265aae8c0e6a88a12d7022894b7568894c62e",
"tarball": "http://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,45 @@
# has-ansi [![Build Status](https://travis-ci.org/sindresorhus/has-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/has-ansi)
> Check if a string has [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```sh
$ npm install --save has-ansi
```
## Usage
```js
var hasAnsi = require('has-ansi');
hasAnsi('\u001b[4mcake\u001b[0m');
//=> true
hasAnsi('cake');
//=> false
```
## CLI
```sh
$ npm install --global has-ansi
```
```
$ has-ansi --help
Usage
$ has-ansi <string>
$ echo <string> | has-ansi
Exits with code 0 if input has ANSI escape codes and 1 if not
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,39 @@
#!/usr/bin/env node
'use strict';
var fs = require('fs');
var pkg = require('./package.json');
var strip = require('./');
var input = process.argv[2];
function help() {
console.log([
pkg.description,
'',
'Usage',
' $ strip-ansi <input-file> > <output-file>',
' $ cat <input-file> | strip-ansi > <output-file>',
'',
'Example',
' $ strip-ansi unicorn.txt > unicorn-stripped.txt'
].join('\n'));
}
if (process.argv.indexOf('--help') !== -1) {
help();
return;
}
if (process.argv.indexOf('--version') !== -1) {
console.log(pkg.version);
return;
}
if (input) {
process.stdout.write(strip(fs.readFileSync(input, 'utf8')));
return;
}
process.stdin.setEncoding('utf8');
process.stdin.on('data', function (data) {
process.stdout.write(strip(data));
});


@ -0,0 +1,6 @@
'use strict';
var ansiRegex = require('ansi-regex')();
module.exports = function (str) {
return typeof str === 'string' ? str.replace(ansiRegex, '') : str;
};


@ -0,0 +1,4 @@
'use strict';
module.exports = function () {
return /\u001b\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]/g;
};


@ -0,0 +1,79 @@
{
"name": "ansi-regex",
"version": "0.2.1",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/ansi-regex"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/ansi-regex/issues"
},
"homepage": "https://github.com/sindresorhus/ansi-regex",
"_id": "ansi-regex@0.2.1",
"_shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
"_from": "ansi-regex@^0.2.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
"tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,33 @@
# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```sh
$ npm install --save ansi-regex
```
## Usage
```js
var ansiRegex = require('ansi-regex');
ansiRegex().test('\u001b[4mcake\u001b[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
//=> ['\u001b[4m', '\u001b[0m']
```
*It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.*
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,84 @@
{
"name": "strip-ansi",
"version": "0.3.0",
"description": "Strip ANSI escape codes",
"license": "MIT",
"bin": {
"strip-ansi": "cli.js"
},
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/strip-ansi"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js",
"cli.js"
],
"keywords": [
"strip",
"trim",
"remove",
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"dependencies": {
"ansi-regex": "^0.2.1"
},
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/strip-ansi/issues"
},
"homepage": "https://github.com/sindresorhus/strip-ansi",
"_id": "strip-ansi@0.3.0",
"_shasum": "25f48ea22ca79187f3174a4db8759347bb126220",
"_from": "strip-ansi@^0.3.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "25f48ea22ca79187f3174a4db8759347bb126220",
"tarball": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,43 @@
# strip-ansi [![Build Status](https://travis-ci.org/sindresorhus/strip-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-ansi)
> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```sh
$ npm install --save strip-ansi
```
## Usage
```js
var stripAnsi = require('strip-ansi');
stripAnsi('\x1b[4mcake\x1b[0m');
//=> 'cake'
```
## CLI
```sh
$ npm install --global strip-ansi
```
```sh
$ strip-ansi --help
Usage
$ strip-ansi <input-file> > <output-file>
$ cat <input-file> | strip-ansi > <output-file>
Example
$ strip-ansi unicorn.txt > unicorn-stripped.txt
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,28 @@
#!/usr/bin/env node
'use strict';
var pkg = require('./package.json');
var supportsColor = require('./');
var input = process.argv[2];
function help() {
console.log([
pkg.description,
'',
'Usage',
' $ supports-color',
'',
'Exits with code 0 if color is supported and 1 if not'
].join('\n'));
}
if (!input || process.argv.indexOf('--help') !== -1) {
help();
return;
}
if (process.argv.indexOf('--version') !== -1) {
console.log(pkg.version);
return;
}
process.exit(supportsColor ? 0 : 1);


@ -0,0 +1,32 @@
'use strict';
module.exports = (function () {
if (process.argv.indexOf('--no-color') !== -1) {
return false;
}
if (process.argv.indexOf('--color') !== -1) {
return true;
}
if (process.stdout && !process.stdout.isTTY) {
return false;
}
if (process.platform === 'win32') {
return true;
}
if ('COLORTERM' in process.env) {
return true;
}
if (process.env.TERM === 'dumb') {
return false;
}
if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) {
return true;
}
return false;
})();


@ -0,0 +1,78 @@
{
"name": "supports-color",
"version": "0.2.0",
"description": "Detect whether a terminal supports color",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/supports-color"
},
"bin": {
"supports-color": "cli.js"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha"
},
"files": [
"index.js",
"cli.js"
],
"keywords": [
"cli",
"bin",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"ansi",
"styles",
"tty",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"support",
"supports",
"capability",
"detect"
],
"devDependencies": {
"mocha": "*"
},
"bugs": {
"url": "https://github.com/sindresorhus/supports-color/issues"
},
"homepage": "https://github.com/sindresorhus/supports-color",
"_id": "supports-color@0.2.0",
"_shasum": "d92de2694eb3f67323973d7ae3d8b55b4c22190a",
"_from": "supports-color@^0.2.0",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "d92de2694eb3f67323973d7ae3d8b55b4c22190a",
"tarball": "http://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,44 @@
# supports-color [![Build Status](https://travis-ci.org/sindresorhus/supports-color.svg?branch=master)](https://travis-ci.org/sindresorhus/supports-color)
> Detect whether a terminal supports color
## Install
```sh
$ npm install --save supports-color
```
## Usage
```js
var supportsColor = require('supports-color');
if (supportsColor) {
console.log('Terminal supports color');
}
```
It obeys the `--color` and `--no-color` CLI flags.
## CLI
```sh
$ npm install --global supports-color
```
```sh
$ supports-color --help
Usage
$ supports-color
# Exits with code 0 if color is supported and 1 if not
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,82 @@
{
"name": "chalk",
"version": "0.5.1",
"description": "Terminal string styling done right. Created because the `colors` module does some really horrible things.",
"license": "MIT",
"repository": {
"type": "git",
"url": "git://github.com/sindresorhus/chalk"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
{
"name": "jbnicolai",
"email": "jappelman@xebia.com"
}
],
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "mocha",
"bench": "matcha benchmark.js"
},
"files": [
"index.js"
],
"keywords": [
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"ansi",
"styles",
"tty",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"dependencies": {
"ansi-styles": "^1.1.0",
"escape-string-regexp": "^1.0.0",
"has-ansi": "^0.1.0",
"strip-ansi": "^0.3.0",
"supports-color": "^0.2.0"
},
"devDependencies": {
"matcha": "^0.5.0",
"mocha": "*"
},
"gitHead": "994758f01293f1fdcf63282e9917cb9f2cfbdaac",
"bugs": {
"url": "https://github.com/sindresorhus/chalk/issues"
},
"homepage": "https://github.com/sindresorhus/chalk",
"_id": "chalk@0.5.1",
"_shasum": "663b3a648b68b55d04690d49167aa837858f2174",
"_from": "chalk@^0.5.1",
"_npmVersion": "1.4.14",
"_npmUser": {
"name": "jbnicolai",
"email": "jappelman@xebia.com"
},
"dist": {
"shasum": "663b3a648b68b55d04690d49167aa837858f2174",
"tarball": "http://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,175 @@
# <img width="300" src="https://cdn.rawgit.com/sindresorhus/chalk/77ae94f63ab1ac61389b190e5a59866569d1a376/logo.svg" alt="chalk">
> Terminal string styling done right
[![Build Status](https://travis-ci.org/sindresorhus/chalk.svg?branch=master)](https://travis-ci.org/sindresorhus/chalk)
![](http://img.shields.io/badge/unicorn-approved-ff69b4.svg)
[colors.js](https://github.com/Marak/colors.js) is currently the most popular string styling module, but it has serious deficiencies like extending String.prototype which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough.
**Chalk is a clean and focused alternative.**
![screenshot](https://github.com/sindresorhus/ansi-styles/raw/master/screenshot.png)
## Why
- Highly performant
- Doesn't extend String.prototype
- Expressive API
- Ability to nest styles
- Clean and focused
- Auto-detects color support
- Actively maintained
- [Used by 1000+ modules](https://npmjs.org/browse/depended/chalk)
## Install
```sh
$ npm install --save chalk
```
## Usage
Chalk comes with an easy to use composable API where you just chain and nest the styles you want.
```js
var chalk = require('chalk');
// style a string
console.log( chalk.blue('Hello world!') );
// combine styled and normal strings
console.log( chalk.blue('Hello'), 'World' + chalk.red('!') );
// compose multiple styles using the chainable API
console.log( chalk.blue.bgRed.bold('Hello world!') );
// pass in multiple arguments
console.log( chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz') );
// nest styles
console.log( chalk.red('Hello', chalk.underline.bgBlue('world') + '!') );
// nest styles of the same type even (color, underline, background)
console.log( chalk.green('I am a green line ' + chalk.blue('with a blue substring') + ' that becomes green again!') );
```
Easily define your own themes.
```js
var chalk = require('chalk');
var error = chalk.bold.red;
console.log(error('Error!'));
```
Take advantage of console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data).
```js
var name = 'Sindre';
console.log(chalk.green('Hello %s'), name);
//=> Hello Sindre
```
## API
### chalk.`<style>[.<style>...](string, [string...])`
Example: `chalk.red.bold.underline('Hello', 'world');`
Chain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter.
Multiple arguments will be separated by space.
### chalk.enabled
Color support is automatically detected, but you can override it.
### chalk.supportsColor
Detect whether the terminal [supports color](https://github.com/sindresorhus/supports-color).
Can be overridden by the user with the flags `--color` and `--no-color`.
Used internally and handled for you, but exposed for convenience.
### chalk.styles
Exposes the styles as [ANSI escape codes](https://github.com/sindresorhus/ansi-styles).
Generally not useful, but you might need just the `.open` or `.close` escape code if you're mixing externally styled strings with yours.
```js
var chalk = require('chalk');
console.log(chalk.styles.red);
//=> {open: '\u001b[31m', close: '\u001b[39m'}
console.log(chalk.styles.red.open + 'Hello' + chalk.styles.red.close);
```
### chalk.hasColor(string)
Check whether a string [has color](https://github.com/sindresorhus/has-ansi).
### chalk.stripColor(string)
[Strip color](https://github.com/sindresorhus/strip-ansi) from a string.
Can be useful in combination with `.supportsColor` to strip color on externally styled text when it's not supported.
Example:
```js
var chalk = require('chalk');
var styledString = getText();
if (!chalk.supportsColor) {
styledString = chalk.stripColor(styledString);
}
```
## Styles
### General
- `reset`
- `bold`
- `dim`
- `italic` *(not widely supported)*
- `underline`
- `inverse`
- `hidden`
- `strikethrough` *(not widely supported)*
### Text colors
- `black`
- `red`
- `green`
- `yellow`
- `blue`
- `magenta`
- `cyan`
- `white`
- `gray`
### Background colors
- `bgBlack`
- `bgRed`
- `bgGreen`
- `bgYellow`
- `bgBlue`
- `bgMagenta`
- `bgCyan`
- `bgWhite`
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,42 @@
'use strict';
function createArg(key, val) {
key = key.replace(/[A-Z]/g, '-$&').toLowerCase();
return '--' + key + (val ? '=' + val : '');
};
module.exports = function (opts, excludes, includes) {
var args = [];
Object.keys(opts).forEach(function (key) {
var val = opts[key];
if (Array.isArray(excludes) && excludes.indexOf(key) !== -1) {
return;
}
if (Array.isArray(includes) && includes.indexOf(key) === -1) {
return;
}
if (val === true) {
args.push(createArg(key));
}
if (typeof val === 'string') {
args.push(createArg(key, val));
}
if (typeof val === 'number' && isNaN(val) === false) {
args.push(createArg(key, '' + val));
}
if (Array.isArray(val)) {
val.forEach(function (arrVal) {
args.push(createArg(key, arrVal));
});
}
});
return args;
};


@ -0,0 +1,67 @@
{
"name": "dargs",
"version": "2.1.0",
"description": "Convert an object of options into an array of command-line arguments",
"repository": {
"type": "git",
"url": "https://github.com/sindresorhus/dargs"
},
"keywords": [
"options",
"arguments",
"args",
"flags",
"cli",
"nopt",
"minimist",
"bin",
"binary",
"command",
"cmd"
],
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "http://sindresorhus.com"
},
"scripts": {
"test": "mocha"
},
"devDependencies": {
"mocha": "*"
},
"engines": {
"node": ">=0.10.0"
},
"license": "MIT",
"files": [
"index.js"
],
"gitHead": "e1f3e8ae83fe011a2b9ceb9aa596e2e54751e5cd",
"bugs": {
"url": "https://github.com/sindresorhus/dargs/issues"
},
"homepage": "https://github.com/sindresorhus/dargs",
"_id": "dargs@2.1.0",
"_shasum": "46c27ffab1ffb1378ef212597213719fe602bc93",
"_from": "dargs@^2.0.0",
"_npmVersion": "2.1.4",
"_nodeVersion": "0.10.32",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
}
],
"dist": {
"shasum": "46c27ffab1ffb1378ef212597213719fe602bc93",
"tarball": "http://registry.npmjs.org/dargs/-/dargs-2.1.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/dargs/-/dargs-2.1.0.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,89 @@
# dargs [![Build Status](https://travis-ci.org/sindresorhus/dargs.svg?branch=master)](https://travis-ci.org/sindresorhus/dargs)
> Convert an object of options into an array of command-line arguments
Basically the inverse of an argument parser like minimist. Useful when spawning command-line tools.
## Install
```sh
$ npm install --save dargs
```
#### Usage
```js
var dargs = require('dargs');
var options = {
foo: 'bar',
hello: true, // results in only the key being used
cake: false, // ignored
camelCase: 5, // camelCase is slugged to `camel-case`
multiple: ['value', 'value2'], // converted to multiple arguments
sad: ':('
};
var excludes = ['sad'];
var includes = ['camelCase', 'multiple', 'sad'];
console.log(dargs(options, excludes));
/*
[
'--foo=bar',
'--hello',
'--camel-case=5',
'--multiple=value',
'--multiple=value2'
]
*/
console.log(dargs(options, excludes, includes));
/*
[
'--camel-case=5',
'--multiple=value',
'--multiple=value2'
]
*/
console.log(dargs(options, [], includes));
/*
[
'--camel-case=5',
'--multiple=value',
'--multiple=value2',
'--sad=:(''
]
*/
```
## API
### dargs(options, excludes, includes)
#### options
Type: `object`
Options to convert to command-line arguments.
#### excludes
Type: `array`
Keys to exclude.
Takes precedence over `includes`.
#### includes
Type: `array`
Keys to include.
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)


@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


@ -0,0 +1,5 @@
The "which" util from npm's guts.
Finds the first instance of a specified executable in the PATH
environment variable. Does not cache the results, so `hash -r` is not
needed when the PATH changes.

14
node_modules/grunt-contrib-sass/node_modules/which/bin/which generated vendored Executable file

@ -0,0 +1,14 @@
#!/usr/bin/env node
var which = require("../")
if (process.argv.length < 3) {
console.error("Usage: which <thing>")
process.exit(1)
}
which(process.argv[2], function (er, thing) {
if (er) {
console.error(er.message)
process.exit(er.errno || 127)
}
console.log(thing)
})


@ -0,0 +1,47 @@
{
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me"
},
"name": "which",
"description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
"version": "1.0.8",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-which.git"
},
"main": "which.js",
"bin": {
"which": "./bin/which"
},
"license": "ISC",
"gitHead": "681a9ebbc447cb428232ddf6c0983006d89e7755",
"bugs": {
"url": "https://github.com/isaacs/node-which/issues"
},
"homepage": "https://github.com/isaacs/node-which",
"_id": "which@1.0.8",
"scripts": {},
"_shasum": "c2ff319534ac4a1fa45df2221b56c36279903ded",
"_from": "which@^1.0.5",
"_npmVersion": "2.1.11",
"_nodeVersion": "0.10.16",
"_npmUser": {
"name": "isaacs",
"email": "i@izs.me"
},
"maintainers": [
{
"name": "isaacs",
"email": "i@izs.me"
}
],
"dist": {
"shasum": "c2ff319534ac4a1fa45df2221b56c36279903ded",
"tarball": "http://registry.npmjs.org/which/-/which-1.0.8.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/which/-/which-1.0.8.tgz",
"readme": "ERROR: No README data found!"
}


@ -0,0 +1,99 @@
module.exports = which
which.sync = whichSync
var path = require("path")
, fs
, COLON = process.platform === "win32" ? ";" : ":"
, isExe
, fs = require("fs")
if (process.platform == "win32") {
// On windows, there is no good way to check that a file is executable
isExe = function isExe () { return true }
} else {
isExe = function isExe (mod, uid, gid) {
//console.error(mod, uid, gid);
//console.error("isExe?", (mod & 0111).toString(8))
var ret = (mod & 0001)
|| (mod & 0010) && process.getgid && gid === process.getgid()
|| (mod & 0100) && process.getuid && uid === process.getuid()
//console.error("isExe?", ret)
return ret
}
}
function which (cmd, cb) {
if (isAbsolute(cmd)) return cb(null, cmd)
var pathEnv = (process.env.PATH || "").split(COLON)
, pathExt = [""]
if (process.platform === "win32") {
pathEnv.push(process.cwd())
pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
if (cmd.indexOf(".") !== -1) pathExt.unshift("")
}
//console.error("pathEnv", pathEnv)
;(function F (i, l) {
if (i === l) return cb(new Error("not found: "+cmd))
var p = path.resolve(pathEnv[i], cmd)
;(function E (ii, ll) {
if (ii === ll) return F(i + 1, l)
var ext = pathExt[ii]
//console.error(p + ext)
fs.stat(p + ext, function (er, stat) {
if (!er &&
stat &&
stat.isFile() &&
isExe(stat.mode, stat.uid, stat.gid)) {
//console.error("yes, exe!", p + ext)
return cb(null, p + ext)
}
return E(ii + 1, ll)
})
})(0, pathExt.length)
})(0, pathEnv.length)
}
function whichSync (cmd) {
if (isAbsolute(cmd)) return cmd
var pathEnv = (process.env.PATH || "").split(COLON)
, pathExt = [""]
if (process.platform === "win32") {
pathEnv.push(process.cwd())
pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
if (cmd.indexOf(".") !== -1) pathExt.unshift("")
}
for (var i = 0, l = pathEnv.length; i < l; i ++) {
var p = path.join(pathEnv[i], cmd)
for (var j = 0, ll = pathExt.length; j < ll; j ++) {
var cur = p + pathExt[j]
var stat
try { stat = fs.statSync(cur) } catch (ex) {}
if (stat &&
stat.isFile() &&
isExe(stat.mode, stat.uid, stat.gid)) return cur
}
}
throw new Error("not found: "+cmd)
}
var isAbsolute = process.platform === "win32" ? absWin : absUnix
function absWin (p) {
if (absUnix(p)) return true
// pull off the device/UNC bit from a windows path.
// from node's lib/path.js
var splitDeviceRe =
/^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?([\\\/])?/
, result = splitDeviceRe.exec(p)
, device = result[1] || ''
, isUnc = device && device.charAt(1) !== ':'
, isAbsolute = !!result[2] || isUnc // UNC paths are always absolute
return isAbsolute
}
function absUnix (p) {
return p.charAt(0) === "/" || p === ""
}

View File

@ -0,0 +1,15 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
node_modules
npm-debug.log

View File

@ -0,0 +1,41 @@
# win-spawn
Spawn for node.js but in a way that works regardless of which OS you're using. Use this if you want to use spawn with a JavaScript file. It works by explicitly invoking node on windows. It also shims support for environment variable setting by attempting to parse the command with a regex. Since all modification is wrapped in `if (os === 'Windows_NT')` it can be safely used on non-windows systems and will not break anything.
## Installation
$ npm install win-spawn
## Usage
### Command Line
All the following will work exactly as if the 'win-spawn ' prefix was omitted when on unix.
$ win-spawn foo
$ win-spawn ./bin/foo
$ win-spawn NODE_PATH=./lib foo
$ win-spawn NODE_PATH=./lib foo arg1 arg2
You can also transform all the line endings in a directory from `\r\n` to `\n` just by running:
$ win-line-endings
You can preview the changes by running:
$ win-line-endings -p
It will ignore `node_modules` and `.git` by default, but is not clever enough to recognise binary files yet.
### API
This will just pass through to `child_process.spawn` on unix systems, but will correctly parse the arguments on windows.
```javascript
spawn('foo', [], {stdio: 'inherit'});
spawn('./bin/foo', [], {stdio: 'inherit'});
spawn('NODE_PATH=./lib foo', [], {stdio: 'inherit'});
spawn('NODE_PATH=./lib foo', [arg1, arg2], {stdio: 'inherit'});
```
![viewcount](https://viewcount.jepso.com/count/ForbesLindesay/win-spawn.png)

View File

@ -0,0 +1,12 @@
#!/usr/bin/env node
var spawn = require('../index.js');
var args = process.argv.slice(2);
var cmd = '';
while (/^[A-Z_]+\=[^ \=]+$/.test(args[0])) {
cmd += args.shift() + ' ';
}
cmd += args.shift();
spawn(cmd, args, { stdio: 'inherit' });

View File

@ -0,0 +1,64 @@
var cSpawn = require('child_process').spawn;
var os = require('os').type();
exports = module.exports = spawn;
function spawn(command, args, options) {
if (os === 'Windows_NT') {
command = command.replace(/\//g, '\\');
if (command === 'rm') {
command = 'rmdir';
if (args[0] === '-rf' || args[0] == '-fr') {
args[0] = '/q';
args.unshift('/s');
}
if (args[0] === '-f') {
args[0] = '/q';
}
if (args[0] === '-r') {
args[0] = '/s';
}
}
args = args || [];
options = options || {};
var match, matchA;
if (matchA = /((?:[A-Z_]+\=[^ \=]+ )+)?([^\r\n]+)/.exec(command)) {
try {
var file = require('fs').readFileSync(matchA[2], 'utf8');
if (match = /\#\!\/usr\/bin\/env ([^\r\n]+)/.exec(file)) {
args.unshift(matchA[2]);
command = (matchA[1] || '') + match[1];
}
} catch (ex) { }
}
if (match = /((?:[A-Z_]+\=[^ \=]+ )+)([^\r\n]+)/.exec(command)) {
command = match[2];
options.env = options.env || shallowClone(process.env);
var env = match[1].split(' ');
env.forEach(function (v) {
v = v.split('=');
if (v.length === 2) {
options.env[v[0]] = v[1];
}
});
}
args.unshift(command);
args.unshift('/c');
args.unshift('/d');
command = 'cmd';
}
return cSpawn(command, args, options);
}
function shallowClone(obj) {
var out = {};
Object.keys(obj)
.forEach(function (key) {
out[key] = obj[key];
});
return out;
}

View File

@ -0,0 +1,33 @@
{
"name": "win-spawn",
"version": "2.0.0",
"description": "Spawn for node.js but in a way that works regardless of which OS you're using",
"main": "index.js",
"repository": {
"type": "git",
"url": "https://github.com/ForbesLindesay/win-spawn.git"
},
"bin": {
"win-spawn": "./bin/win-spawn"
},
"devDependencies": {
"linify": "~1.0.1"
},
"scripts": {
"prepublish": "linify transform bin"
},
"author": {
"name": "ForbesLindesay"
},
"license": "BSD",
"readme": "# win-spawn\n\n Spawn for node.js but in a way that works regardless of which OS you're using. Use this if you want to use spawn with a JavaScript file. It works by explicitly invoking node on windows. It also shims support for environment variable setting by attempting to parse the command with a regex. Since all modification is wrapped in `if (os === 'Windows_NT')` it can be safely used on non-windows systems and will not break anything.\n\n## Installation\n\n $ npm install win-spawn\n\n## Usage\n\n### Command Line\n\n All the following will work exactly as if the 'win-spawn ' prefix was ommitted when on unix.\n\n $ win-spawn foo\n $ win-spawn ./bin/foo\n $ win-spawn NODE_PATH=./lib foo\n $ win-spawn NODE_PATH=./lib foo arg1 arg2\n\n You can also transform all the line endings in a directory from `\\r\\n` to `\\n` just by running:\n\n $ win-line-endings\n\n You can preview the changes by running:\n\n $ win-line-endings -p\n\n It will ignore `node_modules` and `.git` by default, but is not clever enough to recognise binary files yet.\n\n### API\n\nThis will just pass through to `child_process.spawn` on unix systems, but will correctly parse the arguments on windows.\n\n```javascript\nspawn('foo', [], {stdio: 'inherit'});\nspawn('./bin/foo', [], {stdio: 'inherit'});\nspawn('NODE_PATH=./lib foo', [], {stdio: 'inherit'});\nspawn('NODE_PATH=./lib foo', [arg1, arg2], {stdio: 'inherit'});\n```\n\n![viewcount](https://viewcount.jepso.com/count/ForbesLindesay/win-spawn.png)\n",
"readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/ForbesLindesay/win-spawn/issues"
},
"homepage": "https://github.com/ForbesLindesay/win-spawn",
"_id": "win-spawn@2.0.0",
"_shasum": "397a29130ec98d0aa0bc86baa4621393effd0b07",
"_from": "win-spawn@^2.0.0",
"_resolved": "https://registry.npmjs.org/win-spawn/-/win-spawn-2.0.0.tgz"
}

98
node_modules/grunt-contrib-sass/package.json generated vendored Normal file
View File

@ -0,0 +1,98 @@
{
"name": "grunt-contrib-sass",
"description": "Compile Sass to CSS",
"version": "0.8.1",
"homepage": "https://github.com/gruntjs/grunt-contrib-sass",
"author": {
"name": "Grunt Team",
"url": "http://gruntjs.com/"
},
"repository": {
"type": "git",
"url": "git://github.com/gruntjs/grunt-contrib-sass"
},
"licenses": [
{
"type": "MIT",
"url": "https://github.com/gruntjs/grunt-contrib-sass/blob/master/LICENSE-MIT"
}
],
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "grunt test"
},
"dependencies": {
"async": "^0.9.0",
"chalk": "^0.5.1",
"dargs": "^2.0.0",
"which": "^1.0.5",
"win-spawn": "^2.0.0"
},
"devDependencies": {
"grunt": "^0.4.5",
"grunt-contrib-clean": "^0.6.0",
"grunt-contrib-internal": "^0.4.10",
"grunt-contrib-jshint": "^0.10.0",
"grunt-contrib-nodeunit": "^0.4.1"
},
"peerDependencies": {
"grunt": ">=0.4.0"
},
"keywords": [
"gruntplugin",
"scss",
"sass",
"css",
"compile",
"preprocessor",
"style"
],
"contributors": [
{
"name": "Sindre Sorhus",
"url": "http://github.com/sindresorhus"
}
],
"bugs": {
"url": "https://github.com/gruntjs/grunt-contrib-sass/issues"
},
"_id": "grunt-contrib-sass@0.8.1",
"_shasum": "8de924480dc12a51d35abe96a721682afc88d800",
"_from": "grunt-contrib-sass@",
"_npmVersion": "1.4.9",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
"maintainers": [
{
"name": "tkellen",
"email": "tyler@sleekcode.net"
},
{
"name": "cowboy",
"email": "cowboy@rj3.net"
},
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
{
"name": "shama",
"email": "kyle@dontkry.com"
},
{
"name": "jmeas",
"email": "jellyes2@gmail.com"
}
],
"dist": {
"shasum": "8de924480dc12a51d35abe96a721682afc88d800",
"tarball": "http://registry.npmjs.org/grunt-contrib-sass/-/grunt-contrib-sass-0.8.1.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/grunt-contrib-sass/-/grunt-contrib-sass-0.8.1.tgz",
"readme": "ERROR: No README data found!"
}

184
node_modules/grunt-contrib-sass/tasks/sass.js generated vendored Normal file
View File

@ -0,0 +1,184 @@
/*
* grunt-contrib-sass
* http://gruntjs.com/
*
* Copyright (c) 2013 Sindre Sorhus, contributors
* Licensed under the MIT license.
*/
'use strict';
var path = require('path');
var dargs = require('dargs');
var numCPUs = require('os').cpus().length || 1;
var async = require('async');
var chalk = require('chalk');
var spawn = require('win-spawn');
var which = require('which');
module.exports = function (grunt) {
var bannerCallback = function (filename, banner) {
grunt.verbose.writeln('Writing CSS banner for ' + filename);
grunt.file.write(filename, banner + grunt.util.linefeed + grunt.file.read(filename));
};
var checkBinary = function (cmd, errMess) {
try {
which.sync(cmd);
} catch (err) {
return grunt.warn(
'\n' + errMess + '\n' +
'More info: https://github.com/gruntjs/grunt-contrib-sass\n'
);
}
};
var checkFiles = function (files, options, cb) {
var failCount = 0;
var filesToCheck = files.filter(function (src) {
return path.basename(src)[0] !== '_' && grunt.file.exists(src);
});
async.eachLimit(filesToCheck, numCPUs, function (src, next) {
var bin;
var args;
if (options.bundleExec) {
bin = 'bundle';
args = ['exec', 'sass', '--check', src];
} else {
bin = 'sass';
args = ['--check', src];
}
grunt.verbose.writeln('Command: ' + bin + ' ' + args.join(' '));
grunt.verbose.writeln('Checking file ' + chalk.cyan(src) + ' syntax.');
spawn(bin, args, { stdio: 'inherit' })
.on('error', grunt.warn)
.on('close', function (code) {
if (code > 0) {
failCount++;
grunt.log.error('Checking file ' + chalk.cyan(src) + ' - ' + chalk.red('failed') + '.');
} else {
grunt.verbose.ok('Checking file ' + chalk.cyan(src) + ' - ' + chalk.green('passed') + '.');
}
next();
});
}, function () {
if (failCount > 0) {
grunt.warn('Sass check failed for ' + failCount + ' files.');
} else {
grunt.log.ok('All ' + chalk.cyan(filesToCheck.length) + ' files passed.');
}
cb();
});
};
grunt.registerMultiTask('sass', 'Compile Sass to CSS', function () {
var cb = this.async();
var options = this.options();
var bundleExec = options.bundleExec;
var banner;
var passedArgs;
if (bundleExec) {
checkBinary('bundle',
'The bundleExec option is set, but no Bundler executable was found in your PATH.'
);
} else {
checkBinary('sass',
'You need to have Ruby and Sass installed and in your PATH for this task to work.'
);
}
if (options.check) {
checkFiles(this.filesSrc, options, cb);
return;
}
// Unset banner option if set
if (options.banner) {
banner = options.banner;
delete options.banner;
}
passedArgs = dargs(options, ['bundleExec']);
async.eachLimit(this.files, numCPUs, function (file, next) {
var src = file.src[0];
if (typeof src !== 'string') {
src = file.orig.src[0];
}
if (!grunt.file.exists(src)) {
grunt.log.warn('Source file "' + src + '" not found.');
return next();
}
if (path.basename(src)[0] === '_') {
return next();
}
var args = [
src,
file.dest
].concat(passedArgs);
if (options.update) {
// When the source file hasn't yet been compiled, Sass will write an empty file.
// If this is the first time the file has been written, we treat it as if the update option was not passed.
if (!grunt.file.exists(file.dest)) {
// Find where the --update flag is and remove it.
var index = args.indexOf('--update');
args.splice(index, 1);
} else {
// The first two elements in args are our source and destination files;
// we use those values to build a path that Sass recognizes, namely source:destination
var sassPath = args.shift() + ':' + args.shift();
args.push(sassPath);
}
}
var bin = 'sass';
if (bundleExec) {
bin = 'bundle';
args.unshift('exec', 'sass');
}
// If the source is a .css file, tell Sass to parse it as SCSS
if (path.extname(src) === '.css') {
args.push('--scss');
}
// Make sure grunt creates the destination folders if they don't exist
if (!grunt.file.exists(file.dest)) {
grunt.file.write(file.dest, '');
}
grunt.verbose.writeln('Command: ' + bin + ' ' + args.join(' '));
var cp = spawn(bin, args, {stdio: 'inherit'});
cp.on('error', function (err) {
grunt.warn(err);
});
cp.on('close', function (code) {
if (code > 0) {
return grunt.warn('Exited with error code ' + code);
}
// Callback to insert banner
if (banner) {
bannerCallback(file.dest, banner);
}
grunt.verbose.writeln('File ' + chalk.cyan(file.dest) + ' created.');
next();
});
}, cb);
});
};

22
node_modules/grunt-contrib-watch/LICENSE-MIT generated vendored Normal file
View File

@ -0,0 +1,22 @@
Copyright (c) 2014 "Cowboy" Ben Alman, contributors
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

486
node_modules/grunt-contrib-watch/README.md generated vendored Normal file
View File

@ -0,0 +1,486 @@
# grunt-contrib-watch v0.6.1 [![Build Status](https://travis-ci.org/gruntjs/grunt-contrib-watch.png?branch=master)](https://travis-ci.org/gruntjs/grunt-contrib-watch)
> Run predefined tasks whenever watched file patterns are added, changed or deleted.
## Getting Started
This plugin requires Grunt `~0.4.0`
If you haven't used [Grunt](http://gruntjs.com/) before, be sure to check out the [Getting Started](http://gruntjs.com/getting-started) guide, as it explains how to create a [Gruntfile](http://gruntjs.com/sample-gruntfile) as well as install and use Grunt plugins. Once you're familiar with that process, you may install this plugin with this command:
```shell
npm install grunt-contrib-watch --save-dev
```
Once the plugin has been installed, it may be enabled inside your Gruntfile with this line of JavaScript:
```js
grunt.loadNpmTasks('grunt-contrib-watch');
```
## Watch task
_Run this task with the `grunt watch` command._
### Settings
There are a number of options available. Please review the [minimatch options here](https://github.com/isaacs/minimatch#options), as well as the additional options that follow:
#### files
Type: `String|Array`
This defines what file patterns this task will watch. Can be a string or an array of files and/or minimatch patterns.
#### tasks
Type: `String|Array`
This defines which tasks to run when a watched file event occurs.
#### options.spawn
Type: `Boolean`
Default: true
Whether to spawn task runs in a child process. Setting this option to `false` speeds up the reaction time of the watch (usually 500ms faster for most environments) and allows subsequent task runs to share the same context. Not spawning task runs can make the watch more prone to failing, so please use as needed.
Example:
```js
watch: {
scripts: {
files: ['**/*.js'],
tasks: ['jshint'],
options: {
spawn: false,
},
},
},
```
*For backwards compatibility the option `nospawn` is still available and will do the opposite of `spawn`.*
#### options.interrupt
Type: `Boolean`
Default: false
As files are modified this watch task will spawn tasks in child processes. The default behavior will only spawn a new child process per target when the previous process has finished. Set the `interrupt` option to true to terminate the previous process and spawn a new one upon later changes.
Example:
```js
watch: {
scripts: {
files: '**/*.js',
tasks: ['jshint'],
options: {
interrupt: true,
},
},
},
```
#### options.debounceDelay
Type: `Integer`
Default: 500
How long to wait before emitting events in succession for the same filepath and status. For example if your `Gruntfile.js` file was `changed`, a `changed` event will only fire again after the given milliseconds.
Example:
```js
watch: {
scripts: {
files: '**/*.js',
tasks: ['jshint'],
options: {
debounceDelay: 250,
},
},
},
```
#### options.interval
Type: `Integer`
Default: 100
The `interval` is passed to `fs.watchFile`. Since `interval` is only used by `fs.watchFile`, and this watcher also uses `fs.watch`, it is recommended to ignore this option. *Default is 100ms*.
#### options.event
Type: `String|Array`
Default: `'all'`
Specify the type of watch event that triggers the specified task. This option can be one or many of: `'all'`, `'changed'`, `'added'` and `'deleted'`.
Example:
```js
watch: {
scripts: {
files: '**/*.js',
tasks: ['generateFileManifest'],
options: {
event: ['added', 'deleted'],
},
},
},
```
#### options.reload
Type: `Boolean`
Default: `false`
By default, if `Gruntfile.js` is being watched, then changes to it will trigger the watch task to restart, and reload the `Gruntfile.js` changes.
When `reload` is set to `true`, changes to *any* of the watched files will trigger the watch task to restart.
This is especially useful if your `Gruntfile.js` is dependent on other files.
```js
watch: {
configFiles: {
files: [ 'Gruntfile.js', 'config/*.js' ],
options: {
reload: true
}
}
}
```
#### options.forever
Type: `Boolean`
Default: true
This is *only a task level option* and cannot be configured per target. By default the watch task will duck punch `grunt.fatal` and `grunt.warn` to try and prevent them from exiting the watch process. If you don't want `grunt.fatal` and `grunt.warn` to be overridden set the `forever` option to `false`.
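Example (a minimal sketch of disabling this behaviour; the target and task names are illustrative):
```js
watch: {
  options: {
    // Allow grunt.fatal and grunt.warn to exit the watch process as usual
    forever: false,
  },
  scripts: {
    files: '**/*.js',
    tasks: ['jshint'],
  },
},
```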
#### options.dateFormat
Type: `Function`
This is *only a task level option* and cannot be configured per target. By default when the watch has finished running tasks it will display the message `Completed in 1.301s at Thu Jul 18 2013 14:58:21 GMT-0700 (PDT) - Waiting...`. You can override this message by supplying your own function:
```js
watch: {
options: {
dateFormat: function(time) {
grunt.log.writeln('The watch finished in ' + time + 'ms at' + (new Date()).toString());
grunt.log.writeln('Waiting for more changes...');
},
},
scripts: {
files: '**/*.js',
tasks: 'jshint',
},
},
```
#### options.atBegin
Type: `Boolean`
Default: false
This option will trigger the run of each specified task at startup of the watcher.
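Example (a minimal sketch; the target and task names are illustrative):
```js
watch: {
  scripts: {
    files: '**/*.js',
    tasks: ['jshint'],
    options: {
      // Run the configured tasks once when `grunt watch` starts, then again on changes
      atBegin: true,
    },
  },
},
```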
#### options.livereload
Type: `Boolean|Number|Object`
Default: false
Set to `true`, or to a port number such as `livereload: 1337`, to enable live reloading. The default and recommended port is `35729`.
If enabled, a live reload server will be started with the watch task per target. Then, after the indicated tasks have run, the live reload server will be triggered with the modified files.
Example:
```js
watch: {
css: {
files: '**/*.sass',
tasks: ['sass'],
options: {
livereload: true,
},
},
},
```
It's possible to get livereload working over https connections. To do this, pass an object to `livereload` with `key` and `cert` paths specified.
Example:
```js
watch: {
css: {
files: '**/*.sass',
tasks: ['sass'],
options: {
livereload: {
port: 9000,
key: grunt.file.read('path/to/ssl.key'),
cert: grunt.file.read('path/to/ssl.crt')
// you can pass in any other options you'd like to the https server, as listed here: http://nodejs.org/api/tls.html#tls_tls_createserver_options_secureconnectionlistener
}
},
},
},
```
#### options.cwd
Type: `String|Object`
Default: `process.cwd()`
Sets the current working directory. Defaults to `process.cwd()`. Can either be a string to set the cwd used to match files and spawn tasks, or an object to set each independently, such as `options: { cwd: { files: 'match/files/from/here', spawn: 'but/spawn/files/from/here' } }`.
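Example (a fuller sketch of the object form; the paths and task names are illustrative):
```js
watch: {
  scripts: {
    files: ['**/*.js'],
    tasks: ['jshint'],
    options: {
      cwd: {
        // Match watched files relative to app/, but spawn tasks from the project root
        files: 'app',
        spawn: '.',
      },
    },
  },
},
```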
#### options.livereloadOnError
Type: `Boolean`
Default: `true`
Option to prevent the livereload if the executed tasks encountered an error. If set to `false`, the livereload will only be triggered if all tasks completed successfully.
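Example (a sketch combining this with `livereload`; the patterns and tasks are illustrative):
```js
watch: {
  css: {
    files: '**/*.sass',
    tasks: ['sass'],
    options: {
      livereload: true,
      // Only trigger the live reload when the sass task completes successfully
      livereloadOnError: false,
    },
  },
},
```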
### Examples
```js
// Simple config to run jshint any time a file is added, changed or deleted
grunt.initConfig({
watch: {
files: ['**/*'],
tasks: ['jshint'],
},
});
```
```js
// Advanced config. Run specific tasks when specific files are added, changed or deleted.
grunt.initConfig({
watch: {
gruntfile: {
files: 'Gruntfile.js',
tasks: ['jshint:gruntfile'],
},
src: {
files: ['lib/*.js', 'css/**/*.scss', '!lib/dontwatch.js'],
tasks: ['default'],
},
test: {
files: '<%= jshint.test.src %>',
tasks: ['jshint:test', 'qunit'],
},
},
});
```
#### Using the `watch` event
This task will emit a `watch` event when watched files are modified. This is useful if you would like a simple notification when files are edited or if you're using this task in tandem with another task. Here is a simple example using the `watch` event:
```js
grunt.initConfig({
watch: {
scripts: {
files: ['lib/*.js'],
},
},
});
grunt.event.on('watch', function(action, filepath, target) {
grunt.log.writeln(target + ': ' + filepath + ' has ' + action);
});
```
**The `watch` event is not intended for replacing the standard Grunt API for configuring and running tasks. If you're trying to run tasks from within the `watch` event you're more than likely doing it wrong. Please read [configuring tasks](http://gruntjs.com/configuring-tasks).**
##### Compiling Files As Needed
A very common request is to only compile files as needed. Here is an example that will only lint changed files with the `jshint` task:
```js
grunt.initConfig({
watch: {
scripts: {
files: ['lib/*.js'],
tasks: ['jshint'],
options: {
spawn: false,
},
},
},
jshint: {
all: {
src: ['lib/*.js'],
},
},
});
// on watch events configure jshint:all to only run on changed file
grunt.event.on('watch', function(action, filepath) {
grunt.config('jshint.all.src', filepath);
});
```
If you need to dynamically modify your config, the `spawn` option must be disabled to keep the watch running under the same context.
If you save multiple files simultaneously you may opt for a more robust method:
```js
var changedFiles = Object.create(null);
var onChange = grunt.util._.debounce(function() {
grunt.config('jshint.all.src', Object.keys(changedFiles));
changedFiles = Object.create(null);
}, 200);
grunt.event.on('watch', function(action, filepath) {
changedFiles[filepath] = action;
onChange();
});
```
#### Live Reloading
Live reloading is built into the watch task. Set the option `livereload` to `true` to enable on the default port `35729` or set to a custom port: `livereload: 1337`.
The simplest way to add live reloading to all your watch targets is by setting `livereload` to `true` at the task level. This will run a single live reload server and trigger the live reload for all your watch targets:
```js
grunt.initConfig({
watch: {
options: {
livereload: true,
},
css: {
files: ['public/scss/*.scss'],
tasks: ['compass'],
},
},
});
```
You can also configure live reload for individual watch targets or run multiple live reload servers. Just be sure if you're starting multiple servers they operate on different ports:
```js
grunt.initConfig({
watch: {
css: {
files: ['public/scss/*.scss'],
tasks: ['compass'],
options: {
// Start a live reload server on the default port 35729
livereload: true,
},
},
another: {
files: ['lib/*.js'],
tasks: ['anothertask'],
options: {
// Start another live reload server on port 1337
livereload: 1337,
},
},
dont: {
files: ['other/stuff/*'],
tasks: ['dostuff'],
},
},
});
```
##### Enabling Live Reload in Your HTML
Once you've started a live reload server you'll be able to access the live reload script. To enable live reload on your page, add a script tag before your closing `</body>` tag pointing to the `livereload.js` script:
```html
<script src="//localhost:35729/livereload.js"></script>
```
Feel free to add this script to your template situation and toggle with some sort of `dev` flag.
##### Using Live Reload with the Browser Extension
Instead of adding a script tag to your page, you can live reload your page by installing a browser extension. Please visit [how do I install and use the browser extensions](http://feedback.livereload.com/knowledgebase/articles/86242-how-do-i-install-and-use-the-browser-extensions-) for help installing an extension for your browser.
Once installed please use the default live reload port `35729` and the browser extension will automatically reload your page without needing the `<script>` tag.
##### Using Connect Middleware
Since live reloading is used when developing, you may want to omit the live reload script from production builds (assuming you are not using the browser extension). One method is to use Connect middleware to inject the script tag into your page during development. Try the [connect-livereload](https://github.com/intesso/connect-livereload) middleware for injecting the live reload script into your page.
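A rough sketch of that approach, assuming connect-livereload accepts a `{ port: ... }` option (check that project's README for its actual API):
```js
var connect = require('connect');
var app = connect();

// Only inject the live reload script during development
if (process.env.NODE_ENV !== 'production') {
  // The { port: 35729 } option is an assumption; see the connect-livereload README
  app.use(require('connect-livereload')({ port: 35729 }));
}

app.listen(3000);
```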
##### Rolling Your Own Live Reload
Live reloading is made easy by the library [tiny-lr](https://github.com/mklabs/tiny-lr). You are encouraged to read the documentation for `tiny-lr`. If you would like to trigger the live reload server yourself, simply POST files to the URL: `http://localhost:35729/changed`. Or, if you'd rather roll your own live reload implementation, use the following example:
```js
// Create a live reload server instance
var lrserver = require('tiny-lr')();
// Listen on port 35729
lrserver.listen(35729, function(err) { console.log('LR Server Started'); });
// Then later trigger files or POST to localhost:35729/changed
lrserver.changed({body:{files:['public/css/changed.css']}});
```
##### Live Reload with Preprocessors
Any time a watched file is edited with the `livereload` option enabled, the file will be sent to the live reload server. For files that are being preprocessed (`sass`, `less`, `coffeescript`, etc.) this is usually not what you want, since any file not recognized will reload the entire page as opposed to just the `css` or `javascript`.
The solution is to point a `livereload` watch target to your destination files:
```js
grunt.initConfig({
sass: {
dev: {
src: ['src/sass/*.sass'],
dest: 'dest/css/index.css',
},
},
watch: {
sass: {
// We watch and compile sass files as normal but don't live reload here
files: ['src/sass/*.sass'],
tasks: ['sass'],
},
livereload: {
// Here we watch the files the sass task will compile to
// These files are sent to the live reload server after sass compiles to them
options: { livereload: true },
files: ['dest/**/*'],
},
},
});
```
### FAQs
#### How do I fix the error `EMFILE: Too many opened files.`?
This is because of your system's max opened file limit. For OSX the default is very low (256). Temporarily increase your limit with `ulimit -n 10480`, the number being the new max limit.
In some versions of OSX the above solution doesn't work. In that case try `launchctl limit maxfiles 10480 10480 ` and restart your terminal. See [here](http://superuser.com/questions/261023/how-to-change-default-ulimit-values-in-mac-os-x-10-6).
#### Can I use this with Grunt v0.3?
`grunt-contrib-watch@0.1.x` is compatible with Grunt v0.3 but it is highly recommended to upgrade Grunt instead.
#### Why is the watch devouring all my memory/cpu?
Likely because of an enthusiastic pattern trying to watch thousands of files, such as `'**/*.js'` while forgetting to exclude the `node_modules` folder with `'!**/node_modules/**'`. Try grouping your files within a subfolder or be more explicit with your file matching pattern.
Another reason, if you're watching a large number of files, could be the low default `interval`. Try increasing it with `options: { interval: 5007 }`. Please see issues [#35](https://github.com/gruntjs/grunt-contrib-watch/issues/35) and [#145](https://github.com/gruntjs/grunt-contrib-watch/issues/145) for more information.
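As a sketch of both suggestions combined (the pattern, task and interval value are illustrative):
```js
watch: {
  scripts: {
    // Watch project scripts but exclude the node_modules tree
    files: ['**/*.js', '!**/node_modules/**'],
    tasks: ['jshint'],
    options: {
      // Raise the fs.watchFile polling interval to reduce CPU usage
      interval: 5007,
    },
  },
},
```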
#### Why spawn as child processes as a default?
The goal of this watch task is that, as files are changed, tasks run as if they were triggered by the user themselves. Each time a user runs `grunt`, a process is spawned and tasks are run in succession. In an effort to keep the experience consistent and continually produce expected results, this watch task spawns tasks as child processes by default.
Sandboxing task runs also allows this watch task to run more stably over long periods of time, as well as more efficiently with more complex tasks and file structures.
Spawning does cause a performance hit (usually 500ms for most environments). It also cripples tasks that rely on the watch task to share the context with each subsequent run (i.e., reload tasks). If you would like a faster watch task or need to share the context, please set the `spawn` option to `false`. Just be aware that with spawning disabled, the watch task is more prone to failure.
## Release History
* v0.6.1 (2014-03-19): Fix for watch targets named "default".
* v0.6.0 (2014-03-11): Clear changed files after triggering live reload to ensure they're only triggered once. cwd option now accepts separate settings for files and spawn. Fix to make interrupt work more than once. Enable live reload over HTTPS. Print newline after initial 'Waiting...'. Remove deprecated grunt.util libs. Add reload option to specify files other than Gruntfile files to reload. Update to gaze@0.5.1. Use fork of tiny-lr (which has quieter operation, support for HTTPS and Windows path fixes). Add livereloadOnError, which if set to false will not trigger live reload if there is an error.
* v0.5.3 (2013-08-25): Fix for live reload missing files.
* v0.5.2 (2013-08-16): Fixed issue running tasks after gruntfile is reloaded. Ignores empty file paths.
* v0.5.1 (2013-07-20): Fixed issue with options resetting.
* v0.5.0 (2013-07-18): Added target name to watch event. Added atBegin option to run tasks when watcher starts. Changed nospawn option to spawn (nospawn still available for backwards compatibility). Moved libs/vars into top scope to prevent re-init. Bumped Gaze version to ~0.4. Re-grab task/target options upon each task run. Add dateFormat option to override the date/time output upon completion.
* v0.4.4 (2013-05-27): Remove gracefully closing SIGINT. Not needed and causes problems for Windows. Ensure tasks are an array to not conflict with cliArgs.
* v0.4.3 (2013-05-11): Only group changed files per target to send correct files to live reload.
* v0.4.2 (2013-05-09): Fix for closing watchers.
* v0.4.1 (2013-05-09): Removed "beep" notification. Tasks now optional with livereload option. Reverted "run again" with interrupt off to fix infinite recursion issue. Watchers now close more properly on task run.
* v0.4.0 (2013-05-03): Option livereload to start live reload servers. Will reload a Gruntfile before running tasks if Gruntfile is modified. Option event to only trigger watch on certain events. Refactor watch task into separate task runs per target. Option forever to override grunt.fatal/warn to help keep the watch alive with nospawn enabled. Emit a beep upon completion. Logs all watched files with verbose flag set. If interrupt is off, will run the tasks once more if watch triggered during a previous task run. tasks property is optional for use with watch event. Watchers properly closed when exiting.
* v0.3.1 (2013-02-28): Fix for top level options.
* v0.3.0 (2013-02-27): nospawn option added to run tasks without spawning as child processes. Watch emits 'watch' events upon files being triggered with grunt.event. Completion time in seconds and date/time shown after tasks ran. Negate file patterns fixed. Tasks debounced individually to handle simultaneous triggering for multiple targets. Errors handled better and viewable with --stack cli option. Code complexity reduced making the watch task code easier to read.
* v0.2.0 (2013-02-15): First official release for Grunt 0.4.0.
* v0.2.0rc7 (2013-01-18): Updating grunt/gruntplugin dependencies to rc6. Changing in-development grunt/gruntplugin dependency versions from tilde version ranges to specific versions.
* v0.2.0rc5 (2013-01-09): Updating to work with grunt v0.4.0rc5.
* v0.2.0a (2012-12-15): Conversion to grunt v0.4 conventions. Remove node v0.6 and grunt v0.3 support. Allow watch task to be renamed. Use grunt.util.spawn "grunt" option. Updated to gaze@0.3.0, forceWatchMethod option removed.
* v0.1.4 (2012-11-01): Prevent watch from spawning duplicate watch tasks.
* v0.1.3 (2012-10-28): Better method to spawn the grunt bin. Bump gaze to v0.2.0. Better handles some events and new option forceWatchMethod. Only support Node.js >= v0.8.
* v0.1.2 (2012-10-17): Only spawn a process per task one at a time. Add interrupt option to cancel previous spawned process. Grunt v0.3 compatibility changes.
* v0.1.1 (2012-10-16): Fallback to global grunt bin if local doesn't exist. Fatal if bin cannot be found. Update to gaze 0.1.6.
* v0.1.0 (2012-10-08): Release watch task. Remove spawn from helper. Run on Grunt v0.4.
---
Task submitted by [Kyle Robinson Young](http://dontkry.com)
*This file was generated on Wed Mar 19 2014 13:09:11.*

View File

@ -0,0 +1 @@
../tiny-lr-fork/bin/tiny-lr

View File

@ -0,0 +1,19 @@
Copyright (c) 2010 Caolan McMahon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
{
"name": "async",
"repo": "caolan/async",
"description": "Higher-order functions and common patterns for asynchronous code",
"version": "0.1.23",
"keywords": [],
"dependencies": {},
"development": {},
"main": "lib/async.js",
"scripts": [ "lib/async.js" ]
}

View File

@ -0,0 +1,958 @@
/*global setImmediate: false, setTimeout: false, console: false */
(function () {
var async = {};
// global on the server, window in the browser
var root, previous_async;
root = this;
if (root != null) {
previous_async = root.async;
}
async.noConflict = function () {
root.async = previous_async;
return async;
};
function only_once(fn) {
var called = false;
return function() {
if (called) throw new Error("Callback was already called.");
called = true;
fn.apply(root, arguments);
}
}
//// cross-browser compatibility functions ////
var _each = function (arr, iterator) {
if (arr.forEach) {
return arr.forEach(iterator);
}
for (var i = 0; i < arr.length; i += 1) {
iterator(arr[i], i, arr);
}
};
var _map = function (arr, iterator) {
if (arr.map) {
return arr.map(iterator);
}
var results = [];
_each(arr, function (x, i, a) {
results.push(iterator(x, i, a));
});
return results;
};
var _reduce = function (arr, iterator, memo) {
if (arr.reduce) {
return arr.reduce(iterator, memo);
}
_each(arr, function (x, i, a) {
memo = iterator(memo, x, i, a);
});
return memo;
};
var _keys = function (obj) {
if (Object.keys) {
return Object.keys(obj);
}
var keys = [];
for (var k in obj) {
if (obj.hasOwnProperty(k)) {
keys.push(k);
}
}
return keys;
};
//// exported async module functions ////
//// nextTick implementation with browser-compatible fallback ////
if (typeof process === 'undefined' || !(process.nextTick)) {
if (typeof setImmediate === 'function') {
async.nextTick = function (fn) {
// not a direct alias for IE10 compatibility
setImmediate(fn);
};
async.setImmediate = async.nextTick;
}
else {
async.nextTick = function (fn) {
setTimeout(fn, 0);
};
async.setImmediate = async.nextTick;
}
}
else {
async.nextTick = process.nextTick;
if (typeof setImmediate !== 'undefined') {
async.setImmediate = function (fn) {
// not a direct alias for IE10 compatibility
setImmediate(fn);
};
}
else {
async.setImmediate = async.nextTick;
}
}
async.each = function (arr, iterator, callback) {
callback = callback || function () {};
if (!arr.length) {
return callback();
}
var completed = 0;
_each(arr, function (x) {
iterator(x, only_once(function (err) {
if (err) {
callback(err);
callback = function () {};
}
else {
completed += 1;
if (completed >= arr.length) {
callback(null);
}
}
}));
});
};
async.forEach = async.each;
async.eachSeries = function (arr, iterator, callback) {
callback = callback || function () {};
if (!arr.length) {
return callback();
}
var completed = 0;
var iterate = function () {
iterator(arr[completed], function (err) {
if (err) {
callback(err);
callback = function () {};
}
else {
completed += 1;
if (completed >= arr.length) {
callback(null);
}
else {
iterate();
}
}
});
};
iterate();
};
async.forEachSeries = async.eachSeries;
async.eachLimit = function (arr, limit, iterator, callback) {
var fn = _eachLimit(limit);
fn.apply(null, [arr, iterator, callback]);
};
async.forEachLimit = async.eachLimit;
var _eachLimit = function (limit) {
return function (arr, iterator, callback) {
callback = callback || function () {};
if (!arr.length || limit <= 0) {
return callback();
}
var completed = 0;
var started = 0;
var running = 0;
(function replenish () {
if (completed >= arr.length) {
return callback();
}
while (running < limit && started < arr.length) {
started += 1;
running += 1;
iterator(arr[started - 1], function (err) {
if (err) {
callback(err);
callback = function () {};
}
else {
completed += 1;
running -= 1;
if (completed >= arr.length) {
callback();
}
else {
replenish();
}
}
});
}
})();
};
};
var doParallel = function (fn) {
return function () {
var args = Array.prototype.slice.call(arguments);
return fn.apply(null, [async.each].concat(args));
};
};
var doParallelLimit = function(limit, fn) {
return function () {
var args = Array.prototype.slice.call(arguments);
return fn.apply(null, [_eachLimit(limit)].concat(args));
};
};
var doSeries = function (fn) {
return function () {
var args = Array.prototype.slice.call(arguments);
return fn.apply(null, [async.eachSeries].concat(args));
};
};
var _asyncMap = function (eachfn, arr, iterator, callback) {
var results = [];
arr = _map(arr, function (x, i) {
return {index: i, value: x};
});
eachfn(arr, function (x, callback) {
iterator(x.value, function (err, v) {
results[x.index] = v;
callback(err);
});
}, function (err) {
callback(err, results);
});
};
async.map = doParallel(_asyncMap);
async.mapSeries = doSeries(_asyncMap);
async.mapLimit = function (arr, limit, iterator, callback) {
return _mapLimit(limit)(arr, iterator, callback);
};
var _mapLimit = function(limit) {
return doParallelLimit(limit, _asyncMap);
};
// reduce only has a series version, as doing reduce in parallel won't
// work in many situations.
async.reduce = function (arr, memo, iterator, callback) {
async.eachSeries(arr, function (x, callback) {
iterator(memo, x, function (err, v) {
memo = v;
callback(err);
});
}, function (err) {
callback(err, memo);
});
};
// inject alias
async.inject = async.reduce;
// foldl alias
async.foldl = async.reduce;
async.reduceRight = function (arr, memo, iterator, callback) {
var reversed = _map(arr, function (x) {
return x;
}).reverse();
async.reduce(reversed, memo, iterator, callback);
};
// foldr alias
async.foldr = async.reduceRight;
var _filter = function (eachfn, arr, iterator, callback) {
var results = [];
arr = _map(arr, function (x, i) {
return {index: i, value: x};
});
eachfn(arr, function (x, callback) {
iterator(x.value, function (v) {
if (v) {
results.push(x);
}
callback();
});
}, function (err) {
callback(_map(results.sort(function (a, b) {
return a.index - b.index;
}), function (x) {
return x.value;
}));
});
};
async.filter = doParallel(_filter);
async.filterSeries = doSeries(_filter);
// select alias
async.select = async.filter;
async.selectSeries = async.filterSeries;
var _reject = function (eachfn, arr, iterator, callback) {
var results = [];
arr = _map(arr, function (x, i) {
return {index: i, value: x};
});
eachfn(arr, function (x, callback) {
iterator(x.value, function (v) {
if (!v) {
results.push(x);
}
callback();
});
}, function (err) {
callback(_map(results.sort(function (a, b) {
return a.index - b.index;
}), function (x) {
return x.value;
}));
});
};
async.reject = doParallel(_reject);
async.rejectSeries = doSeries(_reject);
var _detect = function (eachfn, arr, iterator, main_callback) {
eachfn(arr, function (x, callback) {
iterator(x, function (result) {
if (result) {
main_callback(x);
main_callback = function () {};
}
else {
callback();
}
});
}, function (err) {
main_callback();
});
};
async.detect = doParallel(_detect);
async.detectSeries = doSeries(_detect);
async.some = function (arr, iterator, main_callback) {
async.each(arr, function (x, callback) {
iterator(x, function (v) {
if (v) {
main_callback(true);
main_callback = function () {};
}
callback();
});
}, function (err) {
main_callback(false);
});
};
// any alias
async.any = async.some;
async.every = function (arr, iterator, main_callback) {
async.each(arr, function (x, callback) {
iterator(x, function (v) {
if (!v) {
main_callback(false);
main_callback = function () {};
}
callback();
});
}, function (err) {
main_callback(true);
});
};
// all alias
async.all = async.every;
async.sortBy = function (arr, iterator, callback) {
async.map(arr, function (x, callback) {
iterator(x, function (err, criteria) {
if (err) {
callback(err);
}
else {
callback(null, {value: x, criteria: criteria});
}
});
}, function (err, results) {
if (err) {
return callback(err);
}
else {
var fn = function (left, right) {
var a = left.criteria, b = right.criteria;
return a < b ? -1 : a > b ? 1 : 0;
};
callback(null, _map(results.sort(fn), function (x) {
return x.value;
}));
}
});
};
async.auto = function (tasks, callback) {
callback = callback || function () {};
var keys = _keys(tasks);
if (!keys.length) {
return callback(null);
}
var results = {};
var listeners = [];
var addListener = function (fn) {
listeners.unshift(fn);
};
var removeListener = function (fn) {
for (var i = 0; i < listeners.length; i += 1) {
if (listeners[i] === fn) {
listeners.splice(i, 1);
return;
}
}
};
var taskComplete = function () {
_each(listeners.slice(0), function (fn) {
fn();
});
};
addListener(function () {
if (_keys(results).length === keys.length) {
callback(null, results);
callback = function () {};
}
});
_each(keys, function (k) {
var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k];
var taskCallback = function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (args.length <= 1) {
args = args[0];
}
if (err) {
var safeResults = {};
_each(_keys(results), function(rkey) {
safeResults[rkey] = results[rkey];
});
safeResults[k] = args;
callback(err, safeResults);
// stop subsequent errors hitting callback multiple times
callback = function () {};
}
else {
results[k] = args;
async.setImmediate(taskComplete);
}
};
var requires = task.slice(0, Math.abs(task.length - 1)) || [];
var ready = function () {
return _reduce(requires, function (a, x) {
return (a && results.hasOwnProperty(x));
}, true) && !results.hasOwnProperty(k);
};
if (ready()) {
task[task.length - 1](taskCallback, results);
}
else {
var listener = function () {
if (ready()) {
removeListener(listener);
task[task.length - 1](taskCallback, results);
}
};
addListener(listener);
}
});
};
async.waterfall = function (tasks, callback) {
callback = callback || function () {};
if (tasks.constructor !== Array) {
var err = new Error('First argument to waterfall must be an array of functions');
return callback(err);
}
if (!tasks.length) {
return callback();
}
var wrapIterator = function (iterator) {
return function (err) {
if (err) {
callback.apply(null, arguments);
callback = function () {};
}
else {
var args = Array.prototype.slice.call(arguments, 1);
var next = iterator.next();
if (next) {
args.push(wrapIterator(next));
}
else {
args.push(callback);
}
async.setImmediate(function () {
iterator.apply(null, args);
});
}
};
};
wrapIterator(async.iterator(tasks))();
};
var _parallel = function(eachfn, tasks, callback) {
callback = callback || function () {};
if (tasks.constructor === Array) {
eachfn.map(tasks, function (fn, callback) {
if (fn) {
fn(function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (args.length <= 1) {
args = args[0];
}
callback.call(null, err, args);
});
}
}, callback);
}
else {
var results = {};
eachfn.each(_keys(tasks), function (k, callback) {
tasks[k](function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (args.length <= 1) {
args = args[0];
}
results[k] = args;
callback(err);
});
}, function (err) {
callback(err, results);
});
}
};
async.parallel = function (tasks, callback) {
_parallel({ map: async.map, each: async.each }, tasks, callback);
};
async.parallelLimit = function(tasks, limit, callback) {
_parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
};
async.series = function (tasks, callback) {
callback = callback || function () {};
if (tasks.constructor === Array) {
async.mapSeries(tasks, function (fn, callback) {
if (fn) {
fn(function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (args.length <= 1) {
args = args[0];
}
callback.call(null, err, args);
});
}
}, callback);
}
else {
var results = {};
async.eachSeries(_keys(tasks), function (k, callback) {
tasks[k](function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (args.length <= 1) {
args = args[0];
}
results[k] = args;
callback(err);
});
}, function (err) {
callback(err, results);
});
}
};
async.iterator = function (tasks) {
var makeCallback = function (index) {
var fn = function () {
if (tasks.length) {
tasks[index].apply(null, arguments);
}
return fn.next();
};
fn.next = function () {
return (index < tasks.length - 1) ? makeCallback(index + 1): null;
};
return fn;
};
return makeCallback(0);
};
async.apply = function (fn) {
var args = Array.prototype.slice.call(arguments, 1);
return function () {
return fn.apply(
null, args.concat(Array.prototype.slice.call(arguments))
);
};
};
var _concat = function (eachfn, arr, fn, callback) {
var r = [];
eachfn(arr, function (x, cb) {
fn(x, function (err, y) {
r = r.concat(y || []);
cb(err);
});
}, function (err) {
callback(err, r);
});
};
async.concat = doParallel(_concat);
async.concatSeries = doSeries(_concat);
async.whilst = function (test, iterator, callback) {
if (test()) {
iterator(function (err) {
if (err) {
return callback(err);
}
async.whilst(test, iterator, callback);
});
}
else {
callback();
}
};
async.doWhilst = function (iterator, test, callback) {
iterator(function (err) {
if (err) {
return callback(err);
}
if (test()) {
async.doWhilst(iterator, test, callback);
}
else {
callback();
}
});
};
async.until = function (test, iterator, callback) {
if (!test()) {
iterator(function (err) {
if (err) {
return callback(err);
}
async.until(test, iterator, callback);
});
}
else {
callback();
}
};
async.doUntil = function (iterator, test, callback) {
iterator(function (err) {
if (err) {
return callback(err);
}
if (!test()) {
async.doUntil(iterator, test, callback);
}
else {
callback();
}
});
};
async.queue = function (worker, concurrency) {
if (concurrency === undefined) {
concurrency = 1;
}
function _insert(q, data, pos, callback) {
if(data.constructor !== Array) {
data = [data];
}
_each(data, function(task) {
var item = {
data: task,
callback: typeof callback === 'function' ? callback : null
};
if (pos) {
q.tasks.unshift(item);
} else {
q.tasks.push(item);
}
if (q.saturated && q.tasks.length === concurrency) {
q.saturated();
}
async.setImmediate(q.process);
});
}
var workers = 0;
var q = {
tasks: [],
concurrency: concurrency,
saturated: null,
empty: null,
drain: null,
push: function (data, callback) {
_insert(q, data, false, callback);
},
unshift: function (data, callback) {
_insert(q, data, true, callback);
},
process: function () {
if (workers < q.concurrency && q.tasks.length) {
var task = q.tasks.shift();
if (q.empty && q.tasks.length === 0) {
q.empty();
}
workers += 1;
var next = function () {
workers -= 1;
if (task.callback) {
task.callback.apply(task, arguments);
}
if (q.drain && q.tasks.length + workers === 0) {
q.drain();
}
q.process();
};
var cb = only_once(next);
worker(task.data, cb);
}
},
length: function () {
return q.tasks.length;
},
running: function () {
return workers;
}
};
return q;
};
async.cargo = function (worker, payload) {
var working = false,
tasks = [];
var cargo = {
tasks: tasks,
payload: payload,
saturated: null,
empty: null,
drain: null,
push: function (data, callback) {
if(data.constructor !== Array) {
data = [data];
}
_each(data, function(task) {
tasks.push({
data: task,
callback: typeof callback === 'function' ? callback : null
});
if (cargo.saturated && tasks.length === payload) {
cargo.saturated();
}
});
async.setImmediate(cargo.process);
},
process: function process() {
if (working) return;
if (tasks.length === 0) {
if(cargo.drain) cargo.drain();
return;
}
var ts = typeof payload === 'number'
? tasks.splice(0, payload)
: tasks.splice(0);
var ds = _map(ts, function (task) {
return task.data;
});
if(cargo.empty) cargo.empty();
working = true;
worker(ds, function () {
working = false;
var args = arguments;
_each(ts, function (data) {
if (data.callback) {
data.callback.apply(null, args);
}
});
process();
});
},
length: function () {
return tasks.length;
},
running: function () {
return working;
}
};
return cargo;
};
var _console_fn = function (name) {
return function (fn) {
var args = Array.prototype.slice.call(arguments, 1);
fn.apply(null, args.concat([function (err) {
var args = Array.prototype.slice.call(arguments, 1);
if (typeof console !== 'undefined') {
if (err) {
if (console.error) {
console.error(err);
}
}
else if (console[name]) {
_each(args, function (x) {
console[name](x);
});
}
}
}]));
};
};
async.log = _console_fn('log');
async.dir = _console_fn('dir');
/*async.info = _console_fn('info');
async.warn = _console_fn('warn');
async.error = _console_fn('error');*/
async.memoize = function (fn, hasher) {
var memo = {};
var queues = {};
hasher = hasher || function (x) {
return x;
};
var memoized = function () {
var args = Array.prototype.slice.call(arguments);
var callback = args.pop();
var key = hasher.apply(null, args);
if (key in memo) {
callback.apply(null, memo[key]);
}
else if (key in queues) {
queues[key].push(callback);
}
else {
queues[key] = [callback];
fn.apply(null, args.concat([function () {
memo[key] = arguments;
var q = queues[key];
delete queues[key];
for (var i = 0, l = q.length; i < l; i++) {
q[i].apply(null, arguments);
}
}]));
}
};
memoized.memo = memo;
memoized.unmemoized = fn;
return memoized;
};
async.unmemoize = function (fn) {
return function () {
return (fn.unmemoized || fn).apply(null, arguments);
};
};
async.times = function (count, iterator, callback) {
var counter = [];
for (var i = 0; i < count; i++) {
counter.push(i);
}
return async.map(counter, iterator, callback);
};
async.timesSeries = function (count, iterator, callback) {
var counter = [];
for (var i = 0; i < count; i++) {
counter.push(i);
}
return async.mapSeries(counter, iterator, callback);
};
async.compose = function (/* functions... */) {
var fns = Array.prototype.reverse.call(arguments);
return function () {
var that = this;
var args = Array.prototype.slice.call(arguments);
var callback = args.pop();
async.reduce(fns, args, function (newargs, fn, cb) {
fn.apply(that, newargs.concat([function () {
var err = arguments[0];
var nextargs = Array.prototype.slice.call(arguments, 1);
cb(err, nextargs);
}]))
},
function (err, results) {
callback.apply(that, [err].concat(results));
});
};
};
var _applyEach = function (eachfn, fns /*args...*/) {
var go = function () {
var that = this;
var args = Array.prototype.slice.call(arguments);
var callback = args.pop();
return eachfn(fns, function (fn, cb) {
fn.apply(that, args.concat([cb]));
},
callback);
};
if (arguments.length > 2) {
var args = Array.prototype.slice.call(arguments, 2);
return go.apply(this, args);
}
else {
return go;
}
};
async.applyEach = doParallel(_applyEach);
async.applyEachSeries = doSeries(_applyEach);
async.forever = function (fn, callback) {
function next(err) {
if (err) {
if (callback) {
return callback(err);
}
throw err;
}
fn(next);
}
next();
};
// AMD / RequireJS
if (typeof define !== 'undefined' && define.amd) {
define([], function () {
return async;
});
}
// Node.js
else if (typeof module !== 'undefined' && module.exports) {
module.exports = async;
}
// included directly via <script> tag
else {
root.async = async;
}
}());

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,22 @@
Copyright (c) 2013 Kyle Robinson Young
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,180 @@
# gaze [![Build Status](https://travis-ci.org/shama/gaze.png?branch=master)](https://travis-ci.org/shama/gaze)
A globbing fs.watch wrapper built from the best parts of other fine watch libs.
Compatible with Node.js 0.10/0.8, Windows, OSX and Linux.
![gaze](http://dontkry.com/images/repos/gaze.png)
## Usage
Install the module with: `npm install gaze` or place into your `package.json`
and run `npm install`.
```javascript
var gaze = require('gaze');
// Watch all .js files/dirs in process.cwd()
gaze('**/*.js', function(err, watcher) {
// Files have all started watching
// watcher === this
// Get all watched files
console.log(this.watched());
// On file changed
this.on('changed', function(filepath) {
console.log(filepath + ' was changed');
});
// On file added
this.on('added', function(filepath) {
console.log(filepath + ' was added');
});
// On file deleted
this.on('deleted', function(filepath) {
console.log(filepath + ' was deleted');
});
// On changed/added/deleted
this.on('all', function(event, filepath) {
console.log(filepath + ' was ' + event);
});
// Get watched files with relative paths
console.log(this.relative());
});
// Also accepts an array of patterns
gaze(['stylesheets/*.css', 'images/**/*.png'], function() {
// Add more patterns later to be watched
this.add(['js/*.js']);
});
```
### Alternate Interface
```javascript
var Gaze = require('gaze').Gaze;
var gaze = new Gaze('**/*');
// Files have all started watching
gaze.on('ready', function(watcher) { });
// An added/changed/deleted event has occurred
gaze.on('all', function(event, filepath) { });
```
### Errors
```javascript
gaze('**/*', function() {
this.on('error', function(err) {
// Handle error here
});
});
```
### Minimatch / Glob
See [isaacs's minimatch](https://github.com/isaacs/minimatch) for more
information on glob patterns.
## Documentation
### gaze(patterns, [options], callback)
* `patterns` {String|Array} File patterns to be matched
* `options` {Object}
* `callback` {Function}
* `err` {Error | null}
* `watcher` {Object} Instance of the Gaze watcher
### Class: gaze.Gaze
Create a Gaze object by instantiating the `gaze.Gaze` class.
```javascript
var Gaze = require('gaze').Gaze;
var gaze = new Gaze(pattern, options, callback);
```
#### Properties
* `options` The options object passed in.
* `interval` {integer} Interval to pass to fs.watchFile
* `debounceDelay` {integer} Delay for events called in succession for the same
file/event
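A minimal sketch of passing these options (the values shown are illustrative, not defaults):
```javascript
var gaze = require('gaze');
// Poll every 200ms and collapse duplicate events fired within 300ms of each other
gaze('**/*.js', { interval: 200, debounceDelay: 300 }, function(err, watcher) {
  if (err) { throw err; }
  console.log(watcher.options.interval); // 200
});
```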
#### Events
* `ready(watcher)` When files have been globbed and watching has begun.
* `all(event, filepath)` When an `added`, `changed` or `deleted` event occurs.
* `added(filepath)` When a file has been added to a watch directory.
* `changed(filepath)` When a file has been changed.
* `deleted(filepath)` When a file has been deleted.
* `renamed(newPath, oldPath)` When a file has been renamed.
* `end()` When the watcher is closed and watches have been removed.
* `error(err)` When an error occurs.
* `nomatch` When no files have been matched.
#### Methods
* `emit(event, [...])` Wrapper for the EventEmitter.emit.
`added`|`changed`|`deleted` events will also trigger the `all` event.
* `close()` Unwatch all files and reset the watch instance.
* `add(patterns, callback)` Adds file pattern(s) to be watched (see the sketch after this list).
* `remove(filepath)` Removes a file or directory from being watched. Does not
recurse directories.
* `watched()` Returns the currently watched files.
* `relative([dir, unixify])` Returns the currently watched files with relative paths.
* `dir` {string} Only return relative files for this directory.
* `unixify` {boolean} Return paths with `/` instead of `\\` if on Windows.
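A short sketch of the `add`, `watched` and `relative` methods (the patterns and paths are illustrative):
```javascript
var gaze = require('gaze');
gaze('lib/*.js', function(err, watcher) {
  if (err) { throw err; }
  // Watch additional patterns after startup
  watcher.add('test/*.js');
  // All watched files, keyed by watched directory
  console.log(watcher.watched());
  // Files watched in the current directory, as relative unix-style paths
  console.log(watcher.relative('.', true));
  // Stop watching everything and reset the instance
  watcher.close();
});
```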
## FAQs
### Why Another `fs.watch` Wrapper?
I liked parts of other `fs.watch` wrappers but none had all the features I
needed. This lib combines the features I needed from other fine watch libs:
Speedy data behavior from
[paulmillr's chokidar](https://github.com/paulmillr/chokidar), API interface
from [mikeal's watch](https://github.com/mikeal/watch) and file globbing using
[isaacs's glob](https://github.com/isaacs/node-glob) which is also used by
[cowboy's Grunt](https://github.com/gruntjs/grunt).
### How do I fix the error `EMFILE: Too many opened files.`?
This is because of your system's max opened file limit. For OSX the default is
very low (256). Increase your limit temporarily with `ulimit -n 10480`, the
number being the new max limit.
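If raising the limit is not an option, the same failure is also delivered to the `error` event shown above; a minimal sketch:
```javascript
var gaze = require('gaze');
gaze('**/*', function() {
  this.on('error', function(err) {
    if (/EMFILE/.test(err.message)) {
      console.error('Too many open files - watch fewer files or raise the ulimit.');
    }
  });
});
```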
## Contributing
In lieu of a formal styleguide, take care to maintain the existing coding style.
Add unit tests for any new or changed functionality. Lint and test your code
using [grunt](http://gruntjs.com/).
## Release History
* 0.5.1 - Use setImmediate (process.nextTick for node v0.8) to defer ready/nomatch events (@amasad).
* 0.5.0 - Process is now kept alive while watching files. Emits a nomatch event when no files are matching.
* 0.4.3 - Track file additions in newly created folders (@brett-shwom).
* 0.4.2 - Fix .remove() method to remove a single file in a directory (@kaelzhang). Fixing Cannot call method 'call' of undefined (@krasimir). Track new file additions within folders (@brett-shwom).
* 0.4.1 - Fix watchDir not respecting close in race condition (@chrisirhc).
* 0.4.0 - Drop support for node v0.6. Use globule for file matching. Avoid node v0.10 path.resolve/join errors. Register new files when added to non-existent folder. Multiple instances can now poll the same files (@jpommerening).
* 0.3.4 - Code clean up. Fix path must be strings errors (@groner). Fix incorrect added events (@groner).
* 0.3.3 - Fix for multiple patterns with negate.
* 0.3.2 - Emit `end` before removeAllListeners.
* 0.3.1 - Fix added events within subfolder patterns.
* 0.3.0 - Handle safewrite events, `forceWatchMethod` option removed, bug fixes and watch optimizations (@rgaskill).
* 0.2.2 - Fix issue where subsequent add calls don't get watched (@samcday). removeAllListeners on close.
* 0.2.1 - Fix issue with invalid `added` events in current working dir.
* 0.2.0 - Support and mark folders with `path.sep`. Add `forceWatchMethod` option. Support `renamed` events.
* 0.1.6 - Recognize the `cwd` option properly
* 0.1.5 - Catch too many open file errors
* 0.1.4 - Really fix the race condition with 2 watches
* 0.1.3 - Fix race condition with 2 watches
* 0.1.2 - Read triggering changed event fix
* 0.1.1 - Minor fixes
* 0.1.0 - Initial release
## License
Copyright (c) 2013 Kyle Robinson Young
Licensed under the MIT license.

View File

@ -0,0 +1,439 @@
/*
* gaze
* https://github.com/shama/gaze
*
* Copyright (c) 2013 Kyle Robinson Young
* Licensed under the MIT license.
*/
'use strict';
// libs
var util = require('util');
var EE = require('events').EventEmitter;
var fs = require('fs');
var path = require('path');
var globule = require('globule');
var helper = require('./helper');
// shim setImmediate for node v0.8
var setImmediate = require('timers').setImmediate;
if (typeof setImmediate !== 'function') {
setImmediate = process.nextTick;
}
// globals
var delay = 10;
// `Gaze` EventEmitter object to return in the callback
function Gaze(patterns, opts, done) {
var self = this;
EE.call(self);
// If second arg is the callback
if (typeof opts === 'function') {
done = opts;
opts = {};
}
// Default options
opts = opts || {};
opts.mark = true;
opts.interval = opts.interval || 100;
opts.debounceDelay = opts.debounceDelay || 500;
opts.cwd = opts.cwd || process.cwd();
this.options = opts;
// Default done callback
done = done || function() {};
// Remember our watched dir:files
this._watched = Object.create(null);
// Store watchers
this._watchers = Object.create(null);
// Store watchFile listeners
this._pollers = Object.create(null);
// Store patterns
this._patterns = [];
// Cached events for debouncing
this._cached = Object.create(null);
// Set maxListeners
if (this.options.maxListeners) {
this.setMaxListeners(this.options.maxListeners);
Gaze.super_.prototype.setMaxListeners(this.options.maxListeners);
delete this.options.maxListeners;
}
// Initialize the watch on files
if (patterns) {
this.add(patterns, done);
}
// keep the process alive
this._keepalive = setInterval(function() {}, 200);
return this;
}
util.inherits(Gaze, EE);
// Main entry point. Start watching and call done when setup
module.exports = function gaze(patterns, opts, done) {
return new Gaze(patterns, opts, done);
};
module.exports.Gaze = Gaze;
// Override the emit function to emit `all` events
// and debounce on duplicate events per file
Gaze.prototype.emit = function() {
var self = this;
var args = arguments;
var e = args[0];
var filepath = args[1];
var timeoutId;
// If not added/deleted/changed/renamed then just emit the event
if (e.slice(-2) !== 'ed') {
Gaze.super_.prototype.emit.apply(self, args);
return this;
}
// Detect rename event, if added and previous deleted is in the cache
if (e === 'added') {
Object.keys(this._cached).forEach(function(oldFile) {
if (self._cached[oldFile].indexOf('deleted') !== -1) {
args[0] = e = 'renamed';
[].push.call(args, oldFile);
delete self._cached[oldFile];
return false;
}
});
}
// If this event isn't already cached for the file, cache it, schedule the
// cache entry to expire after the debounce delay, then emit the event
var cache = this._cached[filepath] || [];
if (cache.indexOf(e) === -1) {
helper.objectPush(self._cached, filepath, e);
clearTimeout(timeoutId);
timeoutId = setTimeout(function() {
delete self._cached[filepath];
}, this.options.debounceDelay);
// Emit the event and `all` event
Gaze.super_.prototype.emit.apply(self, args);
Gaze.super_.prototype.emit.apply(self, ['all', e].concat([].slice.call(args, 1)));
}
// Detect if new folder added to trigger for matching files within folder
if (e === 'added') {
if (helper.isDir(filepath)) {
fs.readdirSync(filepath).map(function(file) {
return path.join(filepath, file);
}).filter(function(file) {
return globule.isMatch(self._patterns, file, self.options);
}).forEach(function(file) {
self.emit('added', file);
});
}
}
return this;
};
// Close watchers
Gaze.prototype.close = function(_reset) {
var self = this;
_reset = _reset === false ? false : true;
Object.keys(self._watchers).forEach(function(file) {
self._watchers[file].close();
});
self._watchers = Object.create(null);
Object.keys(this._watched).forEach(function(dir) {
self._unpollDir(dir);
});
if (_reset) {
self._watched = Object.create(null);
setTimeout(function() {
self.emit('end');
self.removeAllListeners();
clearInterval(self._keepalive);
}, delay + 100);
}
return self;
};
// Add file patterns to be watched
Gaze.prototype.add = function(files, done) {
if (typeof files === 'string') { files = [files]; }
this._patterns = helper.unique.apply(null, [this._patterns, files]);
files = globule.find(this._patterns, this.options);
this._addToWatched(files);
this.close(false);
this._initWatched(done);
};
// Don't increment patterns and don't call done if nothing was added
Gaze.prototype._internalAdd = function(file, done) {
var files = [];
if (helper.isDir(file)) {
files = [helper.markDir(file)].concat(globule.find(this._patterns, this.options));
} else {
if (globule.isMatch(this._patterns, file, this.options)) {
files = [file];
}
}
if (files.length > 0) {
this._addToWatched(files);
this.close(false);
this._initWatched(done);
}
};
// Remove file/dir from `watched`
Gaze.prototype.remove = function(file) {
var self = this;
if (this._watched[file]) {
// is dir, remove all files
this._unpollDir(file);
delete this._watched[file];
} else {
// is a file, find and remove
Object.keys(this._watched).forEach(function(dir) {
var index = self._watched[dir].indexOf(file);
if (index !== -1) {
self._unpollFile(file);
self._watched[dir].splice(index, 1);
return false;
}
});
}
if (this._watchers[file]) {
this._watchers[file].close();
}
return this;
};
// Return watched files
Gaze.prototype.watched = function() {
return this._watched;
};
// Returns `watched` files with relative paths to process.cwd()
Gaze.prototype.relative = function(dir, unixify) {
var self = this;
var relative = Object.create(null);
var relDir, relFile, unixRelDir;
var cwd = this.options.cwd || process.cwd();
if (dir === '') { dir = '.'; }
dir = helper.markDir(dir);
unixify = unixify || false;
Object.keys(this._watched).forEach(function(dir) {
relDir = path.relative(cwd, dir) + path.sep;
if (relDir === path.sep) { relDir = '.'; }
unixRelDir = unixify ? helper.unixifyPathSep(relDir) : relDir;
relative[unixRelDir] = self._watched[dir].map(function(file) {
relFile = path.relative(path.join(cwd, relDir) || '', file || '');
if (helper.isDir(file)) {
relFile = helper.markDir(relFile);
}
if (unixify) {
relFile = helper.unixifyPathSep(relFile);
}
return relFile;
});
});
if (dir && unixify) {
dir = helper.unixifyPathSep(dir);
}
return dir ? relative[dir] || [] : relative;
};
// Adds files and dirs to watched
Gaze.prototype._addToWatched = function(files) {
for (var i = 0; i < files.length; i++) {
var file = files[i];
var filepath = path.resolve(this.options.cwd, file);
var dirname = (helper.isDir(file)) ? filepath : path.dirname(filepath);
dirname = helper.markDir(dirname);
// If a new dir is added
if (helper.isDir(file) && !(filepath in this._watched)) {
helper.objectPush(this._watched, filepath, []);
}
if (file.slice(-1) === '/') { filepath += path.sep; }
helper.objectPush(this._watched, path.dirname(filepath) + path.sep, filepath);
// add folders into the mix
var readdir = fs.readdirSync(dirname);
for (var j = 0; j < readdir.length; j++) {
var dirfile = path.join(dirname, readdir[j]);
if (fs.statSync(dirfile).isDirectory()) {
helper.objectPush(this._watched, dirname, dirfile + path.sep);
}
}
}
return this;
};
Gaze.prototype._watchDir = function(dir, done) {
var self = this;
var timeoutId;
try {
this._watchers[dir] = fs.watch(dir, function(event) {
// race condition. Let's give the fs a little time to settle down. so we
// don't fire events on non existent files.
clearTimeout(timeoutId);
timeoutId = setTimeout(function() {
// race condition. Ensure that this directory is still being watched
// before continuing.
if ((dir in self._watchers) && fs.existsSync(dir)) {
done(null, dir);
}
}, delay + 100);
});
} catch (err) {
return this._handleError(err);
}
return this;
};
Gaze.prototype._unpollFile = function(file) {
if (this._pollers[file]) {
fs.unwatchFile(file, this._pollers[file] );
delete this._pollers[file];
}
return this;
};
Gaze.prototype._unpollDir = function(dir) {
this._unpollFile(dir);
for (var i = 0; i < this._watched[dir].length; i++) {
this._unpollFile(this._watched[dir][i]);
}
};
Gaze.prototype._pollFile = function(file, done) {
var opts = { persistent: true, interval: this.options.interval };
if (!this._pollers[file]) {
this._pollers[file] = function(curr, prev) {
done(null, file);
};
try {
fs.watchFile(file, opts, this._pollers[file]);
} catch (err) {
return this._handleError(err);
}
}
return this;
};
// Initialize the actual watch on `watched` files
Gaze.prototype._initWatched = function(done) {
var self = this;
var cwd = this.options.cwd || process.cwd();
var curWatched = Object.keys(self._watched);
// if no matching files
if (curWatched.length < 1) {
// Defer to emitting to give a chance to attach event handlers.
setImmediate(function () {
self.emit('ready', self);
if (done) { done.call(self, null, self); }
self.emit('nomatch');
});
return;
}
helper.forEachSeries(curWatched, function(dir, next) {
dir = dir || '';
var files = self._watched[dir];
// Triggered when a watched dir has an event
self._watchDir(dir, function(event, dirpath) {
var relDir = cwd === dir ? '.' : path.relative(cwd, dir);
relDir = relDir || '';
fs.readdir(dirpath, function(err, current) {
if (err) { return self.emit('error', err); }
if (!current) { return; }
try {
// append path.sep to directories so they match previous.
current = current.map(function(curPath) {
if (fs.existsSync(path.join(dir, curPath)) && fs.statSync(path.join(dir, curPath)).isDirectory()) {
return curPath + path.sep;
} else {
return curPath;
}
});
} catch (err) {
// race condition-- sometimes the file no longer exists
}
// Get watched files for this dir
var previous = self.relative(relDir);
// If file was deleted
previous.filter(function(file) {
return current.indexOf(file) < 0;
}).forEach(function(file) {
if (!helper.isDir(file)) {
var filepath = path.join(dir, file);
self.remove(filepath);
self.emit('deleted', filepath);
}
});
// If file was added
current.filter(function(file) {
return previous.indexOf(file) < 0;
}).forEach(function(file) {
// Is it a matching pattern?
var relFile = path.join(relDir, file);
// Add to watch then emit event
self._internalAdd(relFile, function() {
self.emit('added', path.join(dir, file));
});
});
});
});
// Watch for change/rename events on files
files.forEach(function(file) {
if (helper.isDir(file)) { return; }
self._pollFile(file, function(err, filepath) {
// Only emit changed if the file still exists
// Prevents changed/deleted duplicate events
if (fs.existsSync(filepath)) {
self.emit('changed', filepath);
}
});
});
next();
}, function() {
// Return this instance of Gaze
// delay before ready solves a lot of issues
setTimeout(function() {
self.emit('ready', self);
if (done) { done.call(self, null, self); }
}, delay + 100);
});
};
// If an error, handle it here
Gaze.prototype._handleError = function(err) {
if (err.code === 'EMFILE') {
return this.emit('error', new Error('EMFILE: Too many opened files.'));
}
return this.emit('error', err);
};

View File

@ -0,0 +1,67 @@
'use strict';
var path = require('path');
var helper = module.exports = {};
// Returns boolean whether filepath is dir terminated
helper.isDir = function isDir(dir) {
if (typeof dir !== 'string') { return false; }
return (dir.slice(-(path.sep.length)) === path.sep);
};
// Create `key: []` on `obj` if it doesn't exist, then push or concat the `val`
helper.objectPush = function objectPush(obj, key, val) {
if (obj[key] == null) { obj[key] = []; }
if (Array.isArray(val)) { obj[key] = obj[key].concat(val); }
else if (val) { obj[key].push(val); }
return obj[key] = helper.unique(obj[key]);
};
// Ensures the dir is marked with path.sep
helper.markDir = function markDir(dir) {
if (typeof dir === 'string' &&
dir.slice(-(path.sep.length)) !== path.sep &&
dir !== '.') {
dir += path.sep;
}
return dir;
};
// Changes path.sep to unix ones for testing
helper.unixifyPathSep = function unixifyPathSep(filepath) {
return (process.platform === 'win32') ? String(filepath).replace(/\\/g, '/') : filepath;
};
/**
* Lo-Dash 1.0.1 <http://lodash.com/>
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.4.4 <http://underscorejs.org/>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud Inc.
* Available under MIT license <http://lodash.com/license>
*/
helper.unique = function unique() {
  var array = Array.prototype.concat.apply(Array.prototype, arguments);
  var result = [];
  for (var i = 0; i < array.length; i++) {
    if (result.indexOf(array[i]) === -1) {
      result.push(array[i]);
    }
  }
  return result;
};
/**
* Copyright (c) 2010 Caolan McMahon
* Available under MIT license <https://raw.github.com/caolan/async/master/LICENSE>
*/
helper.forEachSeries = function forEachSeries(arr, iterator, callback) {
if (!arr.length) { return callback(); }
var completed = 0;
var iterate = function() {
iterator(arr[completed], function (err) {
if (err) {
callback(err);
callback = function() {};
} else {
completed += 1;
if (completed === arr.length) {
callback(null);
} else {
iterate();
}
}
});
};
iterate();
};

View File

@ -0,0 +1,15 @@
{
"curly": true,
"eqeqeq": true,
"immed": true,
"latedef": true,
"newcap": true,
"noarg": true,
"sub": true,
"undef": true,
"unused": true,
"boss": true,
"eqnull": true,
"node": true,
"es5": true
}

View File

@ -0,0 +1 @@
/node_modules/

View File

@ -0,0 +1,6 @@
language: node_js
node_js:
- "0.8"
- "0.10"
before_script:
- npm install -g grunt-cli

View File

@ -0,0 +1,48 @@
'use strict';
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
nodeunit: {
files: ['test/**/*_test.js'],
},
jshint: {
options: {
jshintrc: '.jshintrc'
},
gruntfile: {
src: 'Gruntfile.js'
},
lib: {
src: ['lib/**/*.js']
},
test: {
src: ['test/*.js']
},
},
watch: {
gruntfile: {
files: '<%= jshint.gruntfile.src %>',
tasks: ['jshint:gruntfile']
},
lib: {
files: '<%= jshint.lib.src %>',
tasks: ['jshint:lib', 'nodeunit']
},
test: {
files: '<%= jshint.test.src %>',
tasks: ['jshint:test', 'nodeunit']
},
},
});
// These plugins provide necessary tasks.
grunt.loadNpmTasks('grunt-contrib-nodeunit');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-watch');
// Default task.
grunt.registerTask('default', ['jshint', 'nodeunit']);
};

View File

@ -0,0 +1,22 @@
Copyright (c) 2013 "Cowboy" Ben Alman
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,117 @@
# globule [![Build Status](https://secure.travis-ci.org/cowboy/node-globule.png?branch=master)](http://travis-ci.org/cowboy/node-globule)
An easy-to-use wildcard globbing library.
## Getting Started
Install the module with: `npm install globule`
```javascript
var globule = require('globule');
var filepaths = globule.find('**/*.js');
```
## Documentation
### globule.find
Returns a unique array of all file or directory paths that match the given globbing pattern(s). This method accepts either comma separated globbing patterns or an array of globbing patterns. Paths matching patterns that begin with `!` will be excluded from the returned array. Patterns are processed in order, so inclusion and exclusion order is significant.
```js
globule.find(patterns [, options])
```
The `options` object supports all [glob][] library options, along with a few extras. These are the most commonly used:
* `filter` Either a valid [fs.Stats method name](http://nodejs.org/docs/latest/api/fs.html#fs_class_fs_stats) or a function that will be passed the matched `src` filepath and `options` object as arguments. This function should return a `Boolean` value.
* `nonull` Retain globbing patterns in result set even if they fail to match files.
* `matchBase` Patterns without slashes will match just the basename part. Eg. this makes `*.js` work like `**/*.js`.
* `srcBase` Patterns will be matched relative to the specified path instead of the current working directory. This is a synonym for `cwd`.
* `prefixBase` Any specified `srcBase` will be prefixed to all returned filepaths.
[glob]: https://github.com/isaacs/node-glob
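A short sketch using the `filter` and `matchBase` options described above (the patterns are illustrative):
```js
var fs = require('fs');
var globule = require('globule');
// Match *.js at any depth (matchBase) and keep only real files (filter by fs.Stats method name)
var jsFiles = globule.find('*.js', { matchBase: true, filter: 'isFile' });
// A filter function receives the matched filepath and the options object
var nonEmpty = globule.find('**/*.css', {
  filter: function(filepath) {
    return fs.statSync(filepath).size > 0;
  }
});
```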
### globule.match
Match one or more globbing patterns against one or more file paths. Returns a uniqued array of all file paths that match any of the specified globbing patterns. Both the `patterns` and `filepaths` arguments can be a single string or array of strings. Paths matching patterns that begin with `!` will be excluded from the returned array. Patterns are processed in order, so inclusion and exclusion order is significant.
```js
globule.match(patterns, filepaths [, options])
```
### globule.isMatch
This method contains the same signature and logic as the `globule.match` method, but returns `true` if any files were matched, otherwise `false`.
```js
globule.isMatch(patterns, filepaths [, options])
```
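For example (a minimal sketch):
```js
var globule = require('globule');
globule.isMatch('**/*.js', 'lib/foo.js');  // true
globule.isMatch('**/*.js', 'lib/foo.css'); // false
```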
### globule.mapping
Given a set of source file paths, returns an array of src-dest file mapping objects. Both src and dest paths may be renamed, depending on the options specified.
```js
globule.mapping(filepaths [, options])
```
In addition to the options the `globule.find` method supports, the options object also supports these properties:
* `srcBase` The directory from which patterns are matched. Any string specified as `srcBase` is effectively stripped from the beginning of all matched paths.
* `destBase` The specified path is prefixed to all `dest` filepaths.
* `ext` Remove anything after (and including) the first `.` in the destination path, then append this value.
* `extDot` Changes the behavior of `ext`: `"first"` and `"last"` will remove anything after the first or last `.` in the destination filename, respectively. Defaults to `"first"`.
* `flatten` Remove the path component from all matched src files. The src file path is still joined to the specified destBase.
* `rename` If specified, this function will be responsible for returning the final `dest` filepath. By default, it flattens paths (if specified), changes extensions (if specified) and joins the matched path to the `destBase`.
### globule.findMapping
This method is a convenience wrapper around the `globule.find` and `globule.mapping` methods.
```js
globule.findMapping(patterns [, options])
```
## Examples
Given the files `foo/a.js` and `foo/b.js`:
### srcBase and destBase
```js
globule.find("foo/*.js")
// ["foo/a.js", "foo/b.js"]
globule.find("*.js", {srcBase: "foo"})
// ["a.js", "b.js"]
globule.find("*.js", {srcBase: "foo", prefixBase: true})
// ["foo/a.js", "foo/b.js"]
```
```js
globule.findMapping("foo/*.js")
// [{src: "foo/a.js", dest: "foo/a.js"}, {src: "foo/b.js", dest: "foo/b.js"}]
globule.findMapping("foo/*.js", {destBase: "bar"})
// [{src: "foo/a.js", dest: "bar/foo/a.js"}, {src: "foo/b.js", dest: "bar/foo/b.js"}]
globule.findMapping("*.js", {srcBase: "foo", destBase: "bar"})
// [{src: "foo/a.js", dest: "bar/a.js"}, {src: "foo/b.js", dest: "bar/b.js"}]
```
```js
globule.mapping(["foo/a.js", "foo/b.js"])
// [{src: "foo/a.js", dest: "foo/a.js"}, {src: "foo/b.js", dest: "foo/b.js"}]
globule.mapping(["foo/a.js", "foo/b.js"], {destBase: "bar"})
// [{src: "foo/a.js", dest: "bar/foo/a.js"}, {src: "foo/b.js", dest: "bar/foo/b.js"}]
globule.mapping(["a.js", "b.js"], {srcBase: "foo", destBase: "bar"})
// [{src: "foo/a.js", dest: "bar/a.js"}, {src: "foo/b.js", dest: "bar/b.js"}]
```
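The `ext`, `flatten` and `destBase` options can be combined as well; a sketch, still assuming the files `foo/a.js` and `foo/b.js`:
```js
globule.mapping(["foo/a.js", "foo/b.js"], {destBase: "dist", flatten: true, ext: ".min.js"})
// dest becomes "dist/a.min.js" and "dist/b.min.js"; src keeps the original paths
```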
## Contributing
In lieu of a formal styleguide, take care to maintain the existing coding style. Add unit tests for any new or changed functionality. Lint and test your code using [Grunt](http://gruntjs.com/).
## Release History
_(Nothing yet)_
## License
Copyright (c) 2013 "Cowboy" Ben Alman
Licensed under the MIT license.

View File

@ -0,0 +1,172 @@
/*
* globule
* https://github.com/cowboy/node-globule
*
* Copyright (c) 2013 "Cowboy" Ben Alman
* Licensed under the MIT license.
*/
'use strict';
var fs = require('fs');
var path = require('path');
var _ = require('lodash');
var glob = require('glob');
var minimatch = require('minimatch');
// The module.
var globule = exports;
// Process specified wildcard glob patterns or filenames against a
// callback, excluding and uniquing files in the result set.
function processPatterns(patterns, fn) {
return _.flatten(patterns).reduce(function(result, pattern) {
if (pattern.indexOf('!') === 0) {
// If the first character is ! all matches via this pattern should be
// removed from the result set.
pattern = pattern.slice(1);
return _.difference(result, fn(pattern));
} else {
// Otherwise, add all matching filepaths to the result set.
return _.union(result, fn(pattern));
}
}, []);
}
// Match a filepath or filepaths against one or more wildcard patterns. Returns
// all matching filepaths. This behaves just like minimatch.match, but supports
// any number of patterns.
globule.match = function(patterns, filepaths, options) {
// Return empty set if either patterns or filepaths was omitted.
if (patterns == null || filepaths == null) { return []; }
// Normalize patterns and filepaths to arrays.
if (!_.isArray(patterns)) { patterns = [patterns]; }
if (!_.isArray(filepaths)) { filepaths = [filepaths]; }
// Return empty set if there are no patterns or filepaths.
if (patterns.length === 0 || filepaths.length === 0) { return []; }
// Return all matching filepaths.
return processPatterns(patterns, function(pattern) {
return minimatch.match(filepaths, pattern, options || {});
});
};
// Match a filepath or filepaths against one or more wildcard patterns. Returns
// true if any of the patterns match.
globule.isMatch = function() {
return globule.match.apply(null, arguments).length > 0;
};
// Return an array of all file paths that match the given wildcard patterns.
globule.find = function() {
var args = _.toArray(arguments);
// If the last argument is an options object, remove it from args.
var options = _.isPlainObject(args[args.length - 1]) ? args.pop() : {};
// Use the first argument if it's an Array, otherwise use all arguments.
var patterns = _.isArray(args[0]) ? args[0] : args;
// Return empty set if there are no patterns or filepaths.
if (patterns.length === 0) { return []; }
var srcBase = options.srcBase || options.cwd;
// Create glob-specific options object.
var globOptions = _.extend({}, options);
if (srcBase) {
globOptions.cwd = srcBase;
}
// Get all matching filepaths.
var matches = processPatterns(patterns, function(pattern) {
return glob.sync(pattern, globOptions);
});
// If srcBase and prefixBase were specified, prefix srcBase to matched paths.
if (srcBase && options.prefixBase) {
matches = matches.map(function(filepath) {
return path.join(srcBase, filepath);
});
}
// Filter result set?
if (options.filter) {
matches = matches.filter(function(filepath) {
// If srcBase was specified but prefixBase was NOT, prefix srcBase
// temporarily, for filtering.
if (srcBase && !options.prefixBase) {
filepath = path.join(srcBase, filepath);
}
try {
if (_.isFunction(options.filter)) {
return options.filter(filepath, options);
} else {
// If the file is of the right type and exists, this should work.
return fs.statSync(filepath)[options.filter]();
}
} catch(err) {
// Otherwise, it's probably not the right type.
return false;
}
});
}
return matches;
};
var pathSeparatorRe = /[\/\\]/g;
var extDotRe = {
first: /(\.[^\/]*)?$/,
last: /(\.[^\/\.]*)?$/,
};
function rename(dest, options) {
// Flatten path?
if (options.flatten) {
dest = path.basename(dest);
}
// Change the extension?
if (options.ext) {
dest = dest.replace(extDotRe[options.extDot], options.ext);
}
// Join dest and destBase?
if (options.destBase) {
dest = path.join(options.destBase, dest);
}
return dest;
}
// Build a mapping of src-dest filepaths from the given set of filepaths.
globule.mapping = function(filepaths, options) {
// Return empty set if filepaths was omitted.
if (filepaths == null) { return []; }
options = _.defaults({}, options, {
extDot: 'first',
rename: rename,
});
var files = [];
var fileByDest = {};
// Find all files matching pattern, using passed-in options.
filepaths.forEach(function(src) {
// Generate destination filename.
var dest = options.rename(src, options);
// Prepend srcBase to all src paths.
if (options.srcBase) {
src = path.join(options.srcBase, src);
}
// Normalize filepaths to be unix-style.
dest = dest.replace(pathSeparatorRe, '/');
src = src.replace(pathSeparatorRe, '/');
// Map correct src path to dest path.
if (fileByDest[dest]) {
// If dest already exists, push this src onto that dest's src array.
fileByDest[dest].src.push(src);
} else {
// Otherwise create a new src-dest file mapping object.
files.push({
src: [src],
dest: dest,
});
// And store a reference for later use.
fileByDest[dest] = files[files.length - 1];
}
});
return files;
};
// Return a mapping of src-dest filepaths from files matching the given
// wildcard patterns.
globule.findMapping = function(patterns, options) {
return globule.mapping(globule.find(patterns, options), options);
};

View File

@ -0,0 +1,2 @@
.*.swp
test/a/

View File

@ -0,0 +1,3 @@
language: node_js
node_js:
- 0.8

View File

@ -0,0 +1,27 @@
Copyright (c) Isaac Z. Schlueter ("Author")
All rights reserved.
The BSD License
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,233 @@
# Glob
This is a glob implementation in JavaScript. It uses the `minimatch`
library to do its matching.
## Attention: node-glob users!
The API has changed dramatically between 2.x and 3.x. This library is
now 100% JavaScript, and the integer flags have been replaced with an
options object.
Also, there's an event emitter class, proper tests, and all the other
things you've come to expect from node modules.
And best of all, no compilation!
## Usage
```javascript
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
```
## Features
Please see the [minimatch
documentation](https://github.com/isaacs/minimatch) for more details.
Supports these glob features:
* Brace Expansion
* Extended glob matching
* "Globstar" `**` matching
See:
* `man sh`
* `man bash`
* `man 3 fnmatch`
* `man 5 gitignore`
* [minimatch documentation](https://github.com/isaacs/minimatch)
## glob(pattern, [options], cb)
* `pattern` {String} Pattern to be matched
* `options` {Object}
* `cb` {Function}
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Perform an asynchronous glob search.
## glob.sync(pattern, [options])
* `pattern` {String} Pattern to be matched
* `options` {Object}
* return: {Array<String>} filenames found matching the pattern
Perform a synchronous glob search.
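For example (a minimal sketch):
```javascript
var glob = require("glob")
// Synchronously collect filenames matching the pattern
var files = glob.sync("**/*.js")
console.log(files)
```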
## Class: glob.Glob
Create a Glob object by instantiating the `glob.Glob` class.
```javascript
var Glob = require("glob").Glob
var mg = new Glob(pattern, options, cb)
```
It's an EventEmitter, and starts walking the filesystem to find matches
immediately.
### new glob.Glob(pattern, [options], [cb])
* `pattern` {String} pattern to search for
* `options` {Object}
* `cb` {Function} Called when an error occurs, or matches are found
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Note that if the `sync` flag is set in the options, then matches will
be immediately available on the `g.found` member.
### Properties
* `minimatch` The minimatch object that the glob uses.
* `options` The options object passed in.
* `error` The error encountered. When an error is encountered, the
glob object is in an undefined state, and should be discarded.
* `aborted` Boolean which is set to true when calling `abort()`. There
is no way at this time to continue a glob search after aborting, but
you can re-use the statCache to avoid having to duplicate syscalls.
### Events
* `end` When the matching is finished, this is emitted with all the
matches found. If the `nonull` option is set, and no match was found,
then the `matches` list contains the original pattern. The matches
are sorted, unless the `nosort` flag is set.
* `match` Every time a match is found, this is emitted with the matched filename.
* `error` Emitted when an unexpected error is encountered, or whenever
any fs error occurs if `options.strict` is set.
* `abort` When `abort()` is called, this event is raised.
### Methods
* `abort` Stop the search.
### Options
All the options that can be passed to Minimatch can also be passed to
Glob to change pattern matching behavior. Also, some have been added,
or have glob-specific ramifications.
All options are false by default, unless otherwise noted.
All options are added to the glob object, as well.
* `cwd` The current working directory in which to search. Defaults
to `process.cwd()`.
* `root` The place where patterns starting with `/` will be mounted
onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
systems, and `C:\` or some such on Windows.)
* `nomount` By default, a pattern starting with a forward-slash will be
"mounted" onto the root setting, so that a valid filesystem path is
returned. Set this flag to disable that behavior.
* `mark` Add a `/` character to directory matches. Note that this
requires additional stat calls.
* `nosort` Don't sort the results.
* `stat` Set to true to stat *all* results. This reduces performance
somewhat, and is completely unnecessary, unless `readdir` is presumed
to be an untrustworthy indicator of file existence. It will cause
ELOOP to be triggered one level sooner in the case of cyclical
symbolic links.
* `silent` When an unusual error is encountered
when attempting to read a directory, a warning will be printed to
stderr. Set the `silent` option to true to suppress these warnings.
* `strict` When an unusual error is encountered
when attempting to read a directory, the process will just continue on
in search of other matches. Set the `strict` option to raise an error
in these cases.
* `statCache` A cache of results of filesystem information, to prevent
unnecessary stat calls. While it should not normally be necessary to
set this, you may pass the statCache from one glob() call to the
options object of another, if you know that the filesystem will not
change between calls. (See "Race Conditions" below.)
* `sync` Perform a synchronous glob search.
* `nounique` In some cases, brace-expanded patterns can result in the
same file showing up multiple times in the result set. By default,
this implementation prevents duplicates in the result set.
Set this flag to disable that behavior.
* `nonull` Set to never return an empty set, instead returning a set
containing the pattern itself. This is the default in glob(3).
* `nocase` Perform a case-insensitive match. Note that case-insensitive
filesystems will sometimes result in glob returning results that are
case-insensitively matched anyway, since readdir and stat will not
raise an error.
* `debug` Set to enable debug logging in minimatch and glob.
* `globDebug` Set to enable debug logging in glob, but not minimatch.
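A short sketch combining a few of these options (the paths are illustrative):
```javascript
var glob = require("glob")
glob("src/**/*.js", {
  cwd: "/tmp/project", // search relative to this directory
  mark: true,          // append "/" to directory matches
  nonull: true         // return the pattern itself if nothing matched
}, function (er, files) {
  if (er) return console.error(er)
  console.log(files)
})
```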
## Comparisons to other fnmatch/glob implementations
While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between node-glob and other
implementations, and are intentional.
If the pattern starts with a `!` character, then it is negated. Set the
`nonegate` flag to suppress this behavior, and treat leading `!`
characters normally. This is perhaps relevant if you wish to start the
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
characters at the start of a pattern will negate the pattern multiple
times.
If a pattern starts with `#`, then it is treated as a comment, and
will not match anything. Use `\#` to match a literal `#` at the
start of a line, or set the `nocomment` flag to suppress this behavior.
The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.1, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not. **Note that this is different from the way that `**` is
handled by ruby's `Dir` class.**
If an escaped pattern has no matches, and the `nonull` flag is set,
then glob returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.
If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.
## Windows
**Please only use forward-slashes in glob expressions.**
Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes will always
be interpreted as escape characters, not path separators.
Results from absolute patterns such as `/foo/*` are mounted onto the
root setting using `path.join`. On windows, this will by default result
in `/foo/*` matching `C:\foo\bar.txt`.
## Race Conditions
Glob searching, by its very nature, is susceptible to race conditions,
since it relies on directory walking and such.
As a result, it is possible that a file that exists when glob looks for
it may have been deleted or modified by the time it returns the result.
As part of its internal implementation, this program caches all stat
and readdir calls that it makes, in order to cut down on system
overhead. However, this also makes it even more susceptible to races,
especially if the statCache object is reused between glob calls.
Users are thus advised not to use a glob result as a
guarantee of filesystem state in the face of rapid changes.
For the vast majority of operations, this is never a problem.

View File

@ -0,0 +1,9 @@
var Glob = require("../").Glob
var pattern = "test/a/**/[cg]/../[cg]"
console.log(pattern)
var mg = new Glob(pattern, {mark: true, sync:true}, function (er, matches) {
console.log("matches", matches)
})
console.log("after")

View File

@ -0,0 +1,9 @@
var Glob = require("../").Glob
var pattern = "{./*/*,/*,/usr/local/*}"
console.log(pattern)
var mg = new Glob(pattern, {mark: true}, function (er, matches) {
console.log("matches", matches)
})
console.log("after")

View File

@ -0,0 +1,643 @@
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
// readdir(PREFIX) as ENTRIES
// If fails, END
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $])
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $])
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var fs = require("graceful-fs")
, minimatch = require("minimatch")
, Minimatch = minimatch.Minimatch
, inherits = require("inherits")
, EE = require("events").EventEmitter
, path = require("path")
, isDir = {}
, assert = require("assert").ok
function glob (pattern, options, cb) {
if (typeof options === "function") cb = options, options = {}
if (!options) options = {}
if (typeof options === "number") {
deprecated()
return
}
var g = new Glob(pattern, options, cb)
return g.sync ? g.found : g
}
glob.fnmatch = deprecated
function deprecated () {
throw new Error("glob's interface has changed. Please see the docs.")
}
glob.sync = globSync
function globSync (pattern, options) {
if (typeof options === "number") {
deprecated()
return
}
options = options || {}
options.sync = true
return glob(pattern, options)
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (!(this instanceof Glob)) {
return new Glob(pattern, options, cb)
}
if (typeof cb === "function") {
this.on("error", cb)
this.on("end", function (matches) {
cb(null, matches)
})
}
options = options || {}
this.EOF = {}
this._emitQueue = []
this.maxDepth = options.maxDepth || 1000
this.maxLength = options.maxLength || Infinity
this.statCache = options.statCache || {}
this.changedCwd = false
var cwd = process.cwd()
if (!options.hasOwnProperty("cwd")) this.cwd = cwd
else {
this.cwd = options.cwd
this.changedCwd = path.resolve(options.cwd) !== cwd
}
this.root = options.root || path.resolve(this.cwd, "/")
this.root = path.resolve(this.root)
if (process.platform === "win32")
this.root = this.root.replace(/\\/g, "/")
this.nomount = !!options.nomount
if (!pattern) {
throw new Error("must provide pattern")
}
// base-matching: just use globstar for that.
if (options.matchBase && -1 === pattern.indexOf("/")) {
if (options.noglobstar) {
throw new Error("base matching requires globstar")
}
pattern = "**/" + pattern
}
this.strict = options.strict !== false
this.dot = !!options.dot
this.mark = !!options.mark
this.sync = !!options.sync
this.nounique = !!options.nounique
this.nonull = !!options.nonull
this.nosort = !!options.nosort
this.nocase = !!options.nocase
this.stat = !!options.stat
this.debug = !!options.debug || !!options.globDebug
if (this.debug)
this.log = console.error
this.silent = !!options.silent
var mm = this.minimatch = new Minimatch(pattern, options)
this.options = mm.options
pattern = this.pattern = mm.pattern
this.error = null
this.aborted = false
EE.call(this)
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
this.minimatch.set.forEach(iterator.bind(this))
function iterator (pattern, i, set) {
this._process(pattern, 0, i, function (er) {
if (er) this.emit("error", er)
if (-- n <= 0) this._finish()
})
}
}
Glob.prototype.log = function () {}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
var nou = this.nounique
, all = nou ? [] : {}
for (var i = 0, l = this.matches.length; i < l; i ++) {
var matches = this.matches[i]
this.log("matches[%d] =", i, matches)
// do like the shell, and spit out the literal glob
if (!matches) {
if (this.nonull) {
var literal = this.minimatch.globSet[i]
if (nou) all.push(literal)
else all[literal] = true
}
} else {
// had matches
var m = Object.keys(matches)
if (nou) all.push.apply(all, m)
else m.forEach(function (m) {
all[m] = true
})
}
}
if (!nou) all = Object.keys(all)
if (!this.nosort) {
all = all.sort(this.nocase ? alphasorti : alphasort)
}
if (this.mark) {
// at *some* point we statted all of these
all = all.map(function (m) {
var sc = this.statCache[m]
if (!sc)
return m
var isDir = (Array.isArray(sc) || sc === 2)
if (isDir && m.slice(-1) !== "/") {
return m + "/"
}
if (!isDir && m.slice(-1) === "/") {
return m.replace(/\/+$/, "")
}
return m
}, this)
}
this.log("emitting end", all)
this.EOF = this.found = all
this.emitMatch(this.EOF)
}
function alphasorti (a, b) {
a = a.toLowerCase()
b = b.toLowerCase()
return alphasort(a, b)
}
function alphasort (a, b) {
return a > b ? 1 : a < b ? -1 : 0
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit("abort")
}
Glob.prototype.pause = function () {
if (this.paused) return
if (this.sync)
this.emit("error", new Error("Can't pause/resume sync glob"))
this.paused = true
this.emit("pause")
}
Glob.prototype.resume = function () {
if (!this.paused) return
if (this.sync)
this.emit("error", new Error("Can't pause/resume sync glob"))
this.paused = false
this.emit("resume")
this._processEmitQueue()
//process.nextTick(this.emit.bind(this, "resume"))
}
Glob.prototype.emitMatch = function (m) {
this._emitQueue.push(m)
this._processEmitQueue()
}
Glob.prototype._processEmitQueue = function (m) {
while (!this._processingEmitQueue &&
!this.paused) {
this._processingEmitQueue = true
var m = this._emitQueue.shift()
if (!m) {
this._processingEmitQueue = false
break
}
this.log('emit!', m === this.EOF ? "end" : "match")
this.emit(m === this.EOF ? "end" : "match", m)
this._processingEmitQueue = false
}
}
Glob.prototype._process = function (pattern, depth, index, cb_) {
assert(this instanceof Glob)
var cb = function cb (er, res) {
assert(this instanceof Glob)
if (this.paused) {
if (!this._processQueue) {
this._processQueue = []
this.once("resume", function () {
var q = this._processQueue
this._processQueue = null
q.forEach(function (cb) { cb() })
})
}
this._processQueue.push(cb_.bind(this, er, res))
} else {
cb_.call(this, er, res)
}
}.bind(this)
if (this.aborted) return cb()
if (depth > this.maxDepth) return cb()
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === "string") {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
prefix = pattern.join("/")
this._stat(prefix, function (exists, isDir) {
// either it's there, or it isn't.
// nothing more to do, either way.
if (exists) {
if (prefix && isAbsolute(prefix) && !this.nomount) {
if (prefix.charAt(0) === "/") {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
}
}
if (process.platform === "win32")
prefix = prefix.replace(/\\/g, "/")
this.matches[index] = this.matches[index] || {}
this.matches[index][prefix] = true
this.emitMatch(prefix)
}
return cb()
})
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's "absolute" like /foo/bar,
// or "relative" like "../baz"
prefix = pattern.slice(0, n)
prefix = prefix.join("/")
break
}
// get the list of entries.
var read
if (prefix === null) read = "."
else if (isAbsolute(prefix) || isAbsolute(pattern.join("/"))) {
if (!prefix || !isAbsolute(prefix)) {
prefix = path.join("/", prefix)
}
read = prefix = path.resolve(prefix)
// if (process.platform === "win32")
// read = prefix = prefix.replace(/^[a-zA-Z]:|\\/g, "/")
this.log('absolute: ', prefix, this.root, pattern, read)
} else {
read = prefix
}
this.log('readdir(%j)', read, this.cwd, this.root)
return this._readdir(read, function (er, entries) {
if (er) {
// not a directory!
// this means that, whatever else comes after this, it can never match
return cb()
}
// globstar is special
if (pattern[n] === minimatch.GLOBSTAR) {
// test without the globstar, and with every child both below
// and replacing the globstar.
var s = [ pattern.slice(0, n).concat(pattern.slice(n + 1)) ]
entries.forEach(function (e) {
if (e.charAt(0) === "." && !this.dot) return
// instead of the globstar
s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1)))
// below the globstar
s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n)))
}, this)
// now asyncForEach over this
var l = s.length
, errState = null
s.forEach(function (gsPattern) {
this._process(gsPattern, depth + 1, index, function (er) {
if (errState) return
if (er) return cb(errState = er)
if (--l <= 0) return cb()
})
}, this)
return
}
// not a globstar
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = pattern[n]
if (typeof pn === "string") {
var found = entries.indexOf(pn) !== -1
entries = found ? [pn] : []
} else {
var rawGlob = pattern[n]._glob
, dotOk = this.dot || rawGlob.charAt(0) === "."
entries = entries.filter(function (e) {
return (e.charAt(0) !== "." || dotOk) &&
(typeof pattern[n] === "string" && e === pattern[n] ||
e.match(pattern[n]))
})
}
// If n === pattern.length - 1, then there's no need for the extra stat
// *unless* the user has specified "mark" or "stat" explicitly.
// We know that they exist, since the readdir returned them.
if (n === pattern.length - 1 &&
!this.mark &&
!this.stat) {
entries.forEach(function (e) {
if (prefix) {
if (prefix !== "/") e = prefix + "/" + e
else e = prefix + e
}
if (e.charAt(0) === "/" && !this.nomount) {
e = path.join(this.root, e)
}
if (process.platform === "win32")
e = e.replace(/\\/g, "/")
this.matches[index] = this.matches[index] || {}
this.matches[index][e] = true
this.emitMatch(e)
}, this)
return cb.call(this)
}
// now test all the remaining entries as stand-ins for that part
// of the pattern.
var l = entries.length
, errState = null
if (l === 0) return cb() // no matches possible
entries.forEach(function (e) {
var p = pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1))
this._process(p, depth + 1, index, function (er) {
if (errState) return
if (er) return cb(errState = er)
if (--l === 0) return cb.call(this)
})
}, this)
})
}
Glob.prototype._stat = function (f, cb) {
assert(this instanceof Glob)
var abs = f
if (f.charAt(0) === "/") {
abs = path.join(this.root, f)
} else if (this.changedCwd) {
abs = path.resolve(this.cwd, f)
}
this.log('stat', [this.cwd, f, '=', abs])
if (f.length > this.maxLength) {
var er = new Error("Path name too long")
er.code = "ENAMETOOLONG"
er.path = f
return this._afterStat(f, abs, cb, er)
}
if (this.statCache.hasOwnProperty(f)) {
var exists = this.statCache[f]
, isDir = exists && (Array.isArray(exists) || exists === 2)
if (this.sync) return cb.call(this, !!exists, isDir)
return process.nextTick(cb.bind(this, !!exists, isDir))
}
if (this.sync) {
var er, stat
try {
stat = fs.statSync(abs)
} catch (e) {
er = e
}
this._afterStat(f, abs, cb, er, stat)
} else {
fs.stat(abs, this._afterStat.bind(this, f, abs, cb))
}
}
Glob.prototype._afterStat = function (f, abs, cb, er, stat) {
var exists
assert(this instanceof Glob)
if (abs.slice(-1) === "/" && stat && !stat.isDirectory()) {
this.log("should be ENOTDIR, fake it")
er = new Error("ENOTDIR, not a directory '" + abs + "'")
er.path = abs
er.code = "ENOTDIR"
stat = null
}
if (er || !stat) {
exists = false
} else {
exists = stat.isDirectory() ? 2 : 1
}
this.statCache[f] = this.statCache[f] || exists
cb.call(this, !!exists, exists === 2)
}
Glob.prototype._readdir = function (f, cb) {
assert(this instanceof Glob)
var abs = f
if (f.charAt(0) === "/") {
abs = path.join(this.root, f)
} else if (isAbsolute(f)) {
abs = f
} else if (this.changedCwd) {
abs = path.resolve(this.cwd, f)
}
this.log('readdir', [this.cwd, f, abs])
if (f.length > this.maxLength) {
var er = new Error("Path name too long")
er.code = "ENAMETOOLONG"
er.path = f
return this._afterReaddir(f, abs, cb, er)
}
if (this.statCache.hasOwnProperty(f)) {
var c = this.statCache[f]
if (Array.isArray(c)) {
if (this.sync) return cb.call(this, null, c)
return process.nextTick(cb.bind(this, null, c))
}
if (!c || c === 1) {
// either ENOENT or ENOTDIR
var code = c ? "ENOTDIR" : "ENOENT"
, er = new Error((c ? "Not a directory" : "Not found") + ": " + f)
er.path = f
er.code = code
this.log(f, er)
if (this.sync) return cb.call(this, er)
return process.nextTick(cb.bind(this, er))
}
// at this point, c === 2, meaning it's a dir, but we haven't
// had to read it yet, or c === true, meaning it's *something*
// but we don't have any idea what. Need to read it, either way.
}
if (this.sync) {
var er, entries
try {
entries = fs.readdirSync(abs)
} catch (e) {
er = e
}
return this._afterReaddir(f, abs, cb, er, entries)
}
fs.readdir(abs, this._afterReaddir.bind(this, f, abs, cb))
}
Glob.prototype._afterReaddir = function (f, abs, cb, er, entries) {
assert(this instanceof Glob)
if (entries && !er) {
this.statCache[f] = entries
// if we haven't asked to stat everything for suresies, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time. This also gets us one step
// further into ELOOP territory.
if (!this.mark && !this.stat) {
entries.forEach(function (e) {
if (f === "/") e = f + e
else e = f + "/" + e
this.statCache[e] = true
}, this)
}
return cb.call(this, er, entries)
}
// now handle errors, and cache the information
if (er) switch (er.code) {
case "ENOTDIR": // totally normal. means it *does* exist.
this.statCache[f] = 1
return cb.call(this, er)
case "ENOENT": // not terribly unusual
case "ELOOP":
case "ENAMETOOLONG":
case "UNKNOWN":
this.statCache[f] = false
return cb.call(this, er)
default: // some unusual error. Treat as failure.
this.statCache[f] = false
if (this.strict) this.emit("error", er)
if (!this.silent) console.error("glob error", er)
return cb.call(this, er)
}
}
var isAbsolute = process.platform === "win32" ? absWin : absUnix
function absWin (p) {
if (absUnix(p)) return true
// pull off the device/UNC bit from a windows path.
// from node's lib/path.js
var splitDeviceRe =
/^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/
, result = splitDeviceRe.exec(p)
, device = result[1] || ''
, isUnc = device && device.charAt(1) !== ':'
, isAbsolute = !!result[2] || isUnc // UNC paths are always absolute
return isAbsolute
}
function absUnix (p) {
return p.charAt(0) === "/" || p === ""
}

View File

@ -0,0 +1,27 @@
Copyright (c) Isaac Z. Schlueter ("Author")
All rights reserved.
The BSD License
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,33 @@
# graceful-fs
graceful-fs functions as a drop-in replacement for the fs module,
making various improvements.
The improvements are meant to normalize behavior across different
platforms and environments, and to make filesystem access more
resilient to errors.
## Improvements over fs module
graceful-fs:
* keeps track of how many file descriptors are open, and by default
limits this to 1024. Any further requests to open a file are put in a
queue until new slots become available. If 1024 turns out to be too
much, it decreases the limit further.
* fixes `lchmod` for Node versions prior to 0.6.2.
* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
`lchown` if the user isn't root.
* makes `lchmod` and `lchown` become noops, if not available.
* retries reading a file if `read` results in EAGAIN error.
On Windows, it retries renaming a file for up to one second if `EACCESS`
or `EPERM` error occurs, likely because antivirus software has locked
the directory.
## Configuration
The maximum number of open file descriptors that graceful-fs manages may
be adjusted by setting `fs.MAX_OPEN` to a different number. The default
is 1024.
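As a quick illustration of the drop-in usage and the `fs.MAX_OPEN` setting described above (a sketch only: it assumes graceful-fs is installed under node_modules, and the file path is a placeholder):
// graceful-fs exposes the same API as the core fs module, so swapping the require is enough
var fs = require("graceful-fs")
// optionally lower the managed descriptor limit from the default of 1024
fs.MAX_OPEN = 256
// "some-file.txt" is a placeholder path for illustration
fs.readFile("some-file.txt", "utf8", function (err, data) {
  if (err) throw err
  console.log(data)
})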

View File

@@ -0,0 +1,442 @@
// this keeps a queue of opened file descriptors, and will make
// fs operations wait until some have closed before trying to open more.
var fs = exports = module.exports = {}
fs._originalFs = require("fs")
Object.getOwnPropertyNames(fs._originalFs).forEach(function(prop) {
var desc = Object.getOwnPropertyDescriptor(fs._originalFs, prop)
Object.defineProperty(fs, prop, desc)
})
var queue = []
, constants = require("constants")
fs._curOpen = 0
fs.MIN_MAX_OPEN = 64
fs.MAX_OPEN = 1024
// prevent EMFILE errors
function OpenReq (path, flags, mode, cb) {
this.path = path
this.flags = flags
this.mode = mode
this.cb = cb
}
function noop () {}
fs.open = gracefulOpen
function gracefulOpen (path, flags, mode, cb) {
if (typeof mode === "function") cb = mode, mode = null
if (typeof cb !== "function") cb = noop
if (fs._curOpen >= fs.MAX_OPEN) {
queue.push(new OpenReq(path, flags, mode, cb))
setTimeout(flush)
return
}
open(path, flags, mode, function (er, fd) {
if (er && er.code === "EMFILE" && fs._curOpen > fs.MIN_MAX_OPEN) {
// that was too many. reduce max, get back in queue.
// this should only happen once in a great while, and only
// if the ulimit -n is set lower than 1024.
fs.MAX_OPEN = fs._curOpen - 1
return fs.open(path, flags, mode, cb)
}
cb(er, fd)
})
}
function open (path, flags, mode, cb) {
cb = cb || noop
fs._curOpen ++
fs._originalFs.open.call(fs, path, flags, mode, function (er, fd) {
if (er) onclose()
cb(er, fd)
})
}
fs.openSync = function (path, flags, mode) {
var ret
ret = fs._originalFs.openSync.call(fs, path, flags, mode)
fs._curOpen ++
return ret
}
function onclose () {
fs._curOpen --
flush()
}
function flush () {
while (fs._curOpen < fs.MAX_OPEN) {
var req = queue.shift()
if (!req) return
switch (req.constructor.name) {
case 'OpenReq':
open(req.path, req.flags || "r", req.mode || 0777, req.cb)
break
case 'ReaddirReq':
readdir(req.path, req.cb)
break
case 'ReadFileReq':
readFile(req.path, req.options, req.cb)
break
case 'WriteFileReq':
writeFile(req.path, req.data, req.options, req.cb)
break
default:
throw new Error('Unknown req type: ' + req.constructor.name)
}
}
}
fs.close = function (fd, cb) {
cb = cb || noop
fs._originalFs.close.call(fs, fd, function (er) {
onclose()
cb(er)
})
}
fs.closeSync = function (fd) {
try {
return fs._originalFs.closeSync.call(fs, fd)
} finally {
onclose()
}
}
// readdir takes a fd as well.
// however, the sync version closes it right away, so
// there's no need to wrap.
// It would be nice to catch when it throws an EMFILE,
// but that's relatively rare anyway.
fs.readdir = gracefulReaddir
function gracefulReaddir (path, cb) {
if (fs._curOpen >= fs.MAX_OPEN) {
queue.push(new ReaddirReq(path, cb))
setTimeout(flush)
return
}
readdir(path, function (er, files) {
if (er && er.code === "EMFILE" && fs._curOpen > fs.MIN_MAX_OPEN) {
fs.MAX_OPEN = fs._curOpen - 1
return fs.readdir(path, cb)
}
cb(er, files)
})
}
function readdir (path, cb) {
cb = cb || noop
fs._curOpen ++
fs._originalFs.readdir.call(fs, path, function (er, files) {
onclose()
cb(er, files)
})
}
function ReaddirReq (path, cb) {
this.path = path
this.cb = cb
}
fs.readFile = gracefulReadFile
function gracefulReadFile(path, options, cb) {
if (typeof options === "function") cb = options, options = null
if (typeof cb !== "function") cb = noop
if (fs._curOpen >= fs.MAX_OPEN) {
queue.push(new ReadFileReq(path, options, cb))
setTimeout(flush)
return
}
readFile(path, options, function (er, data) {
if (er && er.code === "EMFILE" && fs._curOpen > fs.MIN_MAX_OPEN) {
fs.MAX_OPEN = fs._curOpen - 1
return fs.readFile(path, options, cb)
}
cb(er, data)
})
}
function readFile (path, options, cb) {
cb = cb || noop
fs._curOpen ++
fs._originalFs.readFile.call(fs, path, options, function (er, data) {
onclose()
cb(er, data)
})
}
function ReadFileReq (path, options, cb) {
this.path = path
this.options = options
this.cb = cb
}
fs.writeFile = gracefulWriteFile
function gracefulWriteFile(path, data, options, cb) {
if (typeof options === "function") cb = options, options = null
if (typeof cb !== "function") cb = noop
if (fs._curOpen >= fs.MAX_OPEN) {
queue.push(new WriteFileReq(path, data, options, cb))
setTimeout(flush)
return
}
writeFile(path, data, options, function (er) {
if (er && er.code === "EMFILE" && fs._curOpen > fs.MIN_MAX_OPEN) {
fs.MAX_OPEN = fs._curOpen - 1
return fs.writeFile(path, data, options, cb)
}
cb(er)
})
}
function writeFile (path, data, options, cb) {
cb = cb || noop
fs._curOpen ++
fs._originalFs.writeFile.call(fs, path, data, options, function (er) {
onclose()
cb(er)
})
}
function WriteFileReq (path, data, options, cb) {
this.path = path
this.data = data
this.options = options
this.cb = cb
}
// (re-)implement some things that are known busted or missing.
var constants = require("constants")
// lchmod, broken prior to 0.6.2
// back-port the fix here.
if (constants.hasOwnProperty('O_SYMLINK') &&
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
fs.lchmod = function (path, mode, callback) {
callback = callback || noop
fs.open( path
, constants.O_WRONLY | constants.O_SYMLINK
, mode
, function (err, fd) {
if (err) {
callback(err)
return
}
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
fs.fchmod(fd, mode, function (err) {
fs.close(fd, function(err2) {
callback(err || err2)
})
})
})
}
fs.lchmodSync = function (path, mode) {
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
var err, err2
try {
var ret = fs.fchmodSync(fd, mode)
} catch (er) {
err = er
}
try {
fs.closeSync(fd)
} catch (er) {
err2 = er
}
if (err || err2) throw (err || err2)
return ret
}
}
// lutimes implementation, or no-op
if (!fs.lutimes) {
if (constants.hasOwnProperty("O_SYMLINK")) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
cb = cb || noop
if (er) return cb(er)
fs.futimes(fd, at, mt, function (er) {
fs.close(fd, function (er2) {
return cb(er || er2)
})
})
})
}
fs.lutimesSync = function (path, at, mt) {
var fd = fs.openSync(path, constants.O_SYMLINK)
, err
, err2
, ret
try {
var ret = fs.futimesSync(fd, at, mt)
} catch (er) {
err = er
}
try {
fs.closeSync(fd)
} catch (er) {
err2 = er
}
if (err || err2) throw (err || err2)
return ret
}
} else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
// maybe utimensat will be bound soonish?
fs.lutimes = function (path, at, mt, cb) {
fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
}
fs.lutimesSync = function (path, at, mt) {
return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
}
} else {
fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
fs.lutimesSync = function () {}
}
}
// https://github.com/isaacs/node-graceful-fs/issues/4
// Chown should not fail on einval or eperm if non-root.
fs.chown = chownFix(fs.chown)
fs.fchown = chownFix(fs.fchown)
fs.lchown = chownFix(fs.lchown)
fs.chownSync = chownFixSync(fs.chownSync)
fs.fchownSync = chownFixSync(fs.fchownSync)
fs.lchownSync = chownFixSync(fs.lchownSync)
function chownFix (orig) {
if (!orig) return orig
return function (target, uid, gid, cb) {
return orig.call(fs, target, uid, gid, function (er, res) {
if (chownErOk(er)) er = null
cb(er, res)
})
}
}
function chownFixSync (orig) {
if (!orig) return orig
return function (target, uid, gid) {
try {
return orig.call(fs, target, uid, gid)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
function chownErOk (er) {
// if there's no getuid, or if getuid() is something other than 0,
// and the error is EINVAL or EPERM, then just ignore it.
// This specific case is a silent failure in cp, install, tar,
// and most other unix tools that manage permissions.
// When running as root, or if other types of errors are encountered,
// then it's strict.
if (!er || (!process.getuid || process.getuid() !== 0)
&& (er.code === "EINVAL" || er.code === "EPERM")) return true
}
// if lchmod/lchown do not exist, then make them no-ops
if (!fs.lchmod) {
fs.lchmod = function (path, mode, cb) {
process.nextTick(cb)
}
fs.lchmodSync = function () {}
}
if (!fs.lchown) {
fs.lchown = function (path, uid, gid, cb) {
process.nextTick(cb)
}
fs.lchownSync = function () {}
}
// on Windows, A/V software can lock the directory, causing this
// to fail with an EACCES or EPERM if the directory contains newly
// created files. Try again on failure, for up to 1 second.
if (process.platform === "win32") {
var rename_ = fs.rename
fs.rename = function rename (from, to, cb) {
var start = Date.now()
rename_(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM")
&& Date.now() - start < 1000) {
return rename_(from, to, CB)
}
cb(er)
})
}
}
// if read() returns EAGAIN, then just try it again.
var read = fs.read
fs.read = function (fd, buffer, offset, length, position, callback_) {
var callback
if (callback_ && typeof callback_ === 'function') {
var eagCounter = 0
callback = function (er, _, __) {
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
return read.call(fs, fd, buffer, offset, length, position, callback)
}
callback_.apply(this, arguments)
}
}
return read.call(fs, fd, buffer, offset, length, position, callback)
}
var readSync = fs.readSync
fs.readSync = function (fd, buffer, offset, length, position) {
var eagCounter = 0
while (true) {
try {
return readSync.call(fs, fd, buffer, offset, length, position)
} catch (er) {
if (er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
continue
}
throw er
}
}
}
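To illustrate the queueing behaviour the file above implements, a minimal sketch (the relative require path is an assumption, and lowering MAX_OPEN this far is for demonstration only):
// with MAX_OPEN lowered, open() calls beyond the limit are queued and
// completed as earlier descriptors are closed and flush() runs
var gfs = require("./graceful-fs.js") // assumed relative path to the file shown above
gfs.MAX_OPEN = 4 // artificially low limit, for demonstration only
for (var i = 0; i < 10; i++) {
  gfs.open(__filename, "r", function (er, fd) {
    if (er) throw er
    // closing frees a slot, which lets a queued request proceed
    gfs.close(fd, function () {})
  })
}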

View File

@@ -0,0 +1,51 @@
{
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me"
},
"name": "graceful-fs",
"description": "A drop-in replacement for fs, making various improvements.",
"version": "1.2.3",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-graceful-fs.git"
},
"main": "graceful-fs.js",
"engines": {
"node": ">=0.4.0"
},
"directories": {
"test": "test"
},
"scripts": {
"test": "tap test/*.js"
},
"keywords": [
"fs",
"module",
"reading",
"retry",
"retries",
"queue",
"error",
"errors",
"handling",
"EMFILE",
"EAGAIN",
"EINVAL",
"EPERM",
"EACCESS"
],
"license": "BSD",
"readme": "# graceful-fs\n\ngraceful-fs functions as a drop-in replacement for the fs module,\nmaking various improvements.\n\nThe improvements are meant to normalize behavior across different\nplatforms and environments, and to make filesystem access more\nresilient to errors.\n\n## Improvements over fs module\n\ngraceful-fs:\n\n* keeps track of how many file descriptors are open, and by default\n limits this to 1024. Any further requests to open a file are put in a\n queue until new slots become available. If 1024 turns out to be too\n much, it decreases the limit further.\n* fixes `lchmod` for Node versions prior to 0.6.2.\n* implements `fs.lutimes` if possible. Otherwise it becomes a noop.\n* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or\n `lchown` if the user isn't root.\n* makes `lchmod` and `lchown` become noops, if not available.\n* retries reading a file if `read` results in EAGAIN error.\n\nOn Windows, it retries renaming a file for up to one second if `EACCESS`\nor `EPERM` error occurs, likely because antivirus software has locked\nthe directory.\n\n## Configuration\n\nThe maximum number of open file descriptors that graceful-fs manages may\nbe adjusted by setting `fs.MAX_OPEN` to a different number. The default\nis 1024.\n",
"readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/isaacs/node-graceful-fs/issues"
},
"homepage": "https://github.com/isaacs/node-graceful-fs",
"_id": "graceful-fs@1.2.3",
"_shasum": "15a4806a57547cb2d2dbf27f42e89a8c3451b364",
"_from": "graceful-fs@~1.2.0",
"_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz"
}

View File

@@ -0,0 +1,46 @@
var test = require('tap').test
var fs = require('../graceful-fs.js')
test('graceful fs is not fs', function (t) {
t.notEqual(fs, require('fs'))
t.end()
})
test('open an existing file works', function (t) {
var start = fs._curOpen
var fd = fs.openSync(__filename, 'r')
t.equal(fs._curOpen, start + 1)
fs.closeSync(fd)
t.equal(fs._curOpen, start)
fs.open(__filename, 'r', function (er, fd) {
if (er) throw er
t.equal(fs._curOpen, start + 1)
fs.close(fd, function (er) {
if (er) throw er
t.equal(fs._curOpen, start)
t.end()
})
})
})
test('open a non-existing file throws', function (t) {
var start = fs._curOpen
var er
try {
var fd = fs.openSync('this file does not exist', 'r')
} catch (x) {
er = x
}
t.ok(er, 'should throw')
t.notOk(fd, 'should not get an fd')
t.equal(er.code, 'ENOENT')
t.equal(fs._curOpen, start)
fs.open('neither does this file', 'r', function (er, fd) {
t.ok(er, 'should throw')
t.notOk(fd, 'should not get an fd')
t.equal(er.code, 'ENOENT')
t.equal(fs._curOpen, start)
t.end()
})
})

View File

@@ -0,0 +1,158 @@
var test = require('tap').test
// simulated ulimit
// this is like graceful-fs, but in reverse
var fs_ = require('fs')
var fs = require('../graceful-fs.js')
var files = fs.readdirSync(__dirname)
// Ok, no more actual file reading!
var fds = 0
var nextFd = 60
var limit = 8
fs_.open = function (path, flags, mode, cb) {
process.nextTick(function() {
++fds
if (fds >= limit) {
--fds
var er = new Error('EMFILE Curses!')
er.code = 'EMFILE'
er.path = path
return cb(er)
} else {
cb(null, nextFd++)
}
})
}
fs_.openSync = function (path, flags, mode) {
if (fds >= limit) {
var er = new Error('EMFILE Curses!')
er.code = 'EMFILE'
er.path = path
throw er
} else {
++fds
return nextFd++
}
}
fs_.close = function (fd, cb) {
process.nextTick(function () {
--fds
cb()
})
}
fs_.closeSync = function (fd) {
--fds
}
fs_.readdir = function (path, cb) {
process.nextTick(function() {
if (fds >= limit) {
var er = new Error('EMFILE Curses!')
er.code = 'EMFILE'
er.path = path
return cb(er)
} else {
++fds
process.nextTick(function () {
--fds
cb(null, [__filename, "some-other-file.js"])
})
}
})
}
fs_.readdirSync = function (path) {
if (fds >= limit) {
var er = new Error('EMFILE Curses!')
er.code = 'EMFILE'
er.path = path
throw er
} else {
return [__filename, "some-other-file.js"]
}
}
test('open emfile autoreduce', function (t) {
fs.MIN_MAX_OPEN = 4
t.equal(fs.MAX_OPEN, 1024)
var max = 12
for (var i = 0; i < max; i++) {
fs.open(__filename, 'r', next(i))
}
var phase = 0
var expect =
[ [ 0, 60, null, 1024, 4, 12, 1 ],
[ 1, 61, null, 1024, 4, 12, 2 ],
[ 2, 62, null, 1024, 4, 12, 3 ],
[ 3, 63, null, 1024, 4, 12, 4 ],
[ 4, 64, null, 1024, 4, 12, 5 ],
[ 5, 65, null, 1024, 4, 12, 6 ],
[ 6, 66, null, 1024, 4, 12, 7 ],
[ 7, 67, null, 6, 4, 5, 1 ],
[ 8, 68, null, 6, 4, 5, 2 ],
[ 9, 69, null, 6, 4, 5, 3 ],
[ 10, 70, null, 6, 4, 5, 4 ],
[ 11, 71, null, 6, 4, 5, 5 ] ]
var actual = []
function next (i) { return function (er, fd) {
if (er)
throw er
actual.push([i, fd, er, fs.MAX_OPEN, fs.MIN_MAX_OPEN, fs._curOpen, fds])
if (i === max - 1) {
t.same(actual, expect)
t.ok(fs.MAX_OPEN < limit)
t.end()
}
fs.close(fd)
} }
})
test('readdir emfile autoreduce', function (t) {
fs.MAX_OPEN = 1024
var max = 12
for (var i = 0; i < max; i ++) {
fs.readdir(__dirname, next(i))
}
var expect =
[ [0,[__filename,"some-other-file.js"],null,7,4,7,7],
[1,[__filename,"some-other-file.js"],null,7,4,7,6],
[2,[__filename,"some-other-file.js"],null,7,4,7,5],
[3,[__filename,"some-other-file.js"],null,7,4,7,4],
[4,[__filename,"some-other-file.js"],null,7,4,7,3],
[5,[__filename,"some-other-file.js"],null,7,4,6,2],
[6,[__filename,"some-other-file.js"],null,7,4,5,1],
[7,[__filename,"some-other-file.js"],null,7,4,4,0],
[8,[__filename,"some-other-file.js"],null,7,4,3,3],
[9,[__filename,"some-other-file.js"],null,7,4,2,2],
[10,[__filename,"some-other-file.js"],null,7,4,1,1],
[11,[__filename,"some-other-file.js"],null,7,4,0,0] ]
var actual = []
function next (i) { return function (er, files) {
if (er)
throw er
var line = [i, files, er, fs.MAX_OPEN, fs.MIN_MAX_OPEN, fs._curOpen, fds ]
actual.push(line)
if (i === max - 1) {
t.ok(fs.MAX_OPEN < limit)
t.same(actual, expect)
t.end()
}
} }
})

View File

@@ -0,0 +1,51 @@
A dead simple way to do inheritance in JS.
var inherits = require("inherits")
function Animal () {
this.alive = true
}
Animal.prototype.say = function (what) {
console.log(what)
}
inherits(Dog, Animal)
function Dog () {
Dog.super.apply(this)
}
Dog.prototype.sniff = function () {
this.say("sniff sniff")
}
Dog.prototype.bark = function () {
this.say("woof woof")
}
inherits(Chihuahua, Dog)
function Chihuahua () {
Chihuahua.super.apply(this)
}
Chihuahua.prototype.bark = function () {
this.say("yip yip")
}
// also works
function Cat () {
Cat.super.apply(this)
}
Cat.prototype.hiss = function () {
this.say("CHSKKSS!!")
}
inherits(Cat, Animal, {
meow: function () { this.say("miao miao") }
})
Cat.prototype.purr = function () {
this.say("purr purr")
}
var c = new Chihuahua
assert(c instanceof Chihuahua)
assert(c instanceof Dog)
assert(c instanceof Animal)
The actual function is laughably small. 10-lines small.
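For orientation, a minimal sketch of what an inherits(ctor, superCtor[, proto]) helper consistent with the examples above could look like; this is an approximation, not the package's actual source, and the `super` property name simply mirrors the usage shown:
function inherits (ctor, superCtor, proto) {
  // expose the parent so constructors can call Ctor.super.apply(this)
  ctor.super = superCtor
  // link the prototype chain so instanceof works across the hierarchy
  ctor.prototype = Object.create(superCtor.prototype, {
    constructor: { value: ctor, writable: true, configurable: true }
  })
  // third-argument form: mix additional methods onto the new prototype
  if (proto) Object.keys(proto).forEach(function (k) {
    ctor.prototype[k] = proto[k]
  })
  return ctor
}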

Some files were not shown because too many files have changed in this diff