Commit 3463c97b authored by Patiphan Marak

pro

parent d1d7d2e0


node_modules/
release-builds/
\ No newline at end of file
../asar/bin/asar.js
\ No newline at end of file
../decompress-zip/bin/decompress-zip
\ No newline at end of file
../electron-osx-sign/bin/electron-osx-flat.js
\ No newline at end of file
../electron-osx-sign/bin/electron-osx-sign.js
\ No newline at end of file
../electron-packager/cli.js
\ No newline at end of file
../nopt/bin/nopt.js
\ No newline at end of file
This software is dual-licensed under the ISC and MIT licenses.
You may use this software under EITHER of the following licenses.
----------
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
----------
Copyright Isaac Z. Schlueter and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
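For example, a minimal sketch of resolving a user-typed shorthand against a set of commands (the command names and the `resolve` helper below are purely illustrative, not part of abbrev):

```js
var abbrev = require("abbrev")

// Build the map of unambiguous prefixes once.
var commands = abbrev("install", "init", "list")

// Resolve whatever the user typed; ambiguous or unknown input has no entry.
function resolve (input) {
  var command = commands[input]
  if (!command) throw new Error("unknown or ambiguous command: " + input)
  return command
}

resolve("ins") // => 'install'
resolve("l")   // => 'list'
```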
module.exports = exports = abbrev.abbrev = abbrev
abbrev.monkeyPatch = monkeyPatch
function monkeyPatch () {
Object.defineProperty(Array.prototype, 'abbrev', {
value: function () { return abbrev(this) },
enumerable: false, configurable: true, writable: true
})
Object.defineProperty(Object.prototype, 'abbrev', {
value: function () { return abbrev(Object.keys(this)) },
enumerable: false, configurable: true, writable: true
})
}
function abbrev (list) {
if (arguments.length !== 1 || !Array.isArray(list)) {
list = Array.prototype.slice.call(arguments, 0)
}
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
args = args.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
var abbrevs = {}
, prev = ""
for (var i = 0, l = args.length ; i < l ; i ++) {
var current = args[i]
, next = args[i + 1] || ""
, nextMatches = true
, prevMatches = true
if (current === next) continue
for (var j = 0, cl = current.length ; j < cl ; j ++) {
var curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j ++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}
{
"_from": "abbrev@1",
"_id": "abbrev@1.1.1",
"_inBundle": false,
"_integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
"_location": "/abbrev",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "abbrev@1",
"name": "abbrev",
"escapedName": "abbrev",
"rawSpec": "1",
"saveSpec": null,
"fetchSpec": "1"
},
"_requiredBy": [
"/nopt",
"/touch/nopt"
],
"_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"_shasum": "f8f2c887ad10bf67f634f005b6987fed3179aac8",
"_spec": "abbrev@1",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/nopt",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/abbrev-js/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Like ruby's abbrev module, but in js",
"devDependencies": {
"tap": "^10.1"
},
"files": [
"abbrev.js"
],
"homepage": "https://github.com/isaacs/abbrev-js#readme",
"license": "ISC",
"main": "abbrev.js",
"name": "abbrev",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test.js --100"
},
"version": "1.1.1"
}
.git*
test/
test-browser/
build/
.travis.yml
*.swp
Makefile
Copyright (C) 2014-2016 Kevin Beaty
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## Any Promise
[![Build Status](https://secure.travis-ci.org/kevinbeaty/any-promise.svg)](http://travis-ci.org/kevinbeaty/any-promise)
Let your library support any ES 2015 (ES6) compatible `Promise` and leave the choice to application authors. The application can *optionally* register its preferred `Promise` implementation and it will be exported when requiring `any-promise` from library code.
If no preference is registered, defaults to the global `Promise` for newer Node.js versions. The browser version defaults to the window `Promise`, so polyfill or register as necessary.
### Usage with global Promise:
Assuming the global `Promise` is the desired implementation:
```bash
# Install any libraries depending on any-promise
$ npm install mz
```
The installed libraries will use global Promise by default.
```js
// in library
var Promise = require('any-promise') // the global Promise
function promiseReturningFunction(){
return new Promise(function(resolve, reject){...})
}
```
### Usage with registration:
Assuming `bluebird` is the desired Promise implementation:
```bash
# Install preferred promise library
$ npm install bluebird
# Install any-promise to allow registration
$ npm install any-promise
# Install any libraries you would like to use depending on any-promise
$ npm install mz
```
Register your preference in the application entry point before any other `require` of packages that load `any-promise`:
```javascript
// top of application index.js or other entry point
require('any-promise/register/bluebird')
// -or- Equivalent to above, but allows customization of Promise library
require('any-promise/register')('bluebird', {Promise: require('bluebird')})
```
Now that the implementation is registered, you can use any package depending on `any-promise`:
```javascript
var fsp = require('mz/fs') // mz/fs will use registered bluebird promises
var Promise = require('any-promise') // the registered bluebird promise
```
It is safe to call `register` multiple times, but it must always be with the same implementation.
Again, registration is *optional*. It should only be performed by the application author if overriding the global `Promise` implementation is desired.
### Optional Application Registration
As an application author, you can *optionally* register a preferred `Promise` implementation on application startup (before any call to `require('any-promise')`).
You must register your preference before any call to `require('any-promise')` (by you or required packages), and only one implementation can be registered. Typically, this registration would occur at the top of the application entry point.
#### Registration shortcuts
If you are using a known `Promise` implementation, you can register your preference with a shortcut:
```js
require('any-promise/register/bluebird')
// -or-
import 'any-promise/register/q';
```
Shortcut registration is the preferred registration method as it works in both the browser and Node.js. It is also convenient to use with `import` and with the many test runners that offer a `--require` flag:
```
$ ava --require=any-promise/register/bluebird test.js
```
Current known implementations include `bluebird`, `q`, `when`, `rsvp`, `es6-promise`, `promise`, `native-promise-only`, `pinkie`, `vow` and `lie`. If you are not using a known implementation, you can use another registration method described below.
#### Basic Registration
As an alternative to registration shortcuts, you can call the `register` function with the preferred `Promise` implementation. The benefit of this approach is that a `Promise` library can be required by name without being a known implementation. This approach does NOT work in the browser. To use `any-promise` in the browser use either registration shortcuts or specify the `Promise` constructor using advanced registration (see below).
```javascript
require('any-promise/register')('when')
// -or- require('any-promise/register')('any other ES6 compatible library (known or otherwise)')
```
This registration method will try to detect the `Promise` constructor from requiring the specified implementation. If you would like to specify your own constructor, see advanced registration.
#### Advanced Registration
To use the browser version, you should either install a polyfill or explicitly register the `Promise` constructor:
```javascript
require('any-promise/register')('bluebird', {Promise: require('bluebird')})
```
This could also be used for registering a custom `Promise` implementation or subclass.
Your preference will be registered globally, allowing a single registration even if multiple versions of `any-promise` are installed in the NPM dependency tree, or if multiple bundled JavaScript files are used in the browser. You can bypass this global registration via options:
```javascript
require('../register')('es6-promise', {Promise: require('es6-promise').Promise, global: false})
```
### Library Usage
To use any `Promise` constructor, simply require it:
```javascript
var Promise = require('any-promise');
return Promise
.all([xf, f, init, coll])
.then(fn);
return new Promise(function(resolve, reject){
try {
resolve(item);
} catch(e){
reject(e);
}
});
```
Except as noted below, libraries using `any-promise` should only use [documented](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) functions, as there is no guarantee which implementation will be chosen by the application author. Libraries should never call `register`; only the application author should call it, if desired.
#### Advanced Library Usage
If your library needs to branch code based on the registered implementation, you can retrieve it using `var impl = require('any-promise/implementation')`, where `impl` will be the package name (`"bluebird"`, `"when"`, etc.) if registered, `"global.Promise"` if using the global version on Node.js, or `"window.Promise"` if using the browser version. You should always include a default case, as there is no guarantee what package may be registered.
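For example, a minimal sketch of such branching (the `eachFile` helper and the bluebird-specific `Promise.map` call are illustrative, not part of `any-promise`):

```js
// Sketch only: branch on the registered implementation, with a default case.
var impl = require('any-promise/implementation')
var Promise = require('any-promise')

if (impl === 'bluebird') {
  // bluebird exposes extra utilities such as Promise.map with concurrency control
  module.exports = function eachFile (files, fn) {
    return Promise.map(files, fn, {concurrency: 4})
  }
} else {
  // default case: stick to standard, documented Promise functions
  module.exports = function eachFile (files, fn) {
    return Promise.all(files.map(fn))
  }
}
```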
### Support for old Node.js versions
Node.js versions prior to `v0.12` may have contained buggy versions of the global `Promise`. For this reason, the global `Promise` is not loaded automatically for these old versions. If using `any-promise` in Node.js versions prior to `v0.12`, the user should register a desired implementation.
If an implementation is not registered, `any-promise` will attempt to discover an installed `Promise` implementation. If no implementation can be found, an error will be thrown on `require('any-promise')`. While the auto-discovery usually avoids errors, it is non-deterministic. It is recommended that the user always register a preferred implementation for older Node.js versions.
This auto-discovery is only available for Node.js versions prior to `v0.12`. Any newer versions will always default to the global `Promise` implementation.
### Related
- [any-observable](https://github.com/sindresorhus/any-observable) - `any-promise` for Observables.
declare var implementation: string;
export = implementation;
module.exports = require('./register')().implementation
declare class Promise <R> implements Promise.Thenable <R> {
/**
* If you call resolve in the body of the callback passed to the constructor,
* your promise is fulfilled with result object passed to resolve.
* If you call reject your promise is rejected with the object passed to reject.
* For consistency and debugging (eg stack traces), obj should be an instanceof Error.
* Any errors thrown in the constructor callback will be implicitly passed to reject().
*/
constructor (callback: (resolve : (value?: R | Promise.Thenable<R>) => void, reject: (error?: any) => void) => void);
/**
* onFulfilled is called when/if "promise" resolves. onRejected is called when/if "promise" rejects.
* Both are optional, if either/both are omitted the next onFulfilled/onRejected in the chain is called.
* Both callbacks have a single parameter, the fulfillment value or rejection reason.
* "then" returns a new promise equivalent to the value you return from onFulfilled/onRejected after being passed through Promise.resolve.
* If an error is thrown in the callback, the returned promise rejects with that error.
*
* @param onFulfilled called when/if "promise" resolves
* @param onRejected called when/if "promise" rejects
*/
then <U> (onFulfilled?: (value: R) => U | Promise.Thenable<U>, onRejected?: (error: any) => U | Promise.Thenable<U>): Promise<U>;
then <U> (onFulfilled?: (value: R) => U | Promise.Thenable<U>, onRejected?: (error: any) => void): Promise<U>;
/**
* Sugar for promise.then(undefined, onRejected)
*
* @param onRejected called when/if "promise" rejects
*/
catch <U> (onRejected?: (error: any) => U | Promise.Thenable<U>): Promise<U>;
/**
* Make a new promise from the thenable.
* A thenable is promise-like in as far as it has a "then" method.
*/
static resolve (): Promise<void>;
static resolve <R> (value: R | Promise.Thenable<R>): Promise<R>;
/**
* Make a promise that rejects to obj. For consistency and debugging (eg stack traces), obj should be an instanceof Error
*/
static reject <R> (error: any): Promise<R>;
/**
* Make a promise that fulfills when every item in the array fulfills, and rejects if (and when) any item rejects.
* the array passed to all can be a mixture of promise-like objects and other objects.
* The fulfillment value is an array (in order) of fulfillment values. The rejection value is the first rejection value.
*/
static all <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>, T6 | Promise.Thenable<T6>, T7 | Promise.Thenable<T7>, T8 | Promise.Thenable<T8>, T9 | Promise.Thenable<T9>, T10 | Promise.Thenable<T10>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>;
static all <T1, T2, T3, T4, T5, T6, T7, T8, T9> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>, T6 | Promise.Thenable<T6>, T7 | Promise.Thenable<T7>, T8 | Promise.Thenable<T8>, T9 | Promise.Thenable<T9>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>;
static all <T1, T2, T3, T4, T5, T6, T7, T8> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>, T6 | Promise.Thenable<T6>, T7 | Promise.Thenable<T7>, T8 | Promise.Thenable<T8>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>;
static all <T1, T2, T3, T4, T5, T6, T7> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>, T6 | Promise.Thenable<T6>, T7 | Promise.Thenable<T7>]): Promise<[T1, T2, T3, T4, T5, T6, T7]>;
static all <T1, T2, T3, T4, T5, T6> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>, T6 | Promise.Thenable<T6>]): Promise<[T1, T2, T3, T4, T5, T6]>;
static all <T1, T2, T3, T4, T5> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>, T5 | Promise.Thenable<T5>]): Promise<[T1, T2, T3, T4, T5]>;
static all <T1, T2, T3, T4> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>, T4 | Promise.Thenable <T4>]): Promise<[T1, T2, T3, T4]>;
static all <T1, T2, T3> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>, T3 | Promise.Thenable<T3>]): Promise<[T1, T2, T3]>;
static all <T1, T2> (values: [T1 | Promise.Thenable<T1>, T2 | Promise.Thenable<T2>]): Promise<[T1, T2]>;
static all <T1> (values: [T1 | Promise.Thenable<T1>]): Promise<[T1]>;
static all <TAll> (values: Array<TAll | Promise.Thenable<TAll>>): Promise<TAll[]>;
/**
* Make a Promise that fulfills when any item fulfills, and rejects if any item rejects.
*/
static race <R> (promises: (R | Promise.Thenable<R>)[]): Promise<R>;
}
declare namespace Promise {
export interface Thenable <R> {
then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => void): Thenable<U>;
}
}
export = Promise;
module.exports = require('./register')().Promise
"use strict"
// global key for user preferred registration
var REGISTRATION_KEY = '@@any-promise/REGISTRATION',
// Prior registration (preferred or detected)
registered = null
/**
* Registers the given implementation. An implementation must
* be registered prior to any call to `require("any-promise")`,
* typically on application load.
*
* If called with no arguments, will return registration in
* following priority:
*
* For Node.js:
*
* 1. Previous registration
* 2. global.Promise if node.js version >= 0.12
* 3. Auto detected promise based on first successful require of
* known promise libraries. Note this is a last resort, as the
* loaded library is non-deterministic. node.js >= 0.12 will
* always use global.Promise over this priority list.
* 4. Throws error.
*
* For Browser:
*
* 1. Previous registration
* 2. window.Promise
* 3. Throws error.
*
* Options:
*
* Promise: Desired Promise constructor
* global: Boolean - Should the registration be cached in a global variable to
* allow cross dependency/bundle registration? (default true)
*/
module.exports = function(root, loadImplementation){
return function register(implementation, opts){
implementation = implementation || null
opts = opts || {}
// global registration unless explicitly {global: false} in options (default true)
var registerGlobal = opts.global !== false;
// load any previous global registration
if(registered === null && registerGlobal){
registered = root[REGISTRATION_KEY] || null
}
if(registered !== null
&& implementation !== null
&& registered.implementation !== implementation){
// Throw error if attempting to redefine implementation
throw new Error('any-promise already defined as "'+registered.implementation+
'". You can only register an implementation before the first '+
' call to require("any-promise") and an implementation cannot be changed')
}
if(registered === null){
// use provided implementation
if(implementation !== null && typeof opts.Promise !== 'undefined'){
registered = {
Promise: opts.Promise,
implementation: implementation
}
} else {
// require implementation if implementation is specified but not provided
registered = loadImplementation(implementation)
}
if(registerGlobal){
// register preference globally in case multiple installations
root[REGISTRATION_KEY] = registered
}
}
return registered
}
}
"use strict";
try {
module.exports = require('./register')().Promise || null
} catch(e) {
module.exports = null
}
{
"_from": "any-promise@^1.0.0",
"_id": "any-promise@1.3.0",
"_inBundle": false,
"_integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8=",
"_location": "/any-promise",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "any-promise@^1.0.0",
"name": "any-promise",
"escapedName": "any-promise",
"rawSpec": "^1.0.0",
"saveSpec": null,
"fetchSpec": "^1.0.0"
},
"_requiredBy": [
"/mz",
"/thenify"
],
"_resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
"_shasum": "abc6afeedcea52e809cdc0376aed3ce39635d17f",
"_spec": "any-promise@^1.0.0",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/mz",
"author": {
"name": "Kevin Beaty"
},
"browser": {
"./register.js": "./register-shim.js"
},
"bugs": {
"url": "https://github.com/kevinbeaty/any-promise/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Resolve any installed ES6 compatible promise",
"devDependencies": {
"ava": "^0.14.0",
"bluebird": "^3.0.0",
"es6-promise": "^3.0.0",
"is-promise": "^2.0.0",
"lie": "^3.0.0",
"mocha": "^2.0.0",
"native-promise-only": "^0.8.0",
"phantomjs-prebuilt": "^2.0.0",
"pinkie": "^2.0.0",
"promise": "^7.0.0",
"q": "^1.0.0",
"rsvp": "^3.0.0",
"vow": "^0.4.0",
"when": "^3.0.0",
"zuul": "^3.0.0"
},
"homepage": "http://github.com/kevinbeaty/any-promise",
"keywords": [
"promise",
"es6"
],
"license": "MIT",
"main": "index.js",
"name": "any-promise",
"repository": {
"type": "git",
"url": "git+https://github.com/kevinbeaty/any-promise.git"
},
"scripts": {
"test": "ava"
},
"typings": "index.d.ts",
"version": "1.3.0"
}
"use strict";
module.exports = require('./loader')(window, loadImplementation)
/**
* Browser specific loadImplementation. Always uses `window.Promise`
*
* To register a custom implementation, must register with `Promise` option.
*/
function loadImplementation(){
if(typeof window.Promise === 'undefined'){
throw new Error("any-promise browser requires a polyfill or explicit registration"+
" e.g: require('any-promise/register/bluebird')")
}
return {
Promise: window.Promise,
implementation: 'window.Promise'
}
}
import Promise = require('./index');
declare function register (module?: string, options?: register.Options): register.Register;
declare namespace register {
export interface Register {
Promise: typeof Promise;
implementation: string;
}
export interface Options {
Promise?: typeof Promise;
global?: boolean
}
}
export = register;
"use strict"
module.exports = require('./loader')(global, loadImplementation);
/**
* Node.js version of loadImplementation.
*
* Requires the given implementation and returns the registration
* containing {Promise, implementation}
*
* If implementation is undefined or global.Promise, loads it
* Otherwise uses require
*/
function loadImplementation(implementation){
var impl = null
if(shouldPreferGlobalPromise(implementation)){
// if no implementation or env specified use global.Promise
impl = {
Promise: global.Promise,
implementation: 'global.Promise'
}
} else if(implementation){
// if implementation specified, require it
var lib = require(implementation)
impl = {
Promise: lib.Promise || lib,
implementation: implementation
}
} else {
// try to auto detect implementation. This is non-deterministic
// and should prefer other branches, but this is our last chance
// to load something without throwing error
impl = tryAutoDetect()
}
if(impl === null){
throw new Error('Cannot find any-promise implementation nor'+
' global.Promise. You must install polyfill or call'+
' require("any-promise/register") with your preferred'+
' implementation, e.g. require("any-promise/register/bluebird")'+
' on application load prior to any require("any-promise").')
}
return impl
}
/**
* Determines if the global.Promise should be preferred if an implementation
* has not been registered.
*/
function shouldPreferGlobalPromise(implementation){
if(implementation){
return implementation === 'global.Promise'
} else if(typeof global.Promise !== 'undefined'){
// Load global promise if implementation not specified
// Versions < 0.11 did not have global Promise
// Do not use for version < 0.12 as version 0.11 contained buggy versions
var version = (/v(\d+)\.(\d+)\.(\d+)/).exec(process.version)
return !(version && +version[1] == 0 && +version[2] < 12)
}
// do not have global.Promise or another implementation was specified
return false
}
/**
* Look for common libs as a last resort; there is no guarantee that
* this will return a desired implementation or even be deterministic.
* The priority is also nearly arbitrary. We are only doing this
* for older versions of Node.js (<0.12) that do not have a reasonable
* global.Promise implementation and where the user has not registered
* a preference. This preserves the behavior of any-promise <= 0.1
* and may be deprecated or removed in the future.
*/
function tryAutoDetect(){
var libs = [
"es6-promise",
"promise",
"native-promise-only",
"bluebird",
"rsvp",
"when",
"q",
"pinkie",
"lie",
"vow"]
var i = 0, len = libs.length
for(; i < len; i++){
try {
return loadImplementation(libs[i])
} catch(e){}
}
return null
}
'use strict';
require('../register')('bluebird', {Promise: require('bluebird')})
'use strict';
require('../register')('es6-promise', {Promise: require('es6-promise').Promise})
'use strict';
require('../register')('lie', {Promise: require('lie')})
'use strict';
require('../register')('native-promise-only', {Promise: require('native-promise-only')})
'use strict';
require('../register')('pinkie', {Promise: require('pinkie')})
'use strict';
require('../register')('promise', {Promise: require('promise')})
'use strict';
require('../register')('q', {Promise: require('q').Promise})
'use strict';
require('../register')('rsvp', {Promise: require('rsvp').Promise})
'use strict';
require('../register')('vow', {Promise: require('vow').Promise})
'use strict';
require('../register')('when', {Promise: require('when').Promise})
# Changes By Version
## 0.14.0 - 2017-11-02
### Added
* Snapcraft metadata (#130)
* `uncache` and `uncacheAll` (#118)
### Fixed
* Use of asar inside of an Electron app (#118)
## 0.13.1 - 2017-11-02
### Fixed
- Do not return before the write stream fully closes (#113)
## 0.13.0 - 2017-01-09
### Changed
- Dropped support for Node `0.10.0` and `0.12.0`. The minimum supported version
is now Node `4.6.0`. (#100)
- This project was ported from CoffeeScript to JavaScript. The behavior and
APIs should be the same as previous releases. (#100)
## 0.12.4 - 2016-12-28
### Fixed
- Unpack glob patterns containing `{}` characters not working properly (#99)
## 0.12.3 - 2016-08-29
### Fixed
- Multibyte characters in paths are now supported (#86)
## 0.12.2 - 2016-08-22
### Fixed
- Upgraded `minimatch` to `^3.0.3` from `^3.0.0` for [RegExp DOS fix](https://nodesecurity.io/advisories/minimatch_regular-expression-denial-of-service).
## 0.12.1 - 2016-07-25
### Fixed
- Fix `Maximum call stack size exceeded` error regression (#80)
## 0.12.0 - 2016-07-20
### Added
- Added `transform` option to specify a `stream.Transform` function to the
`createPackageWithOptions` API (#73)
## 0.11.0 - 2016-04-06
### Fixed
- Upgraded `mksnapshot` dependency to remove logged `graceful-fs` deprecation
warnings (#61)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# asar - Electron Archive
[![Travis build status](https://travis-ci.org/electron/asar.svg?branch=master)](https://travis-ci.org/electron/asar)
[![AppVeyor build status](https://ci.appveyor.com/api/projects/status/mrfwfr0uxlbwkuq3?svg=true)](https://ci.appveyor.com/project/electron-bot/asar)
[![dependencies](http://img.shields.io/david/electron/asar.svg?style=flat-square)](https://david-dm.org/electron/asar)
[![npm version](http://img.shields.io/npm/v/asar.svg?style=flat-square)](https://npmjs.org/package/asar)
Asar is a simple extensive archive format. It works like `tar`, concatenating
all files together without compression, while still supporting random access.
## Features
* Support random access
* Use JSON to store files' information
* Very easy to write a parser
## Command line utility
### Install
```bash
$ npm install asar
```
### Usage
```bash
$ asar --help
Usage: asar [options] [command]
Commands:
pack|p <dir> <output>
create asar archive
list|l <archive>
list files of asar archive
extract-file|ef <archive> <filename>
extract one file from archive
extract|e <archive> <dest>
extract archive
Options:
-h, --help output usage information
-V, --version output the version number
```
#### Excluding multiple resources from being packed
Given:
```
app
(a) ├── x1
(b) ├── x2
(c) ├── y3
(d) │   ├── x1
(e) │   └── z1
(f) │   └── x2
(g) └── z4
(h) └── w1
```
Exclude: a, b
```bash
$ asar pack app app.asar --unpack-dir "{x1,x2}"
```
Exclude: a, b, d, f
```bash
$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
```
Exclude: a, b, d, f, h
```bash
$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
```
## Using programmatically
### Example
```js
var asar = require('asar');
var src = 'some/path/';
var dest = 'name.asar';
asar.createPackage(src, dest, function() {
console.log('done.');
})
```
Please note that there is currently **no** error handling provided!
### Transform
You can pass in a `transform` option: a function that either returns
nothing or returns a `stream.Transform`. The latter will be applied to files that will be
packed into the `.asar` file, to transform them (e.g. compress).
```js
var asar = require('asar');
var src = 'some/path/';
var dest = 'name.asar';
function transform(filename) {
return new CustomTransformStream()
}
asar.createPackageWithOptions(src, dest, { transform: transform }, function() {
console.log('done.');
})
```
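For example, a hypothetical transform that gzips every `.js` file using Node's built-in `zlib` and leaves every other file untouched (a sketch only, not an official recipe):

```js
var asar = require('asar');
var zlib = require('zlib');

// Sketch: gzip .js files during packing; returning nothing copies the file as-is.
function transform(filename) {
  if (/\.js$/.test(filename)) {
    return zlib.createGzip(); // a stream.Transform
  }
}

asar.createPackageWithOptions('some/path/', 'name.asar', { transform: transform }, function() {
  console.log('done.');
})
```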
## Using with grunt
There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
## Format
Asar uses [Pickle][pickle] to safely serialize binary values to file; there is
also a [Node.js binding][node-pickle] of the `Pickle` class.
The format of asar is very flat:
```
| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
```
The `header_size` and `header` are serialized with the [Pickle][pickle] class, and
`header_size`'s [Pickle][pickle] object is 8 bytes.
The `header` is a JSON string, and the `header_size` is the size of `header`'s
`Pickle` object.
The structure of `header` is something like this:
```json
{
"files": {
"tmp": {
"files": {}
},
"usr" : {
"files": {
"bin": {
"files": {
"ls": {
"offset": "0",
"size": 100,
"executable": true
},
"cd": {
"offset": "100",
"size": 100,
"executable": true
}
}
}
}
},
"etc": {
"files": {
"hosts": {
"offset": "200",
"size": 32
}
}
}
}
}
```
`offset` and `size` record the information needed to read the file from the archive. The
`offset` starts from 0, so you have to manually add the size of `header_size` and
`header` to the `offset` to get the real offset of the file.
`offset` is a UINT64 number represented as a string, because there is no way to
precisely represent UINT64 in a JavaScript `Number`. `size` is a JavaScript
`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
`9007199254740991` and is about 8PB. We didn't store `size` as UINT64
because file sizes in Node.js are represented as `Number` and it is not safe to
convert `Number` to UINT64.
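To make the offset arithmetic concrete, here is a minimal sketch (not part of asar) that reads the `/etc/hosts` entry from the example header above by hand, assuming an archive named `app.asar` and using the same `chromium-pickle-js` calls that asar itself uses:

```js
var fs = require('fs');
var pickle = require('chromium-pickle-js');

var fd = fs.openSync('app.asar', 'r');

// header_size is stored in an 8-byte Pickle object at the start of the file
var sizeBuf = Buffer.alloc(8);
fs.readSync(fd, sizeBuf, 0, 8, 0);
var headerSize = pickle.createFromBuffer(sizeBuf).createIterator().readUInt32();

// the header itself is a JSON string wrapped in a Pickle object
var headerBuf = Buffer.alloc(headerSize);
fs.readSync(fd, headerBuf, 0, headerSize, 8);
var header = JSON.parse(pickle.createFromBuffer(headerBuf).createIterator().readString());

// the real position of a file is 8 + header_size + its "offset" field
var info = header.files.etc.files.hosts;
var content = Buffer.alloc(info.size);
fs.readSync(fd, content, 0, info.size, 8 + headerSize + parseInt(info.offset, 10));
fs.closeSync(fd);
```

The key line is the last read: the file's payload lives at `8 + header_size + offset` bytes into the archive.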
[pickle]: https://chromium.googlesource.com/chromium/src/+/master/base/pickle.h
[node-pickle]: https://www.npmjs.org/package/chromium-pickle
[grunt-asar]: https://github.com/bwin/grunt-asar
#!/usr/bin/env node
var asar = require('../lib/asar')
var program = require('commander')
program.version('v' + require('../package.json').version)
.description('Manipulate asar archive files')
program.command('pack <dir> <output>')
.alias('p')
.description('create asar archive')
.option('--ordering <file path>', 'path to a text file for ordering contents')
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
.option('--snapshot', 'create snapshot')
.option('--exclude-hidden', 'exclude hidden files')
.option('--sv <version>', '(snapshot) version of Electron')
.option('--sa <arch>', '(snapshot) arch of Electron')
.option('--sb <builddir>', '(snapshot) where to put downloaded files')
.action(function (dir, output, options) {
options = {
unpack: options.unpack,
unpackDir: options.unpackDir,
snapshot: options.snapshot,
ordering: options.ordering,
version: options.sv,
arch: options.sa,
builddir: options.sb,
dot: !options.excludeHidden
}
asar.createPackageWithOptions(dir, output, options, function (error) {
if (error) {
console.error(error.stack)
process.exit(1)
}
})
})
program.command('list <archive>')
.alias('l')
.description('list files of asar archive')
.action(function (archive) {
var files = asar.listPackage(archive)
for (var i in files) {
console.log(files[i])
}
})
program.command('extract-file <archive> <filename>')
.alias('ef')
.description('extract one file from archive')
.action(function (archive, filename) {
require('fs').writeFileSync(require('path').basename(filename),
asar.extractFile(archive, filename))
})
program.command('extract <archive> <dest>')
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
})
program.command('*')
.action(function (cmd) {
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
})
program.parse(process.argv)
if (program.args.length === 0) {
program.help()
}
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const path = require('path')
const minimatch = require('minimatch')
const mkdirp = require('mkdirp')
const Filesystem = require('./filesystem')
const disk = require('./disk')
const crawlFilesystem = require('./crawlfs')
const createSnapshot = require('./snapshot')
// Return whether or not a directory should be excluded from packing due to
// "--unpack-dir" option
//
// @param {string} path - directory path to check
// @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
// @param {array} unpackDirs - Array of directory paths previously marked as unpacked
//
const isUnpackDir = function (path, pattern, unpackDirs) {
if (path.indexOf(pattern) === 0 || minimatch(path, pattern)) {
if (unpackDirs.indexOf(path) === -1) {
unpackDirs.push(path)
}
return true
} else {
for (let i = 0; i < unpackDirs.length; i++) {
if (path.indexOf(unpackDirs[i]) === 0) {
return true
}
}
return false
}
}
module.exports.createPackage = function (src, dest, callback) {
return module.exports.createPackageWithOptions(src, dest, {}, callback)
}
module.exports.createPackageWithOptions = function (src, dest, options, callback) {
const dot = typeof options.dot === 'undefined' ? true : options.dot
return crawlFilesystem(src, { dot: dot }, function (error, filenames, metadata) {
if (error) { return callback(error) }
module.exports.createPackageFromFiles(src, dest, filenames, metadata, options, callback)
})
}
/*
createPackageFromFiles - Create an asar-archive from a list of filenames
src: Base path. All files are relative to this.
dest: Archive filename (& path).
filenames: Array of filenames relative to src.
metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
options: The options.
callback: The callback function. Accepts (err).
*/
module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options, callback) {
if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
const filesystem = new Filesystem(src)
const files = []
const unpackDirs = []
let filenamesSorted = []
if (options.ordering) {
const orderingFiles = fs.readFileSync(options.ordering).toString().split('\n').map(function (line) {
if (line.includes(':')) { line = line.split(':').pop() }
line = line.trim()
if (line.startsWith('/')) { line = line.slice(1) }
return line
})
const ordering = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
let str = src
for (const pathComponent of pathComponents) {
str = path.join(str, pathComponent)
ordering.push(str)
}
}
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
filenamesSorted.push(file)
}
}
for (const file of filenames) {
if (!filenamesSorted.includes(file)) {
filenamesSorted.push(file)
missing += 1
}
}
console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
} else {
filenamesSorted = filenames
}
const handleFile = function (filename, done) {
let file = metadata[filename]
let type
if (!file) {
const stat = fs.lstatSync(filename)
if (stat.isDirectory()) { type = 'directory' }
if (stat.isFile()) { type = 'file' }
if (stat.isSymbolicLink()) { type = 'link' }
file = {stat, type}
}
let shouldUnpack
switch (file.type) {
case 'directory':
shouldUnpack = options.unpackDir
? isUnpackDir(path.relative(src, filename), options.unpackDir, unpackDirs)
: false
filesystem.insertDirectory(filename, shouldUnpack)
break
case 'file':
shouldUnpack = false
if (options.unpack) {
shouldUnpack = minimatch(filename, options.unpack, {matchBase: true})
}
if (!shouldUnpack && options.unpackDir) {
const dirName = path.relative(src, path.dirname(filename))
shouldUnpack = isUnpackDir(dirName, options.unpackDir, unpackDirs)
}
files.push({filename: filename, unpack: shouldUnpack})
filesystem.insertFile(filename, shouldUnpack, file, options, done)
return
case 'link':
filesystem.insertLink(filename, file.stat)
break
}
return process.nextTick(done)
}
const insertsDone = function () {
return mkdirp(path.dirname(dest), function (error) {
if (error) { return callback(error) }
return disk.writeFilesystem(dest, filesystem, files, metadata, function (error) {
if (error) { return callback(error) }
if (options.snapshot) {
return createSnapshot(src, dest, filenames, metadata, options, callback)
} else {
return callback(null)
}
})
})
}
const names = filenamesSorted.slice()
const next = function (name) {
if (!name) { return insertsDone() }
return handleFile(name, function () {
return next(names.shift())
})
}
return next(names.shift())
}
module.exports.statFile = function (archive, filename, followLinks) {
const filesystem = disk.readFilesystemSync(archive)
return filesystem.getFile(filename, followLinks)
}
module.exports.listPackage = function (archive) {
return disk.readFilesystemSync(archive).listFiles()
}
module.exports.extractFile = function (archive, filename) {
const filesystem = disk.readFilesystemSync(archive)
return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
}
module.exports.extractAll = function (archive, dest) {
const filesystem = disk.readFilesystemSync(archive)
const filenames = filesystem.listFiles()
// under windows just extract links as regular files
const followLinks = process.platform === 'win32'
// create destination directory
mkdirp.sync(dest)
return filenames.map((filename) => {
filename = filename.substr(1) // get rid of leading slash
const destFilename = path.join(dest, filename)
const file = filesystem.getFile(filename, followLinks)
if (file.files) {
// it's a directory, create it and continue with the next entry
mkdirp.sync(destFilename)
} else if (file.link) {
// it's a symlink, create a symlink
const linkSrcPath = path.dirname(path.join(dest, file.link))
const linkDestPath = path.dirname(destFilename)
const relativePath = path.relative(linkDestPath, linkSrcPath);
// try to delete output file, because we can't overwrite a link
(() => {
try {
fs.unlinkSync(destFilename)
} catch (error) {}
})()
const linkTo = path.join(relativePath, path.basename(file.link))
fs.symlinkSync(linkTo, destFilename)
} else {
// it's a file, extract it
const content = disk.readFileSync(filesystem, filename, file)
fs.writeFileSync(destFilename, content)
}
})
}
module.exports.uncache = function (archive) {
return disk.uncacheFilesystem(archive)
}
module.exports.uncacheAll = function () {
disk.uncacheAll()
}
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const glob = require('glob')
module.exports = function (dir, options, callback) {
const metadata = {}
return glob(dir + '/**/*', options, function (error, filenames) {
if (error) { return callback(error) }
for (const filename of filenames) {
const stat = fs.lstatSync(filename)
if (stat.isFile()) {
metadata[filename] = {type: 'file', stat: stat}
} else if (stat.isDirectory()) {
metadata[filename] = {type: 'directory', stat: stat}
} else if (stat.isSymbolicLink()) {
metadata[filename] = {type: 'link', stat: stat}
}
}
return callback(null, filenames, metadata)
})
}
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const path = require('path')
const mkdirp = require('mkdirp')
const pickle = require('chromium-pickle-js')
const Filesystem = require('./filesystem')
let filesystemCache = {}
const copyFileToSync = function (dest, src, filename) {
const srcFile = path.join(src, filename)
const targetFile = path.join(dest, filename)
const content = fs.readFileSync(srcFile)
const stats = fs.statSync(srcFile)
mkdirp.sync(path.dirname(targetFile))
return fs.writeFileSync(targetFile, content, {mode: stats.mode})
}
const writeFileListToStream = function (dest, filesystem, out, list, metadata, callback) {
for (let i = 0; i < list.length; i++) {
const file = list[i]
if (file.unpack) {
// the file should not be packed into archive.
const filename = path.relative(filesystem.src, file.filename)
try {
copyFileToSync(`${dest}.unpacked`, filesystem.src, filename)
} catch (error) {
return callback(error)
}
} else {
const tr = metadata[file.filename].transformed
const stream = fs.createReadStream((tr ? tr.path : file.filename))
stream.pipe(out, {end: false})
stream.on('error', callback)
return stream.on('end', function () {
return writeFileListToStream(dest, filesystem, out, list.slice(i + 1), metadata, callback)
})
}
}
out.end()
return callback(null)
}
module.exports.writeFilesystem = function (dest, filesystem, files, metadata, callback) {
let sizeBuf
let headerBuf
try {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
sizeBuf = sizePickle.toBuffer()
} catch (error) {
return callback(error)
}
const out = fs.createWriteStream(dest)
out.on('error', callback)
out.write(sizeBuf)
return out.write(headerBuf, function () {
return writeFileListToStream(dest, filesystem, out, files, metadata, callback)
})
}
module.exports.readArchiveHeaderSync = function (archive) {
const fd = fs.openSync(archive, 'r')
let size
let headerBuf
try {
const sizeBuf = new Buffer(8)
if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
throw new Error('Unable to read header size')
}
const sizePickle = pickle.createFromBuffer(sizeBuf)
size = sizePickle.createIterator().readUInt32()
headerBuf = new Buffer(size)
if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
throw new Error('Unable to read header')
}
} finally {
fs.closeSync(fd)
}
const headerPickle = pickle.createFromBuffer(headerBuf)
const header = headerPickle.createIterator().readString()
return {header: JSON.parse(header), headerSize: size}
}
module.exports.readFilesystemSync = function (archive) {
if (!filesystemCache[archive]) {
const header = this.readArchiveHeaderSync(archive)
const filesystem = new Filesystem(archive)
filesystem.header = header.header
filesystem.headerSize = header.headerSize
filesystemCache[archive] = filesystem
}
return filesystemCache[archive]
}
module.exports.uncacheFilesystem = function (archive) {
if (filesystemCache[archive]) {
filesystemCache[archive] = undefined
return true
}
return false
}
module.exports.uncacheAll = function () {
filesystemCache = {}
}
module.exports.readFileSync = function (filesystem, filename, info) {
let buffer = new Buffer(info.size)
if (info.size <= 0) { return buffer }
if (info.unpacked) {
// it's an unpacked file, copy it.
buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
} else {
// Node throws an exception when reading 0 bytes into a 0-size buffer,
// so we short-circuit the read in this case.
const fd = fs.openSync(filesystem.src, 'r')
try {
const offset = 8 + filesystem.headerSize + parseInt(info.offset)
fs.readSync(fd, buffer, 0, info.size, offset)
} finally {
fs.closeSync(fd)
}
}
return buffer
}
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const path = require('path')
const tmp = require('tmp')
const UINT64 = require('cuint').UINT64
class Filesystem {
constructor (src) {
this.src = path.resolve(src)
this.header = {files: {}}
this.offset = UINT64(0)
}
searchNodeFromDirectory (p) {
let json = this.header
const dirs = p.split(path.sep)
for (const dir of dirs) {
if (dir !== '.') {
json = json.files[dir]
}
}
return json
}
searchNodeFromPath (p) {
p = path.relative(this.src, p)
if (!p) { return this.header }
const name = path.basename(p)
const node = this.searchNodeFromDirectory(path.dirname(p))
if (node.files == null) {
node.files = {}
}
if (node.files[name] == null) {
node.files[name] = {}
}
return node.files[name]
}
insertDirectory (p, shouldUnpack) {
const node = this.searchNodeFromPath(p)
if (shouldUnpack) {
node.unpacked = shouldUnpack
}
node.files = {}
return node.files
}
insertFile (p, shouldUnpack, file, options, callback) {
const dirNode = this.searchNodeFromPath(path.dirname(p))
const node = this.searchNodeFromPath(p)
if (shouldUnpack || dirNode.unpacked) {
node.size = file.stat.size
node.unpacked = true
process.nextTick(callback)
return
}
const handler = () => {
const size = file.transformed ? file.transformed.stat.size : file.stat.size
// JavaScript can not precisely represent integers >= UINT32_MAX.
if (size > 4294967295) {
throw new Error(`${p}: file size can not be larger than 4.2GB`)
}
node.size = size
node.offset = this.offset.toString()
if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
node.executable = true
}
this.offset.add(UINT64(size))
return callback()
}
const tr = options.transform && options.transform(p)
if (tr) {
return tmp.file(function (err, path) {
if (err) { return handler() }
const out = fs.createWriteStream(path)
const stream = fs.createReadStream(p)
stream.pipe(tr).pipe(out)
return out.on('close', function () {
file.transformed = {
path,
stat: fs.lstatSync(path)
}
return handler()
})
})
} else {
return process.nextTick(handler)
}
}
insertLink (p, stat) {
const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
if (link.substr(0, 2) === '..') {
throw new Error(`${p}: file links out of the package`)
}
const node = this.searchNodeFromPath(p)
node.link = link
return link
}
listFiles () {
const files = []
const fillFilesFromHeader = function (p, json) {
if (!json.files) {
return
}
return (() => {
const result = []
for (const f in json.files) {
const fullPath = path.join(p, f)
files.push(fullPath)
result.push(fillFilesFromHeader(fullPath, json.files[f]))
}
return result
})()
}
fillFilesFromHeader('/', this.header)
return files
}
getNode (p) {
const node = this.searchNodeFromDirectory(path.dirname(p))
const name = path.basename(p)
if (name) {
return node.files[name]
} else {
return node
}
}
getFile (p, followLinks) {
followLinks = typeof followLinks === 'undefined' ? true : followLinks
const info = this.getNode(p)
// if followLinks is false we don't resolve symlinks
if (info.link && followLinks) {
return this.getFile(info.link)
} else {
return info
}
}
}
module.exports = Filesystem
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const path = require('path')
const mksnapshot = require('mksnapshot')
const vm = require('vm')
const stripBOM = function (content) {
if (content.charCodeAt(0) === 0xFEFF) {
content = content.slice(1)
}
return content
}
const wrapModuleCode = function (script) {
script = script.replace(/^#!.*/, '')
return `(function(exports, require, module, __filename, __dirname) { ${script} \n});`
}
const dumpObjectToJS = function (content) {
let result = 'var __ATOM_SHELL_SNAPSHOT = {\n'
for (const filename in content) {
const func = content[filename].toString()
result += ` '${filename}': ${func},\n`
}
result += '};\n'
return result
}
const createSnapshot = function (src, dest, filenames, metadata, options, callback) {
const content = {}
try {
src = path.resolve(src)
for (const filename of filenames) {
const file = metadata[filename]
if ((file.type === 'file' || file.type === 'link') && filename.substr(-3) === '.js') {
const script = wrapModuleCode(stripBOM(fs.readFileSync(filename, 'utf8')))
const relativeFilename = path.relative(src, filename)
try {
const compiled = vm.runInThisContext(script, {filename: relativeFilename})
content[relativeFilename] = compiled
} catch (error) {
console.error('Ignoring ' + relativeFilename + ' for ' + error.name)
}
}
}
} catch (error) {
return callback(error)
}
// run mksnapshot
const str = dumpObjectToJS(content)
const version = options.version
const arch = options.arch
const builddir = options.builddir
let snapshotdir = options.snapshotdir
if (typeof snapshotdir === 'undefined' || snapshotdir === null) { snapshotdir = path.dirname(dest) }
const target = path.resolve(snapshotdir, 'snapshot_blob.bin')
return mksnapshot(str, target, version, arch, builddir, callback)
}
module.exports = createSnapshot
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
exports.alphasort = alphasort
exports.alphasorti = alphasorti
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored
function ownProp (obj, field) {
return Object.prototype.hasOwnProperty.call(obj, field)
}
var path = require("path")
var minimatch = require("minimatch")
var isAbsolute = require("path-is-absolute")
var Minimatch = minimatch.Minimatch
function alphasorti (a, b) {
return a.toLowerCase().localeCompare(b.toLowerCase())
}
function alphasort (a, b) {
return a.localeCompare(b)
}
function setupIgnores (self, options) {
self.ignore = options.ignore || []
if (!Array.isArray(self.ignore))
self.ignore = [self.ignore]
if (self.ignore.length) {
self.ignore = self.ignore.map(ignoreMap)
}
}
// ignore patterns are always in dot:true mode.
function ignoreMap (pattern) {
var gmatcher = null
if (pattern.slice(-3) === '/**') {
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
gmatcher = new Minimatch(gpattern, { dot: true })
}
return {
matcher: new Minimatch(pattern, { dot: true }),
gmatcher: gmatcher
}
}
function setopts (self, pattern, options) {
if (!options)
options = {}
// base-matching: just use globstar for that.
if (options.matchBase && -1 === pattern.indexOf("/")) {
if (options.noglobstar) {
throw new Error("base matching requires globstar")
}
pattern = "**/" + pattern
}
self.silent = !!options.silent
self.pattern = pattern
self.strict = options.strict !== false
self.realpath = !!options.realpath
self.realpathCache = options.realpathCache || Object.create(null)
self.follow = !!options.follow
self.dot = !!options.dot
self.mark = !!options.mark
self.nodir = !!options.nodir
if (self.nodir)
self.mark = true
self.sync = !!options.sync
self.nounique = !!options.nounique
self.nonull = !!options.nonull
self.nosort = !!options.nosort
self.nocase = !!options.nocase
self.stat = !!options.stat
self.noprocess = !!options.noprocess
self.maxLength = options.maxLength || Infinity
self.cache = options.cache || Object.create(null)
self.statCache = options.statCache || Object.create(null)
self.symlinks = options.symlinks || Object.create(null)
setupIgnores(self, options)
self.changedCwd = false
var cwd = process.cwd()
if (!ownProp(options, "cwd"))
self.cwd = cwd
else {
self.cwd = options.cwd
self.changedCwd = path.resolve(options.cwd) !== cwd
}
self.root = options.root || path.resolve(self.cwd, "/")
self.root = path.resolve(self.root)
if (process.platform === "win32")
self.root = self.root.replace(/\\/g, "/")
self.nomount = !!options.nomount
// disable comments and negation in Minimatch.
// Note that they are not supported in Glob itself anyway.
options.nonegate = true
options.nocomment = true
self.minimatch = new Minimatch(pattern, options)
self.options = self.minimatch.options
}
function finish (self) {
var nou = self.nounique
var all = nou ? [] : Object.create(null)
for (var i = 0, l = self.matches.length; i < l; i ++) {
var matches = self.matches[i]
if (!matches || Object.keys(matches).length === 0) {
if (self.nonull) {
// do like the shell, and spit out the literal glob
var literal = self.minimatch.globSet[i]
if (nou)
all.push(literal)
else
all[literal] = true
}
} else {
// had matches
var m = Object.keys(matches)
if (nou)
all.push.apply(all, m)
else
m.forEach(function (m) {
all[m] = true
})
}
}
if (!nou)
all = Object.keys(all)
if (!self.nosort)
all = all.sort(self.nocase ? alphasorti : alphasort)
// at *some* point we statted all of these
if (self.mark) {
for (var i = 0; i < all.length; i++) {
all[i] = self._mark(all[i])
}
if (self.nodir) {
all = all.filter(function (e) {
return !(/\/$/.test(e))
})
}
}
if (self.ignore.length)
all = all.filter(function(m) {
return !isIgnored(self, m)
})
self.found = all
}
function mark (self, p) {
var abs = makeAbs(self, p)
var c = self.cache[abs]
var m = p
if (c) {
var isDir = c === 'DIR' || Array.isArray(c)
var slash = p.slice(-1) === '/'
if (isDir && !slash)
m += '/'
else if (!isDir && slash)
m = m.slice(0, -1)
if (m !== p) {
var mabs = makeAbs(self, m)
self.statCache[mabs] = self.statCache[abs]
self.cache[mabs] = self.cache[abs]
}
}
return m
}
// lotta situps...
function makeAbs (self, f) {
var abs = f
if (f.charAt(0) === '/') {
abs = path.join(self.root, f)
} else if (isAbsolute(f) || f === '') {
abs = f
} else if (self.changedCwd) {
abs = path.resolve(self.cwd, f)
} else {
abs = path.resolve(f)
}
return abs
}
// Returns true if the pattern ends with a globstar '**', so the accompanying parent directory is covered too.
// E.g. if the pattern is node_modules/**, 'node_modules' itself is added to the ignore list along with its contents.
function isIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
})
}
function childrenIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return !!(item.gmatcher && item.gmatcher.match(path))
})
}
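The options handled above by `setopts()` and `setupIgnores()` correspond directly to glob's public API. Below is a minimal, illustrative sketch (glob 6.x); the pattern and paths are placeholders, not taken from this repository.

```js
// Illustrative only: how the ignore/mark/nodir/nocase options parsed above
// behave from the caller's side.
var glob = require('glob')

glob('**/*.js', {
  ignore: ['node_modules/**'], // compiled by setupIgnores/ignoreMap into dot:true matchers
  mark: true,                  // mark() appends '/' to directories
  nodir: true,                 // implies mark, then finish() filters out trailing-'/' entries
  nocase: true                 // finish() sorts with alphasorti instead of alphasort
}, function (err, files) {
  if (err) throw err
  console.log(files)
})
```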
{
"_from": "glob@^6.0.4",
"_id": "glob@6.0.4",
"_inBundle": false,
"_integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=",
"_location": "/asar/glob",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob@^6.0.4",
"name": "glob",
"escapedName": "glob",
"rawSpec": "^6.0.4",
"saveSpec": null,
"fetchSpec": "^6.0.4"
},
"_requiredBy": [
"/asar"
],
"_resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz",
"_shasum": "0f08860f6a155127b2fadd4f9ce24b1aab6e4d22",
"_spec": "glob@^6.0.4",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/asar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/node-glob/issues"
},
"bundleDependencies": false,
"dependencies": {
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "2 || 3",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"deprecated": false,
"description": "a little globber",
"devDependencies": {
"mkdirp": "0",
"rimraf": "^2.2.8",
"tap": "^5.0.0",
"tick": "0.0.6"
},
"engines": {
"node": "*"
},
"files": [
"glob.js",
"sync.js",
"common.js"
],
"homepage": "https://github.com/isaacs/node-glob#readme",
"license": "ISC",
"main": "glob.js",
"name": "glob",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-glob.git"
},
"scripts": {
"bench": "bash benchmark.sh",
"benchclean": "node benchclean.js",
"prepublish": "npm run benchclean",
"prof": "bash prof.sh && cat profile.txt",
"profclean": "rm -f v8.log profile.txt",
"test": "tap test/*.js --cov",
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
},
"version": "6.0.4"
}
{
"_from": "asar@^0.14.0",
"_id": "asar@0.14.2",
"_inBundle": false,
"_integrity": "sha512-eKo4ywQDq9dC/0Pu6UJsX4PxNi5ZlC4/NQ1JORUW4xkMRrEWpoLPpkngmQ6K7ZkioVjE2ZafLMmHPAQKMO0BdA==",
"_location": "/asar",
"_phantomChildren": {
"inflight": "1.0.6",
"inherits": "2.0.3",
"minimatch": "3.0.4",
"once": "1.4.0",
"path-is-absolute": "1.0.1"
},
"_requested": {
"type": "range",
"registry": true,
"raw": "asar@^0.14.0",
"name": "asar",
"escapedName": "asar",
"rawSpec": "^0.14.0",
"saveSpec": null,
"fetchSpec": "^0.14.0"
},
"_requiredBy": [
"/electron-packager"
],
"_resolved": "https://registry.npmjs.org/asar/-/asar-0.14.2.tgz",
"_shasum": "9ec5ec8dcb1904de288fd5ff94f2a8ed4014b094",
"_spec": "asar@^0.14.0",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/electron-packager",
"bin": {
"asar": "./bin/asar.js"
},
"bugs": {
"url": "https://github.com/electron/asar/issues"
},
"bundleDependencies": false,
"dependencies": {
"chromium-pickle-js": "^0.2.0",
"commander": "^2.9.0",
"cuint": "^0.2.1",
"glob": "^6.0.4",
"minimatch": "^3.0.3",
"mkdirp": "^0.5.0",
"mksnapshot": "^0.3.0",
"tmp": "0.0.28"
},
"deprecated": false,
"description": "Creating Electron app packages",
"devDependencies": {
"electron": "^1.6.2",
"electron-mocha": "^3.4.0",
"lodash": "^4.2.1",
"mocha": "^2.0.1",
"rimraf": "^2.5.1",
"standard": "^8.6.0",
"xvfb-maybe": "^0.1.3"
},
"engines": {
"node": ">=4.6"
},
"homepage": "https://github.com/electron/asar",
"license": "MIT",
"main": "./lib/asar.js",
"name": "asar",
"repository": {
"type": "git",
"url": "git+https://github.com/electron/asar.git"
},
"scripts": {
"lint": "standard",
"test": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec && npm run lint"
},
"standard": {
"env": {
"mocha": true
}
},
"version": "0.14.2"
}
name: asar
version: git
summary: Manipulate asar archive files
description: |
Asar is a simple extensive archive format. It works like tar in that it
concatenates all files together without compression, while still
providing random access support.
confinement: classic
parts:
asar:
plugin: nodejs
source: .
apps:
asar:
command: lib/node_modules/asar/bin/asar.js
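The description above summarizes what the asar format does; the sketch below shows one way to pack and unpack an archive with the bundled library's programmatic API (asar 0.14.x). The paths are placeholders, and the callback is assumed to follow Node's `(err)` convention.

```js
// A minimal sketch, not taken from the asar docs: pack a directory, then extract it back.
var asar = require('asar')

// Concatenate everything under ./app into a single archive (no compression).
asar.createPackage('./app', './app.asar', function (err) {
  if (err) throw err
  // Random-access extraction of the whole archive.
  asar.extractAll('./app.asar', './app-unpacked')
})
```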
Copyright (c) 2014 Jon Schlinkert, contributors.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
# author-regex [![NPM version](https://badge.fury.io/js/author-regex.svg)](http://badge.fury.io/js/author-regex)
> Regular expression for parsing an `author` string into an object following npm conventions.
This is the regex used by [parse-authors](https://github.com/jonschlinkert/parse-authors).
**Related**
- [parse-author](https://github.com/jonschlinkert/parse-author)
- [parse-authors](https://github.com/jonschlinkert/parse-authors)
## Install
#### Install with [npm](https://npmjs.org)
```bash
npm i author-regex --save
```
#### Install with [bower](https://github.com/bower/bower)
```bash
bower install author-regex --save
```
## Tests
Run
```bash
npm test
```
## Usage
```js
var re = require('author-regex');
function authors(str) {
  return re().exec(str);
}

console.log(authors('Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)'));
```
Returns:
```js
[ 'Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)',
'Jon Schlinkert',
'foo@bar.com',
'https://github.com/jonschlinkert',
index: 0,
input: 'Jon Schlinkert <foo@bar.com> (https://github.com/jonschlinkert)' ]
```
## Author
**Jon Schlinkert**
+ [github/jonschlinkert](https://github.com/jonschlinkert)
+ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
## License
Copyright (c) 2014 Jon Schlinkert, contributors.
Released under the MIT license
***
_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on September 29, 2014._
\ No newline at end of file
/*!
* author-regex <https://github.com/jonschlinkert/author-regex>
*
* Copyright (c) 2014, 2017, Jon Schlinkert.
* Released under the MIT License.
*/
'use strict';
module.exports = function() {
return /^\s*([^<(]*?)\s*([<(]([^>)]*?)[>)])?\s*([<(]([^>)]*?)[>)])*\s*$/;
};
{
"_from": "author-regex@^1.0.0",
"_id": "author-regex@1.0.0",
"_inBundle": false,
"_integrity": "sha1-0IiFvmubv5Q5/gh8dihyRfCoFFA=",
"_location": "/author-regex",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "author-regex@^1.0.0",
"name": "author-regex",
"escapedName": "author-regex",
"rawSpec": "^1.0.0",
"saveSpec": null,
"fetchSpec": "^1.0.0"
},
"_requiredBy": [
"/parse-author"
],
"_resolved": "https://registry.npmjs.org/author-regex/-/author-regex-1.0.0.tgz",
"_shasum": "d08885be6b9bbf9439fe087c76287245f0a81450",
"_spec": "author-regex@^1.0.0",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/parse-author",
"author": {
"name": "Jon Schlinkert",
"url": "https://github.com/jonschlinkert"
},
"bugs": {
"url": "https://github.com/jonschlinkert/author-regex/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Regular expression for parsing an `author` string into an object following npm conventions.",
"devDependencies": {
"gulp-format-md": "^0.1.11",
"mocha": "^3.2.0"
},
"engines": {
"node": ">=0.8"
},
"files": [
"index.js"
],
"homepage": "https://github.com/jonschlinkert/author-regex",
"keywords": [
"author",
"authors",
"exec",
"expression",
"extract",
"maintainer",
"maintainers",
"match",
"package",
"parse",
"person",
"pkg",
"re",
"regex",
"regexp",
"regular"
],
"license": "MIT",
"main": "index.js",
"name": "author-regex",
"repository": {
"type": "git",
"url": "git+https://github.com/jonschlinkert/author-regex.git"
},
"scripts": {
"test": "mocha"
},
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"lint": {
"reflinks": true
}
},
"version": "1.0.0"
}
The MIT License (MIT)
Copyright (c) 2014
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
base64-js
=========
`base64-js` does basic base64 encoding/decoding in pure JS.
[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js)
[![testling badge](https://ci.testling.com/beatgammit/base64-js.png)](https://ci.testling.com/beatgammit/base64-js)
Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data.
Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does.
## install
With [npm](https://npmjs.org) do:
`npm install base64-js`
## methods
`var base64 = require('base64-js')`
`base64` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, each of which takes a single argument.
* `byteLength` - Takes a base64 string and returns the length of the decoded byte array
* `toByteArray` - Takes a base64 string and returns a byte array
* `fromByteArray` - Takes a byte array and returns a base64 string
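For example, a small sketch of the three calls above (the bytes are arbitrary sample data):

```js
var base64 = require('base64-js')

var bytes = new Uint8Array([104, 105, 33])  // "hi!"
var str = base64.fromByteArray(bytes)       // 'aGkh'
console.log(base64.byteLength(str))         // 3
console.log(base64.toByteArray(str))        // Uint8Array [ 104, 105, 33 ]
```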
## license
MIT
(function(r){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=r()}else if(typeof define==="function"&&define.amd){define([],r)}else{var e;if(typeof window!=="undefined"){e=window}else if(typeof global!=="undefined"){e=global}else if(typeof self!=="undefined"){e=self}else{e=this}e.base64js=r()}})(function(){var r,e,t;return function r(e,t,n){function o(i,a){if(!t[i]){if(!e[i]){var u=typeof require=="function"&&require;if(!a&&u)return u(i,!0);if(f)return f(i,!0);var d=new Error("Cannot find module '"+i+"'");throw d.code="MODULE_NOT_FOUND",d}var c=t[i]={exports:{}};e[i][0].call(c.exports,function(r){var t=e[i][1][r];return o(t?t:r)},c,c.exports,r,e,t,n)}return t[i].exports}var f=typeof require=="function"&&require;for(var i=0;i<n.length;i++)o(n[i]);return o}({"/":[function(r,e,t){"use strict";t.byteLength=c;t.toByteArray=v;t.fromByteArray=s;var n=[];var o=[];var f=typeof Uint8Array!=="undefined"?Uint8Array:Array;var i="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";for(var a=0,u=i.length;a<u;++a){n[a]=i[a];o[i.charCodeAt(a)]=a}o["-".charCodeAt(0)]=62;o["_".charCodeAt(0)]=63;function d(r){var e=r.length;if(e%4>0){throw new Error("Invalid string. Length must be a multiple of 4")}return r[e-2]==="="?2:r[e-1]==="="?1:0}function c(r){return r.length*3/4-d(r)}function v(r){var e,t,n,i,a,u;var c=r.length;a=d(r);u=new f(c*3/4-a);n=a>0?c-4:c;var v=0;for(e=0,t=0;e<n;e+=4,t+=3){i=o[r.charCodeAt(e)]<<18|o[r.charCodeAt(e+1)]<<12|o[r.charCodeAt(e+2)]<<6|o[r.charCodeAt(e+3)];u[v++]=i>>16&255;u[v++]=i>>8&255;u[v++]=i&255}if(a===2){i=o[r.charCodeAt(e)]<<2|o[r.charCodeAt(e+1)]>>4;u[v++]=i&255}else if(a===1){i=o[r.charCodeAt(e)]<<10|o[r.charCodeAt(e+1)]<<4|o[r.charCodeAt(e+2)]>>2;u[v++]=i>>8&255;u[v++]=i&255}return u}function l(r){return n[r>>18&63]+n[r>>12&63]+n[r>>6&63]+n[r&63]}function h(r,e,t){var n;var o=[];for(var f=e;f<t;f+=3){n=(r[f]<<16)+(r[f+1]<<8)+r[f+2];o.push(l(n))}return o.join("")}function s(r){var e;var t=r.length;var o=t%3;var f="";var i=[];var a=16383;for(var u=0,d=t-o;u<d;u+=a){i.push(h(r,u,u+a>d?d:u+a))}if(o===1){e=r[t-1];f+=n[e>>2];f+=n[e<<4&63];f+="=="}else if(o===2){e=(r[t-2]<<8)+r[t-1];f+=n[e>>10];f+=n[e>>4&63];f+=n[e<<2&63];f+="="}i.push(f);return i.join("")}},{}]},{},[])("/")});
'use strict'
exports.byteLength = byteLength
exports.toByteArray = toByteArray
exports.fromByteArray = fromByteArray
var lookup = []
var revLookup = []
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
for (var i = 0, len = code.length; i < len; ++i) {
lookup[i] = code[i]
revLookup[code.charCodeAt(i)] = i
}
revLookup['-'.charCodeAt(0)] = 62
revLookup['_'.charCodeAt(0)] = 63
function placeHoldersCount (b64) {
var len = b64.length
if (len % 4 > 0) {
throw new Error('Invalid string. Length must be a multiple of 4')
}
// the number of equal signs (place holders)
// if there are two placeholders, then the two characters before them
// represent one byte
// if there is only one, then the three characters before it represent 2 bytes
// this is just a cheap hack to not do indexOf twice
return b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0
}
function byteLength (b64) {
// base64 is 4/3 + up to two characters of the original data
return b64.length * 3 / 4 - placeHoldersCount(b64)
}
function toByteArray (b64) {
var i, j, l, tmp, placeHolders, arr
var len = b64.length
placeHolders = placeHoldersCount(b64)
arr = new Arr(len * 3 / 4 - placeHolders)
// if there are placeholders, only get up to the last complete 4 chars
l = placeHolders > 0 ? len - 4 : len
var L = 0
for (i = 0, j = 0; i < l; i += 4, j += 3) {
tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)]
arr[L++] = (tmp >> 16) & 0xFF
arr[L++] = (tmp >> 8) & 0xFF
arr[L++] = tmp & 0xFF
}
if (placeHolders === 2) {
tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4)
arr[L++] = tmp & 0xFF
} else if (placeHolders === 1) {
tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2)
arr[L++] = (tmp >> 8) & 0xFF
arr[L++] = tmp & 0xFF
}
return arr
}
function tripletToBase64 (num) {
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
}
function encodeChunk (uint8, start, end) {
var tmp
var output = []
for (var i = start; i < end; i += 3) {
tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
output.push(tripletToBase64(tmp))
}
return output.join('')
}
function fromByteArray (uint8) {
var tmp
var len = uint8.length
var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
var output = ''
var parts = []
var maxChunkLength = 16383 // must be multiple of 3
// go through the array every three bytes, we'll deal with trailing stuff later
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
}
// pad the end with zeros, but make sure to not forget the extra bytes
if (extraBytes === 1) {
tmp = uint8[len - 1]
output += lookup[tmp >> 2]
output += lookup[(tmp << 4) & 0x3F]
output += '=='
} else if (extraBytes === 2) {
tmp = (uint8[len - 2] << 8) + (uint8[len - 1])
output += lookup[tmp >> 10]
output += lookup[(tmp >> 4) & 0x3F]
output += lookup[(tmp << 2) & 0x3F]
output += '='
}
parts.push(output)
return parts.join('')
}
{
"_from": "base64-js@1.2.0",
"_id": "base64-js@1.2.0",
"_inBundle": false,
"_integrity": "sha1-o5mS1yNYSBGYK+XikLtqU9hnAPE=",
"_location": "/base64-js",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "base64-js@1.2.0",
"name": "base64-js",
"escapedName": "base64-js",
"rawSpec": "1.2.0",
"saveSpec": null,
"fetchSpec": "1.2.0"
},
"_requiredBy": [
"/plist"
],
"_resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.2.0.tgz",
"_shasum": "a39992d723584811982be5e290bb6a53d86700f1",
"_spec": "base64-js@1.2.0",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/plist",
"author": {
"name": "T. Jameson Little",
"email": "t.jameson.little@gmail.com"
},
"bugs": {
"url": "https://github.com/beatgammit/base64-js/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Base64 encoding/decoding in pure JS",
"devDependencies": {
"benchmark": "^2.1.0",
"browserify": "^13.0.0",
"standard": "*",
"tape": "4.x",
"uglify-js": "^2.6.2"
},
"files": [
"test",
"index.js",
"base64js.min.js"
],
"homepage": "https://github.com/beatgammit/base64-js",
"keywords": [
"base64"
],
"license": "MIT",
"main": "index.js",
"name": "base64-js",
"repository": {
"type": "git",
"url": "git://github.com/beatgammit/base64-js.git"
},
"scripts": {
"build": "browserify -s base64js -r ./ | uglifyjs -m > base64js.min.js",
"lint": "standard",
"test": "npm run lint && npm run unit",
"unit": "tape test/*.js"
},
"version": "1.2.0"
}
var test = require('tape')
var b64 = require('../')
test('convert big data to base64', function (t) {
var b64str, arr, i, length
var big = new Uint8Array(64 * 1024 * 1024)
for (i = 0, length = big.length; i < length; ++i) {
big[i] = i % 256
}
b64str = b64.fromByteArray(big)
arr = b64.toByteArray(b64str)
t.ok(equal(arr, big))
t.end()
})
function equal (a, b) {
var i
var length = a.length
if (length !== b.length) return false
for (i = 0; i < length; ++i) {
if (a[i] !== b[i]) return false
}
return true
}
var test = require('tape')
var b64 = require('../')
var checks = [
'a',
'aa',
'aaa',
'hi',
'hi!',
'hi!!',
'sup',
'sup?',
'sup?!'
]
test('convert to base64 and back', function (t) {
t.plan(checks.length * 2)
for (var i = 0; i < checks.length; i++) {
var check = checks[i]
var b64Str, arr, str
b64Str = b64.fromByteArray(map(check, function (char) { return char.charCodeAt(0) }))
arr = b64.toByteArray(b64Str)
str = map(arr, function (byte) { return String.fromCharCode(byte) }).join('')
t.equal(check, str, 'Checked ' + check)
t.equal(b64.byteLength(b64Str), arr.length, 'Checked length for ' + check)
}
})
function map (arr, callback) {
var res = []
var kValue, mappedValue
for (var k = 0, len = arr.length; k < len; k++) {
if ((typeof arr === 'string' && !!arr.charAt(k))) {
kValue = arr.charAt(k)
mappedValue = callback(kValue, k, arr)
res[k] = mappedValue
} else if (typeof arr !== 'string' && k in arr) {
kValue = arr[k]
mappedValue = callback(kValue, k, arr)
res[k] = mappedValue
}
}
return res
}
var test = require('tape')
var b64 = require('../')
test('decode url-safe style base64 strings', function (t) {
var expected = [0xff, 0xff, 0xbe, 0xff, 0xef, 0xbf, 0xfb, 0xef, 0xff]
var actual = b64.toByteArray('//++/++/++//')
for (var i = 0; i < actual.length; i++) {
t.equal(actual[i], expected[i])
}
actual = b64.toByteArray('__--_--_--__')
for (i = 0; i < actual.length; i++) {
t.equal(actual[i], expected[i])
}
t.end()
})
language: node_js
node_js:
- 0.4
- 0.6
binary
======
Unpack multibyte binary values from buffers and streams.
You can specify the endianness and signedness of the fields to be unpacked too.
This module is a cleaner and more complete version of
[bufferlist](https://github.com/substack/node-bufferlist)'s binary module that
runs on pre-allocated buffers instead of a linked list.
[![build status](https://secure.travis-ci.org/substack/node-binary.png)](http://travis-ci.org/substack/node-binary)
examples
========
stream.js
---------
``` js
var binary = require('binary');
var ws = binary()
.word32lu('x')
.word16bs('y')
.word16bu('z')
.tap(function (vars) {
console.dir(vars);
})
;
process.stdin.pipe(ws);
process.stdin.resume();
```
output:
```
$ node examples/stream.js
abcdefgh
{ x: 1684234849, y: 25958, z: 26472 }
^D
```
parse.js
--------
``` js
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
var binary = require('binary');
var vars = binary.parse(buf)
.word16ls('ab')
.word32bu('cf')
.word8('x')
.vars
;
console.dir(vars);
```
output:
```
{ ab: 25185, cf: 1667523942, x: 0 }
```
methods
=======
`var binary = require('binary')`
var b = binary()
----------------
Return a new writable stream `b` that has the chainable methods documented below
for buffering binary input.
binary.parse(buf)
-----------------
Parse a static buffer in one pass. Returns a chainable interface with the
methods below plus a `vars` field to get at the variable stash as the last item
in a chain.
In parse mode, methods will set their keys to `null` if the buffer isn't big
enough, except `buffer()` and `scan()`, which read up to the end of the buffer
and stop.
b.word{8,16,32,64}{l,b}{e,u,s}(key)
-----------------------------------
Parse bytes in the buffer or stream given:
* number of bits
* endianness ( l : little, b : big ),
* signedness ( u and e : unsigned, s : signed )
These functions won't start parsing until all previous parser functions have run
and the data is available.
The result of the parse goes into the variable stash at `key`.
If `key` has dots (`.`s), it refers to a nested address. If parent container
values don't exist they will be created automatically, so for instance you can
assign into `dst.addr` and `dst.port` and the `dst` key in the variable stash
will be `{ addr : x, port : y }` afterwards.
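For example, a small sketch of parsing into a nested `dst` object (the bytes are arbitrary):

``` js
var binary = require('binary');

var vars = binary.parse(new Buffer([ 1, 0, 80 ]))
    .word8('dst.addr')
    .word16bu('dst.port')
    .vars
;
console.dir(vars);
// { dst: { addr: 1, port: 80 } }
```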
b.buffer(key, size)
-------------------
Take `size` bytes directly off the buffer stream, putting the resulting buffer
slice in the variable stash at `key`. If `size` is a string, use the value at
`vars[size]`. The key follows the same dotted address rules as the word
functions.
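For example, a sketch of reading a one-byte length followed by that many bytes of payload:

``` js
var binary = require('binary');

var vars = binary.parse(new Buffer([ 3, 104, 101, 121, 33 ]))
    .word8('len')
    .buffer('body', 'len')  // size taken from vars['len']
    .vars
;
console.log(vars.body.toString());
// 'hey'
```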
b.scan(key, buffer)
-------------------
Search for `buffer` in the stream and store all the intervening data in the
stash at `key`, excluding the search buffer. If `buffer` is passed as a string,
it will be converted into a Buffer internally.
For example, to read in a line you can just do:
``` js
var b = binary()
.scan('line', new Buffer('\r\n'))
.tap(function (vars) {
console.log(vars.line)
})
;
stream.pipe(b);
```
b.tap(cb)
---------
The callback `cb` is provided with the variable stash from all the previous
actions once they've all finished.
You can nest additional actions onto `this` inside the callback.
b.into(key, cb)
---------------
Like `.tap()`, except all nested actions will assign into a `key` in the `vars`
stash.
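For example, a sketch that namespaces a few reads under a `header` key:

``` js
var binary = require('binary');

var vars = binary.parse(new Buffer([ 1, 2, 3 ]))
    .into('header', function () {
        this.word8('version');
        this.word16bu('length');
    })
    .vars
;
console.dir(vars);
// { header: { version: 1, length: 515 } }
```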
b.loop(cb)
----------
Loop, each time calling `cb(end, vars)` for function `end` and the variable
stash with `this` set to a new chain for nested parsing. The loop terminates
once `end` is called.
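For example, a sketch (assumed from the description above, not taken from the module's tests) that reads bytes until a zero terminator and then calls `end`:

``` js
var binary = require('binary');

var bytes = [];
binary.parse(new Buffer([ 5, 6, 7, 0, 9 ]))
    .loop(function (end, vars) {
        this.word8('byte');
        if (vars.byte === 0) end();
        else bytes.push(vars.byte);
    })
    .tap(function () {
        console.dir(bytes);
        // [ 5, 6, 7 ]
    })
;
```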
b.flush()
---------
Clear the variable stash entirely.
installation
============
To install with [npm](http://github.com/isaacs/npm):
```
npm install binary
```
notes
=====
The word64 functions will only return approximations, since JavaScript uses IEEE 754
floating point for all number types. Mind the loss of precision.
license
=======
MIT
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
var binary = require('binary');
binary(buf)
.word16ls('ab')
.word32bu('cf')
.word8('x')
.tap(function (vars) {
console.dir(vars);
})
;
var buf = new Buffer([ 97, 98, 99, 100, 101, 102, 0 ]);
var binary = require('binary');
var vars = binary.parse(buf)
.word16ls('ab')
.word32bu('cf')
.word8('x')
.vars
;
console.dir(vars);
var binary = require('binary');
var ws = binary()
.word32lu('x')
.word16bs('y')
.word16bu('z')
.tap(function (vars) {
console.dir(vars);
})
;
process.stdin.pipe(ws);
process.stdin.resume();
module.exports = function (store) {
function getset (name, value) {
var node = vars.store;
var keys = name.split('.');
keys.slice(0,-1).forEach(function (k) {
if (node[k] === undefined) node[k] = {};
node = node[k]
});
var key = keys[keys.length - 1];
if (arguments.length == 1) {
return node[key];
}
else {
return node[key] = value;
}
}
var vars = {
get : function (name) {
return getset(name);
},
set : function (name, value) {
return getset(name, value);
},
store : store || {},
};
return vars;
};
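This store backs the dotted-key addressing used by the word/buffer/scan methods: intermediate objects are created on demand. A small sketch of using it directly (the `./lib/vars.js` require path is an assumption about where this file sits in the package):

```js
var Vars = require('./lib/vars.js');

var vars = Vars();
vars.set('dst.addr', '10.0.0.1');  // creates { dst: {} } on the way down
vars.set('dst.port', 80);

console.log(vars.get('dst.port')); // 80
console.dir(vars.store);           // { dst: { addr: '10.0.0.1', port: 80 } }
```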
{
"_from": "binary@^0.3.0",
"_id": "binary@0.3.0",
"_inBundle": false,
"_integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=",
"_location": "/binary",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "binary@^0.3.0",
"name": "binary",
"escapedName": "binary",
"rawSpec": "^0.3.0",
"saveSpec": null,
"fetchSpec": "^0.3.0"
},
"_requiredBy": [
"/decompress-zip"
],
"_resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz",
"_shasum": "9f60553bc5ce8c3386f3b553cff47462adecaa79",
"_spec": "binary@^0.3.0",
"_where": "/home/lab1-24/Toua/node-js-60-2/week05/windowapp/node_modules/decompress-zip",
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"bugs": {
"url": "https://github.com/substack/node-binary/issues"
},
"bundleDependencies": false,
"dependencies": {
"buffers": "~0.1.1",
"chainsaw": "~0.1.0"
},
"deprecated": false,
"description": "Unpack multibyte binary values from buffers",
"devDependencies": {
"seq": "~0.2.5",
"tap": "~0.2.4"
},
"engine": {
"node": ">=0.4.0"
},
"engines": {
"node": "*"
},
"homepage": "https://github.com/substack/node-binary#readme",
"keywords": [
"binary",
"decode",
"endian",
"unpack",
"signed",
"unsigned"
],
"license": "MIT",
"main": "./index.js",
"name": "binary",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/substack/node-binary.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "0.3.0"
}
var binary = require('../');
var test = require('tap').test;
var EventEmitter = require('events').EventEmitter;
test('deferred', function (t) {
t.plan(1);
var em = new EventEmitter;
binary.stream(em)
.word8('a')
.word16be('bc')
.tap(function (vars) {
t.same(vars, { a : 97, bc : 25187 });
})
;
setTimeout(function () {
em.emit('data', new Buffer([ 97, 98, 99 ]));
}, 10);
});
var binary = require('../');
var test = require('tap').test;
test('dots', function (t) {
t.plan(1);
binary.parse(new Buffer([ 97, 98, 99, 100, 101, 102 ]))
.word8('a')
.word16be('b.x')
.word16be('b.y')
.word8('b.z')
.tap(function (vars) {
t.same(vars, {
a : 97,
b : {
x : 256 * 98 + 99,
y : 256 * 100 + 101,
z : 102
},
});
})
;
});