Remove frozen neo4j lib, add neo4j database setup for travis-ci
parent c894694c7f
commit 633ae4067a
@@ -7,3 +7,4 @@ before_script:
   - "mysql -e 'create database myapp_test;'"
   - "psql -c 'create database myapp_test;' -U postgres"
   - mongo mydb_test --eval 'db.addUser("travis", "test");'
+  - which neo4j && neo4j start && sleep 5
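The added `before_script` line assumes a `neo4j` binary is already on the CI machine's PATH and simply sleeps a few seconds while the server boots. A more robust, purely hypothetical alternative is to poll the REST root until it answers; a minimal Node sketch, assuming the default `http://localhost:7474` endpoint:

    // wait-for-neo4j.js -- hypothetical CI helper, not part of this commit.
    // Polls the Neo4j REST API until it responds or the retries run out.
    var http = require('http');

    function waitForNeo4j(retries, callback) {
        http.get('http://localhost:7474/db/data/', function (res) {
            callback(null, res.statusCode);          // server is up
        }).on('error', function (err) {
            if (retries <= 0) return callback(err);  // give up: fail the build
            setTimeout(function () {
                waitForNeo4j(retries - 1, callback);
            }, 1000);
        });
    }

    waitForNeo4j(10, function (err) {
        if (err) throw err;
        console.log('Neo4j is up');
    });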
@@ -1,4 +0,0 @@
.DS_Store

db/
support/
@@ -1 +0,0 @@
test
@@ -1,48 +0,0 @@
# Changelog: Neo4j driver for Node

## Version 0.2.1 – September 2, 2011

- Updated `request` dependency. ([@aseemk][])
- Added Cypher querying and tests. ([@aseemk][])
- Better error handling. ([@aseemk][])

## Version 0.2.0 – July 14, 2011

- Massive overhaul of the entire library:
  - Rewrote complete library using [Streamline.js][] ([@aseemk][])
  - Massively extended test suite ([@aseemk][])
  - Implemented `Node.getRelationships` method ([@aseemk][])
  - Implemented `Node.getRelationshipNodes` method ([@aseemk][])
  - Simplified error handling ([@gasi][])
  - Split monolithic file into separate files according to classes ([@aseemk][])
  - Implemented `Node.path` method and `Path` class ([@gasi][])
  - Added `Node.createRelationshipFrom` method ([@gasi][])
  - Fixed numerous bugs ([@aseemk][] & [@gasi][])

## Version 0.1.0 – April 20, 2011

- Changed name from _Neo4j REST client for Node.js_ to _Neo4j driver for Node_.
- Rewrote complete library to feature an object-oriented structure.

## Version 0.0.3 – March 26, 2011

- Updated README.

## Version 0.0.2 – March 26, 2011

- Renamed top-level constructor to `Client`.
- Added top-level `serialize` and `deserialize` functions.
- Added `autoMarshal` argument to `Client` for storing hierarchical data on
  nodes and relationships. Internally uses new `serialize` and `deserialize`
  functions.
- Changed position of Client's `basePath` argument (now last).
- Updated test.

## Version 0.0.1 – March 21, 2011

- Initial release.


[Streamline.js]: https://github.com/Sage/streamlinejs
[@aseemk]: https://github.com/aseemk
[@gasi]: https://github.com/gasi
@ -1,202 +0,0 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@@ -1,64 +0,0 @@
# Neo4j driver for Node

This driver lets you access the [Neo4j graph database][neo4j] from Node.


## Installation

    npm install neo4j


## Development

    git clone git@github.com:gasi/node-neo4j.git neo4j
    cd neo4j
    npm link

You'll also need a local Neo4j database instance for the tests:

    curl http://dist.neo4j.org/neo4j-community-1.3-unix.tar.gz -o neo4j-community-1.3-unix.tar.gz
    tar -zxvf neo4j-community-1.3-unix.tar.gz
    mv neo4j-community-1.3 db

If you're new to Neo4j, read the [Getting Started][neo4j-getting-started] page.
Start the server:

    db/bin/neo4j start

Stop the server:

    db/bin/neo4j stop

To run the tests:

    npm test


## Usage

    var neo4j = require('neo4j');
    var db = new neo4j.GraphDatabase('http://localhost:7474');

    function print(err, res) {
        console.log(err || res);
    }

    var node = db.createNode({hello: 'world'});
    node.save(print);

## License

This library is licensed under the [Apache License, Version 2.0][license].


## Reporting Issues

If you encounter any bugs or other issues, please file them in the
[issue tracker][issue-tracker].


[neo4j]: http://neo4j.org/
[neo-rest-api]: http://components.neo4j.org/neo4j-server/snapshot/rest.html
[neo4j-getting-started]: http://wiki.neo4j.org/content/Getting_Started_With_Neo4j_Server
[issue-tracker]: https://github.com/gasi/node-neo4j/issues
[license]: http://www.apache.org/licenses/LICENSE-2.0.html
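The removed README's usage snippet only creates and saves a single node. A slightly longer sketch in the same callback style (a hedged example, assuming the library's Streamline `_` parameters surface as standard Node callbacks and that both nodes are saved before they are related, since relationships require existing nodes):

    var neo4j = require('neo4j');
    var db = new neo4j.GraphDatabase('http://localhost:7474');

    var alice = db.createNode({name: 'Alice'});
    var bob = db.createNode({name: 'Bob'});

    // save() persists a node; createRelationshipTo() then links two saved nodes.
    alice.save(function (err) {
        if (err) throw err;
        bob.save(function (err) {
            if (err) throw err;
            alice.createRelationshipTo(bob, 'knows', {since: 2011}, function (err, rel) {
                if (err) throw err;
                console.log('created', rel);
            });
        });
    });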
@@ -1,250 +0,0 @@
# TODO many of these functions take a callback but, in some cases, call the
# callback immediately (e.g. if a value is cached). we should probably make
# sure to always call callbacks asynchronously, to prevent race conditions.
# this can be done in Streamline syntax by adding one line before cases where
# we're returning immediately: process.nextTick _

status = require 'http-status'
request = require 'request'

applyPatch = (method, auth) ->
    return if applyPatch.patched[method]
    applyPatch.patched[method] = true
    __m = request[method]
    request[method] = ->
        args = [].slice.call(arguments)
        url = args[0]
        # console.log(args)
        if typeof url == 'string' && !url.match(/https?:\/\/[^\/]*@/)
            args[0] = url.replace(/http:\/\//, 'http://' + auth + '@')
        # normalize opts
        if url && url.url
            url.uri = url.url
            delete url.url
        # handle auth in uri
        if url && url.uri && url.uri.match && !url.uri.match(/https?:\/\/[^\/]*@/)
            args[0].uri = url.uri.replace(/http:\/\//, 'http://' + auth + '@')
        __m.apply(request, args)

applyPatch.patched = {}

util = require './util_'
adjustError = util.adjustError

Relationship = require './Relationship_'
Node = require './Node_'

module.exports = class GraphDatabase
    constructor: (url) ->

        @url = url
        @auth = require('url').parse(url).auth

        applyPatch('get', @auth)
        applyPatch('post', @auth)
        applyPatch('put', @auth)
        applyPatch('del', @auth)
        applyPatch('head', @auth)

        # Cache
        @_root = null
        @_services = null

    # Database
    _purgeCache: ->
        @_root = null
        @_services = null

    _getRoot: (_) ->
        if @_root?
            return @_root

        try
            response = request.get @url, _

            if response.statusCode isnt status.OK
                throw response

            @_root = JSON.parse response.body
            return @_root

        catch error
            throw adjustError error

    getServices: (_) ->
        if @_services?
            return @_services

        try
            root = @_getRoot _
            response = request.get root.data, _

            if response.statusCode isnt status.OK
                throw response

            @_services = JSON.parse response.body
            return @_services

        catch error
            throw adjustError error

    # Nodes
    createNode: (data) ->
        data = data || {}
        node = new Node this,
            data: data
        return node

    getNode: (url, _) ->
        try
            response = request.get url, _

            if response.statusCode isnt status.OK

                # Node not found
                if response.statusCode is status.NOT_FOUND
                    throw new Error "No node at #{url}"

                throw response

            node = new Node this, JSON.parse response.body
            return node

        catch error
            throw adjustError error

    getIndexedNode: (index, property, value, _) ->
        try
            nodes = @getIndexedNodes index, property, value, _

            node = null
            if nodes and nodes.length > 0
                node = nodes[0]
            return node

        catch error
            throw adjustError error

    getIndexedNodes: (index, property, value, _) ->
        try
            services = @getServices _

            key = encodeURIComponent property
            val = encodeURIComponent value
            url = "#{services.node_index}/#{index}/#{key}/#{val}"

            response = request.get url, _

            if response.statusCode isnt status.OK
                # Database error
                throw response

            # Success
            nodeArray = JSON.parse response.body
            nodes = nodeArray.map (node) =>
                new Node this, node
            return nodes

        catch error
            throw adjustError error

    getNodeById: (id, _) ->
        try
            services = @getServices _
            url = "#{services.node}/#{id}"
            node = @getNode url, _
            return node

        catch error
            throw adjustError error

    # Relationships
    createRelationship: (startNode, endNode, type, _) ->
        # TODO: Implement

    getRelationship: (url, _) ->
        try
            response = request.get url, _

            if response.statusCode isnt status.OK
                # TODO: Handle 404
                throw response

            data = JSON.parse response.body

            # Construct relationship
            relationship = new Relationship this, data

            return relationship

        catch error
            throw adjustError error

    getRelationshipById: (id, _) ->
        services = @getServices _
        # FIXME: Neo4j doesn't expose the path to relationships
        relationshipURL = services.node.replace('node', 'relationship')
        url = "#{relationshipURL}/#{id}"
        @getRelationship url, _

    # wrapper around the Cypher plugin, which comes bundled w/ Neo4j.
    # pass in the Cypher query as a string (can be multi-line).
    # http://docs.neo4j.org/chunked/stable/cypher-query-lang.html
    # returns an array of "rows" (matches), where each row is a map from
    # variable name (as given in the passed in query) to value. any values
    # that represent nodes or relationships are transformed to instances.
    query: (_, query) ->
        try
            services = @getServices _
            endpoint = services.extensions?.CypherPlugin?['execute_query']
            if not endpoint
                throw new Error 'Cypher plugin not installed'

            response = request.post
                uri: endpoint
                json: {query}
            , _

            if response.statusCode isnt status.OK
                # Database error
                throw response

            # Success: build result maps, and transform nodes/relationships
            body = response.body    # JSON already parsed by request
            columns = body.columns
            results = for row in body.data
                map = {}
                for value, i in row
                    map[columns[i]] =
                        if value and typeof value is 'object' and value.self
                            if value.type then new Relationship this, value
                            else new Node this, value
                        else
                            value
                map
            return results

        catch error
            throw adjustError error

    # executes a query against the given node index. lucene syntax reference:
    # http://lucene.apache.org/java/3_1_0/queryparsersyntax.html
    queryNodeIndex: (index, query, _) ->
        try
            services = @getServices _
            url = "#{services.node_index}/#{index}?query=#{encodeURIComponent query}"

            response = request.get url, _

            if response.statusCode isnt status.OK
                # Database error
                throw response

            # Success
            nodeArray = JSON.parse response.body
            nodes = nodeArray.map (node) =>
                new Node this, node
            return nodes

        catch error
            throw adjustError error
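The `query` method above wraps Neo4j's bundled Cypher plugin and converts each result row into driver instances. A brief hedged usage sketch from plain JavaScript (note the callback comes first, matching the `(_, query)` signature; the Cypher text itself is only an example):

    var neo4j = require('neo4j');
    var db = new neo4j.GraphDatabase('http://localhost:7474');

    // Each result row is a map from the variable names used in the query
    // (here `n`) to Node/Relationship instances or plain values.
    db.query(function (err, results) {
        if (err) throw err;
        results.forEach(function (row) {
            console.log(row.n.data);    // properties of each matched node
        });
    }, 'start n = node(0) return n');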
@ -1,311 +0,0 @@
|
|||
status = require 'http-status'
|
||||
request = require 'request'
|
||||
|
||||
util = require './util_'
|
||||
adjustError = util.adjustError
|
||||
|
||||
PropertyContainer = require './PropertyContainer_'
|
||||
Relationship = require './Relationship_'
|
||||
Path = require './Path_'
|
||||
|
||||
module.exports = class Node extends PropertyContainer
|
||||
constructor: (db, data) ->
|
||||
super db, data
|
||||
|
||||
toString: ->
|
||||
"node @#{@id}"
|
||||
|
||||
save: (_) ->
|
||||
try
|
||||
# TODO: check for actual modification
|
||||
if @exists
|
||||
response = request.put
|
||||
uri: "#{@self}/properties"
|
||||
json: @data
|
||||
, _
|
||||
|
||||
if response.statusCode isnt status.NO_CONTENT
|
||||
# database error
|
||||
# note that JSON has already been parsed by request.
|
||||
message = response.body?.message
|
||||
switch response.statusCode
|
||||
when status.BAD_REQUEST then message or= 'Invalid data sent'
|
||||
when status.NOT_FOUND then message or= 'Node not found'
|
||||
throw new Error message
|
||||
else
|
||||
services = @db.getServices _
|
||||
|
||||
response = request.post
|
||||
uri: services.node
|
||||
json: @data
|
||||
, _
|
||||
|
||||
if response.statusCode isnt status.CREATED
|
||||
# database error
|
||||
# note that JSON has already been parsed by request.
|
||||
message = response.body?.message or 'Invalid data sent'
|
||||
throw new Error message
|
||||
|
||||
# only update our copy of the data when it is POSTed.
|
||||
# note that JSON has already been parsed by request.
|
||||
@_data = response.body
|
||||
|
||||
# explicitly not returning any value; making this a "void" method.
|
||||
return
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
# throws an error if this node has any relationships on it, unless force
|
||||
# is true, in which case the relationships are also deleted.
|
||||
delete: (_, force=false) ->
|
||||
if not @exists
|
||||
return
|
||||
|
||||
try
|
||||
# Does this node have any relationships on it?
|
||||
relationships = @all null, _
|
||||
|
||||
# If so, and it's not expected, prevent mistakes!
|
||||
if relationships.length and not force
|
||||
throw new Error "Could not delete #{@}; still has relationships."
|
||||
|
||||
# Otherwise, if there are any, delete the relationships
|
||||
# TODO parallelize using Streamline
|
||||
for relationship in relationships
|
||||
relationship.delete _
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
# *Then* delete the node
|
||||
# XXX need to explicitly relay arguments to super since streamline
|
||||
# needs to see the underscore parameter currently.
|
||||
super _
|
||||
|
||||
# Alias
|
||||
del: @::delete
|
||||
|
||||
createRelationshipTo: (otherNode, type, data, _) ->
|
||||
@_createRelationship this, otherNode, type, data, _
|
||||
|
||||
createRelationshipFrom: (otherNode, type, data, _) ->
|
||||
@_createRelationship otherNode, this, type, data, _
|
||||
|
||||
_createRelationship: (from, to, type, data, _) ->
|
||||
try
|
||||
# ensure this node exists
|
||||
# ensure otherNode exists
|
||||
# create relationship
|
||||
|
||||
# XXX Can we really always assume `from` is loaded?
|
||||
createRelationshipURL = from._data['create_relationship']
|
||||
otherNodeURL = to.self
|
||||
|
||||
if createRelationshipURL? and otherNodeURL
|
||||
response = request.post
|
||||
url: createRelationshipURL
|
||||
json:
|
||||
to: otherNodeURL
|
||||
data: data
|
||||
type: type
|
||||
, _
|
||||
|
||||
if response.statusCode isnt status.CREATED
|
||||
# database error
|
||||
message = ''
|
||||
switch response.statusCode
|
||||
when status.BAD_REQUEST
|
||||
# note that JSON has already been parsed by request.
|
||||
message = response.body?.message or
|
||||
response.body?.exception or
|
||||
"Invalid createRelationship: #{from.id} #{type} #{to.id} w/ data: #{JSON.stringify data}"
|
||||
when status.CONFLICT
|
||||
message = '"to" node, or the node specified by the URI not found'
|
||||
throw new Error message
|
||||
|
||||
# success
|
||||
# note that JSON has already been parsed by request.
|
||||
relationship = new Relationship @db, response.body, from, to
|
||||
return relationship
|
||||
else
|
||||
throw new Error 'Failed to create relationship'
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
# TODO support passing in no type, e.g. for all types?
|
||||
# TODO to be consistent with the REST and Java APIs, this returns an array
|
||||
# of all returned relationships. it would certainly be more user-friendly
|
||||
# though if it returned a dictionary of relationships mapped by type, no?
|
||||
# XXX TODO this takes direction and type as separate parameters, while the
|
||||
# getRelationshipNodes() method combines both as an object. inconsistent?
|
||||
# unfortunately, the REST API is also inconsistent like this...
|
||||
_getRelationships: (direction, type, _) ->
|
||||
# Method overload: No type specified
|
||||
# XXX can't support method overloading right now, because Streamline
|
||||
# doesn't allow "setting" the callback parameter like this requires.
|
||||
#if typeof type is 'function'
|
||||
# _ = type
|
||||
# type = []
|
||||
|
||||
# Assume no types
|
||||
types = null
|
||||
|
||||
# support passing in multiple types, as array
|
||||
if type?
|
||||
types = if type instanceof Array then type else [type]
|
||||
|
||||
try
|
||||
if types?
|
||||
prefix = @_data["#{direction}_typed_relationships"]
|
||||
relationshipsURL = prefix?.replace '{-list|&|types}', types.join '&'
|
||||
else
|
||||
relationshipsURL = @_data["#{direction}_relationships"]
|
||||
|
||||
if not relationshipsURL
|
||||
throw new Error 'Couldn\'t find URL of relationships endpoint.'
|
||||
|
||||
resp = request.get relationshipsURL, _
|
||||
|
||||
if resp.statusCode is status.NOT_FOUND
|
||||
throw new Error 'Node not found.'
|
||||
|
||||
if resp.statusCode isnt status.OK
|
||||
throw new Error "Unrecognized response code: #{resp.statusCode}"
|
||||
|
||||
# success
|
||||
data = JSON.parse resp.body
|
||||
relationships = data.map (data) =>
|
||||
# other node will automatically get filled in by Relationship
|
||||
if @self is data.start
|
||||
new Relationship @db, data, this, null
|
||||
else
|
||||
new Relationship @db, data, null, this
|
||||
return relationships
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
# TODO to be consistent with the REST and Java APIs, this returns an array
|
||||
# of all returned relationships. it would certainly be more user-friendly
|
||||
# though if it returned a dictionary of relationships mapped by type, no?
|
||||
getRelationships: (type, _) ->
|
||||
@all type, _
|
||||
|
||||
outgoing: (type, _) ->
|
||||
@_getRelationships 'outgoing', type, _
|
||||
|
||||
incoming: (type, _) ->
|
||||
@_getRelationships 'incoming', type, _
|
||||
|
||||
all: (type, _) ->
|
||||
@_getRelationships 'all', type, _
|
||||
|
||||
path: (to, type, direction, maxDepth=1, algorithm='shortestPath', _) ->
|
||||
try
|
||||
pathURL = "#{@self}/path"
|
||||
data =
|
||||
to: to.self
|
||||
relationships:
|
||||
type: type
|
||||
direction: direction
|
||||
max_depth: maxDepth
|
||||
algorithm: algorithm
|
||||
|
||||
res = request.post
|
||||
url: pathURL
|
||||
json: data
|
||||
, _
|
||||
|
||||
if res.statusCode is status.NOT_FOUND
|
||||
# Empty path
|
||||
return null
|
||||
|
||||
if res.statusCode isnt status.OK
|
||||
throw new Error "Unrecognized response code: #{res.statusCode}"
|
||||
|
||||
# Parse result
|
||||
# Note that JSON has already been parsed by request.
|
||||
data = res.body
|
||||
|
||||
start = new Node this, {self: data.start}
|
||||
end = new Node this, {self: data.end}
|
||||
length = data.length
|
||||
nodes = data.nodes.map (url) =>
|
||||
new Node this, {self: url}
|
||||
relationships = data.relationships.map (url) =>
|
||||
new Relationship this, {self: url, type}
|
||||
|
||||
# Return path
|
||||
path = new Path start, end, length, nodes, relationships
|
||||
return path
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
# XXX this is actually a traverse, but in lieu of defining a non-trivial
|
||||
# traverse() method, exposing this for now for our simple use case.
|
||||
# the rels parameter can be:
|
||||
# - just a string, e.g. 'has' (both directions traversed)
|
||||
# - an array of strings, e.g. 'has' and 'wants' (both directions traversed)
|
||||
# - just an object, e.g. {type: 'has', direction: 'out'}
|
||||
# - an array of objects, e.g. [{type: 'has', direction: 'out'}, ...]
|
||||
getRelationshipNodes: (rels, _) ->
|
||||
|
||||
# support passing in both one rel and multiple rels, as array
|
||||
rels = if rels instanceof Array then rels else [rels]
|
||||
|
||||
try
|
||||
traverseURL = @_data['traverse']?.replace '{returnType}', 'node'
|
||||
|
||||
if not traverseURL
|
||||
throw new Error 'Traverse not available.'
|
||||
|
||||
resp = request.post
|
||||
url: traverseURL
|
||||
json:
|
||||
'max_depth': 1
|
||||
'relationships': rels.map (rel) ->
|
||||
if typeof rel is 'string' then {'type': rel} else rel
|
||||
, _
|
||||
|
||||
if resp.statusCode is 404
|
||||
throw new Error 'Node not found.'
|
||||
|
||||
if resp.statusCode isnt 200
|
||||
throw new Error "Unrecognized response code: #{resp.statusCode}"
|
||||
|
||||
# success
|
||||
# note that JSON has already been parsed by request.
|
||||
return resp.body.map (data) => new Node @db, data
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
||||
|
||||
index: (index, key, value, _) ->
|
||||
try
|
||||
# TODO
|
||||
if not @exists
|
||||
throw new Error 'Node must exists before indexing properties'
|
||||
|
||||
services = @db.getServices _
|
||||
|
||||
encodedKey = encodeURIComponent key
|
||||
encodedValue = encodeURIComponent value
|
||||
url = "#{services.node_index}/#{index}/#{encodedKey}/#{encodedValue}"
|
||||
|
||||
response = request.post
|
||||
url: url
|
||||
json: @self
|
||||
, _
|
||||
|
||||
if response.statusCode isnt status.CREATED
|
||||
# database error
|
||||
throw new Error response.statusCode
|
||||
|
||||
# success
|
||||
return
|
||||
|
||||
catch error
|
||||
throw adjustError error
|
|
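The removed Node class above documents a `getRelationshipNodes` helper whose `rels` argument can be a type string, an array of strings, or `{type, direction}` objects. A small hedged sketch of calling it from JavaScript (assuming `alice` is a saved node and the Streamline `_` parameter is used as a normal callback):

    // Fetch the nodes one 'knows' hop away from alice, outgoing only.
    alice.getRelationshipNodes({type: 'knows', direction: 'out'}, function (err, nodes) {
        if (err) throw err;
        nodes.forEach(function (n) {
            console.log(n.data);
        });
    });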
@@ -1,16 +0,0 @@
module.exports = class Path
    constructor: (start, end, length, nodes, relationships) ->
        @_start = start
        @_nodes = nodes
        @_length = length
        @_relationships = relationships
        @_end = end

        @getter 'start', -> @_start || null
        @getter 'end', -> @_end || null
        @getter 'length', -> @_length || 0
        @getter 'nodes', -> @_nodes || []
        @getter 'relationships', -> @_relationships || []

    getter: @__defineGetter__
    setter: @__defineSetter__
@@ -1,56 +0,0 @@
status = require 'http-status'
request = require 'request'

util = require './util_'
adjustError = util.adjustError

module.exports = class PropertyContainer
    constructor: (db, data) ->
        @db = db

        @_data = data or {}
        @_data.self = data?.self or null

        @getter 'self', -> @_data.self or null
        @getter 'exists', -> @self?
        @getter 'id', ->
            if not @exists
                null
            else
                match = /(?:node|relationship)\/(\d+)$/.exec @self
                #/ XXX slash to unbreak broken coda coffee plugin (which chokes on the regex with a slash)
                parseInt match[1]

        @getter 'data', -> @_data.data or null
        @setter 'data', (value) -> @_data.data = value

    getter: @::__defineGetter__
    setter: @::__defineSetter__

    equals: (other) ->
        @self is other?.self

    delete: (_) ->
        if not @exists
            return

        try
            response = request.del @self, _

            if response.statusCode isnt status.NO_CONTENT
                # database error
                message = ''
                switch response.statusCode
                    when status.NOT_FOUND
                        message = 'PropertyContainer not found'
                    when status.CONFLICT
                        message = 'Node could not be deleted (still has relationships?)'
                throw new Error message

            # success
            @_data.self = null

            return

        catch error
            throw adjustError error
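The `id` getter above derives a numeric id by matching the trailing segment of the REST `self` URL. A tiny illustration of that extraction in plain JavaScript (the URL is only an example):

    // e.g. a node whose self URL ends in /node/42 has id 42.
    var match = /(?:node|relationship)\/(\d+)$/.exec('http://localhost:7474/db/data/node/42');
    console.log(parseInt(match[1], 10));    // 42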
@@ -1,52 +0,0 @@
status = require 'http-status'
request = require 'request'

util = require './util_'
adjustError = util.adjustError

PropertyContainer = require './PropertyContainer_'

module.exports = class Relationship extends PropertyContainer
    constructor: (db, data, start, end) ->
        super db, data

        # require Node inline to prevent circular require dependency:
        Node = require './Node_'

        # TODO relationship "start" and "end" are inconsistent with
        # creating relationships "to" and "from". consider renaming.
        @_start = start or new Node db, {self: data.start}
        @_end = end or new Node db, {self: data.end}

        @getter 'start', -> @_start or null
        @getter 'end', -> @_end or null
        @getter 'type', -> data.type

    save: (_) ->
        try
            # TODO: check for actual modification
            if @exists
                response = request.put
                    uri: "#{@self}/properties"
                    json: @data
                , _

                if response.statusCode isnt status.NO_CONTENT
                    # database error
                    message = ''
                    switch response.statusCode
                        when status.BAD_REQUEST
                            message = 'Invalid data sent'
                        when status.NOT_FOUND
                            message = 'Relationship not found'
                    throw new Error message

            # explicitly returning nothing to make this a "void" method.
            return

        catch error
            throw adjustError error

    # Alias
    del: @::delete
@@ -1,31 +0,0 @@
/*

Neo4j driver for Node

Copyright 2011 Daniel Gasienica <daniel@gasienica.ch>
Copyright 2011 Aseem Kishore <aseem.kishore@gmail.com>

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.

*/

require('coffee-script');
require('streamline');

exports.GraphDatabase = require('./GraphDatabase_');

// XXX serialize functions not used internally right now, but used by outside
// clients, e.g. the scrapedb script. TODO formalize these better?
var util = require('./util_');
exports.serialize = util.serialize;
exports.deserialize = util.deserialize;
@@ -1,89 +0,0 @@
constants = require 'constants'

#-----------------------------------------------------------------------------
#
# Errors
#
#-----------------------------------------------------------------------------

exports.adjustError = (error) ->
    # Neo4j server error (error is a response object)
    if error.statusCode >= 400 and error.body
        serverError = error.body

        # in some cases, node-request hasn't parsed response JSON yet, so do.
        # XXX protect against neo4j incorrectly sending HTML instead of JSON.
        if typeof serverError is 'string'
            try
                serverError = JSON.parse serverError

        error = new Error
        error.message = serverError.message or serverError

    if typeof error isnt 'object'
        error = new Error error

    if error.errno is constants.ECONNREFUSED
        error.message = "Couldn’t reach database (Connection refused)"

    return error

#-----------------------------------------------------------------------------
#
# Serialization / Deserialization
#
#-----------------------------------------------------------------------------

exports.serialize = (o, separator) ->
    JSON.stringify flatten(o, separator)


exports.deserialize = (o, separator) ->
    unflatten JSON.parse(o), separator


flatten = (o, separator, result, prefix) ->
    separator = separator || '.'
    result = result || {}
    prefix = prefix || ''

    # only proceed if argument o is a complex object
    if typeof o isnt 'object'
        return o

    for key in Object.keys o
        value = o[key]
        if typeof value != 'object'
            result[prefix + key] = value
        else
            flatten(value, separator, result, key + separator)

    return result


unflatten = (o, separator, result) ->
    separator = separator || '.'
    result = result || {}

    # only proceed if argument o is a complex object
    if typeof o isnt 'object'
        return o

    for key in Object.keys o
        value = o[key]
        separatorIndex = key.indexOf separator
        if separatorIndex == -1
            result[key] = value
        else
            keys = key.split separator
            target = result
            numKeys = keys.length
            for i in [0..(numKeys - 2)]
                currentKey = keys[i]
                if target[currentKey] == undefined
                    target[currentKey] = {}
                target = target[currentKey]
            lastKey = keys[numKeys - 1]
            target[lastKey] = value

    return result
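The `flatten`/`unflatten` helpers above back the exported `serialize` and `deserialize` functions (re-exported by `index.js` earlier in this diff). A hedged illustration of the intended round trip, using made-up data and the default `.` separator:

    var neo4j = require('neo4j');

    // Nested properties become dotted keys in the serialized JSON string...
    var flat = neo4j.serialize({user: {name: 'Alice', age: 30}});
    console.log(flat);                      // {"user.name":"Alice","user.age":30}

    // ...and deserialize() rebuilds the original nesting.
    console.log(neo4j.deserialize(flat));   // { user: { name: 'Alice', age: 30 } }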
@@ -1 +0,0 @@
../coffee-script/bin/cake

@@ -1 +0,0 @@
../coffee-script/bin/coffee

@@ -1 +0,0 @@
../streamline/bin/coffee-streamline

@@ -1 +0,0 @@
../streamline/bin/node-streamline
@@ -1,11 +0,0 @@
*.coffee
*.html
.DS_Store
.git*
Cakefile
documentation/
examples/
extras/coffee-script.js
raw/
src/
test/
@@ -1,22 +0,0 @@
Copyright (c) 2011 Jeremy Ashkenas

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
@ -1,48 +0,0 @@
|
|||
=
|
||||
{
|
||||
} } {
|
||||
{ { } }
|
||||
} }{ {
|
||||
{ }{ } } _____ __ __
|
||||
( }{ }{ { ) / ____| / _|/ _|
|
||||
.- { { } { }} -. | | ___ | |_| |_ ___ ___
|
||||
( ( } { } { } } ) | | / _ \| _| _/ _ \/ _ \
|
||||
|`-..________ ..-'| | |___| (_) | | | || __/ __/
|
||||
| | \_____\___/|_| |_| \___|\___|
|
||||
| ;--.
|
||||
| (__ \ _____ _ _
|
||||
| | ) ) / ____| (_) | |
|
||||
| |/ / | (___ ___ _ __ _ _ __ | |_
|
||||
| ( / \___ \ / __| '__| | '_ \| __|
|
||||
| |/ ____) | (__| | | | |_) | |_
|
||||
| | |_____/ \___|_| |_| .__/ \__|
|
||||
`-.._________..-' | |
|
||||
|_|
|
||||
|
||||
|
||||
CoffeeScript is a little language that compiles into JavaScript.
|
||||
|
||||
Install Node.js, and then the CoffeeScript compiler:
|
||||
sudo bin/cake install
|
||||
|
||||
Or, if you have the Node Package Manager installed:
|
||||
npm install -g coffee-script
|
||||
(Leave off the -g if you don't wish to install globally.)
|
||||
|
||||
Compile a script:
|
||||
coffee /path/to/script.coffee
|
||||
|
||||
For documentation, usage, and examples, see:
|
||||
http://coffeescript.org/
|
||||
|
||||
To suggest a feature, report a bug, or general discussion:
|
||||
http://github.com/jashkenas/coffee-script/issues/
|
||||
|
||||
If you'd like to chat, drop by #coffeescript on Freenode IRC,
|
||||
or on webchat.freenode.net.
|
||||
|
||||
The source repository:
|
||||
git://github.com/jashkenas/coffee-script.git
|
||||
|
||||
All contributors are listed here:
|
||||
http://github.com/jashkenas/coffee-script/contributors
|
|
@ -1,78 +0,0 @@
|
|||
require 'rubygems'
|
||||
require 'erb'
|
||||
require 'fileutils'
|
||||
require 'rake/testtask'
|
||||
require 'json'
|
||||
|
||||
desc "Build the documentation page"
|
||||
task :doc do
|
||||
source = 'documentation/index.html.erb'
|
||||
child = fork { exec "bin/coffee -bcw -o documentation/js documentation/coffee/*.coffee" }
|
||||
at_exit { Process.kill("INT", child) }
|
||||
Signal.trap("INT") { exit }
|
||||
loop do
|
||||
mtime = File.stat(source).mtime
|
||||
if !@mtime || mtime > @mtime
|
||||
rendered = ERB.new(File.read(source)).result(binding)
|
||||
File.open('index.html', 'w+') {|f| f.write(rendered) }
|
||||
end
|
||||
@mtime = mtime
|
||||
sleep 1
|
||||
end
|
||||
end
|
||||
|
||||
desc "Build coffee-script-source gem"
|
||||
task :gem do
|
||||
require 'rubygems'
|
||||
require 'rubygems/package'
|
||||
|
||||
gemspec = Gem::Specification.new do |s|
|
||||
s.name = 'coffee-script-source'
|
||||
s.version = JSON.parse(File.read('package.json'))["version"]
|
||||
s.date = Time.now.strftime("%Y-%m-%d")
|
||||
|
||||
s.homepage = "http://jashkenas.github.com/coffee-script/"
|
||||
s.summary = "The CoffeeScript Compiler"
|
||||
s.description = <<-EOS
|
||||
CoffeeScript is a little language that compiles into JavaScript.
|
||||
Underneath all of those embarrassing braces and semicolons,
|
||||
JavaScript has always had a gorgeous object model at its heart.
|
||||
CoffeeScript is an attempt to expose the good parts of JavaScript
|
||||
in a simple way.
|
||||
EOS
|
||||
|
||||
s.files = [
|
||||
'lib/coffee_script/coffee-script.js',
|
||||
'lib/coffee_script/source.rb'
|
||||
]
|
||||
|
||||
s.authors = ['Jeremy Ashkenas']
|
||||
s.email = 'jashkenas@gmail.com'
|
||||
s.rubyforge_project = 'coffee-script-source'
|
||||
end
|
||||
|
||||
file = File.open("coffee-script-source.gem", "w")
|
||||
Gem::Package.open(file, 'w') do |pkg|
|
||||
pkg.metadata = gemspec.to_yaml
|
||||
|
||||
path = "lib/coffee_script/source.rb"
|
||||
contents = <<-ERUBY
|
||||
module CoffeeScript
|
||||
module Source
|
||||
def self.bundled_path
|
||||
File.expand_path("../coffee-script.js", __FILE__)
|
||||
end
|
||||
end
|
||||
end
|
||||
ERUBY
|
||||
pkg.add_file_simple(path, 0644, contents.size) do |tar_io|
|
||||
tar_io.write(contents)
|
||||
end
|
||||
|
||||
contents = File.read("extras/coffee-script.js")
|
||||
path = "lib/coffee_script/coffee-script.js"
|
||||
pkg.add_file_simple(path, 0644, contents.size) do |tar_io|
|
||||
tar_io.write(contents)
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,7 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var lib = path.join(path.dirname(fs.realpathSync(__filename)), '../lib');
|
||||
|
||||
require(lib + '/cake').run();
|
|
@ -1,7 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var lib = path.join(path.dirname(fs.realpathSync(__filename)), '../lib');
|
||||
|
||||
require(lib + '/command').run();
|
|
@ -1,44 +0,0 @@
|
|||
# JavaScriptLint configuration file for CoffeeScript.
|
||||
|
||||
+no_return_value # function {0} does not always return a value
|
||||
+duplicate_formal # duplicate formal argument {0}
|
||||
-equal_as_assign # test for equality (==) mistyped as assignment (=)?{0}
|
||||
+var_hides_arg # variable {0} hides argument
|
||||
+redeclared_var # redeclaration of {0} {1}
|
||||
-anon_no_return_value # anonymous function does not always return a value
|
||||
+missing_semicolon # missing semicolon
|
||||
+meaningless_block # meaningless block; curly braces have no impact
|
||||
-comma_separated_stmts # multiple statements separated by commas (use semicolons?)
|
||||
+unreachable_code # unreachable code
|
||||
+missing_break # missing break statement
|
||||
-missing_break_for_last_case # missing break statement for last case in switch
|
||||
-comparison_type_conv # comparisons against null, 0, true, false, or an empty string allowing implicit type conversion (use === or !==)
|
||||
-inc_dec_within_stmt # increment (++) and decrement (--) operators used as part of greater statement
|
||||
-useless_void # use of the void type may be unnecessary (void is always undefined)
|
||||
+multiple_plus_minus # unknown order of operations for successive plus (e.g. x+++y) or minus (e.g. x---y) signs
|
||||
+use_of_label # use of label
|
||||
-block_without_braces # block statement without curly braces
|
||||
+leading_decimal_point # leading decimal point may indicate a number or an object member
|
||||
+trailing_decimal_point # trailing decimal point may indicate a number or an object member
|
||||
+octal_number # leading zeros make an octal number
|
||||
+nested_comment # nested comment
|
||||
+misplaced_regex # regular expressions should be preceded by a left parenthesis, assignment, colon, or comma
|
||||
+ambiguous_newline # unexpected end of line; it is ambiguous whether these lines are part of the same statement
|
||||
+empty_statement # empty statement or extra semicolon
|
||||
-missing_option_explicit # the "option explicit" control comment is missing
|
||||
+partial_option_explicit # the "option explicit" control comment, if used, must be in the first script tag
|
||||
+dup_option_explicit # duplicate "option explicit" control comment
|
||||
+useless_assign # useless assignment
|
||||
+ambiguous_nested_stmt # block statements containing block statements should use curly braces to resolve ambiguity
|
||||
+ambiguous_else_stmt # the else statement could be matched with one of multiple if statements (use curly braces to indicate intent)
|
||||
-missing_default_case # missing default case in switch statement
|
||||
+duplicate_case_in_switch # duplicate case in switch statements
|
||||
+default_not_at_end # the default case is not at the end of the switch statement
|
||||
+legacy_cc_not_understood # couldn't understand control comment using /*@keyword@*/ syntax
|
||||
+jsl_cc_not_understood # couldn't understand control comment using /*jsl:keyword*/ syntax
|
||||
+useless_comparison # useless comparison; comparing identical expressions
|
||||
+with_statement # with statement hides undeclared variables; use temporary variable instead
|
||||
+trailing_comma_in_array # extra comma is not recommended in array initializers
|
||||
+assign_to_function_call # assignment to a function call
|
||||
+parseint_missing_radix # parseInt missing radix parameter
|
||||
+lambda_assign_requires_semicolon
|
|
@ -1,75 +0,0 @@
|
|||
(function() {
|
||||
var CoffeeScript, runScripts;
|
||||
CoffeeScript = require('./coffee-script');
|
||||
CoffeeScript.require = require;
|
||||
CoffeeScript.eval = function(code, options) {
|
||||
return eval(CoffeeScript.compile(code, options));
|
||||
};
|
||||
CoffeeScript.run = function(code, options) {
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
options.bare = true;
|
||||
return Function(CoffeeScript.compile(code, options))();
|
||||
};
|
||||
if (typeof window === "undefined" || window === null) {
|
||||
return;
|
||||
}
|
||||
CoffeeScript.load = function(url, callback) {
|
||||
var xhr;
|
||||
xhr = new (window.ActiveXObject || XMLHttpRequest)('Microsoft.XMLHTTP');
|
||||
xhr.open('GET', url, true);
|
||||
if ('overrideMimeType' in xhr) {
|
||||
xhr.overrideMimeType('text/plain');
|
||||
}
|
||||
xhr.onreadystatechange = function() {
|
||||
var _ref;
|
||||
if (xhr.readyState === 4) {
|
||||
if ((_ref = xhr.status) === 0 || _ref === 200) {
|
||||
CoffeeScript.run(xhr.responseText);
|
||||
} else {
|
||||
throw new Error("Could not load " + url);
|
||||
}
|
||||
if (callback) {
|
||||
return callback();
|
||||
}
|
||||
}
|
||||
};
|
||||
return xhr.send(null);
|
||||
};
|
||||
runScripts = function() {
|
||||
var coffees, execute, index, length, s, scripts;
|
||||
scripts = document.getElementsByTagName('script');
|
||||
coffees = (function() {
|
||||
var _i, _len, _results;
|
||||
_results = [];
|
||||
for (_i = 0, _len = scripts.length; _i < _len; _i++) {
|
||||
s = scripts[_i];
|
||||
if (s.type === 'text/coffeescript') {
|
||||
_results.push(s);
|
||||
}
|
||||
}
|
||||
return _results;
|
||||
})();
|
||||
index = 0;
|
||||
length = coffees.length;
|
||||
(execute = function() {
|
||||
var script;
|
||||
script = coffees[index++];
|
||||
if ((script != null ? script.type : void 0) === 'text/coffeescript') {
|
||||
if (script.src) {
|
||||
return CoffeeScript.load(script.src, execute);
|
||||
} else {
|
||||
CoffeeScript.run(script.innerHTML);
|
||||
return execute();
|
||||
}
|
||||
}
|
||||
})();
|
||||
return null;
|
||||
};
|
||||
if (window.addEventListener) {
|
||||
addEventListener('DOMContentLoaded', runScripts, false);
|
||||
} else {
|
||||
attachEvent('onload', runScripts);
|
||||
}
|
||||
}).call(this);
|
|
@ -1,76 +0,0 @@
|
|||
(function() {
|
||||
var CoffeeScript, fs, helpers, missingTask, oparse, options, optparse, path, printTasks, switches, tasks;
|
||||
fs = require('fs');
|
||||
path = require('path');
|
||||
helpers = require('./helpers');
|
||||
optparse = require('./optparse');
|
||||
CoffeeScript = require('./coffee-script');
|
||||
tasks = {};
|
||||
options = {};
|
||||
switches = [];
|
||||
oparse = null;
|
||||
helpers.extend(global, {
|
||||
task: function(name, description, action) {
|
||||
var _ref;
|
||||
if (!action) {
|
||||
_ref = [description, action], action = _ref[0], description = _ref[1];
|
||||
}
|
||||
return tasks[name] = {
|
||||
name: name,
|
||||
description: description,
|
||||
action: action
|
||||
};
|
||||
},
|
||||
option: function(letter, flag, description) {
|
||||
return switches.push([letter, flag, description]);
|
||||
},
|
||||
invoke: function(name) {
|
||||
if (!tasks[name]) {
|
||||
missingTask(name);
|
||||
}
|
||||
return tasks[name].action(options);
|
||||
}
|
||||
});
|
||||
exports.run = function() {
|
||||
return path.exists('Cakefile', function(exists) {
|
||||
var arg, args, _i, _len, _ref, _results;
|
||||
if (!exists) {
|
||||
throw new Error("Cakefile not found in " + (process.cwd()));
|
||||
}
|
||||
args = process.argv.slice(2);
|
||||
CoffeeScript.run(fs.readFileSync('Cakefile').toString(), {
|
||||
filename: 'Cakefile'
|
||||
});
|
||||
oparse = new optparse.OptionParser(switches);
|
||||
if (!args.length) {
|
||||
return printTasks();
|
||||
}
|
||||
options = oparse.parse(args);
|
||||
_ref = options.arguments;
|
||||
_results = [];
|
||||
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
|
||||
arg = _ref[_i];
|
||||
_results.push(invoke(arg));
|
||||
}
|
||||
return _results;
|
||||
});
|
||||
};
|
||||
printTasks = function() {
|
||||
var desc, name, spaces, task;
|
||||
console.log('');
|
||||
for (name in tasks) {
|
||||
task = tasks[name];
|
||||
spaces = 20 - name.length;
|
||||
spaces = spaces > 0 ? Array(spaces + 1).join(' ') : '';
|
||||
desc = task.description ? "# " + task.description : '';
|
||||
console.log("cake " + name + spaces + " " + desc);
|
||||
}
|
||||
if (switches.length) {
|
||||
return console.log(oparse.help());
|
||||
}
|
||||
};
|
||||
missingTask = function(task) {
|
||||
console.log("No such task: \"" + task + "\"");
|
||||
return process.exit(1);
|
||||
};
|
||||
}).call(this);
|
|
@@ -1,135 +0,0 @@
(function() {
var Lexer, RESERVED, compile, fs, lexer, parser, path, _ref;
var __hasProp = Object.prototype.hasOwnProperty;
fs = require('fs');
path = require('path');
_ref = require('./lexer'), Lexer = _ref.Lexer, RESERVED = _ref.RESERVED;
parser = require('./parser').parser;
if (require.extensions) {
require.extensions['.coffee'] = function(module, filename) {
var content;
content = compile(fs.readFileSync(filename, 'utf8'), {
filename: filename
});
return module._compile(content, filename);
};
} else if (require.registerExtension) {
require.registerExtension('.coffee', function(content) {
return compile(content);
});
}
exports.VERSION = '1.1.2';
exports.RESERVED = RESERVED;
exports.helpers = require('./helpers');
exports.compile = compile = function(code, options) {
if (options == null) {
options = {};
}
try {
return (parser.parse(lexer.tokenize(code))).compile(options);
} catch (err) {
if (options.filename) {
err.message = "In " + options.filename + ", " + err.message;
}
throw err;
}
};
exports.tokens = function(code, options) {
return lexer.tokenize(code, options);
};
exports.nodes = function(source, options) {
if (typeof source === 'string') {
return parser.parse(lexer.tokenize(source, options));
} else {
return parser.parse(source);
}
};
exports.run = function(code, options) {
var Module, mainModule;
mainModule = require.main;
mainModule.filename = process.argv[1] = options.filename ? fs.realpathSync(options.filename) : '.';
mainModule.moduleCache && (mainModule.moduleCache = {});
if (process.binding('natives').module) {
Module = require('module').Module;
mainModule.paths = Module._nodeModulePaths(path.dirname(options.filename));
}
if (path.extname(mainModule.filename) !== '.coffee' || require.extensions) {
return mainModule._compile(compile(code, options), mainModule.filename);
} else {
return mainModule._compile(code, mainModule.filename);
}
};
exports.eval = function(code, options) {
var Module, Script, js, k, o, r, sandbox, v, _i, _len, _module, _ref2, _ref3, _ref4, _require;
if (options == null) {
options = {};
}
if (!(code = code.trim())) {
return;
}
if (_ref2 = require('vm'), Script = _ref2.Script, _ref2) {
sandbox = Script.createContext();
sandbox.global = sandbox.root = sandbox.GLOBAL = sandbox;
if (options.sandbox != null) {
if (options.sandbox instanceof sandbox.constructor) {
sandbox = options.sandbox;
} else {
_ref3 = options.sandbox;
for (k in _ref3) {
if (!__hasProp.call(_ref3, k)) continue;
v = _ref3[k];
sandbox[k] = v;
}
}
}
sandbox.__filename = options.filename || 'eval';
sandbox.__dirname = path.dirname(sandbox.__filename);
if (!(sandbox.module || sandbox.require)) {
Module = require('module');
sandbox.module = _module = new Module(options.modulename || 'eval');
sandbox.require = _require = function(path) {
return Module._load(path, _module);
};
_module.filename = sandbox.__filename;
_ref4 = Object.getOwnPropertyNames(require);
for (_i = 0, _len = _ref4.length; _i < _len; _i++) {
r = _ref4[_i];
_require[r] = require[r];
}
_require.paths = _module.paths = Module._nodeModulePaths(process.cwd());
_require.resolve = function(request) {
return Module._resolveFilename(request, _module);
};
}
}
o = {};
for (k in options) {
if (!__hasProp.call(options, k)) continue;
v = options[k];
o[k] = v;
}
o.bare = true;
js = compile(code, o);
if (Script) {
return Script.runInContext(js, sandbox);
} else {
return eval(js);
}
};
lexer = new Lexer;
parser.lexer = {
lex: function() {
var tag, _ref2;
_ref2 = this.tokens[this.pos++] || [''], tag = _ref2[0], this.yytext = _ref2[1], this.yylineno = _ref2[2];
return tag;
},
setInput: function(tokens) {
this.tokens = tokens;
return this.pos = 0;
},
upcomingInput: function() {
return "";
}
};
parser.yy = require('./nodes');
}).call(this);

@@ -1,301 +0,0 @@
(function() {
var BANNER, CoffeeScript, EventEmitter, SWITCHES, compileJoin, compileOptions, compileScript, compileScripts, compileStdio, contents, exec, forkNode, fs, helpers, lint, loadRequires, optionParser, optparse, opts, parseOptions, path, printLine, printTokens, printWarn, sources, spawn, usage, version, watch, writeJs, _ref;
fs = require('fs');
path = require('path');
helpers = require('./helpers');
optparse = require('./optparse');
CoffeeScript = require('./coffee-script');
_ref = require('child_process'), spawn = _ref.spawn, exec = _ref.exec;
EventEmitter = require('events').EventEmitter;
helpers.extend(CoffeeScript, new EventEmitter);
printLine = function(line) {
return process.stdout.write(line + '\n');
};
printWarn = function(line) {
return process.binding('stdio').writeError(line + '\n');
};
BANNER = 'Usage: coffee [options] path/to/script.coffee';
SWITCHES = [['-c', '--compile', 'compile to JavaScript and save as .js files'], ['-i', '--interactive', 'run an interactive CoffeeScript REPL'], ['-o', '--output [DIR]', 'set the directory for compiled JavaScript'], ['-j', '--join [FILE]', 'concatenate the scripts before compiling'], ['-w', '--watch', 'watch scripts for changes, and recompile'], ['-p', '--print', 'print the compiled JavaScript to stdout'], ['-l', '--lint', 'pipe the compiled JavaScript through JavaScript Lint'], ['-s', '--stdio', 'listen for and compile scripts over stdio'], ['-e', '--eval', 'compile a string from the command line'], ['-r', '--require [FILE*]', 'require a library before executing your script'], ['-b', '--bare', 'compile without the top-level function wrapper'], ['-t', '--tokens', 'print the tokens that the lexer produces'], ['-n', '--nodes', 'print the parse tree that Jison produces'], ['--nodejs [ARGS]', 'pass options through to the "node" binary'], ['-v', '--version', 'display CoffeeScript version'], ['-h', '--help', 'display this help message']];
opts = {};
sources = [];
contents = [];
optionParser = null;
exports.run = function() {
parseOptions();
if (opts.nodejs) {
return forkNode();
}
if (opts.help) {
return usage();
}
if (opts.version) {
return version();
}
if (opts.require) {
loadRequires();
}
if (opts.interactive) {
return require('./repl');
}
if (opts.stdio) {
return compileStdio();
}
if (opts.eval) {
return compileScript(null, sources[0]);
}
if (!sources.length) {
return require('./repl');
}
if (opts.run) {
opts.literals = sources.splice(1).concat(opts.literals);
}
process.ARGV = process.argv = process.argv.slice(0, 2).concat(opts.literals);
process.argv[0] = 'coffee';
process.execPath = require.main.filename;
return compileScripts();
};
compileScripts = function() {
var base, compile, source, unprocessed, _i, _j, _len, _len2, _results;
unprocessed = [];
for (_i = 0, _len = sources.length; _i < _len; _i++) {
source = sources[_i];
unprocessed[sources.indexOf(source)] = 1;
}
_results = [];
for (_j = 0, _len2 = sources.length; _j < _len2; _j++) {
source = sources[_j];
base = path.join(source);
compile = function(source, sourceIndex, topLevel) {
var remaining_files;
remaining_files = function() {
var total, x, _k, _len3;
total = 0;
for (_k = 0, _len3 = unprocessed.length; _k < _len3; _k++) {
x = unprocessed[_k];
total += x;
}
return total;
};
return path.exists(source, function(exists) {
if (topLevel && !exists && source.slice(-7) !== '.coffee') {
return compile("" + source + ".coffee", sourceIndex, topLevel);
}
if (topLevel && !exists) {
throw new Error("File not found: " + source);
}
return fs.stat(source, function(err, stats) {
if (err) {
throw err;
}
if (stats.isDirectory()) {
return fs.readdir(source, function(err, files) {
var file, _k, _len3;
if (err) {
throw err;
}
unprocessed[sourceIndex] += files.length;
for (_k = 0, _len3 = files.length; _k < _len3; _k++) {
file = files[_k];
compile(path.join(source, file), sourceIndex);
}
return unprocessed[sourceIndex] -= 1;
});
} else if (topLevel || path.extname(source) === '.coffee') {
fs.readFile(source, function(err, code) {
if (err) {
throw err;
}
unprocessed[sourceIndex] -= 1;
if (opts.join) {
contents[sourceIndex] = helpers.compact([contents[sourceIndex], code.toString()]).join('\n');
if (helpers.compact(contents).length > 0 && remaining_files() === 0) {
return compileJoin();
}
} else {
return compileScript(source, code.toString(), base);
}
});
if (opts.watch && !opts.join) {
return watch(source, base);
}
} else {
return unprocessed[sourceIndex] -= 1;
}
});
});
};
_results.push(compile(source, sources.indexOf(source), true));
}
return _results;
};
compileScript = function(file, input, base) {
var o, options, t, task;
o = opts;
options = compileOptions(file);
try {
t = task = {
file: file,
input: input,
options: options
};
CoffeeScript.emit('compile', task);
if (o.tokens) {
return printTokens(CoffeeScript.tokens(t.input));
} else if (o.nodes) {
return printLine(CoffeeScript.nodes(t.input).toString().trim());
} else if (o.run) {
return CoffeeScript.run(t.input, t.options);
} else {
t.output = CoffeeScript.compile(t.input, t.options);
CoffeeScript.emit('success', task);
if (o.print) {
return printLine(t.output.trim());
} else if (o.compile) {
return writeJs(t.file, t.output, base);
} else if (o.lint) {
return lint(t.file, t.output);
}
}
} catch (err) {
CoffeeScript.emit('failure', err, task);
if (CoffeeScript.listeners('failure').length) {
return;
}
if (o.watch) {
return printLine(err.message);
}
printWarn(err.stack);
return process.exit(1);
}
};
compileStdio = function() {
var code, stdin;
code = '';
stdin = process.openStdin();
stdin.on('data', function(buffer) {
if (buffer) {
return code += buffer.toString();
}
});
return stdin.on('end', function() {
return compileScript(null, code);
});
};
compileJoin = function() {
var code;
code = contents.join('\n');
return compileScript(opts.join, code, opts.join);
};
loadRequires = function() {
var realFilename, req, _i, _len, _ref2;
realFilename = module.filename;
module.filename = '.';
_ref2 = opts.require;
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
req = _ref2[_i];
require(req);
}
return module.filename = realFilename;
};
watch = function(source, base) {
return fs.watchFile(source, {
persistent: true,
interval: 500
}, function(curr, prev) {
if (curr.size === prev.size && curr.mtime.getTime() === prev.mtime.getTime()) {
return;
}
return fs.readFile(source, function(err, code) {
if (err) {
throw err;
}
return compileScript(source, code.toString(), base);
});
});
};
writeJs = function(source, js, base) {
var baseDir, compile, dir, filename, jsPath, srcDir;
filename = path.basename(source, path.extname(source)) + '.js';
srcDir = path.dirname(source);
baseDir = base === '.' ? srcDir : srcDir.substring(base.length);
dir = opts.output ? path.join(opts.output, baseDir) : srcDir;
jsPath = path.join(dir, filename);
compile = function() {
if (js.length <= 0) {
js = ' ';
}
return fs.writeFile(jsPath, js, function(err) {
if (err) {
return printLine(err.message);
} else if (opts.compile && opts.watch) {
return console.log("" + ((new Date).toLocaleTimeString()) + " - compiled " + source);
}
});
};
return path.exists(dir, function(exists) {
if (exists) {
return compile();
} else {
return exec("mkdir -p " + dir, compile);
}
});
};
lint = function(file, js) {
var conf, jsl, printIt;
printIt = function(buffer) {
return printLine(file + ':\t' + buffer.toString().trim());
};
conf = __dirname + '/../extras/jsl.conf';
jsl = spawn('jsl', ['-nologo', '-stdin', '-conf', conf]);
jsl.stdout.on('data', printIt);
jsl.stderr.on('data', printIt);
jsl.stdin.write(js);
return jsl.stdin.end();
};
printTokens = function(tokens) {
var strings, tag, token, value;
strings = (function() {
var _i, _len, _ref2, _results;
_results = [];
for (_i = 0, _len = tokens.length; _i < _len; _i++) {
token = tokens[_i];
_ref2 = [token[0], token[1].toString().replace(/\n/, '\\n')], tag = _ref2[0], value = _ref2[1];
_results.push("[" + tag + " " + value + "]");
}
return _results;
})();
return printLine(strings.join(' '));
};
parseOptions = function() {
var o;
optionParser = new optparse.OptionParser(SWITCHES, BANNER);
o = opts = optionParser.parse(process.argv.slice(2));
o.compile || (o.compile = !!o.output);
o.run = !(o.compile || o.print || o.lint);
o.print = !!(o.print || (o.eval || o.stdio && o.compile));
return sources = o.arguments;
};
compileOptions = function(filename) {
return {
filename: filename,
bare: opts.bare
};
};
forkNode = function() {
var args, nodeArgs;
nodeArgs = opts.nodejs.split(/\s+/);
args = process.argv.slice(1);
args.splice(args.indexOf('--nodejs'), 2);
return spawn(process.execPath, nodeArgs.concat(args), {
cwd: process.cwd(),
env: process.env,
customFds: [0, 1, 2]
});
};
usage = function() {
return printLine((new optparse.OptionParser(SWITCHES, BANNER)).help());
};
version = function() {
return printLine("CoffeeScript version " + CoffeeScript.VERSION);
};
}).call(this);

@@ -1,591 +0,0 @@
|
|||
(function() {
|
||||
var Parser, alt, alternatives, grammar, name, o, operators, token, tokens, unwrap;
|
||||
Parser = require('jison').Parser;
|
||||
unwrap = /^function\s*\(\)\s*\{\s*return\s*([\s\S]*);\s*\}/;
|
||||
o = function(patternString, action, options) {
|
||||
var match;
|
||||
patternString = patternString.replace(/\s{2,}/g, ' ');
|
||||
if (!action) {
|
||||
return [patternString, '$$ = $1;', options];
|
||||
}
|
||||
action = (match = unwrap.exec(action)) ? match[1] : "(" + action + "())";
|
||||
action = action.replace(/\bnew /g, '$&yy.');
|
||||
action = action.replace(/\b(?:Block\.wrap|extend)\b/g, 'yy.$&');
|
||||
return [patternString, "$$ = " + action + ";", options];
|
||||
};
|
||||
grammar = {
|
||||
Root: [
|
||||
o('', function() {
|
||||
return new Block;
|
||||
}), o('Body'), o('Block TERMINATOR')
|
||||
],
|
||||
Body: [
|
||||
o('Line', function() {
|
||||
return Block.wrap([$1]);
|
||||
}), o('Body TERMINATOR Line', function() {
|
||||
return $1.push($3);
|
||||
}), o('Body TERMINATOR')
|
||||
],
|
||||
Line: [o('Expression'), o('Statement')],
|
||||
Statement: [
|
||||
o('Return'), o('Throw'), o('Comment'), o('STATEMENT', function() {
|
||||
return new Literal($1);
|
||||
})
|
||||
],
|
||||
Expression: [o('Value'), o('Invocation'), o('Code'), o('Operation'), o('Assign'), o('If'), o('Try'), o('While'), o('For'), o('Switch'), o('Class')],
|
||||
Block: [
|
||||
o('INDENT OUTDENT', function() {
|
||||
return new Block;
|
||||
}), o('INDENT Body OUTDENT', function() {
|
||||
return $2;
|
||||
})
|
||||
],
|
||||
Identifier: [
|
||||
o('IDENTIFIER', function() {
|
||||
return new Literal($1);
|
||||
})
|
||||
],
|
||||
AlphaNumeric: [
|
||||
o('NUMBER', function() {
|
||||
return new Literal($1);
|
||||
}), o('STRING', function() {
|
||||
return new Literal($1);
|
||||
})
|
||||
],
|
||||
Literal: [
|
||||
o('AlphaNumeric'), o('JS', function() {
|
||||
return new Literal($1);
|
||||
}), o('REGEX', function() {
|
||||
return new Literal($1);
|
||||
}), o('BOOL', function() {
|
||||
var val;
|
||||
val = new Literal($1);
|
||||
if ($1 === 'undefined') {
|
||||
val.isUndefined = true;
|
||||
}
|
||||
return val;
|
||||
})
|
||||
],
|
||||
Assign: [
|
||||
o('Assignable = Expression', function() {
|
||||
return new Assign($1, $3);
|
||||
}), o('Assignable = INDENT Expression OUTDENT', function() {
|
||||
return new Assign($1, $4);
|
||||
})
|
||||
],
|
||||
AssignObj: [
|
||||
o('ObjAssignable', function() {
|
||||
return new Value($1);
|
||||
}), o('ObjAssignable : Expression', function() {
|
||||
return new Assign(new Value($1), $3, 'object');
|
||||
}), o('ObjAssignable :\
|
||||
INDENT Expression OUTDENT', function() {
|
||||
return new Assign(new Value($1), $4, 'object');
|
||||
}), o('Comment')
|
||||
],
|
||||
ObjAssignable: [o('Identifier'), o('AlphaNumeric'), o('ThisProperty')],
|
||||
Return: [
|
||||
o('RETURN Expression', function() {
|
||||
return new Return($2);
|
||||
}), o('RETURN', function() {
|
||||
return new Return;
|
||||
})
|
||||
],
|
||||
Comment: [
|
||||
o('HERECOMMENT', function() {
|
||||
return new Comment($1);
|
||||
})
|
||||
],
|
||||
Code: [
|
||||
o('PARAM_START ParamList PARAM_END FuncGlyph Block', function() {
|
||||
return new Code($2, $5, $4);
|
||||
}), o('FuncGlyph Block', function() {
|
||||
return new Code([], $2, $1);
|
||||
})
|
||||
],
|
||||
FuncGlyph: [
|
||||
o('->', function() {
|
||||
return 'func';
|
||||
}), o('=>', function() {
|
||||
return 'boundfunc';
|
||||
})
|
||||
],
|
||||
OptComma: [o(''), o(',')],
|
||||
ParamList: [
|
||||
o('', function() {
|
||||
return [];
|
||||
}), o('Param', function() {
|
||||
return [$1];
|
||||
}), o('ParamList , Param', function() {
|
||||
return $1.concat($3);
|
||||
})
|
||||
],
|
||||
Param: [
|
||||
o('ParamVar', function() {
|
||||
return new Param($1);
|
||||
}), o('ParamVar ...', function() {
|
||||
return new Param($1, null, true);
|
||||
}), o('ParamVar = Expression', function() {
|
||||
return new Param($1, $3);
|
||||
})
|
||||
],
|
||||
ParamVar: [o('Identifier'), o('ThisProperty'), o('Array'), o('Object')],
|
||||
Splat: [
|
||||
o('Expression ...', function() {
|
||||
return new Splat($1);
|
||||
})
|
||||
],
|
||||
SimpleAssignable: [
|
||||
o('Identifier', function() {
|
||||
return new Value($1);
|
||||
}), o('Value Accessor', function() {
|
||||
return $1.push($2);
|
||||
}), o('Invocation Accessor', function() {
|
||||
return new Value($1, [$2]);
|
||||
}), o('ThisProperty')
|
||||
],
|
||||
Assignable: [
|
||||
o('SimpleAssignable'), o('Array', function() {
|
||||
return new Value($1);
|
||||
}), o('Object', function() {
|
||||
return new Value($1);
|
||||
})
|
||||
],
|
||||
Value: [
|
||||
o('Assignable'), o('Literal', function() {
|
||||
return new Value($1);
|
||||
}), o('Parenthetical', function() {
|
||||
return new Value($1);
|
||||
}), o('Range', function() {
|
||||
return new Value($1);
|
||||
}), o('This')
|
||||
],
|
||||
Accessor: [
|
||||
o('. Identifier', function() {
|
||||
return new Access($2);
|
||||
}), o('?. Identifier', function() {
|
||||
return new Access($2, 'soak');
|
||||
}), o(':: Identifier', function() {
|
||||
return new Access($2, 'proto');
|
||||
}), o('::', function() {
|
||||
return new Access(new Literal('prototype'));
|
||||
}), o('Index')
|
||||
],
|
||||
Index: [
|
||||
o('INDEX_START IndexValue INDEX_END', function() {
|
||||
return $2;
|
||||
}), o('INDEX_SOAK Index', function() {
|
||||
return extend($2, {
|
||||
soak: true
|
||||
});
|
||||
}), o('INDEX_PROTO Index', function() {
|
||||
return extend($2, {
|
||||
proto: true
|
||||
});
|
||||
})
|
||||
],
|
||||
IndexValue: [
|
||||
o('Expression', function() {
|
||||
return new Index($1);
|
||||
}), o('Slice', function() {
|
||||
return new Slice($1);
|
||||
})
|
||||
],
|
||||
Object: [
|
||||
o('{ AssignList OptComma }', function() {
|
||||
return new Obj($2, $1.generated);
|
||||
})
|
||||
],
|
||||
AssignList: [
|
||||
o('', function() {
|
||||
return [];
|
||||
}), o('AssignObj', function() {
|
||||
return [$1];
|
||||
}), o('AssignList , AssignObj', function() {
|
||||
return $1.concat($3);
|
||||
}), o('AssignList OptComma TERMINATOR AssignObj', function() {
|
||||
return $1.concat($4);
|
||||
}), o('AssignList OptComma INDENT AssignList OptComma OUTDENT', function() {
|
||||
return $1.concat($4);
|
||||
})
|
||||
],
|
||||
Class: [
|
||||
o('CLASS', function() {
|
||||
return new Class;
|
||||
}), o('CLASS Block', function() {
|
||||
return new Class(null, null, $2);
|
||||
}), o('CLASS EXTENDS Value', function() {
|
||||
return new Class(null, $3);
|
||||
}), o('CLASS EXTENDS Value Block', function() {
|
||||
return new Class(null, $3, $4);
|
||||
}), o('CLASS SimpleAssignable', function() {
|
||||
return new Class($2);
|
||||
}), o('CLASS SimpleAssignable Block', function() {
|
||||
return new Class($2, null, $3);
|
||||
}), o('CLASS SimpleAssignable EXTENDS Value', function() {
|
||||
return new Class($2, $4);
|
||||
}), o('CLASS SimpleAssignable EXTENDS Value Block', function() {
|
||||
return new Class($2, $4, $5);
|
||||
})
|
||||
],
|
||||
Invocation: [
|
||||
o('Value OptFuncExist Arguments', function() {
|
||||
return new Call($1, $3, $2);
|
||||
}), o('Invocation OptFuncExist Arguments', function() {
|
||||
return new Call($1, $3, $2);
|
||||
}), o('SUPER', function() {
|
||||
return new Call('super', [new Splat(new Literal('arguments'))]);
|
||||
}), o('SUPER Arguments', function() {
|
||||
return new Call('super', $2);
|
||||
})
|
||||
],
|
||||
OptFuncExist: [
|
||||
o('', function() {
|
||||
return false;
|
||||
}), o('FUNC_EXIST', function() {
|
||||
return true;
|
||||
})
|
||||
],
|
||||
Arguments: [
|
||||
o('CALL_START CALL_END', function() {
|
||||
return [];
|
||||
}), o('CALL_START ArgList OptComma CALL_END', function() {
|
||||
return $2;
|
||||
})
|
||||
],
|
||||
This: [
|
||||
o('THIS', function() {
|
||||
return new Value(new Literal('this'));
|
||||
}), o('@', function() {
|
||||
return new Value(new Literal('this'));
|
||||
})
|
||||
],
|
||||
ThisProperty: [
|
||||
o('@ Identifier', function() {
|
||||
return new Value(new Literal('this'), [new Access($2)], 'this');
|
||||
})
|
||||
],
|
||||
Array: [
|
||||
o('[ ]', function() {
|
||||
return new Arr([]);
|
||||
}), o('[ ArgList OptComma ]', function() {
|
||||
return new Arr($2);
|
||||
})
|
||||
],
|
||||
RangeDots: [
|
||||
o('..', function() {
|
||||
return 'inclusive';
|
||||
}), o('...', function() {
|
||||
return 'exclusive';
|
||||
})
|
||||
],
|
||||
Range: [
|
||||
o('[ Expression RangeDots Expression ]', function() {
|
||||
return new Range($2, $4, $3);
|
||||
})
|
||||
],
|
||||
Slice: [
|
||||
o('Expression RangeDots Expression', function() {
|
||||
return new Range($1, $3, $2);
|
||||
}), o('Expression RangeDots', function() {
|
||||
return new Range($1, null, $2);
|
||||
}), o('RangeDots Expression', function() {
|
||||
return new Range(null, $2, $1);
|
||||
})
|
||||
],
|
||||
ArgList: [
|
||||
o('Arg', function() {
|
||||
return [$1];
|
||||
}), o('ArgList , Arg', function() {
|
||||
return $1.concat($3);
|
||||
}), o('ArgList OptComma TERMINATOR Arg', function() {
|
||||
return $1.concat($4);
|
||||
}), o('INDENT ArgList OptComma OUTDENT', function() {
|
||||
return $2;
|
||||
}), o('ArgList OptComma INDENT ArgList OptComma OUTDENT', function() {
|
||||
return $1.concat($4);
|
||||
})
|
||||
],
|
||||
Arg: [o('Expression'), o('Splat')],
|
||||
SimpleArgs: [
|
||||
o('Expression'), o('SimpleArgs , Expression', function() {
|
||||
return [].concat($1, $3);
|
||||
})
|
||||
],
|
||||
Try: [
|
||||
o('TRY Block', function() {
|
||||
return new Try($2);
|
||||
}), o('TRY Block Catch', function() {
|
||||
return new Try($2, $3[0], $3[1]);
|
||||
}), o('TRY Block FINALLY Block', function() {
|
||||
return new Try($2, null, null, $4);
|
||||
}), o('TRY Block Catch FINALLY Block', function() {
|
||||
return new Try($2, $3[0], $3[1], $5);
|
||||
})
|
||||
],
|
||||
Catch: [
|
||||
o('CATCH Identifier Block', function() {
|
||||
return [$2, $3];
|
||||
})
|
||||
],
|
||||
Throw: [
|
||||
o('THROW Expression', function() {
|
||||
return new Throw($2);
|
||||
})
|
||||
],
|
||||
Parenthetical: [
|
||||
o('( Body )', function() {
|
||||
return new Parens($2);
|
||||
}), o('( INDENT Body OUTDENT )', function() {
|
||||
return new Parens($3);
|
||||
})
|
||||
],
|
||||
WhileSource: [
|
||||
o('WHILE Expression', function() {
|
||||
return new While($2);
|
||||
}), o('WHILE Expression WHEN Expression', function() {
|
||||
return new While($2, {
|
||||
guard: $4
|
||||
});
|
||||
}), o('UNTIL Expression', function() {
|
||||
return new While($2, {
|
||||
invert: true
|
||||
});
|
||||
}), o('UNTIL Expression WHEN Expression', function() {
|
||||
return new While($2, {
|
||||
invert: true,
|
||||
guard: $4
|
||||
});
|
||||
})
|
||||
],
|
||||
While: [
|
||||
o('WhileSource Block', function() {
|
||||
return $1.addBody($2);
|
||||
}), o('Statement WhileSource', function() {
|
||||
return $2.addBody(Block.wrap([$1]));
|
||||
}), o('Expression WhileSource', function() {
|
||||
return $2.addBody(Block.wrap([$1]));
|
||||
}), o('Loop', function() {
|
||||
return $1;
|
||||
})
|
||||
],
|
||||
Loop: [
|
||||
o('LOOP Block', function() {
|
||||
return new While(new Literal('true')).addBody($2);
|
||||
}), o('LOOP Expression', function() {
|
||||
return new While(new Literal('true')).addBody(Block.wrap([$2]));
|
||||
})
|
||||
],
|
||||
For: [
|
||||
o('Statement ForBody', function() {
|
||||
return new For($1, $2);
|
||||
}), o('Expression ForBody', function() {
|
||||
return new For($1, $2);
|
||||
}), o('ForBody Block', function() {
|
||||
return new For($2, $1);
|
||||
})
|
||||
],
|
||||
ForBody: [
|
||||
o('FOR Range', function() {
|
||||
return {
|
||||
source: new Value($2)
|
||||
};
|
||||
}), o('ForStart ForSource', function() {
|
||||
$2.own = $1.own;
|
||||
$2.name = $1[0];
|
||||
$2.index = $1[1];
|
||||
return $2;
|
||||
})
|
||||
],
|
||||
ForStart: [
|
||||
o('FOR ForVariables', function() {
|
||||
return $2;
|
||||
}), o('FOR OWN ForVariables', function() {
|
||||
$3.own = true;
|
||||
return $3;
|
||||
})
|
||||
],
|
||||
ForValue: [
|
||||
o('Identifier'), o('Array', function() {
|
||||
return new Value($1);
|
||||
}), o('Object', function() {
|
||||
return new Value($1);
|
||||
})
|
||||
],
|
||||
ForVariables: [
|
||||
o('ForValue', function() {
|
||||
return [$1];
|
||||
}), o('ForValue , ForValue', function() {
|
||||
return [$1, $3];
|
||||
})
|
||||
],
|
||||
ForSource: [
|
||||
o('FORIN Expression', function() {
|
||||
return {
|
||||
source: $2
|
||||
};
|
||||
}), o('FOROF Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
object: true
|
||||
};
|
||||
}), o('FORIN Expression WHEN Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
guard: $4
|
||||
};
|
||||
}), o('FOROF Expression WHEN Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
guard: $4,
|
||||
object: true
|
||||
};
|
||||
}), o('FORIN Expression BY Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
step: $4
|
||||
};
|
||||
}), o('FORIN Expression WHEN Expression BY Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
guard: $4,
|
||||
step: $6
|
||||
};
|
||||
}), o('FORIN Expression BY Expression WHEN Expression', function() {
|
||||
return {
|
||||
source: $2,
|
||||
step: $4,
|
||||
guard: $6
|
||||
};
|
||||
})
|
||||
],
|
||||
Switch: [
|
||||
o('SWITCH Expression INDENT Whens OUTDENT', function() {
|
||||
return new Switch($2, $4);
|
||||
}), o('SWITCH Expression INDENT Whens ELSE Block OUTDENT', function() {
|
||||
return new Switch($2, $4, $6);
|
||||
}), o('SWITCH INDENT Whens OUTDENT', function() {
|
||||
return new Switch(null, $3);
|
||||
}), o('SWITCH INDENT Whens ELSE Block OUTDENT', function() {
|
||||
return new Switch(null, $3, $5);
|
||||
})
|
||||
],
|
||||
Whens: [
|
||||
o('When'), o('Whens When', function() {
|
||||
return $1.concat($2);
|
||||
})
|
||||
],
|
||||
When: [
|
||||
o('LEADING_WHEN SimpleArgs Block', function() {
|
||||
return [[$2, $3]];
|
||||
}), o('LEADING_WHEN SimpleArgs Block TERMINATOR', function() {
|
||||
return [[$2, $3]];
|
||||
})
|
||||
],
|
||||
IfBlock: [
|
||||
o('IF Expression Block', function() {
|
||||
return new If($2, $3, {
|
||||
type: $1
|
||||
});
|
||||
}), o('IfBlock ELSE IF Expression Block', function() {
|
||||
return $1.addElse(new If($4, $5, {
|
||||
type: $3
|
||||
}));
|
||||
})
|
||||
],
|
||||
If: [
|
||||
o('IfBlock'), o('IfBlock ELSE Block', function() {
|
||||
return $1.addElse($3);
|
||||
}), o('Statement POST_IF Expression', function() {
|
||||
return new If($3, Block.wrap([$1]), {
|
||||
type: $2,
|
||||
statement: true
|
||||
});
|
||||
}), o('Expression POST_IF Expression', function() {
|
||||
return new If($3, Block.wrap([$1]), {
|
||||
type: $2,
|
||||
statement: true
|
||||
});
|
||||
})
|
||||
],
|
||||
Operation: [
|
||||
o('UNARY Expression', function() {
|
||||
return new Op($1, $2);
|
||||
}), o('- Expression', (function() {
|
||||
return new Op('-', $2);
|
||||
}), {
|
||||
prec: 'UNARY'
|
||||
}), o('+ Expression', (function() {
|
||||
return new Op('+', $2);
|
||||
}), {
|
||||
prec: 'UNARY'
|
||||
}), o('-- SimpleAssignable', function() {
|
||||
return new Op('--', $2);
|
||||
}), o('++ SimpleAssignable', function() {
|
||||
return new Op('++', $2);
|
||||
}), o('SimpleAssignable --', function() {
|
||||
return new Op('--', $1, null, true);
|
||||
}), o('SimpleAssignable ++', function() {
|
||||
return new Op('++', $1, null, true);
|
||||
}), o('Expression ?', function() {
|
||||
return new Existence($1);
|
||||
}), o('Expression + Expression', function() {
|
||||
return new Op('+', $1, $3);
|
||||
}), o('Expression - Expression', function() {
|
||||
return new Op('-', $1, $3);
|
||||
}), o('Expression MATH Expression', function() {
|
||||
return new Op($2, $1, $3);
|
||||
}), o('Expression SHIFT Expression', function() {
|
||||
return new Op($2, $1, $3);
|
||||
}), o('Expression COMPARE Expression', function() {
|
||||
return new Op($2, $1, $3);
|
||||
}), o('Expression LOGIC Expression', function() {
|
||||
return new Op($2, $1, $3);
|
||||
}), o('Expression RELATION Expression', function() {
|
||||
if ($2.charAt(0) === '!') {
|
||||
return new Op($2.slice(1), $1, $3).invert();
|
||||
} else {
|
||||
return new Op($2, $1, $3);
|
||||
}
|
||||
}), o('SimpleAssignable COMPOUND_ASSIGN\
|
||||
Expression', function() {
|
||||
return new Assign($1, $3, $2);
|
||||
}), o('SimpleAssignable COMPOUND_ASSIGN\
|
||||
INDENT Expression OUTDENT', function() {
|
||||
return new Assign($1, $4, $2);
|
||||
}), o('SimpleAssignable EXTENDS Expression', function() {
|
||||
return new Extends($1, $3);
|
||||
})
|
||||
]
|
||||
};
|
||||
operators = [['left', '.', '?.', '::'], ['left', 'CALL_START', 'CALL_END'], ['nonassoc', '++', '--'], ['left', '?'], ['right', 'UNARY'], ['left', 'MATH'], ['left', '+', '-'], ['left', 'SHIFT'], ['left', 'RELATION'], ['left', 'COMPARE'], ['left', 'LOGIC'], ['nonassoc', 'INDENT', 'OUTDENT'], ['right', '=', ':', 'COMPOUND_ASSIGN', 'RETURN', 'THROW', 'EXTENDS'], ['right', 'FORIN', 'FOROF', 'BY', 'WHEN'], ['right', 'IF', 'ELSE', 'FOR', 'DO', 'WHILE', 'UNTIL', 'LOOP', 'SUPER', 'CLASS'], ['right', 'POST_IF']];
|
||||
tokens = [];
|
||||
for (name in grammar) {
|
||||
alternatives = grammar[name];
|
||||
grammar[name] = (function() {
|
||||
var _i, _j, _len, _len2, _ref, _results;
|
||||
_results = [];
|
||||
for (_i = 0, _len = alternatives.length; _i < _len; _i++) {
|
||||
alt = alternatives[_i];
|
||||
_ref = alt[0].split(' ');
|
||||
for (_j = 0, _len2 = _ref.length; _j < _len2; _j++) {
|
||||
token = _ref[_j];
|
||||
if (!grammar[token]) {
|
||||
tokens.push(token);
|
||||
}
|
||||
}
|
||||
if (name === 'Root') {
|
||||
alt[1] = "return " + alt[1];
|
||||
}
|
||||
_results.push(alt);
|
||||
}
|
||||
return _results;
|
||||
})();
|
||||
}
|
||||
exports.parser = new Parser({
|
||||
tokens: tokens.join(' '),
|
||||
bnf: grammar,
|
||||
operators: operators.reverse(),
|
||||
startSymbol: 'Root'
|
||||
});
|
||||
}).call(this);
|
|
@@ -1,66 +0,0 @@
(function() {
var extend, flatten;
exports.starts = function(string, literal, start) {
return literal === string.substr(start, literal.length);
};
exports.ends = function(string, literal, back) {
var len;
len = literal.length;
return literal === string.substr(string.length - len - (back || 0), len);
};
exports.compact = function(array) {
var item, _i, _len, _results;
_results = [];
for (_i = 0, _len = array.length; _i < _len; _i++) {
item = array[_i];
if (item) {
_results.push(item);
}
}
return _results;
};
exports.count = function(string, substr) {
var num, pos;
num = pos = 0;
if (!substr.length) {
return 1 / 0;
}
while (pos = 1 + string.indexOf(substr, pos)) {
num++;
}
return num;
};
exports.merge = function(options, overrides) {
return extend(extend({}, options), overrides);
};
extend = exports.extend = function(object, properties) {
var key, val;
for (key in properties) {
val = properties[key];
object[key] = val;
}
return object;
};
exports.flatten = flatten = function(array) {
var element, flattened, _i, _len;
flattened = [];
for (_i = 0, _len = array.length; _i < _len; _i++) {
element = array[_i];
if (element instanceof Array) {
flattened = flattened.concat(flatten(element));
} else {
flattened.push(element);
}
}
return flattened;
};
exports.del = function(obj, key) {
var val;
val = obj[key];
delete obj[key];
return val;
};
exports.last = function(array, back) {
return array[array.length - (back || 0) - 1];
};
}).call(this);

@@ -1,8 +0,0 @@
(function() {
var key, val, _ref;
_ref = require('./coffee-script');
for (key in _ref) {
val = _ref[key];
exports[key] = val;
}
}).call(this);

@@ -1,656 +0,0 @@
|
|||
(function() {
|
||||
var ASSIGNED, BOOL, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_ALIAS_MAP, COFFEE_KEYWORDS, COMMENT, COMPARE, COMPOUND_ASSIGN, HEREDOC, HEREDOC_ILLEGAL, HEREDOC_INDENT, HEREGEX, HEREGEX_OMIT, IDENTIFIER, INDEXABLE, JSTOKEN, JS_FORBIDDEN, JS_KEYWORDS, LINE_BREAK, LINE_CONTINUER, LOGIC, Lexer, MATH, MULTILINER, MULTI_DENT, NOT_REGEX, NOT_SPACED_REGEX, NO_NEWLINE, NUMBER, OPERATOR, REGEX, RELATION, RESERVED, Rewriter, SHIFT, SIMPLESTR, TRAILING_SPACES, UNARY, WHITESPACE, compact, count, key, last, starts, _ref;
|
||||
var __indexOf = Array.prototype.indexOf || function(item) {
|
||||
for (var i = 0, l = this.length; i < l; i++) {
|
||||
if (this[i] === item) return i;
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
Rewriter = require('./rewriter').Rewriter;
|
||||
_ref = require('./helpers'), count = _ref.count, starts = _ref.starts, compact = _ref.compact, last = _ref.last;
|
||||
exports.Lexer = Lexer = (function() {
|
||||
function Lexer() {}
|
||||
Lexer.prototype.tokenize = function(code, opts) {
|
||||
var i;
|
||||
if (opts == null) {
|
||||
opts = {};
|
||||
}
|
||||
if (WHITESPACE.test(code)) {
|
||||
code = "\n" + code;
|
||||
}
|
||||
code = code.replace(/\r/g, '').replace(TRAILING_SPACES, '');
|
||||
this.code = code;
|
||||
this.line = opts.line || 0;
|
||||
this.indent = 0;
|
||||
this.indebt = 0;
|
||||
this.outdebt = 0;
|
||||
this.indents = [];
|
||||
this.tokens = [];
|
||||
i = 0;
|
||||
while (this.chunk = code.slice(i)) {
|
||||
i += this.identifierToken() || this.commentToken() || this.whitespaceToken() || this.lineToken() || this.heredocToken() || this.stringToken() || this.numberToken() || this.regexToken() || this.jsToken() || this.literalToken();
|
||||
}
|
||||
this.closeIndentation();
|
||||
if (opts.rewrite === false) {
|
||||
return this.tokens;
|
||||
}
|
||||
return (new Rewriter).rewrite(this.tokens);
|
||||
};
|
||||
Lexer.prototype.identifierToken = function() {
|
||||
var colon, forcedIdentifier, id, input, match, prev, tag, _ref2, _ref3;
|
||||
if (!(match = IDENTIFIER.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
input = match[0], id = match[1], colon = match[2];
|
||||
if (id === 'own' && this.tag() === 'FOR') {
|
||||
this.token('OWN', id);
|
||||
return id.length;
|
||||
}
|
||||
forcedIdentifier = colon || (prev = last(this.tokens)) && (((_ref2 = prev[0]) === '.' || _ref2 === '?.' || _ref2 === '::') || !prev.spaced && prev[0] === '@');
|
||||
tag = 'IDENTIFIER';
|
||||
if (!forcedIdentifier && (__indexOf.call(JS_KEYWORDS, id) >= 0 || __indexOf.call(COFFEE_KEYWORDS, id) >= 0)) {
|
||||
tag = id.toUpperCase();
|
||||
if (tag === 'WHEN' && (_ref3 = this.tag(), __indexOf.call(LINE_BREAK, _ref3) >= 0)) {
|
||||
tag = 'LEADING_WHEN';
|
||||
} else if (tag === 'FOR') {
|
||||
this.seenFor = true;
|
||||
} else if (tag === 'UNLESS') {
|
||||
tag = 'IF';
|
||||
} else if (__indexOf.call(UNARY, tag) >= 0) {
|
||||
tag = 'UNARY';
|
||||
} else if (__indexOf.call(RELATION, tag) >= 0) {
|
||||
if (tag !== 'INSTANCEOF' && this.seenFor) {
|
||||
tag = 'FOR' + tag;
|
||||
this.seenFor = false;
|
||||
} else {
|
||||
tag = 'RELATION';
|
||||
if (this.value() === '!') {
|
||||
this.tokens.pop();
|
||||
id = '!' + id;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (__indexOf.call(JS_FORBIDDEN, id) >= 0) {
|
||||
if (forcedIdentifier) {
|
||||
tag = 'IDENTIFIER';
|
||||
id = new String(id);
|
||||
id.reserved = true;
|
||||
} else if (__indexOf.call(RESERVED, id) >= 0) {
|
||||
this.identifierError(id);
|
||||
}
|
||||
}
|
||||
if (!forcedIdentifier) {
|
||||
if (__indexOf.call(COFFEE_ALIASES, id) >= 0) {
|
||||
id = COFFEE_ALIAS_MAP[id];
|
||||
}
|
||||
tag = (function() {
|
||||
switch (id) {
|
||||
case '!':
|
||||
return 'UNARY';
|
||||
case '==':
|
||||
case '!=':
|
||||
return 'COMPARE';
|
||||
case '&&':
|
||||
case '||':
|
||||
return 'LOGIC';
|
||||
case 'true':
|
||||
case 'false':
|
||||
case 'null':
|
||||
case 'undefined':
|
||||
return 'BOOL';
|
||||
case 'break':
|
||||
case 'continue':
|
||||
case 'debugger':
|
||||
return 'STATEMENT';
|
||||
default:
|
||||
return tag;
|
||||
}
|
||||
})();
|
||||
}
|
||||
this.token(tag, id);
|
||||
if (colon) {
|
||||
this.token(':', ':');
|
||||
}
|
||||
return input.length;
|
||||
};
|
||||
Lexer.prototype.numberToken = function() {
|
||||
var match, number;
|
||||
if (!(match = NUMBER.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
number = match[0];
|
||||
this.token('NUMBER', number);
|
||||
return number.length;
|
||||
};
|
||||
Lexer.prototype.stringToken = function() {
|
||||
var match, string;
|
||||
switch (this.chunk.charAt(0)) {
|
||||
case "'":
|
||||
if (!(match = SIMPLESTR.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
this.token('STRING', (string = match[0]).replace(MULTILINER, '\\\n'));
|
||||
break;
|
||||
case '"':
|
||||
if (!(string = this.balancedString(this.chunk, '"'))) {
|
||||
return 0;
|
||||
}
|
||||
if (0 < string.indexOf('#{', 1)) {
|
||||
this.interpolateString(string.slice(1, -1));
|
||||
} else {
|
||||
this.token('STRING', this.escapeLines(string));
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
this.line += count(string, '\n');
|
||||
return string.length;
|
||||
};
|
||||
Lexer.prototype.heredocToken = function() {
|
||||
var doc, heredoc, match, quote;
|
||||
if (!(match = HEREDOC.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
heredoc = match[0];
|
||||
quote = heredoc.charAt(0);
|
||||
doc = this.sanitizeHeredoc(match[2], {
|
||||
quote: quote,
|
||||
indent: null
|
||||
});
|
||||
if (quote === '"' && 0 <= doc.indexOf('#{')) {
|
||||
this.interpolateString(doc, {
|
||||
heredoc: true
|
||||
});
|
||||
} else {
|
||||
this.token('STRING', this.makeString(doc, quote, true));
|
||||
}
|
||||
this.line += count(heredoc, '\n');
|
||||
return heredoc.length;
|
||||
};
|
||||
Lexer.prototype.commentToken = function() {
|
||||
var comment, here, match;
|
||||
if (!(match = this.chunk.match(COMMENT))) {
|
||||
return 0;
|
||||
}
|
||||
comment = match[0], here = match[1];
|
||||
if (here) {
|
||||
this.token('HERECOMMENT', this.sanitizeHeredoc(here, {
|
||||
herecomment: true,
|
||||
indent: Array(this.indent + 1).join(' ')
|
||||
}));
|
||||
this.token('TERMINATOR', '\n');
|
||||
}
|
||||
this.line += count(comment, '\n');
|
||||
return comment.length;
|
||||
};
|
||||
Lexer.prototype.jsToken = function() {
|
||||
var match, script;
|
||||
if (!(this.chunk.charAt(0) === '`' && (match = JSTOKEN.exec(this.chunk)))) {
|
||||
return 0;
|
||||
}
|
||||
this.token('JS', (script = match[0]).slice(1, -1));
|
||||
return script.length;
|
||||
};
|
||||
Lexer.prototype.regexToken = function() {
|
||||
var length, match, prev, regex, _ref2;
|
||||
if (this.chunk.charAt(0) !== '/') {
|
||||
return 0;
|
||||
}
|
||||
if (match = HEREGEX.exec(this.chunk)) {
|
||||
length = this.heregexToken(match);
|
||||
this.line += count(match[0], '\n');
|
||||
return length;
|
||||
}
|
||||
prev = last(this.tokens);
|
||||
if (prev && (_ref2 = prev[0], __indexOf.call((prev.spaced ? NOT_REGEX : NOT_SPACED_REGEX), _ref2) >= 0)) {
|
||||
return 0;
|
||||
}
|
||||
if (!(match = REGEX.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
regex = match[0];
|
||||
this.token('REGEX', regex === '//' ? '/(?:)/' : regex);
|
||||
return regex.length;
|
||||
};
|
||||
Lexer.prototype.heregexToken = function(match) {
|
||||
var body, flags, heregex, re, tag, tokens, value, _i, _len, _ref2, _ref3, _ref4, _ref5;
|
||||
heregex = match[0], body = match[1], flags = match[2];
|
||||
if (0 > body.indexOf('#{')) {
|
||||
re = body.replace(HEREGEX_OMIT, '').replace(/\//g, '\\/');
|
||||
this.token('REGEX', "/" + (re || '(?:)') + "/" + flags);
|
||||
return heregex.length;
|
||||
}
|
||||
this.token('IDENTIFIER', 'RegExp');
|
||||
this.tokens.push(['CALL_START', '(']);
|
||||
tokens = [];
|
||||
_ref2 = this.interpolateString(body, {
|
||||
regex: true
|
||||
});
|
||||
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
|
||||
_ref3 = _ref2[_i], tag = _ref3[0], value = _ref3[1];
|
||||
if (tag === 'TOKENS') {
|
||||
tokens.push.apply(tokens, value);
|
||||
} else {
|
||||
if (!(value = value.replace(HEREGEX_OMIT, ''))) {
|
||||
continue;
|
||||
}
|
||||
value = value.replace(/\\/g, '\\\\');
|
||||
tokens.push(['STRING', this.makeString(value, '"', true)]);
|
||||
}
|
||||
tokens.push(['+', '+']);
|
||||
}
|
||||
tokens.pop();
|
||||
if (((_ref4 = tokens[0]) != null ? _ref4[0] : void 0) !== 'STRING') {
|
||||
this.tokens.push(['STRING', '""'], ['+', '+']);
|
||||
}
|
||||
(_ref5 = this.tokens).push.apply(_ref5, tokens);
|
||||
if (flags) {
|
||||
this.tokens.push([',', ','], ['STRING', '"' + flags + '"']);
|
||||
}
|
||||
this.token(')', ')');
|
||||
return heregex.length;
|
||||
};
|
||||
Lexer.prototype.lineToken = function() {
|
||||
var diff, indent, match, noNewlines, prev, size;
|
||||
if (!(match = MULTI_DENT.exec(this.chunk))) {
|
||||
return 0;
|
||||
}
|
||||
indent = match[0];
|
||||
this.line += count(indent, '\n');
|
||||
prev = last(this.tokens, 1);
|
||||
size = indent.length - 1 - indent.lastIndexOf('\n');
|
||||
noNewlines = this.unfinished();
|
||||
if (size - this.indebt === this.indent) {
|
||||
if (noNewlines) {
|
||||
this.suppressNewlines();
|
||||
} else {
|
||||
this.newlineToken();
|
||||
}
|
||||
return indent.length;
|
||||
}
|
||||
if (size > this.indent) {
|
||||
if (noNewlines) {
|
||||
this.indebt = size - this.indent;
|
||||
this.suppressNewlines();
|
||||
return indent.length;
|
||||
}
|
||||
diff = size - this.indent + this.outdebt;
|
||||
this.token('INDENT', diff);
|
||||
this.indents.push(diff);
|
||||
this.outdebt = this.indebt = 0;
|
||||
} else {
|
||||
this.indebt = 0;
|
||||
this.outdentToken(this.indent - size, noNewlines);
|
||||
}
|
||||
this.indent = size;
|
||||
return indent.length;
|
||||
};
|
||||
Lexer.prototype.outdentToken = function(moveOut, noNewlines, close) {
|
||||
var dent, len;
|
||||
while (moveOut > 0) {
|
||||
len = this.indents.length - 1;
|
||||
if (this.indents[len] === void 0) {
|
||||
moveOut = 0;
|
||||
} else if (this.indents[len] === this.outdebt) {
|
||||
moveOut -= this.outdebt;
|
||||
this.outdebt = 0;
|
||||
} else if (this.indents[len] < this.outdebt) {
|
||||
this.outdebt -= this.indents[len];
|
||||
moveOut -= this.indents[len];
|
||||
} else {
|
||||
dent = this.indents.pop() - this.outdebt;
|
||||
moveOut -= dent;
|
||||
this.outdebt = 0;
|
||||
this.token('OUTDENT', dent);
|
||||
}
|
||||
}
|
||||
if (dent) {
|
||||
this.outdebt -= moveOut;
|
||||
}
|
||||
if (!(this.tag() === 'TERMINATOR' || noNewlines)) {
|
||||
this.token('TERMINATOR', '\n');
|
||||
}
|
||||
return this;
|
||||
};
|
||||
Lexer.prototype.whitespaceToken = function() {
|
||||
var match, nline, prev;
|
||||
if (!((match = WHITESPACE.exec(this.chunk)) || (nline = this.chunk.charAt(0) === '\n'))) {
|
||||
return 0;
|
||||
}
|
||||
prev = last(this.tokens);
|
||||
if (prev) {
|
||||
prev[match ? 'spaced' : 'newLine'] = true;
|
||||
}
|
||||
if (match) {
|
||||
return match[0].length;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
Lexer.prototype.newlineToken = function() {
|
||||
if (this.tag() !== 'TERMINATOR') {
|
||||
this.token('TERMINATOR', '\n');
|
||||
}
|
||||
return this;
|
||||
};
|
||||
Lexer.prototype.suppressNewlines = function() {
|
||||
if (this.value() === '\\') {
|
||||
this.tokens.pop();
|
||||
}
|
||||
return this;
|
||||
};
|
||||
Lexer.prototype.literalToken = function() {
|
||||
var match, prev, tag, value, _ref2, _ref3, _ref4, _ref5;
|
||||
if (match = OPERATOR.exec(this.chunk)) {
|
||||
value = match[0];
|
||||
if (CODE.test(value)) {
|
||||
this.tagParameters();
|
||||
}
|
||||
} else {
|
||||
value = this.chunk.charAt(0);
|
||||
}
|
||||
tag = value;
|
||||
prev = last(this.tokens);
|
||||
if (value === '=' && prev) {
|
||||
if (!prev[1].reserved && (_ref2 = prev[1], __indexOf.call(JS_FORBIDDEN, _ref2) >= 0)) {
|
||||
this.assignmentError();
|
||||
}
|
||||
if ((_ref3 = prev[1]) === '||' || _ref3 === '&&') {
|
||||
prev[0] = 'COMPOUND_ASSIGN';
|
||||
prev[1] += '=';
|
||||
return value.length;
|
||||
}
|
||||
}
|
||||
if (value === ';') {
|
||||
tag = 'TERMINATOR';
|
||||
} else if (__indexOf.call(MATH, value) >= 0) {
|
||||
tag = 'MATH';
|
||||
} else if (__indexOf.call(COMPARE, value) >= 0) {
|
||||
tag = 'COMPARE';
|
||||
} else if (__indexOf.call(COMPOUND_ASSIGN, value) >= 0) {
|
||||
tag = 'COMPOUND_ASSIGN';
|
||||
} else if (__indexOf.call(UNARY, value) >= 0) {
|
||||
tag = 'UNARY';
|
||||
} else if (__indexOf.call(SHIFT, value) >= 0) {
|
||||
tag = 'SHIFT';
|
||||
} else if (__indexOf.call(LOGIC, value) >= 0 || value === '?' && (prev != null ? prev.spaced : void 0)) {
|
||||
tag = 'LOGIC';
|
||||
} else if (prev && !prev.spaced) {
|
||||
if (value === '(' && (_ref4 = prev[0], __indexOf.call(CALLABLE, _ref4) >= 0)) {
|
||||
if (prev[0] === '?') {
|
||||
prev[0] = 'FUNC_EXIST';
|
||||
}
|
||||
tag = 'CALL_START';
|
||||
} else if (value === '[' && (_ref5 = prev[0], __indexOf.call(INDEXABLE, _ref5) >= 0)) {
|
||||
tag = 'INDEX_START';
|
||||
switch (prev[0]) {
|
||||
case '?':
|
||||
prev[0] = 'INDEX_SOAK';
|
||||
break;
|
||||
case '::':
|
||||
prev[0] = 'INDEX_PROTO';
|
||||
}
|
||||
}
|
||||
}
|
||||
this.token(tag, value);
|
||||
return value.length;
|
||||
};
|
||||
Lexer.prototype.sanitizeHeredoc = function(doc, options) {
|
||||
var attempt, herecomment, indent, match, _ref2;
|
||||
indent = options.indent, herecomment = options.herecomment;
|
||||
if (herecomment) {
|
||||
if (HEREDOC_ILLEGAL.test(doc)) {
|
||||
throw new Error("block comment cannot contain \"*/\", starting on line " + (this.line + 1));
|
||||
}
|
||||
if (doc.indexOf('\n') <= 0) {
|
||||
return doc;
|
||||
}
|
||||
} else {
|
||||
while (match = HEREDOC_INDENT.exec(doc)) {
|
||||
attempt = match[1];
|
||||
if (indent === null || (0 < (_ref2 = attempt.length) && _ref2 < indent.length)) {
|
||||
indent = attempt;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (indent) {
|
||||
doc = doc.replace(RegExp("\\n" + indent, "g"), '\n');
|
||||
}
|
||||
if (!herecomment) {
|
||||
doc = doc.replace(/^\n/, '');
|
||||
}
|
||||
return doc;
|
||||
};
|
||||
Lexer.prototype.tagParameters = function() {
|
||||
var i, stack, tok, tokens;
|
||||
if (this.tag() !== ')') {
|
||||
return this;
|
||||
}
|
||||
stack = [];
|
||||
tokens = this.tokens;
|
||||
i = tokens.length;
|
||||
tokens[--i][0] = 'PARAM_END';
|
||||
while (tok = tokens[--i]) {
|
||||
switch (tok[0]) {
|
||||
case ')':
|
||||
stack.push(tok);
|
||||
break;
|
||||
case '(':
|
||||
case 'CALL_START':
|
||||
if (stack.length) {
|
||||
stack.pop();
|
||||
} else if (tok[0] === '(') {
|
||||
tok[0] = 'PARAM_START';
|
||||
return this;
|
||||
} else {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
|
||||
return this;
|
||||
};
|
||||
Lexer.prototype.closeIndentation = function() {
|
||||
return this.outdentToken(this.indent);
|
||||
};
|
||||
Lexer.prototype.identifierError = function(word) {
|
||||
throw SyntaxError("Reserved word \"" + word + "\" on line " + (this.line + 1));
|
||||
};
|
||||
Lexer.prototype.assignmentError = function() {
|
||||
throw SyntaxError("Reserved word \"" + (this.value()) + "\" on line " + (this.line + 1) + " can't be assigned");
|
||||
};
|
||||
Lexer.prototype.balancedString = function(str, end) {
|
||||
var i, letter, match, prev, stack, _ref2;
|
||||
stack = [end];
|
||||
for (i = 1, _ref2 = str.length; 1 <= _ref2 ? i < _ref2 : i > _ref2; 1 <= _ref2 ? i++ : i--) {
|
||||
switch (letter = str.charAt(i)) {
|
||||
case '\\':
|
||||
i++;
|
||||
continue;
|
||||
case end:
|
||||
stack.pop();
|
||||
if (!stack.length) {
|
||||
return str.slice(0, i + 1);
|
||||
}
|
||||
end = stack[stack.length - 1];
|
||||
continue;
|
||||
}
|
||||
if (end === '}' && (letter === '"' || letter === "'")) {
|
||||
stack.push(end = letter);
|
||||
} else if (end === '}' && letter === '/' && (match = HEREGEX.exec(str.slice(i)) || REGEX.exec(str.slice(i)))) {
|
||||
i += match[0].length - 1;
|
||||
} else if (end === '}' && letter === '{') {
|
||||
stack.push(end = '}');
|
||||
} else if (end === '"' && prev === '#' && letter === '{') {
|
||||
stack.push(end = '}');
|
||||
}
|
||||
prev = letter;
|
||||
}
|
||||
throw new Error("missing " + (stack.pop()) + ", starting on line " + (this.line + 1));
|
||||
};
|
||||
Lexer.prototype.interpolateString = function(str, options) {
|
||||
var expr, heredoc, i, inner, interpolated, len, letter, nested, pi, regex, tag, tokens, value, _len, _ref2, _ref3, _ref4;
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
heredoc = options.heredoc, regex = options.regex;
|
||||
tokens = [];
|
||||
pi = 0;
|
||||
i = -1;
|
||||
while (letter = str.charAt(i += 1)) {
|
||||
if (letter === '\\') {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
if (!(letter === '#' && str.charAt(i + 1) === '{' && (expr = this.balancedString(str.slice(i + 1), '}')))) {
|
||||
continue;
|
||||
}
|
||||
if (pi < i) {
|
||||
tokens.push(['NEOSTRING', str.slice(pi, i)]);
|
||||
}
|
||||
inner = expr.slice(1, -1);
|
||||
if (inner.length) {
|
||||
nested = new Lexer().tokenize(inner, {
|
||||
line: this.line,
|
||||
rewrite: false
|
||||
});
|
||||
nested.pop();
|
||||
if (((_ref2 = nested[0]) != null ? _ref2[0] : void 0) === 'TERMINATOR') {
|
||||
nested.shift();
|
||||
}
|
||||
if (len = nested.length) {
|
||||
if (len > 1) {
|
||||
nested.unshift(['(', '(']);
|
||||
nested.push([')', ')']);
|
||||
}
|
||||
tokens.push(['TOKENS', nested]);
|
||||
}
|
||||
}
|
||||
i += expr.length;
|
||||
pi = i + 1;
|
||||
}
|
||||
if ((i > pi && pi < str.length)) {
|
||||
tokens.push(['NEOSTRING', str.slice(pi)]);
|
||||
}
|
||||
if (regex) {
|
||||
return tokens;
|
||||
}
|
||||
if (!tokens.length) {
|
||||
return this.token('STRING', '""');
|
||||
}
|
||||
if (tokens[0][0] !== 'NEOSTRING') {
|
||||
tokens.unshift(['', '']);
|
||||
}
|
||||
if (interpolated = tokens.length > 1) {
|
||||
this.token('(', '(');
|
||||
}
|
||||
for (i = 0, _len = tokens.length; i < _len; i++) {
|
||||
_ref3 = tokens[i], tag = _ref3[0], value = _ref3[1];
|
||||
if (i) {
|
||||
this.token('+', '+');
|
||||
}
|
||||
if (tag === 'TOKENS') {
|
||||
(_ref4 = this.tokens).push.apply(_ref4, value);
|
||||
} else {
|
||||
this.token('STRING', this.makeString(value, '"', heredoc));
|
||||
}
|
||||
}
|
||||
if (interpolated) {
|
||||
this.token(')', ')');
|
||||
}
|
||||
return tokens;
|
||||
};
|
||||
Lexer.prototype.token = function(tag, value) {
|
||||
return this.tokens.push([tag, value, this.line]);
|
||||
};
|
||||
Lexer.prototype.tag = function(index, tag) {
|
||||
var tok;
|
||||
return (tok = last(this.tokens, index)) && (tag ? tok[0] = tag : tok[0]);
|
||||
};
|
||||
Lexer.prototype.value = function(index, val) {
|
||||
var tok;
|
||||
return (tok = last(this.tokens, index)) && (val ? tok[1] = val : tok[1]);
|
||||
};
|
||||
Lexer.prototype.unfinished = function() {
|
||||
var prev, value;
|
||||
return LINE_CONTINUER.test(this.chunk) || (prev = last(this.tokens, 1)) && prev[0] !== '.' && (value = this.value()) && !value.reserved && NO_NEWLINE.test(value) && !CODE.test(value) && !ASSIGNED.test(this.chunk);
|
||||
};
|
||||
Lexer.prototype.escapeLines = function(str, heredoc) {
|
||||
return str.replace(MULTILINER, heredoc ? '\\n' : '');
|
||||
};
|
||||
Lexer.prototype.makeString = function(body, quote, heredoc) {
|
||||
if (!body) {
|
||||
return quote + quote;
|
||||
}
|
||||
body = body.replace(/\\([\s\S])/g, function(match, contents) {
|
||||
if (contents === '\n' || contents === quote) {
|
||||
return contents;
|
||||
} else {
|
||||
return match;
|
||||
}
|
||||
});
|
||||
body = body.replace(RegExp("" + quote, "g"), '\\$&');
|
||||
return quote + this.escapeLines(body, heredoc) + quote;
|
||||
};
|
||||
return Lexer;
|
||||
})();
|
||||
JS_KEYWORDS = ['true', 'false', 'null', 'this', 'new', 'delete', 'typeof', 'in', 'instanceof', 'return', 'throw', 'break', 'continue', 'debugger', 'if', 'else', 'switch', 'for', 'while', 'do', 'try', 'catch', 'finally', 'class', 'extends', 'super'];
|
||||
COFFEE_KEYWORDS = ['undefined', 'then', 'unless', 'until', 'loop', 'of', 'by', 'when'];
|
||||
COFFEE_ALIAS_MAP = {
|
||||
and: '&&',
|
||||
or: '||',
|
||||
is: '==',
|
||||
isnt: '!=',
|
||||
not: '!',
|
||||
yes: 'true',
|
||||
no: 'false',
|
||||
on: 'true',
|
||||
off: 'false'
|
||||
};
|
||||
COFFEE_ALIASES = (function() {
|
||||
var _results;
|
||||
_results = [];
|
||||
for (key in COFFEE_ALIAS_MAP) {
|
||||
_results.push(key);
|
||||
}
|
||||
return _results;
|
||||
})();
|
||||
COFFEE_KEYWORDS = COFFEE_KEYWORDS.concat(COFFEE_ALIASES);
|
||||
RESERVED = ['case', 'default', 'function', 'var', 'void', 'with', 'const', 'let', 'enum', 'export', 'import', 'native', '__hasProp', '__extends', '__slice', '__bind', '__indexOf'];
|
||||
JS_FORBIDDEN = JS_KEYWORDS.concat(RESERVED);
|
||||
exports.RESERVED = RESERVED.concat(JS_KEYWORDS).concat(COFFEE_KEYWORDS);
|
||||
IDENTIFIER = /^([$A-Za-z_\x7f-\uffff][$\w\x7f-\uffff]*)([^\n\S]*:(?!:))?/;
|
||||
NUMBER = /^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i;
|
||||
HEREDOC = /^("""|''')([\s\S]*?)(?:\n[^\n\S]*)?\1/;
|
||||
OPERATOR = /^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>])\2=?|\?\.|\.{2,3})/;
|
||||
WHITESPACE = /^[^\n\S]+/;
|
||||
COMMENT = /^###([^#][\s\S]*?)(?:###[^\n\S]*|(?:###)?$)|^(?:\s*#(?!##[^#]).*)+/;
|
||||
CODE = /^[-=]>/;
|
||||
MULTI_DENT = /^(?:\n[^\n\S]*)+/;
|
||||
SIMPLESTR = /^'[^\\']*(?:\\.[^\\']*)*'/;
|
||||
JSTOKEN = /^`[^\\`]*(?:\\.[^\\`]*)*`/;
|
||||
REGEX = /^\/(?![\s=])[^[\/\n\\]*(?:(?:\\[\s\S]|\[[^\]\n\\]*(?:\\[\s\S][^\]\n\\]*)*])[^[\/\n\\]*)*\/[imgy]{0,4}(?!\w)/;
|
||||
HEREGEX = /^\/{3}([\s\S]+?)\/{3}([imgy]{0,4})(?!\w)/;
|
||||
HEREGEX_OMIT = /\s+(?:#.*)?/g;
|
||||
MULTILINER = /\n/g;
|
||||
HEREDOC_INDENT = /\n+([^\n\S]*)/g;
|
||||
HEREDOC_ILLEGAL = /\*\//;
|
||||
ASSIGNED = /^\s*@?([$A-Za-z_][$\w\x7f-\uffff]*|['"].*['"])[^\n\S]*?[:=][^:=>]/;
|
||||
LINE_CONTINUER = /^\s*(?:,|\??\.(?![.\d])|::)/;
|
||||
TRAILING_SPACES = /\s+$/;
|
||||
NO_NEWLINE = /^(?:[-+*&|\/%=<>!.\\][<>=&|]*|and|or|is(?:nt)?|n(?:ot|ew)|delete|typeof|instanceof)$/;
|
||||
COMPOUND_ASSIGN = ['-=', '+=', '/=', '*=', '%=', '||=', '&&=', '?=', '<<=', '>>=', '>>>=', '&=', '^=', '|='];
|
||||
UNARY = ['!', '~', 'NEW', 'TYPEOF', 'DELETE', 'DO'];
|
||||
LOGIC = ['&&', '||', '&', '|', '^'];
|
||||
SHIFT = ['<<', '>>', '>>>'];
|
||||
COMPARE = ['==', '!=', '<', '>', '<=', '>='];
|
||||
MATH = ['*', '/', '%'];
|
||||
RELATION = ['IN', 'OF', 'INSTANCEOF'];
|
||||
BOOL = ['TRUE', 'FALSE', 'NULL', 'UNDEFINED'];
|
||||
NOT_REGEX = ['NUMBER', 'REGEX', 'BOOL', '++', '--', ']'];
|
||||
NOT_SPACED_REGEX = NOT_REGEX.concat(')', '}', 'THIS', 'IDENTIFIER', 'STRING');
|
||||
CALLABLE = ['IDENTIFIER', 'STRING', 'REGEX', ')', ']', '}', '?', '::', '@', 'THIS', 'SUPER'];
|
||||
INDEXABLE = CALLABLE.concat('NUMBER', 'BOOL');
|
||||
LINE_BREAK = ['INDENT', 'OUTDENT', 'TERMINATOR'];
|
||||
}).call(this);
|
File diff suppressed because it is too large
|
@@ -1,111 +0,0 @@
|
|||
(function() {
|
||||
var LONG_FLAG, MULTI_FLAG, OPTIONAL, OptionParser, SHORT_FLAG, buildRule, buildRules, normalizeArguments;
|
||||
exports.OptionParser = OptionParser = (function() {
|
||||
function OptionParser(rules, banner) {
|
||||
this.banner = banner;
|
||||
this.rules = buildRules(rules);
|
||||
}
|
||||
OptionParser.prototype.parse = function(args) {
|
||||
var arg, i, isOption, matchedRule, options, rule, value, _i, _len, _len2, _ref;
|
||||
options = {
|
||||
arguments: [],
|
||||
literals: []
|
||||
};
|
||||
args = normalizeArguments(args);
|
||||
for (i = 0, _len = args.length; i < _len; i++) {
|
||||
arg = args[i];
|
||||
if (arg === '--') {
|
||||
options.literals = args.slice(i + 1);
|
||||
break;
|
||||
}
|
||||
isOption = !!(arg.match(LONG_FLAG) || arg.match(SHORT_FLAG));
|
||||
matchedRule = false;
|
||||
_ref = this.rules;
|
||||
for (_i = 0, _len2 = _ref.length; _i < _len2; _i++) {
|
||||
rule = _ref[_i];
|
||||
if (rule.shortFlag === arg || rule.longFlag === arg) {
|
||||
value = rule.hasArgument ? args[i += 1] : true;
|
||||
options[rule.name] = rule.isList ? (options[rule.name] || []).concat(value) : value;
|
||||
matchedRule = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (isOption && !matchedRule) {
|
||||
throw new Error("unrecognized option: " + arg);
|
||||
}
|
||||
if (!isOption) {
|
||||
options.arguments = args.slice(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return options;
|
||||
};
|
||||
OptionParser.prototype.help = function() {
|
||||
var letPart, lines, rule, spaces, _i, _len, _ref;
|
||||
lines = [];
|
||||
if (this.banner) {
|
||||
lines.unshift("" + this.banner + "\n");
|
||||
}
|
||||
_ref = this.rules;
|
||||
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
|
||||
rule = _ref[_i];
|
||||
spaces = 15 - rule.longFlag.length;
|
||||
spaces = spaces > 0 ? Array(spaces + 1).join(' ') : '';
|
||||
letPart = rule.shortFlag ? rule.shortFlag + ', ' : ' ';
|
||||
lines.push(' ' + letPart + rule.longFlag + spaces + rule.description);
|
||||
}
|
||||
return "\n" + (lines.join('\n')) + "\n";
|
||||
};
|
||||
return OptionParser;
|
||||
})();
|
||||
LONG_FLAG = /^(--\w[\w\-]+)/;
|
||||
SHORT_FLAG = /^(-\w)/;
|
||||
MULTI_FLAG = /^-(\w{2,})/;
|
||||
OPTIONAL = /\[(\w+(\*?))\]/;
|
||||
buildRules = function(rules) {
|
||||
var tuple, _i, _len, _results;
|
||||
_results = [];
|
||||
for (_i = 0, _len = rules.length; _i < _len; _i++) {
|
||||
tuple = rules[_i];
|
||||
if (tuple.length < 3) {
|
||||
tuple.unshift(null);
|
||||
}
|
||||
_results.push(buildRule.apply(null, tuple));
|
||||
}
|
||||
return _results;
|
||||
};
|
||||
buildRule = function(shortFlag, longFlag, description, options) {
|
||||
var match;
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
match = longFlag.match(OPTIONAL);
|
||||
longFlag = longFlag.match(LONG_FLAG)[1];
|
||||
return {
|
||||
name: longFlag.substr(2),
|
||||
shortFlag: shortFlag,
|
||||
longFlag: longFlag,
|
||||
description: description,
|
||||
hasArgument: !!(match && match[1]),
|
||||
isList: !!(match && match[2])
|
||||
};
|
||||
};
|
||||
normalizeArguments = function(args) {
|
||||
var arg, l, match, result, _i, _j, _len, _len2, _ref;
|
||||
args = args.slice(0);
|
||||
result = [];
|
||||
for (_i = 0, _len = args.length; _i < _len; _i++) {
|
||||
arg = args[_i];
|
||||
if (match = arg.match(MULTI_FLAG)) {
|
||||
_ref = match[1].split('');
|
||||
for (_j = 0, _len2 = _ref.length; _j < _len2; _j++) {
|
||||
l = _ref[_j];
|
||||
result.push('-' + l);
|
||||
}
|
||||
} else {
|
||||
result.push(arg);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
}).call(this);
|
File diff suppressed because one or more lines are too long
|
@@ -1,123 +0,0 @@
|
|||
(function() {
|
||||
var ACCESSOR, CoffeeScript, Module, REPL_PROMPT, REPL_PROMPT_CONTINUATION, SIMPLEVAR, Script, autocomplete, backlog, completeAttribute, completeVariable, enableColours, error, g, getCompletions, inspect, nonContextGlobals, readline, repl, run, sandbox, stdin, stdout, _i, _len;
|
||||
CoffeeScript = require('./coffee-script');
|
||||
readline = require('readline');
|
||||
inspect = require('util').inspect;
|
||||
Script = require('vm').Script;
|
||||
Module = require('module');
|
||||
REPL_PROMPT = 'coffee> ';
|
||||
REPL_PROMPT_CONTINUATION = '......> ';
|
||||
enableColours = false;
|
||||
if (process.platform !== 'win32') {
|
||||
enableColours = !process.env.NODE_DISABLE_COLORS;
|
||||
}
|
||||
stdin = process.openStdin();
|
||||
stdout = process.stdout;
|
||||
error = function(err) {
|
||||
return stdout.write((err.stack || err.toString()) + '\n\n');
|
||||
};
|
||||
backlog = '';
|
||||
sandbox = Script.createContext();
|
||||
nonContextGlobals = ['Buffer', 'console', 'process', 'setInterval', 'clearInterval', 'setTimeout', 'clearTimeout'];
|
||||
for (_i = 0, _len = nonContextGlobals.length; _i < _len; _i++) {
|
||||
g = nonContextGlobals[_i];
|
||||
sandbox[g] = global[g];
|
||||
}
|
||||
sandbox.global = sandbox.root = sandbox.GLOBAL = sandbox;
|
||||
run = function(buffer) {
|
||||
var code, returnValue, _;
|
||||
if (!buffer.toString().trim() && !backlog) {
|
||||
repl.prompt();
|
||||
return;
|
||||
}
|
||||
code = backlog += buffer;
|
||||
if (code[code.length - 1] === '\\') {
|
||||
backlog = "" + backlog.slice(0, -1) + "\n";
|
||||
repl.setPrompt(REPL_PROMPT_CONTINUATION);
|
||||
repl.prompt();
|
||||
return;
|
||||
}
|
||||
repl.setPrompt(REPL_PROMPT);
|
||||
backlog = '';
|
||||
try {
|
||||
_ = sandbox._;
|
||||
returnValue = CoffeeScript.eval("_=(" + code + "\n)", {
|
||||
sandbox: sandbox,
|
||||
filename: 'repl',
|
||||
modulename: 'repl'
|
||||
});
|
||||
if (returnValue === void 0) {
|
||||
sandbox._ = _;
|
||||
} else {
|
||||
process.stdout.write(inspect(returnValue, false, 2, enableColours) + '\n');
|
||||
}
|
||||
} catch (err) {
|
||||
error(err);
|
||||
}
|
||||
return repl.prompt();
|
||||
};
|
||||
ACCESSOR = /\s*([\w\.]+)(?:\.(\w*))$/;
|
||||
SIMPLEVAR = /\s*(\w*)$/i;
|
||||
autocomplete = function(text) {
|
||||
return completeAttribute(text) || completeVariable(text) || [[], text];
|
||||
};
|
||||
completeAttribute = function(text) {
|
||||
var all, completions, match, obj, prefix, val;
|
||||
if (match = text.match(ACCESSOR)) {
|
||||
all = match[0], obj = match[1], prefix = match[2];
|
||||
try {
|
||||
val = Script.runInContext(obj, sandbox);
|
||||
} catch (error) {
|
||||
return;
|
||||
}
|
||||
completions = getCompletions(prefix, Object.getOwnPropertyNames(val));
|
||||
return [completions, prefix];
|
||||
}
|
||||
};
|
||||
completeVariable = function(text) {
|
||||
var completions, free, possibilities, vars, _ref;
|
||||
if (free = (_ref = text.match(SIMPLEVAR)) != null ? _ref[1] : void 0) {
|
||||
vars = Script.runInContext('Object.getOwnPropertyNames(this)', sandbox);
|
||||
possibilities = vars.concat(CoffeeScript.RESERVED);
|
||||
completions = getCompletions(free, possibilities);
|
||||
return [completions, free];
|
||||
}
|
||||
};
|
||||
getCompletions = function(prefix, candidates) {
|
||||
var el, _j, _len2, _results;
|
||||
_results = [];
|
||||
for (_j = 0, _len2 = candidates.length; _j < _len2; _j++) {
|
||||
el = candidates[_j];
|
||||
if (el.indexOf(prefix) === 0) {
|
||||
_results.push(el);
|
||||
}
|
||||
}
|
||||
return _results;
|
||||
};
|
||||
process.on('uncaughtException', error);
|
||||
if (readline.createInterface.length < 3) {
|
||||
repl = readline.createInterface(stdin, autocomplete);
|
||||
stdin.on('data', function(buffer) {
|
||||
return repl.write(buffer);
|
||||
});
|
||||
} else {
|
||||
repl = readline.createInterface(stdin, stdout, autocomplete);
|
||||
}
|
||||
repl.on('attemptClose', function() {
|
||||
if (backlog) {
|
||||
backlog = '';
|
||||
process.stdout.write('\n');
|
||||
repl.setPrompt(REPL_PROMPT);
|
||||
return repl.prompt();
|
||||
} else {
|
||||
return repl.close();
|
||||
}
|
||||
});
|
||||
repl.on('close', function() {
|
||||
process.stdout.write('\n');
|
||||
return stdin.destroy();
|
||||
});
|
||||
repl.on('line', run);
|
||||
repl.setPrompt(REPL_PROMPT);
|
||||
repl.prompt();
|
||||
}).call(this);
|
|
@@ -1,363 +0,0 @@
|
|||
(function() {
|
||||
var BALANCED_PAIRS, EXPRESSION_CLOSE, EXPRESSION_END, EXPRESSION_START, IMPLICIT_BLOCK, IMPLICIT_CALL, IMPLICIT_END, IMPLICIT_FUNC, IMPLICIT_UNSPACED_CALL, INVERSES, LINEBREAKS, SINGLE_CLOSERS, SINGLE_LINERS, left, rite, _i, _len, _ref;
|
||||
var __indexOf = Array.prototype.indexOf || function(item) {
|
||||
for (var i = 0, l = this.length; i < l; i++) {
|
||||
if (this[i] === item) return i;
|
||||
}
|
||||
return -1;
|
||||
}, __slice = Array.prototype.slice;
|
||||
exports.Rewriter = (function() {
|
||||
function Rewriter() {}
|
||||
Rewriter.prototype.rewrite = function(tokens) {
|
||||
this.tokens = tokens;
|
||||
this.removeLeadingNewlines();
|
||||
this.removeMidExpressionNewlines();
|
||||
this.closeOpenCalls();
|
||||
this.closeOpenIndexes();
|
||||
this.addImplicitIndentation();
|
||||
this.tagPostfixConditionals();
|
||||
this.addImplicitBraces();
|
||||
this.addImplicitParentheses();
|
||||
this.ensureBalance(BALANCED_PAIRS);
|
||||
this.rewriteClosingParens();
|
||||
return this.tokens;
|
||||
};
|
||||
Rewriter.prototype.scanTokens = function(block) {
|
||||
var i, token, tokens;
|
||||
tokens = this.tokens;
|
||||
i = 0;
|
||||
while (token = tokens[i]) {
|
||||
i += block.call(this, token, i, tokens);
|
||||
}
|
||||
return true;
|
||||
};
|
||||
Rewriter.prototype.detectEnd = function(i, condition, action) {
|
||||
var levels, token, tokens, _ref, _ref2;
|
||||
tokens = this.tokens;
|
||||
levels = 0;
|
||||
while (token = tokens[i]) {
|
||||
if (levels === 0 && condition.call(this, token, i)) {
|
||||
return action.call(this, token, i);
|
||||
}
|
||||
if (!token || levels < 0) {
|
||||
return action.call(this, token, i - 1);
|
||||
}
|
||||
if (_ref = token[0], __indexOf.call(EXPRESSION_START, _ref) >= 0) {
|
||||
levels += 1;
|
||||
} else if (_ref2 = token[0], __indexOf.call(EXPRESSION_END, _ref2) >= 0) {
|
||||
levels -= 1;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
return i - 1;
|
||||
};
|
||||
Rewriter.prototype.removeLeadingNewlines = function() {
|
||||
var i, tag, _len, _ref;
|
||||
_ref = this.tokens;
|
||||
for (i = 0, _len = _ref.length; i < _len; i++) {
|
||||
tag = _ref[i][0];
|
||||
if (tag !== 'TERMINATOR') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i) {
|
||||
return this.tokens.splice(0, i);
|
||||
}
|
||||
};
|
||||
Rewriter.prototype.removeMidExpressionNewlines = function() {
|
||||
return this.scanTokens(function(token, i, tokens) {
|
||||
var _ref;
|
||||
if (!(token[0] === 'TERMINATOR' && (_ref = this.tag(i + 1), __indexOf.call(EXPRESSION_CLOSE, _ref) >= 0))) {
|
||||
return 1;
|
||||
}
|
||||
tokens.splice(i, 1);
|
||||
return 0;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.closeOpenCalls = function() {
|
||||
var action, condition;
|
||||
condition = function(token, i) {
|
||||
var _ref;
|
||||
return ((_ref = token[0]) === ')' || _ref === 'CALL_END') || token[0] === 'OUTDENT' && this.tag(i - 1) === ')';
|
||||
};
|
||||
action = function(token, i) {
|
||||
return this.tokens[token[0] === 'OUTDENT' ? i - 1 : i][0] = 'CALL_END';
|
||||
};
|
||||
return this.scanTokens(function(token, i) {
|
||||
if (token[0] === 'CALL_START') {
|
||||
this.detectEnd(i + 1, condition, action);
|
||||
}
|
||||
return 1;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.closeOpenIndexes = function() {
|
||||
var action, condition;
|
||||
condition = function(token, i) {
|
||||
var _ref;
|
||||
return (_ref = token[0]) === ']' || _ref === 'INDEX_END';
|
||||
};
|
||||
action = function(token, i) {
|
||||
return token[0] = 'INDEX_END';
|
||||
};
|
||||
return this.scanTokens(function(token, i) {
|
||||
if (token[0] === 'INDEX_START') {
|
||||
this.detectEnd(i + 1, condition, action);
|
||||
}
|
||||
return 1;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.addImplicitBraces = function() {
|
||||
var action, condition, stack, start, startIndent;
|
||||
stack = [];
|
||||
start = null;
|
||||
startIndent = 0;
|
||||
condition = function(token, i) {
|
||||
var one, tag, three, two, _ref, _ref2;
|
||||
_ref = this.tokens.slice(i + 1, (i + 3 + 1) || 9e9), one = _ref[0], two = _ref[1], three = _ref[2];
|
||||
if ('HERECOMMENT' === (one != null ? one[0] : void 0)) {
|
||||
return false;
|
||||
}
|
||||
tag = token[0];
|
||||
return ((tag === 'TERMINATOR' || tag === 'OUTDENT') && !((two != null ? two[0] : void 0) === ':' || (one != null ? one[0] : void 0) === '@' && (three != null ? three[0] : void 0) === ':')) || (tag === ',' && one && ((_ref2 = one[0]) !== 'IDENTIFIER' && _ref2 !== 'NUMBER' && _ref2 !== 'STRING' && _ref2 !== '@' && _ref2 !== 'TERMINATOR' && _ref2 !== 'OUTDENT'));
|
||||
};
|
||||
action = function(token, i) {
|
||||
var tok;
|
||||
tok = ['}', '}', token[2]];
|
||||
tok.generated = true;
|
||||
return this.tokens.splice(i, 0, tok);
|
||||
};
|
||||
return this.scanTokens(function(token, i, tokens) {
|
||||
var ago, idx, tag, tok, value, _ref, _ref2;
|
||||
if (_ref = (tag = token[0]), __indexOf.call(EXPRESSION_START, _ref) >= 0) {
|
||||
stack.push([(tag === 'INDENT' && this.tag(i - 1) === '{' ? '{' : tag), i]);
|
||||
return 1;
|
||||
}
|
||||
if (__indexOf.call(EXPRESSION_END, tag) >= 0) {
|
||||
start = stack.pop();
|
||||
return 1;
|
||||
}
|
||||
if (!(tag === ':' && ((ago = this.tag(i - 2)) === ':' || ((_ref2 = stack[stack.length - 1]) != null ? _ref2[0] : void 0) !== '{'))) {
|
||||
return 1;
|
||||
}
|
||||
stack.push(['{']);
|
||||
idx = ago === '@' ? i - 2 : i - 1;
|
||||
while (this.tag(idx - 2) === 'HERECOMMENT') {
|
||||
idx -= 2;
|
||||
}
|
||||
value = new String('{');
|
||||
value.generated = true;
|
||||
tok = ['{', value, token[2]];
|
||||
tok.generated = true;
|
||||
tokens.splice(idx, 0, tok);
|
||||
this.detectEnd(i + 2, condition, action);
|
||||
return 2;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.addImplicitParentheses = function() {
|
||||
var action, noCall;
|
||||
noCall = false;
|
||||
action = function(token, i) {
|
||||
var idx;
|
||||
idx = token[0] === 'OUTDENT' ? i + 1 : i;
|
||||
return this.tokens.splice(idx, 0, ['CALL_END', ')', token[2]]);
|
||||
};
|
||||
return this.scanTokens(function(token, i, tokens) {
|
||||
var callObject, current, next, prev, seenControl, seenSingle, tag, _ref, _ref2, _ref3;
|
||||
tag = token[0];
|
||||
if (tag === 'CLASS' || tag === 'IF') {
|
||||
noCall = true;
|
||||
}
|
||||
_ref = tokens.slice(i - 1, (i + 1 + 1) || 9e9), prev = _ref[0], current = _ref[1], next = _ref[2];
|
||||
callObject = !noCall && tag === 'INDENT' && next && next.generated && next[0] === '{' && prev && (_ref2 = prev[0], __indexOf.call(IMPLICIT_FUNC, _ref2) >= 0);
|
||||
seenSingle = false;
|
||||
seenControl = false;
|
||||
if (__indexOf.call(LINEBREAKS, tag) >= 0) {
|
||||
noCall = false;
|
||||
}
|
||||
if (prev && !prev.spaced && tag === '?') {
|
||||
token.call = true;
|
||||
}
|
||||
if (token.fromThen) {
|
||||
return 1;
|
||||
}
|
||||
if (!(callObject || (prev != null ? prev.spaced : void 0) && (prev.call || (_ref3 = prev[0], __indexOf.call(IMPLICIT_FUNC, _ref3) >= 0)) && (__indexOf.call(IMPLICIT_CALL, tag) >= 0 || !(token.spaced || token.newLine) && __indexOf.call(IMPLICIT_UNSPACED_CALL, tag) >= 0))) {
|
||||
return 1;
|
||||
}
|
||||
tokens.splice(i, 0, ['CALL_START', '(', token[2]]);
|
||||
this.detectEnd(i + 1, function(token, i) {
|
||||
var post, _ref4;
|
||||
tag = token[0];
|
||||
if (!seenSingle && token.fromThen) {
|
||||
return true;
|
||||
}
|
||||
if (tag === 'IF' || tag === 'ELSE' || tag === 'CATCH' || tag === '->' || tag === '=>') {
|
||||
seenSingle = true;
|
||||
}
|
||||
if (tag === 'IF' || tag === 'ELSE' || tag === 'SWITCH' || tag === 'TRY') {
|
||||
seenControl = true;
|
||||
}
|
||||
if ((tag === '.' || tag === '?.' || tag === '::') && this.tag(i - 1) === 'OUTDENT') {
|
||||
return true;
|
||||
}
|
||||
return !token.generated && this.tag(i - 1) !== ',' && (__indexOf.call(IMPLICIT_END, tag) >= 0 || (tag === 'INDENT' && !seenControl)) && (tag !== 'INDENT' || (this.tag(i - 2) !== 'CLASS' && (_ref4 = this.tag(i - 1), __indexOf.call(IMPLICIT_BLOCK, _ref4) < 0) && !((post = this.tokens[i + 1]) && post.generated && post[0] === '{')));
|
||||
}, action);
|
||||
if (prev[0] === '?') {
|
||||
prev[0] = 'FUNC_EXIST';
|
||||
}
|
||||
return 2;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.addImplicitIndentation = function() {
|
||||
return this.scanTokens(function(token, i, tokens) {
|
||||
var action, condition, indent, outdent, starter, tag, _ref, _ref2;
|
||||
tag = token[0];
|
||||
if (tag === 'TERMINATOR' && this.tag(i + 1) === 'THEN') {
|
||||
tokens.splice(i, 1);
|
||||
return 0;
|
||||
}
|
||||
if (tag === 'ELSE' && this.tag(i - 1) !== 'OUTDENT') {
|
||||
tokens.splice.apply(tokens, [i, 0].concat(__slice.call(this.indentation(token))));
|
||||
return 2;
|
||||
}
|
||||
if (tag === 'CATCH' && ((_ref = this.tag(i + 2)) === 'OUTDENT' || _ref === 'TERMINATOR' || _ref === 'FINALLY')) {
|
||||
tokens.splice.apply(tokens, [i + 2, 0].concat(__slice.call(this.indentation(token))));
|
||||
return 4;
|
||||
}
|
||||
if (__indexOf.call(SINGLE_LINERS, tag) >= 0 && this.tag(i + 1) !== 'INDENT' && !(tag === 'ELSE' && this.tag(i + 1) === 'IF')) {
|
||||
starter = tag;
|
||||
_ref2 = this.indentation(token), indent = _ref2[0], outdent = _ref2[1];
|
||||
if (starter === 'THEN') {
|
||||
indent.fromThen = true;
|
||||
}
|
||||
indent.generated = outdent.generated = true;
|
||||
tokens.splice(i + 1, 0, indent);
|
||||
condition = function(token, i) {
|
||||
var _ref3;
|
||||
return token[1] !== ';' && (_ref3 = token[0], __indexOf.call(SINGLE_CLOSERS, _ref3) >= 0) && !(token[0] === 'ELSE' && (starter !== 'IF' && starter !== 'THEN'));
|
||||
};
|
||||
action = function(token, i) {
|
||||
return this.tokens.splice((this.tag(i - 1) === ',' ? i - 1 : i), 0, outdent);
|
||||
};
|
||||
this.detectEnd(i + 2, condition, action);
|
||||
if (tag === 'THEN') {
|
||||
tokens.splice(i, 1);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
return 1;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.tagPostfixConditionals = function() {
|
||||
var condition;
|
||||
condition = function(token, i) {
|
||||
var _ref;
|
||||
return (_ref = token[0]) === 'TERMINATOR' || _ref === 'INDENT';
|
||||
};
|
||||
return this.scanTokens(function(token, i) {
|
||||
var original;
|
||||
if (token[0] !== 'IF') {
|
||||
return 1;
|
||||
}
|
||||
original = token;
|
||||
this.detectEnd(i + 1, condition, function(token, i) {
|
||||
if (token[0] !== 'INDENT') {
|
||||
return original[0] = 'POST_' + original[0];
|
||||
}
|
||||
});
|
||||
return 1;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.ensureBalance = function(pairs) {
|
||||
var close, level, levels, open, openLine, tag, token, _i, _j, _len, _len2, _ref, _ref2;
|
||||
levels = {};
|
||||
openLine = {};
|
||||
_ref = this.tokens;
|
||||
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
|
||||
token = _ref[_i];
|
||||
tag = token[0];
|
||||
for (_j = 0, _len2 = pairs.length; _j < _len2; _j++) {
|
||||
_ref2 = pairs[_j], open = _ref2[0], close = _ref2[1];
|
||||
levels[open] |= 0;
|
||||
if (tag === open) {
|
||||
if (levels[open]++ === 0) {
|
||||
openLine[open] = token[2];
|
||||
}
|
||||
} else if (tag === close && --levels[open] < 0) {
|
||||
throw Error("too many " + token[1] + " on line " + (token[2] + 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (open in levels) {
|
||||
level = levels[open];
|
||||
if (level > 0) {
|
||||
throw Error("unclosed " + open + " on line " + (openLine[open] + 1));
|
||||
}
|
||||
}
|
||||
return this;
|
||||
};
|
||||
Rewriter.prototype.rewriteClosingParens = function() {
|
||||
var debt, key, stack;
|
||||
stack = [];
|
||||
debt = {};
|
||||
for (key in INVERSES) {
|
||||
debt[key] = 0;
|
||||
}
|
||||
return this.scanTokens(function(token, i, tokens) {
|
||||
var inv, match, mtag, oppos, tag, val, _ref;
|
||||
if (_ref = (tag = token[0]), __indexOf.call(EXPRESSION_START, _ref) >= 0) {
|
||||
stack.push(token);
|
||||
return 1;
|
||||
}
|
||||
if (__indexOf.call(EXPRESSION_END, tag) < 0) {
|
||||
return 1;
|
||||
}
|
||||
if (debt[inv = INVERSES[tag]] > 0) {
|
||||
debt[inv] -= 1;
|
||||
tokens.splice(i, 1);
|
||||
return 0;
|
||||
}
|
||||
match = stack.pop();
|
||||
mtag = match[0];
|
||||
oppos = INVERSES[mtag];
|
||||
if (tag === oppos) {
|
||||
return 1;
|
||||
}
|
||||
debt[mtag] += 1;
|
||||
val = [oppos, mtag === 'INDENT' ? match[1] : oppos];
|
||||
if (this.tag(i + 2) === mtag) {
|
||||
tokens.splice(i + 3, 0, val);
|
||||
stack.push(match);
|
||||
} else {
|
||||
tokens.splice(i, 0, val);
|
||||
}
|
||||
return 1;
|
||||
});
|
||||
};
|
||||
Rewriter.prototype.indentation = function(token) {
|
||||
return [['INDENT', 2, token[2]], ['OUTDENT', 2, token[2]]];
|
||||
};
|
||||
Rewriter.prototype.tag = function(i) {
|
||||
var _ref;
|
||||
return (_ref = this.tokens[i]) != null ? _ref[0] : void 0;
|
||||
};
|
||||
return Rewriter;
|
||||
})();
|
||||
BALANCED_PAIRS = [['(', ')'], ['[', ']'], ['{', '}'], ['INDENT', 'OUTDENT'], ['CALL_START', 'CALL_END'], ['PARAM_START', 'PARAM_END'], ['INDEX_START', 'INDEX_END']];
|
||||
INVERSES = {};
|
||||
EXPRESSION_START = [];
|
||||
EXPRESSION_END = [];
|
||||
for (_i = 0, _len = BALANCED_PAIRS.length; _i < _len; _i++) {
|
||||
_ref = BALANCED_PAIRS[_i], left = _ref[0], rite = _ref[1];
|
||||
EXPRESSION_START.push(INVERSES[rite] = left);
|
||||
EXPRESSION_END.push(INVERSES[left] = rite);
|
||||
}
|
||||
EXPRESSION_CLOSE = ['CATCH', 'WHEN', 'ELSE', 'FINALLY'].concat(EXPRESSION_END);
|
||||
IMPLICIT_FUNC = ['IDENTIFIER', 'SUPER', ')', 'CALL_END', ']', 'INDEX_END', '@', 'THIS'];
|
||||
IMPLICIT_CALL = ['IDENTIFIER', 'NUMBER', 'STRING', 'JS', 'REGEX', 'NEW', 'PARAM_START', 'CLASS', 'IF', 'TRY', 'SWITCH', 'THIS', 'BOOL', 'UNARY', 'SUPER', '@', '->', '=>', '[', '(', '{', '--', '++'];
|
||||
IMPLICIT_UNSPACED_CALL = ['+', '-'];
|
||||
IMPLICIT_BLOCK = ['->', '=>', '{', '[', ','];
|
||||
IMPLICIT_END = ['POST_IF', 'FOR', 'WHILE', 'UNTIL', 'WHEN', 'BY', 'LOOP', 'TERMINATOR'];
|
||||
SINGLE_LINERS = ['ELSE', '->', '=>', 'TRY', 'FINALLY', 'THEN'];
|
||||
SINGLE_CLOSERS = ['TERMINATOR', 'CATCH', 'FINALLY', 'ELSE', 'OUTDENT', 'LEADING_WHEN'];
|
||||
LINEBREAKS = ['TERMINATOR', 'INDENT', 'OUTDENT'];
|
||||
}).call(this);
|
|
@@ -1,120 +0,0 @@
|
|||
(function() {
|
||||
var Scope, extend, last, _ref;
|
||||
_ref = require('./helpers'), extend = _ref.extend, last = _ref.last;
|
||||
exports.Scope = Scope = (function() {
|
||||
Scope.root = null;
|
||||
function Scope(parent, expressions, method) {
|
||||
this.parent = parent;
|
||||
this.expressions = expressions;
|
||||
this.method = method;
|
||||
this.variables = [
|
||||
{
|
||||
name: 'arguments',
|
||||
type: 'arguments'
|
||||
}
|
||||
];
|
||||
this.positions = {};
|
||||
if (!this.parent) {
|
||||
Scope.root = this;
|
||||
}
|
||||
}
|
||||
Scope.prototype.add = function(name, type, immediate) {
|
||||
var pos;
|
||||
if (this.shared && !immediate) {
|
||||
return this.parent.add(name, type, immediate);
|
||||
}
|
||||
if (typeof (pos = this.positions[name]) === 'number') {
|
||||
return this.variables[pos].type = type;
|
||||
} else {
|
||||
return this.positions[name] = this.variables.push({
|
||||
name: name,
|
||||
type: type
|
||||
}) - 1;
|
||||
}
|
||||
};
|
||||
Scope.prototype.find = function(name, options) {
|
||||
if (this.check(name, options)) {
|
||||
return true;
|
||||
}
|
||||
this.add(name, 'var');
|
||||
return false;
|
||||
};
|
||||
Scope.prototype.parameter = function(name) {
|
||||
if (this.shared && this.parent.check(name, true)) {
|
||||
return;
|
||||
}
|
||||
return this.add(name, 'param');
|
||||
};
|
||||
Scope.prototype.check = function(name, immediate) {
|
||||
var found, _ref2;
|
||||
found = !!this.type(name);
|
||||
if (found || immediate) {
|
||||
return found;
|
||||
}
|
||||
return !!((_ref2 = this.parent) != null ? _ref2.check(name) : void 0);
|
||||
};
|
||||
Scope.prototype.temporary = function(name, index) {
|
||||
if (name.length > 1) {
|
||||
return '_' + name + (index > 1 ? index : '');
|
||||
} else {
|
||||
return '_' + (index + parseInt(name, 36)).toString(36).replace(/\d/g, 'a');
|
||||
}
|
||||
};
|
||||
Scope.prototype.type = function(name) {
|
||||
var v, _i, _len, _ref2;
|
||||
_ref2 = this.variables;
|
||||
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
|
||||
v = _ref2[_i];
|
||||
if (v.name === name) {
|
||||
return v.type;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
Scope.prototype.freeVariable = function(type) {
|
||||
var index, temp;
|
||||
index = 0;
|
||||
while (this.check((temp = this.temporary(type, index)))) {
|
||||
index++;
|
||||
}
|
||||
this.add(temp, 'var', true);
|
||||
return temp;
|
||||
};
|
||||
Scope.prototype.assign = function(name, value) {
|
||||
this.add(name, {
|
||||
value: value,
|
||||
assigned: true
|
||||
});
|
||||
return this.hasAssignments = true;
|
||||
};
|
||||
Scope.prototype.hasDeclarations = function() {
|
||||
return !!this.declaredVariables().length;
|
||||
};
|
||||
Scope.prototype.declaredVariables = function() {
|
||||
var realVars, tempVars, v, _i, _len, _ref2;
|
||||
realVars = [];
|
||||
tempVars = [];
|
||||
_ref2 = this.variables;
|
||||
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
|
||||
v = _ref2[_i];
|
||||
if (v.type === 'var') {
|
||||
(v.name.charAt(0) === '_' ? tempVars : realVars).push(v.name);
|
||||
}
|
||||
}
|
||||
return realVars.sort().concat(tempVars.sort());
|
||||
};
|
||||
Scope.prototype.assignedVariables = function() {
|
||||
var v, _i, _len, _ref2, _results;
|
||||
_ref2 = this.variables;
|
||||
_results = [];
|
||||
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
|
||||
v = _ref2[_i];
|
||||
if (v.type.assigned) {
|
||||
_results.push("" + v.name + " = " + v.type.value);
|
||||
}
|
||||
}
|
||||
return _results;
|
||||
};
|
||||
return Scope;
|
||||
})();
|
||||
}).call(this);
|
|
@@ -1,27 +0,0 @@
|
|||
{
|
||||
"name": "coffee-script",
|
||||
"description": "Unfancy JavaScript",
|
||||
"keywords": ["javascript", "language", "coffeescript", "compiler"],
|
||||
"author": "Jeremy Ashkenas",
|
||||
"version": "1.1.2",
|
||||
"licenses": [{
|
||||
"type": "MIT",
|
||||
"url": "http://github.com/jashkenas/coffee-script/raw/master/LICENSE"
|
||||
}],
|
||||
"engines": {
|
||||
"node": ">=0.2.5"
|
||||
},
|
||||
"directories" : {
|
||||
"lib" : "./lib"
|
||||
},
|
||||
"main" : "./lib/coffee-script",
|
||||
"bin": {
|
||||
"coffee": "./bin/coffee",
|
||||
"cake": "./bin/cake"
|
||||
},
|
||||
"homepage": "http://coffeescript.org",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/jashkenas/coffee-script.git"
|
||||
}
|
||||
}
|
|
@@ -1 +0,0 @@
|
|||
.DS_Store
|
|
@@ -1,14 +0,0 @@
|
|||
# Changelog: HTTP Status
|
||||
|
||||
## Version 0.1.0 – April 17, 2011
|
||||
|
||||
- Added reference links to HTTP specification
|
||||
- Fixed naming convention for constants from `PascalCase` to `ALL_CAPS`
|
||||
- Converted status codes from string to number
|
||||
- Updated samples
|
||||
- Updated tests
|
||||
- Added this `CHANGELOG`
|
||||
|
||||
## Version 0.0.1 – March 25, 2011
|
||||
|
||||
- Initial release.
|
|
@@ -1,41 +0,0 @@
|
|||
# HTTP Status
|
||||
|
||||
Utility to interact with HTTP status codes.
|
||||
|
||||
## Usage
|
||||
|
||||
### API sample
|
||||
|
||||
var HTTPStatus = require('http-status');
|
||||
|
||||
// Print "Internal Server Error"
|
||||
console.log(HTTPStatus[500]);
|
||||
|
||||
// Print 500
|
||||
console.log(HTTPStatus.INTERNAL_SERVER_ERROR);
|
||||
|
||||
### Express sample
|
||||
|
||||
var express = require('express'),
|
||||
redis = require('redis'),
|
||||
HTTPStatus = require('http-status');
|
||||
|
||||
var app = express.createServer();
|
||||
|
||||
app.get('/', function (req, res) {
|
||||
var client = redis.createClient();
|
||||
client.ping(function (err, msg) {
|
||||
if (err) {
|
||||
return res.send(HTTPStatus.INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
res.send(msg, HTTPStatus.OK);
|
||||
});
|
||||
});
|
||||
|
||||
app.listen(3000);
|
||||
|
||||
Contributors
|
||||
------------
|
||||
|
||||
* David Worms : <https://github.com/wdavidw>
|
||||
* Daniel Gasienica : <https://github.com/gasi>
|
|
@@ -1,82 +0,0 @@
|
|||
module.exports = {
|
||||
100: 'Continue',
|
||||
101: 'Switching Protocols',
|
||||
200: 'OK',
|
||||
201: 'Created',
|
||||
202: 'Accepted',
|
||||
203: 'Non-Authoritative Information',
|
||||
204: 'No Content',
|
||||
205: 'Reset Content',
|
||||
206: 'Partial Content',
|
||||
300: 'Multiple Choices',
|
||||
301: 'Moved Permanently',
|
||||
302: 'Found',
|
||||
303: 'See Other',
|
||||
304: 'Not Modified',
|
||||
305: 'Use Proxy',
|
||||
307: 'Temporary Redirect',
|
||||
400: 'Bad Request',
|
||||
401: 'Unauthorized',
|
||||
402: 'Payment Required',
|
||||
403: 'Forbidden',
|
||||
404: 'Not Found',
|
||||
405: 'Method Not Allowed',
|
||||
406: 'Not Acceptable',
|
||||
407: 'Proxy Authentication Required',
|
||||
408: 'Request Time-out',
|
||||
409: 'Conflict',
|
||||
410: 'Gone',
|
||||
411: 'Length Required',
|
||||
412: 'Precondition Failed',
|
||||
413: 'Request Entity Too Large',
|
||||
414: 'Request-URI Too Large',
|
||||
415: 'Unsupported Media Type',
|
||||
416: 'Requested range not satisfiable',
|
||||
417: 'Expectation Failed',
|
||||
500: 'Internal Server Error',
|
||||
501: 'Not Implemented',
|
||||
502: 'Bad Gateway',
|
||||
503: 'Service Unavailable',
|
||||
504: 'Gateway Time-out',
|
||||
505: 'HTTP Version not supported',
|
||||
CONTINUE: 100,
|
||||
SWITCHING_PROTOCOLS: 101,
|
||||
OK: 200,
|
||||
CREATED: 201,
|
||||
ACCEPTED: 202,
|
||||
NON_AUTHORITATIVE_INFORMATION: 203,
|
||||
NO_CONTENT: 204,
|
||||
RESET_CONTENT: 205,
|
||||
PARTIAL_CONTENT: 206,
|
||||
MULTITPLE_CHOICES: 300,
|
||||
MOVED_PERMAMENTLY: 301,
|
||||
FOUND: 302,
|
||||
SEE_OTHER: 303,
|
||||
NOT_MODIFIED: 304,
|
||||
USE_PROXY: 305,
|
||||
TEMPORARY_REDIRECT: 307,
|
||||
BAD_REQUEST: 400,
|
||||
UNAUTHORIZED: 401,
|
||||
PAYMENT_REQUIRED: 402,
|
||||
FORBIDDEN: 403,
|
||||
NOT_FOUND: 404,
|
||||
METHOD_NOT_ALLOWED: 405,
|
||||
NOT_ACCEPTABLE: 406,
|
||||
PROXY_AUTHENTICATION_REQUIRED: 407,
|
||||
REQUEST_TIMEOUT: 408,
|
||||
CONFLICT: 409,
|
||||
GONE: 410,
|
||||
LENGTH_REQUIRED: 411,
|
||||
PRECONDITION_FAILED: 412,
|
||||
REQUEST_ENTITY_TOO_LARGE: 413,
|
||||
REQUEST_URI_TOO_LONG: 414,
|
||||
UNSUPPORTED_MEDIA_TYPE: 415,
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE: 416,
|
||||
EXPECTATION_FAILED: 417,
|
||||
INTERNAL_SERVER_ERROR: 500,
|
||||
NOT_IMPLEMENTED: 501,
|
||||
BAD_GATEWAY: 502,
|
||||
SERVICE_UNAVAILABLE: 503,
|
||||
GATEWAY_TIMEOUT: 504,
|
||||
HTTP_VERSION_NOT_SUPPORTED: 505
|
||||
};
|
|
@@ -1,9 +0,0 @@
|
|||
{ "name": "http-status"
|
||||
, "version": "0.1.0"
|
||||
, "description": "Interact with HTTP status code"
|
||||
, "author": "David Worms <david@adaltas.com>"
|
||||
, "contributors": [{"name": "Daniel Gasienica", "email": "daniel@gasienica.ch"}]
|
||||
, "devDependencies": { "coffee-script": "1.x" }
|
||||
, "main": "./lib/index"
|
||||
, "engines": { "node": ">= 0.1.90" }
|
||||
}
|
|
@@ -1,7 +0,0 @@
|
|||
var HTTPStatus = require('http-status');
|
||||
|
||||
// Print "Internal Server Error"
|
||||
console.log(HTTPStatus[500]);
|
||||
|
||||
// Print 500
|
||||
console.log(HTTPStatus.INTERNAL_SERVER_ERROR);
|
|
@@ -1,17 +0,0 @@
|
|||
var express = require('express'),
|
||||
redis = require('redis'),
|
||||
HTTPStatus = require('http-status');
|
||||
|
||||
var app = express.createServer();
|
||||
|
||||
app.get('/', function (req, res) {
|
||||
var client = redis.createClient();
|
||||
client.ping(function (err, msg) {
|
||||
if (err) {
|
||||
return res.send(HTTPStatus.INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
res.send(msg, HTTPStatus.OK);
|
||||
});
|
||||
});
|
||||
|
||||
app.listen(3000);
|
|
@@ -1,115 +0,0 @@
|
|||
# Reference:
|
||||
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
|
||||
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec6.html#sec6.1.1
|
||||
|
||||
module.exports =
|
||||
# Informational 1xx
|
||||
# Request received, continuing process
|
||||
100: 'Continue'
|
||||
101: 'Switching Protocols'
|
||||
|
||||
# Successful 2xx
|
||||
# The action was successfully received, understood, and accepted
|
||||
200: 'OK'
|
||||
201: 'Created'
|
||||
202: 'Accepted'
|
||||
203: 'Non-Authoritative Information'
|
||||
204: 'No Content'
|
||||
205: 'Reset Content'
|
||||
206: 'Partial Content'
|
||||
|
||||
# Redirection 3xx
|
||||
# Further action must be taken in order to complete the request
|
||||
300: 'Multiple Choices'
|
||||
301: 'Moved Permanently'
|
||||
302: 'Found'
|
||||
303: 'See Other'
|
||||
304: 'Not Modified'
|
||||
305: 'Use Proxy'
|
||||
307: 'Temporary Redirect'
|
||||
|
||||
# Client Error 4xx
|
||||
# The request contains bad syntax or cannot be fulfilled
|
||||
400: 'Bad Request'
|
||||
401: 'Unauthorized'
|
||||
402: 'Payment Required'
|
||||
403: 'Forbidden'
|
||||
404: 'Not Found'
|
||||
405: 'Method Not Allowed'
|
||||
406: 'Not Acceptable'
|
||||
407: 'Proxy Authentication Required'
|
||||
408: 'Request Time-out'
|
||||
409: 'Conflict'
|
||||
410: 'Gone'
|
||||
411: 'Length Required'
|
||||
412: 'Precondition Failed'
|
||||
413: 'Request Entity Too Large'
|
||||
414: 'Request-URI Too Large'
|
||||
415: 'Unsupported Media Type'
|
||||
416: 'Requested range not satisfiable'
|
||||
417: 'Expectation Failed'
|
||||
|
||||
# Server Error 5xx
|
||||
# The server failed to fulfill an apparently valid request
|
||||
500: 'Internal Server Error'
|
||||
501: 'Not Implemented'
|
||||
502: 'Bad Gateway'
|
||||
503: 'Service Unavailable'
|
||||
504: 'Gateway Time-out'
|
||||
505: 'HTTP Version not supported'
|
||||
|
||||
# Informational 1xx
|
||||
# Request received, continuing process
|
||||
CONTINUE: 100
|
||||
SWITCHING_PROTOCOLS: 101
|
||||
|
||||
# Successful 2xx
|
||||
# The action was successfully received, understood, and accepted
|
||||
OK: 200
|
||||
CREATED: 201
|
||||
ACCEPTED: 202
|
||||
NON_AUTHORITATIVE_INFORMATION: 203
|
||||
NO_CONTENT: 204
|
||||
RESET_CONTENT: 205
|
||||
PARTIAL_CONTENT: 206
|
||||
|
||||
# Redirection 3xx
|
||||
# Further action must be taken in order to complete the request
|
||||
MULTITPLE_CHOICES: 300
|
||||
MOVED_PERMAMENTLY: 301
|
||||
FOUND: 302
|
||||
SEE_OTHER: 303
|
||||
NOT_MODIFIED: 304
|
||||
USE_PROXY: 305
|
||||
# Unused: 306 (reserved)
|
||||
TEMPORARY_REDIRECT: 307
|
||||
|
||||
# Client Error 4xx
|
||||
# The request contains bad syntax or cannot be fulfilled
|
||||
BAD_REQUEST: 400
|
||||
UNAUTHORIZED: 401
|
||||
PAYMENT_REQUIRED: 402
|
||||
FORBIDDEN: 403
|
||||
NOT_FOUND: 404
|
||||
METHOD_NOT_ALLOWED: 405
|
||||
NOT_ACCEPTABLE: 406
|
||||
PROXY_AUTHENTICATION_REQUIRED: 407
|
||||
REQUEST_TIMEOUT: 408
|
||||
CONFLICT: 409
|
||||
GONE: 410
|
||||
LENGTH_REQUIRED: 411
|
||||
PRECONDITION_FAILED: 412
|
||||
REQUEST_ENTITY_TOO_LARGE: 413
|
||||
REQUEST_URI_TOO_LONG: 414
|
||||
UNSUPPORTED_MEDIA_TYPE: 415
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE: 416
|
||||
EXPECTATION_FAILED: 417
|
||||
|
||||
# Server Error 5xx
|
||||
# The server failed to fulfill an apparently valid request
|
||||
INTERNAL_SERVER_ERROR: 500
|
||||
NOT_IMPLEMENTED: 501
|
||||
BAD_GATEWAY: 502
|
||||
SERVICE_UNAVAILABLE: 503
|
||||
GATEWAY_TIMEOUT: 504
|
||||
HTTP_VERSION_NOT_SUPPORTED: 505
|
|
@@ -1,9 +0,0 @@
|
|||
var assert = require('assert'),
|
||||
HTTPStatus = require('../lib/index');
|
||||
|
||||
module.exports = {
|
||||
'Test HTTP Status Code': function () {
|
||||
assert.eql(200, HTTPStatus.OK);
|
||||
assert.eql('OK', HTTPStatus[200]);
|
||||
}
|
||||
};
|
|
@@ -1,55 +0,0 @@
|
|||
Apache License
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
|
||||
You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
|
||||
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
|
||||
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
|
@@ -1,191 +0,0 @@
|
|||
# Request -- Simplified HTTP request method
|
||||
|
||||
## Install
|
||||
|
||||
<pre>
|
||||
npm install request
|
||||
</pre>
|
||||
|
||||
Or from source:
|
||||
|
||||
<pre>
|
||||
git clone git://github.com/mikeal/request.git
|
||||
cd request
|
||||
npm link
|
||||
</pre>
|
||||
|
||||
## Super simple to use
|
||||
|
||||
Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
|
||||
|
||||
```javascript
|
||||
var request = require('request');
|
||||
request('http://www.google.com', function (error, response, body) {
|
||||
if (!error && response.statusCode == 200) {
|
||||
console.log(body) // Print the google web page.
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
## Streaming
|
||||
|
||||
You can stream any response to a file stream.
|
||||
|
||||
```javascript
|
||||
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
|
||||
```
|
||||
|
||||
You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.
|
||||
|
||||
```javascript
|
||||
fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
|
||||
```
|
||||
|
||||
Request can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.
|
||||
|
||||
```javascript
|
||||
request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
|
||||
```
|
||||
|
||||
Now let's get fancy.
|
||||
|
||||
```javascript
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
if (req.method === 'PUT') {
|
||||
req.pipe(request.put('http://mysite.com/doodle.png'))
|
||||
} else if (req.method === 'GET' || req.method === 'HEAD') {
|
||||
request.get('http://mysite.com/doodle.png').pipe(resp)
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
You can also pipe() from an http.ServerRequest instance and to an http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data, which means that, if you don't really care about security, you can do:
|
||||
|
||||
```javascript
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
var x = request('http://mysite.com/doodle.png')
|
||||
req.pipe(x)
|
||||
x.pipe(resp)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
And since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)
|
||||
|
||||
```javascript
|
||||
req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
|
||||
```
|
||||
|
||||
Also, none of this new functionality conflicts with request's previous features; it just expands them.
|
||||
|
||||
```javascript
|
||||
var r = request.defaults({'proxy':'http://localproxy.com'})
|
||||
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
r.get('http://google.com/doodle.png').pipe(resp)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
You can still use intermediate proxies; the requests will still follow HTTP forwards, etc.
|
||||
|
||||
### request(options, callback)
|
||||
|
||||
The first argument can be either a url or an options object. The only required option is `uri`; all others are optional.
|
||||
|
||||
* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()
|
||||
* `method` - http method, defaults to GET
|
||||
* `headers` - http headers, defaults to {}
|
||||
* `body` - entity body for POST and PUT requests. Must be buffer or string.
|
||||
* `json` - sets `body` to a JSON representation of the value and adds a `Content-type: application/json` header.
|
||||
* `multipart` - (experimental) array of objects, each with its own headers and `body` attribute. Sends a `multipart/related` request. See example below.
|
||||
* `followRedirect` - follow HTTP 3xx responses as redirects. defaults to true.
|
||||
* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.
|
||||
* `onResponse` - If true, the callback will be fired on the "response" event instead of "end". If a function, it will be called on "response" and will not affect the regular semantics of the main callback on "end".
|
||||
* `encoding` - Encoding to be used on response.setEncoding when buffering the response data.
|
||||
* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.
|
||||
* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.
|
||||
* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request
|
||||
* `proxy` - An HTTP proxy to be used. Supports proxy auth with Basic Auth the same way it's supported with the `url` parameter, by embedding the auth info in the uri.
|
||||
|
||||
The callback gets 3 arguments. The first is an error, when applicable (usually from the http.Client option, not the http.ClientRequest object). The second is an http.ClientResponse object. The third is the response body buffer.
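For illustration, here is a minimal sketch of the options-object form together with the three callback arguments; the URL, header, and timeout values are hypothetical:

```javascript
var request = require('request');

// Options-object form: only `uri` is required, everything else is optional.
request(
  { uri: 'http://example.com/resource.json'   // hypothetical URL
  , method: 'GET'
  , headers: { 'accept': 'application/json' }
  , timeout: 5000                              // abort if no response within 5 seconds
  }
, function (error, response, body) {
    if (error) return console.error(error)     // error, when applicable
    console.log(response.statusCode)           // from the http.ClientResponse object
    console.log(body)                          // the response body
  }
)
```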
|
||||
|
||||
## Convenience methods
|
||||
|
||||
There are also shorthand methods for the different HTTP methods and some other conveniences.
|
||||
|
||||
### request.defaults(options)
|
||||
|
||||
This method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.
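For example, a sketch of a wrapper whose requests all share the same defaults (the header and URL below are made up for illustration):

```javascript
var request = require('request');

// Every call made through `api` carries these options unless overridden per request.
var api = request.defaults({ headers: { 'x-api-key': 'my-key' }, timeout: 5000 });

api.get('http://example.com/ping', function (error, response, body) {
  if (!error && response.statusCode == 200) {
    console.log(body)
  }
})
```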
|
||||
|
||||
### request.put
|
||||
|
||||
Same as request() but defaults to `method: "PUT"`.
|
||||
|
||||
```javascript
|
||||
request.put(url)
|
||||
```
|
||||
|
||||
### request.post
|
||||
|
||||
Same as request() but defaults to `method: "POST"`.
|
||||
|
||||
```javascript
|
||||
request.post(url)
|
||||
```
|
||||
|
||||
### request.head
|
||||
|
||||
Same as request() but defaults to `method: "HEAD"`.
|
||||
|
||||
```javascript
|
||||
request.head(url)
|
||||
```
|
||||
|
||||
### request.del
|
||||
|
||||
Same as request() but defaults to `method: "DELETE"`.
|
||||
|
||||
```javascript
|
||||
request.del(url)
|
||||
```
|
||||
|
||||
### request.get
|
||||
|
||||
Alias to normal request method for uniformity.
|
||||
|
||||
```javascript
|
||||
request.get(url)
|
||||
```
|
||||
|
||||
|
||||
## Examples:
|
||||
|
||||
```javascript
|
||||
var request = require('request')
|
||||
, rand = Math.floor(Math.random()*100000000).toString()
|
||||
;
|
||||
request(
|
||||
{ method: 'PUT'
|
||||
, uri: 'http://mikeal.couchone.com/testjs/' + rand
|
||||
, multipart:
|
||||
[ { 'content-type': 'application/json'
|
||||
, body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
|
||||
}
|
||||
, { body: 'I am an attachment' }
|
||||
]
|
||||
}
|
||||
, function (error, response, body) {
|
||||
if(response.statusCode == 201){
|
||||
console.log('document saved as: http://mikeal.couchone.com/testjs/'+ rand)
|
||||
} else {
|
||||
console.log('error: '+ response.statusCode)
|
||||
console.log(body)
|
||||
}
|
||||
}
|
||||
)
|
||||
```
|
|
@@ -1,150 +0,0 @@
|
|||
|
||||
var cookie_str_splitter=/[:](?=\s*[a-zA-Z0-9_\-]+\s*[=])/g
|
||||
|
||||
function stringify (cookie) {
|
||||
var str=[cookie.name+"="+cookie.value];
|
||||
if(cookie.expiration_date !== Infinity) {
|
||||
str.push("expires="+(new Date(cookie.expiration_date)).toGMTString());
|
||||
}
|
||||
if(cookie.domain) {
|
||||
str.push("domain="+cookie.domain);
|
||||
}
|
||||
if(cookie.path) {
|
||||
str.push("path="+cookie.path);
|
||||
}
|
||||
if(cookie.secure) {
|
||||
str.push("secure");
|
||||
}
|
||||
if(cookie.noscript) {
|
||||
str.push("httponly");
|
||||
}
|
||||
return str.join("; ");
|
||||
}
|
||||
|
||||
function Jar () {
|
||||
this.cookies = []
|
||||
}
|
||||
Jar.prototype.setCookies = function (cookieString) {
|
||||
|
||||
}
|
||||
Jar.prototype.getHeader = function (host, path) {
|
||||
|
||||
}
|
||||
Jar.prototype.getCookies = function (access_info) {
|
||||
var matches=[];
|
||||
for(var cookie_name in cookies) {
|
||||
var cookie=this.getCookie(cookie_name,access_info);
|
||||
if (cookie) {
|
||||
matches.push(cookie);
|
||||
}
|
||||
}
|
||||
matches.toString=function toString(){return matches.join(":");}
|
||||
return matches;
|
||||
}
|
||||
|
||||
Jar.prototype.getCookie = function (host, path) {
|
||||
var cookies_list = self.cookies[cookie_name];
|
||||
for(var i=0;i<cookies_list.length;i++) {
|
||||
var cookie = cookies_list[i];
|
||||
if(cookie.expiration_date <= Date.now()) {
|
||||
if(cookies_list.length===0) {
|
||||
delete cookies[cookie.name]
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if(cookie.matches(access_info)) {
|
||||
return cookie;
|
||||
}
|
||||
}
|
||||
}
|
||||
Jar.prototype.setCookie = function (){}
|
||||
Jar.prototype.parseCookie = function (str) {
|
||||
var cookies = str.split(cookie_str_splitter)
|
||||
, successful = []
|
||||
, self = this
|
||||
;
|
||||
cookies.forEach(function (cookie) {
|
||||
self.parseCookie(cookie);
|
||||
})
|
||||
}
|
||||
|
||||
Jar.prototype.parseCookie = function (str) {
|
||||
var parts = str.split(";")
|
||||
, pair = parts[0].match(/([^=]+)=((?:.|\n)*)/)
|
||||
, key = pair[1]
|
||||
, value = pair[2]
|
||||
, cookie =
|
||||
{ name: null
|
||||
, value: value
|
||||
, expiration_date: Infinity
|
||||
, path: '/'
|
||||
, domain: null
|
||||
, secure: false
|
||||
, noscript: false
|
||||
}
|
||||
;
|
||||
|
||||
cookie.name = key;
|
||||
cookie.value = value;
|
||||
|
||||
for(var i=1;i<parts.length;i++) {
|
||||
var pair = parts[i].match(/([^=]+)=((?:.|\n)*)/)
|
||||
, key = pair[1].trim().toLowerCase()
|
||||
, value = pair[2]
|
||||
;
|
||||
switch(key) {
|
||||
case "httponly":
|
||||
cookie.noscript = true;
|
||||
break;
|
||||
case "expires":
|
||||
cookie.expiration_date = value
|
||||
? Number(Date.parse(value))
|
||||
: Infinity;
|
||||
break;
|
||||
case "path":
|
||||
cookie.path = value
|
||||
? value.trim()
|
||||
: "";
|
||||
break;
|
||||
case "domain":
|
||||
cookie.domain = value
|
||||
? value.trim()
|
||||
: "";
|
||||
break;
|
||||
case "secure":
|
||||
cookie.secure = true;
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if(cookie.name in this.cookies) {
|
||||
var cookies_list = this.cookies[cookie.name];
|
||||
for(var i=0;i<this.cookies_list.length;i++) {
|
||||
var collidable_cookie = cookies_list[i];
|
||||
if(collidable_cookie.collidesWith(cookie)) {
|
||||
if(remove) {
|
||||
cookies_list.splice(i,1);
|
||||
if(cookies_list.length===0) {
|
||||
delete this.cookies[cookie.name]
|
||||
}
|
||||
return false;
|
||||
}
|
||||
else {
|
||||
return cookies_list[i]=cookie;
|
||||
}
|
||||
}
|
||||
}
|
||||
if(remove) {
|
||||
return false;
|
||||
}
|
||||
cookies_list.push(cookie);
|
||||
return cookie;
|
||||
}
|
||||
else if(remove){
|
||||
return false;
|
||||
}
|
||||
else {
|
||||
return this.cookies[cookie.name]=[cookie];
|
||||
}
|
||||
|
||||
}
|
|
@ -1,447 +0,0 @@
|
|||
// Copyright 2010-2011 Mikeal Rogers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
var http = require('http')
|
||||
, https = false
|
||||
, tls = false
|
||||
, url = require('url')
|
||||
, util = require('util')
|
||||
, stream = require('stream')
|
||||
, qs = require('querystring')
|
||||
, mimetypes = require('./mimetypes')
|
||||
;
|
||||
|
||||
try {
|
||||
https = require('https')
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
tls = require('tls')
|
||||
} catch (e) {}
|
||||
|
||||
function toBase64 (str) {
|
||||
return (new Buffer(str || "", "ascii")).toString("base64")
|
||||
}
|
||||
|
||||
// Hacky fix for pre-0.4.4 https
|
||||
if (https && !https.Agent) {
|
||||
https.Agent = function (options) {
|
||||
http.Agent.call(this, options)
|
||||
}
|
||||
util.inherits(https.Agent, http.Agent)
|
||||
https.Agent.prototype._getConnection = function(host, port, cb) {
|
||||
var s = tls.connect(port, host, this.options, function() {
|
||||
// do other checks here?
|
||||
if (cb) cb()
|
||||
})
|
||||
return s
|
||||
}
|
||||
}
|
||||
|
||||
function isReadStream (rs) {
|
||||
if (rs.readable && rs.path && rs.mode) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
function copy (obj) {
|
||||
var o = {}
|
||||
for (var i in obj) o[i] = obj[i]
|
||||
return o
|
||||
}
|
||||
|
||||
var isUrl = /^https?:/
|
||||
|
||||
var globalPool = {}
|
||||
|
||||
function Request (options) {
|
||||
stream.Stream.call(this)
|
||||
this.readable = true
|
||||
this.writable = true
|
||||
|
||||
if (typeof options === 'string') {
|
||||
options = {uri:options}
|
||||
}
|
||||
|
||||
for (i in options) {
|
||||
this[i] = options[i]
|
||||
}
|
||||
if (!this.pool) this.pool = globalPool
|
||||
this.dests = []
|
||||
this.__isRequestRequest = true
|
||||
}
|
||||
util.inherits(Request, stream.Stream)
|
||||
Request.prototype.getAgent = function (host, port) {
|
||||
if (!this.pool[host+':'+port]) {
|
||||
this.pool[host+':'+port] = new this.httpModule.Agent({host:host, port:port})
|
||||
}
|
||||
return this.pool[host+':'+port]
|
||||
}
|
||||
Request.prototype.request = function () {
|
||||
var options = this
|
||||
if (options.url) {
|
||||
// People use this property instead of `uri` all the time, so just support it.
|
||||
options.uri = options.url
|
||||
delete options.url
|
||||
}
|
||||
|
||||
if (!options.uri) {
|
||||
throw new Error("options.uri is a required argument")
|
||||
} else {
|
||||
if (typeof options.uri == "string") options.uri = url.parse(options.uri)
|
||||
}
|
||||
if (options.proxy) {
|
||||
if (typeof options.proxy == 'string') options.proxy = url.parse(options.proxy)
|
||||
}
|
||||
|
||||
options._redirectsFollowed = options._redirectsFollowed || 0
|
||||
options.maxRedirects = (options.maxRedirects !== undefined) ? options.maxRedirects : 10
|
||||
options.followRedirect = (options.followRedirect !== undefined) ? options.followRedirect : true
|
||||
|
||||
options.headers = options.headers ? copy(options.headers) : {}
|
||||
|
||||
var setHost = false
|
||||
if (!options.headers.host) {
|
||||
options.headers.host = options.uri.hostname
|
||||
if (options.uri.port) {
|
||||
if ( !(options.uri.port === 80 && options.uri.protocol === 'http:') &&
|
||||
!(options.uri.port === 443 && options.uri.protocol === 'https:') )
|
||||
options.headers.host += (':'+options.uri.port)
|
||||
}
|
||||
setHost = true
|
||||
}
|
||||
|
||||
if (!options.uri.pathname) {options.uri.pathname = '/'}
|
||||
if (!options.uri.port) {
|
||||
if (options.uri.protocol == 'http:') {options.uri.port = 80}
|
||||
else if (options.uri.protocol == 'https:') {options.uri.port = 443}
|
||||
}
|
||||
|
||||
if (options.bodyStream || options.responseBodyStream) {
|
||||
console.error('options.bodyStream and options.responseBodyStream are deprecated. You should now send the request object to stream.pipe()')
|
||||
this.pipe(options.responseBodyStream || options.bodyStream)
|
||||
}
|
||||
|
||||
if (options.proxy) {
|
||||
options.port = options.proxy.port
|
||||
options.host = options.proxy.hostname
|
||||
} else {
|
||||
options.port = options.uri.port
|
||||
options.host = options.uri.hostname
|
||||
}
|
||||
|
||||
if (options.onResponse === true) {
|
||||
options.onResponse = options.callback
|
||||
delete options.callback
|
||||
}
|
||||
|
||||
var clientErrorHandler = function (error) {
|
||||
if (setHost) delete options.headers.host
|
||||
options.emit('error', error)
|
||||
}
|
||||
if (options.onResponse) options.on('error', function (e) {options.onResponse(e)})
|
||||
if (options.callback) options.on('error', function (e) {options.callback(e)})
|
||||
|
||||
|
||||
if (options.uri.auth && !options.headers.authorization) {
|
||||
options.headers.authorization = "Basic " + toBase64(options.uri.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
|
||||
}
|
||||
if (options.proxy && options.proxy.auth && !options.headers['proxy-authorization']) {
|
||||
options.headers.authorization = "Basic " + toBase64(options.uri.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
|
||||
}
|
||||
|
||||
options.path = options.uri.href.replace(options.uri.protocol + '//' + options.uri.host, '')
|
||||
if (options.path.length === 0) options.path = '/'
|
||||
|
||||
if (options.proxy) options.path = (options.uri.protocol + '//' + options.uri.host + options.path)
|
||||
|
||||
if (options.json) {
|
||||
options.headers['content-type'] = 'application/json'
|
||||
if (typeof options.json === 'boolean') {
|
||||
if (typeof options.body === 'object') options.body = JSON.stringify(options.body)
|
||||
} else {
|
||||
options.body = JSON.stringify(options.json)
|
||||
}
|
||||
|
||||
} else if (options.multipart) {
|
||||
options.body = ''
|
||||
options.headers['content-type'] = 'multipart/related;boundary="frontier"'
|
||||
if (!options.multipart.forEach) throw new Error('Argument error, options.multipart.')
|
||||
|
||||
options.multipart.forEach(function (part) {
|
||||
var body = part.body
|
||||
if(!body) throw Error('Body attribute missing in multipart.')
|
||||
delete part.body
|
||||
options.body += '--frontier\r\n'
|
||||
Object.keys(part).forEach(function(key){
|
||||
options.body += key + ': ' + part[key] + '\r\n'
|
||||
})
|
||||
options.body += '\r\n' + body + '\r\n'
|
||||
})
|
||||
options.body += '--frontier--'
|
||||
}
|
||||
|
||||
if (options.body) {
|
||||
if (!Buffer.isBuffer(options.body)) {
|
||||
options.body = new Buffer(options.body)
|
||||
}
|
||||
if (options.body.length) {
|
||||
options.headers['content-length'] = options.body.length
|
||||
} else {
|
||||
throw new Error('Argument error, options.body.')
|
||||
}
|
||||
}
|
||||
|
||||
options.httpModule =
|
||||
{"http:":http, "https:":https}[options.proxy ? options.proxy.protocol : options.uri.protocol]
|
||||
|
||||
if (!options.httpModule) throw new Error("Invalid protocol")
|
||||
|
||||
if (options.pool === false) {
|
||||
options.agent = false
|
||||
} else {
|
||||
if (options.maxSockets) {
|
||||
// Don't use our pooling if node has the refactored client
|
||||
options.agent = options.httpModule.globalAgent || options.getAgent(options.host, options.port)
|
||||
options.agent.maxSockets = options.maxSockets
|
||||
}
|
||||
if (options.pool.maxSockets) {
|
||||
// Don't use our pooling if node has the refactored client
|
||||
options.agent = options.httpModule.globalAgent || options.getAgent(options.host, options.port)
|
||||
options.agent.maxSockets = options.pool.maxSockets
|
||||
}
|
||||
}
|
||||
|
||||
options.start = function () {
|
||||
options._started = true
|
||||
options.method = options.method || 'GET'
|
||||
|
||||
options.req = options.httpModule.request(options, function (response) {
|
||||
options.response = response
|
||||
response.request = options
|
||||
if (setHost) delete options.headers.host
|
||||
if (options.timeout && options.timeoutTimer) clearTimeout(options.timeoutTimer)
|
||||
|
||||
if (response.statusCode >= 300 &&
|
||||
response.statusCode < 400 &&
|
||||
options.followRedirect &&
|
||||
options.method !== 'PUT' &&
|
||||
options.method !== 'POST' &&
|
||||
response.headers.location) {
|
||||
if (options._redirectsFollowed >= options.maxRedirects) {
|
||||
options.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop."))
|
||||
return
|
||||
}
|
||||
options._redirectsFollowed += 1
|
||||
|
||||
if (!isUrl.test(response.headers.location)) {
|
||||
response.headers.location = url.resolve(options.uri.href, response.headers.location)
|
||||
}
|
||||
options.uri = response.headers.location
|
||||
delete options.req
|
||||
delete options.agent
|
||||
delete options._started
|
||||
if (options.headers) {
|
||||
delete options.headers.host
|
||||
}
|
||||
request(options, options.callback)
|
||||
return // Ignore the rest of the response
|
||||
} else {
|
||||
options._redirectsFollowed = 0
|
||||
// Be a good stream and emit end when the response is finished.
|
||||
// Hack to emit end on close because of a core bug that never fires end
|
||||
response.on('close', function () {
|
||||
if (!options._ended) options.response.emit('end')
|
||||
})
|
||||
|
||||
if (options.encoding) {
|
||||
if (options.dests.length !== 0) {
|
||||
console.error("Ingoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.")
|
||||
} else {
|
||||
response.setEncoding(options.encoding)
|
||||
}
|
||||
}
|
||||
|
||||
options.dests.forEach(function (dest) {
|
||||
if (dest.headers) {
|
||||
dest.headers['content-type'] = response.headers['content-type']
|
||||
if (response.headers['content-length']) {
|
||||
dest.headers['content-length'] = response.headers['content-length']
|
||||
}
|
||||
}
|
||||
if (dest.setHeader) {
|
||||
for (i in response.headers) {
|
||||
dest.setHeader(i, response.headers[i])
|
||||
}
|
||||
dest.statusCode = response.statusCode
|
||||
}
|
||||
if (options.pipefilter) options.pipefilter(response, dest)
|
||||
})
|
||||
|
||||
response.on("data", function (chunk) {options.emit("data", chunk)})
|
||||
response.on("end", function (chunk) {
|
||||
options._ended = true
|
||||
options.emit("end", chunk)
|
||||
})
|
||||
response.on("close", function () {options.emit("close")})
|
||||
|
||||
if (options.onResponse) {
|
||||
options.onResponse(null, response)
|
||||
}
|
||||
if (options.callback) {
|
||||
var buffer = ''
|
||||
options.on("data", function (chunk) {
|
||||
buffer += chunk
|
||||
})
|
||||
options.on("end", function () {
|
||||
response.body = buffer
|
||||
if (options.json) {
|
||||
try {
|
||||
response.body = JSON.parse(response.body)
|
||||
} catch (e) {}
|
||||
}
|
||||
options.callback(null, response, response.body)
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (options.timeout) {
|
||||
options.timeoutTimer = setTimeout(function() {
|
||||
options.req.abort()
|
||||
var e = new Error("ETIMEDOUT")
|
||||
e.code = "ETIMEDOUT"
|
||||
options.emit("error", e)
|
||||
}, options.timeout)
|
||||
}
|
||||
|
||||
options.req.on('error', clientErrorHandler)
|
||||
}
|
||||
|
||||
options.once('pipe', function (src) {
|
||||
if (options.ntick) throw new Error("You cannot pipe to this stream after the first nextTick() after creation of the request stream.")
|
||||
options.src = src
|
||||
if (isReadStream(src)) {
|
||||
if (!options.headers['content-type'] && !options.headers['Content-Type'])
|
||||
options.headers['content-type'] = mimetypes.lookup(src.path.slice(src.path.lastIndexOf('.')+1))
|
||||
} else {
|
||||
if (src.headers) {
|
||||
for (i in src.headers) {
|
||||
if (!options.headers[i]) {
|
||||
options.headers[i] = src.headers[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
if (src.method && !options.method) {
|
||||
options.method = src.method
|
||||
}
|
||||
}
|
||||
|
||||
options.on('pipe', function () {
|
||||
console.error("You have already piped to this stream. Pipeing twice is likely to break the request.")
|
||||
})
|
||||
})
|
||||
|
||||
process.nextTick(function () {
|
||||
if (options.body) {
|
||||
options.write(options.body)
|
||||
options.end()
|
||||
} else if (options.requestBodyStream) {
|
||||
console.warn("options.requestBodyStream is deprecated, please pass the request object to stream.pipe.")
|
||||
options.requestBodyStream.pipe(options)
|
||||
} else if (!options.src) {
|
||||
options.end()
|
||||
}
|
||||
options.ntick = true
|
||||
})
|
||||
}
|
||||
Request.prototype.pipe = function (dest) {
|
||||
if (this.response) throw new Error("You cannot pipe after the response event.")
|
||||
this.dests.push(dest)
|
||||
stream.Stream.prototype.pipe.call(this, dest)
|
||||
return dest
|
||||
}
|
||||
Request.prototype.write = function () {
|
||||
if (!this._started) this.start()
|
||||
if (!this.req) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.req.write.apply(this.req, arguments)
|
||||
}
|
||||
Request.prototype.end = function () {
|
||||
if (!this._started) this.start()
|
||||
if (!this.req) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.req.end.apply(this.req, arguments)
|
||||
}
|
||||
Request.prototype.pause = function () {
|
||||
if (!this.response) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.response.pause.apply(this.response, arguments)
|
||||
}
|
||||
Request.prototype.resume = function () {
|
||||
if (!this.response) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.response.resume.apply(this.response, arguments)
|
||||
}
|
||||
|
||||
function request (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
if (callback) options.callback = callback
|
||||
var r = new Request(options)
|
||||
r.request()
|
||||
return r
|
||||
}
|
||||
|
||||
module.exports = request
|
||||
|
||||
request.defaults = function (options) {
|
||||
var def = function (method) {
|
||||
var d = function (opts, callback) {
|
||||
if (typeof opts === 'string') opts = {uri:opts}
|
||||
for (i in options) {
|
||||
if (opts[i] === undefined) opts[i] = options[i]
|
||||
}
|
||||
return method(opts, callback)
|
||||
}
|
||||
return d
|
||||
}
|
||||
var de = def(request)
|
||||
de.get = def(request.get)
|
||||
de.post = def(request.post)
|
||||
de.put = def(request.put)
|
||||
de.head = def(request.head)
|
||||
de.del = def(request.del)
|
||||
return de
|
||||
}
|
||||
|
||||
request.get = request
|
||||
request.post = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'POST'
|
||||
return request(options, callback)
|
||||
}
|
||||
request.put = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'PUT'
|
||||
return request(options, callback)
|
||||
}
|
||||
request.head = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'HEAD'
|
||||
if (options.body || options.requestBodyStream || options.json || options.multipart) {
|
||||
throw new Error("HTTP HEAD requests MUST NOT include a request body.")
|
||||
}
|
||||
return request(options, callback)
|
||||
}
|
||||
request.del = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'DELETE'
|
||||
return request(options, callback)
|
||||
}
|
|
@ -1,146 +0,0 @@
|
|||
// from http://github.com/felixge/node-paperboy
|
||||
exports.types = {
|
||||
"aiff":"audio/x-aiff",
|
||||
"arj":"application/x-arj-compressed",
|
||||
"asf":"video/x-ms-asf",
|
||||
"asx":"video/x-ms-asx",
|
||||
"au":"audio/ulaw",
|
||||
"avi":"video/x-msvideo",
|
||||
"bcpio":"application/x-bcpio",
|
||||
"ccad":"application/clariscad",
|
||||
"cod":"application/vnd.rim.cod",
|
||||
"com":"application/x-msdos-program",
|
||||
"cpio":"application/x-cpio",
|
||||
"cpt":"application/mac-compactpro",
|
||||
"csh":"application/x-csh",
|
||||
"css":"text/css",
|
||||
"deb":"application/x-debian-package",
|
||||
"dl":"video/dl",
|
||||
"doc":"application/msword",
|
||||
"drw":"application/drafting",
|
||||
"dvi":"application/x-dvi",
|
||||
"dwg":"application/acad",
|
||||
"dxf":"application/dxf",
|
||||
"dxr":"application/x-director",
|
||||
"etx":"text/x-setext",
|
||||
"ez":"application/andrew-inset",
|
||||
"fli":"video/x-fli",
|
||||
"flv":"video/x-flv",
|
||||
"gif":"image/gif",
|
||||
"gl":"video/gl",
|
||||
"gtar":"application/x-gtar",
|
||||
"gz":"application/x-gzip",
|
||||
"hdf":"application/x-hdf",
|
||||
"hqx":"application/mac-binhex40",
|
||||
"html":"text/html",
|
||||
"ice":"x-conference/x-cooltalk",
|
||||
"ico":"image/x-icon",
|
||||
"ief":"image/ief",
|
||||
"igs":"model/iges",
|
||||
"ips":"application/x-ipscript",
|
||||
"ipx":"application/x-ipix",
|
||||
"jad":"text/vnd.sun.j2me.app-descriptor",
|
||||
"jar":"application/java-archive",
|
||||
"jpeg":"image/jpeg",
|
||||
"jpg":"image/jpeg",
|
||||
"js":"text/javascript",
|
||||
"json":"application/json",
|
||||
"latex":"application/x-latex",
|
||||
"lsp":"application/x-lisp",
|
||||
"lzh":"application/octet-stream",
|
||||
"m":"text/plain",
|
||||
"m3u":"audio/x-mpegurl",
|
||||
"man":"application/x-troff-man",
|
||||
"me":"application/x-troff-me",
|
||||
"midi":"audio/midi",
|
||||
"mif":"application/x-mif",
|
||||
"mime":"www/mime",
|
||||
"movie":"video/x-sgi-movie",
|
||||
"mustache":"text/plain",
|
||||
"mp4":"video/mp4",
|
||||
"mpg":"video/mpeg",
|
||||
"mpga":"audio/mpeg",
|
||||
"ms":"application/x-troff-ms",
|
||||
"nc":"application/x-netcdf",
|
||||
"oda":"application/oda",
|
||||
"ogm":"application/ogg",
|
||||
"pbm":"image/x-portable-bitmap",
|
||||
"pdf":"application/pdf",
|
||||
"pgm":"image/x-portable-graymap",
|
||||
"pgn":"application/x-chess-pgn",
|
||||
"pgp":"application/pgp",
|
||||
"pm":"application/x-perl",
|
||||
"png":"image/png",
|
||||
"pnm":"image/x-portable-anymap",
|
||||
"ppm":"image/x-portable-pixmap",
|
||||
"ppz":"application/vnd.ms-powerpoint",
|
||||
"pre":"application/x-freelance",
|
||||
"prt":"application/pro_eng",
|
||||
"ps":"application/postscript",
|
||||
"qt":"video/quicktime",
|
||||
"ra":"audio/x-realaudio",
|
||||
"rar":"application/x-rar-compressed",
|
||||
"ras":"image/x-cmu-raster",
|
||||
"rgb":"image/x-rgb",
|
||||
"rm":"audio/x-pn-realaudio",
|
||||
"rpm":"audio/x-pn-realaudio-plugin",
|
||||
"rtf":"text/rtf",
|
||||
"rtx":"text/richtext",
|
||||
"scm":"application/x-lotusscreencam",
|
||||
"set":"application/set",
|
||||
"sgml":"text/sgml",
|
||||
"sh":"application/x-sh",
|
||||
"shar":"application/x-shar",
|
||||
"silo":"model/mesh",
|
||||
"sit":"application/x-stuffit",
|
||||
"skt":"application/x-koan",
|
||||
"smil":"application/smil",
|
||||
"snd":"audio/basic",
|
||||
"sol":"application/solids",
|
||||
"spl":"application/x-futuresplash",
|
||||
"src":"application/x-wais-source",
|
||||
"stl":"application/SLA",
|
||||
"stp":"application/STEP",
|
||||
"sv4cpio":"application/x-sv4cpio",
|
||||
"sv4crc":"application/x-sv4crc",
|
||||
"svg":"image/svg+xml",
|
||||
"swf":"application/x-shockwave-flash",
|
||||
"tar":"application/x-tar",
|
||||
"tcl":"application/x-tcl",
|
||||
"tex":"application/x-tex",
|
||||
"texinfo":"application/x-texinfo",
|
||||
"tgz":"application/x-tar-gz",
|
||||
"tiff":"image/tiff",
|
||||
"tr":"application/x-troff",
|
||||
"tsi":"audio/TSP-audio",
|
||||
"tsp":"application/dsptype",
|
||||
"tsv":"text/tab-separated-values",
|
||||
"unv":"application/i-deas",
|
||||
"ustar":"application/x-ustar",
|
||||
"vcd":"application/x-cdlink",
|
||||
"vda":"application/vda",
|
||||
"vivo":"video/vnd.vivo",
|
||||
"vrm":"x-world/x-vrml",
|
||||
"wav":"audio/x-wav",
|
||||
"wax":"audio/x-ms-wax",
|
||||
"wma":"audio/x-ms-wma",
|
||||
"wmv":"video/x-ms-wmv",
|
||||
"wmx":"video/x-ms-wmx",
|
||||
"wrl":"model/vrml",
|
||||
"wvx":"video/x-ms-wvx",
|
||||
"xbm":"image/x-xbitmap",
|
||||
"xlw":"application/vnd.ms-excel",
|
||||
"xml":"text/xml",
|
||||
"xpm":"image/x-xpixmap",
|
||||
"xwd":"image/x-xwindowdump",
|
||||
"xyz":"chemical/x-pdb",
|
||||
"zip":"application/zip",
|
||||
};
|
||||
|
||||
exports.lookup = function(ext, defaultType) {
|
||||
defaultType = defaultType || 'application/octet-stream';
|
||||
|
||||
return (ext in exports.types)
|
||||
? exports.types[ext]
|
||||
: defaultType;
|
||||
};
|
|
@ -1,14 +0,0 @@
|
|||
{ "name" : "request"
|
||||
, "description" : "Simplified HTTP request client."
|
||||
, "tags" : ["http", "simple", "util", "utility"]
|
||||
, "version" : "2.1.1"
|
||||
, "author" : "Mikeal Rogers <mikeal.rogers@gmail.com>"
|
||||
, "repository" :
|
||||
{ "type" : "git"
|
||||
, "url" : "http://github.com/mikeal/request.git"
|
||||
}
|
||||
, "bugs" :
|
||||
{ "web" : "http://github.com/mikeal/request/issues" }
|
||||
, "engines" : ["node >= 0.3.6"]
|
||||
, "main" : "./main"
|
||||
}
|
Binary file not shown.
Before Width: | Height: | Size: 38 KiB |
|
@ -1,46 +0,0 @@
|
|||
var http = require('http')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
;
|
||||
|
||||
exports.createServer = function (port) {
|
||||
port = port || 6767
|
||||
var s = http.createServer(function (req, resp) {
|
||||
s.emit(req.url, req, resp);
|
||||
})
|
||||
s.listen(port)
|
||||
s.url = 'http://localhost:'+port
|
||||
return s;
|
||||
}
|
||||
|
||||
exports.createPostStream = function (text) {
|
||||
var postStream = new stream.Stream();
|
||||
postStream.writable = true;
|
||||
postStream.readable = true;
|
||||
setTimeout(function () {postStream.emit('data', new Buffer(text)); postStream.emit('end')}, 0);
|
||||
return postStream;
|
||||
}
|
||||
exports.createPostValidator = function (text) {
|
||||
var l = function (req, resp) {
|
||||
var r = '';
|
||||
req.on('data', function (chunk) {r += chunk})
|
||||
req.on('end', function () {
|
||||
if (r !== text) console.log(r, text);
|
||||
assert.equal(r, text)
|
||||
resp.writeHead(200, {'content-type':'text/plain'})
|
||||
resp.write('OK')
|
||||
resp.end()
|
||||
})
|
||||
}
|
||||
return l;
|
||||
}
|
||||
exports.createGetResponse = function (text, contentType) {
|
||||
var l = function (req, resp) {
|
||||
contentType = contentType || 'text/plain'
|
||||
resp.writeHead(200, {'content-type':contentType})
|
||||
resp.write(text)
|
||||
resp.end()
|
||||
}
|
||||
return l;
|
||||
}
|
|
@ -1,78 +0,0 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
;
|
||||
|
||||
var s = server.createServer();
|
||||
|
||||
var tests =
|
||||
{ testGet :
|
||||
{ resp : server.createGetResponse("TESTING!")
|
||||
, expectBody: "TESTING!"
|
||||
}
|
||||
, testGetJSON :
|
||||
{ resp : server.createGetResponse('{"test":true}', 'application/json')
|
||||
, json : true
|
||||
, expectBody: {"test":true}
|
||||
}
|
||||
, testPutString :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : "PUTTINGDATA"
|
||||
}
|
||||
, testPutBuffer :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : new Buffer("PUTTINGDATA")
|
||||
}
|
||||
, testPutStream :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, requestBodyStream : server.createPostStream("PUTTINGDATA")
|
||||
}
|
||||
, testPutJSON :
|
||||
{ resp : server.createPostValidator(JSON.stringify({foo: 'bar'}))
|
||||
, method: "PUT"
|
||||
, json: {foo: 'bar'}
|
||||
}
|
||||
, testPutMultipart :
|
||||
{ resp: server.createPostValidator(
|
||||
'--frontier\r\n' +
|
||||
'content-type: text/html\r\n' +
|
||||
'\r\n' +
|
||||
'<html><body>Oh hi.</body></html>' +
|
||||
'\r\n--frontier\r\n\r\n' +
|
||||
'Oh hi.' +
|
||||
'\r\n--frontier--'
|
||||
)
|
||||
, method: "PUT"
|
||||
, multipart:
|
||||
[ {'content-type': 'text/html', 'body': '<html><body>Oh hi.</body></html>'}
|
||||
, {'body': 'Oh hi.'}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
var counter = 0;
|
||||
|
||||
for (i in tests) {
|
||||
(function () {
|
||||
var test = tests[i];
|
||||
s.on('/'+i, test.resp);
|
||||
test.uri = s.url + '/' + i;
|
||||
request(test, function (err, resp, body) {
|
||||
if (err) throw err;
|
||||
if (test.expectBody) {
|
||||
assert.deepEqual(test.expectBody, body)
|
||||
}
|
||||
counter = counter - 1;
|
||||
if (counter === 0) {
|
||||
console.log(Object.keys(tests).length+" tests passed.")
|
||||
s.close();
|
||||
}
|
||||
})
|
||||
counter++;
|
||||
})()
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
;
|
||||
|
||||
var local = 'http://localhost:8888/asdf'
|
||||
|
||||
try {
|
||||
request({uri:local, body:{}})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Argument error, options.body.')
|
||||
}
|
||||
|
||||
try {
|
||||
request({uri:local, multipart: 'foo'})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Argument error, options.multipart.')
|
||||
}
|
||||
|
||||
try {
|
||||
request({uri:local, multipart: [{}]})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Body attribute missing in multipart.')
|
||||
}
|
||||
|
||||
console.log("All tests passed.")
|
|
@ -1,136 +0,0 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
, fs = require('fs')
|
||||
, request = require('../main.js')
|
||||
, path = require('path')
|
||||
;
|
||||
|
||||
var s = server.createServer(3453);
|
||||
|
||||
var passes = 0;
|
||||
|
||||
var check = function () {
|
||||
if (passes === 7) {
|
||||
console.log('All tests passed.')
|
||||
setTimeout(function () {
|
||||
process.exit();
|
||||
}, 500)
|
||||
}
|
||||
if (passes > 7) throw new Error('Need to update for more failures')
|
||||
}
|
||||
|
||||
// Test piping to a request object
|
||||
s.once('/push', server.createPostValidator("mydata"));
|
||||
|
||||
var mydata = new stream.Stream();
|
||||
mydata.readable = true
|
||||
|
||||
var r1 = request.put({url:'http://localhost:3453/push'}, function () {
|
||||
passes += 1;
|
||||
check();
|
||||
})
|
||||
mydata.pipe(r1)
|
||||
|
||||
mydata.emit('data', 'mydata');
|
||||
mydata.emit('end');
|
||||
|
||||
|
||||
// Test piping from a request object.
|
||||
s.once('/pull', server.createGetResponse("mypulldata"));
|
||||
|
||||
var mypulldata = new stream.Stream();
|
||||
mypulldata.writable = true
|
||||
|
||||
request({url:'http://localhost:3453/pull'}).pipe(mypulldata)
|
||||
|
||||
var d = '';
|
||||
|
||||
mypulldata.write = function (chunk) {
|
||||
d += chunk;
|
||||
}
|
||||
mypulldata.end = function () {
|
||||
assert.equal(d, 'mypulldata');
|
||||
passes += 1
|
||||
check();
|
||||
};
|
||||
|
||||
|
||||
s.on('/cat', function (req, resp) {
|
||||
if (req.method === "GET") {
|
||||
resp.writeHead(200, {'content-type':'text/plain-test', 'content-length':4});
|
||||
resp.end('asdf')
|
||||
} else if (req.method === "PUT") {
|
||||
assert.equal(req.headers['content-type'], 'text/plain-test');
|
||||
assert.equal(req.headers['content-length'], 4)
|
||||
var validate = '';
|
||||
|
||||
req.on('data', function (chunk) {validate += chunk})
|
||||
req.on('end', function () {
|
||||
resp.writeHead(201);
|
||||
resp.end();
|
||||
assert.equal(validate, 'asdf');
|
||||
passes += 1;
|
||||
check();
|
||||
})
|
||||
}
|
||||
})
|
||||
s.on('/pushjs', function (req, resp) {
|
||||
if (req.method === "PUT") {
|
||||
assert.equal(req.headers['content-type'], 'text/javascript');
|
||||
passes += 1;
|
||||
check();
|
||||
}
|
||||
})
|
||||
s.on('/catresp', function (req, resp) {
|
||||
request.get('http://localhost:3453/cat').pipe(resp)
|
||||
})
|
||||
s.on('/doodle', function (req, resp) {
|
||||
if (req.headers['x-oneline-proxy']) {
|
||||
resp.setHeader('x-oneline-proxy', 'yup')
|
||||
}
|
||||
resp.writeHead('200', {'content-type':'image/png'})
|
||||
fs.createReadStream(path.join(__dirname, 'googledoodle.png')).pipe(resp)
|
||||
})
|
||||
s.on('/onelineproxy', function (req, resp) {
|
||||
var x = request('http://localhost:3453/doodle')
|
||||
req.pipe(x)
|
||||
x.pipe(resp)
|
||||
})
|
||||
|
||||
|
||||
fs.createReadStream(__filename).pipe(request.put('http://localhost:3453/pushjs'))
|
||||
|
||||
request.get('http://localhost:3453/cat').pipe(request.put('http://localhost:3453/cat'))
|
||||
|
||||
request.get('http://localhost:3453/catresp', function (e, resp, body) {
|
||||
assert.equal(resp.headers['content-type'], 'text/plain-test');
|
||||
assert.equal(resp.headers['content-length'], 4)
|
||||
passes += 1
|
||||
check();
|
||||
})
|
||||
|
||||
var doodleWrite = fs.createWriteStream(path.join(__dirname, 'test.png'))
|
||||
|
||||
request.get('http://localhost:3453/doodle').pipe(doodleWrite)
|
||||
|
||||
doodleWrite.on('close', function () {
|
||||
assert.deepEqual(fs.readFileSync(path.join(__dirname, 'googledoodle.png')), fs.readFileSync(path.join(__dirname, 'test.png')))
|
||||
passes += 1
|
||||
check()
|
||||
})
|
||||
|
||||
process.on('exit', function () {
|
||||
fs.unlinkSync(path.join(__dirname, 'test.png'))
|
||||
})
|
||||
|
||||
request.get({uri:'http://localhost:3453/onelineproxy', headers:{'x-oneline-proxy':'nope'}}, function (err, resp, body) {
|
||||
assert.equal(resp.headers['x-oneline-proxy'], 'yup')
|
||||
passes += 1
|
||||
check()
|
||||
})
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
// var http = require('http')
|
||||
// , request = require('../main')
|
||||
// ;
|
||||
//
|
||||
// var s = http.createServer(function (req, resp) {
|
||||
// resp.statusCode = 412
|
||||
// resp.end('hello')
|
||||
// })
|
||||
// s.listen(8000)
|
||||
//
|
||||
// request.put("http://localhost:8000", function (e,_,b){
|
||||
// console.log(e,b)
|
||||
// s.close()
|
||||
// })
|
||||
|
||||
require('../main').put("http://testnano.iriscouch.com:80/test", function (e,_,b){
|
||||
console.log(e,b)
|
||||
})
|
|
@ -1,7 +0,0 @@
|
|||
var request = require("../main");
|
||||
|
||||
request({'uri': 'https://encrypted.google.com/'}, function (err, res, body) {
|
||||
// util.debug(err);
|
||||
// console.dir(res)
|
||||
console.log('asdf')
|
||||
});
|
|
@ -1,84 +0,0 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
;
|
||||
|
||||
var s = server.createServer();
|
||||
var expectedBody = "waited";
|
||||
var remainingTests = 5;
|
||||
|
||||
// Request that waits for 200ms
|
||||
s.on('/timeout', function (req, resp) {
|
||||
setTimeout(function(){
|
||||
resp.writeHead(200, {'content-type':'text/plain'})
|
||||
resp.write(expectedBody)
|
||||
resp.end()
|
||||
}, 200);
|
||||
});
|
||||
|
||||
// Scenario that should timeout
|
||||
var shouldTimeout = {
|
||||
url: s.url + "/timeout",
|
||||
timeout:100
|
||||
}
|
||||
|
||||
request(shouldTimeout, function (err, resp, body) {
|
||||
assert.equal(err, "ETIMEDOUT");
|
||||
checkDone();
|
||||
})
|
||||
|
||||
|
||||
// Scenario that shouldn't timeout
|
||||
var shouldntTimeout = {
|
||||
url: s.url + "/timeout",
|
||||
timeout:300
|
||||
}
|
||||
|
||||
request(shouldntTimeout, function (err, resp, body) {
|
||||
assert.ok(!err);
|
||||
assert.equal(expectedBody, body)
|
||||
checkDone();
|
||||
})
|
||||
|
||||
// Scenario with no timeout set, so shouldn't timeout
|
||||
var noTimeout = {
|
||||
url: s.url + "/timeout"
|
||||
}
|
||||
|
||||
request(noTimeout, function (err, resp, body) {
|
||||
assert.ok(!err);
|
||||
assert.equal(expectedBody, body)
|
||||
checkDone();
|
||||
})
|
||||
|
||||
// Scenario with a negative timeout value, should be treated as zero or the minimum delay
|
||||
var negativeTimeout = {
|
||||
url: s.url + "/timeout",
|
||||
timeout:-1000
|
||||
}
|
||||
|
||||
request(negativeTimeout, function (err, resp, body) {
|
||||
assert.equal(err, "ETIMEDOUT");
|
||||
checkDone();
|
||||
})
|
||||
|
||||
// Scenario with a float timeout value, should be rounded by setTimeout anyway
|
||||
var floatTimeout = {
|
||||
url: s.url + "/timeout",
|
||||
timeout: 100.76
|
||||
}
|
||||
|
||||
request(floatTimeout, function (err, resp, body) {
|
||||
assert.equal(err, "ETIMEDOUT");
|
||||
checkDone();
|
||||
})
|
||||
|
||||
function checkDone() {
|
||||
if(--remainingTests == 0) {
|
||||
s.close();
|
||||
console.log("All tests passed.");
|
||||
}
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
lib/*_.js
|
|
@ -1,363 +0,0 @@
|
|||
|
||||
# streamline/lib/compiler/command
|
||||
|
||||
Streamline command line analyzer / dispatcher
|
||||
|
||||
* `command.run()`
|
||||
runs `node-streamline` command line analyzer / dispatcher
|
||||
|
||||
# streamline/lib/compiler/compile
|
||||
|
||||
Streamline compiler and file loader
|
||||
|
||||
* `script = compile.loadFile(_, path, options)`
|
||||
Loads Javascript file and transforms it if necessary.
|
||||
Returns the transformed source.
|
||||
If `path` is `foo_.js`, the source is transformed and the result
|
||||
is *not* saved to disk.
|
||||
If `path` is `foo.js` and if a `foo_.js` file exists,
|
||||
`foo_.js` is transformed if necessary and saved as `foo.js`.
|
||||
If `path` is `foo.js` and `foo_.js` does not exist, the contents
|
||||
of `foo.js` are returned.
|
||||
`options` is a set of options passed to the transformation engine.
|
||||
If `options.force` is set, `foo_.js` is transformed even if
|
||||
`foo.js` is more recent.
|
||||
* `script = compile.loadFileSync(path, options)`
|
||||
Synchronous version of `compile.loadFile`.
|
||||
Used by `require` logic.
|
||||
* `compile.compile(_, paths, options)`
|
||||
Compiles streamline source files in `paths`.
|
||||
Generates a `foo.js` file for each `foo_.js` file found in `paths`.
|
||||
`paths` may be a list of files or a list of directories which
|
||||
will be traversed recursively.
|
||||
`options` is a set of options for the `transform` operation.
|
||||
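For illustration, a minimal sketch of driving this compiler API from a small build script, assuming only the signatures listed above (the `build_.js` file name and the `lib` directory are made up):

```javascript
// build_.js -- run with `node-streamline build_` (sketch only)
var compile = require('streamline/lib/compiler/compile');

function build(_) {
  // Recursively transform every foo_.js under ./lib into foo.js.
  // `force: true` regenerates foo.js even if it is newer than foo_.js.
  compile.compile(_, ['lib'], { force: true });
}

// A streamlined function can be called like any node function,
// with a callback supplied in place of the `_` parameter.
build(function (err) {
  if (err) throw err;
  console.log('compilation done');
});
```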
|
||||
# streamline/lib/compiler/register
|
||||
|
||||
Streamline `require` handler registration
|
||||
|
||||
* `register.register(options)`
|
||||
Registers `require` handlers for streamline.
|
||||
`options` is a set of default options passed to the `transform` function.
|
||||
|
||||
# streamline/lib/compiler/transform
|
||||
|
||||
Streamline's transformation engine
|
||||
|
||||
* `transformed = transform.transform(source, options)`
|
||||
Transforms streamline source.
|
||||
The following `options` may be specified:
|
||||
* `tryCatch` controls exception handling
|
||||
* `lines` controls line mapping
|
||||
* `callback` alternative identifier if `_` is already used.
|
||||
* `noHelpers` disables generation of helper functions (`__cb`, etc.)
|
||||
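As an illustration, a hedged sketch of calling the engine directly on an in-memory source string (plain node code; the snippet being transformed is the `lineCount` example used elsewhere in this repository, and the options are left at their defaults):

```javascript
// Sketch only: transform a streamlined snippet and print the generated callback code.
var transform = require('streamline/lib/compiler/transform');

var source = "function lineCount(path, _) {\n" +
             "  return require('fs').readFile(path, 'utf8', _).split('\\n').length;\n" +
             "}\n";

// tryCatch, lines, callback and noHelpers are the options described above.
var transformed = transform.transform(source, {});
console.log(transformed); // plain callback-based javascript
```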
|
||||
# streamline/lib/require/client/require
|
||||
|
||||
Client-side require script
|
||||
|
||||
* `id = module.id`
|
||||
the `id` of the current module.
|
||||
* `module = require(id)`
|
||||
_requires_ a module synchronously.
|
||||
`id` _must_ be a string literal.
|
||||
* `module = require.async(id, _)`
|
||||
_requires_ a module asynchronously.
|
||||
`id` may be a variable or an expression.
|
||||
* `main = require.main`
|
||||
returns the main module
|
||||
* `require.main(id)`
|
||||
loads main module from HTML page.
|
||||
|
||||
# streamline/lib/require/server/require
|
||||
|
||||
Server-side require handler
|
||||
|
||||
Handles require requests coming from the client.
|
||||
|
||||
* `dispatcher = require.dispatcher(options)`
|
||||
returns an HTTP request dispatcher that responds to requests
|
||||
issued by the client-side `require` script.
|
||||
The dispatcher is called as `dispatcher(_, request, response)`
|
||||
|
||||
# streamline/lib/streams/server/streams
|
||||
|
||||
Server Streams module
|
||||
|
||||
The `streams` module contains _pull mode_ wrappers around node streams.
|
||||
|
||||
These wrappers implement a _pull style_ API.
|
||||
Instead of having the stream _push_ the data to its consumer by emitting `data` and `end` events,
|
||||
these wrappers let the consumer _pull_ the data from the stream by calling asynchronous `read` methods.
|
||||
|
||||
For a bit more background on this design,
|
||||
you can read [this blog post](http://bjouhier.wordpress.com/2011/04/25/asynchronous-episode-3-adventures-in-event-land/)
|
||||
|
||||
For a simple example of this API in action,
|
||||
see the [google client example](./examples/googleClient_.js)
|
||||
|
||||
## Emitter
|
||||
|
||||
Base wrapper for all objects that emit an `end` or `close` event.
|
||||
All stream wrappers derive from this wrapper.
|
||||
|
||||
* `wrapper = new streams.Emitter(stream)`
|
||||
creates a wrapper.
|
||||
* `emitter = wrapper.emitter`
|
||||
returns the underlying emitter. The emitter stream can be used to attach additional observers.
|
||||
* `emitter = wrapper.unwrap()`
|
||||
unwraps and returns the underlying emitter.
|
||||
The wrapper should not be used after this call.
|
||||
|
||||
## ReadableStream
|
||||
|
||||
All readable stream wrappers derive from this wrapper.
|
||||
|
||||
* `stream = new streams.ReadableStream(stream, [options])`
|
||||
creates a readable stream wrapper.
|
||||
* `stream.setEncoding(enc)`
|
||||
sets the encoding.
|
||||
returns `this` for chaining.
|
||||
* `data = stream.read(_, [len])`
|
||||
reads asynchronously from the stream and returns a `string` or a `Buffer` depending on the encoding.
|
||||
If a `len` argument is passed, the `read` call returns when `len` characters or bytes
|
||||
(depending on encoding) have been read, or when the underlying stream has emitted its `end` event.
|
||||
Without `len`, the read call returns the data chunks as they have been emitted by the underlying stream.
|
||||
Once the end of stream has been reached, the `read` call returns `null`.
|
||||
* `data = stream.readAll(_)`
|
||||
reads till the end of stream.
|
||||
Equivalent to `stream.read(_, -1)`.
|
||||
* `stream.unread(chunk)`
|
||||
pushes the chunk back to the stream.
|
||||
returns `this` for chaining.
|
||||
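A hedged usage sketch of the pull-style loop this wrapper enables (streamlined code, so it belongs in a `foo_.js` file; the input path is a placeholder):

```javascript
// Sketch only: wrap a node readable stream and pull chunks from it.
var streams = require('streamline/lib/streams/server/streams');
var fs = require('fs');

function dump(path, _) {
  var stream = new streams.ReadableStream(fs.createReadStream(path))
    .setEncoding('utf8'); // setEncoding returns `this` for chaining

  var chunk;
  // read(_) returns chunks as they arrive, and null once the stream has ended
  while ((chunk = stream.read(_)) !== null) {
    console.log('read ' + chunk.length + ' characters');
  }
}
```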
|
||||
## WritableStream
|
||||
|
||||
All writable stream wrappers derive from this wrapper.
|
||||
|
||||
* `stream = new streams.WritableStream(stream, [options])`
|
||||
creates a writable stream wrapper.
|
||||
* `stream.write(_, data, [enc])`
|
||||
Writes the data.
|
||||
This operation is asynchronous because it _drains_ the stream if necessary.
|
||||
If you have a lot of small write operations to perform and you don't want the overhead of draining at every step,
|
||||
you can write to the underlying stream with `stream.emitter.write(data)` most of the time
|
||||
and call `stream.write(_, data)` once in a while to drain.
|
||||
Returns `this` for chaining.
|
||||
* `stream.end()`
|
||||
signals the end of the send operation.
|
||||
Returns `this` for chaining.
|
||||
|
||||
## HttpServerRequest
|
||||
|
||||
This is a wrapper around node's `http.ServerRequest`:
|
||||
This stream is readable (see Readable Stream above).
|
||||
|
||||
* `request = new streams.HttpServerRequest(req, [options])`
|
||||
returns a wrapper around `req`, an `http.ServerRequest` object.
|
||||
The `options` parameter can be used to pass `lowMark` and `highMark` values.
|
||||
* `method = request.method`
|
||||
* `url = request.url`
|
||||
* `headers = request.headers`
|
||||
* `trailers = request.trailers`
|
||||
* `httpVersion = request.httpVersion`
|
||||
* `connection = request.connection`
|
||||
* `socket = request.socket`
|
||||
(same as `http.ServerRequest`)
|
||||
|
||||
## HttpServerResponse
|
||||
|
||||
This is a wrapper around node's `http.ServerResponse`.
|
||||
This stream is writable (see Writable Stream above).
|
||||
|
||||
* `response = new streams.HttpServerResponse(resp, [options])`
|
||||
returns a wrapper around `resp`, an `http.ServerResponse` object.
|
||||
* `response.writeContinue()`
|
||||
* `response.writeHead(head)`
|
||||
* `response.setHeader(name, value)`
|
||||
* `value = response.getHeader(head)`
|
||||
* `response.removeHeader(name)`
|
||||
* `response.addTrailers(trailers)`
|
||||
* `response.statusCode = value`
|
||||
(same as `http.ServerResponse`)
|
||||
|
||||
## HttpServer
|
||||
|
||||
This is a wrapper around node's `http.Server` object:
|
||||
|
||||
* `server = new streams.HttpServer(requestListener, [options])`
|
||||
creates the wrapper.
|
||||
`requestListener` is called as `requestListener(request, response, _)`
|
||||
where `request` and `response` are wrappers around `http.ServerRequest` and `http.ServerResponse`.
|
||||
* `server.listen(_, port, [host])`
|
||||
* `server.listen(_, path)`
|
||||
(same as `http.Server`)
|
||||
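A hedged sketch of a small streamlined server built on these wrappers (the port number is arbitrary):

```javascript
// Sketch only: echo the request body back, using the pull-mode wrappers above.
var streams = require('streamline/lib/streams/server/streams');

var server = new streams.HttpServer(function (request, response, _) {
  var body = request.readAll(_);              // pull-mode read of the whole request body
  response.writeHead(200);                    // same API as http.ServerResponse
  response.write(_, body || 'hello', 'utf8'); // write(_, data, [enc]) drains if necessary
  response.end();
});

function start(_) {
  server.listen(_, 8124);
  console.log('listening on port 8124');
}
```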
|
||||
## HttpClientResponse
|
||||
|
||||
This is a wrapper around node's `http.ClientResponse`
|
||||
|
||||
This stream is readable (see Readable Stream above).
|
||||
|
||||
* `response = request.response(_)` returns the response stream.
|
||||
* `status = response.statusCode`
|
||||
returns the HTTP status code.
|
||||
* `version = response.httpVersion`
|
||||
returns the HTTP version.
|
||||
* `headers = response.headers`
|
||||
returns the HTTP response headers.
|
||||
* `trailers = response.trailers`
|
||||
returns the HTTP response trailers.
|
||||
* `response.checkStatus(statuses)`
|
||||
throws an error if the status is not in the `statuses` array.
|
||||
If only one status is expected, it may be passed directly as an integer rather than as an array.
|
||||
Returns `this` for chaining.
|
||||
|
||||
## HttpClientRequest
|
||||
|
||||
This is a wrapper around node's `http.ClientRequest`.
|
||||
|
||||
This stream is writable (see Writable Stream above).
|
||||
|
||||
* `request = streams.httpRequest(options)`
|
||||
creates the wrapper.
|
||||
The options are the following:
|
||||
* `method`: the HTTP method, `'GET'` by default.
|
||||
* `headers`: the HTTP headers.
|
||||
* `url`: the requested URL (with query string if necessary).
|
||||
* `proxy.url`: the proxy URL.
|
||||
* `lowMark` and `highMark`: low and high water mark values for buffering (in bytes or characters depending
|
||||
on encoding).
|
||||
Note that these values are only hints as the data is received in chunks.
|
||||
* `response = request.response(_)`
|
||||
returns the response.
|
||||
* `request.abort()`
|
||||
aborts the request.
|
||||
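A hedged sketch combining the client request and response wrappers described above (streamlined code; the URL is a placeholder):

```javascript
// Sketch only: GET a URL and return its body as a string.
var streams = require('streamline/lib/streams/server/streams');

function fetch(url, _) {
  var req = streams.httpRequest({ url: url, method: 'GET' });
  req.end(); // no request body to send

  var resp = req.response(_).checkStatus(200); // throws unless the status is 200
  return resp.setEncoding('utf8').readAll(_);  // readAll(_) reads to end of stream
}
```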
|
||||
## NetStream
|
||||
|
||||
This is a wrapper around streams returned by TCP and socket clients:
|
||||
|
||||
These streams are both readable and writable (see Readable Stream and Writable Stream above).
|
||||
|
||||
* `stream = new streams.NetStream(stream, [options])`
|
||||
creates a network stream wrapper.
|
||||
|
||||
## TCP and Socket clients
|
||||
|
||||
These are wrappers around node's `net.createConnection`:
|
||||
|
||||
* `client = streams.tcpClient(port, host, [options])`
|
||||
returns a TCP connection client.
|
||||
* `client = streams.socketClient(path, [options])`
|
||||
returns a socket client.
|
||||
The `options` parameter of the constructor provides options for the stream (`lowMark` and `highMark`). If you want different options for `read` and `write` operations, you can specify them by creating `options.read` and `options.write` sub-objects inside `options`.
|
||||
* `stream = client.connect(_)`
|
||||
connects the client and returns a network stream.
|
||||
# streamline/lib/tools/docTool
|
||||
|
||||
Documentation tool
|
||||
|
||||
Usage:
|
||||
|
||||
node streamline/lib/tools/docTool [path]
|
||||
|
||||
Extracts documentation comments from `.js` files and generates `API.md` file
|
||||
under package root.
|
||||
|
||||
Top of source file must contain `/// !doc` marker to enable doc extraction.
|
||||
Documentation comments must start with `/// ` (with 1 trailing space).
|
||||
Extraction can be turned off with `/// !nodoc` and turned back on with `/// !doc`.
|
||||
|
||||
The tool can also be invoked programmatically with:
|
||||
|
||||
* `doc = docTool.generate(_, path)`
|
||||
extracts documentation comments from file `path`
|
||||
|
||||
# streamline/lib/util/flows
|
||||
|
||||
Flows Module
|
||||
|
||||
The `streamline/lib/util/flows` module contains some handy utilities for streamline code
|
||||
|
||||
## Array utilities
|
||||
|
||||
The following functions are async equivalents of the ES5 Array methods (`forEach`, `map`, `filter`, ...)
|
||||
|
||||
* `flows.each(_, array, fn, [thisObj])`
|
||||
applies `fn` sequentially to the elements of `array`.
|
||||
`fn` is called as `fn(_, elt, i)`.
|
||||
* `result = flows.map(_, array, fn, [thisObj])`
|
||||
transforms `array` by applying `fn` to each element in turn.
|
||||
`fn` is called as `fn(_, elt, i)`.
|
||||
* `result = flows.filter(_, array, fn, [thisObj])`
|
||||
generates a new array that only contains the elements that satisfy the `fn` predicate.
|
||||
`fn` is called as `fn(_, elt)`.
|
||||
* `bool = flows.every(_, array, fn, [thisObj])`
|
||||
returns true if `fn` is true on every element (if `array` is empty too).
|
||||
`fn` is called as `fn(_, elt)`.
|
||||
* `bool = flows.some(_, array, fn, [thisObj])`
|
||||
returns true if `fn` is true for at least one element.
|
||||
`fn` is called as `fn(_, elt)`.
|
||||
* `result = flows.reduce(_, array, fn, val, [thisObj])`
|
||||
reduces by applying `fn` to each element.
|
||||
`fn` is called as `val = fn(_, val, elt, i, array)`.
|
||||
* `result = flows.reduceRight(_, array, fn, val, [thisObj])`
|
||||
reduces from end to start by applying `fn` to each element.
|
||||
`fn` is called as `val = fn(_, val, elt, i, array)`.
|
||||
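A brief sketch of a couple of these helpers in use (streamlined code; the file list is hypothetical):

```javascript
// Sketch only: read a list of files and keep the non-empty ones.
var flows = require('streamline/lib/util/flows');
var fs = require('fs');

function nonEmptyContents(paths, _) {
  // map: fn is called as fn(_, elt, i)
  var contents = flows.map(_, paths, function (_, path) {
    return fs.readFile(path, 'utf8', _);
  });
  // filter: fn is called as fn(_, elt)
  return flows.filter(_, contents, function (_, text) {
    return text.length > 0;
  });
}
```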
|
||||
## Object utility
|
||||
|
||||
The following function can be used to iterate through object properties:
|
||||
|
||||
* `flows.eachKey(_, obj, fn)`
|
||||
calls `fn(_, key, obj[key])` for every `key` in `obj`.
|
||||
|
||||
## Workflow Utilities
|
||||
|
||||
* `fun = flows.funnel(max)`
|
||||
limits the number of concurrent executions of a given code block.
|
||||
|
||||
The `funnel` function is typically used with the following pattern:
|
||||
|
||||
// somewhere
|
||||
var myFunnel = flows.funnel(10); // create a funnel that only allows 10 concurrent executions.
|
||||
|
||||
// elsewhere
|
||||
myFunnel(_, function(_) { /* code with at most 10 concurrent executions */ });
|
||||
|
||||
The `diskUsage2.js` example demonstrates how these calls can be combined to control concurrent execution.
|
||||
|
||||
The `funnel` function can also be used to implement critical sections. Just set funnel's `max` parameter to 1.
|
||||
|
||||
* `results = flows.collect(_, futures)`
|
||||
collects the results of an array of futures
|
||||
|
||||
## Context propagation
|
||||
|
||||
Streamline also allows you to propagate a global context along a chain of calls and callbacks.
|
||||
This context can be used like TLS (Thread Local Storage) in a threaded environment.
|
||||
It allows you to have several active chains that each have their own global context.
|
||||
|
||||
This kind of context is very handy to store information that all calls should be able to access
|
||||
but that you don't want to pass explicitly via function parameters. The most obvious example is
|
||||
the `locale` that each request may set differently and that your low level libraries should
|
||||
be able to retrieve to format messages.
|
||||
|
||||
The `streamline.flows` module exposes two functions to manipulate the context:
|
||||
|
||||
* `oldCtx = flows.setContext(ctx)`
|
||||
sets the context (and returns the old context).
|
||||
* `ctx = flows.getContext()`
|
||||
returns the current context.
|
||||
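A small sketch of the pattern, using the `locale` example mentioned above (the surrounding request-handling code is hypothetical):

```javascript
// Sketch only: each chain of calls carries its own context object.
var flows = require('streamline/lib/util/flows');

function handleRequest(req, _) {
  // attach a per-request context at the top of the chain
  flows.setContext({ locale: req.headers['accept-language'] || 'en' });
  return greet(_);
}

function greet(_) {
  // deep inside a library, no locale parameter was passed down
  var locale = flows.getContext().locale;
  return locale === 'fr' ? 'Bonjour' : 'Hello';
}
```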
|
||||
## Miscellaneous
|
||||
|
||||
* `flows.nextTick(_)`
|
||||
`nextTick` function for both browser and server.
|
||||
Aliased to `process.nextTick` on the server side.
|
||||
* `result = flows.apply(_, fn, thisObj, args, [index])`
|
||||
Helper to apply `Function.apply` to streamline functions.
|
||||
Equivalent to `result = fn.apply(thisObj, argsWith_)` where `argsWith_` is
|
||||
a modified argument list in which the callback has been inserted at `index`
|
||||
(at the end of the argument list if `index` is not specified).
|
|
@ -1,7 +0,0 @@
|
|||
# Authors ordered by first contribution.
|
||||
|
||||
Bruno Jouhier <bruno.jouhier@sage.com>
|
||||
Preston Guillory <pguillory@gmail.com>
|
||||
Will Conant <will.conant@gmail.com>
|
||||
Aseem Kishore <aseem.kishore@gmail.com>
|
||||
|
|
@ -1,155 +0,0 @@
|
|||
## streamline.js
|
||||
|
||||
`streamline.js` is a small tool to simplify asynchronous Javascript programming.
|
||||
|
||||
Instead of writing hairy code like:
|
||||
|
||||
```javascript
|
||||
function lineCount(path, callback) {
|
||||
fs.readFile(path, "utf8", function(err, data) {
|
||||
if (err) { callback(err); return; }
|
||||
callback(null, data.split('\n').length);
|
||||
});
|
||||
}
|
||||
```
|
||||
Streamline.js lets you write:
|
||||
|
||||
```javascript
|
||||
function lineCount(path, _) {
|
||||
return fs.readFile(path, "utf8", _).split('\n').length;
|
||||
}
|
||||
```
|
||||
You just have to follow a simple rule:
|
||||
|
||||
> Replace all callbacks by an underscore and write your code as if all functions were synchronous.
|
||||
|
||||
Streamline will transform the code and generate the callbacks for you!
|
||||
|
||||
And streamline is not limited to a subset of Javascript.
|
||||
You can use all the flow control features of Javascript in your asynchronous code: conditionals,
|
||||
loops, `try/catch/finally` blocks, anonymous functions, `this`, etc.
|
||||
|
||||
Streamline generates more or less the callbacks that you would write yourself. So you get the same level
|
||||
of performance as with hand-written callbacks.
|
||||
Also, the generated code is nicely indented, easy to read, and directly available to debuggers.
|
||||
|
||||
Streamline also provides _futures_, and comes with a small optional library of helper functions (see Goodies section below).
|
||||
|
||||
# On-line demo
|
||||
|
||||
You can test `streamline.js` directly with the [on-line demo](http://sage.github.com/streamlinejs/examples/streamlineMe/streamlineMe.html)
|
||||
|
||||
# Installation
|
||||
|
||||
The easiest way to install `streamline.js` is with NPM:
|
||||
|
||||
```sh
|
||||
npm install streamline -g
|
||||
```
|
||||
|
||||
The `-g` option installs it _globally_.
|
||||
You can also install it _locally_, without `-g` but then the `node-streamline` and `coffee-streamline`
|
||||
commands will not be in your default PATH.
|
||||
|
||||
Note: if you encounter a permission error when installing on UNIX systems, you should retry with `sudo`.
|
||||
|
||||
# Creating and running streamline modules
|
||||
|
||||
To create a module called `myModule`, put your _streamlined_ source in a file called `myModule_.js`.
|
||||
|
||||
Then you have several options:
|
||||
|
||||
1. You can _compile_ your module with `node-streamline -c`. This will create a file called `myModule.js` that you can directly run with the `node` command,
|
||||
or _require_ from a normal node program.
|
||||
2. You can run the module with `node-streamline myModule_` or require it as `require('myModule_')` from a program that you launch with `node-streamline`.
|
||||
If you choose this option, the `myModule.js` file will not be created.
|
||||
3. You can run the module with `node-streamline myModule` or require it as `require('myModule')` from a program that you launch with `node-streamline`.
|
||||
If you choose this option, you have to create an empty `myModule.js` file to initiate the process.
|
||||
4. You can load source and transform it _on the fly_ with the `transform` API.
|
||||
|
||||
Option 1 is ideal for production code, as your transformed module will be loaded standalone. The transformation engine will not be loaded.
|
||||
|
||||
Option 2 is your best option if you do not want to save the transformed code to disk.
|
||||
|
||||
Option 3 is ideal for the development phase if you do not have a build script.
|
||||
The files will only be recompiled if the source has changed (so you won't get the overhead every time you launch your program).
|
||||
The transformed source will be available on disk, and will be loaded by the debugger (because you require `myModule`, not `myModule_`).
|
||||
Also, this option makes the switch to production really easy: recompile the whole tree and run with `node` rather than with `node-streamline`.
|
||||
|
||||
Option 4 is reserved for advanced scenarios where the code is transformed on the fly.
|
||||
|
||||
There is an alternative to running your application with `node-streamline`:
|
||||
you can call `require('streamline')` from your main script and then run it with `node`.
|
||||
Modules that are required (directly or indirectly) by your main script will be transformed on demand.
|
||||
|
||||
Note: streamline can also transform vanilla Javascript files that don't use CommonJS modules and don't target node.
|
||||
So you can compile them (option 1) and load them directly in the browser from a `<script>` directive.
|
||||
|
||||
# Examples
|
||||
|
||||
The `examples/diskUsage` directory contains a simple example that traverses directories to compute disk usage.
|
||||
You can run it as follows:
|
||||
|
||||
```sh
|
||||
node-streamline diskUsage_ (will not regenerate diskUsage.js)
|
||||
node-streamline diskUsage (will regenerate diskUsage.js if necessary)
|
||||
node diskUsage (assumes that diskUsage.js is there and up-to-date)
|
||||
```
|
||||
|
||||
# Interoperability with standard node.js code
|
||||
|
||||
You can call standard node functions from streamline code. For example the `fs.readFile` function:
|
||||
|
||||
```javascript
|
||||
function lineCount(path, _) {
|
||||
return fs.readFile(path, "utf8", _).split('\n').length;
|
||||
}
|
||||
```
|
||||
You can also call streamline functions as if they were standard node functions. For example:
|
||||
|
||||
```javascript
|
||||
lineCount("README.md", function(err, result) {
|
||||
if (err) return console.error("ERROR: " + err.message);
|
||||
console.log("README has " + result + " lines.");
|
||||
});
|
||||
```
|
||||
And you can mix streamline functions, classical callback-based code and synchronous functions in the same file.
|
||||
Streamline will only transform the functions that have the special `_` parameter. The other functions will end up unmodified in the output file (maybe slightly reformatted by the narcissus pretty printer though).
|
||||
|
||||
# Running in other environments
|
||||
|
||||
`streamline.js` generates vanilla Javascript code that may be run browser-side too.
|
||||
|
||||
You can also transform the code in the browser with the `transform` API. See the `test/*.js` unit test files for examples.
|
||||
|
||||
You can also use `streamline.js` with CoffeeScript. For example:
|
||||
|
||||
```sh
|
||||
coffee-streamline diskUsage_.coffee
|
||||
```
|
||||
|
||||
See the [Compilers wiki page](https://github.com/Sage/streamlinejs/wiki/Compilers) for details.
|
||||
|
||||
# Goodies
|
||||
|
||||
The functions generated by streamline return a _future_ if you call them without a callback.
|
||||
This gives you an easy way to run several asynchronous operations in parallel and resynchronize later.
|
||||
See the [futures](https://github.com/Sage/streamlinejs/wiki/Futures) wiki page for details.
|
||||
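For example, a hedged sketch reusing the `lineCount` function from above (streamlined code):

```javascript
// Sketch only: start two reads in parallel, then resynchronize.
function totalLines(_) {
  // calling a streamlined function without a callback returns a future
  var f1 = lineCount("README.md");
  var f2 = lineCount("AUTHORS");

  // passing _ to a future waits for, and returns, its result
  return f1(_) + f2(_);
}
```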
|
||||
The following subdirectories contain various modules that have been written with streamline.js:
|
||||
|
||||
* `lib/util`: utilities for array manipulation, semaphores, etc.
|
||||
* `lib/streams`: pull-mode API for node.js streams.
|
||||
* `lib/require`: infrastructure to support client-side require.
|
||||
* `lib/tools`: small tools (doc generator for API.md file).
|
||||
|
||||
## Resources
|
||||
|
||||
The API is documented [here](https://github.com/Sage/streamlinejs/blob/master/API.md).
|
||||
The [wiki](https://github.com/Sage/streamlinejs/wiki) discusses advanced topics like exception handling.
|
||||
|
||||
For support and discussion, please join the [streamline.js Google Group](http://groups.google.com/group/streamlinejs).
|
||||
|
||||
## License
|
||||
|
||||
This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License).
|
|
@ -1,18 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var coffee = require('coffee-script');
|
||||
var path = require('path');
|
||||
|
||||
var streamline = require('streamline');
|
||||
|
||||
if (process.argv.length >= 3 && process.argv[2][0] != '-') {
|
||||
// coffee-streamline myScript.coffee args
|
||||
// our compile hook is not called if we pass it to coffee-script command line
|
||||
process.argv.shift(); // remove the 'node' arg inserted by the shell
|
||||
require(path.join(process.cwd(), process.argv[1]));
|
||||
}
|
||||
else {
|
||||
// the coffee-script command line will handle the REPL and the compiler
|
||||
global.error = console.log
|
||||
require('coffee-script/lib/command').run();
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require("streamline").command.run();
|
|
@ -1,5 +0,0 @@
|
|||
This software is available under your choice of the following licenses:
|
||||
|
||||
* MPL 1.1 or later: http://www.mozilla.org/MPL/
|
||||
* GPL 2.0 or later: http://www.gnu.org/licenses/gpl.html
|
||||
* LGPL 2.1 or later: http://www.gnu.org/licenses/lgpl.html
|
|
@ -1,21 +0,0 @@
|
|||
# Narcissus
|
||||
|
||||
Narcissus is a JavaScript interpreter written in pure JavaScript (i.e., a [meta-circular evaluator](http://en.wikipedia.org/wiki/Meta-circular_evaluator)), using the [SpiderMonkey](http://www.mozilla.org/js/spidermonkey/) engine.
|
||||
|
||||
Originally a proof-of-concept by [Brendan Eich](http://brendaneich.com/), Narcissus is being revived as a test-bed for rapidly prototyping new language features for the JavaScript language (as well as the ECMAScript standard).
|
||||
|
||||
# Documentation
|
||||
|
||||
Documentation can be found on the [Narcissus wiki](https://github.com/mozilla/narcissus/wiki).
|
||||
|
||||
# Contributors
|
||||
|
||||
* Tom Austin
|
||||
* Brendan Eich
|
||||
* Andreas Gal
|
||||
* Shu-yu Guo
|
||||
* Dave Herman
|
||||
* Bruno Jouhier
|
||||
* Gregor Richards
|
||||
* Dimitris Vardoulakis
|
||||
* Patrick Walton
|
|
@ -1,44 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Root of the narcissus tree
|
||||
NJS_HOME=`dirname $0`
|
||||
NJS_HOME=`(cd $NJS_HOME; pwd)`
|
||||
|
||||
# Fake information for the test harness's |xulRuntime| configuration object.
|
||||
XUL_INFO=none:none:true
|
||||
|
||||
if [ $# -eq 1 -a "$1" = "-h" ]; then
|
||||
echo "usage: jstests [-h | -a | <test> ...]" 1>&2
|
||||
echo " -h display this usage information and quit" 1>&2
|
||||
echo " -a run all but the slowest tests (those in xfail/narcissus-slow.txt)" 1>&2
|
||||
echo " <test> path to individual test (relative to test directory)" 1>&2
|
||||
echo "" 1>&2
|
||||
echo "With no arguments, jstests runs all tests except those listed in" 1>&2
|
||||
echo "xfail/narcissus-failures.txt, which includes all the tests listed in" 1>&2
|
||||
echo "xfail/narcissus-slow.txt." 1>&2
|
||||
echo "" 1>&2
|
||||
echo "The test directory is searched for either in NJS_TESTS or in" 1>&2
|
||||
echo "a tests/ subdirectory of the Narcissus home directory."
|
||||
exit
|
||||
elif [ $# -gt 0 -a "$1" = "-a" ]; then
|
||||
shift
|
||||
XFAIL=narcissus-slow.txt
|
||||
else
|
||||
XFAIL=narcissus-failures.txt
|
||||
fi
|
||||
|
||||
if [ ! -z $NJS_TESTS -a -d $NJS_TESTS ]; then
|
||||
cd $NJS_TESTS
|
||||
elif [ -d $NJS_HOME/tests ]; then
|
||||
cd $NJS_HOME/tests
|
||||
else
|
||||
echo 'Expected a test directory in $NJS_TESTS or '"$NJS_HOME/tests." 1>&2
|
||||
echo "Run jstests -h for more information." 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $# -gt 0 ]; then
|
||||
exec python jstests.py --xul-info=$XUL_INFO -s -o -d -j 4 $NJS_HOME/njs $*
|
||||
else
|
||||
exec python jstests.py --xul-info=$XUL_INFO -d -j 4 $NJS_HOME/njs -x $NJS_HOME/xfail/$XFAIL
|
||||
fi
|
50
lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/lib/decompiler.js
generated
vendored
|
@ -1,50 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Tom Austin <taustin@ucsc.edu>
|
||||
* Brendan Eich <brendan@mozilla.org>
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Dave Herman <dherman@mozilla.com>
|
||||
* Dimitris Vardoulakis <dimvar@ccs.neu.edu>
|
||||
* Patrick Walton <pcwalton@mozilla.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
require("./jsdefs");
|
||||
require("./jslex");
|
||||
require("./jsparse");
|
||||
require("./jsdecomp");
|
||||
|
||||
for (var exp in Narcissus.decompiler)
|
||||
exports[exp] = Narcissus.decompiler[exp];
|
47
lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/lib/definitions.js
generated
vendored
|
@ -1,47 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Tom Austin <taustin@ucsc.edu>
|
||||
* Brendan Eich <brendan@mozilla.org>
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Dave Herman <dherman@mozilla.com>
|
||||
* Dimitris Vardoulakis <dimvar@ccs.neu.edu>
|
||||
* Patrick Walton <pcwalton@mozilla.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
require("./jsdefs");
|
||||
|
||||
for (var exp in Narcissus.definitions)
|
||||
exports[exp] = Narcissus.definitions[exp];
|
65
lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/lib/jsbrowser.js
generated
vendored
|
@ -1,65 +0,0 @@
|
|||
/* -*- Mode: JS; tab-width: 4; indent-tabs-mode: nil; -*-
|
||||
* vim: set sw=4 ts=8 et tw=78:
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
*
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
/*
|
||||
* Narcissus - JS implemented in JS.
|
||||
*
|
||||
* Browser-specific tweaks needed for Narcissus to execute properly
|
||||
*/
|
||||
|
||||
// Prevent setTimeout from breaking out to SpiderMonkey
|
||||
Narcissus.interpreter.globalBase.setTimeout = function(code, delay) {
|
||||
var timeoutCode = (typeof code === "string") ?
|
||||
function() { Narcissus.interpreter.evaluate(code); } :
|
||||
code;
|
||||
return setTimeout(timeoutCode, delay);
|
||||
};
|
||||
|
||||
// Prevent setInterval from breaking out to SpiderMonkey
|
||||
Narcissus.interpreter.globalBase.setInterval = function(code, delay) {
|
||||
var timeoutCode = (typeof code === "string") ?
|
||||
function() { Narcissus.interpreter.evaluate(code); } :
|
||||
code;
|
||||
return setInterval(timeoutCode, delay);
|
||||
};
|
||||
|
||||
// Hack to avoid problems with the Image constructor in Narcissus.
|
||||
Narcissus.interpreter.globalBase.Image = function() {};
|
||||
|
||||
|
528
lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/lib/jsdecomp.js
generated
vendored
|
@ -1,528 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2010
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Bruno Jouhier
|
||||
* Gregor Richards
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
/*
|
||||
* Narcissus - JS implemented in JS.
|
||||
*
|
||||
* Decompiler and pretty-printer.
|
||||
*/
|
||||
|
||||
Narcissus.decompiler = (function() {
|
||||
|
||||
const parser = Narcissus.parser;
|
||||
const definitions = Narcissus.definitions;
|
||||
const tokens = definitions.tokens;
|
||||
|
||||
// Set constants in the local scope.
|
||||
eval(definitions.consts);
|
||||
|
||||
function indent(n, s) {
|
||||
var ss = "", d = true;
|
||||
|
||||
for (var i = 0, j = s.length; i < j; i++) {
|
||||
if (d)
|
||||
for (var k = 0; k < n; k++)
|
||||
ss += " ";
|
||||
ss += s[i];
|
||||
d = s[i] === '\n';
|
||||
}
|
||||
|
||||
return ss;
|
||||
}
|
||||
|
||||
function isBlock(n) {
|
||||
return n && (n.type === BLOCK);
|
||||
}
|
||||
|
||||
function isNonEmptyBlock(n) {
|
||||
return isBlock(n) && n.children.length > 0;
|
||||
}
|
||||
|
||||
function nodeStr(n) {
|
||||
return '"' +
|
||||
n.value.replace(/\\/g, "\\\\")
|
||||
.replace(/"/g, "\\\"")
|
||||
.replace(/\n/g, "\\n")
|
||||
.replace(/\r/g, "\\r") +
|
||||
'"';
|
||||
}
|
||||
|
||||
function pp(n, d, inLetHead) {
|
||||
var topScript = false;
|
||||
|
||||
if (!n)
|
||||
return "";
|
||||
if (!(n instanceof Object))
|
||||
return n;
|
||||
if (!d) {
|
||||
topScript = true;
|
||||
d = 1;
|
||||
}
|
||||
|
||||
var p = "";
|
||||
|
||||
if (n.parenthesized)
|
||||
p += "(";
|
||||
|
||||
switch (n.type) {
|
||||
case FUNCTION:
|
||||
case GETTER:
|
||||
case SETTER:
|
||||
if (n.type === FUNCTION)
|
||||
p += "function";
|
||||
else if (n.type === GETTER)
|
||||
p += "get";
|
||||
else
|
||||
p += "set";
|
||||
|
||||
p += (n.name ? " " + n.name : "") + "(";
|
||||
for (var i = 0, j = n.params.length; i < j; i++)
|
||||
p += (i > 0 ? ", " : "") + pp(n.params[i], d);
|
||||
p += ") " + pp(n.body, d);
|
||||
break;
|
||||
|
||||
case SCRIPT:
|
||||
case BLOCK:
|
||||
var nc = n.children;
|
||||
if (topScript) {
|
||||
// No indentation.
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
p += "\n";
|
||||
p += pp(nc[i], d);
|
||||
var eoc = p[p.length - 1];
|
||||
if (eoc != ";")
|
||||
p += ";";
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
p += "{";
|
||||
if (n.id !== undefined)
|
||||
p += " /* " + n.id + " */";
|
||||
p += "\n";
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
p += "\n";
|
||||
p += indent(4, pp(nc[i], d));
|
||||
var eoc = p[p.length - 1];
|
||||
if (eoc != ";")
|
||||
p += ";";
|
||||
}
|
||||
p += "\n}";
|
||||
break;
|
||||
|
||||
case LET_BLOCK:
|
||||
p += "let (" + pp(n.variables, d, true) + ") ";
|
||||
if (n.expression)
|
||||
p += pp(n.expression, d);
|
||||
else
|
||||
p += pp(n.block, d);
|
||||
break;
|
||||
|
||||
case IF:
|
||||
p += "if (" + pp(n.condition, d) + ") ";
|
||||
|
||||
var tp = n.thenPart, ep = n.elsePart;
|
||||
var b = isBlock(tp) || isBlock(ep);
|
||||
if (!b)
|
||||
p += "{\n";
|
||||
p += (b ? pp(tp, d) : indent(4, pp(tp, d))) + "\n";
|
||||
|
||||
if (ep) {
|
||||
if (!b)
|
||||
p += "} else {\n";
|
||||
else
|
||||
p += " else ";
|
||||
|
||||
p += (b ? pp(ep, d) : indent(4, pp(ep, d))) + "\n";
|
||||
}
|
||||
if (!b)
|
||||
p += "}";
|
||||
break;
|
||||
|
||||
case SWITCH:
|
||||
p += "switch (" + pp(n.discriminant, d) + ") {\n";
|
||||
for (var i = 0, j = n.cases.length; i < j; i++) {
|
||||
var ca = n.cases[i];
|
||||
if (ca.type === CASE)
|
||||
p += " case " + pp(ca.caseLabel, d) + ":\n";
|
||||
else
|
||||
p += " default:\n";
|
||||
ps = pp(ca.statements, d);
|
||||
p += ps.slice(2, ps.length - 2) + "\n";
|
||||
}
|
||||
p += "}";
|
||||
break;
|
||||
|
||||
case FOR:
|
||||
p += "for (" + pp(n.setup, d) + "; "
|
||||
+ pp(n.condition, d) + "; "
|
||||
+ pp(n.update, d) + ") ";
|
||||
|
||||
var pb = pp(n.body, d);
|
||||
if (!isBlock(n.body))
|
||||
p += "{\n" + indent(4, pb) + ";\n}";
|
||||
else if (n.body)
|
||||
p += pb;
|
||||
break;
|
||||
|
||||
case WHILE:
|
||||
p += "while (" + pp(n.condition, d) + ") ";
|
||||
|
||||
var pb = pp(n.body, d);
|
||||
if (!isBlock(n.body))
|
||||
p += "{\n" + indent(4, pb) + ";\n}";
|
||||
else
|
||||
p += pb;
|
||||
break;
|
||||
|
||||
case FOR_IN:
|
||||
var u = n.varDecl;
|
||||
p += n.isEach ? "for each (" : "for (";
|
||||
p += (u ? pp(u, d) : pp(n.iterator, d)) + " in " +
|
||||
pp(n.object, d) + ") ";
|
||||
|
||||
var pb = pp(n.body, d);
|
||||
if (!isBlock(n.body))
|
||||
p += "{\n" + indent(4, pb) + ";\n}";
|
||||
else if (n.body)
|
||||
p += pb;
|
||||
break;
|
||||
|
||||
case DO:
|
||||
p += "do " + pp(n.body, d);
|
||||
p += " while (" + pp(n.condition, d) + ");";
|
||||
break;
|
||||
|
||||
case BREAK:
|
||||
p += "break" + (n.label ? " " + n.label : "") + ";";
|
||||
break;
|
||||
|
||||
case CONTINUE:
|
||||
p += "continue" + (n.label ? " " + n.label : "") + ";";
|
||||
break;
|
||||
|
||||
case TRY:
|
||||
p += "try ";
|
||||
p += pp(n.tryBlock, d);
|
||||
for (var i = 0, j = n.catchClauses.length; i < j; i++) {
|
||||
var t = n.catchClauses[i];
|
||||
p += " catch (" + pp(t.varName, d) +
|
||||
(t.guard ? " if " + pp(t.guard, d) : "") +
|
||||
") ";
|
||||
p += pp(t.block, d);
|
||||
}
|
||||
if (n.finallyBlock) {
|
||||
p += " finally ";
|
||||
p += pp(n.finallyBlock, d);
|
||||
}
|
||||
break;
|
||||
|
||||
case THROW:
|
||||
p += "throw " + pp(n.exception, d);
|
||||
break;
|
||||
|
||||
case RETURN:
|
||||
p += "return";
|
||||
if (n.value)
|
||||
p += " " + pp(n.value, d);
|
||||
break;
|
||||
|
||||
case YIELD:
|
||||
p += "yield";
|
||||
if (n.value.type)
|
||||
p += " " + pp(n.value, d);
|
||||
break;
|
||||
|
||||
case GENERATOR:
|
||||
p += pp(n.expression, d) + " " + pp(n.tail, d);
|
||||
break;
|
||||
|
||||
case WITH:
|
||||
p += "with (" + pp(n.object, d) + ") ";
|
||||
p += pp(n.body, d);
|
||||
break;
|
||||
|
||||
case LET:
|
||||
case VAR:
|
||||
case CONST:
|
||||
var nc = n.children;
|
||||
if (!inLetHead) {
|
||||
p += tokens[n.type] + " ";
|
||||
}
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
p += ", ";
|
||||
var u = nc[i];
|
||||
p += pp(u.name, d);
|
||||
if (u.initializer)
|
||||
p += " = " + pp(u.initializer, d);
|
||||
}
|
||||
break;
|
||||
|
||||
case DEBUGGER:
|
||||
p += "debugger NYI\n";
|
||||
break;
|
||||
|
||||
case SEMICOLON:
|
||||
if (n.expression) {
|
||||
p += pp(n.expression, d) + ";";
|
||||
}
|
||||
break;
|
||||
|
||||
case LABEL:
|
||||
p += n.label + ":\n" + pp(n.statement, d);
|
||||
break;
|
||||
|
||||
case COMMA:
|
||||
case LIST:
|
||||
var nc = n.children;
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
p += ", ";
|
||||
p += pp(nc[i], d);
|
||||
}
|
||||
break;
|
||||
|
||||
case ASSIGN:
|
||||
var nc = n.children;
|
||||
var t = n.assignOp;
|
||||
p += pp(nc[0], d) + " " + (t ? tokens[t] : "") + "="
|
||||
+ " " + pp(nc[1], d);
|
||||
break;
|
||||
|
||||
case HOOK:
|
||||
var nc = n.children;
|
||||
p += "(" + pp(nc[0], d) + " ? "
|
||||
+ pp(nc[1], d) + " : "
|
||||
+ pp(nc[2], d);
|
||||
p += ")";
|
||||
break;
|
||||
|
||||
case OR:
|
||||
case AND:
|
||||
var nc = n.children;
|
||||
p += "(" + pp(nc[0], d) + " " + tokens[n.type] + " "
|
||||
+ pp(nc[1], d);
|
||||
p += ")";
|
||||
break;
|
||||
|
||||
case BITWISE_OR:
|
||||
case BITWISE_XOR:
|
||||
case BITWISE_AND:
|
||||
case EQ:
|
||||
case NE:
|
||||
case STRICT_EQ:
|
||||
case STRICT_NE:
|
||||
case LT:
|
||||
case LE:
|
||||
case GE:
|
||||
case GT:
|
||||
case IN:
|
||||
case INSTANCEOF:
|
||||
case LSH:
|
||||
case RSH:
|
||||
case URSH:
|
||||
case PLUS:
|
||||
case MINUS:
|
||||
case MUL:
|
||||
case DIV:
|
||||
case MOD:
|
||||
var nc = n.children;
|
||||
p += "(" + pp(nc[0], d) + " " + tokens[n.type] + " "
|
||||
+ pp(nc[1], d) + ")";
|
||||
break;
|
||||
|
||||
case DELETE:
|
||||
case VOID:
|
||||
case TYPEOF:
|
||||
p += tokens[n.type] + " " + pp(n.children[0], d);
|
||||
break;
|
||||
|
||||
case NOT:
|
||||
case BITWISE_NOT:
|
||||
p += tokens[n.type] + pp(n.children[0], d);
|
||||
break;
|
||||
|
||||
case UNARY_PLUS:
|
||||
p += "+" + pp(n.children[0], d);
|
||||
break;
|
||||
|
||||
case UNARY_MINUS:
|
||||
p += "-" + pp(n.children[0], d);
|
||||
break;
|
||||
|
||||
case INCREMENT:
|
||||
case DECREMENT:
|
||||
if (n.postfix) {
|
||||
p += pp(n.children[0], d) + tokens[n.type];
|
||||
} else {
|
||||
p += tokens[n.type] + pp(n.children[0], d);
|
||||
}
|
||||
break;
|
||||
|
||||
case DOT:
|
||||
var nc = n.children;
|
||||
p += pp(nc[0], d) + "." + pp(nc[1], d);
|
||||
break;
|
||||
|
||||
case INDEX:
|
||||
var nc = n.children;
|
||||
p += pp(nc[0], d) + "[" + pp(nc[1], d) + "]";
|
||||
break;
|
||||
|
||||
case CALL:
|
||||
var nc = n.children;
|
||||
p += pp(nc[0], d) + "(" + pp(nc[1], d) + ")";
|
||||
break;
|
||||
|
||||
case NEW:
|
||||
case NEW_WITH_ARGS:
|
||||
var nc = n.children;
|
||||
p += "new " + pp(nc[0], d);
|
||||
if (nc[1])
|
||||
p += "(" + pp(nc[1], d) + ")";
|
||||
break;
|
||||
|
||||
case ARRAY_INIT:
|
||||
p += "[";
|
||||
var nc = n.children;
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if(nc[i])
|
||||
p += pp(nc[i], d);
|
||||
p += ","
|
||||
}
|
||||
p += "]";
|
||||
break;
|
||||
|
||||
case ARRAY_COMP:
|
||||
p += "[" + pp (n.expression, d) + " ";
|
||||
p += pp(n.tail, d);
|
||||
p += "]";
|
||||
break;
|
||||
|
||||
case COMP_TAIL:
|
||||
var nc = n.children;
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
p += " ";
|
||||
p += pp(nc[i], d);
|
||||
}
|
||||
if (n.guard)
|
||||
p += " if (" + pp(n.guard, d) + ")";
|
||||
break;
|
||||
|
||||
case OBJECT_INIT:
|
||||
var nc = n.children;
|
||||
if (nc[0] && nc[0].type === PROPERTY_INIT)
|
||||
p += "{\n";
|
||||
else
|
||||
p += "{";
|
||||
for (var i = 0, j = nc.length; i < j; i++) {
|
||||
if (i > 0) {
|
||||
p += ",\n";
|
||||
}
|
||||
|
||||
var t = nc[i];
|
||||
if (t.type === PROPERTY_INIT) {
|
||||
var tc = t.children;
|
||||
var l;
|
||||
// see if the left needs to be a string
|
||||
if (tc[0].value === "" || /[^A-Za-z0-9_$]/.test(tc[0].value)) {
|
||||
l = nodeStr(tc[0]);
|
||||
} else {
|
||||
l = pp(tc[0], d);
|
||||
}
|
||||
p += indent(4, l) + ": " +
|
||||
indent(4, pp(tc[1], d)).substring(4);
|
||||
} else {
|
||||
p += indent(4, pp(t, d));
|
||||
}
|
||||
}
|
||||
p += "\n}";
|
||||
break;
|
||||
|
||||
case NULL:
|
||||
p += "null";
|
||||
break;
|
||||
|
||||
case THIS:
|
||||
p += "this";
|
||||
break;
|
||||
|
||||
case TRUE:
|
||||
p += "true";
|
||||
break;
|
||||
|
||||
case FALSE:
|
||||
p += "false";
|
||||
break;
|
||||
|
||||
case IDENTIFIER:
|
||||
case NUMBER:
|
||||
case REGEXP:
|
||||
p += n.value;
|
||||
break;
|
||||
|
||||
case STRING:
|
||||
p += nodeStr(n);
|
||||
break;
|
||||
|
||||
case GROUP:
|
||||
p += "(" + pp(n.children[0], d) + ")";
|
||||
break;
|
||||
|
||||
default:
|
||||
throw "PANIC: unknown operation " + tokens[n.type] + " " + n.toSource();
|
||||
}
|
||||
|
||||
if (n.parenthesized)
|
||||
p += ")";
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
return {
|
||||
pp: pp
|
||||
};
|
||||
|
||||
}());
|
|
@ -1,379 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Tom Austin <taustin@ucsc.edu>
|
||||
* Brendan Eich <brendan@mozilla.org>
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Dave Herman <dherman@mozilla.com>
|
||||
* Dimitris Vardoulakis <dimvar@ccs.neu.edu>
|
||||
* Patrick Walton <pcwalton@mozilla.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
/*
|
||||
* Narcissus - JS implemented in JS.
|
||||
*
|
||||
* Well-known constants and lookup tables. Many consts are generated from the
|
||||
* tokens table via eval to minimize redundancy, so consumers must be compiled
|
||||
* separately to take advantage of the simple switch-case constant propagation
|
||||
* done by SpiderMonkey.
|
||||
*/
|
||||
|
||||
(function() {
|
||||
|
||||
var narcissus = {
|
||||
options: {
|
||||
version: 185,
|
||||
},
|
||||
hostGlobal: this
|
||||
};
|
||||
Narcissus = narcissus;
|
||||
})();
|
||||
|
||||
Narcissus.definitions = (function() {
|
||||
|
||||
var tokens = [
|
||||
// End of source.
|
||||
"END",
|
||||
|
||||
// Operators and punctuators. Some pair-wise order matters, e.g. (+, -)
|
||||
// and (UNARY_PLUS, UNARY_MINUS).
|
||||
"\n", ";",
|
||||
",",
|
||||
"=",
|
||||
"?", ":", "CONDITIONAL",
|
||||
"||",
|
||||
"&&",
|
||||
"|",
|
||||
"^",
|
||||
"&",
|
||||
"==", "!=", "===", "!==",
|
||||
"<", "<=", ">=", ">",
|
||||
"<<", ">>", ">>>",
|
||||
"+", "-",
|
||||
"*", "/", "%",
|
||||
"!", "~", "UNARY_PLUS", "UNARY_MINUS",
|
||||
"++", "--",
|
||||
".",
|
||||
"[", "]",
|
||||
"{", "}",
|
||||
"(", ")",
|
||||
|
||||
// Nonterminal tree node type codes.
|
||||
"SCRIPT", "BLOCK", "LABEL", "FOR_IN", "CALL", "NEW_WITH_ARGS", "INDEX",
|
||||
"ARRAY_INIT", "OBJECT_INIT", "PROPERTY_INIT", "GETTER", "SETTER",
|
||||
"GROUP", "LIST", "LET_BLOCK", "ARRAY_COMP", "GENERATOR", "COMP_TAIL",
|
||||
|
||||
// Terminals.
|
||||
"IDENTIFIER", "NUMBER", "STRING", "REGEXP",
|
||||
|
||||
// Keywords.
|
||||
"break",
|
||||
"case", "catch", "const", "continue",
|
||||
"debugger", "default", "delete", "do",
|
||||
"else",
|
||||
"false", "finally", "for", "function",
|
||||
"if", "in", "instanceof",
|
||||
"let",
|
||||
"new", "null",
|
||||
"return",
|
||||
"switch",
|
||||
"this", "throw", "true", "try", "typeof",
|
||||
"var", "void",
|
||||
"yield",
|
||||
"while", "with",
|
||||
];
|
||||
|
||||
var statementStartTokens = [
|
||||
"break",
|
||||
"const", "continue",
|
||||
"debugger", "do",
|
||||
"for",
|
||||
"if",
|
||||
"return",
|
||||
"switch",
|
||||
"throw", "try",
|
||||
"var",
|
||||
"yield",
|
||||
"while", "with",
|
||||
];
|
||||
|
||||
// Operator and punctuator mapping from token to tree node type name.
|
||||
// NB: because the lexer doesn't backtrack, all token prefixes must themselves
|
||||
// be valid tokens (e.g. !== is acceptable because its prefixes are the valid
|
||||
// tokens != and !).
|
||||
var opTypeNames = {
|
||||
'\n': "NEWLINE",
|
||||
';': "SEMICOLON",
|
||||
',': "COMMA",
|
||||
'?': "HOOK",
|
||||
':': "COLON",
|
||||
'||': "OR",
|
||||
'&&': "AND",
|
||||
'|': "BITWISE_OR",
|
||||
'^': "BITWISE_XOR",
|
||||
'&': "BITWISE_AND",
|
||||
'===': "STRICT_EQ",
|
||||
'==': "EQ",
|
||||
'=': "ASSIGN",
|
||||
'!==': "STRICT_NE",
|
||||
'!=': "NE",
|
||||
'<<': "LSH",
|
||||
'<=': "LE",
|
||||
'<': "LT",
|
||||
'>>>': "URSH",
|
||||
'>>': "RSH",
|
||||
'>=': "GE",
|
||||
'>': "GT",
|
||||
'++': "INCREMENT",
|
||||
'--': "DECREMENT",
|
||||
'+': "PLUS",
|
||||
'-': "MINUS",
|
||||
'*': "MUL",
|
||||
'/': "DIV",
|
||||
'%': "MOD",
|
||||
'!': "NOT",
|
||||
'~': "BITWISE_NOT",
|
||||
'.': "DOT",
|
||||
'[': "LEFT_BRACKET",
|
||||
']': "RIGHT_BRACKET",
|
||||
'{': "LEFT_CURLY",
|
||||
'}': "RIGHT_CURLY",
|
||||
'(': "LEFT_PAREN",
|
||||
')': "RIGHT_PAREN"
|
||||
};
|
||||
|
||||
// Hash of keyword identifier to tokens index. NB: we must null __proto__ to
|
||||
// avoid toString, etc. namespace pollution.
|
||||
var keywords = {__proto__: null};
|
||||
|
||||
// Define const END, etc., based on the token names. Also map name to index.
|
||||
var tokenIds = {};
|
||||
|
||||
// Building up a string to be eval'd in different contexts.
|
||||
var consts = "const ";
|
||||
for (var i = 0, j = tokens.length; i < j; i++) {
|
||||
if (i > 0)
|
||||
consts += ", ";
|
||||
var t = tokens[i];
|
||||
var name;
|
||||
if (/^[a-z]/.test(t)) {
|
||||
name = t.toUpperCase();
|
||||
keywords[t] = i;
|
||||
} else {
|
||||
name = (/^\W/.test(t) ? opTypeNames[t] : t);
|
||||
}
|
||||
consts += name + " = " + i;
|
||||
tokenIds[name] = i;
|
||||
tokens[t] = i;
|
||||
}
|
||||
consts += ";";
|
||||
|
||||
var isStatementStartCode = {__proto__: null};
|
||||
for (i = 0, j = statementStartTokens.length; i < j; i++)
|
||||
isStatementStartCode[keywords[statementStartTokens[i]]] = true;
|
||||
|
||||
// Map assignment operators to their indexes in the tokens array.
|
||||
var assignOps = ['|', '^', '&', '<<', '>>', '>>>', '+', '-', '*', '/', '%'];
|
||||
|
||||
for (i = 0, j = assignOps.length; i < j; i++) {
|
||||
t = assignOps[i];
|
||||
assignOps[t] = tokens[t];
|
||||
}
|
||||
|
||||
function defineGetter(obj, prop, fn, dontDelete, dontEnum) {
|
||||
Object.defineProperty(obj, prop,
|
||||
{ get: fn, configurable: !dontDelete, enumerable: !dontEnum });
|
||||
}
|
||||
|
||||
function defineProperty(obj, prop, val, dontDelete, readOnly, dontEnum) {
|
||||
Object.defineProperty(obj, prop,
|
||||
{ value: val, writable: !readOnly, configurable: !dontDelete,
|
||||
enumerable: !dontEnum });
|
||||
}
|
||||
|
||||
// Returns true if fn is a native function. (Note: SpiderMonkey specific.)
|
||||
function isNativeCode(fn) {
|
||||
// Relies on the toString method to identify native code.
|
||||
return ((typeof fn) === "function") && fn.toString().match(/\[native code\]/);
|
||||
}
|
||||
|
||||
function getPropertyDescriptor(obj, name) {
|
||||
while (obj) {
|
||||
if (({}).hasOwnProperty.call(obj, name))
|
||||
return Object.getOwnPropertyDescriptor(obj, name);
|
||||
obj = Object.getPrototypeOf(obj);
|
||||
}
|
||||
}
|
||||
|
||||
function getOwnProperties(obj) {
|
||||
var map = {};
|
||||
for (var name in Object.getOwnPropertyNames(obj))
|
||||
map[name] = Object.getOwnPropertyDescriptor(obj, name);
|
||||
return map;
|
||||
}
|
||||
|
||||
function makePassthruHandler(obj) {
|
||||
// Handler copied from
|
||||
// http://wiki.ecmascript.org/doku.php?id=harmony:proxies&s=proxy%20object#examplea_no-op_forwarding_proxy
|
||||
return {
|
||||
getOwnPropertyDescriptor: function(name) {
|
||||
var desc = Object.getOwnPropertyDescriptor(obj, name);
|
||||
|
||||
// a trapping proxy's properties must always be configurable
|
||||
desc.configurable = true;
|
||||
return desc;
|
||||
},
|
||||
getPropertyDescriptor: function(name) {
|
||||
var desc = getPropertyDescriptor(obj, name);
|
||||
|
||||
// a trapping proxy's properties must always be configurable
|
||||
desc.configurable = true;
|
||||
return desc;
|
||||
},
|
||||
getOwnPropertyNames: function() {
|
||||
return Object.getOwnPropertyNames(obj);
|
||||
},
|
||||
defineProperty: function(name, desc) {
|
||||
Object.defineProperty(obj, name, desc);
|
||||
},
|
||||
"delete": function(name) { return delete obj[name]; },
|
||||
fix: function() {
|
||||
if (Object.isFrozen(obj)) {
|
||||
return getOwnProperties(obj);
|
||||
}
|
||||
|
||||
// As long as obj is not frozen, the proxy won't allow itself to be fixed.
|
||||
return undefined; // will cause a TypeError to be thrown
|
||||
},
|
||||
|
||||
has: function(name) { return name in obj; },
|
||||
hasOwn: function(name) { return ({}).hasOwnProperty.call(obj, name); },
|
||||
get: function(receiver, name) { return obj[name]; },
|
||||
|
||||
// bad behavior when set fails in non-strict mode
|
||||
set: function(receiver, name, val) { obj[name] = val; return true; },
|
||||
enumerate: function() {
|
||||
var result = [];
|
||||
for (name in obj) { result.push(name); };
|
||||
return result;
|
||||
},
|
||||
keys: function() { return Object.keys(obj); }
|
||||
};
|
||||
}
|
||||
|
||||
// default function used when looking for a property in the global object
|
||||
function noPropFound() { return undefined; }
|
||||
|
||||
var hasOwnProperty = ({}).hasOwnProperty;
|
||||
|
||||
function StringMap() {
|
||||
this.table = Object.create(null, {});
|
||||
this.size = 0;
|
||||
}
|
||||
|
||||
StringMap.prototype = {
|
||||
has: function(x) { return hasOwnProperty.call(this.table, x); },
|
||||
set: function(x, v) {
|
||||
if (!hasOwnProperty.call(this.table, x))
|
||||
this.size++;
|
||||
this.table[x] = v;
|
||||
},
|
||||
get: function(x) { return this.table[x]; },
|
||||
getDef: function(x, thunk) {
|
||||
if (!hasOwnProperty.call(this.table, x)) {
|
||||
this.size++;
|
||||
this.table[x] = thunk();
|
||||
}
|
||||
return this.table[x];
|
||||
},
|
||||
forEach: function(f) {
|
||||
var table = this.table;
|
||||
for (var key in table)
|
||||
f.call(this, key, table[key]);
|
||||
},
|
||||
toString: function() { return "[object StringMap]" }
|
||||
};
|
||||
|
||||
// non-destructive stack
|
||||
function Stack(elts) {
|
||||
this.elts = elts || null;
|
||||
}
|
||||
|
||||
Stack.prototype = {
|
||||
push: function(x) {
|
||||
return new Stack({ top: x, rest: this.elts });
|
||||
},
|
||||
top: function() {
|
||||
if (!this.elts)
|
||||
throw new Error("empty stack");
|
||||
return this.elts.top;
|
||||
},
|
||||
isEmpty: function() {
|
||||
return this.top === null;
|
||||
},
|
||||
find: function(test) {
|
||||
for (var elts = this.elts; elts; elts = elts.rest) {
|
||||
if (test(elts.top))
|
||||
return elts.top;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
has: function(x) {
|
||||
return Boolean(this.find(function(elt) { return elt === x }));
|
||||
},
|
||||
forEach: function(f) {
|
||||
for (var elts = this.elts; elts; elts = elts.rest) {
|
||||
f(elts.top);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
tokens: tokens,
|
||||
opTypeNames: opTypeNames,
|
||||
keywords: keywords,
|
||||
isStatementStartCode: isStatementStartCode,
|
||||
tokenIds: tokenIds,
|
||||
consts: consts,
|
||||
assignOps: assignOps,
|
||||
defineGetter: defineGetter,
|
||||
defineProperty: defineProperty,
|
||||
isNativeCode: isNativeCode,
|
||||
makePassthruHandler: makePassthruHandler,
|
||||
noPropFound: noPropFound,
|
||||
StringMap: StringMap,
|
||||
Stack: Stack
|
||||
};
|
||||
}());
|
File diff suppressed because it is too large
|
@ -1,472 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Tom Austin <taustin@ucsc.edu>
|
||||
* Brendan Eich <brendan@mozilla.org>
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Dave Herman <dherman@mozilla.com>
|
||||
* Dimitris Vardoulakis <dimvar@ccs.neu.edu>
|
||||
* Patrick Walton <pcwalton@mozilla.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
/*
|
||||
* Narcissus - JS implemented in JS.
|
||||
*
|
||||
* Lexical scanner.
|
||||
*/
|
||||
|
||||
Narcissus.lexer = (function() {
|
||||
|
||||
var definitions = Narcissus.definitions;
|
||||
|
||||
// Set constants in the local scope.
|
||||
eval(definitions.consts);
|
||||
|
||||
// Build up a trie of operator tokens.
|
||||
var opTokens = {};
|
||||
for (var op in definitions.opTypeNames) {
|
||||
if (op === '\n' || op === '.')
|
||||
continue;
|
||||
|
||||
var node = opTokens;
|
||||
for (var i = 0; i < op.length; i++) {
|
||||
var ch = op[i];
|
||||
if (!(ch in node))
|
||||
node[ch] = {};
|
||||
node = node[ch];
|
||||
node.op = op;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Tokenizer :: (source, filename, line number) -> Tokenizer
|
||||
*/
|
||||
function Tokenizer(s, f, l) {
|
||||
this.cursor = 0;
|
||||
this.source = String(s);
|
||||
this.tokens = [];
|
||||
this.tokenIndex = 0;
|
||||
this.lookahead = 0;
|
||||
this.scanNewlines = false;
|
||||
this.unexpectedEOF = false;
|
||||
this.filename = f || "";
|
||||
this.lineno = l || 1;
|
||||
}
|
||||
|
||||
Tokenizer.prototype = {
|
||||
get done() {
|
||||
// We need to set scanOperand to true here because the first thing
|
||||
// might be a regexp.
|
||||
return this.peek(true) === END;
|
||||
},
|
||||
|
||||
get token() {
|
||||
return this.tokens[this.tokenIndex];
|
||||
},
|
||||
|
||||
match: function (tt, scanOperand) {
|
||||
return this.get(scanOperand) === tt || this.unget();
|
||||
},
|
||||
|
||||
mustMatch: function (tt) {
|
||||
if (!this.match(tt)) {
|
||||
throw this.newSyntaxError("Missing " +
|
||||
definitions.tokens[tt].toLowerCase());
|
||||
}
|
||||
return this.token;
|
||||
},
|
||||
|
||||
forceIdentifier: function() {
|
||||
if (!this.match(IDENTIFIER)) {
|
||||
// keywords are valid property names in ES 5
|
||||
if (this.get() >= definitions.keywords[0] || this.unget) {
|
||||
this.token.type = IDENTIFIER;
|
||||
}
|
||||
else {
|
||||
throw this.newSyntaxError("Missing identifier");
|
||||
}
|
||||
}
|
||||
return this.token;
|
||||
},
|
||||
|
||||
peek: function (scanOperand) {
|
||||
var tt, next;
|
||||
if (this.lookahead) {
|
||||
next = this.tokens[(this.tokenIndex + this.lookahead) & 3];
|
||||
tt = (this.scanNewlines && next.lineno !== this.lineno)
|
||||
? NEWLINE
|
||||
: next.type;
|
||||
} else {
|
||||
tt = this.get(scanOperand);
|
||||
this.unget();
|
||||
}
|
||||
return tt;
|
||||
},
|
||||
|
||||
peekOnSameLine: function (scanOperand) {
|
||||
this.scanNewlines = true;
|
||||
var tt = this.peek(scanOperand);
|
||||
this.scanNewlines = false;
|
||||
return tt;
|
||||
},
|
||||
|
||||
// Eat comments and whitespace.
|
||||
skip: function () {
|
||||
var input = this.source;
|
||||
for (;;) {
|
||||
var ch = input[this.cursor++];
|
||||
var next = input[this.cursor];
|
||||
if (ch === '\n' && !this.scanNewlines) {
|
||||
this.lineno++;
|
||||
} else if (ch === '/' && next === '*') {
|
||||
this.cursor++;
|
||||
for (;;) {
|
||||
ch = input[this.cursor++];
|
||||
if (ch === undefined)
|
||||
throw this.newSyntaxError("Unterminated comment");
|
||||
|
||||
if (ch === '*') {
|
||||
next = input[this.cursor];
|
||||
if (next === '/') {
|
||||
this.cursor++;
|
||||
break;
|
||||
}
|
||||
} else if (ch === '\n') {
|
||||
this.lineno++;
|
||||
}
|
||||
}
|
||||
} else if (ch === '/' && next === '/') {
|
||||
this.cursor++;
|
||||
for (;;) {
|
||||
ch = input[this.cursor++];
|
||||
if (ch === undefined)
|
||||
return;
|
||||
|
||||
if (ch === '\n') {
|
||||
this.lineno++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (ch !== ' ' && ch !== '\t') {
|
||||
this.cursor--;
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Lex the exponential part of a number, if present. Return true iff an
|
||||
// exponential part was found.
|
||||
lexExponent: function() {
|
||||
var input = this.source;
|
||||
var next = input[this.cursor];
|
||||
if (next === 'e' || next === 'E') {
|
||||
this.cursor++;
|
||||
ch = input[this.cursor++];
|
||||
if (ch === '+' || ch === '-')
|
||||
ch = input[this.cursor++];
|
||||
|
||||
if (ch < '0' || ch > '9')
|
||||
throw this.newSyntaxError("Missing exponent");
|
||||
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while (ch >= '0' && ch <= '9');
|
||||
this.cursor--;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
lexZeroNumber: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
token.type = NUMBER;
|
||||
|
||||
ch = input[this.cursor++];
|
||||
if (ch === '.') {
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while (ch >= '0' && ch <= '9');
|
||||
this.cursor--;
|
||||
|
||||
this.lexExponent();
|
||||
token.value = parseFloat(input.substring(token.start, this.cursor));
|
||||
} else if (ch === 'x' || ch === 'X') {
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while ((ch >= '0' && ch <= '9') || (ch >= 'a' && ch <= 'f') ||
|
||||
(ch >= 'A' && ch <= 'F'));
|
||||
this.cursor--;
|
||||
|
||||
token.value = parseInt(input.substring(token.start, this.cursor));
|
||||
} else if (ch >= '0' && ch <= '7') {
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while (ch >= '0' && ch <= '7');
|
||||
this.cursor--;
|
||||
|
||||
token.value = parseInt(input.substring(token.start, this.cursor));
|
||||
} else {
|
||||
this.cursor--;
|
||||
this.lexExponent(); // 0E1, &c.
|
||||
token.value = 0;
|
||||
}
|
||||
},
|
||||
|
||||
lexNumber: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
token.type = NUMBER;
|
||||
|
||||
var floating = false;
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
if (ch === '.' && !floating) {
|
||||
floating = true;
|
||||
ch = input[this.cursor++];
|
||||
}
|
||||
} while (ch >= '0' && ch <= '9');
|
||||
|
||||
this.cursor--;
|
||||
|
||||
var exponent = this.lexExponent();
|
||||
floating = floating || exponent;
|
||||
|
||||
var str = input.substring(token.start, this.cursor);
|
||||
token.value = floating ? parseFloat(str) : parseInt(str);
|
||||
},
|
||||
|
||||
lexDot: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
var next = input[this.cursor];
|
||||
if (next >= '0' && next <= '9') {
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while (ch >= '0' && ch <= '9');
|
||||
this.cursor--;
|
||||
|
||||
this.lexExponent();
|
||||
|
||||
token.type = NUMBER;
|
||||
token.value = parseFloat(input.substring(token.start, this.cursor));
|
||||
} else {
|
||||
token.type = DOT;
|
||||
token.assignOp = null;
|
||||
token.value = '.';
|
||||
}
|
||||
},
|
||||
|
||||
lexString: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
token.type = STRING;
|
||||
|
||||
var hasEscapes = false;
|
||||
var delim = ch;
|
||||
while ((ch = input[this.cursor++]) !== delim) {
|
||||
if (this.cursor == input.length)
|
||||
throw this.newSyntaxError("Unterminated string literal");
|
||||
if (ch === '\\') {
|
||||
hasEscapes = true;
|
||||
if (++this.cursor == input.length)
|
||||
throw this.newSyntaxError("Unterminated string literal");
|
||||
}
|
||||
}
|
||||
|
||||
token.value = hasEscapes
|
||||
? eval(input.substring(token.start, this.cursor))
|
||||
: input.substring(token.start + 1, this.cursor - 1);
|
||||
},
|
||||
|
||||
lexRegExp: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
token.type = REGEXP;
|
||||
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
if (ch === '\\') {
|
||||
this.cursor++;
|
||||
} else if (ch === '[') {
|
||||
do {
|
||||
if (ch === undefined)
|
||||
throw this.newSyntaxError("Unterminated character class");
|
||||
|
||||
if (ch === '\\')
|
||||
this.cursor++;
|
||||
|
||||
ch = input[this.cursor++];
|
||||
} while (ch !== ']');
|
||||
} else if (ch === undefined) {
|
||||
throw this.newSyntaxError("Unterminated regex");
|
||||
}
|
||||
} while (ch !== '/');
|
||||
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while (ch >= 'a' && ch <= 'z');
|
||||
|
||||
this.cursor--;
|
||||
|
||||
token.value = eval(input.substring(token.start, this.cursor));
|
||||
},
|
||||
|
||||
lexOp: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
|
||||
// A bit ugly, but it seems wasteful to write a trie lookup routine
|
||||
// for only 3 characters...
|
||||
var node = opTokens[ch];
|
||||
var next = input[this.cursor];
|
||||
if (next in node) {
|
||||
node = node[next];
|
||||
this.cursor++;
|
||||
next = input[this.cursor];
|
||||
if (next in node) {
|
||||
node = node[next];
|
||||
this.cursor++;
|
||||
next = input[this.cursor];
|
||||
}
|
||||
}
|
||||
|
||||
var op = node.op;
|
||||
if (definitions.assignOps[op] && input[this.cursor] === '=') {
|
||||
this.cursor++;
|
||||
token.type = ASSIGN;
|
||||
token.assignOp = definitions.tokenIds[definitions.opTypeNames[op]];
|
||||
op += '=';
|
||||
} else {
|
||||
token.type = definitions.tokenIds[definitions.opTypeNames[op]];
|
||||
token.assignOp = null;
|
||||
}
|
||||
|
||||
token.value = op;
|
||||
},
|
||||
|
||||
// FIXME: Unicode escape sequences
|
||||
// FIXME: Unicode identifiers
|
||||
lexIdent: function (ch) {
|
||||
var token = this.token, input = this.source;
|
||||
|
||||
do {
|
||||
ch = input[this.cursor++];
|
||||
} while ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') ||
|
||||
(ch >= '0' && ch <= '9') || ch === '$' || ch === '_');
|
||||
|
||||
this.cursor--; // Put the non-word character back.
|
||||
|
||||
var id = input.substring(token.start, this.cursor);
|
||||
token.type = definitions.keywords[id] || IDENTIFIER;
|
||||
token.value = id;
|
||||
},
|
||||
|
||||
/*
|
||||
* Tokenizer.get :: void -> token type
|
||||
*
|
||||
* Consume input *only* if there is no lookahead.
|
||||
* Dispatch to the appropriate lexing function depending on the input.
|
||||
*/
|
||||
get: function (scanOperand) {
|
||||
var token;
|
||||
while (this.lookahead) {
|
||||
--this.lookahead;
|
||||
this.tokenIndex = (this.tokenIndex + 1) & 3;
|
||||
token = this.tokens[this.tokenIndex];
|
||||
if (token.type !== NEWLINE || this.scanNewlines)
|
||||
return token.type;
|
||||
}
|
||||
|
||||
this.skip();
|
||||
|
||||
this.tokenIndex = (this.tokenIndex + 1) & 3;
|
||||
token = this.tokens[this.tokenIndex];
|
||||
if (!token)
|
||||
this.tokens[this.tokenIndex] = token = {};
|
||||
|
||||
var input = this.source;
|
||||
if (this.cursor === input.length)
|
||||
return token.type = END;
|
||||
|
||||
token.start = this.cursor;
|
||||
token.lineno = this.lineno;
|
||||
|
||||
var ch = input[this.cursor++];
|
||||
if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch === '$' || ch === '_') {
|
||||
this.lexIdent(ch);
|
||||
} else if (scanOperand && ch === '/') {
|
||||
this.lexRegExp(ch);
|
||||
} else if (ch in opTokens) {
|
||||
this.lexOp(ch);
|
||||
} else if (ch === '.') {
|
||||
this.lexDot(ch);
|
||||
} else if (ch >= '1' && ch <= '9') {
|
||||
this.lexNumber(ch);
|
||||
} else if (ch === '0') {
|
||||
this.lexZeroNumber(ch);
|
||||
} else if (ch === '"' || ch === "'") {
|
||||
this.lexString(ch);
|
||||
} else if (this.scanNewlines && ch === '\n') {
|
||||
token.type = NEWLINE;
|
||||
token.value = '\n';
|
||||
this.lineno++;
|
||||
} else {
|
||||
throw this.newSyntaxError("Illegal token");
|
||||
}
|
||||
|
||||
token.end = this.cursor;
|
||||
return token.type;
|
||||
},
|
||||
|
||||
/*
|
||||
* Tokenizer.unget :: void -> undefined
|
||||
*
|
||||
* Match depends on unget returning undefined.
|
||||
*/
|
||||
unget: function () {
|
||||
if (++this.lookahead === 4) throw "PANIC: too much lookahead!";
|
||||
this.tokenIndex = (this.tokenIndex - 1) & 3;
|
||||
},
|
||||
|
||||
newSyntaxError: function (m) {
|
||||
var e = new SyntaxError(m, this.filename, this.lineno);
|
||||
e.source = this.source;
|
||||
e.cursor = this.lookahead
|
||||
? this.tokens[(this.tokenIndex + this.lookahead) & 3].start
|
||||
: this.cursor;
|
||||
return e;
|
||||
},
|
||||
};
|
||||
|
||||
return { Tokenizer: Tokenizer };
|
||||
|
||||
}());
|
1441
lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/lib/jsparse.js
generated
vendored
File diff suppressed because it is too large
|
@ -1,48 +0,0 @@
|
|||
/* vim: set sw=4 ts=4 et tw=78: */
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is the Narcissus JavaScript engine.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Brendan Eich <brendan@mozilla.org>.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2004
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Tom Austin <taustin@ucsc.edu>
|
||||
* Brendan Eich <brendan@mozilla.org>
|
||||
* Shu-Yu Guo <shu@rfrn.org>
|
||||
* Dave Herman <dherman@mozilla.com>
|
||||
* Dimitris Vardoulakis <dimvar@ccs.neu.edu>
|
||||
* Patrick Walton <pcwalton@mozilla.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either the GNU General Public License Version 2 or later (the "GPL"), or
|
||||
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
require("./jsdefs");
|
||||
require("./jslex");
|
||||
|
||||
for (var exp in Narcissus.lexer)
|
||||
exports[exp] = Narcissus.lexer[exp];
|
|
@@ -1,49 +0,0 @@
/* vim: set sw=4 ts=4 et tw=78: */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Narcissus JavaScript engine.
 *
 * The Initial Developer of the Original Code is
 * Brendan Eich <brendan@mozilla.org>.
 * Portions created by the Initial Developer are Copyright (C) 2004
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *   Tom Austin <taustin@ucsc.edu>
 *   Brendan Eich <brendan@mozilla.org>
 *   Shu-Yu Guo <shu@rfrn.org>
 *   Dave Herman <dherman@mozilla.com>
 *   Dimitris Vardoulakis <dimvar@ccs.neu.edu>
 *   Patrick Walton <pcwalton@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

require("./jsdefs");
require("./jslex");
require("./jsparse");

for (var exp in Narcissus.parser)
    exports[exp] = Narcissus.parser[exp];
@@ -1,47 +0,0 @@
/* vim: set sw=4 ts=4 et tw=78: */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Narcissus JavaScript engine.
 *
 * The Initial Developer of the Original Code is
 * Brendan Eich <brendan@mozilla.org>.
 * Portions created by the Initial Developer are Copyright (C) 2004
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *   Tom Austin <taustin@ucsc.edu>
 *   Brendan Eich <brendan@mozilla.org>
 *   Shu-Yu Guo <shu@rfrn.org>
 *   Dave Herman <dherman@mozilla.com>
 *   Dimitris Vardoulakis <dimvar@ccs.neu.edu>
 *   Patrick Walton <pcwalton@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

exports.definitions = require("./lib/definitions");
exports.lexer = require("./lib/lexer");
exports.parser = require("./lib/parser");
exports.decompiler = require("./lib/decompiler");
@@ -1,96 +0,0 @@
#!/usr/bin/python
#
# Narcissus 'shell' for use with jstests.py
# Expects to be in the same directory as ./js
# Expects the Narcissus src files to be in ./narcissus/

import os, re, sys, signal
from subprocess import *
from optparse import OptionParser

THIS_DIR = os.path.dirname(__file__)
LIB_DIR = os.path.abspath(os.path.join(THIS_DIR, 'lib'))

if 'NJS_SHELL' in os.environ:
    js_cmd = os.path.abspath(os.environ['NJS_SHELL'])
else:
    js_cmd = os.path.abspath(os.path.join(THIS_DIR, 'js'))

narc_jsdefs = os.path.join(LIB_DIR, "jsdefs.js")
narc_jslex = os.path.join(LIB_DIR, "jslex.js")
narc_jsparse = os.path.join(LIB_DIR, "jsparse.js")
narc_jsdecomp = os.path.join(LIB_DIR, "jsdecomp.js");
narc_jsexec = os.path.join(LIB_DIR, "jsexec.js")

def handler(signum, frame):
    print ''
    # the exit code produced by ./js on SIGINT
    sys.exit(130)

signal.signal(signal.SIGINT, handler)

if __name__ == '__main__':
    op = OptionParser(usage='%prog [TEST-SPECS]')
    op.add_option('-f', '--file', dest='js_files', action='append',
                  help='JS file to load', metavar='FILE')
    op.add_option('-e', '--expression', dest='js_exps', action='append',
                  help='JS expression to evaluate')
    op.add_option('-i', '--interactive', dest='js_interactive', action='store_true',
                  help='enable interactive shell')
    op.add_option('-I', '--interactive-meta', dest='js_interactive_meta', action='store_true',
                  help='load Narcissus but run interactive SpiderMonkey shell')
    op.add_option('-H', '--harmony', dest='js_harmony', action='store_true',
                  help='enable ECMAScript Harmony mode')
    op.add_option('-P', '--parse-only', dest='js_parseonly', action='store_true',
                  help='stop after the parsing stage and output pretty-printed source code')
    op.add_option('-3', '--ecma3-only', dest='js_ecma3only', action='store_true',
                  help='restrict source language to ECMA-262 Edition 3')
    op.add_option('-p', '--paren-free', dest='js_parenfree', action='store_true',
                  help='use experimental paren-free syntax')

    (options, args) = op.parse_args()

    cmd = ""

    if options.js_harmony:
        cmd += 'Narcissus.options.version = "harmony"; '

    if options.js_ecma3only:
        cmd += 'Narcissus.options.ecma3OnlyMode = true; '

    if options.js_parenfree:
        cmd += 'Narcissus.options.parenFreeMode = true; '

    if options.js_exps:
        for exp in options.js_exps:
            if options.js_parseonly:
                cmd += 'print(Narcissus.decompiler.pp(Narcissus.parser.parse("%s"))); ' % exp.replace('"', '\\"')
            else:
                cmd += 'Narcissus.interpreter.evaluate("%s"); ' % exp.replace('"', '\\"')

    if options.js_files:
        for file in options.js_files:
            if options.js_parseonly:
                cmd += 'print(Narcissus.decompiler.pp(Narcissus.parser.parse(snarf("%(file)s"), "%(file)s", 1))); ' % { 'file':file }
            else:
                cmd += 'Narcissus.interpreter.evaluate(snarf("%(file)s"), "%(file)s", 1); ' % { 'file':file }

    if (not options.js_exps) and (not options.js_files):
        options.js_interactive = True

    argv = [js_cmd, '-f', narc_jsdefs, '-f', narc_jslex, '-f', narc_jsparse, '-f', narc_jsdecomp, '-f', narc_jsexec]

    if options.js_interactive_meta:
        argv += ['-e', cmd, '-i']
    else:
        if options.js_interactive:
            cmd += 'Narcissus.interpreter.repl();'
            argv = ['rlwrap'] + argv
        argv += ['-e', cmd]

    try:
        Popen(argv).wait()
    except OSError as e:
        if e.errno is 2 and options.js_interactive:
            Popen(argv[1:]).wait()
@@ -1,7 +0,0 @@
{
    "name": "narcissus",
    "version": "0.0.1",
    "author": "Mozilla",
    "directories": {"lib": "./lib"},
    "main": "main.js"
}
1451  lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/xfail/narcissus-failures.txt (generated, vendored)
File diff suppressed because it is too large
18  lib/adapters/neo4j-lib/node_modules/streamline/deps/narcissus/xfail/narcissus-slow.txt (generated, vendored)
@@ -1,18 +0,0 @@
narcissus/../ecma/Date/15.9.5.2-2-n.js
narcissus/../ecma/Date/15.9.5.23-3-n.js
narcissus/../ecma/Date/15.9.5.3-1-n.js
narcissus/../ecma/Date/15.9.5.4-2-n.js
narcissus/../ecma/Date/15.9.5.8.js
narcissus/../ecma/Date/dst-offset-caching-1-of-8.js
narcissus/../ecma/Date/dst-offset-caching-2-of-8.js
narcissus/../ecma/Date/dst-offset-caching-3-of-8.js
narcissus/../ecma/Date/dst-offset-caching-4-of-8.js
narcissus/../ecma/Date/dst-offset-caching-5-of-8.js
narcissus/../ecma/Date/dst-offset-caching-6-of-8.js
narcissus/../ecma/Date/dst-offset-caching-7-of-8.js
narcissus/../ecma/Date/dst-offset-caching-8-of-8.js
narcissus/../ecma/Date/15.9.5.11-2.js
narcissus/../ecma/Date/15.9.5.12-2.js
narcissus/../ecma_3/Date/15.9.5.4.js
narcissus/../ecma_3/Date/regress-452786.js
narcissus/../ecma/Date/15.9.5.10-2.js
@@ -1,9 +0,0 @@
<html>
<head>
    <title>Streamline development</title>
</head>
<body>
    <p><a href="examples/streamlineMe/streamlineMe.html">interactive demo</a></p>
    <p><a href="test/common/transform-test.html">transform unit test</a></p>
</body>
</html>
82  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage.js (generated, vendored)
@@ -1,82 +0,0 @@
/*** Generated by streamline 0.1.35 - DO NOT EDIT ***/

var __global = typeof global !== 'undefined' ? global : window;
function __cb(_, fn){ var ctx = __global.__context; return function(err, result){ __global.__context = ctx; if (err) return _(err); try { return fn(null, result); } catch (ex) { return __propagate(_, ex); } } }
function __future(fn, args, i){ var done, err, result; var cb = function(e, r){ done = true; err = e, result = r; }; args = Array.prototype.slice.call(args); args[i] = function(e, r){ cb(e, r); }; fn.apply(this, args); return function(_){ if (done) _.call(this, err, result); else cb = _.bind(this); } .bind(this); }
function __nt(_, fn){ var i = 0; var cb = __cb(_, fn); var safeCb = function(){ try { cb(); } catch (ex) { __propagate(cb, ex); } }; if (typeof process != "undefined" && typeof process.nextTick == "function") return function(){ if (++i % 20 == 0) process.nextTick(safeCb); else cb(); }; else return function(){ if (++i % 20 == 0) setTimeout(safeCb); else cb(); }; }
function __propagate(_, err){ try { _(err); } catch (ex) { __trap(ex); } }
function __trap(err){ if (err) { if (__global.__context && __global.__context.errorHandler) __global.__context.errorHandler(err); else console.error("UNCAUGHT EXCEPTION: " + err.message + "\n" + err.stack); } }
(function __main(_) {
var fs, p, t0;
/* 13 */ function du(_, path) {
if (!_) {
return __future(du, arguments, 0);
}
;
var total, stat, files, i;
/* 14 */ total = 0;
/* 15 */ return fs.stat(path, __cb(_, function(__0, __1) {
stat = __1;
return (function(__then) {
/* 16 */ if (stat.isFile()) {
/* 17 */ return fs.readFile(path, __cb(_, function(__0, __2) {
/* 17 */ total += __2.length;
__then();
}));
}
else {
return (function(__then) {
/* 19 */ if (stat.isDirectory()) {
/* 20 */ return fs.readdir(path, __cb(_, function(__0, __3) {
files = __3;
/* 21 */ i = 0;
var __9 = false;
return (function(__break) {
var __loop = __nt(_, function() {
if (__9) {
/* 21 */ i++;
}
else {
__9 = true;
}
;
/* 21 */ var __8 = (i < files.length);
if (__8) {
/* 22 */ return du(__cb(_, function(__0, __4) {
/* 22 */ total += __4;
__loop();
/* 22 */ }), ((path + "/") + files[i]));
}
else {
__break();
}
;
});
__loop();
})(function() {
/* 24 */ console.log(((path + ": ") + total));
__then();
});
}));
}
else {
/* 27 */ console.log((path + ": odd file"));
__then();
}
;
})(__then);
}
;
})(function() {
/* 29 */ return _(null, total);
});
}));
};
/* 11 */ fs = require("fs");
/* 32 */ p = ((process.argv.length > 2) ? process.argv[2] : ".");
/* 34 */ t0 = Date.now();
/* 35 */ return du(__cb(_, function() {
/* 36 */ console.log((("completed in " + ((Date.now() - t0))) + " ms"));
_();
/* 35 */ }), p);
}).call(this, __trap);
78  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage2.js (generated, vendored)
@@ -1,78 +0,0 @@
/*** Generated by streamline 0.1.35 - DO NOT EDIT ***/

var __global = typeof global !== 'undefined' ? global : window;
function __cb(_, fn){ var ctx = __global.__context; return function(err, result){ __global.__context = ctx; if (err) return _(err); try { return fn(null, result); } catch (ex) { return __propagate(_, ex); } } }
function __future(fn, args, i){ var done, err, result; var cb = function(e, r){ done = true; err = e, result = r; }; args = Array.prototype.slice.call(args); args[i] = function(e, r){ cb(e, r); }; fn.apply(this, args); return function(_){ if (done) _.call(this, err, result); else cb = _.bind(this); } .bind(this); }
function __propagate(_, err){ try { _(err); } catch (ex) { __trap(ex); } }
function __trap(err){ if (err) { if (__global.__context && __global.__context.errorHandler) __global.__context.errorHandler(err); else console.error("UNCAUGHT EXCEPTION: " + err.message + "\n" + err.stack); } }
(function __main(_) {
var fs, flows, fileFunnel, p, t0;
/* 27 */ function du(_, path) {
if (!_) {
return __future(du, arguments, 0);
}
;
var total, stat, files, futures;
/* 28 */ total = 0;
/* 29 */ return fs.stat(path, __cb(_, function(__0, __3) {
stat = __3;
return (function(__then) {
/* 30 */ if (stat.isFile()) {
/* 31 */ return fileFunnel(__cb(_, __then), function __1(_) {
if (!_) {
return __future(__1, arguments, 0);
}
;
/* 32 */ return fs.readFile(path, __cb(_, function(__0, __1) {
/* 32 */ total += __1.length;
_();
}));
});
}
else {
return (function(__then) {
/* 36 */ if (stat.isDirectory()) {
/* 37 */ return fs.readdir(path, __cb(_, function(__0, __4) {
files = __4;
/* 38 */ futures = files.map(function(file) {
/* 39 */ return du(null, ((path + "/") + file));
});
/* 41 */ return flows.reduce(__cb(_, function(__0, __5) {
/* 41 */ total += __5;
/* 44 */ console.log(((path + ": ") + total));
__then();
/* 41 */ }), futures, function __2(_, val, future) {
if (!_) {
return __future(__2, arguments, 0);
}
;
/* 42 */ return future(__cb(_, function(__0, __2) {
/* 42 */ var __1 = (val + __2);
return _(null, __1);
}));
/* 43 */ }, 0);
}));
}
else {
/* 47 */ console.log((path + ": odd file"));
__then();
}
;
})(__then);
}
;
})(function() {
/* 49 */ return _(null, total);
});
}));
};
/* 22 */ fs = require("fs");
/* 23 */ flows = require("streamline/lib/util/flows");
/* 25 */ fileFunnel = flows.funnel(20);
/* 52 */ p = ((process.argv.length > 2) ? process.argv[2] : ".");
/* 54 */ t0 = Date.now();
/* 55 */ return du(__cb(_, function() {
/* 56 */ console.log((("completed in " + ((Date.now() - t0))) + " ms"));
_();
/* 55 */ }), p);
}).call(this, __trap);
51  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage2_.coffee (generated, vendored)
@@ -1,51 +0,0 @@
#
# Usage: coffee-streamline diskUsage2_.coffee [path]
#
# This file is a parralelized version of the `diskUsage.coffee` example.
#
# The `spray` function is used to parallelize the processing on all the entries under a directory.
# We use it with `collectAll_` because we want to continue the algorithm when all the
# entries have been processed.
#
# Without any additional preventive measure, this 'sprayed' implementation quickly exhausts
# file descriptors because of the number of concurrently open file increases exponentially
# as we go deeper in the tree.
#
# The remedy is to channel the call that opens the file through a funnel.
# With the funnel there won't be more that 20 files concurrently open at any time
#
# Note: You can disable the funnel by setting its size to -1.
#
# On my machine, the parallel version is almost twice faster than the sequential version.
#

fs = require 'fs'
flows = require 'streamline/lib/util/flows'

fileFunnel = flows.funnel 20

du = (_, path) ->
    total = 0
    stat = fs.stat path, _
    if stat.isFile()
        fileFunnel _, (_) ->
            total += fs.readFile(path, _).length
    else if stat.isDirectory()
        files = fs.readdir path, _
        futures = for f in files
            du null, path + "/" + f
        total += flows.reduce _, futures, ((_, val, future) -> val + future _), 0
        console.log path + ": " + total
    else
        console.log path + ": odd file"
    total

p = if process.argv.length > 2 then process.argv[2] else "."

t0 = Date.now()

try
    result = du _, p
    console.log "completed in " + (Date.now() - t0) + " ms"
catch err
    console.log err.toString() + "\n" + err.stack
56  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage2_.js (generated, vendored)
@@ -1,56 +0,0 @@
/*
 * Usage: node-streamline diskUsage2 [path]
 *
 * This file is a parralelized version of the `diskUsage.js` example.
 *
 * The `spray` function is used to parallelize the processing on all the entries under a directory.
 * We use it with `collectAll_` because we want to continue the algorithm when all the
 * entries have been processed.
 *
 * Without any additional preventive measure, this 'sprayed' implementation quickly exhausts
 * file descriptors because of the number of concurrently open file increases exponentially
 * as we go deeper in the tree.
 *
 * The remedy is to channel the call that opens the file through a funnel.
 * With the funnel there won't be more that 20 files concurrently open at any time
 *
 * Note: You can disable the funnel by setting its size to -1.
 *
 * On my machine, the parallel version is almost twice faster than the sequential version.
 */

var fs = require('fs');
var flows = require('streamline/lib/util/flows');

var fileFunnel = flows.funnel(20);

function du(_, path){
    var total = 0;
    var stat = fs.stat(path, _);
    if (stat.isFile()) {
        fileFunnel(_, function(_){
            total += fs.readFile(path, _).length;
        });
    }
    else
    if (stat.isDirectory()) {
        var files = fs.readdir(path, _);
        var futures = files.map(function(file){
            return du(null, path + "/" + file);
        });
        total += flows.reduce(_, futures, function(_, val, future) {
            return val + future(_);
        }, 0);
        console.log(path + ": " + total);
    }
    else {
        console.log(path + ": odd file");
    }
    return total;
}

var p = process.argv.length > 2 ? process.argv[2] : ".";

var t0 = Date.now();
du(_, p);
console.log("completed in " + (Date.now() - t0) + " ms");
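Note: the removed diskUsage2 examples above cap concurrent file reads with streamline's flows.funnel(20). The sketch below is an illustration only (not part of this commit) of the same funnel idea in plain callback JavaScript: a queue that lets at most `max` asynchronous jobs run at once. The names makeFunnel, job, and done are hypothetical.

// makeFunnel(max) returns a function that runs async jobs, at most `max` at a time.
// A job is a function taking a single `done` callback; extra jobs wait in a queue.
function makeFunnel(max) {
    var running = 0, queue = [];
    function next() {
        if (running >= max || queue.length === 0) return;
        running++;
        var job = queue.shift();
        job(function done() {
            running--;
            next();
        });
    }
    return function (job) {
        queue.push(job);
        next();
    };
}

// Example: read many files while keeping at most 20 file descriptors open.
var fs = require('fs');
var fileFunnel = makeFunnel(20);
['a.txt', 'b.txt'].forEach(function (path) {
    fileFunnel(function (done) {
        fs.readFile(path, function (err, data) {
            if (!err) console.log(path + ": " + data.length);
            done();
        });
    });
});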
11  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage2_coffee.js (generated, vendored)
@@ -1,11 +0,0 @@
// diskUsage2_coffee.js
// Wrapper for diskUsage2_.coffee that tests Streamline's require() of
// CoffeeScript files.
//
// Usage:
//   node diskUsage2_coffee.js [path]

require('coffee-script');
require('streamline');

require('./diskUsage2_.coffee');
35  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage_.coffee (generated, vendored)
@@ -1,35 +0,0 @@
#
# Usage: coffee-streamline diskUsage_.coffee [path]
#
# Recursively computes the size of directories.
#
# Demonstrates how standard asynchronous node.js functions
# like fs.stat, fs.readdir, fs.readFile can be called from 'streamlined'
# Javascript code.
#

fs = require 'fs'

du = (_, path) ->
    total = 0
    stat = fs.stat path, _
    if stat.isFile()
        total += fs.readFile(path, _).length
    else if stat.isDirectory()
        files = fs.readdir path, _
        for f in files
            total += du _, path + "/" + f
        console.log path + ": " + total
    else
        console.log path + ": odd file"
    total

p = if process.argv.length > 2 then process.argv[2] else "."

t0 = Date.now()

try
    result = du _, p
    console.log "completed in " + (Date.now() - t0) + " ms"
catch err
    console.log err.toString() + "\n" + err.stack
37  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage_.js (generated, vendored)
@@ -1,37 +0,0 @@
/*
 * Usage: node-streamline diskUsage [path]
 *
 * Recursively computes the size of directories.
 *
 * Demonstrates how standard asynchronous node.js functions
 * like fs.stat, fs.readdir, fs.readFile can be called from 'streamlined'
 * Javascript code.
 */

var fs = require('fs');

function du(_, path) {
    var total = 0;
    var stat = fs.stat(path, _);
    if (stat.isFile()) {
        total += fs.readFile(path, _).length;
    }
    else if (stat.isDirectory()) {
        var files = fs.readdir(path, _);
        for (var i = 0; i < files.length; i++) {
            total += du(_, path + "/" + files[i]);
        }
        console.log(path + ": " + total);
    }
    else {
        console.log(path + ": odd file");
    }
    return total;
}

var p = process.argv.length > 2 ? process.argv[2] : ".";

var t0 = Date.now();
du(_, p);
console.log("completed in " + (Date.now() - t0) + " ms");
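For reference, the removed diskUsage_.js source above uses streamline's `_` placeholder wherever a node-style callback would normally go. A rough equivalent of the same recursive size computation, written with async/await and fs.promises, is sketched below as an illustration only (not part of this commit).

// du(path) resolves to the total number of bytes under `path`,
// logging each directory's total as it is computed.
const fs = require('fs').promises;

async function du(path) {
    let total = 0;
    const stat = await fs.stat(path);
    if (stat.isFile()) {
        total += (await fs.readFile(path)).length;
    } else if (stat.isDirectory()) {
        const files = await fs.readdir(path);
        for (const f of files) {
            total += await du(path + "/" + f);
        }
        console.log(path + ": " + total);
    } else {
        console.log(path + ": odd file");
    }
    return total;
}

const p = process.argv.length > 2 ? process.argv[2] : ".";
const t0 = Date.now();
du(p).then(() => console.log("completed in " + (Date.now() - t0) + " ms"));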
11  lib/adapters/neo4j-lib/node_modules/streamline/examples/diskUsage/diskUsage_coffee.js (generated, vendored)
@@ -1,11 +0,0 @@
// diskUsage_coffee.js
// Wrapper for diskUsage_.coffee that tests Streamline's require() of
// CoffeeScript files.
//
// Usage:
//   node diskUsage_coffee.js [path]

require('coffee-script');
require('streamline');

require('./diskUsage_.coffee');
@@ -1,19 +0,0 @@
# options_.coffee
# An example of specifying file-level options in CoffeeScript.
#
# Usage:
#   coffee-streamline options_.coffee
#
# streamline.options = { "callback": "_wait" }

_ = require 'underscore'
assert = require 'assert'

# simulate async step here:
setTimeout _wait, 2000;

# use underscore library here:
assert.ok _.isArray [1, 2, 3]

# if we got here, it worked!
console.log 'job well done.'
@@ -1,8 +0,0 @@
// require.js
// Tests Streamline's ability to require() Streamline source files.
//
// Usage:
//   node require.js

require('streamline');
require('./require_error_');
@@ -1,13 +0,0 @@
/*** Generated by streamline 0.1.35 - DO NOT EDIT ***/

var __global = typeof global !== 'undefined' ? global : window;
function __cb(_, fn){ var ctx = __global.__context; return function(err, result){ __global.__context = ctx; if (err) return _(err); try { return fn(null, result); } catch (ex) { return __propagate(_, ex); } } }
function __propagate(_, err){ try { _(err); } catch (ex) { __trap(ex); } }
function __trap(err){ if (err) { if (__global.__context && __global.__context.errorHandler) __global.__context.errorHandler(err); else console.error("UNCAUGHT EXCEPTION: " + err.message + "\n" + err.stack); } }
(function __main(_wait) {
/* 16 */ return setTimeout(__cb(_wait, function() {
/* 24 */ console.log("the next error should be reported from line 25!");
/* 25 */ undefined.true;
_wait();
/* 16 */ }), 1000);
}).call(this, __trap);
Some files were not shown because too many files have changed in this diff.