From fe2ddeddf40ce0c766ec847d11377a2b8ebc0bfe Mon Sep 17 00:00:00 2001
From: Samaresh Shil <108946101+samaresh96@users.noreply.github.com>
Date: Sat, 16 Dec 2023 04:22:41 +0000
Subject: [PATCH 1/3] java commit
---
JAVA/calculator.java | 4 ++++
1 file changed, 4 insertions(+)
create mode 100644 JAVA/calculator.java
diff --git a/JAVA/calculator.java b/JAVA/calculator.java
new file mode 100644
index 00000000..a228e138
--- /dev/null
+++ b/JAVA/calculator.java
@@ -0,0 +1,4 @@
+/**
+ * calculator
+ */
+
From 5d24787c812c6a1e38a9e26f47fe03b1877418c3 Mon Sep 17 00:00:00 2001
From: Samaresh Shil <108946101+samaresh96@users.noreply.github.com>
Date: Sat, 16 Dec 2023 04:27:22 +0000
Subject: [PATCH 2/3] java 2 commit
---
JAVA/calculator.java | 9 +
TeachingKidsProgramming/.project | 11 +
node_modules/.package-lock.json | 23 +
node_modules/git/.npmignore | 2 +
node_modules/git/.travis.yml | 4 +
node_modules/git/README.md | 48 +
node_modules/git/benchmarks/benchmark.js | 150 ++
node_modules/git/lib/diff/block.js | 33 +
node_modules/git/lib/diff/callbacks.js | 311 ++++
node_modules/git/lib/diff/change.js | 105 ++
node_modules/git/lib/diff/diff.js | 892 +++++++++
node_modules/git/lib/diff/hunk.js | 291 +++
node_modules/git/lib/git/actor.js | 21 +
node_modules/git/lib/git/binary_parser.js | 253 +++
node_modules/git/lib/git/blame.js | 64 +
node_modules/git/lib/git/blame_line.js | 10 +
node_modules/git/lib/git/blob.js | 145 ++
node_modules/git/lib/git/commit.js | 336 ++++
node_modules/git/lib/git/commit_stats.js | 115 ++
node_modules/git/lib/git/config.js | 48 +
node_modules/git/lib/git/diff.js | 82 +
node_modules/git/lib/git/file_index.js | 241 +++
node_modules/git/lib/git/file_window.js | 66 +
node_modules/git/lib/git/git.js | 891 +++++++++
.../git/lib/git/git_file_operations.js | 170 ++
node_modules/git/lib/git/git_index.js | 188 ++
node_modules/git/lib/git/git_object.js | 20 +
node_modules/git/lib/git/head.js | 54 +
node_modules/git/lib/git/index.js | 36 +
.../git/lib/git/internal/directory_entry.js | 60 +
node_modules/git/lib/git/internal/git_blob.js | 14 +
.../git/lib/git/internal/git_commit.js | 87 +
node_modules/git/lib/git/internal/git_tag.js | 69 +
node_modules/git/lib/git/internal/git_tree.js | 70 +
node_modules/git/lib/git/loose_storage.js | 172 ++
node_modules/git/lib/git/merge.js | 39 +
node_modules/git/lib/git/pack_storage.js | 393 ++++
node_modules/git/lib/git/raw_object.js | 26 +
node_modules/git/lib/git/ref.js | 1 +
node_modules/git/lib/git/remote.js | 37 +
node_modules/git/lib/git/repo.js | 507 ++++++
node_modules/git/lib/git/repository.js | 764 ++++++++
node_modules/git/lib/git/status.js | 150 ++
node_modules/git/lib/git/status_file.js | 18 +
node_modules/git/lib/git/sub_module.js | 80 +
node_modules/git/lib/git/tag.js | 41 +
node_modules/git/lib/git/tree.js | 159 ++
node_modules/git/lib/git/user_info.js | 42 +
node_modules/git/lib/sprintf/sprintf.js | 100 ++
node_modules/git/lib/zlib/zlib.js | 1172 ++++++++++++
node_modules/git/package.json | 18 +
node_modules/mime/LICENSE | 19 +
node_modules/mime/README.md | 63 +
node_modules/mime/mime.js | 113 ++
node_modules/mime/package.json | 22 +
node_modules/mime/test.js | 55 +
node_modules/mime/types/mime.types | 1588 +++++++++++++++++
node_modules/mime/types/node.types | 60 +
package-lock.json | 28 +
package.json | 5 +
60 files changed, 10591 insertions(+)
create mode 100644 node_modules/.package-lock.json
create mode 100644 node_modules/git/.npmignore
create mode 100644 node_modules/git/.travis.yml
create mode 100644 node_modules/git/README.md
create mode 100644 node_modules/git/benchmarks/benchmark.js
create mode 100644 node_modules/git/lib/diff/block.js
create mode 100644 node_modules/git/lib/diff/callbacks.js
create mode 100644 node_modules/git/lib/diff/change.js
create mode 100644 node_modules/git/lib/diff/diff.js
create mode 100644 node_modules/git/lib/diff/hunk.js
create mode 100644 node_modules/git/lib/git/actor.js
create mode 100644 node_modules/git/lib/git/binary_parser.js
create mode 100644 node_modules/git/lib/git/blame.js
create mode 100644 node_modules/git/lib/git/blame_line.js
create mode 100644 node_modules/git/lib/git/blob.js
create mode 100644 node_modules/git/lib/git/commit.js
create mode 100644 node_modules/git/lib/git/commit_stats.js
create mode 100644 node_modules/git/lib/git/config.js
create mode 100644 node_modules/git/lib/git/diff.js
create mode 100644 node_modules/git/lib/git/file_index.js
create mode 100644 node_modules/git/lib/git/file_window.js
create mode 100644 node_modules/git/lib/git/git.js
create mode 100644 node_modules/git/lib/git/git_file_operations.js
create mode 100644 node_modules/git/lib/git/git_index.js
create mode 100644 node_modules/git/lib/git/git_object.js
create mode 100644 node_modules/git/lib/git/head.js
create mode 100644 node_modules/git/lib/git/index.js
create mode 100644 node_modules/git/lib/git/internal/directory_entry.js
create mode 100644 node_modules/git/lib/git/internal/git_blob.js
create mode 100644 node_modules/git/lib/git/internal/git_commit.js
create mode 100644 node_modules/git/lib/git/internal/git_tag.js
create mode 100644 node_modules/git/lib/git/internal/git_tree.js
create mode 100644 node_modules/git/lib/git/loose_storage.js
create mode 100644 node_modules/git/lib/git/merge.js
create mode 100644 node_modules/git/lib/git/pack_storage.js
create mode 100644 node_modules/git/lib/git/raw_object.js
create mode 100644 node_modules/git/lib/git/ref.js
create mode 100644 node_modules/git/lib/git/remote.js
create mode 100644 node_modules/git/lib/git/repo.js
create mode 100644 node_modules/git/lib/git/repository.js
create mode 100644 node_modules/git/lib/git/status.js
create mode 100644 node_modules/git/lib/git/status_file.js
create mode 100644 node_modules/git/lib/git/sub_module.js
create mode 100644 node_modules/git/lib/git/tag.js
create mode 100644 node_modules/git/lib/git/tree.js
create mode 100644 node_modules/git/lib/git/user_info.js
create mode 100644 node_modules/git/lib/sprintf/sprintf.js
create mode 100644 node_modules/git/lib/zlib/zlib.js
create mode 100644 node_modules/git/package.json
create mode 100644 node_modules/mime/LICENSE
create mode 100644 node_modules/mime/README.md
create mode 100644 node_modules/mime/mime.js
create mode 100644 node_modules/mime/package.json
create mode 100644 node_modules/mime/test.js
create mode 100644 node_modules/mime/types/mime.types
create mode 100644 node_modules/mime/types/node.types
create mode 100644 package-lock.json
create mode 100644 package.json
diff --git a/JAVA/calculator.java b/JAVA/calculator.java
index a228e138..b41e7751 100644
--- a/JAVA/calculator.java
+++ b/JAVA/calculator.java
@@ -2,3 +2,12 @@
* calculator
*/
+/**
+ * calculator
+ */
+public class calculator {
+
+ public static void main(String[] args) {
+ System.out.println("samaresh");
+ }
+}
\ No newline at end of file
diff --git a/TeachingKidsProgramming/.project b/TeachingKidsProgramming/.project
index 9d57ee3b..fb486981 100644
--- a/TeachingKidsProgramming/.project
+++ b/TeachingKidsProgramming/.project
@@ -14,4 +14,15 @@
org.eclipse.jdt.core.javanature
+
+
+ 1702559919792
+
+ 30
+
+ org.eclipse.core.resources.regexFilterMatcher
+ node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__
+
+
+
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
new file mode 100644
index 00000000..7da48fd4
--- /dev/null
+++ b/node_modules/.package-lock.json
@@ -0,0 +1,23 @@
+{
+ "name": "TeachingKidsProgramming.Java",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "node_modules/git": {
+ "version": "0.1.5",
+ "resolved": "https://registry.npmjs.org/git/-/git-0.1.5.tgz",
+ "integrity": "sha512-N+bfOrXyKMU/fQtCj6D/U9MQOEN0DAA8TLHSLdUQRSWBOkeRvsjJHdrdkvcq05xO7GSDKWc3nDEGoTZ4DfCCSg==",
+ "dependencies": {
+ "mime": "1.2.9"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/mime": {
+ "version": "1.2.9",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz",
+ "integrity": "sha512-WiLgbHTIq5AYUvU/Luli4mZ1bUcHpGNHyCsbl+KPMg4zt+XUDpQehWjuBjdLaEvDTinvKj/FgfQt3fPoT7j08g=="
+ }
+ }
+}
diff --git a/node_modules/git/.npmignore b/node_modules/git/.npmignore
new file mode 100644
index 00000000..5501f32e
--- /dev/null
+++ b/node_modules/git/.npmignore
@@ -0,0 +1,2 @@
+makefile
+test/
diff --git a/node_modules/git/.travis.yml b/node_modules/git/.travis.yml
new file mode 100644
index 00000000..3c6757d6
--- /dev/null
+++ b/node_modules/git/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - '0.10'
+ - '0.11'
diff --git a/node_modules/git/README.md b/node_modules/git/README.md
new file mode 100644
index 00000000..f5ffd182
--- /dev/null
+++ b/node_modules/git/README.md
@@ -0,0 +1,48 @@
+[](http://travis-ci.org/christkv/node-git)
+
+# Introduction
+
+This is a library for Git written in Node.js. It's as close a port of grit http://github.com/mojombo/grit.
+
+The idea is to allow for manipulation of git repositories by the node.js application. Not everything is
+implemented directly in node-git. Some of the stuff is using the native git command line instead of
+direct javascript code. Also it's fairly synchronous right now but that will hopefully change a little
+by little over time as it gets more stable and I start using it in real life scenarios.
+
+## Github information
+
+The source code is available at http://github.com/christkv/node-git.
+You can either clone the repository or download a tarball of the latest release.
+
+Once you have the source you can test the driver by running
+
+ $ make test
+
+On windows:
+
+ PS > node.exe .\node_modules\nodeunit\bin\nodeunit .\test
+
+## Examples
+
+For simple examples of usage look at the tests included in the repository.
+
+## Notes
+
+The current version is basic git support, don't expect everything to work as you expect it
+off the bat.
+
+## License
+
+ Copyright 2009 - 2010 Christian Amor Kvalheim.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/git/benchmarks/benchmark.js b/node_modules/git/benchmarks/benchmark.js
new file mode 100644
index 00000000..10003382
--- /dev/null
+++ b/node_modules/git/benchmarks/benchmark.js
@@ -0,0 +1,150 @@
+
+var Repo = require('git/repo').Repo,
+ util = require('util'),
+ fs = require('fs'),
+ exec = require('child_process').exec;
+
+var number_of_executions = 30;
+
+var create_tmp_directory = function(clone_path, callback) {
+ var filename = 'git_test' + new Date().getTime().toString() + Math.round((Math.random(100000) * 300)).toString();
+ var tmp_path = '/tmp/' + filename;
+ // Create directory
+ fs.mkdirSync(tmp_path, 0777);
+ // Copy the old directory to the new one
+ var child = exec('cp -R ' + clone_path + ' ' + tmp_path, function (error, stdout, stderr) {
+ if (error !== null) {
+ util.puts('exec error: ' + error);
+ return callback(error, null);
+ }
+ return callback(null, tmp_path);
+ });
+}
+
+var destroy_directory = function(directory, callback) {
+ // Copy the old directory to the new one
+ var child = exec('rm -rf ' + directory, function (error, stdout, stderr) {
+ if (error !== null) {
+ util.puts('exec error: ' + error);
+ return callback(error, null);
+ }
+ return callback(null, null);
+ });
+}
+
+var commit1 = '5e3ee1198672257164ce3fe31dea3e40848e68d5'
+var commit2 = 'ca8a30f5a7f0f163bbe3b6f0abf18a6c83b0687a'
+
+var pack_object_function = function(repo) {
+ repo.commit('5e3ee1198672257164ce3fe31dea3e40848e68d5', function(err, commit) {
+ repo.tree('cd7422af5a2e0fff3e94d6fb1a8fff03b2841881', function(err, tree) {
+ repo.blob('4232d073306f01cf0b895864e5a5cfad7dd76fce', function(err, blob) {
+ commit.parents[0].parents[0].parents[0];
+ })
+ })
+ });
+}
+
+var commits1_function = function(repo) {
+ repo.commits(function(err, commits) {
+ commits.length;
+ })
+}
+
+var commits2_function = function(repo) {
+ repo.commits('master', 15, function(err, log) {
+ log.length;
+ log.length;
+ log[0];
+ repo.commits('testing', function(err, commits) {
+ commits.map(function(c) { return c.message; });
+ })
+ })
+}
+
+var big_revlist_function = function(repo) {
+ repo.commits('master', 200, function(err, commits) {});
+}
+
+var log_function = function(repo) {
+ repo.log('master', function(err, log) {
+ log.length;
+ log.length;
+ log[0];
+ })
+}
+
+var diff_function = function(repo) {
+ repo.diff(commit1, commit2, function(err, diff) {});
+}
+
+var commit_diff_function = function(repo) {
+ repo.commit_diff(commit1, function(err, diff) {});
+}
+
+var heads_function = function(repo) {
+ repo.heads(function(err, heads) {
+ heads.map(function(b) { return b.commit.id; });
+ });
+}
+
+var execute_process = function(type) {
+ var execute_function = null;
+
+ // Check that we have the right function
+ if(type == "packobj") {
+ execute_function = pack_object_function;
+ } else if(type == "commits1") {
+ execute_function = commits1_function;
+ } else if(type == "commits2") {
+ execute_function = commits2_function;
+ } else if(type == "big_revlist") {
+ execute_function = big_revlist_function;
+ } else if(type == "log") {
+ execute_function = log_function;
+ } else if(type == "diff") {
+ execute_function = diff_function;
+ } else if(type == "commit_diff") {
+ execute_function = commit_diff_function;
+ } else if(type == "heads") {
+ execute_function = heads_function;
+ }
+
+ // Ensure that we have an executable function
+ if(execute_function) {
+ // Creat temp directory
+ create_tmp_directory("/Users/christian.kvalheim/coding/checkouts/grit/test/dot_git", function(err, target_path) {
+ // Open the repo
+ new Repo(target_path + "/dot_git", {is_bare:true}, function(err, repo) {
+ var start_time = new Date();
+
+ // Execute the benchmark x number of times if a function is defined
+ for(var i = 0; i < number_of_executions; i++) {
+ execute_function(repo);
+ }
+
+ var end_time = new Date();
+ var total_miliseconds = end_time.getTime() - start_time.getTime();
+ util.puts("[" + type + "]::executed in: " + (total_miliseconds/1000) + " seconds");
+ // Delete the directory
+ destroy_directory(target_path, function(err, result) {});
+ });
+ });
+ }
+}
+
+if(process.argv.length > 2 && process.argv[2].match(/packobj|commits1|commits2|big_revlist|log|diff|commit_diff|heads|all/)) {
+ if(process.argv[2] == "all") {
+ var tests = ["packobj", "commits1", "commits2", "big_revlist", "log", "diff", "commit_diff", "heads"];
+ // var tests = ["packobj", "commits1", "commits2", "big_revlist", "log"];
+ tests.forEach(function(t) {
+ execute_process(t);
+ })
+ } else {
+ execute_process(process.argv[2]);
+ }
+} else {
+ util.puts("Please provide the benchmark you wish to run in the form ")
+}
+
+
diff --git a/node_modules/git/lib/diff/block.js b/node_modules/git/lib/diff/block.js
new file mode 100644
index 00000000..847731ae
--- /dev/null
+++ b/node_modules/git/lib/diff/block.js
@@ -0,0 +1,33 @@
+// A block is an operation removing, adding, or changing a group of items.
+// Basically, this is just a list of changes, where each change adds or
+// deletes a single item. Used by bin/ldiff.
+var Block = exports.Block = function(chunk) {
+ this.changes = [];
+ this.insert = [];
+ this.remove = [];
+ var self = this;
+
+ chunk.forEach(function(item) {
+ self.changes.push(item);
+ if(item.is_deleting()) self.remove.push(item);
+ if(item.is_adding()) self.insert.push(item);
+ })
+
+ Object.defineProperty(this, "diff_size", { get: function() { return self.insert.length - self.remove.length; }, enumerable: true});
+ Object.defineProperty(this, "op", { get: function() {
+ var result = [self.remove.length == 0, self.insert.length == 0];
+
+ if(!result[0] && !result[1]) {
+ return "!";
+ } else if(!result[0] && result[1]) {
+ return "-";
+ } else if(result[0] && result[1]) {
+ return "+";
+ } else {
+ return "^";
+ }
+ }, enumerable: true});
+}
+
+Block.prototype.op = function() {
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/diff/callbacks.js b/node_modules/git/lib/diff/callbacks.js
new file mode 100644
index 00000000..3008544a
--- /dev/null
+++ b/node_modules/git/lib/diff/callbacks.js
@@ -0,0 +1,311 @@
+var ContextChange = require('./change').ContextChange,
+ Change = require('./change').Change;
+
+// This callback object implements the default set of callback events, which
+// only returns the event itself. Note that //finished_a and //finished_b are
+// not implemented -- I haven't yet figured out where they would be useful.
+//
+// Note that this is intended to be called as is, e.g.,
+DefaultCallbacks = exports.DefaultCallbacks = function() {
+}
+
+// Called when two items match.
+DefaultCallbacks.prototype.match = function(event) {
+ return event;
+}
+
+// Called when the old value is discarded in favour of the new value.
+DefaultCallbacks.prototype.discard_a = function(event) {
+ return event;
+}
+
+// Called when the new value is discarded in favour of the old value.
+DefaultCallbacks.prototype.discard_b = function(event) {
+ return event;
+}
+
+// Called when both the old and new values have changed.
+DefaultCallbacks.prototype.change = function(event) {
+ return event;
+}
+
+// An alias for DefaultCallbacks that is used in Diff::LCS#traverse_sequences.
+SequenceCallbacks = exports.SequenceCallbacks = DefaultCallbacks;
+// An alias for DefaultCallbacks that is used in Diff::LCS#traverse_balanced.
+BalancedCallbacks = exports.BalancedCallbacks = DefaultCallbacks;
+
+// This will produce a compound array of simple diff change objects. Each
+// element in the //diffs array is a +hunk+ or +hunk+ array, where each
+// element in each +hunk+ array is a single Change object representing the
+// addition or removal of a single element from one of the two tested
+// sequences. The +hunk+ provides the full context for the changes.
+//
+// diffs = Diff::LCS.diff(seq1, seq2)
+// // This example shows a simplified array format.
+// // [ [ [ '-', 0, 'a' ] ], // 1
+// // [ [ '+', 2, 'd' ] ], // 2
+// // [ [ '-', 4, 'h' ], // 3
+// // [ '+', 4, 'f' ] ],
+// // [ [ '+', 6, 'k' ] ], // 4
+// // [ [ '-', 8, 'n' ], // 5
+// // [ '-', 9, 'p' ],
+// // [ '+', 9, 'r' ],
+// // [ '+', 10, 's' ],
+// // [ '+', 11, 't' ] ] ]
+//
+// There are five hunks here. The first hunk says that the +a+ at position 0
+// of the first sequence should be deleted ('-'). The second hunk
+// says that the +d+ at position 2 of the second sequence should be inserted
+// ('+'). The third hunk says that the +h+ at position 4 of the
+// first sequence should be removed and replaced with the +f+ from position 4
+// of the second sequence. The other two hunks are described similarly.
+//
+// === Use
+// This callback object must be initialised and is used by the Diff::LCS//diff
+// method.
+//
+// cbo = Diff::LCS::DiffCallbacks.new
+// Diff::LCS.LCS(seq1, seq2, cbo)
+// cbo.finish
+//
+// Note that the call to //finish is absolutely necessary, or the last set of
+// changes will not be visible. Alternatively, can be used as:
+//
+// cbo = Diff::LCS::DiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
+//
+// The necessary //finish call will be made.
+//
+// === Simplified Array Format
+// The simplified array format used in the example above can be obtained
+// with:
+//
+// require 'pp'
+// pp diffs.map { |e| e.map { |f| f.to_a } }
+DiffCallbacks = exports.DiffCallbacks = function(block) {
+ this.hunk = [];
+ this.diffs = [];
+
+ if(block != null) {
+ block(this);
+ this.finish();
+ }
+}
+
+// Finalizes the diff process. If an unprocessed hunk still exists, then it
+// is appended to the diff list.
+DiffCallbacks.prototype.finish = function() {
+ add_nonempty_hunk(this);
+}
+
+DiffCallbacks.prototype.match = function(event) {
+ add_nonempty_hunk(this);
+}
+
+DiffCallbacks.prototype.discard_a = function(event) {
+ this.hunk.push(new Change('-', event.old_position, event.old_element));
+}
+
+DiffCallbacks.prototype.discard_b = function(event) {
+ this.hunk.push(new Change('+', event.new_position, event.new_element));
+}
+
+var add_nonempty_hunk = function(diff_callback) {
+ if(diff_callback.hunk.length > 0) diff_callback.diffs.push(diff_callback.hunk);
+ diff_callback.hunk = [];
+}
+
+// This will produce a simple array of diff change objects. Each element in
+// the //diffs array is a single ContextChange. In the set of //diffs provided
+// by SDiffCallbacks, both old and new objects will be presented for both
+// changed and unchanged objects. +nil+ will be substituted
+// for a discarded object.
+//
+// The diffset produced by this callback, when provided to Diff::LCS//sdiff,
+// will compute and display the necessary components to show two sequences
+// and their minimized differences side by side, just like the Unix utility
+// +sdiff+.
+//
+// same same
+// before | after
+// old < -
+// - > new
+//
+// seq1 = %w(a b c e h j l m n p)
+// seq2 = %w(b c d e f j k l m r s t)
+//
+// diffs = Diff::LCS.sdiff(seq1, seq2)
+// // This example shows a simplified array format.
+// // [ [ "-", [ 0, "a"], [ 0, nil ] ],
+// // [ "=", [ 1, "b"], [ 0, "b" ] ],
+// // [ "=", [ 2, "c"], [ 1, "c" ] ],
+// // [ "+", [ 3, nil], [ 2, "d" ] ],
+// // [ "=", [ 3, "e"], [ 3, "e" ] ],
+// // [ "!", [ 4, "h"], [ 4, "f" ] ],
+// // [ "=", [ 5, "j"], [ 5, "j" ] ],
+// // [ "+", [ 6, nil], [ 6, "k" ] ],
+// // [ "=", [ 6, "l"], [ 7, "l" ] ],
+// // [ "=", [ 7, "m"], [ 8, "m" ] ],
+// // [ "!", [ 8, "n"], [ 9, "r" ] ],
+// // [ "!", [ 9, "p"], [ 10, "s" ] ],
+// // [ "+", [ 10, nil], [ 11, "t" ] ] ]
+//
+// The result of this operation is similar to that of
+// Diff::LCS::ContextDiffCallbacks. They may be compared as:
+//
+// s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" }
+// c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten
+//
+// s == c // -> true
+//
+// === Use
+// This callback object must be initialised and is used by the Diff::LCS//sdiff
+// method.
+//
+// cbo = Diff::LCS::SDiffCallbacks.new
+// Diff::LCS.LCS(seq1, seq2, cbo)
+//
+// As with the other initialisable callback objects, Diff::LCS::SDiffCallbacks
+// can be initialised with a block. As there is no "fininishing" to be done,
+// this has no effect on the state of the object.
+//
+// cbo = Diff::LCS::SDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
+//
+// === Simplified Array Format
+// The simplified array format used in the example above can be obtained
+// with:
+//
+// require 'pp'
+// pp diffs.map { |e| e.to_a }
+SDiffCallbacks = exports.SDiffCallbacks = function(block) {
+ this.diffs = [];
+
+ if(block != null) {
+ block(this);
+ this.finish();
+ }
+}
+
+SDiffCallbacks.prototype.match = function(event) {
+ this.diffs.push(ContextChange.simplify(event));
+}
+
+SDiffCallbacks.prototype.discard_a = function(event) {
+ this.diffs.push(ContextChange.simplify(event));
+}
+
+SDiffCallbacks.prototype.discard_b = function(event) {
+ this.diffs.push(ContextChange.simplify(event));
+}
+
+SDiffCallbacks.prototype.change = function(event) {
+ this.diffs.push(ContextChange.simplify(event));
+}
+
+// This will produce a compound array of contextual diff change objects. Each
+// element in the //diffs array is a "hunk" array, where each element in each
+// "hunk" array is a single change. Each change is a Diff::LCS::ContextChange
+// that contains both the old index and new index values for the change. The
+// "hunk" provides the full context for the changes. Both old and new objects
+// will be presented for changed objects. +nil+ will be substituted for a
+// discarded object.
+//
+// seq1 = %w(a b c e h j l m n p)
+// seq2 = %w(b c d e f j k l m r s t)
+//
+// diffs = Diff::LCS.diff(seq1, seq2, Diff::LCS::ContextDiffCallbacks)
+// // This example shows a simplified array format.
+// // [ [ [ '-', [ 0, 'a' ], [ 0, nil ] ] ], // 1
+// // [ [ '+', [ 3, nil ], [ 2, 'd' ] ] ], // 2
+// // [ [ '-', [ 4, 'h' ], [ 4, nil ] ], // 3
+// // [ '+', [ 5, nil ], [ 4, 'f' ] ] ],
+// // [ [ '+', [ 6, nil ], [ 6, 'k' ] ] ], // 4
+// // [ [ '-', [ 8, 'n' ], [ 9, nil ] ], // 5
+// // [ '+', [ 9, nil ], [ 9, 'r' ] ],
+// // [ '-', [ 9, 'p' ], [ 10, nil ] ],
+// // [ '+', [ 10, nil ], [ 10, 's' ] ],
+// // [ '+', [ 10, nil ], [ 11, 't' ] ] ] ]
+//
+// The five hunks shown are comprised of individual changes; if there is a
+// related set of changes, they are still shown individually.
+//
+// This callback can also be used with Diff::LCS//sdiff, which will produce
+// results like:
+//
+// diffs = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextCallbacks)
+// // This example shows a simplified array format.
+// // [ [ [ "-", [ 0, "a" ], [ 0, nil ] ] ], // 1
+// // [ [ "+", [ 3, nil ], [ 2, "d" ] ] ], // 2
+// // [ [ "!", [ 4, "h" ], [ 4, "f" ] ] ], // 3
+// // [ [ "+", [ 6, nil ], [ 6, "k" ] ] ], // 4
+// // [ [ "!", [ 8, "n" ], [ 9, "r" ] ], // 5
+// // [ "!", [ 9, "p" ], [ 10, "s" ] ],
+// // [ "+", [ 10, nil ], [ 11, "t" ] ] ] ]
+//
+// The five hunks are still present, but are significantly shorter in total
+// presentation, because changed items are shown as changes ("!") instead of
+// potentially "mismatched" pairs of additions and deletions.
+//
+// The result of this operation is similar to that of
+// Diff::LCS::SDiffCallbacks. They may be compared as:
+//
+// s = Diff::LCS.sdiff(seq1, seq2).reject { |e| e.action == "=" }
+// c = Diff::LCS.sdiff(seq1, seq2, Diff::LCS::ContextDiffCallbacks).flatten
+//
+// s == c // -> true
+//
+// === Use
+// This callback object must be initialised and can be used by the
+// Diff::LCS//diff or Diff::LCS//sdiff methods.
+//
+// cbo = Diff::LCS::ContextDiffCallbacks.new
+// Diff::LCS.LCS(seq1, seq2, cbo)
+// cbo.finish
+//
+// Note that the call to //finish is absolutely necessary, or the last set of
+// changes will not be visible. Alternatively, can be used as:
+//
+// cbo = Diff::LCS::ContextDiffCallbacks.new { |tcbo| Diff::LCS.LCS(seq1, seq2, tcbo) }
+//
+// The necessary //finish call will be made.
+//
+// === Simplified Array Format
+// The simplified array format used in the example above can be obtained
+// with:
+//
+// require 'pp'
+// pp diffs.map { |e| e.map { |f| f.to_a } }
+ContextDiffCallbacks = exports.ContextDiffCallbacks = function(block) {
+ this.hunk = [];
+ this.diffs = [];
+
+ if(block != null) {
+ block(this);
+ this.finish();
+ }
+}
+
+ContextDiffCallbacks.prototype.finish = function() {
+ add_nonempty_hunk(this);
+}
+
+ContextDiffCallbacks.prototype.discard_a = function(event) {
+ this.hunk.push(ContextChange.simplify(event));
+}
+
+ContextDiffCallbacks.prototype.discard_b = function(event) {
+ this.hunk.push(ContextChange.simplify(event));
+}
+
+ContextDiffCallbacks.prototype.match = function(event) {
+ this.hunk.push(ContextChange.simplify(event));
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/diff/change.js b/node_modules/git/lib/diff/change.js
new file mode 100644
index 00000000..6677caaa
--- /dev/null
+++ b/node_modules/git/lib/diff/change.js
@@ -0,0 +1,105 @@
+// Represents a simplistic (non-contextual) change. Represents the removal or
+// addition of an element from either the old or the new sequenced enumerable.
+var Change = exports.Change = function(action, position, element) {
+ this.action = action;
+ this.position = position;
+ this.element = element;
+}
+
+Change.from_a = function(arr) {
+ return new Change(arr[0], arr[1], arr[2]);
+}
+
+Change.prototype.to_a = function() {
+ return [this.action, this.position, this.element];
+}
+
+Change.prototype.is_deleting = function() {
+ return this.action == '-';
+}
+
+Change.prototype.is_adding = function() {
+ return this.action == '+';
+}
+
+Change.prototype.is_unchanged = function() {
+ return this.action == '=';
+}
+
+Change.prototype.is_changed = function() {
+ return this.changed == '!';
+}
+
+Change.prototype.is_finished_a = function() {
+ return this.changed == '>';
+}
+
+Change.prototype.is_finished_b = function() {
+ return this.changed == '<';
+}
+
+var ContextChange = exports.ContextChange = function(action, old_position, old_element, new_position, new_element) {
+ this.action = action;
+ this.old_position = old_position;
+ this.old_element = old_element;
+ this.new_position = new_position;
+ this.new_element = new_element;
+}
+
+// Creates a ContextChange from an array produced by ContextChange#to_a.
+ContextChange.from_a = function(arr) {
+ if(arr.length == 5) {
+ return new ContextChange(arr[0], arr[1], arr[2], arr[3], arr[4]);
+ } else {
+ return new ContextChange(arr[0], arr[1][0], arr[1][1], arr[2][0], arr[2][1]);
+ }
+}
+
+// Simplifies a context change for use in some diff callbacks. '<' actions
+// are converted to '-' and '>' actions are converted to '+'.
+ContextChange.simplify = function(event) {
+ var ea = event.to_a();
+
+ if(ea[0] == '-') {
+ ea[2][1] = null;
+ } else if(ea[0] == '<') {
+ ea[0] = '-';
+ ea[2][1] = null;
+ } else if(ea[0] == '+') {
+ ea[1][1] = null;
+ } else if(ea[0] == '>') {
+ ea[0] = '+';
+ ea[1][1] = null;
+ }
+
+ // Return a Context Change object
+ return ContextChange.from_a(ea);
+}
+
+ContextChange.prototype.to_a = function() {
+ return [this.action, [this.old_position, this.old_element], [this.new_position, this.new_element]];
+}
+
+ContextChange.prototype.is_deleting = function() {
+ return this.action == '-';
+}
+
+ContextChange.prototype.is_adding = function() {
+ return this.action == '+';
+}
+
+ContextChange.prototype.is_unchanged = function() {
+ return this.action == '=';
+}
+
+ContextChange.prototype.is_changed = function() {
+ return this.changed == '!';
+}
+
+ContextChange.prototype.is_finished_a = function() {
+ return this.changed == '>';
+}
+
+ContextChange.prototype.is_finished_b = function() {
+ return this.changed == '<';
+}
diff --git a/node_modules/git/lib/diff/diff.js b/node_modules/git/lib/diff/diff.js
new file mode 100644
index 00000000..da6455aa
--- /dev/null
+++ b/node_modules/git/lib/diff/diff.js
@@ -0,0 +1,892 @@
+var util = require('util'),
+ SequenceCallbacks = require('./callbacks').SequenceCallbacks,
+ ContextChange = require('./change').ContextChange,
+ Change = require('./change').Change,
+ DiffCallbacks = require('./callbacks').DiffCallbacks,
+ SDiffCallbacks = require('./callbacks').SDiffCallbacks,
+ BalancedCallbacks = require('./callbacks').BalancedCallbacks,
+ ContextDiffCallbacks = require('./callbacks').ContextDiffCallbacks,
+ Hunk = require('./hunk').Hunk;
+
+// Namespace object mirroring Ruby's Diff module; holds the LCS toolkit.
+var Difference = exports.Difference = function() {
+}
+
+// Diff::LCS namespace: longest-common-subsequence based diffing utilities.
+Difference.LCS = function() {
+}
+
+// Re-export the callback and data classes under Difference.LCS so callers
+// can reach the whole toolkit through this single namespace.
+Difference.LCS.SequenceCallbacks = SequenceCallbacks;
+Difference.LCS.ContextChange = ContextChange;
+Difference.LCS.DiffCallbacks = DiffCallbacks;
+Difference.LCS.SDiffCallbacks = SDiffCallbacks;
+Difference.LCS.BalancedCallbacks = BalancedCallbacks;
+Difference.LCS.ContextDiffCallbacks = ContextDiffCallbacks;
+Difference.LCS.Change = Change;
+Difference.LCS.Hunk = Hunk;
+
+// Diff::LCS.sdiff computes all necessary components to show two sequences
+// and their minimized differences side by side, just like the Unix
+// utility sdiff does:
+//
+// old < -
+// same same
+// before | after
+// - > new
+//
+// See Diff::LCS::SDiffCallbacks for the default behaviour. An alternate
+// behaviour may be implemented with Diff::LCS::ContextDiffCallbacks. If
+// a Class argument is provided for +callbacks+, //diff will attempt to
+// initialise it. If the +callbacks+ object (possibly initialised)
+// responds to //finish, it will be called.
+Difference.LCS.sdiff = function(seq1, seq2, callbacks, block) {
+ callbacks = callbacks != null ? callbacks : Difference.LCS.SDiffCallbacks;
+
+ if(Object.prototype.toString.call(callbacks) == "[object Function]") {
+ callbacks = new callbacks();
+ }
+
+ // Traverse the sequence
+ Difference.LCS.traverse_balanced(seq1, seq2, callbacks);
+ if(callbacks.finish != null) callbacks.finish();
+
+ if(block != null) {
+ var res = callbacks.diffs.map(function(hunk) {
+ if(Array.isArray(hunk)) {
+ hunk = hunk.map(function(v) { return block(v); });
+ } else {
+ block(hunk);
+ }
+ });
+
+ return res;
+ } else {
+ return callbacks.diffs;
+ }
+}
+
+// Diff::LCS.diff computes the smallest set of additions and deletions
+// necessary to turn the first sequence into the second, and returns a
+// description of these changes.
+//
+// See Diff::LCS::DiffCallbacks for the default behaviour. An alternate
+// behaviour may be implemented with Diff::LCS::ContextDiffCallbacks.
+// If a Class argument is provided for +callbacks+, //diff will attempt
+// to initialise it. If the +callbacks+ object (possibly initialised)
+// responds to //finish, it will be called.
+Difference.LCS.diff = function(seq1, seq2, callbacks, block) {
+ callbacks = callbacks != null ? callbacks : Difference.LCS.DiffCallbacks;
+
+ if(Object.prototype.toString.call(callbacks) == "[object Function]") {
+ callbacks = new callbacks();
+ }
+
+ // Traverse the sequence
+ Difference.LCS.traverse_sequences(seq1, seq2, callbacks);
+ if(callbacks.finish != null) callbacks.finish();
+
+ if(block != null) {
+ var res = callbacks.diffs.map(function(hunk) {
+ if(Array.isArray(hunk)) {
+ hunk = hunk.map(function(v) { return block(v); });
+ } else {
+ block(hunk);
+ }
+ });
+
+ return res;
+ } else {
+ return callbacks.diffs;
+ }
+}
+
+
+// Diff::LCS.traverse_sequences is the most general facility provided by this
+// module; +diff+ and +LCS+ are implemented as calls to it.
+//
+// The arguments to //traverse_sequences are the two sequences to
+// traverse, and a callback object, like this:
+//
+// traverse_sequences(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new)
+//
+// //diff is implemented with //traverse_sequences.
+//
+// == Callback Methods
+// Optional callback methods are emphasized.
+//
+// callbacks//match:: Called when +a+ and +b+ are pointing
+// to common elements in +A+ and +B+.
+// callbacks//discard_a:: Called when +a+ is pointing to an
+// element not in +B+.
+// callbacks//discard_b:: Called when +b+ is pointing to an
+// element not in +A+.
+// callbacks//finished_a:: Called when +a+ has reached the end of
+// sequence +A+.
+// callbacks//finished_b:: Called when +b+ has reached the end of
+// sequence +B+.
+//
+// == Algorithm
+// a---+
+// v
+// A = a b c e h j l m n p
+// B = b c d e f j k l m r s t
+// ^
+// b---+
+//
+// If there are two arrows (+a+ and +b+) pointing to elements of
+// sequences +A+ and +B+, the arrows will initially point to the first
+// elements of their respective sequences. //traverse_sequences will
+// advance the arrows through the sequences one element at a time,
+// calling a method on the user-specified callback object before each
+// advance. It will advance the arrows in such a way that if there are
+// elements A[ii] and B[jj] which are both equal and
+// part of the longest common subsequence, there will be some moment
+// during the execution of //traverse_sequences when arrow +a+ is pointing
+// to A[ii] and arrow +b+ is pointing to B[jj]. When
+// this happens, //traverse_sequences will call callbacks//match
+// and then it will advance both arrows.
+//
+// Otherwise, one of the arrows is pointing to an element of its sequence
+// that is not part of the longest common subsequence.
+// //traverse_sequences will advance that arrow and will call
+// callbacks//discard_a or callbacks//discard_b, depending
+// on which arrow it advanced. If both arrows point to elements that are
+// not part of the longest common subsequence, then //traverse_sequences
+// will advance one of them and call the appropriate callback, but it is
+// not specified which it will call.
+//
+// The methods for callbacks//match, callbacks//discard_a,
+// and callbacks//discard_b are invoked with an event comprising
+// the action ("=", "+", or "-", respectively), the indicies +ii+ and
+// +jj+, and the elements A[ii] and B[jj]. Return
+// values are discarded by //traverse_sequences.
+//
+// === End of Sequences
+// If arrow +a+ reaches the end of its sequence before arrow +b+ does,
+// //traverse_sequence try to call callbacks//finished_a with the
+// last index and element of +A+ (A[-1]) and the current index
+// and element of +B+ (B[jj]). If callbacks//finished_a
+// does not exist, then callbacks//discard_b will be called on
+// each element of +B+ until the end of the sequence is reached (the call
+// will be done with A[-1] and B[jj] for each element).
+//
+// If +b+ reaches the end of +B+ before +a+ reaches the end of +A+,
+// callbacks//finished_b will be called with the current index
+// and element of +A+ (A[ii]) and the last index and element of
+// +B+ (A[-1]). Again, if callbacks//finished_b does not
+// exist on the callback object, then callbacks//discard_a will
+// be called on each element of +A+ until the end of the sequence is
+// reached (A[ii] and B[-1]).
+//
+// There is a chance that one additional callbacks//discard_a or
+// callbacks//discard_b will be called after the end of the
+// sequence is reached, if +a+ has not yet reached the end of +A+ or +b+
+// has not yet reached the end of +B+.
+Difference.LCS.traverse_sequences = function(seq1, seq2, callbacks, block) { // The block allows callbacks on change events
+ // Ensure that we have at least a default callback object
+ callbacks = callbacks != null ? callbacks : new Difference.LCS.SequenceCallbacks();
+ // Fetch the matches from the __lcs algorithm
+ var matches = Difference.LCS.__lcs(seq1, seq2);
+
+ var run_finished_a = false, run_finished_b = false;
+ var string = seq1.constructor == String;
+
+ var a_size = seq1.length, b_size = seq2.length;
+ var ai = 0, bj = 0;
+ var event = null;
+
+ for(var ii = 0; ii <= matches.length; ii++) {
+ var b_line = matches[ii];
+
+ var ax = string ? seq1.substr(ii, 1) : seq1[ii];
+ var bx = string ? seq2.substr(bj, bj + 1) : seq2[bj];
+
+ if(b_line == null) {
+ if(ax != null) {
+ event = new Difference.LCS.ContextChange('-', ii, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ }
+ } else {
+ while(bj < b_line) {
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('+', ii, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ }
+
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('=', ii, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.match(event);
+ bj = bj + 1;
+ }
+
+ // Update the ai with the current index point
+ ai = ii;
+ }
+
+ // Update pointer
+ ai = ai + 1;
+
+ // The last entry (if any) processed was a match. +ai+ and +bj+ point
+ // just past the last matching lines in their sequences.
+ while(ai < a_size || bj < b_size) {
+ // last A
+ if(ai == a_size && bj < b_size) {
+ if(callbacks.finished_a != null && !run_finished_a) {
+ ax = string ? seq1.substr(seq1.length - 1, 1) : seq1[seq1.length - 1];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('>', (a_size - 1), ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.finished_a(event);
+ run_finished_a = true;
+ } else {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ do {
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ } while(bj < b_size)
+ }
+ }
+
+ // last B?
+ if(bj == b_size && ai < a_size) {
+ if(callbacks.finished_b != null && !run_finished_b) {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(seq2.length - 1, 1) : seq2[seq2.length - 1];
+ event = new Difference.LCS.ContextChange('<', ai, ax, (b_size -1), bx);
+ if(block != null) event = block(event);
+ callbacks.finished_b(event);
+ run_finished_b = true;
+ } else {
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ do {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ } while(bj < b_size)
+ }
+ }
+
+ if(ai < a_size) {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ }
+
+ if(bj < b_size) {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ }
+ }
+}
+
+// //traverse_balanced is an alternative to //traverse_sequences. It
+// uses a different algorithm to iterate through the entries in the
+// computed longest common subsequence. Instead of viewing the changes as
+// insertions or deletions from one of the sequences, //traverse_balanced
+// will report changes between the sequences. To represent a
+//
+// The arguments to //traverse_balanced are the two sequences to traverse
+// and a callback object, like this:
+//
+// traverse_balanced(seq1, seq2, Diff::LCS::ContextDiffCallbacks.new)
+//
+// //sdiff is implemented with //traverse_balanced.
+//
+// == Callback Methods
+// Optional callback methods are emphasized.
+//
+// callbacks//match:: Called when +a+ and +b+ are pointing
+// to common elements in +A+ and +B+.
+// callbacks//discard_a:: Called when +a+ is pointing to an
+// element not in +B+.
+// callbacks//discard_b:: Called when +b+ is pointing to an
+// element not in +A+.
+// callbacks//change:: Called when +a+ and +b+ are pointing
+// to the same relative position, but
+// A[a] and B[b] are
+// not the same; a change has
+// occurred.
+//
+// //traverse_balanced might be a bit slower than //traverse_sequences,
+// noticable only while processing huge amounts of data.
+//
+// The +sdiff+ function of this module is implemented as call to
+// //traverse_balanced.
+//
+// == Algorithm
+// a---+
+// v
+// A = a b c e h j l m n p
+// B = b c d e f j k l m r s t
+// ^
+// b---+
+//
+// === Matches
+// If there are two arrows (+a+ and +b+) pointing to elements of
+// sequences +A+ and +B+, the arrows will initially point to the first
+// elements of their respective sequences. //traverse_sequences will
+// advance the arrows through the sequences one element at a time,
+// calling a method on the user-specified callback object before each
+// advance. It will advance the arrows in such a way that if there are
+// elements A[ii] and B[jj] which are both equal and
+// part of the longest common subsequence, there will be some moment
+// during the execution of //traverse_sequences when arrow +a+ is pointing
+// to A[ii] and arrow +b+ is pointing to B[jj]. When
+// this happens, //traverse_sequences will call callbacks//match
+// and then it will advance both arrows.
+//
+// === Discards
+// Otherwise, one of the arrows is pointing to an element of its sequence
+// that is not part of the longest common subsequence.
+// //traverse_sequences will advance that arrow and will call
+// callbacks//discard_a or callbacks//discard_b,
+// depending on which arrow it advanced.
+//
+// === Changes
+// If both +a+ and +b+ point to elements that are not part of the longest
+// common subsequence, then //traverse_sequences will try to call
+// callbacks//change and advance both arrows. If
+// callbacks//change is not implemented, then
+// callbacks//discard_a and callbacks//discard_b will be
+// called in turn.
+//
+// The methods for callbacks//match, callbacks//discard_a,
+// callbacks//discard_b, and callbacks//change are
+// invoked with an event comprising the action ("=", "+", "-", or "!",
+// respectively), the indicies +ii+ and +jj+, and the elements
+// A[ii] and B[jj]. Return values are discarded by
+// //traverse_balanced.
+//
+// === Context
+// Note that +ii+ and +jj+ may not be the same index position, even if
+// +a+ and +b+ are considered to be pointing to matching or changed
+// elements.
+// Walks the LCS match vector keeping the two arrows (ai, bj) in step,
+// emitting match/discard_a/discard_b events and -- when the callback object
+// implements `change` -- paired '!' change events. See narrative above.
+Difference.LCS.traverse_balanced = function(seq1, seq2, callbacks, block) {
+ // Ensure that we have at least a default callback object
+ callbacks = callbacks != null ? callbacks : new Difference.LCS.BalancedCallbacks();
+ // Fetch the matches from the __lcs algorithm
+ var matches = Difference.LCS.__lcs(seq1, seq2);
+ var a_size = seq1.length;
+ var b_size = seq2.length;
+ // ai/bj are the two "arrows"; ma/mb are the next matched index pair.
+ var ai = 0, bj = 0;
+ var mb = 0;
+ var ma = -1;
+ // Strings are indexed with substr; arrays with [].
+ var string = seq1.constructor == String;
+ var ax = null, bx = null, event = null;
+ var execute = true;
+
+ // Process all the lines in the match vector.
+ while(true) {
+ // Find next match indices +ma+ and +mb+ (advance past unmatched slots;
+ // `execute` is always true, the inner break ends the scan).
+ while(execute) {
+ ma = ma + 1;
+ if(!(ma < matches.length && matches[ma] == null)) break;
+ // execute = !(ma < matches.length && matches[ma] == null);
+ }
+
+ if(ma >= matches.length) break; // end of matches
+ mb = matches[ma];
+
+ // Change seq2: drain the unmatched region before the (ma, mb) pair.
+ while((ai < ma) || (bj < mb)) {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+
+ // Calculate result: which sides still have unmatched elements.
+ var result = [(ai < ma), (bj < mb)];
+
+ if(result[0] && result[1]) {
+ // Both sides unmatched: a paired change if supported, otherwise a
+ // discard_a immediately followed by a discard_b.
+ if(callbacks.change != null) {
+ event = new Difference.LCS.ContextChange('!', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.change(event);
+ ai = ai + 1;
+ bj = bj + 1;
+ } else {
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1
+ }
+ } else if(result[0] && !result[1]) {
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ } else if(!result[0] && result[1]) {
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ }
+ }
+
+ // Match: the arrows now point at the matched pair itself.
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+ event = new Difference.LCS.ContextChange('=', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.match(event);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+
+ // Tail: drain whatever remains after the last match, pairing changes
+ // where both sides still have elements.
+ while((ai < a_size) || (bj < b_size)) {
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ bx = string ? seq2.substr(bj, 1) : seq2[bj];
+
+ var result = [(ai < a_size), (bj < b_size)];
+ if(result[0] && result[1]) {
+ if(callbacks.change != null) {
+ event = new Difference.LCS.ContextChange('!', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.change(event);
+ ai = ai + 1;
+ bj = bj + 1;
+ } else {
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ ax = string ? seq1.substr(ai, 1) : seq1[ai];
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ }
+ } else if(result[0] && !result[1]) {
+ event = new Difference.LCS.ContextChange('-', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_a(event);
+ ai = ai + 1;
+ } else if(!result[0] && result[1]) {
+ event = new Difference.LCS.ContextChange('+', ai, ax, bj, bx);
+ if(block != null) event = block(event);
+ callbacks.discard_b(event);
+ bj = bj + 1;
+ }
+ }
+}
+
+// Given two sequenced Enumerables, LCS returns an Array containing their
+// longest common subsequences.
+//
+// lcs = Diff::LCS.LCS(seq1, seq2)
+//
+// This array whose contents is such that:
+//
+// lcs.each_with_index do |ee, ii|
+// assert(ee.nil? || (seq1[ii] == seq2[ee]))
+// end
+//
+// If a block is provided, the matching subsequences will be yielded from
+// +seq1+ in turn and may be modified before they are placed into the
+// returned Array of subsequences.
+Difference.LCS.LCS = function(seq1, seq2, block) {
+ var matches = Difference.LCS.__lcs(seq1, seq2);
+ var ret = [];
+
+ for(var ii = 0; ii < matches.length; ii++) {
+ if(matches[ii] != null) {
+ if(block != null) {
+ ret.push(block(seq1[ii]));
+ } else {
+ ret.push(seq1[ii]);
+ }
+ }
+ }
+ // Return the result
+ return ret;
+}
+
+// Maps a change's action to the action actually applied, per direction:
+// "patch" applies actions as-is; "unpatch" swaps '+' and '-'.
+var PATCH_MAP = {
+ patch:{ '+':'+', '-':'-', '!':'!', '=':'=' },
+ unpatch:{ '+':'-', '-':'+', '!':'!', '=':'=' }
+}
+
+// Given a patchset, convert the current version to the new
+// version. If +direction+ is not specified (must be
+// :patch or :unpatch), then discovery of the
+// direction of the patch will be attempted.
+Difference.LCS.patch = function(src, patchset, direction) {
+ var string = src.constructor == String;
+ // Start with an empty type of the source's class
+ var res = string ? '' : [];
+
+ // Normalize the patchset
+ var patchset = this.__normalize_patchset(patchset);
+ var direction = direction || Difference.LCS.__diff_direction(src, patchset);
+ direction = direction || "patch";
+
+ var ai = 0, bj = 0;
+ var el = null, op = null, np = null;
+
+ for(var i = 0; i < patchset.length; i++) {
+ var change = patchset[i];
+ // Both Change and ContextChange has the action
+ var action = PATCH_MAP[direction][change.action];
+
+ if(change instanceof ContextChange) {
+ if(direction == 'patch') {
+ el = change.new_element;
+ op = change.old_position;
+ np = change.new_position;
+ } else if(direction == 'unpatch') {
+ el = change.old_element;
+ op = change.new_position;
+ np = change.old_position;
+ }
+
+ if(action == '-') { //Remove details from the old string
+ while(ai < op) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+ ai = ai + 1;
+ } else if(action == '+') {
+ while(bj < np) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+ string ? res = res + el : res.push(el);
+ bj = bj + 1;
+ } else if(action == '=') {
+ // This only appears in sdiff output with the SDiff callback.
+ // Therefore, we only need to worry about dealing with a single
+ // element.
+ string ? res = res + el : res.push(el);
+ ai = ai + 1;
+ bj = bj + 1;
+ } else if(action == '!') {
+ while(ai < op) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+ bj = bj + 1;
+ ai = ai + 1;
+ string ? res = res + el : res.push(el);
+ }
+ } else if(change instanceof Change) {
+ if(action == '-') {
+ while(ai < change.position) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+ ai = ai + 1;
+ } else if(action = '+') {
+ while(bj < change.position) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+ bj = bj + 1;
+ string ? res = res + change.element : res.push(change.element);
+ }
+ }
+ }
+
+ while(ai < src.length) {
+ string ? res = res + src.substr(ai, 1) : res.push(src[ai]);
+ ai = ai + 1;
+ bj = bj + 1;
+ }
+
+ return res;
+}
+
+// Examine the patchset and the source to see in which direction the
+// patch should be applied.
+//
+// WARNING: By default, this examines the whole patch, so this could take
+// some time. This also works better with Diff::LCS::ContextChange or
+// Diff::LCS::Change as its source, as an array will cause the creation
+// of one of the above.
+Difference.LCS.__diff_direction = function(src, patchset, limit) {
+ var count = 0, left = 0, left_miss = 0, right = 0, right_miss = 0, element = null;
+ var string = src.constructor == String;
+
+ // Process all changes in the patchset
+ for(var i = 0; i < patchset.length; i++) {
+ var change = patchset[i];
+ count = count + 1;
+
+ if(change instanceof Change) {
+ // With a simplistic change, we can't tell the difference between
+ // the left and right on '!' actions, so we ignore those. On '='
+ // actions, if there's a miss, we miss both left and right.
+ element = string ? src.substr(change.position, 1) : src[change.position];
+
+ if(change.action == '-') {
+ element == change.element ? left = left + 1 : left_miss = left_miss + 1;
+ } else if(change.action == '+') {
+ element == change.element ? right = right + 1 : right_miss = right_miss + 1;
+ } else if(change.action == '=') {
+ if(element != change.element) {
+ left_miss = left_miss + 1;
+ right_miss = right_miss + 1;
+ }
+ }
+ } else if(change instanceof ContextChange) {
+ if(change.action == '-') {
+ element = string ? src.substr(change.old_position, 1) : src[change.old_position];
+ element == change.old_element ? left = left + 1 : left_miss = left_miss + 1;
+ } else if(change.action == '+') {
+ element = string ? src.substr(change.new_position, 1) : src[change.new_position];
+ element == change.new_element ? right = right + 1 : right_miss = right_miss + 1;
+ } else if(change.action == '=') {
+ var le = string ? src.substr(change.old_position, 1) : src[change.old_position];
+ var re = string ? src.substr(change.new_position, 1) : src[change.new_position];
+
+ if(le != change.old_element) left_miss = left_miss + 1;
+ if(re != change.new_element) right_miss = right_miss + 1;
+ } else if(change.action == '!') {
+ element = string ? src.substr(change.old_position, 1) : src[change.old_position];
+ if(element == change.old_element) {
+ left = left + 1;
+ } else {
+ left_miss = left_miss + 1;
+ right_miss = right_miss + 1;
+ }
+ }
+ }
+
+ if(limit != null && count > limit) break;
+ };
+
+ var no_left = (left == 0) && (left_miss >= 0);
+ var no_right = (right == 0) && (right_miss >= 0);
+
+ var result = [no_left, no_right];
+ if(!no_left && no_right) {
+ return "patch";
+ } else if(no_left && !no_right) {
+ return "unpatch";
+ } else {
+ throw "The provided patchset does not appear to apply to the provided value as either source or destination value."
+ }
+}
+
+// Normalize the patchset. A patchset is always a sequence of changes, but
+// how those changes are represented may vary, depending on how they were
+// generated. In all cases we support, we also support the array
+// representation of the changes. The formats are:
+//
+// [ // patchset <- Diff::LCS.diff(a, b)
+// [ // one or more hunks
+// Diff::LCS::Change // one or more changes
+// ] ]
+//
+// [ // patchset, equivalent to the above
+// [ // one or more hunks
+// [ action, line, value ] // one or more changes
+// ] ]
+//
+// [ // patchset <- Diff::LCS.diff(a, b, Diff::LCS::ContextDiffCallbacks)
+// // OR <- Diff::LCS.sdiff(a, b, Diff::LCS::ContextDiffCallbacks)
+// [ // one or more hunks
+// Diff::LCS::ContextChange // one or more changes
+// ] ]
+//
+// [ // patchset, equivalent to the above
+// [ // one or more hunks
+// [ action, [ old line, old value ], [ new line, new value ] ]
+// // one or more changes
+// ] ]
+//
+// [ // patchset <- Diff::LCS.sdiff(a, b)
+// // OR <- Diff::LCS.diff(a, b, Diff::LCS::SDiffCallbacks)
+// Diff::LCS::ContextChange // one or more changes
+// ]
+//
+// [ // patchset, equivalent to the above
+// [ action, [ old line, old value ], [ new line, new value ] ]
+// // one or more changes
+// ]
+//
+// The result of this will be either of the following.
+//
+// [ // patchset
+// Diff::LCS::ContextChange // one or more changes
+// ]
+//
+// [ // patchset
+// Diff::LCS::Change // one or more changes
+// ]
+//
+// If either of the above is provided, it will be returned as such.
+//
+// Flattens any of the supported patchset shapes (see comment block above)
+// into a single flat array of Change / ContextChange objects.
+Difference.LCS.__normalize_patchset = function(patchset) {
+ return flatten(patchset.map(function(hunk) {
+ if(hunk instanceof ContextChange || hunk instanceof Change) {
+ return hunk;
+ } else if(Array.isArray(hunk)) {
+ // A bare [action, [op, oe], [np, ne]] triple is itself one change.
+ if(!Array.isArray(hunk[0]) && Array.isArray(hunk[1]) && Array.isArray(hunk[2])) {
+ return ContextChange.from_a(hunk);
+ } else {
+ // Otherwise the hunk is a list of changes; normalize each one.
+ // NOTE(review): elements that are neither Change instances nor
+ // arrays fall through and map to undefined -- confirm whether such
+ // input is possible before relying on the result.
+ return hunk.map(function(change) {
+ if(change instanceof ContextChange || change instanceof Change) {
+ return change;
+ } else if(Array.isArray(change)) {
+ // change[1] will ONLY be an array in a ContextChange#to_a call.
+ // In Change#to_a, it represents the line (singular).
+ if(Array.isArray(change[1])) {
+ return ContextChange.from_a(change);
+ } else {
+ return Change.from_a(change);
+ }
+ }
+ });
+ }
+ } else {
+ throw "Cannot normalize the hunk: " + util.inspect(hunk);
+ }
+ }));
+}
+
+// Gotten from
+var flatten = function(array) {
+ return array.reduce(function(a,b) {
+ return a.concat(b);
+ }, []);
+}
+
+// Compute the longest common subsequence between the arrays a and b the result
+// being an array whose content is such that they
+// count = 0
+// result.forEach(function(e) {
+// if(e) a[count] == b[e];
+// count++;
+// })
+Difference.LCS.__lcs = function(a, b) {
+ var a_start = 0;
+ var b_start = 0;
+ var a_finish = a.length - 1;
+ var b_finish = b.length - 1;
+ var vector = [];
+
+ // Remove common elements at the beginning
+ while((a_start <= a_finish) && (b_start <= b_finish) && (a[a_start] == b[b_start])) {
+ vector[a_start] = b_start;
+ a_start = a_start + 1;
+ b_start = b_start + 1;
+ }
+
+ // Remove common elements at the end
+ while((a_start <= a_finish) && (b_start <= b_finish) && (a[a_finish] == b[b_finish])) {
+ vector[a_finish] = b_finish;
+ a_finish = a_finish - 1;
+ b_finish = b_finish - 1;
+ }
+
+ // Now compute the equivalent classes of positions of elements
+ var b_matches = Difference.LCS.__position_hash(b, b_start, b_finish);
+
+ // Define treshold and links
+ var thresh = [];
+ var links = [];
+
+ for(var ii = a_start; ii <= a_finish; ii++) {
+ var ai = Array.isArray(a) ? a[ii] : a.charAt(ii);
+ var bm = b_matches[ai];
+ bm = bm ? bm : [];
+ var kk = null;
+
+ bm.reverse().forEach(function(jj) {
+ if(kk != null && (thresh[kk] > jj) && (thresh[kk - 1] < jj)) {
+ thresh[kk] = jj;
+ } else {
+ kk = Difference.LCS.__replace_next_larger(thresh, jj, kk);
+ }
+ // Add link
+ if(kk != null) links[kk] = [(kk > 0) ? links[kk - 1] : null, ii, jj];
+ });
+ }
+
+ // Build the vector
+ if(thresh.length > 0) {
+ var link = links[thresh.length - 1];
+
+ while(link != null) {
+ vector[link[1]] = link[2];
+ link = link[0];
+ }
+ }
+
+ // Return the vector of the longest commong subsequence
+ return vector;
+}
+
+// Find the place at which +value+ would normally be inserted into the
+// Enumerable. If that place is already occupied by +value+, do nothing
+// and return +nil+. If the place does not exist (i.e., it is off the end
+// of the Enumerable), add it to the end. Otherwise, replace the element
+// at that point with +value+. It is assumed that the Enumerable's values
+// are numeric.
+//
+// This operation preserves the sort order.
+// (see comment block above) Binary-inserts `value` into the sorted
+// `enumerable`, returning the index written, or null if already present.
+Difference.LCS.__replace_next_larger = function(enumerable, value, last_index) {
+ // Is it off the end
+ if(enumerable.length == 0 || (value > enumerable[enumerable.length - 1])) {
+ enumerable.push(value);
+ return enumerable.length - 1;
+ }
+
+ // Binary search for the insertion point
+ // NOTE(review): `last_index || enumerable.length` treats an incoming hint
+ // of 0 (or null) as "unset" and scans the full array -- confirm a hint of
+ // 0 is never meaningful (__lcs passes kk, which can be 0).
+ var last_index = last_index || enumerable.length;
+ var first_index = 0;
+
+ while(first_index <= last_index) {
+ var ii = (first_index + last_index) >> 1;
+ var found = enumerable[ii];
+
+ if(value == found) {
+ // Already present: nothing to do, sort order is preserved.
+ return null;
+ } else if(value > found) {
+ first_index = ii + 1;
+ } else {
+ last_index = ii - 1;
+ }
+ }
+
+ // The insertion point is in first_index; overwrite the next larger
+ // value.
+ enumerable[first_index] = value;
+ return first_index;
+}
+
+Difference.LCS.__position_hash = function(enumerable, interval_start, interval_end) {
+ interval_start = interval_start ? interval_start : 0;
+ interval_end = interval_end ? interval_end : -1;
+
+ var hash = {}
+ for(var i = interval_start; i <= interval_end; i++) {
+ var kk = Array.isArray(enumerable) ? enumerable[i] : enumerable.charAt(i);
+ hash[kk] = Array.isArray(hash[kk]) ? hash[kk] : [];
+ hash[kk].push(i);
+ }
+ return hash;
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/diff/hunk.js b/node_modules/git/lib/diff/hunk.js
new file mode 100644
index 00000000..61d7cfe1
--- /dev/null
+++ b/node_modules/git/lib/diff/hunk.js
@@ -0,0 +1,291 @@
+var Block = require('./block').Block;
+
// A Hunk is a group of Blocks which overlap because of the context
// surrounding each block. (So if we're not using context, every hunk will
// contain one block.) Used in the diff program (bin/diff).
var Hunk = exports.Hunk = function(data_old, data_new, piece, context, file_length_difference) {
  // Internal variables
  var _flag_context = null;
  var self = this;

  // At first, a hunk will have just one Block in it
  this.blocks = [new Block(piece)];
  this.data_old = data_old;
  this.data_new = data_new;

  // Track the old/new file length difference before and after this block.
  var before = file_length_difference, after = file_length_difference;
  after = after + this.blocks[0].diff_size;
  this.file_length_difference = after; // The caller must get this manually
  // Other parameters: first/last removed position (a1/a2) and first/last
  // inserted position (b1/b2).
  var a1 = null, a2 = null;
  var b1 = null, b2 = null;

  // Save the start & end of each array. If the array doesn't exist
  // (e.g., we're only adding items in this block), then figure out the
  // line number based on the line number of the other file and the
  // current difference in file lengths.
  if(this.blocks[0].remove.length > 0) {
    a1 = this.blocks[0].remove[0].position;
    a2 = this.blocks[0].remove[this.blocks[0].remove.length - 1].position;
  }

  if(this.blocks[0].insert.length > 0) {
    b1 = this.blocks[0].insert[0].position;
    b2 = this.blocks[0].insert[this.blocks[0].insert.length - 1].position;
  }

  // NOTE(review): `a1 || (b1 - before)` treats a position of 0 as "absent",
  // so a change touching line 0 falls through to the computed fallback —
  // verify against the upstream diff-lcs port before relying on hunks at
  // the very start of a file.
  this.start_old = a1 || (b1 - before);
  this.start_new = b1 || (a1 + before);
  this.end_old = a2 || (b2 - after);
  this.end_new = b2 || (a2 + after);

  // Change the "start" and "end" fields to note that context should be added
  // to this hunk
  Object.defineProperty(this, "flag_context", { get: function() { return _flag_context; }, set: function(context) {
    if(context == null || context == 0) return null;

    // Expand the start upward by up to +context+ lines, clamped at 0.
    var add_start = (context > self.start_old) ? self.start_old : context;
    var add_end = null;

    self.start_old = self.start_old - add_start;
    self.start_new = self.start_new - add_start;

    // Expand the end downward, clamped to the end of the old data.
    if((self.end_old + context) > self.data_old.length) {
      add_end = self.data_old.length - self.end_old;
    } else {
      add_end = context;
    }

    self.end_old = self.end_old + add_end;
    self.end_new = self.end_new + add_end;
    _flag_context = context;
  }, enumerable: true});

  // Set the flag_context
  this.flag_context = context;
}
+
// Prepend the blocks of +hunk+ to this hunk, adopting its start offsets.
Hunk.prototype.unshift = function(hunk) {
  var merged = hunk.blocks.concat(this.blocks);
  this.start_old = hunk.start_old;
  this.start_new = hunk.start_new;
  this.blocks = merged;
}
+
// Is there an overlap between this hunk and the given (older) hunk?
// Note: if the end of the old hunk is one less than the beginning of this
// one, they are still considered overlapping.
Hunk.prototype.overlaps = function(hunk) {
  if(hunk == null) return null;

  var touches_old = (this.start_old - hunk.end_old) <= 1;
  var touches_new = (this.start_new - hunk.end_new) <= 1;
  return touches_old || touches_new;
}
+
// Render this hunk in the requested diff format. 'ed' returns the hunk
// itself; unknown formats raise.
Hunk.prototype.diff = function(format) {
  if(format == 'ed') return this;
  if(format == 'reverse_ed' || format == 'ed_finish') return ed_diff(this, format);
  if(format == "old") return old_diff(this);
  if(format == 'unified') return unified_diff(this);
  if(format == 'context') return context_diff(this);
  throw "unknown diff format " + format;
}
+
// Invoke +block+ once for each old-file line covered by this hunk.
Hunk.prototype.each_old = function(block) {
  this.data_old.slice(this.start_old, this.end_old).forEach(function(entry) {
    block(entry);
  });
}
+
// Render a hunk in the old (default) diff format. Note that an old diff
// can't have any context; therefore, we know that there's only one block
// in the hunk.
var old_diff = function(hunk) {
  // Bugfix: `sys` was never defined; report via the console instead.
  if(hunk.blocks.length > 1) console.error("expecting only one block in an old diff hunk!");
  // Map block operation to the old-style action letter.
  var opt_act = {'+':'a', '-':'d', '!':'c'};
  var block = hunk.blocks[0];

  // Calculate item number range. Old diff range is just like a context
  // diff range, except the ranges are on one line with the action between
  // them. Bugfix: this called the nonexistent `context_rang(...)` and
  // omitted the hunk argument.
  var s = "" + context_range(hunk, "old") + opt_act[block.op] + context_range(hunk, "new") + "\n";
  // If removing anything, just print out all the remove lines in the hunk
  // which is just all the remove lines in the block.
  if(block.remove.length > 0) {
    hunk.data_old.slice(hunk.start_old, hunk.end_old).forEach(function(e) {
      s = s + "< " + e + "\n";
    });
  }

  if(block.insert.length > 0) {
    hunk.data_new.slice(hunk.start_new, hunk.end_new).forEach(function(e) {
      // Bugfix: the terminator was "\n;" (stray semicolon inside the string).
      s = s + "> " + e + "\n";
    });
  }
  // Return the diff string
  return s;
}
+
// Render a hunk in unified diff format: an "@@ -a,b +c,d @@" header
// followed by the hunk body with ' ', '-' and '+' line prefixes.
var unified_diff = function(hunk) {
  // Calculate item number range.
  var s = "@@ -" + unified_range(hunk, 'old') + " +" + unified_range(hunk, 'new') + " @@\n";

  // Outlist starts containing the hunk of the old file. Removing an item
  // just means putting a '-' in front of it. Inserting an item requires
  // getting it from the new file and splicing it in. We splice in
  // +num_added+ items. Remove blocks use +num_added+ because splicing
  // changed the length of outlist.
  //
  // We remove +num_removed+ items. Insert blocks use +num_removed+
  // because their item numbers -- corresponding to positions in the NEW
  // file -- don't take removed items into account.
  var lo = hunk.start_old;
  var hi = hunk.end_old;
  var num_added = 0;
  var num_removed = 0;

  // Create list of stripped entries
  // (slice is end-exclusive, hence hi + 1; each line starts as context ' ')
  var outlist = hunk.data_old.slice(lo, hi + 1).map(function(e) { return e.replace(/^/g, ' '); });
  // Process all the blocks
  hunk.blocks.forEach(function(block) {
    block.remove.forEach(function(item) {
      var op = item.action.toString(); // -
      var offset = item.position - lo + num_added;
      outlist[offset] = outlist[offset].replace(/^ /g, op.toString());
      num_removed = num_removed + 1;
    })

    block.insert.forEach(function(item) {
      var op = item.action.toString(); // +
      var offset = item.position - hunk.start_new + num_removed;
      outlist.splice(offset, 0, ("" + op + hunk.data_new[item.position]));
      num_added = num_added + 1;
    });
  });

  // Return the list
  return s + outlist.join('\n');
}
+
// Render a hunk in context diff format ("*** a,b ****" / "--- c,d ----").
var context_diff = function(hunk) {
  var s = '***************\n';
  s = s + '*** ' + context_range(hunk, 'old') + ' ****\n';
  // Retrieve the context range for the new file.
  var r = context_range(hunk, 'new');
  var outlist = null;

  // Print out file 1 part for each block in context diff format if there
  // are any blocks that remove items
  var lo = hunk.start_old;
  var hi = hunk.end_old;
  var removes = hunk.blocks.filter(function(e) { return !(e.remove.length == 0); });

  // Bugfix: filter() always returns an array (truthy), so test its length.
  if(removes.length > 0) {
    // Bugfix: slice is end-exclusive, so use hi + 1 to include the last
    // hunk line (consistent with unified_diff above).
    outlist = hunk.data_old.slice(lo, hi + 1).map(function(e) { return e.replace(/^/g, ' '); });
    removes.forEach(function(block) {
      block.remove.forEach(function(item) {
        outlist[item.position - lo] = outlist[item.position - lo].replace(/^ /g, block.op); // - or !
      });
    });
    // Add to diff string
    s = s + outlist.join('\n');
  }

  // Bugfix: the context-format separator is three dashes ("--- c,d ----").
  s = s + '\n--- ' + r + ' ----\n';
  lo = hunk.start_new;
  hi = hunk.end_new;
  var inserts = hunk.blocks.filter(function(e) { return !(e.insert.length == 0); });

  if(inserts.length > 0) {
    outlist = hunk.data_new.slice(lo, hi + 1).map(function(e) { return e.replace(/^/g, ' '); });
    inserts.forEach(function(block) {
      block.insert.forEach(function(item) {
        outlist[item.position - lo] = outlist[item.position - lo].replace(/^ /g, block.op); // + or !
      });
    });
    // Add to diff string
    s = s + outlist.join('\n');
  }
  // Return the diff string
  return s;
}
+
// Render a hunk as an ed / reverse-ed script. An ed hunk cannot carry
// context, so there should be exactly one block.
var ed_diff = function(hunk, format) {
  // Map block operation to the ed action letter.
  var opt_act = {'+':'a', '-':'d', '!':'c'};
  // Bugfix: `sys` was never defined; report via the console instead.
  if(hunk.blocks.length > 1) console.error("expecting only one block in an old diff hunk!");
  var s = null;

  // Bugfix: both branches referenced `op_act`, which does not exist; the
  // lookup table above is `opt_act`.
  if(format == 'reverse_ed') {
    s = "" + opt_act[hunk.blocks[0].op] + context_range(hunk, 'old') + '\n';
  } else {
    s = "" + context_range(hunk, 'old').replace(/,/g, ' ') + opt_act[hunk.blocks[0].op] + '\n';
  }

  if(hunk.blocks[0].insert.length > 0) {
    hunk.data_new.slice(hunk.start_new, hunk.end_new).forEach(function(e) {
      s = s + '' + e + '\n';
    });
    // Terminate the insert body with the ed end-of-input marker.
    s = s + '.\n';
  }
  // Return diff string
  return s;
}
+
// Generate the item-number range to print for context/old/ed formats: a
// single number when the range covers one item, otherwise "start,end".
// Output line numbers are 1-based.
var context_range = function(hunk, mode) {
  var first = null, last = null;

  if(mode == 'old') {
    first = hunk.start_old + 1;
    last = hunk.end_old + 1;
  } else if(mode == 'new') {
    first = hunk.start_new + 1;
    last = hunk.end_new + 1;
  }

  if(first < last) return "" + first + "," + last;
  return "" + last;
}
+
// Generate the range for a unified hunk header: the 1-based number where
// the block starts, followed by ",length" unless the length is exactly 1.
var unified_range = function(hunk, mode) {
  var first = null, last = null;

  if(mode == 'old') {
    first = hunk.start_old + 1;
    last = hunk.end_old + 1;
  } else if(mode == 'new') {
    first = hunk.start_new + 1;
    last = hunk.end_new + 1;
  }

  var length = last - first + 1;
  var shown = (length < 2) ? last : first; // mirrors the original's quirk
  if(length == 1) return "" + shown;
  return shown + "," + length;
}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/actor.js b/node_modules/git/lib/git/actor.js
new file mode 100644
index 00000000..6c1deb50
--- /dev/null
+++ b/node_modules/git/lib/git/actor.js
@@ -0,0 +1,21 @@
+var util = require('util');
+
+Actor = exports.Actor = function(name, email) {
+ var _name = name, _email = email;
+ // Control access to internal variables
+ Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
+ Object.defineProperty(this, "email", { get: function() { return _email; }, set: function(value) { _email = value; }, enumerable: true});
+}
+
+Actor.from_string = function(string) {
+ if(string.match(/<.+>/)) {
+ var results = string.match(/(.*) <(.+?)>/);
+ return new Actor(results[1], results[2]);
+ } else {
+ return new Actor(string, null);
+ }
+}
+
+Actor.prototype.toString = function() {
+ return this.name;
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/binary_parser.js b/node_modules/git/lib/git/binary_parser.js
new file mode 100644
index 00000000..a574d029
--- /dev/null
+++ b/node_modules/git/lib/git/binary_parser.js
@@ -0,0 +1,253 @@
+//+ Jonas Raoni Soares Silva
+//@ http://jsfromhell.com/classes/binary-parser [v1.0]
+var chr = String.fromCharCode;
+
// Binary parser front-end: +bigEndian+ selects byte order; when
// +allowExceptions+ is set, warn() throws instead of returning.
var p = exports.BinaryParser = function( bigEndian, allowExceptions ){
  this.bigEndian = bigEndian;
  this.allowExceptions = allowExceptions;
};
+
// A parse buffer: wraps a raw binary string as an array of byte values,
// normalised for the requested endianness (see setBuffer below).
var Buffer = exports.BinaryParser.Buffer = function( bigEndian, buffer ){
  this.bigEndian = bigEndian || 0;
  this.buffer = [];
  this.setBuffer( buffer );
};
+
// Load +data+ (a raw binary string) into this.buffer as an array of byte
// values. The compressed loop stores the string reversed (slot l - i is
// assigned before --i is applied to the charCodeAt index); for big-endian
// buffers the array is then reversed back, so readBits can treat the last
// array element as holding the least-significant bits.
Buffer.prototype.setBuffer = function( data ){
  if( data ){
    for( var l, i = l = data.length, b = this.buffer = new Array( l ); i; b[l - i] = data.charCodeAt( --i ) );
    this.bigEndian && b.reverse();
  }
};
+
// True when the buffer holds at least ceil(neededBits / 8) bytes.
Buffer.prototype.hasNeededBits = function( neededBits ){
  var neededBytes = -( -neededBits >> 3 ); // ceiling division by 8
  return this.buffer.length >= neededBytes;
};
+
// Throw unless the buffer holds enough bytes to read +neededBits+.
Buffer.prototype.checkBuffer = function( neededBits ){
  if( this.hasNeededBits( neededBits ) ) return;
  throw new Error( "checkBuffer::missing bytes" );
};
+
// Read +length+ bits starting at bit offset +start+ (bit 0 being the
// least-significant bit of the last buffer byte) and return them as a
// non-negative number.
Buffer.prototype.readBits = function( start, length ){
  //shl fix: Henri Torgemane ~1996 (compressed by Jonas Raoni)
  // Overflow-safe left shift: doubles +a+ b times while keeping the value
  // positive where the native << would wrap into the signed 32-bit range.
  function shl( a, b ){
    for( ; b--; a = ( ( a %= 0x7fffffff + 1 ) & 0x40000000 ) == 0x40000000 ? a * 2 : ( a - 0x40000000 ) * 2 + 0x7fffffff + 1 );
    return a;
  }
  // Nothing to read for negative offsets or non-positive lengths.
  if( start < 0 || length <= 0 )
    return 0;
  this.checkBuffer( start + length );
  // Walk from the byte containing +start+ toward more significant bytes,
  // accumulating the requested bit window into +sum+.
  for( var offsetLeft, offsetRight = start % 8, curByte = this.buffer.length - ( start >> 3 ) - 1, lastByte = this.buffer.length + ( -( start + length ) >> 3 ), diff = curByte - lastByte, sum = ( ( this.buffer[ curByte ] >> offsetRight ) & ( ( 1 << ( diff ? 8 - offsetRight : length ) ) - 1 ) ) + ( diff && ( offsetLeft = ( start + length ) % 8 ) ? ( this.buffer[ lastByte++ ] & ( ( 1 << offsetLeft ) - 1 ) ) << ( diff-- << 3 ) - offsetRight : 0 ); diff; sum += shl( this.buffer[ lastByte++ ], ( diff-- << 3 ) - offsetRight ) );
  return sum;
};
+
// Report a parse problem: throw when exceptions are enabled, otherwise
// signal the caller by returning a truthy value.
p.warn = function( msg ){
  if( this.allowExceptions ) throw new Error( msg );
  return 1;
};
// Decode an IEEE-754-style float with the given significand/exponent bit
// widths (23/8 = single precision, 52/11 = double) from a binary string.
p.decodeFloat = function( data, precisionBits, exponentBits ){
  var b = new this.Buffer( this.bigEndian, data );
  b.checkBuffer( precisionBits + exponentBits + 1 );
  // Extract the sign bit, the biased exponent and set up significand state.
  var bias = Math.pow( 2, exponentBits - 1 ) - 1, signal = b.readBits( precisionBits + exponentBits, 1 ), exponent = b.readBits( precisionBits, exponentBits ), significand = 0,
    divisor = 2, curByte = b.buffer.length + ( -precisionBits >> 3 ) - 1;
  do{
    // Accumulate significand bits most-significant-first as 1/2, 1/4, ...
    for( var byteValue = b.buffer[ ++curByte ], startBit = precisionBits % 8 || 8, mask = 1 << startBit; mask >>= 1; ( byteValue & mask ) && ( significand += 1 / divisor ), divisor *= 2 );
  }while( precisionBits -= startBit );
  // All-ones exponent encodes NaN/±Infinity; zero exponent is denormal.
  return exponent == ( bias << 1 ) + 1 ? significand ? NaN : signal ? -Infinity : +Infinity : ( 1 + signal * -2 ) * ( exponent || significand ? !exponent ? Math.pow( 2, -bias + 1 ) * significand : Math.pow( 2, exponent - bias ) * ( 1 + significand ) : 0 );
};
// Decode a +bits+-wide integer from a binary string; when +signed+,
// values at or above half the range wrap to negative.
p.decodeInt = function( data, bits, signed, forceBigEndian ){
  var buffer = new this.Buffer( this.bigEndian || forceBigEndian, data );
  var value = buffer.readBits( 0, bits );
  var max = Math.pow( 2, bits );
  if( signed && value >= max / 2 ) return value - max;
  return value;
};
// Encode +data+ as an IEEE-754-style float with the given significand and
// exponent bit widths, returned as a raw binary string.
// NOTE(review): `n` and `r` are assigned without `var` and leak as
// globals — this breaks under strict mode; confirm before refactoring.
p.encodeFloat = function( data, precisionBits, exponentBits ){
  var bias = Math.pow( 2, exponentBits - 1 ) - 1, minExp = -bias + 1, maxExp = bias, minUnnormExp = minExp - precisionBits,
    status = isNaN( n = parseFloat( data ) ) || n == -Infinity || n == +Infinity ? n : 0,
    exp = 0, len = 2 * bias + 1 + precisionBits + 3, bin = new Array( len ),
    signal = ( n = status !== 0 ? 0 : n ) < 0, n = Math.abs( n ), intPart = Math.floor( n ), floatPart = n - intPart,
    i, lastBit, rounded, j, result;
  // Write the integer part (binary) and then the fractional part into bin[].
  for( i = len; i; bin[--i] = 0 );
  for( i = bias + 2; intPart && i; bin[--i] = intPart % 2, intPart = Math.floor( intPart / 2 ) );
  for( i = bias + 1; floatPart > 0 && i; ( bin[++i] = ( ( floatPart *= 2 ) >= 1 ) - 0 ) && --floatPart );
  for( i = -1; ++i < len && !bin[i]; );
  // Round to nearest at the last representable significand bit.
  if( bin[( lastBit = precisionBits - 1 + ( i = ( exp = bias + 1 - i ) >= minExp && exp <= maxExp ? i + 1 : bias + 1 - ( exp = minExp - 1 ) ) ) + 1] ){
    if( !( rounded = bin[lastBit] ) ){
      for( j = lastBit + 2; !rounded && j < len; rounded = bin[j++] );
    }
    for( j = lastBit + 1; rounded && --j >= 0; ( bin[j] = !bin[j] - 0 ) && ( rounded = 0 ) );
  }
  for( i = i - 2 < 0 ? -1 : i - 3; ++i < len && !bin[i]; );
  // Re-derive the exponent after rounding; clamp into the denormal range on
  // underflow (warning when the value is too small even for a denormal).
  if( ( exp = bias + 1 - i ) >= minExp && exp <= maxExp )
    ++i;
  else if( exp < minExp ){
    exp != bias + 1 - len && exp < minUnnormExp && this.warn( "encodeFloat::float underflow" );
    i = bias + 1 - ( exp = minExp - 1 );
  }
  // Overflow, NaN and ±Infinity collapse to the all-ones exponent encoding.
  if( intPart || status !== 0 ){
    this.warn( intPart ? "encodeFloat::float overflow" : "encodeFloat::" + status );
    exp = maxExp + 1;
    i = bias + 2;
    if( status == -Infinity )
      signal = 1;
    else if( isNaN( status ) )
      bin[i] = 1;
  }
  // Assemble sign + exponent + significand bits, then pack 8 bits per char.
  for( n = Math.abs( exp + bias ), j = exponentBits + 1, result = ""; --j; result = ( n % 2 ) + result, n = n >>= 1 );
  for( n = 0, j = 0, i = ( result = ( signal ? "1" : "0" ) + result + bin.slice( i, i + precisionBits ).join( "" ) ).length, r = []; i; j = ( j + 1 ) % 8 ){
    n += ( 1 << j ) * result.charAt( --i );
    if( j == 7 ){
      r[r.length] = String.fromCharCode( n );
      n = 0;
    }
  }
  r[r.length] = n ? String.fromCharCode( n ) : "";
  return ( this.bigEndian ? r.reverse() : r ).join( "" );
};
// Encode +data+ as a +bits+-wide integer in a binary string; out-of-range
// input is reported via warn() and encoded as 0.
p.encodeInt = function( data, bits, signed, forceBigEndian ){
  var max = Math.pow( 2, bits );
  if( data >= max || data < -( max / 2 ) ){
    this.warn( "encodeInt::overflow" );
    data = 0;
  }
  if( data < 0 ) data += max;
  // Emit bytes least-significant first...
  var r = [];
  while( data ){
    r[r.length] = String.fromCharCode( data % 256 );
    data = Math.floor( data / 256 );
  }
  // ...then zero-pad up to ceil(bits / 8) bytes.
  var pad = -( -bits >> 3 ) - r.length;
  while( pad-- > 0 ) r[r.length] = "\0";
  return ( (this.bigEndian || forceBigEndian) ? r.reverse() : r ).join( "" );
};
// Named width/signedness shortcuts over the generic encode/decode
// routines. (toLong/toQWord keep the original behaviour of signed 64-bit
// decoding in both cases.)
(function(){
  var ints = [
    ['Small', 8, true],  ['Byte', 8, false],
    ['Short', 16, true], ['Word', 16, false],
    ['Int', 32, true],   ['DWord', 32, false],
    ['Long', 64, true],  ['QWord', 64, true]
  ];
  ints.forEach(function(spec){
    var name = spec[0], bits = spec[1], signed = spec[2];
    p['to' + name] = function( data ){ return this.decodeInt( data, bits, signed ); };
    p['from' + name] = function( data ){ return this.encodeInt( data, bits, signed ); };
  });
  var floats = [['Float', 23, 8], ['Double', 52, 11]];
  floats.forEach(function(spec){
    var name = spec[0], precision = spec[1], exponent = spec[2];
    p['to' + name] = function( data ){ return this.decodeFloat( data, precision, exponent ); };
    p['from' + name] = function( data ){ return this.encodeFloat( data, precision, exponent ); };
  });
})();
+
// Split a 32-bit number into four characters, most significant byte
// first. Factored out so it can be shared by add_header and push_int32.
p.encode_int32 = function(number) {
  var unsigned = (number < 0) ? (number + 0x100000000) : number;
  var a = Math.floor(unsigned / 0xffffff);
  unsigned &= 0xffffff;
  var b = Math.floor(unsigned / 0xffff);
  unsigned &= 0xffff;
  var c = Math.floor(unsigned / 0xff);
  unsigned &= 0xff;
  var d = Math.floor(unsigned);
  return chr(a) + chr(b) + chr(c) + chr(d);
};
+
// Split a 64-bit number into eight characters, most significant byte
// first. The divisor/mask constants deliberately mirror encode_int32.
p.encode_int64 = function(number) {
  var unsigned = (number < 0) ? (number + 0x10000000000000000) : number;
  var masks = [0xffffffffffffff, 0xffffffffffff, 0xffffffffff, 0xffffffff, 0xffffff, 0xffff, 0xff];
  var out = "";
  for(var i = 0; i < masks.length; i++) {
    out += chr(Math.floor(unsigned / masks[i]));
    unsigned &= masks[i];
  }
  return out + chr(Math.floor(unsigned));
};
+
+/**
+ UTF8 methods
+**/
+
// Take a raw binary string and return a utf8 string.
// Bugfix: c/c1/c2/c3 were assigned without declaration and leaked as
// implicit globals (a ReferenceError under strict mode); they are now
// proper locals. Only 1- to 3-byte sequences are handled, as before.
p.decode_utf8 = function(a) {
  var string = "";
  var i = 0;
  var c = 0, c2 = 0, c3 = 0;

  while ( i < a.length ) {
    c = a.charCodeAt(i);
    if (c < 128) {
      // Single-byte (ASCII) sequence.
      string += String.fromCharCode(c);
      i++;
    } else if((c > 191) && (c < 224)) {
      // Two-byte sequence: 110xxxxx 10xxxxxx.
      c2 = a.charCodeAt(i+1);
      string += String.fromCharCode(((c & 31) << 6) | (c2 & 63));
      i += 2;
    } else {
      // Three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
      c2 = a.charCodeAt(i+1);
      c3 = a.charCodeAt(i+2);
      string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63));
      i += 3;
    }
  }
  return string;
};
+
// Encode a string as UTF-8 bytes followed by a NUL terminator (a cstring).
p.encode_cstring = function(s) {
  var utf8_bytes = unescape(encodeURIComponent(s));
  return utf8_bytes + p.fromByte(0);
};
+
// Take a utf8 string and return a binary string (one char per byte).
p.encode_utf8 = function(s) {
  var out = "";
  for (var idx = 0; idx < s.length; idx++) {
    var code = s.charCodeAt(idx);
    if (code < 128) {
      // ASCII: single byte.
      out += String.fromCharCode(code);
    } else if (code < 2048) {
      // Two-byte sequence: 110xxxxx 10xxxxxx.
      out += String.fromCharCode((code >> 6) | 192);
      out += String.fromCharCode((code & 63) | 128);
    } else {
      // Three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
      out += String.fromCharCode((code >> 12) | 224);
      out += String.fromCharCode(((code >> 6) & 63) | 128);
      out += String.fromCharCode((code & 63) | 128);
    }
  }
  return out;
};
+
+p.pprint = function(s) {
+ var util = require('util');
+
+ for (var i=0; i 0) {
+ var part = parts[0];
+
+ // Process the part
+ if(part.match(/^[0-9A-Fa-f]{40}$/)) {
+ // Parse references to SHA keys
+ if(line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+) (\d+)$/)) {
+ var matches = line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+) (\d+)$/);
+ var id = matches[1];
+ var origin_line = matches[2];
+ var final_line = matches[3];
+ var group_lines = matches[4];
+ // Set if of the current reference
+ info = {id:id};
+ blames.push([null, []]);
+ } else if(line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+)$/)) {
+ var matches = line.match(/^([0-9A-Fa-f]{40}) (\d+) (\d+)$/);
+ var id = matches[1];
+ var origin_line = matches[2];
+ var final_line = matches[3];
+ // Set if of the current reference
+ info = {id:id};
+ }
+ } else if(part.match(/^(author|committer)/)) {
+ if(part.match(/^(.+)-mail$/)) {
+ info[part.match(/^(.+)-mail$/)[1] + "_email"] = parts[parts.length - 1];
+ } else if(part.match(/^(.+)-time$/)) {
+ info[part.match(/^(.+)-time$/)[1] + "_date"] = new Date(parseInt(parts[parts.length - 1]) * 1000);
+ } else if(part.match(/^(author|committer)$/)) {
+ info[part.match(/^(author|committer)$/)[1]] = parts.slice(1).join(" ");
+ }
+ } else if(part.match(/^filename/)) {
+ info['filename'] = parts[parts.length - 1];
+ } else if(part.match(/^summary/)) {
+ info['summary'] = parts.slice(1).join(" ").replace('\n', '');
+ } else if(part == '') {
+ var commit = commits[info["id"]];
+ // Create new commit
+ if(!commit) {
+ // commit = new Commit(repo, )
+ var id = info['id'];
+ var author = Actor.from_string(info['author'] + ' ' + info['author_email']);
+ var authored_date = info['author_date'];
+ var committer = Actor.from_string(info['committer'] + ' ' + info['committer_email']);
+ var committed_date = info['committer_date'];
+ var message = info['summary'];
+ // Create a new commit
+ commit = new Commit(repo, id, null, null, author, authored_date, committer, committed_date, message);
+ commits[info['id']] = commit;
+ }
+
+ // Break up the parts
+ parts = line.match(/^\t(.*)$/);
+ blames[blames.length - 1][0] = commit;
+ blames[blames.length - 1][1].push(parts[1]);
+ info = null;
+ }
+ }
+ });
+ // Call back with the list of blames
+ callback(null, blames);
+ });
+}
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/commit.js b/node_modules/git/lib/git/commit.js
new file mode 100644
index 00000000..83aec7b5
--- /dev/null
+++ b/node_modules/git/lib/git/commit.js
@@ -0,0 +1,336 @@
var util = require('util'),
    Actor = require('./actor').Actor,
    Diff = require('./diff').Diff,      // bugfix: this line ended with ';',
    Tree = require('./tree').Tree;      // which leaked Tree as a global
+
+// Create a commit object
+var Commit = exports.Commit = function(repo, id, parents, tree, author, authored_date, committer, committed_date, message, filechanges) {
+ var _repo = repo, _id = id, _parents = parents, _tree = tree, _author = author, _authored_date = authored_date;
+ var _committer = committer, _committed_date = committed_date, _id_abbrev = null, _filechanges = filechanges;
+ // Ensure we have an empty message at least
+ message = message ? message : [];
+ message = Array.isArray(message) ? message : [message];
+ var _message = message.join("\n");
+ // Extract short message
+ var message_lines_filtered = message.filter(function(line) {
+ return line.trim() == '' ? false : true;
+ })
+ var _short_message = message_lines_filtered.length > 0 ? message_lines_filtered[0] : '';
+ // Internal properties
+ Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "sha", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "parents", { get: function() {
+ _parents = lazy_reader(_repo, _id, 'parents', _parents);
+ return _parents;
+ }, set: function(value) { _parents = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "tree", { get: function() {
+ _tree = lazy_reader(_repo, _id, 'tree', _tree);
+ return _tree;
+ }, set: function(value) { _tree = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "author", { get: function() {
+ _author = lazy_reader(_repo, _id, 'author', _author);
+ return _author;
+ }, set: function(value) { _author = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "authored_date", { get: function() {
+ _authored_date = lazy_reader(_repo, _id, 'authored_date', _authored_date);
+ return _authored_date;
+ }, set: function(value) { _authored_date = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "committer", { get: function() {
+ _committer = lazy_reader(_repo, _id, 'comitter', _committer);
+ return _committer;
+ }, set: function(value) { _comitter = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "committed_date", { get: function() {
+ _committed_date = lazy_reader(_repo, _id, 'committed_date', _committed_date);
+ return _committed_date;
+ }, set: function(value) { _committed_date = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "message", { get: function() {
+ _message = lazy_reader(_repo, _id, 'message', _message);
+ return _message;
+ }, set: function(value) { _message = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "short_message", { get: function() {
+ _short_message = lazy_reader(_repo, _id, 'short_message', _short_message);
+ return _short_message;
+ }, set: function(value) { _short_message = value; }, enumerable: true, configurable:true});
+ Object.defineProperty(this, "filechanges", { get: function() {
+ _filechanges = lazy_reader(_repo, _id, 'filechanges', _filechanges);
+ return _filechanges;
+ }, set: function(value) { _filechanges = value; }, enumerable: true, configurable:true});
+
+ Object.defineProperty(this, "_id_abbrev", { get: function() { return _id_abbrev; }, set: function(value) { _id_abbrev = value; }, enumerable: true, configurable:true});
+}
+
// Fill in a missing commit attribute by re-reading the commit from git.
// NOTE(review): Commit.find_all is asynchronous, but this function spins
// in `while(!done)` waiting for its callback. Node is single-threaded, so
// the callback cannot run while the loop is spinning — if +variable+ is
// null this busy-wait appears unable to terminate. Confirm intent before
// relying on lazy attribute loading.
var lazy_reader = function(repo, id, name, variable) {
  // Already materialised — return it untouched.
  if(variable != null) return variable;
  // Control the flow
  var done = false;
  var value = null;
  // Fetch all the commits
  Commit.find_all(repo, id, {max_count:1}, function(err, commits) {
    if(err) return done = true;
    value = commits[0][name];
    done = true;
  })

  while(!done) {};
  return value ? value : '';
}
+
// Reload this commit's fields from the repository (in place) and hand the
// refreshed commit to +callback+.
Commit.prototype.load = function(callback) {
  var self = this;

  Commit.find_all(this.repo, this.id, {max_count:1}, function(err, commits) {
    if(err) return callback(err, commits);
    var loaded = commits[0];
    Object.keys(loaded).forEach(function(field) {
      self[field] = loaded[field];
    });
    callback(null, self);
  });
}
+
// Strip any run of trailing newline/carriage-return characters.
var chomp = function chomp(raw_text) {
  var trailing_newlines = /(\n|\r)+$/;
  return raw_text.replace(trailing_newlines, '');
}
+
// Fetch (and cache) the abbreviated 7-character form of this commit's id.
Commit.prototype.id_abbrev = function(callback) {
  var self = this;
  if(self._id_abbrev) return callback(null, self._id_abbrev);

  self.repo.git.rev_parse({}, self.id, 0, function(err, id) {
    if(err) return callback(err, id);
    self._id_abbrev = chomp(id).substr(0, 7);
    callback(null, self._id_abbrev);
  })
}
+
// Parse an "author Name <email> epoch tz"-style line into an
// [Actor, Date] pair.
var actor = function(line) {
  var match = line.match(/^.+? (.*) (\d+) .*$/);
  var who = match[1];
  var epoch = match[2];
  // Return the parsed actor and its timestamp.
  return [Actor.from_string(who), new Date(parseInt(epoch) * 1000)]
}
+
// Convert `git rev-list --pretty=raw` output into a list of Commits. The
// per-commit record shape is: commit/tree/parent* lines, author and
// committer lines, an optional encoding line, a blank line, the 4-space
// indented message body, then optional --raw / --numstat change records.
Commit.list_from_string = function(repo, text) {
  // Split up the result
  var lines = text.split("\n");

  // require('util').debug("-------------------------------------------------- lines")
  // require('util').debug(require('util').inspect(lines))
  // require('util').debug("-------------------------------------------------- text end")

  // Consume and return the next output line.
  var linesshift = function() {
    return lines.shift();
  };
  var commits = [];
  // Parse all commit messages
  while(lines.length > 0) {
    // "commit <sha>" -> sha (last space-separated token)
    var id = linesshift().split(/ /).pop();
    if(lines.length == 0) break;
    // "tree <sha>" -> lazily-loaded Tree
    var tree = new Tree(repo, linesshift().split(/ /).pop());

    // Let's get the parents
    var parents = [];
    while(lines[0].match(/^parent/)) {
      parents.push(new Commit(repo, linesshift().split(/ /).pop()))
    }
    // Let's get the author and committer
    var actor_info = actor(linesshift());
    var author = actor_info[0];
    var authored_date = actor_info[1]
    var committer_info = actor(linesshift());
    var comitter = committer_info[0]; // (sic: local spelled "comitter")
    var committed_date = committer_info[1];
    // Unpack encoding
    var encoding = lines[0].match(/^encoding/) ? linesshift().split().pop() : '';
    // Jump empty space
    linesshift();
    // Unpack message lines
    var message_lines = [];
    while(lines.length > 0 && lines[0].match(/^ {4}/)) {
      var message_line = linesshift();
      message_lines.push(message_line.substring(4, message_line.length)) ;
    }

    linesshift();
    // Parse --raw lines:
    // ":<a_mode> <b_mode> <a_blob> <b_blob> <what>\t<path>"
    var filechanges = {};
    var fcre = /:(\d+) (\d+) ([a-z0-9]+) ([a-z0-9]+) (\S+)\s+(.+)/;
    var numre = /(\S+)\s+(\S+)\s+(.+)/;
    var line;
    var matched;
    while (lines.length > 0) {
      line = linesshift();
      matched = line.match(fcre);
      if (!matched) break;
      var o = {};
      var xs = ['a_mode', 'b_mode', 'a_blob', 'b_blob', 'what', 'path'];
      for(var i = 0; i < xs.length; i++) {
        o[xs[i]] = matched[i+1];
      }
      filechanges[o.path] = o;
    }
    // --numstat records ("<plus>\t<minus>\t<path>") merged into filechanges.
    while (line) {
      matched = line.match(numre);
      if (!matched) break;
      var o = {};
      var xs = ['plus', 'minus', 'path'];
      for(var i = 0; i < xs.length; i++) {
        o[xs[i]] = matched[i+1];
      }
      filechanges[o.path].plus = o.plus;
      filechanges[o.path].minus = o.minus;
      if (lines.length == 0) break;
      line = linesshift();
    }

    // Push back the line that terminated the stats so the next record sees it.
    if (!matched && line) lines = [line].concat(lines);
    // Move and point to next message
    while(lines[0] != null && lines[0] == '') linesshift();
    // Create commit object
    commits.push(new Commit(repo, id, parents, tree, author, authored_date, comitter, committed_date, message_lines, filechanges));
  }
  // Return all the commits
  return commits;
}
+
// Locate all commits reachable from +reference+ (or from all refs when no
// reference is given). Both +reference+ and +options+ may be omitted;
// +callback+ is always the last argument.
Commit.find_all = function(repo, reference, options, callback) {
  var self = this;
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  reference = args.length ? args.shift() : null;
  options = args.length ? args.shift() : {};

  // Always request the raw pretty format; without a reference, walk all refs.
  if(!options.pretty) options['pretty'] = 'raw';
  if(!reference) options['all'] = true;

  // Shared handler: turn rev-list output into Commit objects.
  var handle = function(err, revision_output) {
    if(err) return callback(err, []);
    callback(null, Commit.list_from_string(repo, revision_output));
  };

  if(reference) {
    repo.git.rev_list(options, reference, handle);
  } else {
    repo.git.rev_list(options, handle);
  }
}
+
// Return the number of commits reachable from +ref+. rev-list emits one
// 40-character SHA plus a newline (41 bytes) per commit, so divide the
// output length by 41.
Commit.count = function(repo, ref, callback) {
  repo.git.rev_list({}, ref, function(err, revision_output) {
    if(err) return callback(err, revision_output);
    // Math.floor instead of parseInt: parseInt coerces the number through
    // a string, which misparses values in exponential notation.
    callback(null, Math.floor(revision_output.length / 41));
  })
}
+
// Show diffs between two trees.
// repo: the repo object
// a: named commit
// b: optional named commit; passing an array here means b is omitted and
//    the array limits the diff to the given paths
// paths: an array of paths to limit the diff
//
// Returns array of diffs (baked)
Commit.diff = function(repo, a, b, paths, callback) {
  var self = this;
  var args = Array.prototype.slice.call(arguments, 2);
  callback = args.pop();
  b = args.length ? args.shift() : null;
  paths = args.length ? args.shift() : [];

  // If b is an array we skipped the b parameter
  if(Array.isArray(b)) {
    paths = b;
    b = null;
  }

  // Turn raw diff text into Diff objects; empty text means no changes.
  // Bugfix: errors were silently swallowed before — propagate them to the
  // callback, consistent with find_all above.
  var deliver = function(err, text) {
    if(err) return callback(err, []);
    if(text) {
      Diff.list_from_string(repo, text, callback);
    } else {
      callback(null, []);
    }
  };

  if(paths.length > 0) {
    // Build the full argument list: a [b] -- paths...
    // (the redundant inner `if(paths.length > 0)` check was removed)
    paths.unshift("--");
    if(b) paths.unshift(b);
    paths.unshift(a);
    // Let's execute the native git function
    repo.git.call_git('', 'diff', '', {full_index:true}, paths, deliver);
  } else {
    repo.git.diff(a, b, {full_index:true}, deliver);
  }
}
+
// Trim any leading non-diff noise from +diff+, then parse the body into a
// list of Diff objects (an input with no diff marker parses as empty).
var process_diff = function(repo, diff, callback) {
  var marker = diff.match(/diff --git a/);
  diff = marker ? diff.substring(marker.index, diff.length) : '';
  // Return the diffs
  Diff.list_from_string(repo, diff, callback);
}
+
// Show this commit's diff: merge commits diff their first two parents,
// everything else goes through `git show`.
Commit.prototype.show = function(callback) {
  var parents = this.parents;
  var diff = null
  var self = this;

  if(parents.length > 1) {
    this.repo.git.native_call("diff " + parents[0].id + "..." + parents[1].id, {full_index:true}, function(err, diff) {
      if(err) return callback(err, diff);
      // Bugfix: inside this callback `this` is no longer the commit; use
      // the captured `self` (previously declared but unused) for the repo.
      process_diff(self.repo, diff, callback);
    });
  } else {
    this.repo.git.show({full_index:true, pretty:'raw'}, this.id, function(err, diff) {
      if(err) return callback(err, diff);
      process_diff(self.repo, diff, callback);
    });
  }
}
+
// Return the diffs for this commit: root commits fall back to show(),
// everything else diffs against the first parent.
Commit.prototype.diffs = function(callback) {
  var parents = this.parents;
  if(parents.length == 0) return this.show(callback);
  Commit.diff(this.repo, parents[0].id, this.id, callback)
}
+
// A commit prints as its SHA id.
Commit.prototype.toString = function() {
  var sha = this.id;
  return sha;
}
+
// Render this commit as a mailbox-style patch, i.e.
// `git format-patch -1 --stdout <id>`.
Commit.prototype.toPatch = function(callback) {
  var options = {'1':true, stdout:true};
  this.repo.git.format_patch(options, this.id, callback);
}
+
diff --git a/node_modules/git/lib/git/commit_stats.js b/node_modules/git/lib/git/commit_stats.js
new file mode 100644
index 00000000..15d8cda0
--- /dev/null
+++ b/node_modules/git/lib/git/commit_stats.js
@@ -0,0 +1,115 @@
+var CommitStats = exports.CommitStats = function(repo, id, files) {
+ var _repo = repo, _id = id, _files = files, _additions = 0, _deletions = 0, _total = 0;
+
+ // Build the stats based on the files
+ _additions = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[1]; }, 0);
+ _deletions = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[2]; }, 0);
+ _total = files.reduce(function(previousValue, currentValue, index, array) { return previousValue + currentValue[3]; }, 0);
+
+ // Internal properties
+ Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: false});
+ Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true});
+ Object.defineProperty(this, "files", { get: function() { return _files; }, set: function(value) { _files = value; }, enumerable: true});
+ Object.defineProperty(this, "additions", { get: function() { return _additions; }, set: function(value) { _additions = value; }, enumerable: true});
+ Object.defineProperty(this, "deletions", { get: function() { return _deletions; }, set: function(value) { _deletions = value; }, enumerable: true});
+ Object.defineProperty(this, "total", { get: function() { return _total; }, set: function(value) { _total = value; }, enumerable: true});
+}
+
// Find all commit stats matching the given criteria
// repo: the repo
// reference: the ref from which to begin (SHA1 or name) or null for all
// options: hash of optional arguments to git
//   max_count: maximum number of commits to fetch
//   skip: number of commits to skip
//
// Returns (via callback) assoc array of CommitStats keyed by sha
CommitStats.find_all = function(repo, reference, options, callback) {
  // options is optional; the last argument is always the callback
  var args = Array.prototype.slice.call(arguments, 2);
  callback = args.pop();
  options = args.length ? args.shift() : {};
  // Always request numstat output so list_from_string can parse file stats
  options['numstat'] = true;

  // Shared handler for both log invocations
  var handle = function(err, output) {
    if(err) return callback(err, output);
    callback(null, CommitStats.list_from_string(repo, output));
  };

  if(reference) {
    // Log starting from the given reference
    repo.git.log(options, reference, handle);
  } else {
    // No reference: log across all refs
    options['all'] = true;
    repo.git.log(options, handle);
  }
}
+
// Parse out commit information into an array of baked CommitStats objects
// repo: the repo
// text: the text output from the git command (raw format with --numstat)
//
// Returns assoc array of CommitStats keyed by commit sha
CommitStats.list_from_string = function(repo, text) {
  var lines = text.trim().split('\n');
  var commits = {};

  while(lines.length > 0) {
    // "commit <sha>" — take the last whitespace-separated token as the id
    var id = lines.shift().replace(/\ +/g, ' ').split(" ").pop();
    // Skip the next three header rows (assumed author/date/blank — TODO confirm
    // against the exact log format used by callers)
    lines.shift();
    lines.shift();
    lines.shift();

    // Collect the indented (4-space) commit message lines
    var message_lines = [];
    while(lines.length > 0 && (lines[0].match(/^ {4}/) || lines[0] == '')) {
      var string = lines.shift().substr(4);
      message_lines.push(string);
    }

    // Skip all empty lines
    while(lines.length > 0 && lines[0] != null && lines[0] == '') lines.shift();

    var files = [];
    // Process the numstat rows: "<additions>\t<deletions>\t<filename>"
    while(lines.length > 0) {
      var parts = lines[0].match(/^([-\d]+)\s+([-\d]+)\s+(.+)/);
      if(!parts) break;
      lines.shift();
      // BUG FIX: the original re-split the row on single spaces, which broke
      // tab-separated numstat output and truncated filenames containing
      // spaces; use the regex capture groups instead.
      // Note: binary files report "-" for both counters, yielding NaN here
      // (same as the original behavior).
      var additions = parseInt(parts[1], 10);
      var deletions = parseInt(parts[2], 10);
      var filename = parts[3];
      var total = additions + deletions;
      files.push([filename, additions, deletions, total]);
    }

    // Skip all empty lines
    while(lines.length > 0 && lines[0] != null && lines[0] == '') lines.shift();
    // Add the commit to the list
    commits[id] = new CommitStats(repo, id, files);
  }

  // Return the commits
  return commits;
}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/config.js b/node_modules/git/lib/git/config.js
new file mode 100644
index 00000000..33ec0023
--- /dev/null
+++ b/node_modules/git/lib/git/config.js
@@ -0,0 +1,48 @@
+var Config = exports.Config = function(repo) {
+ var _repo = repo, _data = null;
+
+ Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _id = value; }, enumerable: false});
+ Object.defineProperty(this, "data", { get: function() {
+ _data = lazy_reader(_repo, 'data', _data);
+ return _data;
+ }, set: function(value) { _data = value; }, enumerable: true});
+}
+
// Return `variable` if already cached; otherwise load `git config --list`
// output and parse it into a hash of key -> value (values may contain '=').
//
// NOTE(review): repo.git.config is callback-based, yet this function spins on
// `while(!done) {}` waiting for it. In single-threaded Node the callback
// cannot run while this loop is spinning, so unless the config call completes
// synchronously this loop never terminates. A proper fix requires making the
// `data` accessor asynchronous (interface change) — flagged, not changed.
// NOTE(review): the `err` argument of the config callback is also ignored.
var lazy_reader = function(repo, name, variable) {
  if(variable) return variable;
  // Control the flow
  var done = false;
  var hash = {};
  // Load the config and parse it
  repo.git.config({list:true}, function(err, output) {
    var lines = output.split("\n");

    lines.forEach(function(line) {
      // Each line is "key=value"; the value may itself contain '='
      var parts = line.split(/=/);
      var key = parts.shift();
      hash[key] = parts.join("=");
    })
    done = true;
  })

  while(!done) {};
  return hash;
}
+
// Look up a config value by key, falling back to default_value when the key
// is absent (or maps to a falsy value).
Config.prototype.fetch = function(key, default_value) {
  var value = this.data[key];
  return value ? value : default_value;
}
+
// Write a config key=value pair and invalidate the cached data on success so
// the next read re-loads from git.
Config.prototype.set = function(key, value, callback) {
  var self = this;

  this.repo.git.config({}, key, value, function(err, output) {
    if(err) return callback(err, output);
    self.data = null;  // force a re-read on next access
    callback(null, output);
  });
}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/diff.js b/node_modules/git/lib/git/diff.js
new file mode 100644
index 00000000..eba5a4e7
--- /dev/null
+++ b/node_modules/git/lib/git/diff.js
@@ -0,0 +1,82 @@
+var Blob = require('./blob').Blob;
+
// A single file's diff between two trees.
// a_blob/b_blob are sha strings; the all-zero sha (or a missing sha) marks a
// side that does not exist (created or deleted file).
var Diff = exports.Diff = function(repo, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff) {
  var _repo = repo, _a_path = a_path, _b_path = b_path
  var _a_mode = a_mode, _b_mode = b_mode, _diff = diff;
  // Instantiate Blob objects only for real (non-null-sha) blobs
  var null_sha = /^0{40}$/;
  var _a_blob = (a_blob && !a_blob.match(null_sha)) ? new Blob(repo, a_blob) : null;
  var _b_blob = (b_blob && !b_blob.match(null_sha)) ? new Blob(repo, b_blob) : null;
  // A missing a-side means a new file, a missing b-side a deleted one
  var _new_file = new_file || _a_blob == null;
  var _deleted_file = deleted_file || _b_blob == null;

  Object.defineProperty(this, "repo", { get: function() { return _repo; }, enumerable: true});
  Object.defineProperty(this, "a_path", { get: function() { return _a_path; }, enumerable: true});
  Object.defineProperty(this, "b_path", { get: function() { return _b_path; }, enumerable: true});
  Object.defineProperty(this, "a_mode", { get: function() { return _a_mode; }, enumerable: true});
  Object.defineProperty(this, "b_mode", { get: function() { return _b_mode; }, enumerable: true});
  Object.defineProperty(this, "diff", { get: function() { return _diff; }, enumerable: true});
  Object.defineProperty(this, "a_blob", { get: function() { return _a_blob; }, enumerable: true});
  Object.defineProperty(this, "b_blob", { get: function() { return _b_blob; }, enumerable: true});
  Object.defineProperty(this, "new_file", { get: function() { return _new_file; }, enumerable: true});
  Object.defineProperty(this, "deleted_file", { get: function() { return _deleted_file; }, enumerable: true});
}
+
// Create a list of Diff objects from a raw `git diff` text.
// Parses one "diff --git a/… b/…" section per loop iteration.
Diff.list_from_string = function(repo, text, callback) {
  // Ensure we don't have white space at the end
  text = text.trim();
  var lines = text.split("\n");
  var diffs = [];

  while(text.length > 0 && lines.length > 0) {
    // BUG FIX: these were previously declared once outside the loop, so
    // new_file/deleted_file/mode/blob state from one file section leaked
    // into every following section. Reset them per iteration.
    var a_path = null, b_path = null, a_mode = null, b_mode = null;
    var a_blob = null, b_blob = null, new_file = false, deleted_file = false;

    // Section header: "diff --git a/<path> b/<path>"
    var parts = lines.shift().match(/^diff --git a\/(.+?) b\/(.+)$/);
    a_path = parts[1];
    b_path = parts[2];

    // Optional mode-change lines (guarded: the header may be the last line)
    if(lines.length > 0 && lines[0].match(/^old mode/)) {
      a_mode = lines.shift().match(/^old mode (\d+)/)[1]
      b_mode = lines.shift().match(/^new mode (\d+)/)[1]
    }

    if(lines.length == 0 || lines[0].match(/^diff --git/)) {
      // Mode-only change: no blobs, no hunk body
      diffs.push(new Diff(repo, a_path, b_path, null, null, a_mode, b_mode, false, false, null));
    } else {
      if(lines[0].match(/^new file/)) {
        b_mode = lines.shift().match(/^new file mode (.+)$/)[1];
        a_mode = null;
        new_file = true;
      } else if(lines[0].match(/^deleted file/)) {
        a_mode = lines.shift().match(/^deleted file mode (.+)$/)[1];
        b_mode = null;
        deleted_file = true;
      }
      // Index line: "index <sha>..<sha>[ <mode>]"; only overwrite b_mode when
      // the trailing mode is actually present
      parts = lines.shift().match(/^index ([0-9A-Fa-f]+)\.\.([0-9A-Fa-f]+) ?(.+)?$/);
      a_blob = parts[1];
      b_blob = parts[2];
      if(parts[3]) b_mode = parts[3];

      // Collect the hunk body until the next section header
      var diff_lines = [];
      while(lines.length > 0 && !lines[0].match(/^diff/)) {
        diff_lines.push(lines.shift());
      }

      var diff = diff_lines.join("\n");
      diffs.push(new Diff(repo, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff));
    }
  }

  // Return the list of diffs
  callback(null, diffs);
}
+
diff --git a/node_modules/git/lib/git/file_index.js b/node_modules/git/lib/git/file_index.js
new file mode 100644
index 00000000..1fd8b32e
--- /dev/null
+++ b/node_modules/git/lib/git/file_index.js
@@ -0,0 +1,241 @@
+var util = require('util'),
+ fs = require('fs');
+
// In-memory view of the repo's "file-index" file.
// Asynchronously stats and parses the index; callback receives (err, index).
// Files larger than FileIndex.max_file_size are rejected.
var FileIndex = exports.FileIndex = function(repo_path, callback) {
  var self = this;
  var _repo_path = repo_path;
  var _index_file = repo_path + "/file-index";
  // Parsed index state, filled in by read_index
  var _sha_count = 0, _commit_index = {}, _commit_order = {}, _all_files = {};

  // Read-only path properties
  Object.defineProperty(this, "repo_path", { get: function() { return _repo_path; }, enumerable: true});
  Object.defineProperty(this, "index_file", { get: function() { return _index_file; }, enumerable: true});
  // Mutable parse state
  Object.defineProperty(this, "sha_count", { get: function() { return _sha_count; }, set: function(value) { _sha_count = value; }, enumerable: true});
  Object.defineProperty(this, "commit_index", { get: function() { return _commit_index; }, set: function(value) { _commit_index = value; }, enumerable: true});
  Object.defineProperty(this, "commit_order", { get: function() { return _commit_order; }, set: function(value) { _commit_order = value; }, enumerable: true});
  Object.defineProperty(this, "all_files", { get: function() { return _all_files; }, set: function(value) { _all_files = value; }, enumerable: true});

  fs.stat(_index_file, function(err, stat) {
    if(err) return callback(err, stat);

    if(stat.isFile() && stat.size < FileIndex.max_file_size) {
      // Parse the index file and hand the populated instance back
      read_index(self, _index_file, callback);
    } else {
      callback("index file not found", null);
    }
  });
}
+
// Upper bound (in bytes) for a readable file-index; larger files are rejected
// by the FileIndex constructor
FileIndex.max_file_size = 10000000;
+
// Strip any trailing newline/carriage-return characters from raw_text
var chomp = function chomp(raw_text) {
  return raw_text.replace(/[\n\r]+$/, '');
}
+
// Return the directory portion of file_name, or "." when there is none
var dirname = function(file_name) {
  var elements = file_name.split('/');
  elements.pop();
  return elements.length === 0 ? "." : elements.join("/");
}
+
// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO
// TODO Needs to be async reading files in pieces and parsing them
// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO
// Read and parse the file-index for git, populating the passed-in FileIndex:
//   commit_index[sha]  = {files: [...], parents: [...]}
//   commit_order[sha]  = monotonically increasing read order
//   all_files[path]    = shas touching that file/directory (newest first)
// File format: a 40-hex sha line (sha + parent shas) introduces a commit,
// followed by one file path per line until the next sha line.
var read_index = function(file_index, _index_file, callback) {
  var current_sha = null;

  // NOTE: reads the whole index into memory in one call (see TODO above)
  fs.readFile(_index_file, 'ascii', function(err, data) {
    if(err) return callback(err, data);
    // Split the text into lines
    var lines = data.split("\n");
    // Iterate over all the lines
    for(var i = 0; i < lines.length; i++) {
      var line = lines[i];

      // Ensure it's a line with a starting sha
      if(line.match(/^(\w{40})/)) {
        // Unpack all the sha values (first one being the current_sha and the rest the parents)
        var shas = line.match(/(\w{40})/g);
        current_sha = shas.shift();
        // The rest of the sha's are the parents
        file_index.commit_index[current_sha] = {files:[], parents:shas}
        file_index.commit_order[current_sha] = file_index.sha_count;
        file_index.sha_count = file_index.sha_count + 1;
      } else {
        // A file path belonging to the most recent commit line
        var file_name = chomp(line);
        var tree = '';
        // Retrieve the directory name for the file passed in
        var dir = dirname(file_name);
        // Ensure it's not an empty line
        if(line.length > 0) {
          // Register the sha against every ancestor directory of the file
          var dir_parts = dir.split("/");
          for(var j = 0; j < dir_parts.length; j++) {
            var part = dir_parts[j];

            if(dir_parts[j] != '.') {
              tree = tree + part + '/'
              if(file_index.all_files[tree] == null) file_index.all_files[tree] = [];
              // unshift keeps the most recently read sha at the front
              if(file_index.all_files[tree].indexOf(current_sha) == -1)
                file_index.all_files[tree].unshift(current_sha);
            }
          }

          // Register the sha against the file itself and record the file on the commit
          if(!file_index.all_files[file_name]) file_index.all_files[file_name] = [];
          file_index.all_files[file_name].unshift(current_sha);
          file_index.commit_index[current_sha].files.push(file_name);
        }
      }
    }
    // Return the parsed index
    callback(null, file_index);
  });
}
+
// Builds a list of all commits reachable from a single commit by walking the
// parent links recorded in commit_index (breadth-first, de-duplicated), then
// returns them sorted newest-first via sort_commits.
FileIndex.prototype.commits_from = function(commit_sha, callback) {
  if(Array.isArray(commit_sha)) return callback("unsuported reference", null);

  var seen = {};
  var result = [];
  var queue = [commit_sha];

  while(queue.length > 0) {
    var sha = queue.shift();
    if(seen[sha]) continue;

    seen[sha] = true;
    result.push(sha);

    // Enqueue the parents of any commit known to the index
    var commit = this.commit_index[sha];
    if(commit) {
      commit.parents.forEach(function(parent_sha) {
        queue.push(parent_sha);
      });
    }
  }

  callback(null, this.sort_commits(result));
}
+
// Order shas by their position in commit_order (most recently read first)
FileIndex.prototype.sort_commits = function(sha_array) {
  var order = this.commit_order;
  return sha_array.sort(function(a, b) {
    return compare(parseInt(order[b]), parseInt(order[a]));
  });
}
+
// Coerce d into a Date: accepts Date, [year, month, day], epoch number,
// date string, or a {year, month, date} object; anything else yields NaN.
var convert = function(d) {
  if(d.constructor === Date) return d;
  if(d.constructor === Array) return new Date(d[0], d[1], d[2]);
  if(d.constructor === Number) return new Date(d);
  if(d.constructor === String) return new Date(d);
  if(typeof d === "object") return new Date(d.year, d.month, d.date);
  return NaN;
}
+
+var compare = function(a,b) {
+ return (
+ isFinite(a=convert(a).valueOf()) &&
+ isFinite(b=convert(b).valueOf()) ?
+ (a>b)-(a 0) {
+ self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
+ callback(err, result);
+ })
+ } else if(Object.keys(options).length == 0){
+ // Fetch the file index (will create a file index on the first call)
+ file_index(self, function(err, _file_index) {
+ if(err) return callback(err, _file_index);
+ // Parse the revision
+ self.rev_parse({}, reference, 0, function(err, ref) {
+ if(err) return callback(err, ref);
+ // Fetch the commits from the revision passed in
+ _file_index.commits_from(ref, function(err, commits) {
+ if(err) {
+ self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
+ callback(err, result);
+ })
+ } else {
+ callback(null, commits.join("\n") + "\n");
+ }
+ })
+ });
+ })
+ } else {
+ self.rev_parse({}, reference, 0, function(err, ref) {
+ if(err) return callback(err, ref);
+
+ if(Array.isArray(ref)) {
+ self.call_git('', 'rev_list', '', options, [reference], function(err, result) {
+ callback(err, result);
+ })
+ } else {
+ try {
+ // Try to execute revision fetch
+ self.repository.rev_list(ref, options, function(err, result) {
+ callback(err, result);
+ })
+ } catch(err) {
+ callback(err, null);
+ }
+ }
+ });
+ }
+}
+
// Strip any trailing newline/carriage-return characters from raw_text
var chomp = function chomp(raw_text) {
  return raw_text.replace(/[\n\r]+$/, '');
}
+
// Resolve a reference string (sha, range "a..b", branch/remote/tag name, or
// packed ref) to a sha. At level 0 results are delivered via callback; at
// deeper recursion levels the resolved value is returned directly.
// Falls back to the native `git rev-parse` when nothing matches locally.
Git.prototype.rev_parse = function(options, string, level, callback) {
  if(string != null && string.constructor != String) return callback('only supports single sha reference');
  var self = this;

  // `level` is optional: the last argument is always the callback
  var args = Array.prototype.slice.call(arguments, 2);
  var callback = args.pop();
  level = args.length ? args.shift() : 0;

  // Range reference "sha1..sha2": resolve both sides
  if(string.match(/\.\./)) {
    var parts = string.split("..");
    var sha1 = parts[0], sha2 = parts[1];
    var value = [this.rev_parse({}, sha1, level + 1, callback), this.rev_parse({}, sha2, level + 1, callback)];
    if(level == 0) return callback(null, value);
  }

  // A full sha is being passed in: chomp and return
  if(string.match(/^[0-9a-f]{40}$/)) {
    var value = chomp(string);
    if(level == 0) {
      return callback(null, value);
    } else {
      return value;
    }
  }

  // Check the loose ref locations (heads, remotes, tags) in order
  var ref_dirs = ["/refs/heads/", "/refs/remotes/", "/refs/tags/"];
  for(var d = 0; d < ref_dirs.length; d++) {
    var head = this.git_directory + ref_dirs[d] + string;
    try {
      var content = chomp(fs.readFileSync(head, 'utf8'));
      if(level == 0) {
        return callback(null, content);
      } else {
        return content;
      }
    } catch(err) {}  // ref file missing here — try the next location
  }

  // Check packed-refs file, too
  var packref = this.git_directory + "/packed-refs";
  try {
    // BUG FIX: the file was never actually read — `data` was undefined, so
    // this branch always threw and packed refs were silently ignored.
    var data = fs.readFileSync(packref, 'utf8');
    var parts = data.split(/\n/);
    // Locate a packed ref whose short name matches the requested string
    for(var i = 0; i < parts.length; i++) {
      var match_parts = parts[i].match(/^(\w{40}) refs\/.+?\/(.*?)$/);
      if(match_parts) {
        var ref = match_parts[1];
        // BUG FIX: the ref name is capture group 2 (the regex only has two
        // groups); index [3] was always undefined so no packed ref matched.
        if(new RegExp(string + '$').test(match_parts[2])) {
          if(level == 0) {
            return callback(null, chomp(ref));
          } else {
            return chomp(ref);
          }
        }
      }
    }
  } catch(err) {}  // unreadable/missing packed-refs — fall through to git

  // Last resort: ask native git to resolve the reference
  self.call_git('', 'rev-parse', '', options, [string], function(err, result) {
    callback(null, result ? chomp(result) : result);
  })
}
+
// List tree content for a treeish; paths and options are optional.
// Errors (bad treeish, unknown sha) are reported through the callback.
Git.prototype.ls_tree = function(treeish, paths, options, callback) {
  var self = this;
  // The last argument is always the callback; fill in optional params
  var args = Array.prototype.slice.call(arguments, 1);
  var callback = args.pop();
  paths = args.length ? args.shift() : [];
  if(!paths) paths = [];
  options = args.length ? args.shift() : {};

  try {
    // Resolve the treeish to a sha first
    this.rev_parse({}, treeish, function(err, sha) {
      if(err) return callback(err, sha);
      // List the tree (options['r'] toggles recursive listing)
      var tree = self.repository.ls_tree(sha, flatten(paths), options['r']);
      if(tree == '') return callback('no such sha found', null);
      callback(null, tree);
    })
  } catch(err) {
    callback(err, null);
  }
}
+
// Cat a file by type selector: "t" = object type, "s" = size, "p" = content.
Git.prototype.cat_file = function(type, ref, callback) {
  if(type == "t") {
    this.file_type(ref, callback);
  } else if(type == "s") {
    this.file_size(ref, callback);
  } else if(type == "p") {
    callback(null, this.repository.cat_file(ref));
  } else {
    // BUG FIX: an unrecognized type previously never invoked the callback,
    // leaving callers hanging forever. Report it as an error instead.
    callback('unknown cat_file type: ' + type, null);
  }
}
+
// Report the size of the object at ref via the internal repository
Git.prototype.file_size = function(ref, callback) {
  var size = this.repository.cat_file_size(ref);
  callback(null, size);
}
+
// Make a directory
// dir: path of the directory to create, relative to the git directory
//
// Returns nothing (completion reported via callback)
Git.prototype.fs_mkdir = function(dir, callback) {
  GitFileOperations.fs_mkdir(this.git_directory + "/" + dir, callback);
}
+
// Initialize a new git repository (create physical setup).
// With empty options the internal Repository.init is used; otherwise the
// native `git init` is invoked and this Git instance is returned on success.
Git.prototype.init = function(options, callback) {
  var self = this;
  // BUG FIX: `Array.prototype.slice(arguments)` called slice on an empty
  // array (and shadowed `arguments`), producing garbage. Collect any extra
  // cli arguments properly: everything after options, minus the callback.
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  var extra_args = args;

  if(Object.keys(options).length == 0) {
    Repository.init(this.git_directory, callback);
  } else {
    // Execute init via call_git and hand back this Git instance
    this.call_git('', 'init', '', options, extra_args, function(err, result) {
      if(err) return callback(err, result);
      callback(null, self);
    });
  }
}
+
// Clone a directory
// NOTE(review): not implemented — the body is empty and the callback is never
// invoked, so callers awaiting completion will hang. Left as-is; implementing
// requires wiring `git clone` through call_git.
Git.prototype.clone = function(options, original_path, target_path, callback) {
}
+
+// Generate diff from the changes between two shas
+// Git.prototype.diff = function(options, sha1, sha2, callback) {
+// }
+//
+// var simple_diff = function(repo, options, sha1, sha2, callback) {
+//
+// }
+//
+// var native_diff = function(repo, options, sha1, sha2, base, paths, callback) {
+//
+// }
+
// Flatten an array one level, e.g. [[1,2],[3]] -> [1,2,3]
var flatten = function(array) {
  var result = [];
  array.forEach(function(element) {
    result = result.concat(element);
  });
  return result;
}
+
// Generate a unified-diff patch between two commits (or between commit1 and
// its first parent when commit2 is not given).
// options.full_index: emit full 40-char blob shas on the index lines.
// Note: any internal failure is collapsed into the generic error
// 'tree was bad or lcs is not working' by the outer try/catch.
Git.prototype.diff = function(commit1, commit2, options, callback) {
  try {
    var self = this;
    // options is optional; the last argument is always the callback
    var args = Array.prototype.slice.call(arguments, 2);
    var callback = args.pop();
    options = args.length ? args.shift() : {};

    // Initialize patch variable
    var patch = '', commit_obj1 = null, tree1 = null, tree2 = null;
    // Retrieve the first commit object and its tree
    var commit_obj1 = self.repository.get_object_by_sha1(commit1);
    var tree1 = commit_obj1.tree;

    if(commit2) {
      tree2 = self.repository.get_object_by_sha1(commit2).tree;
    } else {
      // Default to the first parent's tree
      tree2 = self.repository.get_object_by_sha1(commit_obj1.parent[0]).tree;
    }

    var qdiff = self.repository.quick_diff(tree1, tree2).sort();
    qdiff.forEach(function(diff_arr) {
      // Unpack [path, status, shaA, shaB] for this changed file
      var path = diff_arr[0];
      var status = diff_arr[1];
      var treeSHA1 = diff_arr[2];
      var treeSHA2 = diff_arr[3];
      var format = 'unified';
      var lines = 3;  // context lines per hunk
      var output = '';
      var file_length_difference = 0;

      // Fetch both file contents ('' when that side does not exist)
      var fileA = treeSHA1 ? self.repository.cat_file(treeSHA1) : '';
      var fileB = treeSHA2 ? self.repository.cat_file(treeSHA2) : '';

      // Use the null sha for a missing side
      var sha1 = treeSHA1 || '0000000000000000000000000000000000000000';
      var sha2 = treeSHA2 || '0000000000000000000000000000000000000000';

      // Split up data
      var data_old = fileA.trim().split(/\n/).map(function(e) { return chomp(e); });
      var data_new = fileB.trim().split(/\n/).map(function(e) { return chomp(e); });
      // Javascript split's a file into [''] if it's an empty file
      if(data_old.length == 1 && data_old[0] == '') data_old = [];
      if(data_new.length == 1 && data_new[0] == '') data_new = [];

      // Get diffs
      var diffs = Difference.LCS.diff(data_old, data_new);
      if(diffs.length > 0) {
        // Create a/ and b/ prefixed paths
        var a_path = "a/" + path.replace(/\.\//g, '');
        var b_path = "b/" + path.replace(/\.\//g, '');
        // Let's create the header
        var header = "diff --git " + a_path + " " + b_path;
        if(options['full_index']) {
          header = header + '\n' + 'index ' + sha1 + '..' + sha2;
          // BUG FIX: this branch emitted a stray quote ("' 100644"),
          // corrupting the index line; now matches the short-sha branch.
          if(treeSHA2) header = header + ' 100644';
        } else {
          header = header + '\n' + 'index ' + sha1.substr(0, 7) + '..' + sha2.substr(0, 7);
          if(treeSHA2) header = header + ' 100644';
        }

        header = header + '\n--- ' + (treeSHA1 ? a_path : '/dev/null');
        header = header + '\n+++ ' + (treeSHA2 ? b_path : '/dev/null');
        header = header + '\n';

        // Fold overlapping hunks together (standard 3-line context)
        var old_hunk = null, hunk = null;
        diffs.forEach(function(piece) {

          try {
            hunk = new Difference.LCS.Hunk(data_old, data_new, piece, lines, file_length_difference);
            file_length_difference = hunk.file_length_difference;

            if(old_hunk) {
              if(lines > 0 && hunk.overlaps(old_hunk)) {
                hunk.unshift(old_hunk);
              } else {
                output = output + old_hunk.diff(format);
              }
            }
          } catch(err) {}  // malformed piece: keep the previous hunk state

          old_hunk = hunk;
          output = output + '\n';
        });

        // Flush the trailing hunk and append this file's diff to the patch
        output = output + old_hunk.diff(format);
        output = output + '\n';
        patch = patch + header + output.trimLeft();
      }
    });

    // Return the patch
    callback(null, patch);
  } catch(err) {
    callback('tree was bad or lcs is not working', null);
  }
}
+
// Check whether `path` exists relative to the git directory
Git.prototype.fs_exist = function(path, callback) {
  var base = this.git_directory;
  GitFileOperations.fs_exist(base, path, callback);
}
+
// Write a normal file to the filesystem
// file: path relative to the git dir
// content: string contents to be written
//
// Returns nothing (completion reported via callback)
Git.prototype.fs_write = function(file, content, callback) {
  var base = this.git_directory;
  GitFileOperations.fs_write(base, file, content, callback);
}
+
// Run `git log --raw --no-abbrev --numstat`, optionally limited to a path.
// Note: the `commit` parameter is currently unused by this implementation.
Git.prototype.log = function(commit, path, options, callback) {
  // BUG FIX: `args` was assigned without `var`, leaking a global variable.
  var args = ['--raw', '--no-abbrev', '--numstat'];
  if (path) {
    // Separate paths from revisions with the `--` sentinel
    args.push('--');
    args.push(path);
  }
  // Keep the output machine-parseable
  options.color = 'never';
  this.call_git('', 'log', '', options, args, callback);
}
+
// Select the objects that exist
// object_ids: array of object shas
//
// Returns (via callback) the array of ids that exist in the repository
Git.prototype.select_existing_objects = function(object_ids, callback) {
  var self = this;
  var existing_object_ids = [];
  var remaining = object_ids.length;
  var finished = false;

  // BUG FIX: the original fired the final callback synchronously before any
  // asynchronous object_exists results arrived, and its `var i` capture meant
  // `object_ids[i]` was undefined inside the callbacks. Track completion with
  // a counter and capture each id per iteration instead.
  if(remaining == 0) return callback(null, existing_object_ids);

  object_ids.forEach(function(object_id) {
    self.repository.object_exists(object_id, function(err, result) {
      if(finished) return;  // a previous error already completed the call
      if(err) { finished = true; return callback(err, result); }
      if(result) existing_object_ids.push(object_id);
      remaining = remaining - 1;
      // All lookups answered: deliver the surviving ids
      if(remaining == 0) callback(null, existing_object_ids);
    });
  });
}
+
// Format the patch for a single reference.
// NOTE(review): the subcommand is passed as 'format_patch' (underscore); other
// call sites in this file mix 'rev_list' and 'rev-parse', which suggests
// call_git maps underscores to hyphens — confirm, otherwise this should be
// the literal git command name 'format-patch'.
Git.prototype.format_patch = function(options, reference, callback) {
  this.call_git('', 'format_patch', '', options, [reference], function(err, result) {
    callback(err, result);
  })
}
+
// Fetch the blame output. Variadic: trailing callback, optional leading
// options hash, remaining arguments passed straight to `git blame`.
Git.prototype.blame = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  // Delegate to the native git blame command
  this.call_git('', 'blame', '', options, args, callback);
}
+
// Strip a single trailing slash from every path key of the commits map,
// keeping the associated sha values intact.
var clean_paths = function(commits) {
  var new_commits = {};
  Object.keys(commits).forEach(function(file) {
    var sha = commits[file];
    var key = file.charAt(file.length - 1) == '/' ? file.substring(0, file.length - 1) : file;
    new_commits[key] = sha;
  })
  return new_commits;
}
+
// Fetch the blame tree: a map of path -> last commit sha touching it,
// computed from the file index for the given commit and optional path.
Git.prototype.blame_tree = function(commit, path, callback) {
  var self = this;
  // path is optional; the last argument is always the callback
  var args = Array.prototype.slice.call(arguments, 1);
  var callback = args.pop();
  path = args.length ? args.shift() : null;

  // Normalize path into a trailing-slash directory string, or '' when unset
  path = path != null && path != '' ? [path].join("/").toString() + '/' : path;
  path = !path || path.constructor != String ? '' : path;

  // Pipeline: file_index -> rev_parse -> looking_for -> last_commits
  file_index(this, function(err, index) {
    if(err) return callback(err, index);

    self.rev_parse({}, commit, 0, function(err, resolved_sha) {
      if(err) return callback(err, resolved_sha);

      self.looking_for(commit, path, function(err, wanted_paths) {
        if(err) return callback(err, wanted_paths);

        index.last_commits(resolved_sha, wanted_paths, function(err, commits) {
          if(err) return callback(err, commits);

          // Strip trailing slashes from directory keys before returning
          callback(null, clean_paths(commits));
        });
      });
    });
  });
}
+
// Build the list of entry paths (files plus trailing-slash directories)
// directly under `path` in the tree of `commit`.
Git.prototype.looking_for = function(commit, path, callback) {
  var self = this;
  // path is optional; the last argument is always the callback
  var args = Array.prototype.slice.call(arguments, 1);
  var callback = args.pop();
  path = args.length ? args.shift() : null;
  var file = null;

  // Fetch the commit sha
  self.rev_parse({}, commit, 0, function(err, rev_parse_output) {
    if(err) return callback(err, rev_parse_output);

    // Fetch the sub tree
    self.repository.get_subtree(rev_parse_output, path, function(err, tree_sha) {
      if(err) return callback(err, tree_sha);

      // Contains the files
      var looking_for = [];
      // Fetch and return the object by the tree sha
      var object = self.repository.get_object_by_sha1(tree_sha);
      // Process all the entries for the object
      object.entries.forEach(function(entry) {
        // Prefix entries with the directory path unless it is the tree root
        file = path && !(path == '' || path == '.' || path == './') ? path + "/" + entry.name : entry.name;
        // Ensure no double path characters
        // NOTE(review): String.replace with a string pattern only replaces the
        // FIRST '//' occurrence — presumably at most one can appear here, but
        // confirm; use a /\/\//g regex otherwise.
        file = file.replace('//', '/');
        // Add a slash if it's a directory
        if(entry.type == 'directory') file = file + "/";
        // Add to list of looking_for entries
        looking_for.push(file);
      });

      // Return the entries
      return callback(null, looking_for);
    });
  });
}
+
// Perform a commit. Variadic: trailing callback, optional leading options
// hash, remaining arguments passed straight to `git commit`.
Git.prototype.commit = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'commit', '', options, args, callback);
}
+
// Fetch/set config. Variadic: trailing callback, optional leading options
// hash, remaining arguments passed straight to `git config`.
Git.prototype.config = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'config', '', options, args, callback);
}
+
// Execute the add command. Variadic: trailing callback, optional leading
// options hash, remaining arguments passed straight to `git add`.
Git.prototype.add = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'add', '', options, args, callback);
}
+
// Execute the remove command. Variadic: trailing callback, optional leading
// options hash, remaining arguments passed straight to `git rm`.
Git.prototype.remove = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'rm', '', options, args, callback);
}
+
// Execute ls-files. Variadic: trailing callback, optional leading options
// hash, remaining arguments passed straight to `git ls-files`.
Git.prototype.ls_files = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'ls-files', '', options, args, callback);
}
+
// Execute diff-files. Variadic: trailing callback, optional leading options
// hash, remaining arguments passed straight to `git diff-files`.
Git.prototype.diff_files = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'diff-files', '', options, args, callback);
}
+
// Execute diff-index. Variadic: trailing callback, optional leading options
// hash, remaining arguments passed straight to `git diff-index`.
Git.prototype.diff_index = function() {
  var args = Array.prototype.slice.call(arguments, 0);
  var callback = args.pop();
  var options = args.length ? args.shift() : {};

  this.call_git('', 'diff-index', '', options, args, callback);
}
+
// Report the git object type for ref via the internal repository
Git.prototype.file_type = function(ref, callback) {
  var type = this.repository.cat_file_type(ref);
  return callback(null, type);
}
+
// Store raw content of the given object type in the object database
Git.prototype.put_raw_object = function(content, type, callback) {
  return this.repository.put_raw_object(content, type, callback);
}
+
// Resolve an id to a commit sha: commits map to themselves, tags to their
// target object, and anything else to the empty string.
Git.prototype.commit_from_sha = function(id) {
  var repository = new Repository(this.git_directory);
  var object = repository.get_object_by_sha1(id);

  switch(object.type) {
    case "commit": return id;
    case "tag": return object.object;
    default: return '';
  }
}
+
+
+// // ===================================================================================================
+// //
+// // Decorates the Class prototype with functions wrapping git native functions (if not defined already)
+// //
+// // ===================================================================================================
+// Git.prototype.call_git('', 'help', '', {}, ['--all'], function(err, result) {
+// var index = result.indexOf("-----------");
+// result = result.substr(index);
+// var lines = result.trim().split("\n");
+// // Ship the first line
+// lines.shift();
+// // Process all the lines
+// while(lines.length > 0 && lines[0] != '') {
+// var line = lines.shift().trim().replace(/ +/g, ' ');
+// var parts = line.split(" ");
+//
+// parts.forEach(function(command) {
+// var function_name = command.replace(/\-/g, '_');
+// // For each entry create a new function if it does not exist on the prototype
+// if(Git.prototype[function_name] == null) {
+// Git.prototype[function_name] = function() {
+// // Unpack parameters as commit might be null
+// var args = Array.prototype.slice.call(arguments, 0);
+// callback = args.pop();
+// var options = args.length ? args.shift() : {};
+// var arguments = args;
+// // Execute blame command
+// this.call_git('', command, '', options, arguments, function(err, result) {
+// callback(err, result);
+// });
+// }
+// }
+// });
+//
+// }
+//
+// // callback(null, null);
+// pre_loading_done = true
+// // var g = new Git("..../")
+// });
+
diff --git a/node_modules/git/lib/git/git_file_operations.js b/node_modules/git/lib/git/git_file_operations.js
new file mode 100644
index 00000000..8af908c1
--- /dev/null
+++ b/node_modules/git/lib/git/git_file_operations.js
@@ -0,0 +1,170 @@
+var util = require('util'),
+ fs = require('fs'),
+ exec = require('child_process').exec;
+
+// Namespace holder for the static file-system helpers defined below.
+var GitFileOperations = exports.GitFileOperations = function() {}
+
+// Streaming glob function: synchronously walks +path+ depth-first and emits
+// a "data" event ({path, stat}) on +stream+ for every entry encountered,
+// directories included.
+// NOTE(review): uses blocking readdirSync/statSync — the event loop stalls
+// while a large tree is walked.
+var streaming_glob_function = function(path, stream) {
+ var entries = fs.readdirSync(path);
+ entries.forEach(function(entry) {
+ var entry_path = path + "/" + entry;
+ var stat = fs.statSync(entry_path);
+
+ if(stat.isDirectory()) {
+ stream.emit("data", {path:entry_path, stat:stat});
+ streaming_glob_function(entry_path, stream);
+ } else {
+ stream.emit("data", {path:entry_path, stat:stat});
+ }
+ })
+}
+
+var stat_with_entry = function(entry_path, stream, callback) {
+ return function() {
+ fs.stat(entry_path, function(err, stat) {
+ if(stat.isDirectory()) {
+ // Dive into the directory
+ streaming_glob_function(entry_path, stream);
+ // Emit the directory and then update the count
+ stream.emit("data", {path:entry_path, stat:stat});
+ callback();
+ } else if(stat.isFile()) {
+ // Update the number of processed directories and emit the data event
+ stream.emit("data", {path:entry_path, stat:stat});
+ callback();
+ }
+ });
+ }
+}
+
+// Glob function for the file system
+GitFileOperations.glob_streaming = function(path) {
+ // Comptability
+ var nextTick = global.setImmediate || process.nextTick;
+ // Create a stream object
+ var stream = new process.EventEmitter();
+ var processed_directories_count = 0;
+ var top_level_files_count = -1;
+ // Tick method
+ var tick_function = function() {
+ // If we are done emit end otherwise execute the method again
+ processed_directories_count == top_level_files_count ? stream.emit("end") : nextTick(tick_function);
+ }
+ // set nextTick handler into action
+ nextTick(tick_function);
+ // Fetch the top directory
+ fs.readdir(path, function(err, entries) {
+ // The top level of files that need to be finished processing for us to be done
+ if(entries !== undefined && entries.length > 0) {
+ entries.sort(function(a, b) {
+ return a > b;
+ });
+
+ top_level_files_count = entries.length;
+ // Execute the entries
+ var procesEntry = function(i) {
+ // Entry path
+ var entry_path = path + "/" + entries[i];
+ // Build glob function
+ stat_with_entry(entry_path, stream, function() {
+ processed_directories_count = processed_directories_count + 1;
+ if (++i < entries.length) {
+ procesEntry(i);
+ }
+ })();
+ };
+ procesEntry(0);
+ } else {
+ top_level_files_count = 0;
+ }
+ });
+ // Return the stream for execution
+ return stream;
+}
+
+// Execute recursive glob function (private function)
+// Synchronously collects every file path under +path+ into +files+;
+// directories are descended into but not recorded themselves.
+var glob_function = function(path, files) {
+ var entries = fs.readdirSync(path);
+ entries.forEach(function(entry) {
+ var entry_path = path + "/" + entry;
+
+ var stat = fs.statSync(entry_path);
+ if(stat.isDirectory()) {
+ glob_function(entry_path, files);
+ } else {
+ files.push(entry_path);
+ }
+ })
+}
+
+// Glob function for the file system
+// +files+ is optional; when omitted a fresh array is used.
+// Calls back with (null, files). The traversal itself is synchronous.
+GitFileOperations.glob = function(path, files, callback) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ callback = args.pop();
+ files = args.length ? args.shift() : [];
+ // Fetch all the files
+ glob_function(path, files);
+ callback(null, files);
+}
+
+// Read a file
+// Reads <path>/<file> and passes (err, data) to +callback+.
+GitFileOperations.fs_read = function(path, file, callback) {
+ fs.readFile(path + "/" + file, callback);
+}
+
+// Make a directory
+// 16877 is decimal for octal 040755 (drwxr-xr-x); fs.mkdir only applies
+// the permission bits.
+GitFileOperations.fs_mkdir = function(dir, callback) {
+ fs.mkdir(dir, 16877, callback);
+}
+
+// Check if a directory exists
+// NOTE(review): unimplemented stub — the callback is never invoked, so any
+// caller waiting on it will hang.
+GitFileOperations.fs_exist = function(dir, path, callback) {
+}
+
+// Delete directory
+GitFileOperations.fs_rmdir_r = function(dir, callback) {
+ // Copy the old directory to the new one
+ var child = exec('rm -rf ' + dir, function (error, stdout, stderr) {
+ if (error !== null) {
+ util.puts('exec error: ' + error);
+ return callback(error, null);
+ }
+ return callback(null, null);
+ });
+}
+
+// Write file
+// Writes +content+ to <dir>/<file>, synchronously creating any missing
+// intermediate directories named inside +file+ first, then writing the
+// file asynchronously and handing the result to +callback+.
+GitFileOperations.fs_write = function(dir, file, content, callback) {
+ // Let's make sure the parent directories exist, split the file into directories and content
+ var file_parts = file.split("/");
+ var file_name = file_parts.pop()
+ var current_path = dir;
+ // Create missing sub directories
+ while(file_parts.length > 0) {
+ var dir_path = file_parts.shift();
+ current_path = current_path + "/" + dir_path;
+ // Check if the directory exists (if it does not then create it)
+ // — statSync throwing is taken to mean "missing"; 16877 == 0o40755
+ try {
+ fs.statSync(current_path);
+ } catch(err) {
+ fs.mkdirSync(current_path, 16877);
+ }
+ }
+
+ // Write the file to disk
+ current_path = dir + "/" + file;
+ // Append the entry to the file
+ fs.writeFile(current_path, content, callback);
+}
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/git_index.js b/node_modules/git/lib/git/git_index.js
new file mode 100644
index 00000000..a457a298
--- /dev/null
+++ b/node_modules/git/lib/git/git_index.js
@@ -0,0 +1,188 @@
+var util = require('util'),
+ Tree = require('./tree').Tree,
+ Config = require('./config').Config;
+
+// In-memory index used to stage content and build trees/commits without
+// touching a working directory.
+// +repo+ is the owning repository.
+// `tree` accumulates staged content as nested plain objects (directories)
+// with string leaves (file data); `current_tree` caches the base Tree
+// loaded by read_tree so unmodified entries are preserved on write.
+var GitIndex = exports.GitIndex = function(repo) {
+ var _repo = repo, _tree = {}, _current_tree = null;
+
+ Object.defineProperty(this, "tree", { get: function() { return _tree; }, set: function(value) { _tree = value; }, enumerable: true});
+ Object.defineProperty(this, "current_tree", { get: function() { return _current_tree; }, set: function(value) { _current_tree = value; }, enumerable: true});
+ Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true});
+}
+
+// Sets the current tree
+// +tree+ the branch/tag/sha... to use - a string
+//
+// Returns index (self)
+GitIndex.prototype.read_tree = function(tree, callback) {
+ var self = this;
+ // Load the tree
+ this.repo.tree(tree, function(err, loaded_tree) {
+ if(err) return callback(err, loaded_tree);
+ self.current_tree = loaded_tree;
+ callback(null, loaded_tree);
+ })
+}
+
+// Add a file to the index
+// +path+ is the path (including filename)
+// +data+ is the binary contents of the file
+//
+// Returns nothing
+GitIndex.prototype.add = function(file_path, data) {
+ var path = file_path.split('/');
+ var filename = path.pop();
+ var current = this.tree;
+
+ path.forEach(function(dir) {
+ current[dir] = current[dir] || {};
+ var node = current[dir];
+ current = node;
+ });
+
+ current[filename] = data;
+}
+
+// Commit the contents of the index
+// +message+ is the commit message [nil]
+// +parents+ is one or more commits to attach this commit to to form a new head [nil]
+// +actor+ is the details of the user making the commit [nil]
+// +last_tree+ is a tree to compare with - to avoid making empty commits [nil]
+// +head+ is the branch to write this head to [master]
+//
+// Returns a String of the SHA1 of the commit
+GitIndex.prototype.commit = function(message, parents, actor, last_tree, head, callback) {
+ var self = this;
+ var args = Array.prototype.slice.call(arguments, 1);
+ callback = args.pop();
+ // Set variables to default values
+ parents = args.length ? args.shift() : null;
+ actor = args.length ? args.shift() : null;
+ last_tree = args.length ? args.shift() : null;
+ head = args.length ? args.shift() : 'master';
+
+ this.write_tree(this.tree, this.current_tree, function(err, tree_sha1) {
+ if(tree_sha1 == last_tree) return callback(null, false); // Don't write identical commits
+ var contents = [];
+ // Add tree to contents
+ contents.push(['tree', tree_sha1].join(' '));
+ // Add all parents if they exist
+ if(parents) {
+ parents.forEach(function(p) {
+ if(p) contents.push(['parent', p].join(' '));
+ });
+ }
+
+ // Define name and email
+ var name = null, email = null;
+
+ if(actor) {
+ name = actor.name;
+ email = actor.email;
+ } else {
+ config = new Config(self.repo);
+ name = config['user.name'];
+ email = config['user.email'];
+ }
+
+ var author_string = "" + name + " <" + email + "> " + parseInt(new Date().getTime()/1000) + " -0700"; // TODO must fix
+ contents.push(['author', author_string].join(' '));
+ contents.push(['committer', author_string].join(' '));
+ contents.push('');
+ contents.push(message);
+ // Write commit and update reference tree
+ self.repo.git.put_raw_object(contents.join("\n"), 'commit', function (commit_sha1) {
+ self.repo.update_ref(head, commit_sha1, callback);
+ });
+ });
+}
+
+var to_bin = function(sha1o) {
+ var sha1 = '';
+ for(var i = 0; i < sha1o.length; i = i + 2) {
+ sha1 = sha1 + String.fromCharCode(parseInt(sha1o.substr(i, 2), 16));
+ }
+ return sha1;
+}
+
+// Recursively write a tree to the index
+// +tree+ is the staged tree (nested plain objects with string leaves)
+// +now_tree+ is the existing Tree to merge with (optional)
+//
+// Calls back with the SHA1 String of the written tree.
+// NOTE(review): the recursive self.write_tree and self.write_blob results
+// are consumed as if they completed synchronously before tree_contents is
+// serialized below — confirm put_raw_object's callback timing makes this
+// safe; the recursion's +err+ is also not propagated.
+GitIndex.prototype.write_tree = function(tree, now_tree, callback) {
+ var self = this;
+ var args = Array.prototype.slice.call(arguments, 1);
+ callback = args.pop();
+ // Set variables to default values
+ now_tree = args.length ? args.shift() : null;
+
+ // Holds the tree content, keyed by entry name (directories get a '/' suffix)
+ var tree_contents = {};
+
+ // Fill in the original tree
+ if(now_tree) {
+ now_tree.contents.forEach(function(obj) {
+ var sha = to_bin(obj.id);
+ var k = obj.name;
+
+ if(obj instanceof Tree) k = k + '/';
+ // Raw tree entry format: "<mode> <name>\0<binary sha>"
+ tree_contents[k] = "" + obj.mode.toString() + " " + obj.name + "\0" + sha;
+ });
+ }
+
+ // overwrite with the new tree contents
+ Object.keys(tree).forEach(function(key) {
+ var value = tree[key];
+
+ if(value.constructor == String) {
+ // String leaf: write it as a blob with regular-file mode 100644
+ var sha = self.write_blob(value);
+ sha = to_bin(sha);
+ tree_contents[key] = "" + '100644' + " " + key + '\0' + sha;
+ } else if(Object.prototype.toString.call(value) === '[object Object]') {
+ // Nested object: write it as a subtree with directory mode 040000
+ var ctree = now_tree ? (now_tree.find(key)) : null;
+ // Write the next level
+ self.write_tree(value, ctree, function(err, sha) {
+ var sha = to_bin(sha);
+ tree_contents[key + '/'] = "" + '040000' + " " + key + '\0' + sha;
+ });
+ }
+ })
+
+ // Serialize the entries sorted by name, as git requires
+ var tr = Object.keys(tree_contents).sort().map(function(key) {
+ return tree_contents[key];
+ }).join('');
+
+ // Return the object sha
+ this.repo.git.put_raw_object(tr, 'tree', function (sha1) {
+ callback(null, sha1);
+ })
+}
+
+// Write the blob to the index
+// +data+ is the data to write
+//
+// Returns the SHA1 String of the blob
+// NOTE(review): put_raw_object is used with a callback elsewhere in this
+// codebase; relying on its return value here may yield undefined — confirm
+// against git.put_raw_object's implementation.
+GitIndex.prototype.write_blob = function(data) {
+ return this.repo.git.put_raw_object(data, 'blob');
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/git_object.js b/node_modules/git/lib/git/git_object.js
new file mode 100644
index 00000000..1b9703ab
--- /dev/null
+++ b/node_modules/git/lib/git/git_object.js
@@ -0,0 +1,20 @@
+var GitCommit = require('./internal/git_commit').GitCommit,
+ GitTree = require('./internal/git_tree').GitTree,
+ GitTag = require('./internal/git_tag').GitTag,
+ GitBlob = require('./internal/git_blob').GitBlob;
+
+var GitObject = exports.GitObject = function() {}
+
+GitObject.from_raw = function(raw_object, repository) {
+ if(raw_object.type == "blob") {
+ return GitBlob.from_raw(raw_object, repository);
+ } else if(raw_object.type == "tree") {
+ return GitTree.from_raw(raw_object, repository);
+ } else if(raw_object.type == "commit") {
+ return GitCommit.from_raw(raw_object, repository);
+ } else if(raw_object.type == "tag") {
+ return GitTag.from_raw(raw_object, repository);
+ } else {
+ throw "got invalid object-type";
+ }
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/head.js b/node_modules/git/lib/git/head.js
new file mode 100644
index 00000000..e851703b
--- /dev/null
+++ b/node_modules/git/lib/git/head.js
@@ -0,0 +1,54 @@
+var util = require('util'),
+ Commit = require('./commit').Commit;
+
+// A named branch head pointing at a commit.
+// +name+ the branch name, +commit+ the commit it references.
+var Head = exports.Head = function(name, commit) {
+ var _name = name;
+ var _commit = commit;
+ // Define the properties
+ Object.defineProperty(this, "name", { get: function() { return _name; }, enumerable: true});
+ Object.defineProperty(this, "commit", { get: function() { return _commit; }, enumerable: true});
+}
+
+// Build the refs namespace prefix for a ref kind, e.g. 'head' -> "refs/heads"
+var prefix = function(name) {
+ return "refs/" + name + "s";
+ }
+
+// Resolve the currently checked-out head.
+// Calls back with a Head, or null when HEAD is detached (no symbolic ref).
+Head.current = function(repo, options, callback) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ callback = args.pop();
+ options = args.length ? args.shift() : {};
+ // Let's read the the head
+ repo.git.fs_read('HEAD', function(err, head) {
+ if(err) return callback(err, head);
+ var matches = head.toString().match(/ref: refs\/heads\/(.*)/);
+ if(!matches) return callback(null, null);
+ // we have a correct reference, create a new head reference
+ repo.git.rev_parse(options, 'HEAD', 0, function(err, rev) {
+ if(err) return callback(err, rev);
+ return callback(null, new Head(matches[1], rev));
+ });
+ });
+}
+
+// List every branch head of the repository.
+// Calls back with an array of Head objects, one per "refs/heads" entry.
+Head.find_all = function(repo, options, callback) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ callback = args.pop();
+ options = args.length ? args.shift() : {};
+ // Let's fetch the references
+ repo.git.refs({}, prefix('head'), function(err, refs) {
+ if(err) return callback(err, refs);
+ // Map the references (each line is "<name> <id>")
+ var mapped_refs = refs.split(/\n/).map(function(ref) {
+ // Fetch the name and id for the reference
+ var split_reference = ref.split(/ /);
+ var name = split_reference[0];
+ var id = split_reference[1];
+
+ // Create a commit object with the id
+ var commit = new Commit(repo, id);
+ // Wrap the commit object in a head object and return mapped object
+ return new Head(name, commit);
+ })
+ callback(null, mapped_refs);
+ })
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/index.js b/node_modules/git/lib/git/index.js
new file mode 100644
index 00000000..a6c1f561
--- /dev/null
+++ b/node_modules/git/lib/git/index.js
@@ -0,0 +1,36 @@
+// Aggregate and re-export every public symbol of the library so consumers
+// can require a single entry point.
+[
+ 'actor', 'binary_parser', 'blame', 'blame_line', 'blob', 'commit', 'commit_stats', 'config',
+ 'diff', 'file_index', 'file_window', 'git', 'git_file_operations', 'git_index', 'git_object',
+ 'head', 'loose_storage', 'merge', 'pack_storage', 'raw_object', 'ref', 'remote', 'repo', 'repository',
+ 'status', 'status_file', 'sub_module', 'tag', 'tree', 'user_info',
+ 'internal/directory_entry', 'internal/git_blob', 'internal/git_commit', 'internal/git_tag', 'internal/git_tree'
+].forEach(function(path){
+ var module = require('./' + path);
+ for (var i in module)
+ exports[i] = module[i];
+});
+
+// Bundled zlib implementation
+[
+ 'zlib'
+].forEach(function(path){
+ var module = require('../zlib/' + path);
+ for (var i in module)
+ exports[i] = module[i];
+});
+
+// Bundled sprintf implementation
+[
+ 'sprintf'
+].forEach(function(path){
+ var module = require('../sprintf/' + path);
+ for (var i in module)
+ exports[i] = module[i];
+});
+
+// Diff engine modules
+[
+ 'block', 'callbacks', 'change', 'diff', 'hunk'
+].forEach(function(path){
+ var module = require('../diff/' + path);
+ for (var i in module)
+ exports[i] = module[i];
+});
+
diff --git a/node_modules/git/lib/git/internal/directory_entry.js b/node_modules/git/lib/git/internal/directory_entry.js
new file mode 100644
index 00000000..0ed41a9a
--- /dev/null
+++ b/node_modules/git/lib/git/internal/directory_entry.js
@@ -0,0 +1,60 @@
+var StringUtil = require('../../sprintf/sprintf').StringUtil;
+
+// POSIX-style file mode masks (octal): type bits and the git-specific
+// "gitlink" (submodule) type.
+var S_IFMT = parseInt('00170000', 8);
+var S_IFLNK = parseInt('0120000', 8);
+var S_IFREG = parseInt('0100000', 8);
+var S_IFDIR = parseInt('0040000', 8);
+var S_IFGITLINK = parseInt('0160000', 8);
+
+// A single entry of a git tree object.
+// +mode+ the octal mode as an ASCII string (as stored in the raw tree),
+// +file_name+ the entry name, +sha1+ the entry's hex sha.
+// Throws when the decoded mode is not a link, directory, regular file or
+// submodule.
+var DirectoryEntry = exports.DirectoryEntry = function(mode, file_name, sha1) {
+ var _mode = 0, _name = file_name, _sha1 = sha1;
+ var self = this;
+
+ // Process the mode to correctly get the right value: manual octal parse,
+ // folding each ASCII digit into the accumulator 3 bits at a time
+ for(var i = 0; i < mode.length; i++) {
+ _mode = (_mode << 3) | (mode.charCodeAt(i) - '0'.charCodeAt(0));
+ }
+
+ // Internal properties
+ Object.defineProperty(this, "mode", { get: function() { return _mode; }, set: function(value) { _mode = value; }, enumerable: true});
+ Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
+ Object.defineProperty(this, "sha1", { get: function() { return _sha1; }, set: function(value) { _sha1 = value; }, enumerable: true});
+ // Return the type of entry, derived from the mode's type bits
+ Object.defineProperty(this, "type", { get: function() {
+ var type = self.mode & S_IFMT;
+ if(type == S_IFGITLINK) {
+ return 'submodule';
+ } else if(type == S_IFLNK) {
+ return 'link';
+ } else if(type == S_IFDIR) {
+ return 'directory';
+ } else if(type == S_IFREG) {
+ return 'file';
+ } else {
+ return null;
+ }
+ }, enumerable: true});
+
+ // The git object-type name used when formatting the entry
+ Object.defineProperty(this, "format_type", { get:function() {
+ var type = this.type;
+ if(type == 'link') {
+ return 'link';
+ } else if(type == 'directory') {
+ return 'tree';
+ } else if(type == 'file') {
+ return 'blob';
+ } else if(type == 'submodule') {
+ return 'commit';
+ }
+ }, enumerable: false});
+
+ // The mode rendered as a zero-padded 6-digit octal string
+ Object.defineProperty(this, "format_mode", { get:function() {
+ return StringUtil.sprintf("%06o", _mode);
+ }, enumerable: false});
+
+ // Ensure we don't have an illegal type of directory
+ if([S_IFLNK, S_IFDIR, S_IFREG, S_IFGITLINK].indexOf(_mode & S_IFMT) == -1) {
+ throw "unknown type for directory entry";
+ }
+}
+
diff --git a/node_modules/git/lib/git/internal/git_blob.js b/node_modules/git/lib/git/internal/git_blob.js
new file mode 100644
index 00000000..584e20e9
--- /dev/null
+++ b/node_modules/git/lib/git/internal/git_blob.js
@@ -0,0 +1,14 @@
+var util = require('util');
+
+var GitBlob = exports.GitBlob = function(content, repository) {
+ var _content = content, _repository = repository;
+
+ Object.defineProperty(this, "content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
+ Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: false});
+ Object.defineProperty(this, "type", { get: function() { return "blob"; }, enumerable: true});
+ Object.defineProperty(this, "raw_content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
+}
+
+GitBlob.from_raw = function(raw_object, repository) {
+ return new GitBlob(raw_object.content);
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/internal/git_commit.js b/node_modules/git/lib/git/internal/git_commit.js
new file mode 100644
index 00000000..6c6cb770
--- /dev/null
+++ b/node_modules/git/lib/git/internal/git_commit.js
@@ -0,0 +1,87 @@
+var util = require('util'),
+ UserInfo = require('../user_info').UserInfo;
+
+// Internal representation of a git commit object.
+// +tree+ hex sha of the root tree, +parent+ array of parent shas,
+// +author+/+committer+ UserInfo instances, +message+ the commit message,
+// +headers+ the raw header block, +repository+ the owning repository.
+var GitCommit = exports.GitCommit = function(tree, parent, author, committer, message, headers, repository) {
+ var _tree = tree, _parent = parent, _author = author, _committer = committer, _message = message, _headers = headers, _repository = repository;
+
+ Object.defineProperty(this, "tree", { get: function() { return _tree; }, enumerable: true});
+ Object.defineProperty(this, "parent", { get: function() { return _parent; }, enumerable: true});
+ Object.defineProperty(this, "author", { get: function() { return _author; }, enumerable: true});
+ Object.defineProperty(this, "committer", { get: function() { return _committer; }, enumerable: true});
+ Object.defineProperty(this, "message", { get: function() { return _message; }, enumerable: true});
+ Object.defineProperty(this, "headers", { get: function() { return _headers; }, enumerable: true});
+ Object.defineProperty(this, "repository", { get: function() { return _repository; }, enumerable: true});
+ Object.defineProperty(this, "type", { get: function() { return "commit"; }, enumerable: true});
+
+ // Raw content of commit, rebuilt in canonical git commit-object layout
+ Object.defineProperty(this, "raw_content", { get: function() {
+ return "tree " + _tree + "\n"
+ + _parent.map(function(i) { return "parent " + i +"\n"; }).join('')
+ + "author " + _author + "\ncommitter " + _committer + "\n\n" + _message;
+ }, enumerable: true});
+}
+
+// Create a commit from a raw object: split headers from message at the
+// first blank line, then parse each header line into key/value.
+GitCommit.from_raw = function(raw_object, repository) {
+ var parent = [];
+ var tree = null, author = null, committer = null;
+
+ // Split the text but only grab the 2 first blocks
+ var split_result = raw_object.content.split(/\n\n/);
+ var headers = split_result.shift();
+ var message = split_result.join("\n\n");
+
+ // get all the headers as [key, rest-of-line] pairs
+ var all_headers = headers.split(/\n/).map(function(header) {
+ var parts = header.split(/ /);
+ return [parts.shift(), parts.join(" ")];
+ })
+ // Iterate over all the headers
+ all_headers.forEach(function(header) {
+ var key = header[0];
+ var value = header[1];
+
+ if(key == "tree") {
+ tree = value;
+ } else if(key == "parent") {
+ parent.push(value);
+ } else if(key == "author") {
+ author = new UserInfo(value);
+ } else if(key == "committer") {
+ committer = new UserInfo(value);
+ } else {
+ // Unknown header — logged, not fatal (the "unknow" spelling below is
+ // part of the emitted log string)
+ util.puts("unknow header '" + key + "' in commit " + raw_object.sha_hex())
+ }
+ })
+
+ // NOTE(review): this only fires when ALL of tree/author/committer are
+ // missing; a commit lacking just one of them slips through — confirm
+ // whether `||` was intended.
+ if(!tree && !author && !committer) {
+ throw "incomplete raw commit object";
+ }
+ // Return the git commit object
+ return new GitCommit(tree, parent, author, committer, message, headers, repository);
+}
+
+GitCommit.prototype.raw_log = function(sha1) {
+ var output = "commit " + sha1 + "\n";
+ output = output + this.headers + "\n\n";
+ var lines = this.message.split("\n");
+ // Remove the last line which will be empty
+ for(var i = 0; i < (lines.length > 1 ? lines.length - 1 : lines.length); i++) {
+ output = output + ' ' + lines[i] + '\n';
+ }
+ // Return the output
+ return output + '\n';
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/internal/git_tag.js b/node_modules/git/lib/git/internal/git_tag.js
new file mode 100644
index 00000000..ee7886a3
--- /dev/null
+++ b/node_modules/git/lib/git/internal/git_tag.js
@@ -0,0 +1,69 @@
+var util = require('util'),
+ UserInfo = require('../user_info').UserInfo;
+
+// Internal representation of an annotated git tag object.
+// +object+ the sha the tag points at, +type+ the pointed-at object's type,
+// +tag+ the tag name, +tagger+ a UserInfo, +message+ the tag message.
+var GitTag = exports.GitTag = function(object, type, tag, tagger, message, repository) {
+ var _object = object, _type = type, _tag = tag, _tagger = tagger, _message = message, _repository = repository;
+
+ Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: true});
+ Object.defineProperty(this, "object", { get: function() { return _object; }, set: function(value) { _object = value; }, enumerable: true});
+ // The pointed-at object's type is kept only in _type (used by raw_content);
+ // the public "type" property below is the constant "tag" instead.
+ // Object.defineProperty(this, "type", { get: function() { return _type; }, set: function(value) { _type = value; }, enumerable: true});
+ Object.defineProperty(this, "tag", { get: function() { return _tag; }, set: function(value) { _tag = value; }, enumerable: true});
+ Object.defineProperty(this, "tagger", { get: function() { return _tagger; }, set: function(value) { _tagger = value; }, enumerable: true});
+ Object.defineProperty(this, "message", { get: function() { return _message; }, set: function(value) { _message = value; }, enumerable: true});
+ Object.defineProperty(this, "type", { get: function() { return "tag"; }, enumerable: true});
+
+ // NOTE(review): canonical git tags place the message after a blank line
+ // following the tagger header; this layout inlines it — confirm intended.
+ Object.defineProperty(this, "raw_content", { get: function() {
+ return "object " + _object + "\ntype " + _type + "\ntag " + _tag + "\ntagger " + _tagger + " " + _message + "\n\n";
+ }, enumerable: false});
+}
+
+GitTag.from_raw = function(raw_object, repository) {
+ var parts = raw_object.content.split("\n\n");
+ var headers = parts.shift();
+ var message = parts.join(/\n\n/);
+
+ // Further split the headers
+ headers = headers.split(/\n/).map(function(header) {
+ var parts = header.split(/ /);
+ return [parts.shift(), parts.join(" ")];
+ })
+
+ // Initialize base variables
+ var object = '', type = '', tag = '', tagger = '';
+
+ headers.forEach(function(header) {
+ var key = header[0];
+ var value = header[1];
+
+ if(key == 'object') {
+ object = value;
+ } else if (key == 'type') {
+ if(['blob', 'tree', 'commit', 'tag'].indexOf(value) == -1) {
+ throw "invalid type in tag";
+ }
+ // Set the type
+ type = value;
+ } else if(key == 'tag') {
+ tag = value;
+ } else if(key == 'tagger') {
+ tagger = new UserInfo(value);
+ } else {
+ util.puts("unknown header '" + key);
+ }
+ });
+
+ // If we have an illegal tag object
+ if(object == null || type == null || tag == null || tagger == null) {
+ throw "incomplete raw tag object";
+ }
+ // Return the tag
+ return new GitTag(object, type, tag, tagger, message, repository);
+}
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/internal/git_tree.js b/node_modules/git/lib/git/internal/git_tree.js
new file mode 100644
index 00000000..82661c42
--- /dev/null
+++ b/node_modules/git/lib/git/internal/git_tree.js
@@ -0,0 +1,70 @@
+var util = require('util'),
+ BinaryParser = require('../binary_parser').BinaryParser,
+ DirectoryEntry = require('../internal/directory_entry').DirectoryEntry;
+
+// Internal representation of a git tree object.
+// +entries+ array of DirectoryEntry instances (defaults to empty),
+// +repository+ the owning repository.
+var GitTree = exports.GitTree = function(entries, repository) {
+ var _entries = entries ? entries : [], _repository = repository;
+
+ // Internal properties
+ Object.defineProperty(this, "entries", { get: function() { return _entries; }, set: function(value) { _entries = value; }, enumerable: true});
+ Object.defineProperty(this, "repository", { get: function() { return _repository; }, set: function(value) { _repository = value; }, enumerable: true});
+ Object.defineProperty(this, "type", { get: function() { return "tree"; }, enumerable: true});
+
+ // Human-readable listing: "<mode> <type> <sha>\t<name>" per entry
+ Object.defineProperty(this, "raw_content", { get: function() {
+ return _entries.map(function(e) {
+ return [[e.format_mode, e.format_type, e.sha1].join(' '), e.name].join('\t')
+ }).join('\n');
+ }, enumerable: true});
+}
+
+var read_until_chr = function(index, content, char) {
+ var found = false;
+ var content_length = content.length;
+ var chr_code = char.charCodeAt(0);
+ var offset = 0;
+
+ // Search until we locate the content
+ while(!found && (index + offset) < content_length) {
+ if(content.charCodeAt(index + offset) == chr_code) found = true;
+ offset = offset + 1;
+ }
+
+ // Extract content and return
+ return content.substr(index, offset - 1);
+}
+
+var to_hex_string = function(string) {
+ var hexString = '';
+ for(var index = 0; index < string.length; index++) {
+ var value = BinaryParser.toByte(string.substr(index, 1));
+ var number = value <= 15 ? "0" + value.toString(16) : value.toString(16);
+ hexString = hexString + number;
+ }
+ return hexString;
+};
+
+// Create Tree Object from Raw stream.
+// Raw tree entries are laid out as "<mode> <name>\0<20-byte binary sha>"
+// back to back; parse them sequentially into DirectoryEntry instances.
+GitTree.from_raw = function(raw_object, repository) {
+ var entries = [];
+ var index = 0;
+ var content_length = raw_object.content.length;
+ var content = raw_object.content;
+
+ // Parse the content
+ while(index < content_length) {
+ // Mode is ASCII octal up to the first space
+ var mode = read_until_chr(index, content, ' ');
+ index = index + mode.length + 1;
+ // Name runs up to the NUL terminator
+ var file_name = read_until_chr(index, content, '\0');
+ index = index + file_name.length + 1;
+ // The sha is 20 raw bytes, converted to hex for the entry
+ var raw_sha = content.substr(index, 20);
+ index = index + raw_sha.length;
+ var sha = to_hex_string(raw_sha);
+
+ // Add the Entry to the directory list
+ entries.push(new DirectoryEntry(mode, file_name, sha));
+ }
+
+ // Return a tree with all the entries
+ return new GitTree(entries, repository);
+}
diff --git a/node_modules/git/lib/git/loose_storage.js b/node_modules/git/lib/git/loose_storage.js
new file mode 100644
index 00000000..11639d3e
--- /dev/null
+++ b/node_modules/git/lib/git/loose_storage.js
@@ -0,0 +1,172 @@
+var util = require('util'),
+ fs = require('fs'),
+ BinaryParser = require('./binary_parser').BinaryParser,
+ Zlib = require('../zlib/zlib').Zlib,
+ RawObject = require('./raw_object').RawObject,
+ crypto = require('crypto'),
+ zlib = require('zlib');
+
+var OBJ_TYPES = [null, "commit", "tree", "blob", "tag"];
+
+LooseStorage = exports.LooseStorage = function(directory) {
+ var _directory = directory;
+
+ Object.defineProperty(this, "directory", { get: function() { return _directory; }, set: function(value) { _directory = value; }, enumerable: true});
+}
+
+// Look up a loose object by its (binary) sha1.
+// Returns a RawObject, or null when the object does not exist.
+// NOTE(review): the blanket catch also converts read/parse errors of
+// existing-but-corrupt objects into null — confirm that is acceptable.
+LooseStorage.prototype.find = function(sha1) {
+ try {
+ sha1 = to_hex_string(sha1);
+ // If we don't have a valid sha
+ if(sha1.length != 40) return null;
+ // Objects live at <dir>/<first 2 sha chars>/<remaining 38 chars>
+ var path = this.directory + "/" + sha1.substring(0, 2) + '/' + sha1.substring(2, 40);
+ return this.get_raw_object(fs.readFileSync(path));
+ } catch(err) {
+ return null;
+ }
+}
+
+// Read and parse the raw object: handles both the legacy format (a plain
+// zlib stream containing "<type> <size>\0<content>") and the newer format
+// (a packed binary header followed by the zlib-deflated content).
+LooseStorage.prototype.get_raw_object = function(buf) {
+ if(buf.length < 2) throw "object file too small";
+
+ // Set up variables
+ var type = null;
+ var size = null;
+ var used = null;
+ var content = null;
+
+ if(this.is_legacy_loose_object(buf)) {
+ // Legacy: inflate the whole buffer, then split header from payload
+ content = new Zlib.Unzip(buf).unzip();
+ content = Array.isArray(content) ? content[0] : content;
+ // Let's split the content up
+ var parts = content.split(/\0/)
+ var header = parts.shift();
+ content = parts.join("\0");
+
+ // if no header or content we got an invalid object header
+ if(header == null || content == null) throw "invalid object header";
+
+ // Split out the header
+ parts = header.split(/ /);
+ type = parts[0];
+ size = parts[1];
+ // Check that we have a valid type
+ if(['blob', 'tree', 'commit', 'tag'].indexOf(type) == -1 || !size.match(/^\d+$/)) throw "invalid object header";
+ // Convert parts
+ size = parseInt(size, 10);
+ } else {
+ // New format: binary header first, then the deflated payload
+ var parts = this.unpack_object_header_gently(buf);
+ type = parts[0];
+ size = parts[1];
+ used = parts[2];
+ // Unpack content
+ content = new Zlib.Unzip(buf.slice(used, buf.length)).unzip();
+ content = Array.isArray(content) ? content[0] : content;
+ }
+ // Return a raw object
+ return new RawObject(type, content);
+}
+
+LooseStorage.prototype.unpack_object_header_gently = function(buf) {
+ var used = 0
+ var c = buf[used];
+ used = used + 1;
+
+ var type = (c >> 4) & 7;
+ var size = c & 15;
+ var shift = 4;
+
+ while(c & 0x80 != 0) {
+ if(buf.length <= used) throw "object file too short";
+ // Get next char
+ c = buf[used];
+ used = used + 1;
+ // Calculate size
+ size = size + ((c & 0x7f) << shift);
+ }
+
+ // Fetch the type
+ type = OBJ_TYPES[type];
+ // Check that we have a valid type
+ if(['blob', 'tree', 'commit', 'tag'].indexOf(type) == -1) throw "invalid loose object type";
+ return [type, size, used];
+}
+
+// A legacy loose object starts directly with a zlib stream: first byte
+// 0x78 (deflate, 32K window) and the first two bytes, read big-endian,
+// are a multiple of 31 (the zlib header check rule).
+LooseStorage.prototype.is_legacy_loose_object = function(buf) {
+ var word = (buf[0] << 8) + buf[1];
+ return buf[0] == 0x78 && word % 31 == 0;
+}
+
+var to_hex_string = function(string) {
+ var hexString = '';
+ for(var index = 0; index < string.length; index++) {
+ var value = BinaryParser.toByte(string.substr(index, 1));
+ var number = value <= 15 ? "0" + value.toString(16) : value.toString(16);
+ hexString = hexString + number;
+ }
+ return hexString;
+};
+
+// currently, I'm using the legacy format because it's easier to do
+// this function takes content and a type and writes out the loose object and returns a sha
+LooseStorage.prototype.put_raw_object = function(content, type, callback) {
+ var self = this;
+ // Retrieve size of message
+ var size = content.length.toString();
+ // Verify that header is ok
+ LooseStorage.verify_header(type, size);
+ // Create header
+ var header = "" + type + " " + size + "\0";
+ var store = header + content;
+ // Use node crypto library to create sha1 hash
+ var hash = crypto.createHash("sha1");
+ hash.update(store);
+ // Return the hash digest
+ var sha1 = hash.digest('hex');
+ // Create path
+ var path = this.directory + "/" + sha1.substr(0, 2) + '/' + sha1.substr(2);
+
+ try {
+ fs.statSync(path);
+ } catch(err) {
+ // Deflate the data
+ var data = zlib.gunzip(store, function (err, buffer) {
+ if (err) {
+ throw err;
+ }
+
+ // File does not exist create the directory
+ fs.mkdir(self.directory + "/" + sha1.substr(0, 2), 16877, function (err) {
+ if (err) {
+ throw err;
+ }
+
+ fs.writeFile(path, data, 'binary', function (err) {
+ if (err) {
+ throw err;
+ }
+
+ callback(sha1);
+ });
+ });
+ });
+ }
+}
+
+LooseStorage.verify_header = function(type, size) {
+ if(["blob", "tree", "commit", "tag"].indexOf(type) == -1 || size.match(/^\d+$/) == null) {
+ throw "invalid object header";
+ }
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/merge.js b/node_modules/git/lib/git/merge.js
new file mode 100644
index 00000000..bcd4deff
--- /dev/null
+++ b/node_modules/git/lib/git/merge.js
@@ -0,0 +1,39 @@
+var Merge = exports.Merge = function(str) {
+ var _conflicts = 0, _text = {}, _sections = null;
+ var section = 0;
+ var status = Merge.STATUS_BOTH;
+
+ Object.defineProperty(this, "conflicts", { get: function() { return _conflicts; }, set: function(value) { _conflicts = value; }, enumerable: true});
+ Object.defineProperty(this, "text", { get: function() { return _text; }, set: function(value) { _text = value; }, enumerable: true});
+ Object.defineProperty(this, "sections", { get: function() { return _sections; }, set: function(value) { _sections = value; }, enumerable: true});
+
+ var lines = str.split("\n");
+ lines.forEach(function(line) {
+ if(line.match(/^<<<<<<< (.*?)/)) {
+ status = Merge.STATUS_OURS;
+ _conflicts = _conflicts + 1;
+ section = section + 1;
+ } else if(line == '=======') {
+ status = Merge.STATUS_THEIRS;
+ } else if(line.match(/^>>>>>>> (.*?)/)) {
+ status = Merge.STATUS_BOTH;
+ section = section + 1;
+ } else {
+ _text[section] = _text[section] == null ? {} : _text[section];
+ _text[section][status] = _text[section][status] == null ? [] : _text[section][status];
+ _text[section][status].push(line);
+ }
+ });
+
+ // Let's set the values
+ _text = Object.keys(_text).map(function(key) {
+ return _text[key];
+ });
+ // Set the number of sections
+ _sections = _text.length;
+}
+
+// Static function
+Merge.STATUS_BOTH = 'both';
+Merge.STATUS_OURS = 'ours';
+Merge.STATUS_THEIRS = 'theirs';
diff --git a/node_modules/git/lib/git/pack_storage.js b/node_modules/git/lib/git/pack_storage.js
new file mode 100644
index 00000000..113b8b3f
--- /dev/null
+++ b/node_modules/git/lib/git/pack_storage.js
@@ -0,0 +1,393 @@
// BUGFIX: the original var list was terminated with ';' after RawObject,
// so the following `Zlib = ...` statement created an implicit global.
var util = require('util'),
  fs = require('fs'),
  Buffer = require('buffer').Buffer,
  BinaryParser = require('./binary_parser').BinaryParser,
  FileWindow = require('./file_window').FileWindow,
  RawObject = require('./raw_object').RawObject,
  Zlib = require('../zlib/zlib').Zlib;
+
// Pack index (v2+) files begin with this 4-byte magic ("\377tOc")
var PACK_IDX_SIGNATURE = '\xfftOc';
// The index fan-out table has one 4-byte entry per possible first sha byte
var FAN_OUT_COUNT = 256;
// Field widths (bytes) used to compute offsets inside the index file
var IDX_OFFSET_SIZE = 4;
var OFFSET_SIZE = 4;
var OFFSET_START = FAN_OUT_COUNT * IDX_OFFSET_SIZE;
var SHA1_SIZE = 20;
var CRC_SIZE = 4;
var SHA1_START = OFFSET_START + OFFSET_SIZE;
var ENTRY_SIZE = OFFSET_SIZE + SHA1_SIZE;
var ENTRY_SIZE_V2 = SHA1_SIZE + CRC_SIZE + OFFSET_SIZE;

// Pack object type codes (5 is unused; 6/7 are delta encodings)
var OBJ_NONE = 0;
var OBJ_COMMIT = 1;
var OBJ_TREE = 2;
var OBJ_BLOB = 3;
var OBJ_TAG = 4;
var OBJ_OFS_DELTA = 6;
var OBJ_REF_DELTA = 7;
// Maps a type code to its git object type name
var OBJ_TYPES = [null, "commit", "tree", "blob", "tag"];
+
// A readable git pack: resolves sha1s through the companion .idx file and
// decodes objects (including deltas) out of the .pack file.
// file: path to either the .pack or the .idx file of the pack.
var PackStorage = exports.PackStorage = function(file) {
  var _cache = {}, _version = null, _offsets = null, _size = 0;
  // Replace idx reference with pack
  // BUGFIX: _name was captured before this rewrite, so a PackStorage built
  // from an index path kept the ".idx" name and later data reads opened the
  // index file instead of the pack file.
  if(file.match(/\.idx$/)) {
    file = file.substr(0, file.length - 3) + "pack";
  }
  var _name = file;

  Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
  Object.defineProperty(this, "cache", { get: function() { return _cache; }, set: function(value) { _cache = value; }, enumerable: true});
  Object.defineProperty(this, "version", { get: function() { return _version; }, set: function(value) { _version = value; }, enumerable: true});
  Object.defineProperty(this, "offsets", { get: function() { return _offsets; }, set: function(value) { _offsets = value; }, enumerable: true});
  Object.defineProperty(this, "size", { get: function() { return _size; }, set: function(value) { _size = value; }, enumerable: true});
  // Initialize pack (reads the idx fan-out table)
  init_pack(this);
}
+
// Look up an object by sha1: serve it from the in-memory cache when
// possible, otherwise locate its offset via the index and parse it out of
// the pack file. Returns null when the sha is not in this pack.
PackStorage.prototype.find = function(sha1) {
  var cached = this.cache[sha1];
  if(cached) return cached;

  // Search the pack index for the object's byte offset
  var offset = find_object(this, sha1);
  if(!offset) return null;

  // Decode the object and remember it for subsequent lookups
  var obj = this.parse_object(this, offset);
  this.cache[sha1] = obj;
  return obj;
}
+
// Close the pack. All file handles are opened and closed per operation
// elsewhere, so there is nothing to release here (candidate for removal).
PackStorage.prototype.close = function() {
}
+
// Read and decode the object stored at the given byte offset in the pack
// file, returning it as a RawObject with its type name resolved.
PackStorage.prototype.parse_object = function(pack, offset) {
  var packfile = fs.openSync(pack.name, "r");
  // unpacked is [data, type-code]
  var unpacked = this.unpack_object(pack, packfile, offset);
  fs.closeSync(packfile)
  return new RawObject(OBJ_TYPES[unpacked[1]], unpacked[0]);
}
+
// Decode the object header at `offset` in the open pack file and unpack the
// object data. Returns [data, type-code]. With options.caching set, returns
// [false, false] for anything that is not a commit or tree.
PackStorage.prototype.unpack_object = function(pack, packfile, offset, options) {
  // Ensure valid options variable
  options = options ? options : {};
  var obj_offset = offset;

  var buf = new Buffer(1);
  fs.readSync(packfile, buf, 0, 1, offset);
  // First header byte: MSB = continuation, bits 6-4 = type, bits 3-0 = size
  var c = buf[0];
  var size = c & 0xf;
  var type = (c >> 4) & 7;
  var shift = 4;
  offset = offset + 1;
  // Keep consuming size varint bytes while the continuation bit is set
  while((c & 0x80) != 0) {
    fs.readSync(packfile, buf, 0, 1, offset);
    c = buf[0];
    // Adjust size for the byte
    size = size | ((c & 0x7f) << shift);
    shift = shift + 7;
    offset = offset + 1;
  }

  // If it's not a commit or tree and caching is enabled then return false
  if(!(type == OBJ_COMMIT || type == OBJ_TREE) && options['caching']) return [false, false];
  // Deltas are resolved against a base object; plain objects are inflated
  if(type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA) {
    return this.unpack_deltified(packfile, type, offset, obj_offset, size, options);
  } else if(type == OBJ_COMMIT || type == OBJ_TREE || type == OBJ_BLOB || type == OBJ_TAG) {
    var data = unpack_compressed(pack, offset, size);
    return [data, type];
  } else {
    // BUGFIX: `throw new "invalid type " + type` attempted to use the string
    // as a constructor and raised a TypeError instead; throw a proper Error.
    throw new Error("invalid type " + type);
  }
}
+
// Resolve a deltified pack entry: locate the base object (by relative
// offset for OFS deltas, by sha1 lookup for REF deltas), then apply the
// delta to it. Returns [data, type-code] like unpack_object.
PackStorage.prototype.unpack_deltified = function(packfile, type, offset, obj_offset, size, options) {
  var data = new Buffer(SHA1_SIZE);
  // BUGFIX: base_offset was previously assigned without `var` in the REF
  // branch, leaking an implicit global.
  var base_offset = null;
  // Read either the base-offset varint (OFS) or the 20-byte base sha (REF)
  fs.readSync(packfile, data, 0, SHA1_SIZE, offset);

  if(type == OBJ_OFS_DELTA) {
    // Decode the big-endian offset varint relative to this object
    var i = 0;
    var c = data[i];
    base_offset = c & 0x7f;

    while((c & 0x80) != 0) {
      c = data[++i];
      base_offset = base_offset + 1;
      base_offset = base_offset << 7;
      base_offset = base_offset | (c & 0x7f);
    }

    base_offset = obj_offset - base_offset;
    offset = offset + i + 1;
  } else {
    // REF delta: look the base object up by sha in this pack's index
    base_offset = find_object(this, data.toString())
    offset = offset + SHA1_SIZE;
  }

  // Fetch the base object at this offset and unpack the result
  var object_array = this.unpack_object(this, packfile, base_offset);
  var base = object_array[0];
  var type = object_array[1];
  // If it's not a Commit or Tree return an empty delta
  if(!(type == OBJ_COMMIT || type == OBJ_TREE) && options['caching']) return [false, false];
  // Inflate the delta data and apply it to the base
  var delta = unpack_compressed(this, offset, size);
  var delta2 = patch_delta(base, delta);
  return [delta2, type];
}
+
// Hex-encode a binary string, emitting two lowercase hex digits per byte.
var to_hex_string = function(string) {
  var hex = '';
  for(var i = 0; i < string.length; i++) {
    var byte = BinaryParser.toByte(string.substr(i, 1));
    hex += (byte <= 15 ? "0" : "") + byte.toString(16);
  }
  return hex;
};
+
// Apply a git binary delta to `base`, producing the target content string.
// delta layout: source-size varint, target-size varint, then a stream of
// copy (high bit set) and insert (literal count) opcodes.
// Throws 'invalid delta data' on a size mismatch or a zero opcode.
var patch_delta = function(base, delta) {
  var delta_header_parts = patch_delta_header_size(delta, 0);
  var src_size = delta_header_parts[0];
  var pos = delta_header_parts[1];

  // The declared source size must match the base being patched
  if(src_size != base.length) throw "invalid delta data";

  delta_header_parts = patch_delta_header_size(delta, pos);
  // NOTE(review): dest_size is decoded but never verified against the
  // produced output — confirm whether a final length check was intended.
  var dest_size = delta_header_parts[0];
  pos = delta_header_parts[1];
  var dest = '';

  while(pos < delta.length) {
    var c = delta.charCodeAt(pos);
    pos = pos + 1;

    // High bit set: copy opcode — the low 7 bits flag which offset/size
    // operand bytes follow
    if((c & 0x80) != 0) {
      pos = pos - 1;
      var cp_off = 0;
      var cp_size = 0;

      // NOTE(review): `pos += 1` increments BEFORE the read, so operand
      // bytes are addressed relative to the opcode position — confirm this
      // matches git's patch-delta operand layout.
      if((c & 0x01) != 0) cp_off = delta.charCodeAt(pos += 1);
      if((c & 0x02) != 0) cp_off = cp_off | (delta.charCodeAt(pos += 1) << 8);
      if((c & 0x04) != 0) cp_off = cp_off | (delta.charCodeAt(pos += 1) << 16);
      if((c & 0x08) != 0) cp_off = cp_off | (delta.charCodeAt(pos += 1) << 24);

      if((c & 0x10) != 0) cp_size = delta.charCodeAt(pos += 1);
      if((c & 0x20) != 0) cp_size = cp_size | (delta.charCodeAt(pos += 1) << 8);
      if((c & 0x40) != 0) cp_size = cp_size | (delta.charCodeAt(pos += 1) << 16);
      // A copy size of zero encodes 0x10000 bytes
      if(cp_size == 0) cp_size = 0x10000;

      pos = pos + 1;
      // Copy cp_size bytes from the base at cp_off
      dest = dest + base.substr(cp_off, cp_size);
    } else if(c != 0) {
      // Insert opcode: the next c literal bytes come from the delta itself
      dest = dest + delta.substr(pos, c);
      pos = pos + c;
    } else {
      // Opcode 0 is reserved/invalid
      throw "invalid delta data";
    }
  }

  return dest;
}
+
+var patch_delta_header_size = function(delta, pos) {
+ var size = 0;
+ var shift = 0;
+
+ do {
+ var c = delta.charCodeAt(pos);
+ if(c == null) throw 'invalid delta data';
+ pos = pos + 1;
+ size = size | ((c & 0x7f) << shift);
+ shift = shift + 7
+
+ } while((c & 0x80) != 0);
+
+ // Return the header size and position
+ return [size, pos];
+}
+
// Inflate the zlib-compressed object data found at `offset` in the pack
// file, expecting about `destsize` bytes of output.
var unpack_compressed = function(pack, offset, destsize) {
  var outdata = "";
  var file_offset = offset;
  var packfile = fs.openSync(pack.name, "r");

  // Read in the compressed object (this could be huge :()
  // TODO TODO TODO, change unzip method to allow for initializing the structure and then decoding
  // pieces
  var indata = new Buffer(destsize + 100);
  fs.readSync(packfile, indata, 0, destsize + 100, file_offset);
  // Close the file
  fs.closeSync(packfile);
  // Adjust the file_offset
  file_offset = file_offset + destsize;
  outdata = outdata + new Zlib.Unzip(indata).unzip();

  // BUGFIX: strings have no `size` property, so the original
  // `outdata.size > destsize` compared undefined and could never fire.
  if(outdata.length > destsize) {
    throw "error reading pack data";
  }
  // Return the data read from the compressed block
  return outdata;
}
+
// Binary-search the pack index window for sha1, returning the object's byte
// offset inside the pack file, or null when the sha is absent.
// Handles both index layouts: v2 (sha table, CRC table, offset table) and
// v1 (interleaved 4-byte-offset + 20-byte-sha entries).
var find_object_in_index = function(pack, idx, sha1) {
  // Fan-out slot is the first byte of the (binary) sha
  var slot = sha1.charCodeAt(0);
  // BUGFIX: `slot == NaN` is always false; use isNaN so an empty sha bails out
  if(isNaN(slot)) return null;

  // Range of index entries whose sha starts with this byte
  var first = pack.offsets[slot];
  var last = pack.offsets[slot + 1];

  while(first < last) {
    // Integer midpoint (same result as the original parseInt for positives)
    var mid = Math.floor((first + last) / 2);
    // If we have a version 2 pack file
    if(pack.version == 2) {
      // Fetch the sha1 at the midpoint of the sorted sha table
      var midsha1 = idx.index([(OFFSET_START + (mid * SHA1_SIZE)), SHA1_SIZE]);
      var compare_sha1 = '';
      // Convert midsha1 to allow for correct string comparison
      for(var i = 0; i < midsha1.length; i++) {
        compare_sha1 = compare_sha1 + String.fromCharCode(midsha1[i]);
      }

      // Do a locale Compare
      var cmp = compare_sha1.localeCompare(sha1);
      if(cmp < 0) {
        first = mid + 1;
      } else if(cmp > 0) {
        last = mid;
      } else {
        // v2: offsets live after the sha and CRC tables
        var pos = OFFSET_START + (pack.size * (SHA1_SIZE + CRC_SIZE)) + (mid * OFFSET_SIZE);
        var offset = idx.index([pos, OFFSET_SIZE]);
        offset = BinaryParser.toInt(reverse_buffer(offset).toString('binary', 0, 4));
        return offset;
      }
    } else {
      // v1: interleaved (offset, sha) entries
      var midsha1 = idx.index([SHA1_START + mid * ENTRY_SIZE, SHA1_SIZE]);
      var compare_sha1 = '';
      // Convert midsha1 to allow for correct string comparison
      for(var i = 0; i < midsha1.length; i++) {
        compare_sha1 = compare_sha1 + String.fromCharCode(midsha1[i]);
      }

      // Do a locale Compare
      var cmp = compare_sha1.localeCompare(sha1);
      if(cmp < 0) {
        first = mid + 1;
      } else if(cmp > 0) {
        last = mid;
      } else {
        var pos = OFFSET_START + mid * ENTRY_SIZE;
        var offset = idx.index([pos, OFFSET_SIZE]);
        offset = BinaryParser.toInt(reverse_buffer(offset).toString('binary', 0, 4));
        return offset;
      }
    }
  }
  return null;
}
+
// Resolve a sha1 to its byte offset in the pack via the pack's index file.
// Returns null when the sha is not present in this pack.
var find_object = function(pack, sha1) {
  var found = null;
  // with_idx invokes its callback synchronously, so `found` is set on return
  with_idx(pack, function(err, idx) {
    found = find_object_in_index(pack, idx, sha1);
  })

  return found;
}
+
+var reverse_buffer = function(buffer) {
+ var result_buffer = new Buffer(buffer.length);
+ var length = buffer.length;
+
+ for(var i = 0; i < length; i++) {
+ result_buffer[length - 1 - i] = buffer[i];
+ }
+
+ return result_buffer;
+}
+
// Read the idx fan-out table to populate pack.offsets (cumulative object
// counts per leading sha byte, 257 entries starting at 0) and pack.size
// (total object count). Throws when the table is not monotonically
// increasing, which indicates a corrupt index.
var init_pack = function(pack) {
  // TODO TODO TODO
  with_idx(pack, function(err, idx) {
    // Reset pack offsets
    pack.offsets = [0];
    // Do a max of FAN_OUT_COUNT to avoid going crazy
    for(var i = 0; i < FAN_OUT_COUNT; i++) {
      // Each offset value is a 4 byte network encoded integer
      var pos = idx.index([i * IDX_OFFSET_SIZE, IDX_OFFSET_SIZE])
      pos = BinaryParser.toInt(reverse_buffer(pos).toString('binary', 0, 4));
      // If the position is less than the pack offset stored the pack index is corrupt
      if(pos < pack.offsets[i]) {
        throw "pack " + pack.name + " has discontinuous index " + i;
      }
      // Add offset position to list of tracked offsets
      pack.offsets.push(pos);
    }
    // Adjust the pack size (last fan-out entry == total object count)
    pack.size = pack.offsets[pack.offsets.length - 1];
    // Close all files
    idx.close();
  });
}
+
// Open the pack's index file (or an explicitly supplied one), detect the
// index version from its header, and hand a FileWindow over the idx to
// `callback(null, idx)`. The callback is invoked synchronously.
// NOTE(review): the two header reads pass no position and rely on the file
// position advancing between readSync calls — confirm.
var with_idx = function(pack, index_file, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  index_file = args.length ? args.shift() : null;
  // Final idx file name
  var idx_file_name = null;
  // Define file handle variable
  var idxfile = null;

  if(!index_file) {
    // Derive the .idx path from the .pack path ("...pack" -> "...idx")
    index_file = pack.name;
    idx_file_name = pack.name.substr(0, pack.name.length - 4) + "idx";
    idxfile = fs.openSync(pack.name.substr(0, pack.name.length - 4) + "idx", "r");
  } else {
    idx_file_name = index_file;
    idxfile = fs.openSync(index_file, "r");
  }

  // Read header (4-byte signature)
  var sign_buffer = new Buffer(4);
  var signature = '';
  fs.readSync(idxfile, sign_buffer, 0, 4);
  for(var i = 0; i < sign_buffer.length; i++) {
    signature = signature + BinaryParser.fromByte(sign_buffer[i]);
  }

  // Extract version of pack (4-byte network-order integer)
  var ver_buffer = new Buffer(4);
  fs.readSync(idxfile, ver_buffer, 0, 4);
  var ver = BinaryParser.toInt(reverse_buffer(ver_buffer).toString('binary', 0, 4));
  // Close idx file
  fs.closeSync(idxfile);
  // If we have a IDX pack signature this is at least version 2 of the file format
  if(signature == PACK_IDX_SIGNATURE) {
    if(ver != 2) {
      throw ("pack " + pack.name + " has unknown pack file version " + ver);
    }
    pack.version = 2;
  } else {
    // No magic: legacy version 1 layout
    pack.version = 1;
  }
  // Create a file window and return it
  var idx = new FileWindow(idx_file_name, pack.version);
  callback(null, idx);
}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/raw_object.js b/node_modules/git/lib/git/raw_object.js
new file mode 100644
index 00000000..c363e5ce
--- /dev/null
+++ b/node_modules/git/lib/git/raw_object.js
@@ -0,0 +1,26 @@
+var crypto = require('crypto'),
+ util = require('util'),
+ Buffer = require('buffer').Buffer,
+ BinaryParser = require('./binary_parser').BinaryParser;
+
+var RawObject = exports.RawObject = function(type, content) {
+ var _type = type, _content = content;
+
+ Object.defineProperty(this, "type", { get: function() { return _type; }, set: function(value) { _type = value; }, enumerable: true});
+ Object.defineProperty(this, "content", { get: function() { return _content; }, set: function(value) { _content = value; }, enumerable: true});
+}
+
// Compute the git object sha1 over "<type> <len>\0<content>".
// encoding: digest encoding, defaults to 'binary' (pass 'hex' for hex).
RawObject.prototype.sha1 = function(encoding) {
  // BUGFIX: the original read/wrote an undeclared `type` variable (leaking a
  // global) and always digested as 'binary', ignoring the encoding argument.
  encoding = encoding ? encoding : 'binary';
  // Create the basis for the digest
  var digest_content = this.type + " " + this.content.length + BinaryParser.fromByte(0) + this.content;
  // Use node crypto library to create sha1 hash
  var hash = crypto.createHash("sha1");
  hash.update(digest_content);
  // Return the hash digest in the requested encoding
  return hash.digest(encoding);
}
+
// Hex-encoded variant of sha1().
// BUGFIX: the digest was computed but never returned.
RawObject.prototype.sha1_hex = function() {
  return this.sha1('hex');
}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/ref.js b/node_modules/git/lib/git/ref.js
new file mode 100644
index 00000000..b7eb2cdf
--- /dev/null
+++ b/node_modules/git/lib/git/ref.js
@@ -0,0 +1 @@
// Empty Ref constructor — NOTE(review): appears to be a stub with no
// behavior yet; confirm intended use before extending.
var Ref = exports.Ref = function() {}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/remote.js b/node_modules/git/lib/git/remote.js
new file mode 100644
index 00000000..6e0efd88
--- /dev/null
+++ b/node_modules/git/lib/git/remote.js
@@ -0,0 +1,37 @@
+var Commit = require('./commit').Commit;
+
+var Remote = exports.Remote = function(name, commit) {
+ var _name = name;
+ var _commit = commit;
+ // Define the properties
+ Object.defineProperty(this, "name", { get: function() { return _name; }, enumerable: true});
+ Object.defineProperty(this, "commit", { get: function() { return _commit; }, enumerable: true});
+}
+
+var prefix = function(name) {
+ return "refs/" + name + "s";
+ }
+
// List all remote refs of the repo as Remote objects.
// repo: the Repo to inspect; options: currently unused extras
// callback: (err, remotes)
Remote.find_all = function(repo, options, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  options = args.length ? args.shift() : {};

  // Let's fetch the references
  repo.git.refs({}, prefix('remote'), function(err, refs) {
    if(err) return callback(err, refs);
    // Map the references (one "<name> <sha>" pair per line)
    var mapped_refs = refs.split(/\n/).map(function(ref) {
      // Fetch the name and id for the reference
      var split_reference = ref.split(/ /);
      var name = split_reference[0];
      var id = split_reference[1];

      // Create a commit object with the id
      var commit = new Commit(repo, id);
      // Wrap the commit object in a head object and return mapped object
      return new Remote(name, commit);
    })
    callback(null, mapped_refs);
  })
}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/repo.js b/node_modules/git/lib/git/repo.js
new file mode 100644
index 00000000..6913b4ed
--- /dev/null
+++ b/node_modules/git/lib/git/repo.js
@@ -0,0 +1,507 @@
+var Head = require('./head').Head,
+ Git = require('./git').Git,
+ Commit = require('./commit').Commit,
+ fs = require('fs'),
+ util = require('util'),
+ Tree = require('./tree').Tree,
+ Blob = require('./blob').Blob,
+ Head = require('./head').Head,
+ Tag = require('./tag').Tag,
+ Remote = require('./remote').Remote,
+ Blame = require('./blame').Blame,
+ CommitStats = require('./commit_stats').CommitStats,
+ Config = require('./config').Config,
+ Status = require('./status').Status,
+ GitIndex = require('./git_index').GitIndex;
+
// Open a git repository.
// path: a working directory (containing .git), a bare repo dir, or a .git dir
// options: optional; { is_bare: true } forces treating the path as bare
// callback: (err, repo) — invoked after the path has been probed
var Repo = exports.Repo = function(path, options, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  options = args.length ? args.shift() : {};

  var _path = path;
  var _options = options;
  var _working_directory = _path;
  var _bare = true;
  var _git = null;
  var _config_object = null;
  // Create git object
  var self = this;
  // Control access to internal variables
  Object.defineProperty(this, "path", { get: function() { return _path; }, enumerable: true});
  Object.defineProperty(this, "options", { get: function() { return _options; }, enumerable: true});
  Object.defineProperty(this, "git", { get: function() { return _git; }, enumerable: true});
  Object.defineProperty(this, "config_object", { get: function() { return _config_object; }, enumerable: true});
  Object.defineProperty(this, "bare", { get: function() { return _bare; }, enumerable: true});
  Object.defineProperty(this, "working_directory", { get: function() { return _working_directory; }, enumerable: true});

  try {
    var epath = fs.realpathSync(path);
    // Todo checks on paths
    fs.stat(epath + "/.git", function(err, stat) {
      if(!err) {
        // <path>/.git exists: a normal working-directory checkout
        _working_directory = epath;
        _path = epath + "/.git";
        _bare = false;
        _git = new Git(_path);
        // Return the repo
        callback(null, self);
      } else {
        // Check if it's a bare or already is pointing to the .git directory
        fs.stat(epath, function(err, stat) {
          if(!err && stat.isDirectory() && (epath.match(/\.git$/) || options.is_bare)) {
            _path = epath;
            _bare = true;
            _git = new Git(_path);
            // Return the repo
            callback(null, self);
          } else if(!err && stat.isDirectory()) {
            // A directory, but neither a checkout nor a bare repo
            callback("invalid git repository", null);
          } else {
            callback("no such path", null);
          }
        });
      }
    });
  } catch (err) {
    // realpathSync throws when the path does not exist at all
    callback("no such path", null);
  }
}
+
// Fetch the current head
// callback: (err, head)
Repo.prototype.head = function(callback) {
  Head.current(this, callback);
}

// Fetch the repo heads (local branches)
// callback: (err, heads)
Repo.prototype.heads = function(callback) {
  Head.find_all(this, callback);
}

// Returns the tag objects that are available in this repo
// callback: (err, tags)
Repo.prototype.tags = function(callback) {
  Tag.find_all(this, callback);
}
+
// Fetch a set of commits
// start: ref/branch to start from (default 'master')
// max_count: maximum number of commits to return (default 10)
// skip: number of commits to skip (default 0)
// callback: (err, commits)
Repo.prototype.commits = function(start, max_count, skip, callback) {
  var args = Array.prototype.slice.call(arguments, 0);
  callback = args.pop();
  start = args.length ? args.shift() : 'master';
  max_count = args.length ? args.shift() : 10;
  skip = args.length ? args.shift() : 0;

  var options = {max_count:max_count, skip:skip}
  // Locate all commits with the specified options
  Commit.find_all(this, start, options, callback);
}

// Fetch a specific commit by id/ref
// callback: (err, commit) — commit is undefined when nothing matches
Repo.prototype.commit = function(id, callback) {
  var options = {max_count:1};
  // Locate commits and return the first one
  Commit.find_all(this, id, options, function(err, commits) {
    if(err) return callback(err, commits);
    callback(null, commits[0]);
  })
}

// Fetch the commit count based on a start reference (default 'master')
// callback: (err, count)
Repo.prototype.commit_count = function(start, callback) {
  start = start ? start : 'master';
  Commit.count(this, start, callback);
}
+
// Fetch a repository tree
// treeish: tree-ish reference (default 'master')
// paths: optional list of paths to restrict the tree to (default [])
// callback: (err, tree)
Repo.prototype.tree = function(treeish, paths, callback) {
  var args = Array.prototype.slice.call(arguments, 0);
  callback = args.pop();
  // Set variables to default values
  treeish = args.length ? args.shift() : 'master';
  paths = args.length ? args.shift() : [];
  // Construct the tree
  Tree.construct(this, treeish, paths, callback);
}

// Create a blob object for the given id (no existence check is performed)
// callback: (err, blob)
Repo.prototype.blob = function(id, callback) {
  callback(null, new Blob(this, id));
}
+
// Initialize a bare git repository at the given path
// path: full path to the repo (traditionally ends with /.git)
// git_options: additional options for the git init command (optional)
// repo_options: additional options passed to the Repo constructor (optional)
// callback: (err, repo)
//
// Examples
// Repo.init_bare('/var/git/myrepo.git')
//
// Return repo (newly created repo)
Repo.init_bare = function(path, git_options, repo_options, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  // Set variables to default values
  git_options = args.length ? args.shift() : {};
  // BUGFIX: repo_options was read from the raw parameter, so calling
  // (path, git_options, callback) made the callback function the repo
  // options; take it from the remaining juggled args instead.
  repo_options = args.length ? args.shift() : {};
  // Set up bare option
  git_options['bare'] = true;
  repo_options['is_bare'] = true;
  // Create a git object
  var git = new Git(path);
  // Create the directory
  git.fs_mkdir('', function(err, result) {
    if(err) return callback(err, result);
    git.init(git_options, function(err, git) {
      if(err) return callback(err, git);
      // Open the newly initialized repository and hand it to the callback
      new Repo(path, repo_options, callback);
    })
  })
}
+
// Fork a bare git repository from this repo
// path: is the full path of the new repo (traditionally ends with /.git)
// options: is additional options to the git clone command (:bare and :shared are true by default)
// callback: (err, repo)
//
// Return repo (newly created forked repo)
Repo.prototype.fork_bare = function(path, options, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  var self = this;
  // Set variables to default values
  options = args.length ? args.shift() : {};
  options['bare'] = true;
  options['shared'] = true;
  // Create a git object
  var git = new Git(path);
  git.fs_mkdir('', function(err, result) {
    if(err) return callback(err, result);
    // Clone this repository into the new bare path
    self.git.clone(options, self.path, path, function(err, result) {
      if(err) return callback(err, result);
      new Repo(path, {is_bare:true}, callback);
    })
  })
}
+
// The diff from commit a to commit b, optionally restricted to the given file(s)
// a: the base commit
// b: the end commit
// paths: optional list of file paths on which to restrict the diff
// callback: (err, diff)
// NOTE(review): slice(0) keeps `a` in args, so b/paths are shifted one
// position off relative to the parameters — confirm (the XXX comment below
// suggests it behaves as intended for current callers).
Repo.prototype.diff = function(a, b, paths, callback) {
  var self = this;
  var args = Array.prototype.slice.call(arguments, 0); // XXX doesnt seem correct but it works for me
  callback = args.pop();
  b = args.length ? args.shift() : null;
  paths = args.length ? args.shift() : [];

  Commit.diff(this, a, b, paths, callback);
}

// The commit diff for the given commit
// commit: the commit name/id
// callback: (err, diffs)
//
// Returns array of diff objects
Repo.prototype.commit_diff = function(commit, callback) {
  Commit.diff(this, commit, callback);
}
+
// The list of alternates for this repo
// callback: (err, paths) — [] when no alternates file is configured
//
// Returns array of string (pathnames of alternates)
Repo.prototype.alternates = function(callback) {
  var alternates_path = "objects/info/alternates";
  var self = this;

  this.git.fs_exist(alternates_path, function(err, result) {
    if(err) return callback(err, result);
    // No alternates file means no alternates
    if(!result) return callback(null, []);
    self.git.fs_read(alternates_path, function(err, data) {
      if(err) return callback(err, data);
      // One alternate path per line
      callback(null, data.trim().split("\n"));
    })
  });
}
+
// Sets the alternates
// alts: array of string paths representing the alternates
// callback: (err, result)
// NOTE(review): fs_exist takes a callback; the fail_msg check after the
// loop only works if it calls back synchronously (the inline comment claims
// it is sync underneath) — confirm.
//
// Returns nothing
Repo.prototype.set_alternates = function(alts, callback) {
  var self = this;
  var fail_msg = null;
  // check all the entries
  for(var i = 0; i < alts.length; i++) {

    // Is sync underneath (to avoid to many files open)
    this.git.fs_exist(alts[i], function(err, result) {
      if(err) fail_msg = err;
      if(!result) fail_msg = "could not set alternates. alternate path " + alts[i] + " must exist";
    });
  }

  // If we have a fail message return
  if(fail_msg) { return callback(fail_msg, null); }
  // Otherwise let's set the alternate
  if(alts.length == 0) {
    this.git.fs_write('objects/info/alternates', '', callback);
  } else {
    this.git.fs_write('objects/info/alternates', alts.join("\n"), callback);
  }
}
+
// The commit log for a treeish
// commit: ref to log (default 'master'); path: optional path filter;
// options: extra git-log options ('pretty' is forced to "raw")
// callback: (err, commits)
//
// Returns array of commits
Repo.prototype.log = function(commit, path, options, callback) {
  var args = Array.prototype.slice.call(arguments, 0);
  callback = args.pop();
  var self = this;
  // Unpack variables
  commit = args.length ? args.shift() : 'master';
  path = args.length ? args.shift() : null;
  options = args.length ? args.shift() : {};
  // Merge in extra parameters
  options['pretty'] = "raw";
  // var arg = path ? [commit, '--', path] : [commit];
  // Extract the commits and parse the raw log output
  this.git.log(commit, path, options, function(err, commits) {
    if(err) return callback(err, commits);
    callback(null, Commit.list_from_string(self, commits))
  });
}
+
// Returns a list of commits that is in other_repo but not in self
// reference / other_reference: refs to compare (default 'master' each)
// callback: (err, commits)
// NOTE(review): Commit.find_all is callback-based; `commits` may still be
// filling when the final callback fires unless find_all completes
// synchronously — confirm.
//
// Returns array of commits
Repo.prototype.commit_deltas_from = function(other_repo, reference, other_reference, callback) {
  var args = Array.prototype.slice.call(arguments, 1);
  callback = args.pop();
  var self = this;
  // Unpack variables
  reference = args.length ? args.shift() : 'master';
  other_reference = args.length ? args.shift() : 'master';
  // Let's fetch revlist
  self.git.rev_list({}, reference, function(err, rev_text) {
    if(err) return callback(err, rev_text);
    var repo_refs = rev_text.trim().split("\n");

    other_repo.git.rev_list({}, other_reference, function(err, other_rev_text) {
      if(err) return callback(err, other_rev_text);
      var other_repo_refs = other_rev_text.trim().split("\n");

      // Subtract the references from other references
      // create map for the array to avoid binary searches
      var repo_ref_map = {};
      repo_refs.forEach(function(line) {
        repo_ref_map[line] = 1;
      });

      // Subtract one array from the other
      var intersection = other_repo_refs.filter(function(line) { return !repo_ref_map[line]; })
      // Returned commits
      var commits = [];
      // Process all the intersected values
      for(var i = 0; i < intersection.length; i++) {
        var ref = intersection[i];
        Commit.find_all(other_repo, ref, {max_count:1}, function(err, ref_commits) {
          commits.push(ref_commits[0]);
        })
      }

      // Return the commits
      callback(null, commits);
    })
  });
}
+
// An array of ref objects representing the refs in this repo
// callback: (err, refs) — heads, then tags, then remotes, concatenated
//
// Returns array of references (baked)
Repo.prototype.refs = function(callback) {
  var self = this;
  // Let's fetch all the references for the head
  Head.find_all(self, function(err, head_refs) {
    if(err) return callback(err, head_refs);
    // Fetch all the references for the tags
    Tag.find_all(self, function(err, tag_refs) {
      if(err) return callback(err, tag_refs);
      // Fetch all the remote references
      Remote.find_all(self, function(err, remote_refs) {
        if(err) return callback(err, remote_refs);
        // Return the complete list of references
        callback(null, head_refs.concat(tag_refs, remote_refs));
      });
    });
  });
}
+
+// Chomp text removing end carriage returns
+var chomp = function chomp(raw_text) {
+ return raw_text.replace(/(\n|\r)+$/, '');
+}
+
// The project's description, Taken directly from /description file in the repo
// callback: (err, description) — trailing newlines are stripped
//
// Return string
Repo.prototype.description = function(callback) {
  this.git.fs_read('description', function(err, data) {
    if(err) return callback(err, data);
    callback(null, chomp(data.toString()));
  });
}
+
// Update a reference
// head: reference name (written under refs/heads/)
// commit_sha: commit to update; must be a full 40-character sha
// callback: (err, commit_sha)
//
// Return commit_sha
Repo.prototype.update_ref = function(head, commit_sha, callback) {
  if(!commit_sha || commit_sha.length != 40) return callback('not a legal commit sha', null);
  this.git.fs_write("refs/heads/" + head, commit_sha, function(err, result) {
    if(err) return callback(err, result);
    callback(null, commit_sha);
  });
}

// Get a specific head by name
// head_name: name of the head
// callback: (err, head) — head is null when no head matches
//
// Return the head object
Repo.prototype.get_head = function(head_name, callback) {
  this.heads(function(err, heads) {
    if(err) return callback(err, heads);
    // Linear scan for the matching head name
    for(var i = 0; i < heads.length; i++) {
      if(heads[i].name == head_name) return callback(null, heads[i])
    }
    // No head found
    callback(null, null);
  });
}
+
// Return a blame object
// file: name of file we want to locate blame for
// commit: the commit sha for the blame
// callback: (err, blame)
//
// Return blame object
Repo.prototype.blame = function(file, commit, callback) {
  new Blame(this, file, commit, callback);
}

// Returns a stats object about the commits for the repo
// start: start commit or name (default 'master')
// max_count: maximum count traversing (default 10)
// skip: Number of commits to skip (default 0)
// callback: (err, stats)
Repo.prototype.commit_stats = function(start, max_count, skip, callback) {
  var args = Array.prototype.slice.call(arguments, 0);
  callback = args.pop();
  var self = this;
  // Unpack variables
  start = args.length ? args.shift() : 'master';
  max_count = args.length ? args.shift() : 10;
  skip = args.length ? args.shift() : 0;

  // Build options hash
  var options = {max_count:max_count, skip:skip};
  // Fetch all the commit stats
  CommitStats.find_all(self, start, options, callback);
}
+
// Commits current index
// message: the commit message
// callback: (err, result)
//
// Return true/false if commit worked
Repo.prototype.commit_index = function(message, callback) {
  this.git.commit({}, '-m' + message, callback);
}

// Commits all tracked and modified files (git commit -a)
// callback: (err, result)
//
// Return true/false if commit worked
Repo.prototype.commit_all = function(message, callback) {
  this.git.commit({}, '-a', '-m' + message, callback);
}

// Fetch the config for the repo
// NOTE(review): config_object is read here but never assigned anywhere in
// this file, so a fresh Config is created on every call — confirm whether
// caching was intended.
//
// Returns assoc array with config options
Repo.prototype.config = function(callback) {
  if(this.config_object) return callback(null, this.config_object);
  callback(null, new Config(this));
}
+
+// Gotten from
+var flatten = function(array) {
+ return array.reduce(function(a,b) {
+ return a.concat(b);
+ }, []);
+}
+
// Adds files to the index
// files: a file name, an array of file names, or several file-name arguments
// callback: (err, result)
Repo.prototype.add = function(files, callback) {
  var flattened_files = null;
  // Let's unpack the variables
  var args = Array.prototype.slice.call(arguments, 0);
  // Pop the callback
  var callback = args.pop();

  if(Array.isArray(files)) {
    flattened_files = flatten(files);
  } else if(args.length > 1){
    // Multiple file-name arguments were passed
    flattened_files = flatten(args);
  } else {
    flattened_files = [files];
  }
  // Add the parameters (leading options object, trailing callback)
  flattened_files.unshift({});
  flattened_files.push(callback);
  // Execute add function against git repo
  this.git.add.apply(this.git, flattened_files);
}

// Removes files to the index
// files: a file name, an array of file names, or several file-name arguments
// callback: (err, result)
Repo.prototype.remove = function(files, callback) {
  var flattened_files = null;
  // Let's unpack the variables
  var args = Array.prototype.slice.call(arguments, 0);
  // Pop the callback
  var callback = args.pop();

  if(Array.isArray(files)) {
    flattened_files = flatten(files);
  } else if(args.length > 1){
    // Multiple file-name arguments were passed
    flattened_files = flatten(args);
  } else {
    flattened_files = [files];
  }
  // Add the parameters (leading options object, trailing callback)
  flattened_files.unshift({});
  flattened_files.push(callback);
  // Execute remove function against git repo
  this.git.remove.apply(this.git, flattened_files);
}
+
// Fetch repo status
// callback: (err, status)
Repo.prototype.status = function(callback) {
  new Status(this, callback);
}

// Array of Remote objects representing the remote branches in
// this repo
// callback: (err, remotes)
//
// Return array of remote references (baked)
Repo.prototype.remotes = function(callback) {
  Remote.find_all(this, callback);
}

// Validate if something is a head
// callback: (err, exists) — exists is a boolean
//
// Return head
Repo.prototype.is_head = function(head_name, callback) {
  this.get_head(head_name, function(err, head) {
    if(err) return callback(err, head);
    var exists = head == null ? false : true;
    callback(null, exists);
  });
}

// Hand back a GitIndex wrapper for this repo
// callback: (err, index)
Repo.prototype.index = function(callback) {
  callback(null, new GitIndex(this));
}
+
diff --git a/node_modules/git/lib/git/repository.js b/node_modules/git/lib/git/repository.js
new file mode 100644
index 00000000..73c8ff23
--- /dev/null
+++ b/node_modules/git/lib/git/repository.js
@@ -0,0 +1,764 @@
+var util = require('util'),
+ GitObject = require('./git_object').GitObject,
+ fs = require('fs'),
+ LooseStorage = require('./loose_storage').LooseStorage,
+ PackStorage = require('./pack_storage').PackStorage,
+ BinaryParser = require('./binary_parser').BinaryParser;
+
+// Low-level repository accessor working directly against the .git directory
+// (loose objects and packfiles), without shelling out to the git binary.
+//   git_directory: absolute path to the .git directory
+//   options: optional settings (e.g. map_packfile to pre-cache pack objects)
+Repository = exports.Repository = function(git_directory, options) {
+  var _git_directory = git_directory;
+  var _options = options ? options : {};
+  var _packs = [];
+  var _loose = null;
+  var _already_searched = {};
+  var self = this;
+
+  Object.defineProperty(this, "git_directory", { get: function() { return _git_directory; }, set: function(value) { _git_directory = value; }, enumerable: true});
+  Object.defineProperty(this, "options", { get: function() { return _options; }, set: function(value) { _options = value; }, enumerable: true});
+  Object.defineProperty(this, "already_searched", { get: function() { return _already_searched; }, set: function(value) { _already_searched = value; }, enumerable: true});
+  Object.defineProperty(this, "packs", { get: function() { return _packs; }, set: function(value) { _packs = value; }, enumerable: true});
+  Object.defineProperty(this, "loose", { get: function() { return _loose; }, set: function(value) { _loose = value; }, enumerable: true});
+}
+
+// Chomp text removing end carriage returns
+var chomp = function chomp(raw_text) {
+ return raw_text.replace(/(\n|\r)+$/, '');
+}
+
+// Unimplemented stub — currently a no-op that returns undefined.
+// NOTE(review): no caller is visible in this chunk; confirm whether this is
+// dead code before relying on it.
+var truncate_array = function(array, sha) {
+
+}
+
+// takes the following options:
+//  :since - Time object specifying that you don't want commits BEFORE this
+//  :until - Time object specifying that you don't want commit AFTER this
+//  :first_parent - tells log to only walk first parent
+//  :path_limiter - string or array of strings to limit path
+//  :max_count - number to limit the output
+// NOTE(review): despite accepting `callback`, this returns the walk result
+// synchronously and never invokes the callback — confirm against callers.
+Repository.prototype.log = function(sha, options, callback) {
+  // Reset the visited-sha set so repeated log() calls start fresh
+  this.already_searched = {}
+  return walk_log(this, sha, options);
+}
+
+var close = function(repo) {
+ if(repo.packs) {
+ repo.packs.forEach(function(pack) {
+ pack.close();
+ });
+ }
+}
+
+// Resolve a path relative to the repository's .git directory.
+var git_path = function(repo, path) { return repo.git_directory + "/" + path; }
+
+// (Re)build the list of loose-object storages for the repo, including any
+// alternate object directories, and return it.
+var initloose = function(repo) {
+  repo.loaded = [];
+  repo.loose = [];
+  load_loose(repo, git_path(repo, 'objects'));
+  load_alternate_loose(repo, git_path(repo, 'objects'));
+  return repo.loose;
+}
+
+// Register `path` as loaded and, if the directory exists, attach a
+// LooseStorage for it. A failing statSync (missing dir) is silently ignored.
+var load_loose = function(repo, path) {
+  repo.loaded.push(path);
+  try {
+    fs.statSync(path);
+    repo.loose.push(new LooseStorage(path));
+  } catch (err) {
+    return;
+  }
+}
+
+// Follow the objects/info/alternates file (if present) and load each listed
+// alternate object directory as additional loose storage, recursively.
+var load_alternate_loose = function(repo, path) {
+  // load alternate loose too
+  var alt = path + '/info/alternates';
+  try {
+    fs.statSync(alt);
+    // Read and process all entries in the directory
+    var lines = fs.readFileSync(alt, 'utf8').split('\n');
+    // Drop a trailing empty line produced by a final newline
+    lines.length > 0 && lines[lines.length - 1] == '' ? lines.pop() : null;
+    // Iterate over alternate loose storage locations
+    lines.forEach(function(line) {
+      // Only load the path once
+      if(repo.loaded.indexOf(chomp(line)) == -1) {
+        // Relative entries ("..") are resolved against the .git directory
+        if(line.substr(0, 2) == "..") {
+          line = fs.realpathSync(repo.git_directory + "/" + line);
+        }
+
+        load_loose(repo, chomp(line));
+        load_alternate_loose(repo, chomp(line));
+      }
+    });
+  } catch(err) {}
+}
+
+// Close any previously-open packs, then (re)discover all packfiles for the
+// repo (including alternates) and return the resulting list.
+var initpacks = function(repo) {
+  close(repo);
+  repo.loaded_packs = [];
+  repo.packs = [];
+  load_packs(repo, git_path(repo, "objects/pack"));
+  load_alternate_packs(repo, git_path(repo, "objects"));
+  return repo.packs;
+}
+
+// Scan `path` for *.pack files and attach a PackStorage for each one.
+// Missing directories are ignored (statSync/readdirSync throw into the
+// empty catch).
+var load_packs = function(repo, path) {
+  repo.loaded_packs.push(path);
+  try {
+    fs.statSync(path);
+    // Read and process all entries in the directory
+    fs.readdirSync(path).forEach(function(entry) {
+      // If we have a pack file create a new storage object
+      if(entry.match(/\.pack$/i)) {
+        var pack = new PackStorage(path + "/" + entry);
+        // If we have specified the map for the pack then load the entire object map
+        if(repo.options["map_packfile"]) {
+          pack.cache_objects();
+        }
+        // Add pack to list of packs in the repo
+        repo.packs.push(pack)
+      }
+    });
+  } catch (err) {
+  }
+}
+
+// Follow info/alternates under `path` and load the pack directories of every
+// alternate object store, recursively. Missing files are silently ignored.
+var load_alternate_packs = function(repo, path) {
+  var alt = path + "/info/alternates";
+
+  try {
+    fs.statSync(alt);
+    // Read and process all entries in the directory
+    var lines = fs.readFileSync(alt, 'utf8').split('\n');
+    // Drop a trailing empty line produced by a final newline
+    lines.length > 0 && lines[lines.length - 1] == '' ? lines.pop() : null;
+
+    lines.forEach(function(line) {
+      // Relative entries ("..") are resolved against the .git directory
+      if(line.substr(0, 2) == "..") {
+        line = fs.realpathSync(repo.git_directory + "/" + line);
+      }
+
+      // Get pack file name
+      var full_pack = chomp(line) + "/pack";
+      if(repo.loaded_packs.indexOf(full_pack) == -1) {
+        load_packs(repo, full_pack);
+        load_alternate_packs(repo, chomp(line));
+      }
+    })
+  } catch(err) {
+  }
+}
+
+// Locate the raw object for a 40-char hex sha. Search order: packfiles,
+// then loose storage, then packfiles again (in case the object was packed
+// in the meantime). Throws the string "no such sha found" on failure.
+var get_raw_object_by_sha1 = function(repo, sha1o) {
+  if(!sha1o || sha1o == "" || sha1o.constructor != String) throw "no such sha found";
+
+  // Convert the hex string into a 20-byte binary string, two hex chars at a time
+  var sha1 = '';
+  for(var i = 0; i < sha1o.length; i = i + 2) {
+    sha1 = sha1 + String.fromCharCode(parseInt(sha1o.substr(i, 2), 16));
+  }
+  // Init packs if we have none set yet
+  if(!repo.packs) initpacks(repo);
+  // Try packs
+  var packs = repo.packs;
+  for(var i = 0; i < packs.length; i++) {
+    var o = packs[i].find(sha1);
+    if(o != null) return o;
+  }
+
+  if(!repo.loose) initloose(repo);
+  // Try loose storage
+  var looses = repo.loose;
+  for(var i = 0; i < looses.length; i++) {
+    var o = looses[i].find(sha1);
+    if(o) return o;
+  }
+
+  // try packs again maybe the object got packed in the meantime
+  initpacks(repo);
+  // Try packs
+  var packs = repo.packs;
+  for(var i = 0; i < packs.length; i++) {
+    var o = packs[i].find(sha1);
+    if(o != null) return o;
+  }
+
+  // No results throw an error that no sha pack object was found
+  throw "no such sha found";
+}
+
+Repository.prototype.get_object_by_sha1 = function(sha1) {
+ var r = get_raw_object_by_sha1(this, sha1);
+ if(!r) return null;
+ return GitObject.from_raw(r, this);
+}
+
+// returns true if the files in the path_limiter were changed or no path limiter
+// used by the log() function when passed with a path_limiter
+Repository.prototype.files_changed = function(tree_sha1, tree_sha2, path_limiter) {
+ if(path_limiter == null) return true;
+ // We got a path limiter, let's perform the diff to check for changes
+ var mod = this.quick_diff(tree_sha1, tree_sha2);
+ var files = mod.map(function(c) { return c[0]; });
+ path_limiter = Array.isArray(path_limiter) ? path_limiter : path_limiter != null ? [path_limiter] : [];
+
+ for(var i = 0; i < path_limiter.length; i++) {
+ if(files.indexOf(path_limiter[i]) != -1) return true;
+ }
+ return false;
+}
+
+// Returns the raw file contents of this sha
+Repository.prototype.cat_file = function(sha) {
+  return this.get_object_by_sha1(sha).raw_content;
+}
+
+// Returns the file size (as an int) of this sha
+Repository.prototype.cat_file_size = function(sha) {
+  return get_raw_object_by_sha1(this, sha).content.length;
+}
+
+// Returns the file type as string of this sha (commit/tree/blob/tag)
+Repository.prototype.cat_file_type = function(sha) {
+  return get_raw_object_by_sha1(this, sha).type;
+}
+
+// returns the raw (cat-file) output for a tree
+// if given a commit sha, it will print the tree of that commit
+// if given a path limiter array, it will limit the output to those
+// if asked for recursive trees, will traverse trees
+// NOTE(review): any error during the walk is swallowed and yields '' —
+// confirm callers rely on that.
+Repository.prototype.ls_tree = function(sha, paths, recursive) {
+  var self = this;
+  paths = paths ? paths : [];
+  recursive = recursive ? recursive : false;
+
+  try {
+    if(paths.length > 0) {
+      // pathing
+      var part = [];
+      paths.forEach(function(path) {
+        part = part.concat(self.ls_tree_path(sha, path));
+      })
+      // Return the parts
+      return part.join("\n");
+    } else {
+      return this.get_raw_tree(sha, recursive);
+    }
+  } catch (err) {
+    return '';
+  }
+}
+
+// Resolve `sha` (commit, tag or tree) to its tree sha, then return the raw
+// tree listing — recursively when requested. Returns null for other types.
+Repository.prototype.get_raw_tree = function(sha, recursive) {
+  var self = this;
+  recursive = recursive ? recursive : false;
+  var tree = null;
+
+  var o = get_raw_object_by_sha1(this, sha);
+  if(o.type == 'commit') {
+    tree = this.get_object_by_sha1(sha).tree;
+  } else if(o.type == 'tag') {
+    // A tag points at a commit; dereference it first
+    var commit_sha = this.get_object_by_sha1(sha).object;
+    tree = this.get_object_by_sha1(commit_sha).tree;
+  } else if(o.type == 'tree') {
+    tree = sha;
+  } else {
+    return null;
+  }
+
+  // If recursive execute next level of trees otherwise return the raw file
+  return recursive ? this.get_raw_trees(tree) : this.cat_file(tree);
+}
+
+// Grabs tree contents recursively,
+// e.g. `git ls-tree -r sha`
+// NOTE(review): the path-prefixing branch replaces every regex occurrence of
+// `name` in the line — a name that also appears in the mode/sha text would be
+// mangled; confirm before relying on it for unusual file names.
+Repository.prototype.get_raw_trees = function(sha, path) {
+  var self = this;
+  path = path ? path : '';
+  var out = '';
+
+  this.cat_file(sha).split('\n').forEach(function(line) {
+    var parts = line.split(/\s/);
+    var mode = parts[0], type = parts[1], sha = parts[2], name = parts[3];
+
+    if(type == 'tree') {
+      // Descend into sub-trees, extending the path prefix
+      var full_name = path.length == 0 ? name : (path + '/' + name);
+      out = out + self.get_raw_trees(sha, full_name);
+    } else if(path.length == 0) {
+      out = out + line + '\n';
+    } else {
+      out = out + line.replace(new RegExp(name, 'g'), (path + '/' + name)) + '\n';
+    }
+  });
+  // Return the out
+  return out;
+}
+
+// return array of tree entries matching `path` within the tree at `sha`.
+// `append` is an internal path prefix carried through the recursion.
+// Throws the string "no such path" when a component cannot be found.
+// TODO : refactor this to remove the fugly
+Repository.prototype.ls_tree_path = function(sha, path, append) {
+  var self = this;
+  var tree = this.get_raw_tree(sha);
+
+  if(path.match(/\//)) {
+    var paths = path.split('/');
+    // Drop a trailing empty segment from a path ending in '/'
+    paths.length > 0 && paths[paths.length - 1] == '' ? paths.pop() : null;
+    var last = path.substr(path.length - 1, 1);
+
+    if((last == '/') && (paths.length == 1)) {
+      // Terminal directory component: list that directory's contents
+      var append = append ? (append + "/" + paths[0]) : paths[0];
+      var dir_name = tree.split('\n').filter(function(p) { return p.split('\t')[1] == paths[0]; })[0];
+
+      if(dir_name == null) throw "no such path";
+      var next_sha = dir_name.split(/ |\t/)[2];
+      var tree = self.get_raw_tree(next_sha);
+
+      tree = tree.split('\n');
+
+      if(append) {
+        // Re-prefix each entry's file path with the accumulated directory
+        var mod_tree = [];
+        tree.forEach(function(ent) {
+          var parts = ent.split('\t');
+          var info = parts[0], fpath = parts[1];
+          mod_tree.push([info, (append + "/" + fpath)].join('\t'));
+        });
+        return mod_tree;
+      } else {
+        return tree;
+      }
+    } else {
+      // Intermediate component: resolve it and recurse on the remainder
+      if(tree == null) throw "no such path";
+      var next_path = paths.shift();
+      var dir_name = tree.split('\n').filter(function(p) { return p.split('\t')[1] == next_path; })[0];
+      if(dir_name == null) throw "no such path";
+      var next_sha = dir_name.split(/ |\t/)[2];
+      next_path = append ? (append + "/" + next_path) : next_path;
+
+      if(last == '/') {
+        return self.ls_tree_path(next_sha, (paths.join('/') + '/'), next_path);
+      } else {
+        return self.ls_tree_path(next_sha, paths.join('/'), next_path);
+      }
+    }
+  } else {
+    // Simple name: filter the current tree's entries for an exact match
+    if(tree == null) throw "no such path";
+    var tree = tree.split('\n');
+    tree = tree.filter(function(p) { return p.split('\t')[1] == path; });
+
+    if(append) {
+      var mod_tree = [];
+      tree.forEach(function(ent) {
+        var parts = ent.split('\t');
+        var info = parts[0], fpath = parts[1];
+        mod_tree.push([info, (append + '/' + fpath)].join('\t'));
+      });
+      return mod_tree;
+    } else {
+      return tree;
+    }
+  }
+}
+
+// takes 2 tree shas and recursively walks them to find out what
+// files or directories have been modified in them and returns on
+// array of changes
+// [ [full_path, 'added', tree1_hash, nil],
+// [full_path, 'removed', nil, tree2_hash],
+// [full_path, 'modified', tree1_hash, tree2_hash]
+// ]
+Repository.prototype.quick_diff = function(tree1, tree2, path, recurse) {
+ var self = this;
+ path = path ? path : '.';
+ recurse = recurse ? recurse : true;
+ // Handle empty trees
+ var changed = [];
+ if(tree1 == tree2) return changed;
+
+ var t1 = tree1 ? this.list_tree(tree1) : null;
+ var t2 = tree2 ? this.list_tree(tree2) : null;
+
+ // Check that we have tree 1 blob differences
+ if(t1) {
+ Object.keys(t1['blob']).forEach(function(file) {
+ var hsh = t1['blob'][file];
+ // Fetch the same file in tree 2
+ var t2_file = t2 ? t2['blob'][file] : null;
+ var full = path + "/" + file;
+ if(!t2_file) {
+ changed.push([full, 'added', hsh['sha'], null]); // not in parent
+ } else if(hsh['sha'] != t2_file['sha']) {
+ changed.push([full, 'modified', hsh['sha'], t2_file['sha']]); // file changed
+ }
+ });
+ }
+
+
+ // Check tree 2 blobs
+ if(t2) {
+ Object.keys(t2['blob']).forEach(function(file) {
+ var hsh = t2 ? t2['blob'][file] : null;
+ if(t1 == null || t1['blob'][file] == null) {
+ changed.push([path + "/" + file, 'removed', null, hsh['sha']]);
+ }
+ });
+ }
+
+ // Check for all the tree objects
+ if(t1) {
+ Object.keys(t1['tree']).forEach(function(dir) {
+ var hsh = t1['tree'][dir];
+ var t2_tree = t2 ? t2['tree'][dir] : null;
+ var full = path + "/" + dir;
+
+ if(!t2_tree) {
+ if(recurse) {
+ changed = changed.concat(self.quick_diff(hsh['sha'], null, full, true));
+ } else {
+ changed.push([full, 'added', hsh['sha', null]]);
+ }
+ } else if(hsh['sha'] != t2_tree['sha']) {
+ if(recurse) {
+ changed = changed.concat(self.quick_diff(hsh['sha'], t2_tree['sha'], full, true));
+ } else {
+ changed.push([full, 'modified', hsh['sha'], t2_tree['sha']]);
+ }
+ }
+ });
+ }
+
+ if(t2) {
+ Object.keys(t2['tree']).forEach(function(dir) {
+ var hsh = t2['tree'][dir];
+ var t1_tree = t1 ? t1['tree'][dir] : null;
+
+ if(!t1_tree) {
+ if(recurse) {
+ changed = changed.concat(self.quick_diff(null, hsh['sha'], full, true));
+ } else {
+ changed.push([full, 'removed', null, hsh['sha']]);
+ }
+ }
+ });
+ }
+ // Return all the changed files
+ return changed;
+}
+
+// returns a 2-d hash of the tree
+//  ['blob']['FILENAME'] = {:mode => '100644', :sha => SHA}
+//  ['tree']['DIRNAME'] = {:mode => '040000', :sha => SHA}
+// NOTE(review): `mode` is populated from entry.format_type (e.g. 'blob'),
+// not an octal mode string as the comment above claims — confirm whether
+// entry exposes a format_mode that should be used instead. quick_diff only
+// reads the 'sha' field, so this mismatch is currently inert.
+Repository.prototype.list_tree = function(sha) {
+  var data = {blob:{}, tree:{}, link:{}, commit:{}};
+  var object = this.get_object_by_sha1(sha);
+  object.entries.forEach(function(entry) {
+    data[entry.format_type][entry.name] = {mode:entry.format_type, sha:entry.sha1};
+  });
+
+  return data;
+}
+
+var walk_log = function(repo, sha, options, total_size) {
+ if(total_size == null) total_size = 0;
+ if(repo.already_searched[sha]) return [];
+ // Add the sha to the list of allready searched for sha's
+ repo.already_searched[sha] = true;
+ // Empty array
+ var array = [];
+ var o = null, commit_sha = null, c = null, output = null;
+
+ if(sha) {
+ // Get the raw object
+ o = get_raw_object_by_sha1(repo, sha);
+
+ // Create a git object from the raw object
+ if(o.type == "tag") {
+ commit_sha = repo.get_object_by_sha1(sha).object;
+ c = repo.get_object_by_sha1(commit_sha);
+ } else {
+ c = GitObject.from_raw(o, repo);
+ }
+
+ // If it is not a commit
+ if(c.type != "commit") return [];
+
+ // Add sha
+ var add_sha = true;
+ // Check if the commit should be in the results
+ if(options["since"] && (options["since"] && options["since"].constructor == Date) && (options["since"] > c.committer.date)) {
+ add_sha = false;
+ }
+ if(options["until"] && (options["until"] && options["until"].constructor == Date) && (options["until"] < c.committer.date)) {
+ add_sha = false;
+ }
+
+ // Follow all parents unless --first-parent is specified
+ var subarray = [];
+
+ if(c.parent.length == 0 && options["path_limiter"]) {
+ add_sha = false;
+ }
+
+ if(options["max_count"] == null || ((array.length + total_size) < options["max_count"])) {
+ if(options["path_limiter"] == null) {
+ output = c.raw_log(sha);
+ array.push([sha, output, c.committer.date]);
+ }
+
+ if(options["max_count"] != null && (array.length + total_size) >= options["max_count"]) {
+ return array;
+ }
+
+ for(var i = 0; i < c.parent.length; i++) {
+ var psha = c.parent[i];
+ var tree = repo.get_object_by_sha1(psha).tree;
+
+ if(psha && !repo.files_changed(c.tree, tree, options["path_limiter"])) {
+ add_sha = false;
+ }
+
+ // Walk the next level of the tree
+ var results = walk_log(repo, psha, options, (array.length + total_size));
+ subarray = subarray.concat(results);
+ if(options["first_parent"]) break;
+ }
+
+ if(options["path_limiter"] != null && add_sha) {
+ output = c.raw_log(sha);
+ array.push([sha, output, c.comitter.date]);
+ }
+
+ if(add_sha) {
+ array = array.concat(subarray);
+ }
+ }
+ }
+ // Return all the commits
+ return array;
+}
+
+var convert = function(d) {
+ return (
+ d.constructor === Date ? d :
+ d.constructor === Array ? new Date(d[0],d[1],d[2]) :
+ d.constructor === Number ? new Date(d) :
+ d.constructor === String ? new Date(d) :
+ typeof d === "object" ? new Date(d.year,d.month,d.date) :
+ NaN
+ );
+}
+
+var compare = function(a,b) {
+ return (
+ isFinite(a=convert(a).valueOf()) &&
+ isFinite(b=convert(b).valueOf()) ?
+    (a>b)-(a<b) :
+    NaN
+  );
+}
+
+// Returns a hash of the submodules: { <path> => { 'url' => <url>, 'id' => <sha> } }
+// Returns {} if no .gitmodules file was found
+// Parse the .gitmodules file at the given ref (default 'master') and call
+// back with a config object keyed by submodule section name, each entry
+// carrying its key/value pairs plus an 'id' taken from the tree.
+Submodule.config = function(repo, ref, callback) {
+  var args = Array.prototype.slice.call(arguments, 1);
+  callback = args.pop();
+  ref = args.length ? args.shift() : 'master';
+
+  // Fetch code
+  repo.commit(ref, function(err, commit) {
+    if(err) return callback(err, commit);
+    // Fetch the blob
+    var blob = commit.tree.find('.gitmodules');
+    // If there is no blob return an empty object
+    if(blob == null) return callback(null, {});
+    // Parse all the lines (normalizing CRLF line endings first)
+    var lines = blob.data.trim().replace(/\r\n?/g, "\n").split("\n");
+    var config = {};
+    var current = null;
+
+    lines.forEach(function(line) {
+      if(line.match(/^\[submodule "(.+)"\]$/)) {
+        // Section header: start a new submodule entry
+        var parts = line.match(/^\[submodule "(.+)"\]$/);
+        current = parts[1];
+        config[current] = {};
+        config[current]['id'] = commit.tree.find(current).id;
+      } else if(line.match(/^\t(\w+) = (.+)$/)) {
+        // Tab-indented key/value pair belonging to the current section
+        var parts = line.match(/^\t(\w+) = (.+)$/);
+        config[current][parts[1]] = parts[2];
+        if(parts[1] == 'path') {
+          // A 'path' key overrides the id with the tree entry at that path
+          config[current]['id'] = commit.tree.find(parts[2]).id;
+        }
+      } else {}
+    });
+    // Return the config
+    callback(null, config);
+  });
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/git/tag.js b/node_modules/git/lib/git/tag.js
new file mode 100644
index 00000000..731f3746
--- /dev/null
+++ b/node_modules/git/lib/git/tag.js
@@ -0,0 +1,41 @@
+var Commit = require('./commit').Commit;
+
+// Lightweight tag value object: a tag name bound to the Commit it points at.
+var Tag = exports.Tag = function(name, commit) {
+  var _name = name;
+  var _commit = commit;
+  // Define the properties
+  Object.defineProperty(this, "name", { get: function() { return _name; }, enumerable: true});
+  Object.defineProperty(this, "commit", { get: function() { return _commit; }, enumerable: true});
+}
+
+// Build the ref namespace prefix for a ref kind, e.g. 'tag' -> "refs/tags"
+var prefix = function(name) {
+  return "refs/" + name + "s";
+  }
+
+// List all tags in the repo, calling back with an array of Tag objects.
+// `options` is optional and currently unused by the ref lookup.
+Tag.find_all = function(repo, options, callback) {
+  var args = Array.prototype.slice.call(arguments, 1);
+  callback = args.pop();
+  options = args.length ? args.shift() : {};
+
+  // Let's fetch the references
+  repo.git.refs({}, prefix('tag'), function(err, refs) {
+    if(err) return callback(err, refs);
+    if(!refs) return callback(null, []);
+    // Map the references
+    var mapped_refs = refs.split(/\n/).map(function(ref) {
+      // Fetch the name and id for the reference
+      var split_reference = ref.split(/ /);
+      var name = split_reference[0];
+      var id = split_reference[1];
+      // Ensure we have the right id (if it's a tag it's the actual commit of the tag not the tag id)
+      // NOTE(review): commit_from_sha is used synchronously here — confirm it
+      // is not callback-based like the surrounding API.
+      var cid = repo.git.commit_from_sha(id)
+      if(cid == '') throw "unknown object type";
+      // Create a commit object with the id
+      var commit = new Commit(repo, cid);
+      // Wrap the commit object in a head object and return mapped object
+      return new Tag(name, commit);
+    })
+
+    callback(null, mapped_refs);
+  })
+}
diff --git a/node_modules/git/lib/git/tree.js b/node_modules/git/lib/git/tree.js
new file mode 100644
index 00000000..05358e07
--- /dev/null
+++ b/node_modules/git/lib/git/tree.js
@@ -0,0 +1,159 @@
+var util = require('util'),
+ Submodule = require('./sub_module').Submodule,
+ Blob = require('./blob').Blob;
+
+// Tree object: a directory entry in a git tree. `contents` is resolved
+// lazily on first access via lazy_reader.
+var Tree = exports.Tree = function(repo, id, mode, name, contents) {
+  var _repo = repo, _id = id, _contents = contents, _mode = mode, _name = name;
+
+  // Internal properties
+  Object.defineProperty(this, "repo", { get: function() { return _repo; }, set: function(value) { _repo = value; }, enumerable: true});
+  Object.defineProperty(this, "id", { get: function() { return _id; }, set: function(value) { _id = value; }, enumerable: true});
+  Object.defineProperty(this, "mode", { get: function() { return _mode; }, set: function(value) { _mode = value; }, enumerable: true});
+  Object.defineProperty(this, "name", { get: function() { return _name; }, set: function(value) { _name = value; }, enumerable: true});
+  Object.defineProperty(this, "contents", { get: function() {
+      _contents = lazy_reader(_repo, _id, 'contents', _contents);
+      return _contents;
+    }, set: function(value) { _contents = value; }, enumerable: true});
+
+  // Return the base name (last path segment of name, or null when unnamed)
+  Object.defineProperty(this, "basename", { get: function() {
+      if(_name) {
+        var parts = _name.split("/");
+        return parts[parts.length - 1];
+      } else {
+        return null;
+      }
+    }, enumerable: false});
+}
+
+// Resolve tree contents on demand; returns `variable` unchanged when it is
+// already populated.
+// NOTE(review): the `while(!done) {}` spin below can never observe done=true
+// if repo.git.ls_tree completes asynchronously — Node is single-threaded, so
+// the callback cannot run while this loop holds the event loop. This only
+// works if ls_tree invokes its callback synchronously; confirm.
+var lazy_reader = function(repo, id, type, variable) {
+  if(variable != null) return variable;
+  // Control the flow
+  var done = false;
+  var value = [];
+
+  // Fetch the content
+  repo.git.ls_tree(id, [], {}, function(err, text) {
+    if(err) return done = true;
+    // Split the output
+    var lines = text.split("\n");
+    // Create objects for all the entries
+    for(var i = 0; i < lines.length; i++) {
+      Tree.content_from_string(repo, lines[i], function(err, entry) {
+        value.push(entry);
+      });
+    }
+
+    done = true;
+  })
+
+  while(!done) {};
+  // NOTE(review): `value` is an array and therefore always truthy; the ''
+  // fallback below is unreachable.
+  return value ? value : '';
+}
+
+// Construct the contents of the tree
+//  repo: the current repo
+//  treeish: the reference
+//  paths: optional array of directory paths to restrict the tree
+Tree.construct = function(repo, treeish, paths, callback) {
+  // Set the path to the default if it's null
+  paths = paths ? paths : [];
+  // Run the ls_tree command
+  repo.git.ls_tree(treeish, paths, function(err, output) {
+    if(err) return callback(err, output);
+    construct_initialize(repo, treeish, output, callback);
+  });
+}
+
+// Create a new instance of the tree class from raw ls-tree output text and
+// hand it to the callback.
+var construct_initialize = function(repo, id, text, callback) {
+  // Create a tree object
+  var tree = new Tree(repo, id, null, null, []);
+  var lines = text.trim().split("\n");
+  // An empty listing splits to a single '' entry; normalize to no lines
+  if(lines.length == 1 && lines[0] == '') lines = [];
+  // Fetch all the lines
+  for(var i = 0; i < lines.length; i++) {
+    Tree.content_from_string(repo, lines[i], function(err, entry) {
+      if(err) return callback(err, entry);
+      tree.contents.push(entry);
+    });
+  }
+
+  // Remove all the null entries
+  tree.contents = tree.contents.filter(function(entry) { return entry ? true : false; });
+  // Return the object
+  callback(null, tree);
+}
+
+// Parse one ls-tree output line ("<mode> <type> <sha>\t<name>") into the
+// matching object (Tree / Blob / Submodule); errors on unknown types.
+Tree.content_from_string = function(repo, text, callback) {
+  // Split the text into parts and extract the variables
+  var parts = text.replace(/\t/, ' ').split(" ");
+  var mode = parts[0];
+  var type = parts[1];
+  var id = parts[2];
+  var name = parts[3];
+
+  if(type == "tree") {
+    callback(null, new Tree(repo, id, mode, name));
+  } else if(type == "blob") {
+    callback(null, new Blob(repo, id, mode, name));
+  } else if(type == "link") {
+    // Symlinks are represented as blobs
+    callback(null, new Blob(repo, id, mode, name));
+  } else if(type == "commit") {
+    // A commit entry inside a tree is a submodule reference
+    callback(null, new Submodule(repo, id, mode, name));
+  } else {
+    callback("invalid type: " + type, null);
+  }
+}
+
+// Find the named object in this tree's contents
+//
+// Examples
+//   Repo.new('/path/to/grit').tree/'lib'
+//   // => //
+//   Repo.new('/path/to/grit').tree/'README.txt'
+//   // => //
+//
+// Returns Grit::Blob or Grit::Tree or nil if not found
+// NOTE(review): for slash-separated paths this returns the Array produced by
+// map (whose last element is the resolved object), not the object itself —
+// confirm whether callers expect a fold/inject here instead.
+Tree.prototype.find = function(file) {
+  var self = this;
+
+  if(file.match(/\//)) {
+    var paths = file.split('/');
+    // Drop a trailing empty segment from a path ending in '/'
+    paths.length > 0 && paths[paths.length - 1] == '' ? paths.pop() : null;
+    // Walk one segment at a time; `self` becomes null once a lookup misses
+    return paths.map(function(x) {
+      return self && (self = self.find(x));
+    });
+  } else {
+    var results = self.contents.filter(function(c) {
+      return c.name == file;
+    })
+
+    return results.length == 1 ? results[0] : null;
+  }
+}
+
+// Build a Tree for `repo`, copying any provided attributes onto it, and
+// hand it to the callback. `attributes` is optional.
+Tree.create = function(repo, attributes, callback) {
+  var args = Array.prototype.slice.call(arguments, 1);
+  callback = args.pop();
+  attributes = args.length ? args.shift() : {};
+
+  var tree = new Tree(repo);
+  for(var name in attributes) {
+    tree[name] = attributes[name];
+  }
+
+  callback(null, tree);
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/node_modules/git/lib/git/user_info.js b/node_modules/git/lib/git/user_info.js
new file mode 100644
index 00000000..88b43d98
--- /dev/null
+++ b/node_modules/git/lib/git/user_info.js
@@ -0,0 +1,42 @@
+var util = require('util'),
+ StringUtil = require('../sprintf/sprintf').StringUtil;
+
+// Parse a git ident string ("Name <email> <epoch-seconds> ±HHMM") into its
+// name / email / date / offset parts. Falls back to partial parsing when the
+// full pattern does not match.
+var UserInfo = exports.UserInfo = function(string) {
+  var _email = '', _date = new Date(), _offset = 0, _name = '';
+
+  // Parse the content
+  var match_results = string.match(/^(.*?) <(.*)> (\d+) ([+-])0*(\d+?)$/);
+  // If we don't have a correct match set parse it partially
+  if(!match_results) {
+    if(string.match(/<.+>/)) {
+      var sub_match = string.match(/(.*) <(.+?)>/);
+      _name = sub_match[1];
+      _email = sub_match[2];
+    } else {
+      _name = string;
+    }
+  } else {
+    _name = match_results[1];
+    _email = match_results[2];
+    // NOTE(review): parseInt is applied to the product, not the string —
+    // works because JS coerces, but parseInt(s, 10) * 1000 would be cleaner
+    _date = new Date(parseInt(match_results[3] * 1000));
+    // Offset stored as a signed HHMM integer with leading zeros stripped,
+    // e.g. "+0200" -> 200, "-0930" -> -930
+    _offset = (match_results[4] == "-" ? -1 : 1) * parseInt(match_results[5]);
+  }
+
+  // Define properties
+  Object.defineProperty(this, "name", { get: function() { return _name; }, enumerable: true});
+  Object.defineProperty(this, "email", { get: function() { return _email; }, enumerable: true});
+  Object.defineProperty(this, "date", { get: function() { return _date; }, enumerable: true});
+  Object.defineProperty(this, "offset", { get: function() { return _offset; }, enumerable: true});
+}
+
+UserInfo.prototype.toString = function() {
+ // Ensure correct formating for the offset
+ var offset_str = this.offset.toString();
+ var add_string = '';
+ if(offset_str.length < 5) {
+ for(var i = 0; i < (5 - offset_str.length); i++) { add_string += '0'; }
+ offset_str = offset_str.substr(0, 1) + add_string + offset_str.substr(1);
+ }
+ // Return the userinfo as a string
+ return "" + this.name + " <" + this.email + "> " + (this.date.getTime()/1000) + " " + offset_str;
+}
\ No newline at end of file
diff --git a/node_modules/git/lib/sprintf/sprintf.js b/node_modules/git/lib/sprintf/sprintf.js
new file mode 100644
index 00000000..fe1bdc40
--- /dev/null
+++ b/node_modules/git/lib/sprintf/sprintf.js
@@ -0,0 +1,100 @@
+/**
+sprintf() for JavaScript 0.6
+
+Copyright (c) Alexandru Marasteanu
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of sprintf() for JavaScript nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL Alexandru Marasteanu BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+Changelog:
+2007.04.03 - 0.1:
+ - initial release
+2007.09.11 - 0.2:
+ - feature: added argument swapping
+2007.09.17 - 0.3:
+ - bug fix: no longer throws exception on empty paramenters (Hans Pufal)
+2007.10.21 - 0.4:
+ - unit test and patch (David Baird)
+2010.05.09 - 0.5:
+ - bug fix: 0 is now preceeded with a + sign
+ - bug fix: the sign was not at the right position on padded results (Kamal Abdali)
+ - switched from GPL to BSD license
+2010.05.22 - 0.6:
+ - reverted to 0.4 and fixed the bug regarding the sign of the number 0
+ Note:
+ Thanks to Raphael Pigulla (http://www.n3rd.org/)
+ who warned me about a bug in 0.5, I discovered that the last update was
+ a regress. I appologize for that.
+**/
+
+// Namespace object exposing the string helpers defined below.
+var StringUtil = exports.StringUtil = function() {};
+StringUtil.str_repeat = str_repeat;
+StringUtil.sprintf = sprintf;
+
+function str_repeat(i, m) {
+ for (var o = []; m > 0; o[--m] = i);
+ return o.join('');
+}
+
+// C-style sprintf(format, ...args). Supports %% escapes and the conversions
+// b, c, d, e, f, o, s, u, x, X with optional argument swapping (%1$s),
+// '+' sign flag, zero/custom padding, '-' left-align, width and precision.
+// Throws a string on missing arguments, type mismatches or bad directives.
+function sprintf() {
+  // NOTE(review): `s` is initialized to '' and never reassigned, so the
+  // `s.length` / `s +` uses below are inert carried-over scaffolding.
+  var i = 0, a, f = arguments[i++], o = [], m, p, c, x, s = '';
+  while (f) {
+    // Literal run up to the next '%'
+    if (m = /^[^\x25]+/.exec(f)) {
+      o.push(m[0]);
+    }
+    // Escaped percent: '%%'
+    else if (m = /^\x25{2}/.exec(f)) {
+      o.push('%');
+    }
+    // Conversion directive: %[arg$][+][0|'pad][-][width][.precision]<conv>
+    else if (m = /^\x25(?:(\d+)\$)?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-fosuxX])/.exec(f)) {
+      if (((a = arguments[m[1] || i++]) == null) || (a == undefined)) {
+        throw('Too few arguments.');
+      }
+      if (/[^s]/.test(m[7]) && (typeof(a) != 'number')) {
+        throw('Expecting number but found ' + typeof(a));
+      }
+      switch (m[7]) {
+        case 'b': a = a.toString(2); break;
+        case 'c': a = String.fromCharCode(a); break;
+        case 'd': a = parseInt(a); break;
+        case 'e': a = m[6] ? a.toExponential(m[6]) : a.toExponential(); break;
+        case 'f': a = m[6] ? parseFloat(a).toFixed(m[6]) : parseFloat(a); break;
+        case 'o': a = a.toString(8); break;
+        case 's': a = ((a = String(a)) && m[6] ? a.substring(0, m[6]) : a); break;
+        case 'u': a = Math.abs(a); break;
+        case 'x': a = a.toString(16); break;
+        case 'X': a = a.toString(16).toUpperCase(); break;
+      }
+      // Apply '+' sign flag for signed numeric conversions
+      a = (/[def]/.test(m[7]) && m[2] && a >= 0 ? '+'+ a : a);
+      // Resolve pad character ('0', custom 'c, or space) and pad width
+      c = m[3] ? m[3] == '0' ? '0' : m[3].charAt(1) : ' ';
+      x = m[5] - String(a).length - s.length;
+      p = m[5] ? str_repeat(c, x) : '';
+      // '-' flag left-aligns; otherwise padding goes in front
+      o.push(s + (m[4] ? a + p : p + a));
+    }
+    else {
+      throw('Huh ?!');
+    }
+    f = f.substring(m[0].length);
+  }
+  return o.join('');
+}
diff --git a/node_modules/git/lib/zlib/zlib.js b/node_modules/git/lib/zlib/zlib.js
new file mode 100644
index 00000000..e4b46fe4
--- /dev/null
+++ b/node_modules/git/lib/zlib/zlib.js
@@ -0,0 +1,1172 @@
+/*
+ Copyright 2008,2009
+ Matthias Ehmann,
+ Michael Gerhaeuser,
+ Carsten Miller,
+ Bianca Valentin,
+ Alfred Wassermann,
+ Peter Wilfahrt
+
+ This file is part of JSXGraph.
+
+ JSXGraph is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Lesser General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ JSXGraph is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public License
+ along with JSXGraph. If not, see http://www.gnu.org/licenses/.
+*/
+
+/**
+ * @fileoverview Utilities for uncompressing and base64 decoding
+ */
+
+/**
+ * @class Util class
+ * Class for gunzipping, unzipping and base64 decoding of files.
+ * It is used for reading GEONExT, Geogebra and Intergeo files.
+ *
+ * Only Huffman codes are decoded in gunzip.
+ * The code is based on the source code for gunzip.c by Pasi Ojala
+ * @see http://www.cs.tut.fi/~albert/Dev/gunzip/gunzip.c
+ * @see http://www.cs.tut.fi/~albert
+ */
+var util = require('util');
+var Zlib = exports.Zlib = {};
+
+/**
+ * Unzip zip files
+ */
+Zlib.Unzip = function (barray){
+ var outputArr = [],
+ output = "",
+ debug = false,
+ gpflags,
+ files = 0,
+ unzipped = [],
+ crc,
+ buf32k = new Array(32768),
+ bIdx = 0,
+ modeZIP=false,
+
+ CRC, SIZE,
+
+ bitReverse = [
+ 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0,
+ 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0,
+ 0x08, 0x88, 0x48, 0xc8, 0x28, 0xa8, 0x68, 0xe8,
+ 0x18, 0x98, 0x58, 0xd8, 0x38, 0xb8, 0x78, 0xf8,
+ 0x04, 0x84, 0x44, 0xc4, 0x24, 0xa4, 0x64, 0xe4,
+ 0x14, 0x94, 0x54, 0xd4, 0x34, 0xb4, 0x74, 0xf4,
+ 0x0c, 0x8c, 0x4c, 0xcc, 0x2c, 0xac, 0x6c, 0xec,
+ 0x1c, 0x9c, 0x5c, 0xdc, 0x3c, 0xbc, 0x7c, 0xfc,
+ 0x02, 0x82, 0x42, 0xc2, 0x22, 0xa2, 0x62, 0xe2,
+ 0x12, 0x92, 0x52, 0xd2, 0x32, 0xb2, 0x72, 0xf2,
+ 0x0a, 0x8a, 0x4a, 0xca, 0x2a, 0xaa, 0x6a, 0xea,
+ 0x1a, 0x9a, 0x5a, 0xda, 0x3a, 0xba, 0x7a, 0xfa,
+ 0x06, 0x86, 0x46, 0xc6, 0x26, 0xa6, 0x66, 0xe6,
+ 0x16, 0x96, 0x56, 0xd6, 0x36, 0xb6, 0x76, 0xf6,
+ 0x0e, 0x8e, 0x4e, 0xce, 0x2e, 0xae, 0x6e, 0xee,
+ 0x1e, 0x9e, 0x5e, 0xde, 0x3e, 0xbe, 0x7e, 0xfe,
+ 0x01, 0x81, 0x41, 0xc1, 0x21, 0xa1, 0x61, 0xe1,
+ 0x11, 0x91, 0x51, 0xd1, 0x31, 0xb1, 0x71, 0xf1,
+ 0x09, 0x89, 0x49, 0xc9, 0x29, 0xa9, 0x69, 0xe9,
+ 0x19, 0x99, 0x59, 0xd9, 0x39, 0xb9, 0x79, 0xf9,
+ 0x05, 0x85, 0x45, 0xc5, 0x25, 0xa5, 0x65, 0xe5,
+ 0x15, 0x95, 0x55, 0xd5, 0x35, 0xb5, 0x75, 0xf5,
+ 0x0d, 0x8d, 0x4d, 0xcd, 0x2d, 0xad, 0x6d, 0xed,
+ 0x1d, 0x9d, 0x5d, 0xdd, 0x3d, 0xbd, 0x7d, 0xfd,
+ 0x03, 0x83, 0x43, 0xc3, 0x23, 0xa3, 0x63, 0xe3,
+ 0x13, 0x93, 0x53, 0xd3, 0x33, 0xb3, 0x73, 0xf3,
+ 0x0b, 0x8b, 0x4b, 0xcb, 0x2b, 0xab, 0x6b, 0xeb,
+ 0x1b, 0x9b, 0x5b, 0xdb, 0x3b, 0xbb, 0x7b, 0xfb,
+ 0x07, 0x87, 0x47, 0xc7, 0x27, 0xa7, 0x67, 0xe7,
+ 0x17, 0x97, 0x57, 0xd7, 0x37, 0xb7, 0x77, 0xf7,
+ 0x0f, 0x8f, 0x4f, 0xcf, 0x2f, 0xaf, 0x6f, 0xef,
+ 0x1f, 0x9f, 0x5f, 0xdf, 0x3f, 0xbf, 0x7f, 0xff
+ ],
+
+ cplens = [
+ 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
+ 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
+ ],
+
+ cplext = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
+ 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 99, 99
+ ], /* 99==invalid */
+
+ cpdist = [
+ 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0007, 0x0009, 0x000d,
+ 0x0011, 0x0019, 0x0021, 0x0031, 0x0041, 0x0061, 0x0081, 0x00c1,
+ 0x0101, 0x0181, 0x0201, 0x0301, 0x0401, 0x0601, 0x0801, 0x0c01,
+ 0x1001, 0x1801, 0x2001, 0x3001, 0x4001, 0x6001
+ ],
+
+ cpdext = [
+ 0, 0, 0, 0, 1, 1, 2, 2,
+ 3, 3, 4, 4, 5, 5, 6, 6,
+ 7, 7, 8, 8, 9, 9, 10, 10,
+ 11, 11, 12, 12, 13, 13
+ ],
+
+ border = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15],
+
+ bA = barray,
+
+ bytepos=0,
+ bitpos=0,
+ bb = 1,
+ bits=0,
+
+ NAMEMAX = 256,
+
+ nameBuf = [],
+
+ fileout;
+
+ function readByte(){
+ bits+=8;
+ if (bytepos");
+ // util.debug(bA[bytepos++].toString(16))
+ // return String.fromCharCode(bA[bytepos++]);
+ return bA[bytepos++];
+ } else
+ return -1;
+ };
+
+ function byteAlign(){
+ bb = 1;
+ };
+
+ function readBit(){
+ var carry;
+ bits++;
+ carry = (bb & 1);
+ bb >>= 1;
+ if (bb==0){
+ bb = readByte();
+ carry = (bb & 1);
+ bb = (bb>>1) | 0x80;
+ }
+ return carry;
+ };
+
+ function readBits(a) {
+ var res = 0,
+ i = a;
+
+ while(i--) {
+ res = (res<<1) | readBit();
+ }
+ if(a) {
+ res = bitReverse[res]>>(8-a);
+ }
+ return res;
+ };
+
+ function flushBuffer(){
+ //document.write('FLUSHBUFFER:'+buf32k);
+ bIdx = 0;
+ };
+ function addBuffer(a){
+ SIZE++;
+ //CRC=updcrc(a,crc);
+ buf32k[bIdx++] = a;
+ outputArr.push(String.fromCharCode(a));
+ //output+=String.fromCharCode(a);
+ if(bIdx==0x8000){
+ //document.write('ADDBUFFER:'+buf32k);
+ bIdx=0;
+ }
+ };
+
+ function HufNode() {
+ this.b0=0;
+ this.b1=0;
+ this.jump = null;
+ this.jumppos = -1;
+ };
+
+ var LITERALS = 288;
+
+ var literalTree = new Array(LITERALS);
+ var distanceTree = new Array(32);
+ var treepos=0;
+ var Places = null;
+ var Places2 = null;
+
+ var impDistanceTree = new Array(64);
+ var impLengthTree = new Array(64);
+
+ var len = 0;
+ var fpos = new Array(17);
+ fpos[0]=0;
+ var flens;
+ var fmax;
+
+ function IsPat() {
+ while (1) {
+ if (fpos[len] >= fmax)
+ return -1;
+ if (flens[fpos[len]] == len)
+ return fpos[len]++;
+ fpos[len]++;
+ }
+ };
+
+ function Rec() {
+ var curplace = Places[treepos];
+ var tmp;
+ if (debug)
+ util.debug("len: " + len + " treepos: " + treepos)
+ if(len==17) { //war 17
+ return -1;
+ }
+ treepos++;
+ len++;
+
+ tmp = IsPat();
+ if (debug)
+ util.debug("IsPat " + tmp)
+ if(tmp >= 0) {
+ curplace.b0 = tmp; /* leaf cell for 0-bit */
+ if (debug)
+ util.debug("b0 " + curplace.b0)
+ } else {
+ /* Not a Leaf cell */
+ curplace.b0 = 0x8000;
+ if (debug)
+ util.debug("b0 " + curplace.b0)
+ if(Rec())
+ return -1;
+ }
+ tmp = IsPat();
+ if(tmp >= 0) {
+ curplace.b1 = tmp; /* leaf cell for 1-bit */
+ if (debug)
+ util.debug("b1 " + curplace.b1);
+ curplace.jump = null; /* Just for the display routine */
+ } else {
+ /* Not a Leaf cell */
+ curplace.b1 = 0x8000;
+ if (debug)
+ util.debug("b1 " + curplace.b1);
+ curplace.jump = Places[treepos];
+ curplace.jumppos = treepos;
+ if(Rec())
+ return -1;
+ }
+ len--;
+ return 0;
+ };
+
+ function CreateTree(currentTree, numval, lengths, show) {
+ var i;
+ /* Create the Huffman decode tree/table */
+ //document.write("
createtree
");
+ if (debug)
+ util.debug("currentTree " + currentTree + " numval " + numval + " lengths " + lengths + " show " + show);
+ Places = currentTree;
+ treepos=0;
+ flens = lengths;
+ fmax = numval;
+ for (i=0;i<17;i++)
+ fpos[i] = 0;
+ len = 0;
+ if(Rec()) {
+ //fprintf(stderr, "invalid huffman tree\n");
+ if (debug)
+ util.debug("invalid huffman tree");
+ return -1;
+ }
+ if (debug){
+ util.debug("Tree: " + Places.length);
+ for (var a=0;a<32;a++){
+ util.debug("Places[" + a + "].b0=" + Places[a].b0);
+ util.debug("Places[" + a + "].b1=" + Places[a].b1);
+ }
+ }
+
+ /*if(show) {
+ var tmp;
+ for(tmp=currentTree;tmpjump?tmp->jump-currentTree:0,(tmp->jump?tmp->jump-currentTree:0)*6+0xcf0);
+ if(!(tmp.b0 & 0x8000)) {
+ //fprintf(stdout, " 0x%03x (%c)", tmp->b0,(tmp->b0<256 && isprint(tmp->b0))?tmp->b0:'�');
+ }
+ if(!(tmp.b1 & 0x8000)) {
+ if((tmp.b0 & 0x8000))
+ fprintf(stdout, " ");
+ fprintf(stdout, " 0x%03x (%c)", tmp->b1,(tmp->b1<256 && isprint(tmp->b1))?tmp->b1:'�');
+ }
+ fprintf(stdout, "\n");
+ }
+ }*/
+ return 0;
+ };
+
+ function DecodeValue(currentTree) {
+ var len, i,
+ xtreepos=0,
+ X = currentTree[xtreepos],
+ b;
+
+ /* decode one symbol of the data */
+ while(1) {
+ b=readBit();
+ if (debug)
+ util.debug("b=" + b);
+ if(b) {
+ if(!(X.b1 & 0x8000)){
+ if (debug)
+ util.debug("ret1");
+ return X.b1; /* If leaf node, return data */
+ }
+ X = X.jump;
+ len = currentTree.length;
+ for (i=0;i>1);
+ if(j > 23) {
+ j = (j<<1) | readBit(); /* 48..255 */
+
+ if(j > 199) { /* 200..255 */
+ j -= 128; /* 72..127 */
+ j = (j<<1) | readBit(); /* 144..255 << */
+ } else { /* 48..199 */
+ j -= 48; /* 0..151 */
+ if(j > 143) {
+ j = j+136; /* 280..287 << */
+ /* 0..143 << */
+ }
+ }
+ } else { /* 0..23 */
+ j += 256; /* 256..279 << */
+ }
+ if(j < 256) {
+ addBuffer(j);
+ //document.write("out:"+String.fromCharCode(j));
+ /*fprintf(errfp, "@%d %02x\n", SIZE, j);*/
+ } else if(j == 256) {
+ /* EOF */
+ break;
+ } else {
+ var len, dist;
+
+ j -= 256 + 1; /* bytes + EOF */
+ len = readBits(cplext[j]) + cplens[j];
+
+ j = bitReverse[readBits(5)]>>3;
+ if(cpdext[j] > 8) {
+ dist = readBits(8);
+ dist |= (readBits(cpdext[j]-8)<<8);
+ } else {
+ dist = readBits(cpdext[j]);
+ }
+ dist += cpdist[j];
+
+ /*fprintf(errfp, "@%d (l%02x,d%04x)\n", SIZE, len, dist);*/
+ for(j=0;jparam: "+literalCodes+" "+distCodes+" "+lenCodes+"
");
+ for(j=0; j<19; j++) {
+ ll[j] = 0;
+ }
+
+ // Get the decode tree code lengths
+
+ //document.write("
");
+ for(j=0; jll:'+ll);
+ len = distanceTree.length;
+ for (i=0; itree created');
+
+ //read in literal and distance code lengths
+ n = literalCodes + distCodes;
+ i = 0;
+ var z=-1;
+ if (debug)
+ util.debug("n=" + n + " bits: " + bits);
+ while(i < n) {
+ z++;
+ j = DecodeValue(distanceTree);
+ if (debug)
+ util.debug("" + z + " i:" + i + " decode: " + j + " bits" + bits);
+ if(j<16) { // length of code in bits (0..15)
+ ll[i++] = j;
+ } else if(j==16) { // repeat last length 3 to 6 times
+ var l;
+ j = 3 + readBits(2);
+ if(i+j > n) {
+ flushBuffer();
+ return 1;
+ }
+ l = i ? ll[i-1] : 0;
+ while(j--) {
+ ll[i++] = l;
+ }
+ } else {
+ if(j==17) { // 3 to 10 zero length codes
+ j = 3 + readBits(3);
+ } else { // j == 18: 11 to 138 zero length codes
+ j = 11 + readBits(7);
+ }
+ if(i+j > n) {
+ flushBuffer();
+ return 1;
+ }
+ while(j--) {
+ ll[i++] = 0;
+ }
+ }
+ }
+ /*for(j=0; j= 256) { // In C64: if carry set
+ var len, dist;
+ j -= 256;
+ if(j == 0) {
+ // EOF
+ break;
+ }
+ j--;
+ len = readBits(cplext[j]) + cplens[j];
+
+ j = DecodeValue(distanceTree);
+ if(cpdext[j] > 8) {
+ dist = readBits(8);
+ dist |= (readBits(cpdext[j]-8)<<8);
+ } else {
+ dist = readBits(cpdext[j]);
+ }
+ dist += cpdist[j];
+ while(len--) {
+ var c = buf32k[(bIdx - dist) & 0x7fff];
+ addBuffer(c);
+ }
+ } else {
+ addBuffer(j);
+ }
+ }
+ }
+ } while(!last);
+ flushBuffer();
+
+ byteAlign();
+ return 0;
+};
+
+Zlib.Unzip.prototype.unzipFile = function(name) {
+ var i;
+ this.unzip();
+ //alert(unzipped[0][1]);
+ for (i=0;i");
+ }
+ */
+ //alert(bA);
+ nextFile();
+ return unzipped;
+ };
+
+ function nextFile(){
+ if (debug)
+ util.debug("NEXTFILE");
+ outputArr = [];
+ var tmp = [];
+ modeZIP = false;
+ tmp[0] = readByte();
+ tmp[1] = readByte();
+
+ if (debug)
+ util.debug("type: " + tmp[0] + " " + tmp[1]);
+ if (tmp[0] == parseInt("78",16) && (tmp[1] == parseInt("156",10) || tmp[1] == parseInt("1", 10))){ //GZIP
+ if (debug)
+ util.debug("GEONExT-GZIP");
+ DeflateLoop();
+ if (debug)
+ util.debug(outputArr.join(''));
+ // unzipped[files] = new Array(2);
+ unzipped[files] = outputArr.join('');
+ files++;
+ }
+ if (tmp[0] == parseInt("1f",16) && tmp[1] == parseInt("8b",16)){ //GZIP
+ if (debug)
+ util.debug("GZIP");
+ //DeflateLoop();
+ skipdir();
+ if (debug)
+ util.debug(outputArr.join(''));
+ unzipped[files] = new Array(2);
+ unzipped[files][0] = outputArr.join('');
+ unzipped[files][1] = "file";
+ files++;
+ }
+ if (tmp[0] == parseInt("50",16) && tmp[1] == parseInt("4b",16)){ //ZIP
+ modeZIP = true;
+ tmp[2] = readByte();
+ tmp[3] = readByte();
+ if (tmp[2] == parseInt("3",16) && tmp[3] == parseInt("4",16)){
+ //MODE_ZIP
+ tmp[0] = readByte();
+ tmp[1] = readByte();
+ if (debug)
+ util.debug("ZIP-Version: "+tmp[1]+" "+tmp[0]/10+"."+tmp[0]%10);
+
+ gpflags = readByte();
+ gpflags |= (readByte()<<8);
+ if (debug)
+ util.debug("gpflags: "+gpflags);
+
+ var method = readByte();
+ method |= (readByte()<<8);
+ if (debug)
+ util.debug("method: "+method);
+
+ readByte();
+ readByte();
+ readByte();
+ readByte();
+
+ var crc = readByte();
+ crc |= (readByte()<<8);
+ crc |= (readByte()<<16);
+ crc |= (readByte()<<24);
+
+ var compSize = readByte();
+ compSize |= (readByte()<<8);
+ compSize |= (readByte()<<16);
+ compSize |= (readByte()<<24);
+
+ var size = readByte();
+ size |= (readByte()<<8);
+ size |= (readByte()<<16);
+ size |= (readByte()<<24);
+
+ if (debug)
+ util.debug("local CRC: "+crc+"\nlocal Size: "+size+"\nlocal CompSize: "+compSize);
+
+ var filelen = readByte();
+ filelen |= (readByte()<<8);
+
+ var extralen = readByte();
+ extralen |= (readByte()<<8);
+
+ if (debug)
+ util.debug("filelen "+filelen);
+ i = 0;
+ nameBuf = [];
+ while (filelen--){
+ var c = readByte();
+ if (c == "/" | c ==":"){
+ i = 0;
+ } else if (i < NAMEMAX-1)
+ nameBuf[i++] = String.fromCharCode(c);
+ }
+ if (debug)
+ util.debug("nameBuf: "+nameBuf);
+
+ //nameBuf[i] = "\0";
+ if (!fileout)
+ fileout = nameBuf;
+
+ var i = 0;
+ while (i < extralen){
+ c = readByte();
+ i++;
+ }
+
+ CRC = 0xffffffff;
+ SIZE = 0;
+
+ if (size = 0 && fileOut.charAt(fileout.length-1)=="/"){
+ //skipdir
+ if (debug)
+ util.debug("skipdir");
+ }
+ if (method == 8){
+ DeflateLoop();
+ if (debug)
+ util.debug(outputArr.join(''));
+ unzipped[files] = new Array(2);
+ unzipped[files][0] = outputArr.join('');
+ unzipped[files][1] = nameBuf.join('');
+ files++;
+ //return outputArr.join('');
+ }
+ skipdir();
+ }
+ }
+ };
+
+function skipdir(){
+ var crc,
+ tmp = [],
+ compSize, size, os, i, c;
+
+ if ((gpflags & 8)) {
+ tmp[0] = readByte();
+ tmp[1] = readByte();
+ tmp[2] = readByte();
+ tmp[3] = readByte();
+
+ if (tmp[0] == parseInt("50",16) &&
+ tmp[1] == parseInt("4b",16) &&
+ tmp[2] == parseInt("07",16) &&
+ tmp[3] == parseInt("08",16))
+ {
+ crc = readByte();
+ crc |= (readByte()<<8);
+ crc |= (readByte()<<16);
+ crc |= (readByte()<<24);
+ } else {
+ crc = tmp[0] | (tmp[1]<<8) | (tmp[2]<<16) | (tmp[3]<<24);
+ }
+
+ compSize = readByte();
+ compSize |= (readByte()<<8);
+ compSize |= (readByte()<<16);
+ compSize |= (readByte()<<24);
+
+ size = readByte();
+ size |= (readByte()<<8);
+ size |= (readByte()<<16);
+ size |= (readByte()<<24);
+
+ if (debug)
+ util.debug("CRC:");
+ }
+
+ if (modeZIP)
+ nextFile();
+
+ tmp[0] = readByte();
+ if (tmp[0] != 8) {
+ if (debug)
+ util.debug("Unknown compression method!");
+ return 0;
+ }
+
+ gpflags = readByte();
+ if (debug){
+ if ((gpflags & ~(parseInt("1f",16))))
+ util.debug("Unknown flags set!");
+ }
+
+ readByte();
+ readByte();
+ readByte();
+ readByte();
+
+ readByte();
+ os = readByte();
+
+ if ((gpflags & 4)){
+ tmp[0] = readByte();
+ tmp[2] = readByte();
+ len = tmp[0] + 256*tmp[1];
+ if (debug)
+ util.debug("Extra field size: "+len);
+ for (i=0;ihttp://www.webtoolkit.info/
+*/
+Zlib.Base64 = {
+
+ // private property
+ _keyStr : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
+
+ // public method for encoding
+ encode : function (input) {
+ var output = [],
+ chr1, chr2, chr3, enc1, enc2, enc3, enc4,
+ i = 0;
+
+ input = Zlib.Base64._utf8_encode(input);
+
+ while (i < input.length) {
+
+ chr1 = input.charCodeAt(i++);
+ chr2 = input.charCodeAt(i++);
+ chr3 = input.charCodeAt(i++);
+
+ enc1 = chr1 >> 2;
+ enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
+ enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
+ enc4 = chr3 & 63;
+
+ if (isNaN(chr2)) {
+ enc3 = enc4 = 64;
+ } else if (isNaN(chr3)) {
+ enc4 = 64;
+ }
+
+ output.push([this._keyStr.charAt(enc1),
+ this._keyStr.charAt(enc2),
+ this._keyStr.charAt(enc3),
+ this._keyStr.charAt(enc4)].join(''));
+ }
+
+ return output.join('');
+ },
+
+ // public method for decoding
+ decode : function (input, utf8) {
+ var output = [],
+ chr1, chr2, chr3,
+ enc1, enc2, enc3, enc4,
+ i = 0;
+
+ input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
+
+ while (i < input.length) {
+
+ enc1 = this._keyStr.indexOf(input.charAt(i++));
+ enc2 = this._keyStr.indexOf(input.charAt(i++));
+ enc3 = this._keyStr.indexOf(input.charAt(i++));
+ enc4 = this._keyStr.indexOf(input.charAt(i++));
+
+ chr1 = (enc1 << 2) | (enc2 >> 4);
+ chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
+ chr3 = ((enc3 & 3) << 6) | enc4;
+
+ output.push(String.fromCharCode(chr1));
+
+ if (enc3 != 64) {
+ output.push(String.fromCharCode(chr2));
+ }
+ if (enc4 != 64) {
+ output.push(String.fromCharCode(chr3));
+ }
+ }
+
+ output = output.join('');
+
+ if (utf8) {
+ output = Zlib.Base64._utf8_decode(output);
+ }
+ return output;
+
+ },
+
+ // private method for UTF-8 encoding
+ _utf8_encode : function (string) {
+ string = string.replace(/\r\n/g,"\n");
+ var utftext = "";
+
+ for (var n = 0; n < string.length; n++) {
+
+ var c = string.charCodeAt(n);
+
+ if (c < 128) {
+ utftext += String.fromCharCode(c);
+ }
+ else if((c > 127) && (c < 2048)) {
+ utftext += String.fromCharCode((c >> 6) | 192);
+ utftext += String.fromCharCode((c & 63) | 128);
+ }
+ else {
+ utftext += String.fromCharCode((c >> 12) | 224);
+ utftext += String.fromCharCode(((c >> 6) & 63) | 128);
+ utftext += String.fromCharCode((c & 63) | 128);
+ }
+
+ }
+
+ return utftext;
+ },
+
+ // private method for UTF-8 decoding
+ _utf8_decode : function (utftext) {
+ var string = [],
+ i = 0,
+ c = 0, c2 = 0, c3 = 0;
+
+ while ( i < utftext.length ) {
+ c = utftext.charCodeAt(i);
+ if (c < 128) {
+ string.push(String.fromCharCode(c));
+ i++;
+ }
+ else if((c > 191) && (c < 224)) {
+ c2 = utftext.charCodeAt(i+1);
+ string.push(String.fromCharCode(((c & 31) << 6) | (c2 & 63)));
+ i += 2;
+ }
+ else {
+ c2 = utftext.charCodeAt(i+1);
+ c3 = utftext.charCodeAt(i+2);
+ string.push(String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)));
+ i += 3;
+ }
+ }
+ return string.join('');
+ },
+
+ _destrip: function (stripped, wrap){
+ var lines = [], lineno, i,
+ destripped = [];
+
+ if (wrap==null)
+ wrap = 76;
+
+ stripped.replace(/ /g, "");
+ lineno = stripped.length / wrap;
+ for (i = 0; i < lineno; i++)
+ lines[i]=stripped.substr(i * wrap, wrap);
+ if (lineno != stripped.length / wrap)
+ lines[lines.length]=stripped.substr(lineno * wrap, stripped.length-(lineno * wrap));
+
+ for (i = 0; i < lines.length; i++)
+ destripped.push(lines[i]);
+ return destripped.join('\n');
+ },
+
+ decodeAsArray: function (input){
+ var dec = this.decode(input),
+ ar = [], i;
+ for (i=0;i255){
+ switch (c) {
+ case 8364: c=128;
+ break;
+ case 8218: c=130;
+ break;
+ case 402: c=131;
+ break;
+ case 8222: c=132;
+ break;
+ case 8230: c=133;
+ break;
+ case 8224: c=134;
+ break;
+ case 8225: c=135;
+ break;
+ case 710: c=136;
+ break;
+ case 8240: c=137;
+ break;
+ case 352: c=138;
+ break;
+ case 8249: c=139;
+ break;
+ case 338: c=140;
+ break;
+ case 381: c=142;
+ break;
+ case 8216: c=145;
+ break;
+ case 8217: c=146;
+ break;
+ case 8220: c=147;
+ break;
+ case 8221: c=148;
+ break;
+ case 8226: c=149;
+ break;
+ case 8211: c=150;
+ break;
+ case 8212: c=151;
+ break;
+ case 732: c=152;
+ break;
+ case 8482: c=153;
+ break;
+ case 353: c=154;
+ break;
+ case 8250: c=155;
+ break;
+ case 339: c=156;
+ break;
+ case 382: c=158;
+ break;
+ case 376: c=159;
+ break;
+ default:
+ break;
+ }
+ }
+ return c;
+};
+
+/**
+ * Decoding string into utf-8
+ * @param {String} string to decode
+ * @return {String} utf8 decoded string
+ */
+Zlib.utf8Decode = function(utftext) {
+ var string = [];
+ var i = 0;
+ var c = 0, c1 = 0, c2 = 0;
+
+ while ( i < utftext.length ) {
+ c = utftext.charCodeAt(i);
+
+ if (c < 128) {
+ string.push(String.fromCharCode(c));
+ i++;
+ } else if((c > 191) && (c < 224)) {
+ c2 = utftext.charCodeAt(i+1);
+ string.push(String.fromCharCode(((c & 31) << 6) | (c2 & 63)));
+ i += 2;
+ } else {
+ c2 = utftext.charCodeAt(i+1);
+ c3 = utftext.charCodeAt(i+2);
+ string.push(String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)));
+ i += 3;
+ }
+ };
+ return string.join('');
+};
\ No newline at end of file
diff --git a/node_modules/git/package.json b/node_modules/git/package.json
new file mode 100644
index 00000000..c4811acd
--- /dev/null
+++ b/node_modules/git/package.json
@@ -0,0 +1,18 @@
+{ "name" : "git"
+, "description" : "A node.js library for git"
+, "version" : "0.1.5"
+, "author" : "Christian Amor Kvalheim "
+, "contributors" : [ "Vincent Giersch " ]
+, "repository" : { "type" : "git"
+ , "url" : "git@github.com:christkv/node-git.git" }
+, "bugs" : { "mail" : "node-git@googlegroups.com"
+ , "web" : "http://groups.google.com/group/nodegit" }
+, "main": "./lib/git/index"
+, "directories" : { "lib" : "./lib/git" }
+, "engines" : { "node" : ">=0.4.0" }
+, "dependencies": { "mime": "1.2.9" }
+, "devDependencies":{ "nodeunit" : ">=0.5.1" }
+, "licenses" : [ { "type" : "Apache License, Version 2.0"
+ , "url" : "http://www.apache.org/licenses/LICENSE-2.0" } ]
+, "scripts" : { "test" : "./node_modules/.bin/nodeunit test" }
+}
diff --git a/node_modules/mime/LICENSE b/node_modules/mime/LICENSE
new file mode 100644
index 00000000..451fc455
--- /dev/null
+++ b/node_modules/mime/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2010 Benjamin Thomas, Robert Kieffer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/mime/README.md b/node_modules/mime/README.md
new file mode 100644
index 00000000..b90552a3
--- /dev/null
+++ b/node_modules/mime/README.md
@@ -0,0 +1,63 @@
+# mime
+
+Comprehensive MIME type mapping API. Includes all 600+ types and 800+ extensions defined by the Apache project, plus additional types submitted by the node.js community.
+
+## Install
+
+Install with [npm](http://github.com/isaacs/npm):
+
+ npm install mime
+
+## API - Queries
+
+### mime.lookup(path)
+Get the mime type associated with a file. Performs a case-insensitive lookup using the extension in `path` (the substring after the last '/' or '.'). E.g.
+
+ var mime = require('mime');
+
+ mime.lookup('/path/to/file.txt'); // => 'text/plain'
+ mime.lookup('file.txt'); // => 'text/plain'
+ mime.lookup('.TXT'); // => 'text/plain'
+ mime.lookup('htm'); // => 'text/html'
+
+### mime.extension(type)
+Get the default extension for `type`
+
+ mime.extension('text/html'); // => 'html'
+ mime.extension('application/octet-stream'); // => 'bin'
+
+### mime.charsets.lookup()
+
+Map mime-type to charset
+
+ mime.charsets.lookup('text/plain'); // => 'UTF-8'
+
+(The logic for charset lookups is pretty rudimentary. Feel free to suggest improvements.)
+
+## API - Defining Custom Types
+
+The following APIs allow you to add your own type mappings within your project. If you feel a type should be included as part of node-mime, see [requesting new types](https://github.com/broofa/node-mime/wiki/Requesting-New-Types).
+
+### mime.define()
+
+Add custom mime/extension mappings
+
+ mime.define({
+ 'text/x-some-format': ['x-sf', 'x-sft', 'x-sfml'],
+ 'application/x-my-type': ['x-mt', 'x-mtt'],
+ // etc ...
+ });
+
+ mime.lookup('x-sft'); // => 'text/x-some-format'
+
+The first entry in the extensions array is returned by `mime.extension()`. E.g.
+
+ mime.extension('text/x-some-format'); // => 'x-sf'
+
+### mime.load(filepath)
+
+Load mappings from an Apache ".types" format file
+
+ mime.load('./my_project.types');
+
+The .types file format is simple - See the `types` dir for examples.
diff --git a/node_modules/mime/mime.js b/node_modules/mime/mime.js
new file mode 100644
index 00000000..70a63842
--- /dev/null
+++ b/node_modules/mime/mime.js
@@ -0,0 +1,113 @@
+var path = require('path');
+var fs = require('fs');
+
+function Mime() {
+ // Map of extension -> mime type
+ this.types = Object.create(null);
+
+ // Map of mime type -> extension
+ this.extensions = Object.create(null);
+}
+
+/**
+ * Define mimetype -> extension mappings. Each key is a mime-type that maps
+ * to an array of extensions associated with the type. The first extension is
+ * used as the default extension for the type.
+ *
+ * e.g. mime.define({'audio/ogg': ['oga', 'ogg', 'spx']});
+ *
+ * @param map (Object) type definitions
+ */
+Mime.prototype.define = function (map) {
+ for (var type in map) {
+ var exts = map[type];
+
+ for (var i = 0; i < exts.length; i++) {
+ if (process.env.DEBUG_MIME && this.types[exts]) {
+ console.warn(this._loading.replace(/.*\//, ''), 'changes "' + exts[i] + '" extension type from ' +
+ this.types[exts] + ' to ' + type);
+ }
+
+ this.types[exts[i]] = type;
+ }
+
+ // Default extension is the first one we encounter
+ if (!this.extensions[type]) {
+ this.extensions[type] = exts[0];
+ }
+ }
+};
+
+/**
+ * Load an Apache2-style ".types" file
+ *
+ * This may be called multiple times (it's expected). Where files declare
+ * overlapping types/extensions, the last file wins.
+ *
+ * @param file (String) path of file to load.
+ */
+Mime.prototype.load = function(file) {
+
+ this._loading = file;
+ // Read file and split into lines
+ var map = {},
+ content = fs.readFileSync(file, 'ascii'),
+ lines = content.split(/[\r\n]+/);
+
+ lines.forEach(function(line) {
+ // Clean up whitespace/comments, and split into fields
+ var fields = line.replace(/\s*#.*|^\s*|\s*$/g, '').split(/\s+/);
+ map[fields.shift()] = fields;
+ });
+
+ this.define(map);
+
+ this._loading = null;
+};
+
+/**
+ * Lookup a mime type based on extension
+ */
+Mime.prototype.lookup = function(path, fallback) {
+ var ext = path.replace(/.*[\.\/]/, '').toLowerCase();
+
+ return this.types[ext] || fallback || this.default_type;
+};
+
+/**
+ * Return file extension associated with a mime type
+ */
+Mime.prototype.extension = function(mimeType) {
+ return this.extensions[mimeType];
+};
+
+// Default instance
+var mime = new Mime();
+
+// Load local copy of
+// http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types
+mime.load(path.join(__dirname, 'types/mime.types'));
+
+// Load additional types from node.js community
+mime.load(path.join(__dirname, 'types/node.types'));
+
+// Default type
+mime.default_type = mime.lookup('bin');
+
+//
+// Additional API specific to the default instance
+//
+
+mime.Mime = Mime;
+
+/**
+ * Lookup a charset based on mime type.
+ */
+mime.charsets = {
+ lookup: function(mimeType, fallback) {
+ // Assume text types are utf8
+ return (/^text\//).test(mimeType) ? 'UTF-8' : fallback;
+ }
+};
+
+module.exports = mime;
diff --git a/node_modules/mime/package.json b/node_modules/mime/package.json
new file mode 100644
index 00000000..6e116b13
--- /dev/null
+++ b/node_modules/mime/package.json
@@ -0,0 +1,22 @@
+{
+ "author": {
+ "name": "Robert Kieffer",
+ "url": "http://github.com/broofa",
+ "email": "robert@broofa.com"
+ },
+ "contributors": [
+ {
+ "name": "Benjamin Thomas",
+ "url": "http://github.com/bentomas",
+ "email": "benjamin@benjaminthomas.org"
+ }
+ ],
+ "dependencies": {},
+ "description": "A comprehensive library for mime-type mapping",
+ "devDependencies": {},
+ "keywords": ["util", "mime"],
+ "main": "mime.js",
+ "name": "mime",
+ "repository": {"url": "https://github.com/broofa/node-mime", "type": "git"},
+ "version": "1.2.9"
+}
diff --git a/node_modules/mime/test.js b/node_modules/mime/test.js
new file mode 100644
index 00000000..cbad034a
--- /dev/null
+++ b/node_modules/mime/test.js
@@ -0,0 +1,55 @@
+/**
+ * Usage: node test.js
+ */
+
+var mime = require('./mime');
+var assert = require('assert');
+
+function eq(a, b) {
+ console.log('Test: ' + a + ' === ' + b);
+ assert.strictEqual.apply(null, arguments);
+}
+
+console.log(Object.keys(mime.extensions).length + ' types');
+console.log(Object.keys(mime.types).length + ' extensions\n');
+
+//
+// Test mime lookups
+//
+
+eq('text/plain', mime.lookup('text.txt'));
+eq('text/plain', mime.lookup('.text.txt'));
+eq('text/plain', mime.lookup('.txt'));
+eq('text/plain', mime.lookup('txt'));
+eq('application/octet-stream', mime.lookup('text.nope'));
+eq('fallback', mime.lookup('text.fallback', 'fallback'));
+eq('application/octet-stream', mime.lookup('constructor'));
+eq('text/plain', mime.lookup('TEXT.TXT'));
+eq('text/event-stream', mime.lookup('text/event-stream'));
+eq('application/x-web-app-manifest+json', mime.lookup('text.webapp'));
+
+//
+// Test extensions
+//
+
+eq('txt', mime.extension(mime.types.text));
+eq('html', mime.extension(mime.types.htm));
+eq('bin', mime.extension('application/octet-stream'));
+eq(undefined, mime.extension('constructor'));
+
+//
+// Test node types
+//
+
+eq('application/octet-stream', mime.lookup('file.buffer'));
+eq('audio/mp4', mime.lookup('file.m4a'));
+
+//
+// Test charsets
+//
+
+eq('UTF-8', mime.charsets.lookup('text/plain'));
+eq(undefined, mime.charsets.lookup(mime.types.js));
+eq('fallback', mime.charsets.lookup('application/octet-stream', 'fallback'));
+
+console.log('\nOK');
diff --git a/node_modules/mime/types/mime.types b/node_modules/mime/types/mime.types
new file mode 100644
index 00000000..b90b1658
--- /dev/null
+++ b/node_modules/mime/types/mime.types
@@ -0,0 +1,1588 @@
+# This file maps Internet media types to unique file extension(s).
+# Although created for httpd, this file is used by many software systems
+# and has been placed in the public domain for unlimited redisribution.
+#
+# The table below contains both registered and (common) unregistered types.
+# A type that has no unique extension can be ignored -- they are listed
+# here to guide configurations toward known types and to make it easier to
+# identify "new" types. File extensions are also commonly used to indicate
+# content languages and encodings, so choose them carefully.
+#
+# Internet media types should be registered as described in RFC 4288.
+# The registry is at .
+#
+# MIME type (lowercased) Extensions
+# ============================================ ==========
+# application/1d-interleaved-parityfec
+# application/3gpp-ims+xml
+# application/activemessage
+application/andrew-inset ez
+# application/applefile
+application/applixware aw
+application/atom+xml atom
+application/atomcat+xml atomcat
+# application/atomicmail
+application/atomsvc+xml atomsvc
+# application/auth-policy+xml
+# application/batch-smtp
+# application/beep+xml
+# application/calendar+xml
+# application/cals-1840
+# application/ccmp+xml
+application/ccxml+xml ccxml
+application/cdmi-capability cdmia
+application/cdmi-container cdmic
+application/cdmi-domain cdmid
+application/cdmi-object cdmio
+application/cdmi-queue cdmiq
+# application/cea-2018+xml
+# application/cellml+xml
+# application/cfw
+# application/cnrp+xml
+# application/commonground
+# application/conference-info+xml
+# application/cpl+xml
+# application/csta+xml
+# application/cstadata+xml
+application/cu-seeme cu
+# application/cybercash
+application/davmount+xml davmount
+# application/dca-rft
+# application/dec-dx
+# application/dialog-info+xml
+# application/dicom
+# application/dns
+application/docbook+xml dbk
+# application/dskpp+xml
+application/dssc+der dssc
+application/dssc+xml xdssc
+# application/dvcs
+application/ecmascript ecma
+# application/edi-consent
+# application/edi-x12
+# application/edifact
+application/emma+xml emma
+# application/epp+xml
+application/epub+zip epub
+# application/eshop
+# application/example
+application/exi exi
+# application/fastinfoset
+# application/fastsoap
+# application/fits
+application/font-tdpfr pfr
+# application/framework-attributes+xml
+application/gml+xml gml
+application/gpx+xml gpx
+application/gxf gxf
+# application/h224
+# application/held+xml
+# application/http
+application/hyperstudio stk
+# application/ibe-key-request+xml
+# application/ibe-pkg-reply+xml
+# application/ibe-pp-data
+# application/iges
+# application/im-iscomposing+xml
+# application/index
+# application/index.cmd
+# application/index.obj
+# application/index.response
+# application/index.vnd
+application/inkml+xml ink inkml
+# application/iotp
+application/ipfix ipfix
+# application/ipp
+# application/isup
+application/java-archive jar
+application/java-serialized-object ser
+application/java-vm class
+application/javascript js
+application/json json
+application/jsonml+json jsonml
+# application/kpml-request+xml
+# application/kpml-response+xml
+application/lost+xml lostxml
+application/mac-binhex40 hqx
+application/mac-compactpro cpt
+# application/macwriteii
+application/mads+xml mads
+application/marc mrc
+application/marcxml+xml mrcx
+application/mathematica ma nb mb
+# application/mathml-content+xml
+# application/mathml-presentation+xml
+application/mathml+xml mathml
+# application/mbms-associated-procedure-description+xml
+# application/mbms-deregister+xml
+# application/mbms-envelope+xml
+# application/mbms-msk+xml
+# application/mbms-msk-response+xml
+# application/mbms-protection-description+xml
+# application/mbms-reception-report+xml
+# application/mbms-register+xml
+# application/mbms-register-response+xml
+# application/mbms-user-service-description+xml
+application/mbox mbox
+# application/media_control+xml
+application/mediaservercontrol+xml mscml
+application/metalink+xml metalink
+application/metalink4+xml meta4
+application/mets+xml mets
+# application/mikey
+application/mods+xml mods
+# application/moss-keys
+# application/moss-signature
+# application/mosskey-data
+# application/mosskey-request
+application/mp21 m21 mp21
+application/mp4 mp4s
+# application/mpeg4-generic
+# application/mpeg4-iod
+# application/mpeg4-iod-xmt
+# application/msc-ivr+xml
+# application/msc-mixer+xml
+application/msword doc dot
+application/mxf mxf
+# application/nasdata
+# application/news-checkgroups
+# application/news-groupinfo
+# application/news-transmission
+# application/nss
+# application/ocsp-request
+# application/ocsp-response
+application/octet-stream bin dms lrf mar so dist distz pkg bpk dump elc deploy
+application/oda oda
+application/oebps-package+xml opf
+application/ogg ogx
+application/omdoc+xml omdoc
+application/onenote onetoc onetoc2 onetmp onepkg
+application/oxps oxps
+# application/parityfec
+application/patch-ops-error+xml xer
+application/pdf pdf
+application/pgp-encrypted pgp
+# application/pgp-keys
+application/pgp-signature asc sig
+application/pics-rules prf
+# application/pidf+xml
+# application/pidf-diff+xml
+application/pkcs10 p10
+application/pkcs7-mime p7m p7c
+application/pkcs7-signature p7s
+application/pkcs8 p8
+application/pkix-attr-cert ac
+application/pkix-cert cer
+application/pkix-crl crl
+application/pkix-pkipath pkipath
+application/pkixcmp pki
+application/pls+xml pls
+# application/poc-settings+xml
+application/postscript ai eps ps
+# application/prs.alvestrand.titrax-sheet
+application/prs.cww cww
+# application/prs.nprend
+# application/prs.plucker
+# application/prs.rdf-xml-crypt
+# application/prs.xsf+xml
+application/pskc+xml pskcxml
+# application/qsig
+application/rdf+xml rdf
+application/reginfo+xml rif
+application/relax-ng-compact-syntax rnc
+# application/remote-printing
+application/resource-lists+xml rl
+application/resource-lists-diff+xml rld
+# application/riscos
+# application/rlmi+xml
+application/rls-services+xml rs
+application/rpki-ghostbusters gbr
+application/rpki-manifest mft
+application/rpki-roa roa
+# application/rpki-updown
+application/rsd+xml rsd
+application/rss+xml rss
+application/rtf rtf
+# application/rtx
+# application/samlassertion+xml
+# application/samlmetadata+xml
+application/sbml+xml sbml
+application/scvp-cv-request scq
+application/scvp-cv-response scs
+application/scvp-vp-request spq
+application/scvp-vp-response spp
+application/sdp sdp
+# application/set-payment
+application/set-payment-initiation setpay
+# application/set-registration
+application/set-registration-initiation setreg
+# application/sgml
+# application/sgml-open-catalog
+application/shf+xml shf
+# application/sieve
+# application/simple-filter+xml
+# application/simple-message-summary
+# application/simplesymbolcontainer
+# application/slate
+# application/smil
+application/smil+xml smi smil
+# application/soap+fastinfoset
+# application/soap+xml
+application/sparql-query rq
+application/sparql-results+xml srx
+# application/spirits-event+xml
+application/srgs gram
+application/srgs+xml grxml
+application/sru+xml sru
+application/ssdl+xml ssdl
+application/ssml+xml ssml
+# application/tamp-apex-update
+# application/tamp-apex-update-confirm
+# application/tamp-community-update
+# application/tamp-community-update-confirm
+# application/tamp-error
+# application/tamp-sequence-adjust
+# application/tamp-sequence-adjust-confirm
+# application/tamp-status-query
+# application/tamp-status-response
+# application/tamp-update
+# application/tamp-update-confirm
+application/tei+xml tei teicorpus
+application/thraud+xml tfi
+# application/timestamp-query
+# application/timestamp-reply
+application/timestamped-data tsd
+# application/tve-trigger
+# application/ulpfec
+# application/vcard+xml
+# application/vemmi
+# application/vividence.scriptfile
+# application/vnd.3gpp.bsf+xml
+application/vnd.3gpp.pic-bw-large plb
+application/vnd.3gpp.pic-bw-small psb
+application/vnd.3gpp.pic-bw-var pvb
+# application/vnd.3gpp.sms
+# application/vnd.3gpp2.bcmcsinfo+xml
+# application/vnd.3gpp2.sms
+application/vnd.3gpp2.tcap tcap
+application/vnd.3m.post-it-notes pwn
+application/vnd.accpac.simply.aso aso
+application/vnd.accpac.simply.imp imp
+application/vnd.acucobol acu
+application/vnd.acucorp atc acutc
+application/vnd.adobe.air-application-installer-package+zip air
+application/vnd.adobe.formscentral.fcdt fcdt
+application/vnd.adobe.fxp fxp fxpl
+# application/vnd.adobe.partial-upload
+application/vnd.adobe.xdp+xml xdp
+application/vnd.adobe.xfdf xfdf
+# application/vnd.aether.imp
+# application/vnd.ah-barcode
+application/vnd.ahead.space ahead
+application/vnd.airzip.filesecure.azf azf
+application/vnd.airzip.filesecure.azs azs
+application/vnd.amazon.ebook azw
+application/vnd.americandynamics.acc acc
+application/vnd.amiga.ami ami
+# application/vnd.amundsen.maze+xml
+application/vnd.android.package-archive apk
+application/vnd.anser-web-certificate-issue-initiation cii
+application/vnd.anser-web-funds-transfer-initiation fti
+application/vnd.antix.game-component atx
+application/vnd.apple.installer+xml mpkg
+application/vnd.apple.mpegurl m3u8
+# application/vnd.arastra.swi
+application/vnd.aristanetworks.swi swi
+application/vnd.astraea-software.iota iota
+application/vnd.audiograph aep
+# application/vnd.autopackage
+# application/vnd.avistar+xml
+application/vnd.blueice.multipass mpm
+# application/vnd.bluetooth.ep.oob
+application/vnd.bmi bmi
+application/vnd.businessobjects rep
+# application/vnd.cab-jscript
+# application/vnd.canon-cpdl
+# application/vnd.canon-lips
+# application/vnd.cendio.thinlinc.clientconf
+application/vnd.chemdraw+xml cdxml
+application/vnd.chipnuts.karaoke-mmd mmd
+application/vnd.cinderella cdy
+# application/vnd.cirpack.isdn-ext
+application/vnd.claymore cla
+application/vnd.cloanto.rp9 rp9
+application/vnd.clonk.c4group c4g c4d c4f c4p c4u
+application/vnd.cluetrust.cartomobile-config c11amc
+application/vnd.cluetrust.cartomobile-config-pkg c11amz
+# application/vnd.collection+json
+# application/vnd.commerce-battelle
+application/vnd.commonspace csp
+application/vnd.contact.cmsg cdbcmsg
+application/vnd.cosmocaller cmc
+application/vnd.crick.clicker clkx
+application/vnd.crick.clicker.keyboard clkk
+application/vnd.crick.clicker.palette clkp
+application/vnd.crick.clicker.template clkt
+application/vnd.crick.clicker.wordbank clkw
+application/vnd.criticaltools.wbs+xml wbs
+application/vnd.ctc-posml pml
+# application/vnd.ctct.ws+xml
+# application/vnd.cups-pdf
+# application/vnd.cups-postscript
+application/vnd.cups-ppd ppd
+# application/vnd.cups-raster
+# application/vnd.cups-raw
+# application/vnd.curl
+application/vnd.curl.car car
+application/vnd.curl.pcurl pcurl
+# application/vnd.cybank
+application/vnd.dart dart
+application/vnd.data-vision.rdz rdz
+application/vnd.dece.data uvf uvvf uvd uvvd
+application/vnd.dece.ttml+xml uvt uvvt
+application/vnd.dece.unspecified uvx uvvx
+application/vnd.dece.zip uvz uvvz
+application/vnd.denovo.fcselayout-link fe_launch
+# application/vnd.dir-bi.plate-dl-nosuffix
+application/vnd.dna dna
+application/vnd.dolby.mlp mlp
+# application/vnd.dolby.mobile.1
+# application/vnd.dolby.mobile.2
+application/vnd.dpgraph dpg
+application/vnd.dreamfactory dfac
+application/vnd.ds-keypoint kpxx
+application/vnd.dvb.ait ait
+# application/vnd.dvb.dvbj
+# application/vnd.dvb.esgcontainer
+# application/vnd.dvb.ipdcdftnotifaccess
+# application/vnd.dvb.ipdcesgaccess
+# application/vnd.dvb.ipdcesgaccess2
+# application/vnd.dvb.ipdcesgpdd
+# application/vnd.dvb.ipdcroaming
+# application/vnd.dvb.iptv.alfec-base
+# application/vnd.dvb.iptv.alfec-enhancement
+# application/vnd.dvb.notif-aggregate-root+xml
+# application/vnd.dvb.notif-container+xml
+# application/vnd.dvb.notif-generic+xml
+# application/vnd.dvb.notif-ia-msglist+xml
+# application/vnd.dvb.notif-ia-registration-request+xml
+# application/vnd.dvb.notif-ia-registration-response+xml
+# application/vnd.dvb.notif-init+xml
+# application/vnd.dvb.pfr
+application/vnd.dvb.service svc
+# application/vnd.dxr
+application/vnd.dynageo geo
+# application/vnd.easykaraoke.cdgdownload
+# application/vnd.ecdis-update
+application/vnd.ecowin.chart mag
+# application/vnd.ecowin.filerequest
+# application/vnd.ecowin.fileupdate
+# application/vnd.ecowin.series
+# application/vnd.ecowin.seriesrequest
+# application/vnd.ecowin.seriesupdate
+# application/vnd.emclient.accessrequest+xml
+application/vnd.enliven nml
+# application/vnd.eprints.data+xml
+application/vnd.epson.esf esf
+application/vnd.epson.msf msf
+application/vnd.epson.quickanime qam
+application/vnd.epson.salt slt
+application/vnd.epson.ssf ssf
+# application/vnd.ericsson.quickcall
+application/vnd.eszigno3+xml es3 et3
+# application/vnd.etsi.aoc+xml
+# application/vnd.etsi.cug+xml
+# application/vnd.etsi.iptvcommand+xml
+# application/vnd.etsi.iptvdiscovery+xml
+# application/vnd.etsi.iptvprofile+xml
+# application/vnd.etsi.iptvsad-bc+xml
+# application/vnd.etsi.iptvsad-cod+xml
+# application/vnd.etsi.iptvsad-npvr+xml
+# application/vnd.etsi.iptvservice+xml
+# application/vnd.etsi.iptvsync+xml
+# application/vnd.etsi.iptvueprofile+xml
+# application/vnd.etsi.mcid+xml
+# application/vnd.etsi.overload-control-policy-dataset+xml
+# application/vnd.etsi.sci+xml
+# application/vnd.etsi.simservs+xml
+# application/vnd.etsi.tsl+xml
+# application/vnd.etsi.tsl.der
+# application/vnd.eudora.data
+application/vnd.ezpix-album ez2
+application/vnd.ezpix-package ez3
+# application/vnd.f-secure.mobile
+application/vnd.fdf fdf
+application/vnd.fdsn.mseed mseed
+application/vnd.fdsn.seed seed dataless
+# application/vnd.ffsns
+# application/vnd.fints
+application/vnd.flographit gph
+application/vnd.fluxtime.clip ftc
+# application/vnd.font-fontforge-sfd
+application/vnd.framemaker fm frame maker book
+application/vnd.frogans.fnc fnc
+application/vnd.frogans.ltf ltf
+application/vnd.fsc.weblaunch fsc
+application/vnd.fujitsu.oasys oas
+application/vnd.fujitsu.oasys2 oa2
+application/vnd.fujitsu.oasys3 oa3
+application/vnd.fujitsu.oasysgp fg5
+application/vnd.fujitsu.oasysprs bh2
+# application/vnd.fujixerox.art-ex
+# application/vnd.fujixerox.art4
+# application/vnd.fujixerox.hbpl
+application/vnd.fujixerox.ddd ddd
+application/vnd.fujixerox.docuworks xdw
+application/vnd.fujixerox.docuworks.binder xbd
+# application/vnd.fut-misnet
+application/vnd.fuzzysheet fzs
+application/vnd.genomatix.tuxedo txd
+# application/vnd.geocube+xml
+application/vnd.geogebra.file ggb
+application/vnd.geogebra.tool ggt
+application/vnd.geometry-explorer gex gre
+application/vnd.geonext gxt
+application/vnd.geoplan g2w
+application/vnd.geospace g3w
+# application/vnd.globalplatform.card-content-mgt
+# application/vnd.globalplatform.card-content-mgt-response
+application/vnd.gmx gmx
+application/vnd.google-earth.kml+xml kml
+application/vnd.google-earth.kmz kmz
+application/vnd.grafeq gqf gqs
+# application/vnd.gridmp
+application/vnd.groove-account gac
+application/vnd.groove-help ghf
+application/vnd.groove-identity-message gim
+application/vnd.groove-injector grv
+application/vnd.groove-tool-message gtm
+application/vnd.groove-tool-template tpl
+application/vnd.groove-vcard vcg
+# application/vnd.hal+json
+application/vnd.hal+xml hal
+application/vnd.handheld-entertainment+xml zmm
+application/vnd.hbci hbci
+# application/vnd.hcl-bireports
+application/vnd.hhe.lesson-player les
+application/vnd.hp-hpgl hpgl
+application/vnd.hp-hpid hpid
+application/vnd.hp-hps hps
+application/vnd.hp-jlyt jlt
+application/vnd.hp-pcl pcl
+application/vnd.hp-pclxl pclxl
+# application/vnd.httphone
+application/vnd.hydrostatix.sof-data sfd-hdstx
+# application/vnd.hzn-3d-crossword
+# application/vnd.ibm.afplinedata
+# application/vnd.ibm.electronic-media
+application/vnd.ibm.minipay mpy
+application/vnd.ibm.modcap afp listafp list3820
+application/vnd.ibm.rights-management irm
+application/vnd.ibm.secure-container sc
+application/vnd.iccprofile icc icm
+application/vnd.igloader igl
+application/vnd.immervision-ivp ivp
+application/vnd.immervision-ivu ivu
+# application/vnd.informedcontrol.rms+xml
+# application/vnd.informix-visionary
+# application/vnd.infotech.project
+# application/vnd.infotech.project+xml
+# application/vnd.innopath.wamp.notification
+application/vnd.insors.igm igm
+application/vnd.intercon.formnet xpw xpx
+application/vnd.intergeo i2g
+# application/vnd.intertrust.digibox
+# application/vnd.intertrust.nncp
+application/vnd.intu.qbo qbo
+application/vnd.intu.qfx qfx
+# application/vnd.iptc.g2.conceptitem+xml
+# application/vnd.iptc.g2.knowledgeitem+xml
+# application/vnd.iptc.g2.newsitem+xml
+# application/vnd.iptc.g2.newsmessage+xml
+# application/vnd.iptc.g2.packageitem+xml
+# application/vnd.iptc.g2.planningitem+xml
+application/vnd.ipunplugged.rcprofile rcprofile
+application/vnd.irepository.package+xml irp
+application/vnd.is-xpr xpr
+application/vnd.isac.fcs fcs
+application/vnd.jam jam
+# application/vnd.japannet-directory-service
+# application/vnd.japannet-jpnstore-wakeup
+# application/vnd.japannet-payment-wakeup
+# application/vnd.japannet-registration
+# application/vnd.japannet-registration-wakeup
+# application/vnd.japannet-setstore-wakeup
+# application/vnd.japannet-verification
+# application/vnd.japannet-verification-wakeup
+application/vnd.jcp.javame.midlet-rms rms
+application/vnd.jisp jisp
+application/vnd.joost.joda-archive joda
+application/vnd.kahootz ktz ktr
+application/vnd.kde.karbon karbon
+application/vnd.kde.kchart chrt
+application/vnd.kde.kformula kfo
+application/vnd.kde.kivio flw
+application/vnd.kde.kontour kon
+application/vnd.kde.kpresenter kpr kpt
+application/vnd.kde.kspread ksp
+application/vnd.kde.kword kwd kwt
+application/vnd.kenameaapp htke
+application/vnd.kidspiration kia
+application/vnd.kinar kne knp
+application/vnd.koan skp skd skt skm
+application/vnd.kodak-descriptor sse
+application/vnd.las.las+xml lasxml
+# application/vnd.liberty-request+xml
+application/vnd.llamagraphics.life-balance.desktop lbd
+application/vnd.llamagraphics.life-balance.exchange+xml lbe
+application/vnd.lotus-1-2-3 123
+application/vnd.lotus-approach apr
+application/vnd.lotus-freelance pre
+application/vnd.lotus-notes nsf
+application/vnd.lotus-organizer org
+application/vnd.lotus-screencam scm
+application/vnd.lotus-wordpro lwp
+application/vnd.macports.portpkg portpkg
+# application/vnd.marlin.drm.actiontoken+xml
+# application/vnd.marlin.drm.conftoken+xml
+# application/vnd.marlin.drm.license+xml
+# application/vnd.marlin.drm.mdcf
+application/vnd.mcd mcd
+application/vnd.medcalcdata mc1
+application/vnd.mediastation.cdkey cdkey
+# application/vnd.meridian-slingshot
+application/vnd.mfer mwf
+application/vnd.mfmp mfm
+application/vnd.micrografx.flo flo
+application/vnd.micrografx.igx igx
+application/vnd.mif mif
+# application/vnd.minisoft-hp3000-save
+# application/vnd.mitsubishi.misty-guard.trustweb
+application/vnd.mobius.daf daf
+application/vnd.mobius.dis dis
+application/vnd.mobius.mbk mbk
+application/vnd.mobius.mqy mqy
+application/vnd.mobius.msl msl
+application/vnd.mobius.plc plc
+application/vnd.mobius.txf txf
+application/vnd.mophun.application mpn
+application/vnd.mophun.certificate mpc
+# application/vnd.motorola.flexsuite
+# application/vnd.motorola.flexsuite.adsi
+# application/vnd.motorola.flexsuite.fis
+# application/vnd.motorola.flexsuite.gotap
+# application/vnd.motorola.flexsuite.kmr
+# application/vnd.motorola.flexsuite.ttc
+# application/vnd.motorola.flexsuite.wem
+# application/vnd.motorola.iprm
+application/vnd.mozilla.xul+xml xul
+application/vnd.ms-artgalry cil
+# application/vnd.ms-asf
+application/vnd.ms-cab-compressed cab
+# application/vnd.ms-color.iccprofile
+application/vnd.ms-excel xls xlm xla xlc xlt xlw
+application/vnd.ms-excel.addin.macroenabled.12 xlam
+application/vnd.ms-excel.sheet.binary.macroenabled.12 xlsb
+application/vnd.ms-excel.sheet.macroenabled.12 xlsm
+application/vnd.ms-excel.template.macroenabled.12 xltm
+application/vnd.ms-fontobject eot
+application/vnd.ms-htmlhelp chm
+application/vnd.ms-ims ims
+application/vnd.ms-lrm lrm
+# application/vnd.ms-office.activex+xml
+application/vnd.ms-officetheme thmx
+# application/vnd.ms-opentype
+# application/vnd.ms-package.obfuscated-opentype
+application/vnd.ms-pki.seccat cat
+application/vnd.ms-pki.stl stl
+# application/vnd.ms-playready.initiator+xml
+application/vnd.ms-powerpoint ppt pps pot
+application/vnd.ms-powerpoint.addin.macroenabled.12 ppam
+application/vnd.ms-powerpoint.presentation.macroenabled.12 pptm
+application/vnd.ms-powerpoint.slide.macroenabled.12 sldm
+application/vnd.ms-powerpoint.slideshow.macroenabled.12 ppsm
+application/vnd.ms-powerpoint.template.macroenabled.12 potm
+# application/vnd.ms-printing.printticket+xml
+application/vnd.ms-project mpp mpt
+# application/vnd.ms-tnef
+# application/vnd.ms-wmdrm.lic-chlg-req
+# application/vnd.ms-wmdrm.lic-resp
+# application/vnd.ms-wmdrm.meter-chlg-req
+# application/vnd.ms-wmdrm.meter-resp
+application/vnd.ms-word.document.macroenabled.12 docm
+application/vnd.ms-word.template.macroenabled.12 dotm
+application/vnd.ms-works wps wks wcm wdb
+application/vnd.ms-wpl wpl
+application/vnd.ms-xpsdocument xps
+application/vnd.mseq mseq
+# application/vnd.msign
+# application/vnd.multiad.creator
+# application/vnd.multiad.creator.cif
+# application/vnd.music-niff
+application/vnd.musician mus
+application/vnd.muvee.style msty
+application/vnd.mynfc taglet
+# application/vnd.ncd.control
+# application/vnd.ncd.reference
+# application/vnd.nervana
+# application/vnd.netfpx
+application/vnd.neurolanguage.nlu nlu
+application/vnd.nitf ntf nitf
+application/vnd.noblenet-directory nnd
+application/vnd.noblenet-sealer nns
+application/vnd.noblenet-web nnw
+# application/vnd.nokia.catalogs
+# application/vnd.nokia.conml+wbxml
+# application/vnd.nokia.conml+xml
+# application/vnd.nokia.isds-radio-presets
+# application/vnd.nokia.iptv.config+xml
+# application/vnd.nokia.landmark+wbxml
+# application/vnd.nokia.landmark+xml
+# application/vnd.nokia.landmarkcollection+xml
+# application/vnd.nokia.n-gage.ac+xml
+application/vnd.nokia.n-gage.data ngdat
+application/vnd.nokia.n-gage.symbian.install n-gage
+# application/vnd.nokia.ncd
+# application/vnd.nokia.pcd+wbxml
+# application/vnd.nokia.pcd+xml
+application/vnd.nokia.radio-preset rpst
+application/vnd.nokia.radio-presets rpss
+application/vnd.novadigm.edm edm
+application/vnd.novadigm.edx edx
+application/vnd.novadigm.ext ext
+# application/vnd.ntt-local.file-transfer
+# application/vnd.ntt-local.sip-ta_remote
+# application/vnd.ntt-local.sip-ta_tcp_stream
+application/vnd.oasis.opendocument.chart odc
+application/vnd.oasis.opendocument.chart-template otc
+application/vnd.oasis.opendocument.database odb
+application/vnd.oasis.opendocument.formula odf
+application/vnd.oasis.opendocument.formula-template odft
+application/vnd.oasis.opendocument.graphics odg
+application/vnd.oasis.opendocument.graphics-template otg
+application/vnd.oasis.opendocument.image odi
+application/vnd.oasis.opendocument.image-template oti
+application/vnd.oasis.opendocument.presentation odp
+application/vnd.oasis.opendocument.presentation-template otp
+application/vnd.oasis.opendocument.spreadsheet ods
+application/vnd.oasis.opendocument.spreadsheet-template ots
+application/vnd.oasis.opendocument.text odt
+application/vnd.oasis.opendocument.text-master odm
+application/vnd.oasis.opendocument.text-template ott
+application/vnd.oasis.opendocument.text-web oth
+# application/vnd.obn
+# application/vnd.oftn.l10n+json
+# application/vnd.oipf.contentaccessdownload+xml
+# application/vnd.oipf.contentaccessstreaming+xml
+# application/vnd.oipf.cspg-hexbinary
+# application/vnd.oipf.dae.svg+xml
+# application/vnd.oipf.dae.xhtml+xml
+# application/vnd.oipf.mippvcontrolmessage+xml
+# application/vnd.oipf.pae.gem
+# application/vnd.oipf.spdiscovery+xml
+# application/vnd.oipf.spdlist+xml
+# application/vnd.oipf.ueprofile+xml
+# application/vnd.oipf.userprofile+xml
+application/vnd.olpc-sugar xo
+# application/vnd.oma-scws-config
+# application/vnd.oma-scws-http-request
+# application/vnd.oma-scws-http-response
+# application/vnd.oma.bcast.associated-procedure-parameter+xml
+# application/vnd.oma.bcast.drm-trigger+xml
+# application/vnd.oma.bcast.imd+xml
+# application/vnd.oma.bcast.ltkm
+# application/vnd.oma.bcast.notification+xml
+# application/vnd.oma.bcast.provisioningtrigger
+# application/vnd.oma.bcast.sgboot
+# application/vnd.oma.bcast.sgdd+xml
+# application/vnd.oma.bcast.sgdu
+# application/vnd.oma.bcast.simple-symbol-container
+# application/vnd.oma.bcast.smartcard-trigger+xml
+# application/vnd.oma.bcast.sprov+xml
+# application/vnd.oma.bcast.stkm
+# application/vnd.oma.cab-address-book+xml
+# application/vnd.oma.cab-feature-handler+xml
+# application/vnd.oma.cab-pcc+xml
+# application/vnd.oma.cab-user-prefs+xml
+# application/vnd.oma.dcd
+# application/vnd.oma.dcdc
+application/vnd.oma.dd2+xml dd2
+# application/vnd.oma.drm.risd+xml
+# application/vnd.oma.group-usage-list+xml
+# application/vnd.oma.pal+xml
+# application/vnd.oma.poc.detailed-progress-report+xml
+# application/vnd.oma.poc.final-report+xml
+# application/vnd.oma.poc.groups+xml
+# application/vnd.oma.poc.invocation-descriptor+xml
+# application/vnd.oma.poc.optimized-progress-report+xml
+# application/vnd.oma.push
+# application/vnd.oma.scidm.messages+xml
+# application/vnd.oma.xcap-directory+xml
+# application/vnd.omads-email+xml
+# application/vnd.omads-file+xml
+# application/vnd.omads-folder+xml
+# application/vnd.omaloc-supl-init
+application/vnd.openofficeorg.extension oxt
+# application/vnd.openxmlformats-officedocument.custom-properties+xml
+# application/vnd.openxmlformats-officedocument.customxmlproperties+xml
+# application/vnd.openxmlformats-officedocument.drawing+xml
+# application/vnd.openxmlformats-officedocument.drawingml.chart+xml
+# application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml
+# application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml
+# application/vnd.openxmlformats-officedocument.extended-properties+xml
+# application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml
+# application/vnd.openxmlformats-officedocument.presentationml.comments+xml
+# application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml
+# application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml
+# application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml
+application/vnd.openxmlformats-officedocument.presentationml.presentation pptx
+# application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.presprops+xml
+application/vnd.openxmlformats-officedocument.presentationml.slide sldx
+# application/vnd.openxmlformats-officedocument.presentationml.slide+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml
+application/vnd.openxmlformats-officedocument.presentationml.slideshow ppsx
+# application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml
+# application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml
+# application/vnd.openxmlformats-officedocument.presentationml.tags+xml
+application/vnd.openxmlformats-officedocument.presentationml.template potx
+# application/vnd.openxmlformats-officedocument.presentationml.template.main+xml
+# application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml
+application/vnd.openxmlformats-officedocument.spreadsheetml.sheet xlsx
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml
+application/vnd.openxmlformats-officedocument.spreadsheetml.template xltx
+# application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml
+# application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml
+# application/vnd.openxmlformats-officedocument.theme+xml
+# application/vnd.openxmlformats-officedocument.themeoverride+xml
+# application/vnd.openxmlformats-officedocument.vmldrawing
+# application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml
+application/vnd.openxmlformats-officedocument.wordprocessingml.document docx
+# application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml
+application/vnd.openxmlformats-officedocument.wordprocessingml.template dotx
+# application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml
+# application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml
+# application/vnd.openxmlformats-package.core-properties+xml
+# application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml
+# application/vnd.openxmlformats-package.relationships+xml
+# application/vnd.quobject-quoxdocument
+# application/vnd.osa.netdeploy
+application/vnd.osgeo.mapguide.package mgp
+# application/vnd.osgi.bundle
+application/vnd.osgi.dp dp
+application/vnd.osgi.subsystem esa
+# application/vnd.otps.ct-kip+xml
+application/vnd.palm pdb pqa oprc
+# application/vnd.paos.xml
+application/vnd.pawaafile paw
+application/vnd.pg.format str
+application/vnd.pg.osasli ei6
+# application/vnd.piaccess.application-licence
+application/vnd.picsel efif
+application/vnd.pmi.widget wg
+# application/vnd.poc.group-advertisement+xml
+application/vnd.pocketlearn plf
+application/vnd.powerbuilder6 pbd
+# application/vnd.powerbuilder6-s
+# application/vnd.powerbuilder7
+# application/vnd.powerbuilder7-s
+# application/vnd.powerbuilder75
+# application/vnd.powerbuilder75-s
+# application/vnd.preminet
+application/vnd.previewsystems.box box
+application/vnd.proteus.magazine mgz
+application/vnd.publishare-delta-tree qps
+application/vnd.pvi.ptid1 ptid
+# application/vnd.pwg-multiplexed
+# application/vnd.pwg-xhtml-print+xml
+# application/vnd.qualcomm.brew-app-res
+application/vnd.quark.quarkxpress qxd qxt qwd qwt qxl qxb
+# application/vnd.radisys.moml+xml
+# application/vnd.radisys.msml+xml
+# application/vnd.radisys.msml-audit+xml
+# application/vnd.radisys.msml-audit-conf+xml
+# application/vnd.radisys.msml-audit-conn+xml
+# application/vnd.radisys.msml-audit-dialog+xml
+# application/vnd.radisys.msml-audit-stream+xml
+# application/vnd.radisys.msml-conf+xml
+# application/vnd.radisys.msml-dialog+xml
+# application/vnd.radisys.msml-dialog-base+xml
+# application/vnd.radisys.msml-dialog-fax-detect+xml
+# application/vnd.radisys.msml-dialog-fax-sendrecv+xml
+# application/vnd.radisys.msml-dialog-group+xml
+# application/vnd.radisys.msml-dialog-speech+xml
+# application/vnd.radisys.msml-dialog-transform+xml
+# application/vnd.rainstor.data
+# application/vnd.rapid
+application/vnd.realvnc.bed bed
+application/vnd.recordare.musicxml mxl
+application/vnd.recordare.musicxml+xml musicxml
+# application/vnd.renlearn.rlprint
+application/vnd.rig.cryptonote cryptonote
+application/vnd.rim.cod cod
+application/vnd.rn-realmedia rm
+application/vnd.rn-realmedia-vbr rmvb
+application/vnd.route66.link66+xml link66
+# application/vnd.rs-274x
+# application/vnd.ruckus.download
+# application/vnd.s3sms
+application/vnd.sailingtracker.track st
+# application/vnd.sbm.cid
+# application/vnd.sbm.mid2
+# application/vnd.scribus
+# application/vnd.sealed.3df
+# application/vnd.sealed.csf
+# application/vnd.sealed.doc
+# application/vnd.sealed.eml
+# application/vnd.sealed.mht
+# application/vnd.sealed.net
+# application/vnd.sealed.ppt
+# application/vnd.sealed.tiff
+# application/vnd.sealed.xls
+# application/vnd.sealedmedia.softseal.html
+# application/vnd.sealedmedia.softseal.pdf
+application/vnd.seemail see
+application/vnd.sema sema
+application/vnd.semd semd
+application/vnd.semf semf
+application/vnd.shana.informed.formdata ifm
+application/vnd.shana.informed.formtemplate itp
+application/vnd.shana.informed.interchange iif
+application/vnd.shana.informed.package ipk
+application/vnd.simtech-mindmapper twd twds
+application/vnd.smaf mmf
+# application/vnd.smart.notebook
+application/vnd.smart.teacher teacher
+# application/vnd.software602.filler.form+xml
+# application/vnd.software602.filler.form-xml-zip
+application/vnd.solent.sdkm+xml sdkm sdkd
+application/vnd.spotfire.dxp dxp
+application/vnd.spotfire.sfs sfs
+# application/vnd.sss-cod
+# application/vnd.sss-dtf
+# application/vnd.sss-ntf
+application/vnd.stardivision.calc sdc
+application/vnd.stardivision.draw sda
+application/vnd.stardivision.impress sdd
+application/vnd.stardivision.math smf
+application/vnd.stardivision.writer sdw vor
+application/vnd.stardivision.writer-global sgl
+application/vnd.stepmania.package smzip
+application/vnd.stepmania.stepchart sm
+# application/vnd.street-stream
+application/vnd.sun.xml.calc sxc
+application/vnd.sun.xml.calc.template stc
+application/vnd.sun.xml.draw sxd
+application/vnd.sun.xml.draw.template std
+application/vnd.sun.xml.impress sxi
+application/vnd.sun.xml.impress.template sti
+application/vnd.sun.xml.math sxm
+application/vnd.sun.xml.writer sxw
+application/vnd.sun.xml.writer.global sxg
+application/vnd.sun.xml.writer.template stw
+# application/vnd.sun.wadl+xml
+application/vnd.sus-calendar sus susp
+application/vnd.svd svd
+# application/vnd.swiftview-ics
+application/vnd.symbian.install sis sisx
+application/vnd.syncml+xml xsm
+application/vnd.syncml.dm+wbxml bdm
+application/vnd.syncml.dm+xml xdm
+# application/vnd.syncml.dm.notification
+# application/vnd.syncml.ds.notification
+application/vnd.tao.intent-module-archive tao
+application/vnd.tcpdump.pcap pcap cap dmp
+application/vnd.tmobile-livetv tmo
+application/vnd.trid.tpt tpt
+application/vnd.triscape.mxs mxs
+application/vnd.trueapp tra
+# application/vnd.truedoc
+# application/vnd.ubisoft.webplayer
+application/vnd.ufdl ufd ufdl
+application/vnd.uiq.theme utz
+application/vnd.umajin umj
+application/vnd.unity unityweb
+application/vnd.uoml+xml uoml
+# application/vnd.uplanet.alert
+# application/vnd.uplanet.alert-wbxml
+# application/vnd.uplanet.bearer-choice
+# application/vnd.uplanet.bearer-choice-wbxml
+# application/vnd.uplanet.cacheop
+# application/vnd.uplanet.cacheop-wbxml
+# application/vnd.uplanet.channel
+# application/vnd.uplanet.channel-wbxml
+# application/vnd.uplanet.list
+# application/vnd.uplanet.list-wbxml
+# application/vnd.uplanet.listcmd
+# application/vnd.uplanet.listcmd-wbxml
+# application/vnd.uplanet.signal
+application/vnd.vcx vcx
+# application/vnd.vd-study
+# application/vnd.vectorworks
+# application/vnd.verimatrix.vcas
+# application/vnd.vidsoft.vidconference
+application/vnd.visio vsd vst vss vsw
+application/vnd.visionary vis
+# application/vnd.vividence.scriptfile
+application/vnd.vsf vsf
+# application/vnd.wap.sic
+# application/vnd.wap.slc
+application/vnd.wap.wbxml wbxml
+application/vnd.wap.wmlc wmlc
+application/vnd.wap.wmlscriptc wmlsc
+application/vnd.webturbo wtb
+# application/vnd.wfa.wsc
+# application/vnd.wmc
+# application/vnd.wmf.bootstrap
+# application/vnd.wolfram.mathematica
+# application/vnd.wolfram.mathematica.package
+application/vnd.wolfram.player nbp
+application/vnd.wordperfect wpd
+application/vnd.wqd wqd
+# application/vnd.wrq-hp3000-labelled
+application/vnd.wt.stf stf
+# application/vnd.wv.csp+wbxml
+# application/vnd.wv.csp+xml
+# application/vnd.wv.ssp+xml
+application/vnd.xara xar
+application/vnd.xfdl xfdl
+# application/vnd.xfdl.webform
+# application/vnd.xmi+xml
+# application/vnd.xmpie.cpkg
+# application/vnd.xmpie.dpkg
+# application/vnd.xmpie.plan
+# application/vnd.xmpie.ppkg
+# application/vnd.xmpie.xlim
+application/vnd.yamaha.hv-dic hvd
+application/vnd.yamaha.hv-script hvs
+application/vnd.yamaha.hv-voice hvp
+application/vnd.yamaha.openscoreformat osf
+application/vnd.yamaha.openscoreformat.osfpvg+xml osfpvg
+# application/vnd.yamaha.remote-setup
+application/vnd.yamaha.smaf-audio saf
+application/vnd.yamaha.smaf-phrase spf
+# application/vnd.yamaha.through-ngn
+# application/vnd.yamaha.tunnel-udpencap
+application/vnd.yellowriver-custom-menu cmp
+application/vnd.zul zir zirz
+application/vnd.zzazz.deck+xml zaz
+application/voicexml+xml vxml
+# application/vq-rtcpxr
+# application/watcherinfo+xml
+# application/whoispp-query
+# application/whoispp-response
+application/widget wgt
+application/winhlp hlp
+# application/wita
+# application/wordperfect5.1
+application/wsdl+xml wsdl
+application/wspolicy+xml wspolicy
+application/x-7z-compressed 7z
+application/x-abiword abw
+application/x-ace-compressed ace
+# application/x-amf
+application/x-apple-diskimage dmg
+application/x-authorware-bin aab x32 u32 vox
+application/x-authorware-map aam
+application/x-authorware-seg aas
+application/x-bcpio bcpio
+application/x-bittorrent torrent
+application/x-blorb blb blorb
+application/x-bzip bz
+application/x-bzip2 bz2 boz
+application/x-cbr cbr cba cbt cbz cb7
+application/x-cdlink vcd
+application/x-cfs-compressed cfs
+application/x-chat chat
+application/x-chess-pgn pgn
+application/x-conference nsc
+# application/x-compress
+application/x-cpio cpio
+application/x-csh csh
+application/x-debian-package deb udeb
+application/x-dgc-compressed dgc
+application/x-director dir dcr dxr cst cct cxt w3d fgd swa
+application/x-doom wad
+application/x-dtbncx+xml ncx
+application/x-dtbook+xml dtb
+application/x-dtbresource+xml res
+application/x-dvi dvi
+application/x-envoy evy
+application/x-eva eva
+application/x-font-bdf bdf
+# application/x-font-dos
+# application/x-font-framemaker
+application/x-font-ghostscript gsf
+# application/x-font-libgrx
+application/x-font-linux-psf psf
+application/x-font-otf otf
+application/x-font-pcf pcf
+application/x-font-snf snf
+# application/x-font-speedo
+# application/x-font-sunos-news
+application/x-font-ttf ttf ttc
+application/x-font-type1 pfa pfb pfm afm
+application/x-font-woff woff
+# application/x-font-vfont
+application/x-freearc arc
+application/x-futuresplash spl
+application/x-gca-compressed gca
+application/x-glulx ulx
+application/x-gnumeric gnumeric
+application/x-gramps-xml gramps
+application/x-gtar gtar
+# application/x-gzip
+application/x-hdf hdf
+application/x-install-instructions install
+application/x-iso9660-image iso
+application/x-java-jnlp-file jnlp
+application/x-latex latex
+application/x-lzh-compressed lzh lha
+application/x-mie mie
+application/x-mobipocket-ebook prc mobi
+application/x-ms-application application
+application/x-ms-shortcut lnk
+application/x-ms-wmd wmd
+application/x-ms-wmz wmz
+application/x-ms-xbap xbap
+application/x-msaccess mdb
+application/x-msbinder obd
+application/x-mscardfile crd
+application/x-msclip clp
+application/x-msdownload exe dll com bat msi
+application/x-msmediaview mvb m13 m14
+application/x-msmetafile wmf wmz emf emz
+application/x-msmoney mny
+application/x-mspublisher pub
+application/x-msschedule scd
+application/x-msterminal trm
+application/x-mswrite wri
+application/x-netcdf nc cdf
+application/x-nzb nzb
+application/x-pkcs12 p12 pfx
+application/x-pkcs7-certificates p7b spc
+application/x-pkcs7-certreqresp p7r
+application/x-rar-compressed rar
+application/x-research-info-systems ris
+application/x-sh sh
+application/x-shar shar
+application/x-shockwave-flash swf
+application/x-silverlight-app xap
+application/x-sql sql
+application/x-stuffit sit
+application/x-stuffitx sitx
+application/x-subrip srt
+application/x-sv4cpio sv4cpio
+application/x-sv4crc sv4crc
+application/x-t3vm-image t3
+application/x-tads gam
+application/x-tar tar
+application/x-tcl tcl
+application/x-tex tex
+application/x-tex-tfm tfm
+application/x-texinfo texinfo texi
+application/x-tgif obj
+application/x-ustar ustar
+application/x-wais-source src
+application/x-x509-ca-cert der crt
+application/x-xfig fig
+application/x-xliff+xml xlf
+application/x-xpinstall xpi
+application/x-xz xz
+application/x-zmachine z1 z2 z3 z4 z5 z6 z7 z8
+# application/x400-bp
+application/xaml+xml xaml
+# application/xcap-att+xml
+# application/xcap-caps+xml
+application/xcap-diff+xml xdf
+# application/xcap-el+xml
+# application/xcap-error+xml
+# application/xcap-ns+xml
+# application/xcon-conference-info-diff+xml
+# application/xcon-conference-info+xml
+application/xenc+xml xenc
+application/xhtml+xml xhtml xht
+# application/xhtml-voice+xml
+application/xml xml xsl
+application/xml-dtd dtd
+# application/xml-external-parsed-entity
+# application/xmpp+xml
+application/xop+xml xop
+application/xproc+xml xpl
+application/xslt+xml xslt
+application/xspf+xml xspf
+application/xv+xml mxml xhvml xvml xvm
+application/yang yang
+application/yin+xml yin
+application/zip zip
+# audio/1d-interleaved-parityfec
+# audio/32kadpcm
+# audio/3gpp
+# audio/3gpp2
+# audio/ac3
+audio/adpcm adp
+# audio/amr
+# audio/amr-wb
+# audio/amr-wb+
+# audio/asc
+# audio/atrac-advanced-lossless
+# audio/atrac-x
+# audio/atrac3
+audio/basic au snd
+# audio/bv16
+# audio/bv32
+# audio/clearmode
+# audio/cn
+# audio/dat12
+# audio/dls
+# audio/dsr-es201108
+# audio/dsr-es202050
+# audio/dsr-es202211
+# audio/dsr-es202212
+# audio/dv
+# audio/dvi4
+# audio/eac3
+# audio/evrc
+# audio/evrc-qcp
+# audio/evrc0
+# audio/evrc1
+# audio/evrcb
+# audio/evrcb0
+# audio/evrcb1
+# audio/evrcwb
+# audio/evrcwb0
+# audio/evrcwb1
+# audio/example
+# audio/fwdred
+# audio/g719
+# audio/g722
+# audio/g7221
+# audio/g723
+# audio/g726-16
+# audio/g726-24
+# audio/g726-32
+# audio/g726-40
+# audio/g728
+# audio/g729
+# audio/g7291
+# audio/g729d
+# audio/g729e
+# audio/gsm
+# audio/gsm-efr
+# audio/gsm-hr-08
+# audio/ilbc
+# audio/ip-mr_v2.5
+# audio/isac
+# audio/l16
+# audio/l20
+# audio/l24
+# audio/l8
+# audio/lpc
+audio/midi mid midi kar rmi
+# audio/mobile-xmf
+audio/mp4 mp4a
+# audio/mp4a-latm
+# audio/mpa
+# audio/mpa-robust
+audio/mpeg mpga mp2 mp2a mp3 m2a m3a
+# audio/mpeg4-generic
+# audio/musepack
+audio/ogg oga ogg spx
+# audio/opus
+# audio/parityfec
+# audio/pcma
+# audio/pcma-wb
+# audio/pcmu-wb
+# audio/pcmu
+# audio/prs.sid
+# audio/qcelp
+# audio/red
+# audio/rtp-enc-aescm128
+# audio/rtp-midi
+# audio/rtx
+audio/s3m s3m
+audio/silk sil
+# audio/smv
+# audio/smv0
+# audio/smv-qcp
+# audio/sp-midi
+# audio/speex
+# audio/t140c
+# audio/t38
+# audio/telephone-event
+# audio/tone
+# audio/uemclip
+# audio/ulpfec
+# audio/vdvi
+# audio/vmr-wb
+# audio/vnd.3gpp.iufp
+# audio/vnd.4sb
+# audio/vnd.audiokoz
+# audio/vnd.celp
+# audio/vnd.cisco.nse
+# audio/vnd.cmles.radio-events
+# audio/vnd.cns.anp1
+# audio/vnd.cns.inf1
+audio/vnd.dece.audio uva uvva
+audio/vnd.digital-winds eol
+# audio/vnd.dlna.adts
+# audio/vnd.dolby.heaac.1
+# audio/vnd.dolby.heaac.2
+# audio/vnd.dolby.mlp
+# audio/vnd.dolby.mps
+# audio/vnd.dolby.pl2
+# audio/vnd.dolby.pl2x
+# audio/vnd.dolby.pl2z
+# audio/vnd.dolby.pulse.1
+audio/vnd.dra dra
+audio/vnd.dts dts
+audio/vnd.dts.hd dtshd
+# audio/vnd.dvb.file
+# audio/vnd.everad.plj
+# audio/vnd.hns.audio
+audio/vnd.lucent.voice lvp
+audio/vnd.ms-playready.media.pya pya
+# audio/vnd.nokia.mobile-xmf
+# audio/vnd.nortel.vbk
+audio/vnd.nuera.ecelp4800 ecelp4800
+audio/vnd.nuera.ecelp7470 ecelp7470
+audio/vnd.nuera.ecelp9600 ecelp9600
+# audio/vnd.octel.sbc
+# audio/vnd.qcelp
+# audio/vnd.rhetorex.32kadpcm
+audio/vnd.rip rip
+# audio/vnd.sealedmedia.softseal.mpeg
+# audio/vnd.vmx.cvsd
+# audio/vorbis
+# audio/vorbis-config
+audio/webm weba
+audio/x-aac aac
+audio/x-aiff aif aiff aifc
+audio/x-caf caf
+audio/x-flac flac
+audio/x-matroska mka
+audio/x-mpegurl m3u
+audio/x-ms-wax wax
+audio/x-ms-wma wma
+audio/x-pn-realaudio ram ra
+audio/x-pn-realaudio-plugin rmp
+# audio/x-tta
+audio/x-wav wav
+audio/xm xm
+chemical/x-cdx cdx
+chemical/x-cif cif
+chemical/x-cmdf cmdf
+chemical/x-cml cml
+chemical/x-csml csml
+# chemical/x-pdb
+chemical/x-xyz xyz
+image/bmp bmp
+image/cgm cgm
+# image/example
+# image/fits
+image/g3fax g3
+image/gif gif
+image/ief ief
+# image/jp2
+image/jpeg jpeg jpg jpe
+# image/jpm
+# image/jpx
+image/ktx ktx
+# image/naplps
+image/png png
+image/prs.btif btif
+# image/prs.pti
+image/sgi sgi
+image/svg+xml svg svgz
+# image/t38
+image/tiff tiff tif
+# image/tiff-fx
+image/vnd.adobe.photoshop psd
+# image/vnd.cns.inf2
+image/vnd.dece.graphic uvi uvvi uvg uvvg
+image/vnd.dvb.subtitle sub
+image/vnd.djvu djvu djv
+image/vnd.dwg dwg
+image/vnd.dxf dxf
+image/vnd.fastbidsheet fbs
+image/vnd.fpx fpx
+image/vnd.fst fst
+image/vnd.fujixerox.edmics-mmr mmr
+image/vnd.fujixerox.edmics-rlc rlc
+# image/vnd.globalgraphics.pgb
+# image/vnd.microsoft.icon
+# image/vnd.mix
+image/vnd.ms-modi mdi
+image/vnd.ms-photo wdp
+image/vnd.net-fpx npx
+# image/vnd.radiance
+# image/vnd.sealed.png
+# image/vnd.sealedmedia.softseal.gif
+# image/vnd.sealedmedia.softseal.jpg
+# image/vnd.svf
+image/vnd.wap.wbmp wbmp
+image/vnd.xiff xif
+image/webp webp
+image/x-3ds 3ds
+image/x-cmu-raster ras
+image/x-cmx cmx
+image/x-freehand fh fhc fh4 fh5 fh7
+image/x-icon ico
+image/x-mrsid-image sid
+image/x-pcx pcx
+image/x-pict pic pct
+image/x-portable-anymap pnm
+image/x-portable-bitmap pbm
+image/x-portable-graymap pgm
+image/x-portable-pixmap ppm
+image/x-rgb rgb
+image/x-tga tga
+image/x-xbitmap xbm
+image/x-xpixmap xpm
+image/x-xwindowdump xwd
+# message/cpim
+# message/delivery-status
+# message/disposition-notification
+# message/example
+# message/external-body
+# message/feedback-report
+# message/global
+# message/global-delivery-status
+# message/global-disposition-notification
+# message/global-headers
+# message/http
+# message/imdn+xml
+# message/news
+# message/partial
+message/rfc822 eml mime
+# message/s-http
+# message/sip
+# message/sipfrag
+# message/tracking-status
+# message/vnd.si.simp
+# model/example
+model/iges igs iges
+model/mesh msh mesh silo
+model/vnd.collada+xml dae
+model/vnd.dwf dwf
+# model/vnd.flatland.3dml
+model/vnd.gdl gdl
+# model/vnd.gs-gdl
+# model/vnd.gs.gdl
+model/vnd.gtw gtw
+# model/vnd.moml+xml
+model/vnd.mts mts
+# model/vnd.parasolid.transmit.binary
+# model/vnd.parasolid.transmit.text
+model/vnd.vtu vtu
+model/vrml wrl vrml
+model/x3d+binary x3db x3dbz
+model/x3d+vrml x3dv x3dvz
+model/x3d+xml x3d x3dz
+# multipart/alternative
+# multipart/appledouble
+# multipart/byteranges
+# multipart/digest
+# multipart/encrypted
+# multipart/example
+# multipart/form-data
+# multipart/header-set
+# multipart/mixed
+# multipart/parallel
+# multipart/related
+# multipart/report
+# multipart/signed
+# multipart/voice-message
+# text/1d-interleaved-parityfec
+text/cache-manifest appcache
+text/calendar ics ifb
+text/css css
+text/csv csv
+# text/directory
+# text/dns
+# text/ecmascript
+# text/enriched
+# text/example
+# text/fwdred
+text/html html htm
+# text/javascript
+text/n3 n3
+# text/parityfec
+text/plain txt text conf def list log in
+# text/prs.fallenstein.rst
+text/prs.lines.tag dsc
+# text/vnd.radisys.msml-basic-layout
+# text/red
+# text/rfc822-headers
+text/richtext rtx
+# text/rtf
+# text/rtp-enc-aescm128
+# text/rtx
+text/sgml sgml sgm
+# text/t140
+text/tab-separated-values tsv
+text/troff t tr roff man me ms
+text/turtle ttl
+# text/ulpfec
+text/uri-list uri uris urls
+text/vcard vcard
+# text/vnd.abc
+text/vnd.curl curl
+text/vnd.curl.dcurl dcurl
+text/vnd.curl.scurl scurl
+text/vnd.curl.mcurl mcurl
+# text/vnd.dmclientscript
+text/vnd.dvb.subtitle sub
+# text/vnd.esmertec.theme-descriptor
+text/vnd.fly fly
+text/vnd.fmi.flexstor flx
+text/vnd.graphviz gv
+text/vnd.in3d.3dml 3dml
+text/vnd.in3d.spot spot
+# text/vnd.iptc.newsml
+# text/vnd.iptc.nitf
+# text/vnd.latex-z
+# text/vnd.motorola.reflex
+# text/vnd.ms-mediapackage
+# text/vnd.net2phone.commcenter.command
+# text/vnd.si.uricatalogue
+text/vnd.sun.j2me.app-descriptor jad
+# text/vnd.trolltech.linguist
+# text/vnd.wap.si
+# text/vnd.wap.sl
+text/vnd.wap.wml wml
+text/vnd.wap.wmlscript wmls
+text/x-asm s asm
+text/x-c c cc cxx cpp h hh dic
+text/x-fortran f for f77 f90
+text/x-java-source java
+text/x-opml opml
+text/x-pascal p pas
+text/x-nfo nfo
+text/x-setext etx
+text/x-sfv sfv
+text/x-uuencode uu
+text/x-vcalendar vcs
+text/x-vcard vcf
+# text/xml
+# text/xml-external-parsed-entity
+# video/1d-interleaved-parityfec
+video/3gpp 3gp
+# video/3gpp-tt
+video/3gpp2 3g2
+# video/bmpeg
+# video/bt656
+# video/celb
+# video/dv
+# video/example
+video/h261 h261
+video/h263 h263
+# video/h263-1998
+# video/h263-2000
+video/h264 h264
+# video/h264-rcdo
+# video/h264-svc
+video/jpeg jpgv
+# video/jpeg2000
+video/jpm jpm jpgm
+video/mj2 mj2 mjp2
+# video/mp1s
+# video/mp2p
+# video/mp2t
+video/mp4 mp4 mp4v mpg4
+# video/mp4v-es
+video/mpeg mpeg mpg mpe m1v m2v
+# video/mpeg4-generic
+# video/mpv
+# video/nv
+video/ogg ogv
+# video/parityfec
+# video/pointer
+video/quicktime qt mov
+# video/raw
+# video/rtp-enc-aescm128
+# video/rtx
+# video/smpte292m
+# video/ulpfec
+# video/vc1
+# video/vnd.cctv
+video/vnd.dece.hd uvh uvvh
+video/vnd.dece.mobile uvm uvvm
+# video/vnd.dece.mp4
+video/vnd.dece.pd uvp uvvp
+video/vnd.dece.sd uvs uvvs
+video/vnd.dece.video uvv uvvv
+# video/vnd.directv.mpeg
+# video/vnd.directv.mpeg-tts
+# video/vnd.dlna.mpeg-tts
+video/vnd.dvb.file dvb
+video/vnd.fvt fvt
+# video/vnd.hns.video
+# video/vnd.iptvforum.1dparityfec-1010
+# video/vnd.iptvforum.1dparityfec-2005
+# video/vnd.iptvforum.2dparityfec-1010
+# video/vnd.iptvforum.2dparityfec-2005
+# video/vnd.iptvforum.ttsavc
+# video/vnd.iptvforum.ttsmpeg2
+# video/vnd.motorola.video
+# video/vnd.motorola.videop
+video/vnd.mpegurl mxu m4u
+video/vnd.ms-playready.media.pyv pyv
+# video/vnd.nokia.interleaved-multimedia
+# video/vnd.nokia.videovoip
+# video/vnd.objectvideo
+# video/vnd.sealed.mpeg1
+# video/vnd.sealed.mpeg4
+# video/vnd.sealed.swf
+# video/vnd.sealedmedia.softseal.mov
+video/vnd.uvvu.mp4 uvu uvvu
+video/vnd.vivo viv
+video/webm webm
+video/x-f4v f4v
+video/x-fli fli
+video/x-flv flv
+video/x-m4v m4v
+video/x-matroska mkv mk3d mks
+video/x-mng mng
+video/x-ms-asf asf asx
+video/x-ms-vob vob
+video/x-ms-wm wm
+video/x-ms-wmv wmv
+video/x-ms-wmx wmx
+video/x-ms-wvx wvx
+video/x-msvideo avi
+video/x-sgi-movie movie
+video/x-smv smv
+x-conference/x-cooltalk ice
diff --git a/node_modules/mime/types/node.types b/node_modules/mime/types/node.types
new file mode 100644
index 00000000..970a1bd8
--- /dev/null
+++ b/node_modules/mime/types/node.types
@@ -0,0 +1,60 @@
+# What: WebVTT
+# Why: To allow formats intended for marking up external text track resources.
+# http://dev.w3.org/html5/webvtt/
+# Added by: niftylettuce
+text/vtt vtt
+
+# What: Google Chrome Extension
+# Why: To allow apps to (work) be served with the right content type header.
+# http://codereview.chromium.org/2830017
+# Added by: niftylettuce
+application/x-chrome-extension crx
+
+# What: HTC support
+# Why: To properly render .htc files such as CSS3PIE
+# Added by: niftylettuce
+text/x-component htc
+
+# What: HTML5 application cache manifest
+# Why: De-facto standard. Required by Mozilla browser when serving HTML5 apps
+# per https://developer.mozilla.org/en/offline_resources_in_firefox
+# Added by: louisremi
+text/cache-manifest appcache manifest
+
+# What: node binary buffer format
+# Why: semi-standard extension w/in the node community
+# Added by: tootallnate
+application/octet-stream buffer
+
+# What: The "protected" MP-4 formats used by iTunes.
+# Why: Required for streaming music to browsers (?)
+# Added by: broofa
+application/mp4 m4p
+audio/mp4 m4a
+
+# What: Video format, Part of RFC1890
+# Why: See https://github.com/bentomas/node-mime/pull/6
+# Added by: mjrusso
+video/MP2T ts
+
+# What: EventSource mime type
+# Why: mime type of Server-Sent Events stream
+# http://www.w3.org/TR/eventsource/#text-event-stream
+# Added by: francois2metz
+text/event-stream event-stream
+
+# What: Mozilla App manifest mime type
+# Why: https://developer.mozilla.org/en/Apps/Manifest#Serving_manifests
+# Added by: ednapiranha
+application/x-web-app-manifest+json webapp
+
+# What: Lua file types
+# Why: Googling around shows de-facto consensus on these
+# Added by: creationix (Issue #45)
+text/x-lua lua
+application/x-lua-bytecode luac
+
+# What: Markdown files, as per http://daringfireball.net/projects/markdown/syntax
+# Why: http://stackoverflow.com/questions/10701983/what-is-the-mime-type-for-markdown
+# Added by: avoidwork
+text/x-markdown markdown md mkd
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 00000000..813f2b28
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,28 @@
+{
+ "name": "TeachingKidsProgramming.Java",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "dependencies": {
+ "git": "^0.1.5"
+ }
+ },
+ "node_modules/git": {
+ "version": "0.1.5",
+ "resolved": "https://registry.npmjs.org/git/-/git-0.1.5.tgz",
+ "integrity": "sha512-N+bfOrXyKMU/fQtCj6D/U9MQOEN0DAA8TLHSLdUQRSWBOkeRvsjJHdrdkvcq05xO7GSDKWc3nDEGoTZ4DfCCSg==",
+ "dependencies": {
+ "mime": "1.2.9"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/mime": {
+ "version": "1.2.9",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.9.tgz",
+ "integrity": "sha512-WiLgbHTIq5AYUvU/Luli4mZ1bUcHpGNHyCsbl+KPMg4zt+XUDpQehWjuBjdLaEvDTinvKj/FgfQt3fPoT7j08g=="
+ }
+ }
+}
diff --git a/package.json b/package.json
new file mode 100644
index 00000000..3ef4c2ac
--- /dev/null
+++ b/package.json
@@ -0,0 +1,5 @@
+{
+ "dependencies": {
+ "git": "^0.1.5"
+ }
+}
From d5456057a3652e2652c19f9a90d858ba9d055aab Mon Sep 17 00:00:00 2001
From: Samaresh Shil <108946101+samaresh96@users.noreply.github.com>
Date: Wed, 20 Dec 2023 10:20:52 +0000
Subject: [PATCH 3/3] my commit
---
JAVA/{calculator.java => Samaresh.java} | 8 ++------
1 file changed, 2 insertions(+), 6 deletions(-)
rename JAVA/{calculator.java => Samaresh.java} (61%)
diff --git a/JAVA/calculator.java b/JAVA/Samaresh.java
similarity index 61%
rename from JAVA/calculator.java
rename to JAVA/Samaresh.java
index b41e7751..98da1ae7 100644
--- a/JAVA/calculator.java
+++ b/JAVA/Samaresh.java
@@ -1,11 +1,7 @@
/**
- * calculator
+ * Samaresh
*/
-
-/**
- * calculator
- */
-public class calculator {
+public class Samaresh {
public static void main(String[] args) {
System.out.println("samaresh");