New Upstream Snapshot - ruby-hocon

Ready changes

Summary

Merged new upstream version: 1.3.1+git20230113.1.4f5f53a (was: 1.3.1).

Resulting package

Built on 2023-01-20T01:38 (took 2m47s)

The resulting binary packages can be installed (if you have the apt repository enabled) by running one of:

apt install -t fresh-snapshots ruby-hocon

Lintian Result

Diff

diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000..03deaa3
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+* @puppetlabs/phoenix
diff --git a/Gemfile b/Gemfile
new file mode 100644
index 0000000..0fc8381
--- /dev/null
+++ b/Gemfile
@@ -0,0 +1,8 @@
+source "https://rubygems.org"
+
+# Specify your gem's dependencies in hocon.gemspec
+gemspec
+
+group :tests do
+  gem 'rspec', '~> 3.0'
+end
diff --git a/debian/changelog b/debian/changelog
index 9b0ceb4..5252d4e 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+ruby-hocon (1.3.1+git20230113.1.4f5f53a-1) UNRELEASED; urgency=low
+
+  * New upstream snapshot.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Fri, 20 Jan 2023 01:36:59 -0000
+
 ruby-hocon (1.3.1-2) unstable; urgency=medium
 
   * d/rules: install package as a gem
diff --git a/hocon.gemspec b/hocon.gemspec
index 45d3ea7..cebdf00 100644
--- a/hocon.gemspec
+++ b/hocon.gemspec
@@ -1,39 +1,21 @@
-#########################################################
-# This file has been automatically generated by gem2tgz #
-#########################################################
-# -*- encoding: utf-8 -*-
-# stub: hocon 1.3.1 ruby lib
+$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
+require 'hocon/version'
 
 Gem::Specification.new do |s|
-  s.name = "hocon".freeze
-  s.version = "1.3.1"
+  s.name        = 'hocon'
+  s.version     = Hocon::Version::STRING
+  s.date        = '2016-10-27'
+  s.summary     = "HOCON Config Library"
+  s.description = "== A port of the Java {Typesafe Config}[https://github.com/typesafehub/config] library to Ruby"
+  s.authors     = ["Chris Price", "Wayne Warren", "Preben Ingvaldsen", "Joe Pinsonault", "Kevin Corcoran", "Jane Lu"]
+  s.email       = 'chris@puppetlabs.com'
+  s.files       = Dir["{lib}/**/*.rb", "bin/*", "LICENSE", "*.md"]
+  s.require_paths = ["lib"]
+  s.executables   = ['hocon']
+  s.homepage      = 'https://github.com/puppetlabs/ruby-hocon'
+  s.license       = 'Apache License, v2'
+  s.required_ruby_version = '>=1.9.0'
 
-  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
-  s.require_paths = ["lib".freeze]
-  s.authors = ["Chris Price".freeze, "Wayne Warren".freeze, "Preben Ingvaldsen".freeze, "Joe Pinsonault".freeze, "Kevin Corcoran".freeze, "Jane Lu".freeze]
-  s.date = "2016-10-27"
-  s.description = "== A port of the Java {Typesafe Config}[https://github.com/typesafehub/config] library to Ruby".freeze
-  s.email = "chris@puppetlabs.com".freeze
-  s.executables = ["hocon".freeze]
-  s.files = ["CHANGELOG.md".freeze, "HISTORY.md".freeze, "LICENSE".freeze, "README.md".freeze, "bin/hocon".freeze, "lib/hocon.rb".freeze, "lib/hocon/cli.rb".freeze, "lib/hocon/config.rb".freeze, "lib/hocon/config_error.rb".freeze, "lib/hocon/config_factory.rb".freeze, "lib/hocon/config_include_context.rb".freeze, "lib/hocon/config_includer_file.rb".freeze, "lib/hocon/config_list.rb".freeze, "lib/hocon/config_mergeable.rb".freeze, "lib/hocon/config_object.rb".freeze, "lib/hocon/config_parse_options.rb".freeze, "lib/hocon/config_parseable.rb".freeze, "lib/hocon/config_render_options.rb".freeze, "lib/hocon/config_resolve_options.rb".freeze, "lib/hocon/config_syntax.rb".freeze, "lib/hocon/config_util.rb".freeze, "lib/hocon/config_value.rb".freeze, "lib/hocon/config_value_factory.rb".freeze, "lib/hocon/config_value_type.rb".freeze, "lib/hocon/impl.rb".freeze, "lib/hocon/impl/abstract_config_node.rb".freeze, "lib/hocon/impl/abstract_config_node_value.rb".freeze, "lib/hocon/impl/abstract_config_object.rb".freeze, "lib/hocon/impl/abstract_config_value.rb".freeze, "lib/hocon/impl/array_iterator.rb".freeze, "lib/hocon/impl/config_boolean.rb".freeze, "lib/hocon/impl/config_concatenation.rb".freeze, "lib/hocon/impl/config_delayed_merge.rb".freeze, "lib/hocon/impl/config_delayed_merge_object.rb".freeze, "lib/hocon/impl/config_document_parser.rb".freeze, "lib/hocon/impl/config_double.rb".freeze, "lib/hocon/impl/config_impl.rb".freeze, "lib/hocon/impl/config_impl_util.rb".freeze, "lib/hocon/impl/config_include_kind.rb".freeze, "lib/hocon/impl/config_int.rb".freeze, "lib/hocon/impl/config_node_array.rb".freeze, "lib/hocon/impl/config_node_comment.rb".freeze, "lib/hocon/impl/config_node_complex_value.rb".freeze, "lib/hocon/impl/config_node_concatenation.rb".freeze, "lib/hocon/impl/config_node_field.rb".freeze, "lib/hocon/impl/config_node_include.rb".freeze, "lib/hocon/impl/config_node_object.rb".freeze, "lib/hocon/impl/config_node_path.rb".freeze, 
"lib/hocon/impl/config_node_root.rb".freeze, "lib/hocon/impl/config_node_simple_value.rb".freeze, "lib/hocon/impl/config_node_single_token.rb".freeze, "lib/hocon/impl/config_null.rb".freeze, "lib/hocon/impl/config_number.rb".freeze, "lib/hocon/impl/config_parser.rb".freeze, "lib/hocon/impl/config_reference.rb".freeze, "lib/hocon/impl/config_string.rb".freeze, "lib/hocon/impl/container.rb".freeze, "lib/hocon/impl/default_transformer.rb".freeze, "lib/hocon/impl/from_map_mode.rb".freeze, "lib/hocon/impl/full_includer.rb".freeze, "lib/hocon/impl/memo_key.rb".freeze, "lib/hocon/impl/mergeable_value.rb".freeze, "lib/hocon/impl/origin_type.rb".freeze, "lib/hocon/impl/parseable.rb".freeze, "lib/hocon/impl/path.rb".freeze, "lib/hocon/impl/path_builder.rb".freeze, "lib/hocon/impl/path_parser.rb".freeze, "lib/hocon/impl/replaceable_merge_stack.rb".freeze, "lib/hocon/impl/resolve_context.rb".freeze, "lib/hocon/impl/resolve_memos.rb".freeze, "lib/hocon/impl/resolve_result.rb".freeze, "lib/hocon/impl/resolve_source.rb".freeze, "lib/hocon/impl/resolve_status.rb".freeze, "lib/hocon/impl/simple_config.rb".freeze, "lib/hocon/impl/simple_config_document.rb".freeze, "lib/hocon/impl/simple_config_list.rb".freeze, "lib/hocon/impl/simple_config_object.rb".freeze, "lib/hocon/impl/simple_config_origin.rb".freeze, "lib/hocon/impl/simple_include_context.rb".freeze, "lib/hocon/impl/simple_includer.rb".freeze, "lib/hocon/impl/substitution_expression.rb".freeze, "lib/hocon/impl/token.rb".freeze, "lib/hocon/impl/token_type.rb".freeze, "lib/hocon/impl/tokenizer.rb".freeze, "lib/hocon/impl/tokens.rb".freeze, "lib/hocon/impl/unmergeable.rb".freeze, "lib/hocon/impl/unsupported_operation_error.rb".freeze, "lib/hocon/impl/url.rb".freeze, "lib/hocon/parser.rb".freeze, "lib/hocon/parser/config_document.rb".freeze, "lib/hocon/parser/config_document_factory.rb".freeze, "lib/hocon/parser/config_node.rb".freeze, "lib/hocon/version.rb".freeze]
-  s.homepage = "https://github.com/puppetlabs/ruby-hocon".freeze
-  s.licenses = ["Apache License, v2".freeze]
-  s.required_ruby_version = Gem::Requirement.new(">= 1.9.0".freeze)
-  s.rubygems_version = "2.5.2.1".freeze
-  s.summary = "HOCON Config Library".freeze
-
-  if s.respond_to? :specification_version then
-    s.specification_version = 4
-
-    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
-      s.add_development_dependency(%q<bundler>.freeze, ["~> 1.5"])
-      s.add_development_dependency(%q<rspec>.freeze, ["~> 2.14"])
-    else
-      s.add_dependency(%q<bundler>.freeze, ["~> 1.5"])
-      s.add_dependency(%q<rspec>.freeze, ["~> 2.14"])
-    end
-  else
-    s.add_dependency(%q<bundler>.freeze, ["~> 1.5"])
-    s.add_dependency(%q<rspec>.freeze, ["~> 2.14"])
-  end
+  # Testing dependencies
+  s.add_development_dependency 'rspec', '~> 2.14'
 end
diff --git a/lib/hocon/impl/parseable.rb b/lib/hocon/impl/parseable.rb
index 30c94e4..78e8868 100644
--- a/lib/hocon/impl/parseable.rb
+++ b/lib/hocon/impl/parseable.rb
@@ -442,7 +442,7 @@ class Hocon::Impl::Parseable
       end
       if sibling.nil?
         nil
-      elsif File.exists?(sibling)
+      elsif File.exist?(sibling)
         self.class.trace("#{sibling} exists, so loading it as a file")
         Hocon::Impl::Parseable.new_file(sibling, options.set_origin_description(nil))
       else
diff --git a/spec/fixtures/hocon/by_extension/cat.conf b/spec/fixtures/hocon/by_extension/cat.conf
new file mode 100644
index 0000000..0d757dd
--- /dev/null
+++ b/spec/fixtures/hocon/by_extension/cat.conf
@@ -0,0 +1,4 @@
+# Comment
+{
+  "meow": "cats"
+}
\ No newline at end of file
diff --git a/spec/fixtures/hocon/by_extension/cat.test b/spec/fixtures/hocon/by_extension/cat.test
new file mode 100644
index 0000000..0d757dd
--- /dev/null
+++ b/spec/fixtures/hocon/by_extension/cat.test
@@ -0,0 +1,4 @@
+# Comment
+{
+  "meow": "cats"
+}
\ No newline at end of file
diff --git a/spec/fixtures/hocon/by_extension/cat.test-json b/spec/fixtures/hocon/by_extension/cat.test-json
new file mode 100644
index 0000000..a8e07fd
--- /dev/null
+++ b/spec/fixtures/hocon/by_extension/cat.test-json
@@ -0,0 +1,3 @@
+{
+  "meow": "cats"
+}
\ No newline at end of file
diff --git a/spec/fixtures/hocon/with_substitution/subst.conf b/spec/fixtures/hocon/with_substitution/subst.conf
new file mode 100644
index 0000000..a0a874d
--- /dev/null
+++ b/spec/fixtures/hocon/with_substitution/subst.conf
@@ -0,0 +1,3 @@
+a: true
+b: ${a}
+c: ${ENVARRAY}
diff --git a/spec/fixtures/parse_render/example1/input.conf b/spec/fixtures/parse_render/example1/input.conf
new file mode 100644
index 0000000..c7a67fb
--- /dev/null
+++ b/spec/fixtures/parse_render/example1/input.conf
@@ -0,0 +1,21 @@
+# These are some opening comments
+# These are some additional opening comments
+foo.bar {
+  // the baz is is blah blah
+  baz = 42
+  boom = [1, 2, {derp : duh }, 4]
+  empty = []
+
+  # abracadabra setting
+  abracadabra = "hi"
+}
+
+// as for the yippee
+# it entails some things
+foo.bar.yahoo = "yippee"
+
+# truthy
+foo.bar.truthy = true
+
+# falsy
+foo.bar.falsy  = false
\ No newline at end of file
diff --git a/spec/fixtures/parse_render/example1/output.conf b/spec/fixtures/parse_render/example1/output.conf
new file mode 100644
index 0000000..0fa779c
--- /dev/null
+++ b/spec/fixtures/parse_render/example1/output.conf
@@ -0,0 +1,26 @@
+foo={
+    # These are some opening comments
+    # These are some additional opening comments
+    bar={
+        # falsy
+        falsy=false
+        # truthy
+        truthy=true
+        # as for the yippee
+        # it entails some things
+        yahoo=yippee
+        # the baz is is blah blah
+        baz=42
+        boom=[
+            1,
+            2,
+            {
+                derp=duh
+            },
+            4
+        ]
+        empty=[]
+        # abracadabra setting
+        abracadabra=hi
+    }
+}
diff --git a/spec/fixtures/parse_render/example1/output_nocomments.conf b/spec/fixtures/parse_render/example1/output_nocomments.conf
new file mode 100644
index 0000000..76442a7
--- /dev/null
+++ b/spec/fixtures/parse_render/example1/output_nocomments.conf
@@ -0,0 +1,17 @@
+foo {
+    bar {
+        falsy=false
+        truthy=true
+        yahoo=yippee
+        baz=42
+        boom=[
+            1,
+            2,
+            {
+                derp=duh
+            },
+            4
+        ]
+        abracadabra=hi
+    }
+}
diff --git a/spec/fixtures/parse_render/example2/input.conf b/spec/fixtures/parse_render/example2/input.conf
new file mode 100644
index 0000000..1b64267
--- /dev/null
+++ b/spec/fixtures/parse_render/example2/input.conf
@@ -0,0 +1,10 @@
+jruby-puppet: {
+    jruby-pools: [{environment: production}]
+    load-path: [/usr/lib/ruby/site_ruby/1.8, /usr/lib/ruby/site_ruby/1.8]
+    master-conf-dir: /etc/puppet
+    master-var-dir: /var/lib/puppet
+}
+
+webserver: {
+  host: 1.2.3.4
+}
diff --git a/spec/fixtures/parse_render/example2/output.conf b/spec/fixtures/parse_render/example2/output.conf
new file mode 100644
index 0000000..2ee35c6
--- /dev/null
+++ b/spec/fixtures/parse_render/example2/output.conf
@@ -0,0 +1,17 @@
+jruby-puppet {
+    jruby-pools=[
+        {
+            environment=production
+        }
+    ]
+    load-path=[
+        "/usr/lib/ruby/site_ruby/1.8",
+        "/usr/lib/ruby/site_ruby/1.8"
+    ]
+    master-conf-dir="/etc/puppet"
+    master-var-dir="/var/lib/puppet"
+}
+
+webserver {
+  host="1.2.3.4"
+}
diff --git a/spec/fixtures/parse_render/example2/output_nocomments.conf b/spec/fixtures/parse_render/example2/output_nocomments.conf
new file mode 100644
index 0000000..2ee35c6
--- /dev/null
+++ b/spec/fixtures/parse_render/example2/output_nocomments.conf
@@ -0,0 +1,17 @@
+jruby-puppet {
+    jruby-pools=[
+        {
+            environment=production
+        }
+    ]
+    load-path=[
+        "/usr/lib/ruby/site_ruby/1.8",
+        "/usr/lib/ruby/site_ruby/1.8"
+    ]
+    master-conf-dir="/etc/puppet"
+    master-var-dir="/var/lib/puppet"
+}
+
+webserver {
+  host="1.2.3.4"
+}
diff --git a/spec/fixtures/parse_render/example3/input.conf b/spec/fixtures/parse_render/example3/input.conf
new file mode 100644
index 0000000..0cfda9d
--- /dev/null
+++ b/spec/fixtures/parse_render/example3/input.conf
@@ -0,0 +1,2 @@
+a: true
+b: ${a}
diff --git a/spec/fixtures/parse_render/example3/output.conf b/spec/fixtures/parse_render/example3/output.conf
new file mode 100644
index 0000000..30816f5
--- /dev/null
+++ b/spec/fixtures/parse_render/example3/output.conf
@@ -0,0 +1,2 @@
+a=true
+b=true
\ No newline at end of file
diff --git a/spec/fixtures/parse_render/example4/input.json b/spec/fixtures/parse_render/example4/input.json
new file mode 100644
index 0000000..35335fc
--- /dev/null
+++ b/spec/fixtures/parse_render/example4/input.json
@@ -0,0 +1,6 @@
+{
+    "kermit": "frog",
+    "miss": "piggy",
+    "bert": "ernie",
+    "janice": "guitar"
+}
\ No newline at end of file
diff --git a/spec/fixtures/parse_render/example4/output.conf b/spec/fixtures/parse_render/example4/output.conf
new file mode 100644
index 0000000..163e8e4
--- /dev/null
+++ b/spec/fixtures/parse_render/example4/output.conf
@@ -0,0 +1,6 @@
+{
+    "kermit"="frog",
+    "miss"="piggy",
+    "bert"="ernie",
+    "janice"="guitar"
+}
\ No newline at end of file
diff --git a/spec/fixtures/test_utils/resources/bom.conf b/spec/fixtures/test_utils/resources/bom.conf
new file mode 100644
index 0000000..206fcda
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/bom.conf
@@ -0,0 +1,2 @@
+#
+foo = bar
diff --git a/spec/fixtures/test_utils/resources/cycle.conf b/spec/fixtures/test_utils/resources/cycle.conf
new file mode 100644
index 0000000..a077291
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/cycle.conf
@@ -0,0 +1 @@
+include "cycle.conf"
diff --git a/spec/fixtures/test_utils/resources/file-include.conf b/spec/fixtures/test_utils/resources/file-include.conf
new file mode 100644
index 0000000..36d4efe
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/file-include.conf
@@ -0,0 +1,5 @@
+base=41
+# included without file() in a subdir
+include "subdir/foo.conf"
+# included using file() in a subdir
+include file("subdir/baz.conf")
diff --git a/spec/fixtures/test_utils/resources/include-from-list.conf b/spec/fixtures/test_utils/resources/include-from-list.conf
new file mode 100644
index 0000000..505d048
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/include-from-list.conf
@@ -0,0 +1,4 @@
+// The {} inside the [] is needed because
+// just [ include ] means an array with the
+// string "include" in it.
+a = [ { include "test01.conf" } ]
diff --git a/spec/fixtures/test_utils/resources/subdir/bar.conf b/spec/fixtures/test_utils/resources/subdir/bar.conf
new file mode 100644
index 0000000..567e6b4
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/subdir/bar.conf
@@ -0,0 +1 @@
+bar=43
diff --git a/spec/fixtures/test_utils/resources/subdir/baz.conf b/spec/fixtures/test_utils/resources/subdir/baz.conf
new file mode 100644
index 0000000..dc9b6f5
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/subdir/baz.conf
@@ -0,0 +1 @@
+baz=45
diff --git a/spec/fixtures/test_utils/resources/subdir/foo.conf b/spec/fixtures/test_utils/resources/subdir/foo.conf
new file mode 100644
index 0000000..e75cf7b
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/subdir/foo.conf
@@ -0,0 +1,5 @@
+foo=42
+# included without file()
+include "bar.conf"
+# included using file()
+include file("bar-file.conf")
diff --git a/spec/fixtures/test_utils/resources/test01.conf b/spec/fixtures/test_utils/resources/test01.conf
new file mode 100644
index 0000000..5d708b9
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/test01.conf
@@ -0,0 +1,80 @@
+{
+    "ints" : {
+        "fortyTwo" : 42,
+        "fortyTwoAgain" : ${ints.fortyTwo}
+    },
+
+    "floats" : {
+        "fortyTwoPointOne" : 42.1,
+        "fortyTwoPointOneAgain" : ${floats.fortyTwoPointOne}
+    },
+
+    "strings" : {
+        "abcd" : "abcd",
+        "abcdAgain" : ${strings.a}${strings.b}${strings.c}${strings.d},
+        "a" : "a",
+        "b" : "b",
+        "c" : "c",
+        "d" : "d",
+        "concatenated" : null bar 42 baz true 3.14 hi,
+        "double" : "3.14",
+        "number" : "57",
+        "null" : "null",
+        "true" : "true",
+        "yes" : "yes",
+        "false" : "false",
+        "no" : "no"
+    },
+
+    "arrays" : {
+        "empty" : [],
+        "ofInt" : [1, 2, 3],
+        "ofString" : [ ${strings.a}, ${strings.b}, ${strings.c} ],
+        "ofDouble" : [3.14, 4.14, 5.14],
+        "ofNull" : [null, null, null],
+        "ofBoolean" : [true, false],
+        "ofArray" : [${arrays.ofString}, ${arrays.ofString}, ${arrays.ofString}],
+        "ofObject" : [${ints}, ${booleans}, ${strings}],
+        "firstElementNotASubst" : [ "a", ${strings.b} ]
+    },
+
+    "booleans" : {
+        "true" : true,
+        "trueAgain" : ${booleans.true},
+        "false" : false,
+        "falseAgain" : ${booleans.false}
+    },
+
+    "nulls" : {
+        "null" : null,
+        "nullAgain" : ${nulls.null}
+    },
+
+    "durations" : {
+        "second" : 1s,
+        "secondsList" : [1s,2seconds,3 s, 4000],
+        "secondAsNumber" : 1000,
+        "halfSecond" : 0.5s,
+        "millis" : 1 milli,
+        "micros" : 2000 micros
+    },
+
+    "memsizes" : {
+        "meg" : 1M,
+        "megsList" : [1M, 1024K, 1048576],
+        "megAsNumber" : 1048576,
+        "halfMeg" : 0.5M
+    },
+
+    "system" : {
+        "javaversion" : ${?java.version},
+        "userhome" : ${?user.home},
+        "home" : ${?HOME},
+        "pwd" : ${?PWD},
+        "shell" : ${?SHELL},
+        "lang" : ${?LANG},
+        "path" : ${?PATH},
+        "not_here" : ${?NOT_HERE},
+        "concatenated" : Your Java version is ${?system.javaversion} and your user.home is ${?system.userhome}
+    }
+}
diff --git a/spec/fixtures/test_utils/resources/test01.json b/spec/fixtures/test_utils/resources/test01.json
new file mode 100644
index 0000000..1380915
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/test01.json
@@ -0,0 +1,4 @@
+{
+    "fromJson1" : 1,
+    "fromJsonA" : "A"
+}
\ No newline at end of file
diff --git a/spec/fixtures/test_utils/resources/test03.conf b/spec/fixtures/test_utils/resources/test03.conf
new file mode 100644
index 0000000..1fd1e13
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/test03.conf
@@ -0,0 +1,36 @@
+{
+  "test01" : {
+    "ints" : 12,
+    include "test01",
+    "booleans" : 42
+  },
+
+  "test02" : {
+    include
+
+      "test02.conf"
+  },
+
+  "equiv01" : {
+    include "equiv01/original.json"
+  },
+
+  # missing includes are supposed to be silently ignored
+  nonexistent {
+    include "nothere"
+    include "nothere.conf"
+    include "nothere.json"
+    include "nothere.properties"
+  }
+
+  # make sure included file substitutions fall back to parent file,
+  # both when the include is at the root (so doesn't need to have
+  # substitutions adjusted) and when it is not.
+  foo="This is in the including file"
+  bar="This is in the including file"
+  include "test03-included.conf"
+
+  subtree {
+    include "test03-included.conf"
+  }
+}
diff --git a/spec/fixtures/test_utils/resources/utf16.conf b/spec/fixtures/test_utils/resources/utf16.conf
new file mode 100644
index 0000000..c209896
Binary files /dev/null and b/spec/fixtures/test_utils/resources/utf16.conf differ
diff --git a/spec/fixtures/test_utils/resources/utf8.conf b/spec/fixtures/test_utils/resources/utf8.conf
new file mode 100644
index 0000000..ed693da
--- /dev/null
+++ b/spec/fixtures/test_utils/resources/utf8.conf
@@ -0,0 +1,2 @@
+#
+ᚠᛇᚻ = ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢ
diff --git "a/spec/fixtures/test_utils/resources/\341\232\240\341\233\207\341\232\273.conf" "b/spec/fixtures/test_utils/resources/\341\232\240\341\233\207\341\232\273.conf"
new file mode 100644
index 0000000..ed693da
--- /dev/null
+++ "b/spec/fixtures/test_utils/resources/\341\232\240\341\233\207\341\232\273.conf"
@@ -0,0 +1,2 @@
+#
+ᚠᛇᚻ = ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢ
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
new file mode 100644
index 0000000..d03fa53
--- /dev/null
+++ b/spec/spec_helper.rb
@@ -0,0 +1,48 @@
+# encoding: utf-8
+
+FIXTURE_DIR = File.join(dir = File.expand_path(File.dirname(__FILE__)), "fixtures")
+
+EXAMPLE1 = { :hash =>
+    {"foo" => {
+      "bar" => {
+          "baz" => 42,
+          "abracadabra" => "hi",
+          "yahoo" => "yippee",
+          "boom" => [1, 2, {"derp" => "duh"}, 4],
+          "empty" => [],
+          "truthy" => true,
+          "falsy" => false
+      }}},
+    :name => "example1",
+}
+
+EXAMPLE2 = { :hash =>
+    {"jruby-puppet"=> {
+        "jruby-pools" => [{"environment" => "production"}],
+        "load-path" => ["/usr/lib/ruby/site_ruby/1.8", "/usr/lib/ruby/site_ruby/1.8"],
+        "master-conf-dir" => "/etc/puppet",
+        "master-var-dir" => "/var/lib/puppet",
+    },
+    "webserver" => {"host" => "1.2.3.4"}},
+    :name => "example2",
+  }
+
+EXAMPLE3 = { :hash =>
+                 {"a" => true,
+                  "b" => true},
+             :name => "example3",
+}
+
+EXAMPLE4 = { :hash =>
+                 {"kermit" => "frog",
+                  "miss" => "piggy",
+                  "bert" => "ernie",
+                  "janice" => "guitar"},
+             :name => "example4",
+}
+
+# set values out of order to verify they return in-order
+# must be set prior to config_impl.rb loading
+ENV['ENVARRAY.1'] = 'bar'
+ENV['ENVARRAY.2'] = 'baz'
+ENV['ENVARRAY.0'] = 'foo'
diff --git a/spec/test_utils.rb b/spec/test_utils.rb
new file mode 100644
index 0000000..bfeb732
--- /dev/null
+++ b/spec/test_utils.rb
@@ -0,0 +1,758 @@
+# encoding: utf-8
+
+require 'hocon'
+require 'spec_helper'
+require 'rspec'
+require 'hocon/impl/config_reference'
+require 'hocon/impl/substitution_expression'
+require 'hocon/impl/path_parser'
+require 'hocon/impl/config_impl_util'
+require 'hocon/impl/config_node_simple_value'
+require 'hocon/impl/config_node_single_token'
+require 'hocon/impl/config_node_object'
+require 'hocon/impl/config_node_array'
+require 'hocon/impl/config_node_concatenation'
+require 'hocon/cli'
+
+module TestUtils
+  Tokens = Hocon::Impl::Tokens
+  ConfigInt = Hocon::Impl::ConfigInt
+  ConfigDouble = Hocon::Impl::ConfigDouble
+  ConfigString = Hocon::Impl::ConfigString
+  ConfigNull = Hocon::Impl::ConfigNull
+  ConfigBoolean = Hocon::Impl::ConfigBoolean
+  ConfigReference = Hocon::Impl::ConfigReference
+  SubstitutionExpression = Hocon::Impl::SubstitutionExpression
+  ConfigConcatenation = Hocon::Impl::ConfigConcatenation
+  Path = Hocon::Impl::Path
+  EOF = Hocon::Impl::TokenType::EOF
+
+  include RSpec::Matchers
+
+  def self.intercept(exception_type, & block)
+    thrown = nil
+    result = nil
+    begin
+      result = block.call
+    rescue => e
+      if e.is_a?(exception_type)
+        thrown = e
+      else
+        raise "Expected exception #{exception_type} was not thrown, got #{e.class}: #{e}\n#{e.backtrace.join("\n")}"
+      end
+    end
+    if thrown.nil?
+      raise "Expected exception #{exception_type} was not thrown, no exception was thrown and got result #{result}"
+    end
+    thrown
+  end
+
+  class ParseTest
+
+    def self.from_s(test)
+      ParseTest.new(false, false, test)
+    end
+
+    def self.from_pair(lift_behavior_unexpected, test)
+      ParseTest.new(lift_behavior_unexpected, false, test)
+    end
+
+    def initialize(lift_behavior_unexpected, whitespace_matters, test)
+      @lift_behavior_unexpected = lift_behavior_unexpected
+      @whitespace_matters = whitespace_matters
+      @test = test
+    end
+    attr_reader :test
+
+    def lift_behavior_unexpected?
+      @lift_behavior_unexpected
+    end
+
+    def whitespace_matters?
+      @whitespace_matters
+    end
+  end
+
+
+  # note: it's important to put {} or [] at the root if you
+  # want to test "invalidity reasons" other than "wrong root"
+  InvalidJsonInvalidConf = [
+      ParseTest.from_s("{"),
+      ParseTest.from_s("}"),
+      ParseTest.from_s("["),
+      ParseTest.from_s("]"),
+      ParseTest.from_s(","),
+      ParseTest.from_pair(true, "10"), # value not in array or object, lift-json now allows this
+      ParseTest.from_pair(true, "\"foo\""), # value not in array or object, lift-json allows it
+      ParseTest.from_s(")\""), # single quote by itself
+      ParseTest.from_pair(true, "[,]"), # array with just a comma in it; lift is OK with this
+      ParseTest.from_pair(true, "[,,]"), # array with just two commas in it; lift is cool with this too
+      ParseTest.from_pair(true, "[1,2,,]"), # array with two trailing commas
+      ParseTest.from_pair(true, "[,1,2]"), # array with initial comma
+      ParseTest.from_pair(true, "{ , }"), # object with just a comma in it
+      ParseTest.from_pair(true, "{ , , }"), # object with just two commas in it
+      ParseTest.from_s("{ 1,2 }"), # object with single values not key-value pair
+      ParseTest.from_pair(true, '{ , "foo" : 10 }'), # object starts with comma
+      ParseTest.from_pair(true, "{ \"foo\" : 10 ,, }"), # object has two trailing commas
+      ParseTest.from_s(") \"a\" : 10 ,, "), # two trailing commas for braceless root object
+      ParseTest.from_s("{ \"foo\" : }"), # no value in object
+      ParseTest.from_s("{ : 10 }"), # no key in object
+      ParseTest.from_pair(true, " \"foo\" : "), # no value in object with no braces; lift-json thinks this is acceptable
+      ParseTest.from_pair(true, " : 10 "), # no key in object with no braces; lift-json is cool with this too
+      ParseTest.from_s(') "foo" : 10 } '), # close brace but no open
+      ParseTest.from_s(") \"foo\" : 10 } "), # close brace but no open
+      ParseTest.from_s(") \"foo\" : 10 [ "), # no-braces object with trailing gunk
+      ParseTest.from_s("{ \"foo\" }"), # no value or colon
+      ParseTest.from_s("{ \"a\" : [ }"), # [ is not a valid value
+      ParseTest.from_s("{ \"foo\" : 10, true }"), # non-key after comma
+      ParseTest.from_s("{ foo \n bar : 10 }"), # newline in the middle of the unquoted key
+      ParseTest.from_s("[ 1, \\"), # ends with backslash
+      # these two problems are ignored by the lift tokenizer
+      ParseTest.from_s("[:\"foo\", \"bar\"]"), # colon in an array; lift doesn't throw (tokenizer erases it)
+      ParseTest.from_s("[\"foo\" : \"bar\"]"), # colon in an array another way, lift ignores (tokenizer erases it)
+      ParseTest.from_s("[ \"hello ]"), # unterminated string
+      ParseTest.from_pair(true, "{ \"foo\" , true }"), # comma instead of colon, lift is fine with this
+      ParseTest.from_pair(true, "{ \"foo\" : true \"bar\" : false }"), # missing comma between fields, lift fine with this
+      ParseTest.from_s("[ 10, }]"), # array with } as an element
+      ParseTest.from_s("[ 10, {]"), # array with { as an element
+      ParseTest.from_s("{}x"), # trailing invalid token after the root object
+      ParseTest.from_s("[]x"), # trailing invalid token after the root array
+      ParseTest.from_pair(true, "{}{}"), # trailing token after the root object - lift OK with it
+      ParseTest.from_pair(true, "{}true"), # trailing token after the root object; lift ignores the {}
+      ParseTest.from_pair(true, "[]{}"), # trailing valid token after the root array
+      ParseTest.from_pair(true, "[]true"), # trailing valid token after the root array, lift ignores the []
+      ParseTest.from_s("[${]"), # unclosed substitution
+      ParseTest.from_s("[$]"), # '$' by itself
+      ParseTest.from_s("[$  ]"), # '$' by itself with spaces after
+      ParseTest.from_s("[${}]"), # empty substitution (no path)
+      ParseTest.from_s("[${?}]"), # no path with ? substitution
+      ParseTest.new(false, true, "[${ ?foo}]"), # space before ? not allowed
+      ParseTest.from_s(%q|{ "a" : [1,2], "b" : y${a}z }|), # trying to interpolate an array in a string
+      ParseTest.from_s(%q|{ "a" : { "c" : 2 }, "b" : y${a}z }|), # trying to interpolate an object in a string
+      ParseTest.from_s(%q|{ "a" : ${a} }|), # simple cycle
+      ParseTest.from_s(%q|[ { "a" : 2, "b" : ${${a}} } ]|), # nested substitution
+      ParseTest.from_s("[ = ]"), # = is not a valid token in unquoted text
+      ParseTest.from_s("[ + ]"),
+      ParseTest.from_s("[ # ]"),
+      ParseTest.from_s("[ ` ]"),
+      ParseTest.from_s("[ ^ ]"),
+      ParseTest.from_s("[ ? ]"),
+      ParseTest.from_s("[ ! ]"),
+      ParseTest.from_s("[ @ ]"),
+      ParseTest.from_s("[ * ]"),
+      ParseTest.from_s("[ & ]"),
+      ParseTest.from_s("[ \\ ]"),
+      ParseTest.from_s("+="),
+      ParseTest.from_s("[ += ]"),
+      ParseTest.from_s("+= 10"),
+      ParseTest.from_s("10 +="),
+      ParseTest.from_s("[ 10e+3e ]"), # "+" not allowed in unquoted strings, and not a valid number
+      ParseTest.from_pair(true, "[ \"foo\nbar\" ]"), # unescaped newline in quoted string, lift doesn't care
+      ParseTest.from_s("[ # comment ]"),
+      ParseTest.from_s("${ #comment }"),
+      ParseTest.from_s("[ // comment ]"),
+      ParseTest.from_s("${ // comment }"),
+      # ParseTest.from_s("{ include \"bar\" : 10 }"), # include with a value after it
+      ParseTest.from_s("{ include foo }"), # include with unquoted string
+      ParseTest.from_s("{ include : { \"a\" : 1 } }"), # include used as unquoted key
+      ParseTest.from_s("a="), # no value
+      ParseTest.from_s("a:"), # no value with colon
+      ParseTest.from_s("a= "), # no value with whitespace after
+      ParseTest.from_s("a.b="), # no value with path
+      ParseTest.from_s("{ a= }"), # no value inside braces
+      ParseTest.from_s("{ a: }") # no value with colon inside braces
+  ]
+
+  # We'll automatically try each of these with whitespace modifications
+  # so no need to add every possible whitespace variation
+  ValidJson = [
+      ParseTest.from_s("{}"),
+      ParseTest.from_s("[]"),
+      ParseTest.from_s(%q|{ "foo" : "bar" }|),
+      ParseTest.from_s(%q|["foo", "bar"]|),
+      ParseTest.from_s(%q|{ "foo" : 42 }|),
+      ParseTest.from_s("{ \"foo\"\n : 42 }"), # newline after key
+      ParseTest.from_s("{ \"foo\" : \n 42 }"), # newline after colon
+      ParseTest.from_s(%q|[10, 11]|),
+      ParseTest.from_s(%q|[10,"foo"]|),
+      ParseTest.from_s(%q|{ "foo" : "bar", "baz" : "boo" }|),
+      ParseTest.from_s(%q|{ "foo" : { "bar" : "baz" }, "baz" : "boo" }|),
+      ParseTest.from_s(%q|{ "foo" : { "bar" : "baz", "woo" : "w00t" }, "baz" : "boo" }|),
+      ParseTest.from_s(%q|{ "foo" : [10,11,12], "baz" : "boo" }|),
+      ParseTest.from_s(%q|[{},{},{},{}]|),
+      ParseTest.from_s(%q|[[[[[[]]]]]]|),
+      ParseTest.from_s(%q|[[1], [1,2], [1,2,3], []]|), # nested multiple-valued array
+      ParseTest.from_s(%q|{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":42}}}}}}}}|),
+      ParseTest.from_s("[ \"#comment\" ]"), # quoted # comment
+      ParseTest.from_s("[ \"//comment\" ]"), # quoted // comment
+      # this long one is mostly to test rendering
+      ParseTest.from_s(%q|{ "foo" : { "bar" : "baz", "woo" : "w00t" }, "baz" : { "bar" : "baz", "woo" : [1,2,3,4], "w00t" : true, "a" : false, "b" : 3.14, "c" : null } }|),
+      ParseTest.from_s("{}"),
+      ParseTest.from_pair(true, "[ 10e+3 ]") # "+" in a number (lift doesn't handle)
+  ]
+
+  ValidConfInvalidJson = [
+      ParseTest.from_s(""), # empty document
+      ParseTest.from_s(" "), # empty document single space
+      ParseTest.from_s("\n"), # empty document single newline
+      ParseTest.from_s(" \n \n   \n\n\n"), # complicated empty document
+      ParseTest.from_s("# foo"), # just a comment
+      ParseTest.from_s("# bar\n"), # just a comment with a newline
+      ParseTest.from_s("# foo\n//bar"), # comment then another with no newline
+      ParseTest.from_s(%q|{ "foo" = 42 }|), # equals rather than colon
+      ParseTest.from_s(%q|{ foo { "bar" : 42 } }|), # omit the colon for object value
+      ParseTest.from_s(%q|{ foo baz { "bar" : 42 } }|), # omit the colon with unquoted key with spaces
+      ParseTest.from_s(%q| "foo" : 42 |), # omit braces on root object
+      ParseTest.from_s(%q|{ "foo" : bar }|), # no quotes on value
+      ParseTest.from_s(%q|{ "foo" : null bar 42 baz true 3.14 "hi" }|), # bunch of values to concat into a string
+      ParseTest.from_s("{ foo : \"bar\" }"), # no quotes on key
+      ParseTest.from_s("{ foo : bar }"), # no quotes on key or value
+      ParseTest.from_s("{ foo.bar : bar }"), # path expression in key
+      ParseTest.from_s("{ foo.\"hello world\".baz : bar }"), # partly-quoted path expression in key
+      ParseTest.from_s("{ foo.bar \n : bar }"), # newline after path expression in key
+      ParseTest.from_s("{ foo  bar : bar }"), # whitespace in the key
+      ParseTest.from_s("{ true : bar }"), # key is a non-string token
+      ParseTest.from_pair(true, %q|{ "foo" : "bar", "foo" : "bar2" }|), # dup keys - lift just returns both
+      ParseTest.from_pair(true, "[ 1, 2, 3, ]"), # single trailing comma (lift fails to throw)
+      ParseTest.from_pair(true, "[1,2,3  , ]"), # single trailing comma with whitespace
+      ParseTest.from_pair(true, "[1,2,3\n\n , \n]"), # single trailing comma with newlines
+      ParseTest.from_pair(true, "[1,]"), # single trailing comma with one-element array
+      ParseTest.from_pair(true, "{ \"foo\" : 10, }"), # extra trailing comma (lift fails to throw)
+      ParseTest.from_pair(true, "{ \"a\" : \"b\", }"), # single trailing comma in object
+      ParseTest.from_s("{ a : b, }"), # single trailing comma in object (unquoted strings)
+      ParseTest.from_s("{ a : b  \n  , \n }"), # single trailing comma in object with newlines
+      ParseTest.from_s("a : b, c : d,"), # single trailing comma in object with no root braces
+      ParseTest.from_s("{ a : b\nc : d }"), # skip comma if there's a newline
+      ParseTest.from_s("a : b\nc : d"), # skip comma if there's a newline and no root braces
+      ParseTest.from_s("a : b\nc : d,"), # skip one comma but still have one at the end
+      ParseTest.from_s("[ foo ]"), # not a known token in JSON
+      ParseTest.from_s("[ t ]"), # start of "true" but ends wrong in JSON
+      ParseTest.from_s("[ tx ]"),
+      ParseTest.from_s("[ tr ]"),
+      ParseTest.from_s("[ trx ]"),
+      ParseTest.from_s("[ tru ]"),
+      ParseTest.from_s("[ trux ]"),
+      ParseTest.from_s("[ truex ]"),
+      ParseTest.from_s("[ 10x ]"), # number token with trailing junk
+      ParseTest.from_s("[ / ]"), # unquoted string "slash"
+      ParseTest.from_s("{ include \"foo\" }"), # valid include
+      ParseTest.from_s("{ include\n\"foo\" }"), # include with just a newline separating from string
+      ParseTest.from_s("{ include\"foo\" }"), # include with no whitespace after it
+      ParseTest.from_s("[ include ]"), # include can be a string value in an array
+      ParseTest.from_s("{ foo : include }"), # include can be a field value also
+      ParseTest.from_s("{ include \"foo\", \"a\" : \"b\" }"), # valid include followed by comma and field
+      ParseTest.from_s("{ foo include : 42 }"), # valid to have a key not starting with include
+      ParseTest.from_s("[ ${foo} ]"),
+      ParseTest.from_s("[ ${?foo} ]"),
+      ParseTest.from_s("[ ${\"foo\"} ]"),
+      ParseTest.from_s("[ ${foo.bar} ]"),
+      ParseTest.from_s("[ abc  xyz  ${foo.bar}  qrs tuv ]"), # value concatenation
+      ParseTest.from_s("[ 1, 2, 3, blah ]"),
+      ParseTest.from_s("[ ${\"foo.bar\"} ]"),
+      ParseTest.from_s("{} # comment"),
+      ParseTest.from_s("{} // comment"),
+      ParseTest.from_s(%q|{ "foo" #comment
+: 10 }|),
+      ParseTest.from_s(%q|{ "foo" // comment
+: 10 }|),
+      ParseTest.from_s(%q|{ "foo" : #comment
+10 }|),
+      ParseTest.from_s(%q|{ "foo" : // comment
+10 }|),
+      ParseTest.from_s(%q|{ "foo" : 10 #comment
+}|),
+      ParseTest.from_s(%q|{ "foo" : 10 // comment
+}|),
+      ParseTest.from_s(%q|[ 10, # comment
+11]|),
+      ParseTest.from_s(%q|[ 10, // comment
+11]|),
+      ParseTest.from_s(%q|[ 10 # comment
+, 11]|),
+      ParseTest.from_s(%q|[ 10 // comment
+, 11]|),
+      ParseTest.from_s(%q|{ /a/b/c : 10 }|), # key has a slash in it
+      ParseTest.new(false, true, "[${ foo.bar}]"), # substitution with leading spaces
+      ParseTest.new(false, true, "[${foo.bar }]"), # substitution with trailing spaces
+      ParseTest.new(false, true, "[${ \"foo.bar\"}]"), # substitution with leading spaces and quoted
+      ParseTest.new(false, true, "[${\"foo.bar\" }]"), # substitution with trailing spaces and quoted
+      ParseTest.from_s(%q|[ ${"foo""bar"} ]|), # multiple strings in substitution
+      ParseTest.from_s(%q|[ ${foo  "bar"  baz} ]|), # multiple strings and whitespace in substitution
+      ParseTest.from_s("[${true}]"), # substitution with unquoted true token
+      ParseTest.from_s("a = [], a += b"), # += operator with previous init
+      ParseTest.from_s("{ a = [], a += 10 }"), # += in braces object with previous init
+      ParseTest.from_s("a += b"), # += operator without previous init
+      ParseTest.from_s("{ a += 10 }"), # += in braces object without previous init
+      ParseTest.from_s("[ 10e3e3 ]"), # two exponents. this should parse to a number plus string "e3"
+      ParseTest.from_s("[ 1-e3 ]"), # malformed number should end up as a string instead
+      ParseTest.from_s("[ 1.0.0 ]"), # two decimals, should end up as a string
+      ParseTest.from_s("[ 1.0. ]")
+  ]
+
+
+  InvalidConf = InvalidJsonInvalidConf
+
+  # .conf is a superset of JSON so validJson just goes in here
+  ValidConf = ValidConfInvalidJson + ValidJson
+
+  def self.add_offending_json_to_exception(parser_name, s, & block)
+    begin
+      block.call
+    rescue => e
+      tokens =
+          begin
+            "tokens: " + TestUtils.tokenize_as_list(s).join("\n")
+          rescue => tokenize_ex
+            "tokenizer failed: #{tokenize_ex}\n#{tokenize_ex.backtrace.join("\n")}"
+          end
+      raise ArgumentError, "#{parser_name} parser did wrong thing on '#{s}', #{tokens}; error: #{e}\n#{e.backtrace.join("\n")}"
+    end
+  end
+
+  def self.whitespace_variations(tests, valid_in_lift)
+    variations = [
+        Proc.new { |s| s }, # identity
+        Proc.new { |s| " " + s },
+        Proc.new { |s| s + " " },
+        Proc.new { |s| " " + s + " " },
+        Proc.new { |s| s.gsub(" ", "") }, # this would break with whitespace in a key or value
+        Proc.new { |s| s.gsub(":", " : ") }, # could break with : in a key or value
+        Proc.new { |s| s.gsub(",", " , ") }, # could break with , in a key or value
+    ]
+    tests.map { |t|
+      if t.whitespace_matters?
+        t
+      else
+        with_no_ascii =
+            if t.test.include?(" ")
+              [ParseTest.from_pair(valid_in_lift,
+                                   t.test.gsub(" ", "\u2003"))] # 2003 = em space, to test non-ascii whitespace
+            else
+              []
+            end
+
+        with_no_ascii << variations.reduce([]) { |acc, v|
+          acc << ParseTest.from_pair(t.lift_behavior_unexpected?, v.call(t.test))
+          acc
+        }
+      end
+    }.flatten
+  end
+
+
+  ##################
+  # Tokenizer Functions
+  ##################
+  def self.wrap_tokens(token_list)
+    # Wraps token_list in START and EOF tokens
+    [Tokens::START] + token_list + [Tokens::EOF]
+  end
+
+  def self.tokenize(config_origin, input)
+    Hocon::Impl::Tokenizer.tokenize(config_origin, input, Hocon::ConfigSyntax::CONF)
+  end
+
+  def self.tokenize_from_s(s)
+    tokenize(Hocon::Impl::SimpleConfigOrigin.new_simple("anonymous Reader"),
+             StringIO.new(s))
+  end
+
+  def self.tokenize_as_list(input_string)
+    token_iterator = tokenize_from_s(input_string)
+
+    token_iterator.to_list
+  end
+
+  def self.tokenize_as_string(input_string)
+    Hocon::Impl::Tokenizer.render(tokenize_from_s(input_string))
+  end
+
+  def self.config_node_simple_value(value)
+    Hocon::Impl::ConfigNodeSimpleValue.new(value)
+  end
+
+  def self.config_node_key(path)
+    Hocon::Impl::PathParser.parse_path_node(path)
+  end
+
+  def self.config_node_single_token(value)
+    Hocon::Impl::ConfigNodeSingleToken.new(value)
+  end
+
+  def self.config_node_object(nodes)
+    Hocon::Impl::ConfigNodeObject.new(nodes)
+  end
+
+  def self.config_node_array(nodes)
+    Hocon::Impl::ConfigNodeArray.new(nodes)
+  end
+
+  def self.config_node_concatenation(nodes)
+    Hocon::Impl::ConfigNodeConcatenation.new(nodes)
+  end
+
+  def self.node_colon
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::COLON)
+  end
+
+  def self.node_space
+    Hocon::Impl::ConfigNodeSingleToken.new(token_unquoted(" "))
+  end
+
+  def self.node_open_brace
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::OPEN_CURLY)
+  end
+
+  def self.node_close_brace
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::CLOSE_CURLY)
+  end
+
+  def self.node_open_bracket
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::OPEN_SQUARE)
+  end
+
+  def self.node_close_bracket
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::CLOSE_SQUARE)
+  end
+
+  def self.node_comma
+    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::COMMA)
+  end
+
+  def self.node_line(line)
+    Hocon::Impl::ConfigNodeSingleToken.new(token_line(line))
+  end
+
+  def self.node_whitespace(whitespace)
+    Hocon::Impl::ConfigNodeSingleToken.new(token_whitespace(whitespace))
+  end
+
+  def self.node_key_value_pair(key, value)
+    nodes = [key, node_colon, node_space, value]
+    Hocon::Impl::ConfigNodeField.new(nodes)
+  end
+
+  def self.node_int(value)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_int(value))
+  end
+
+  def self.node_string(value)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_string(value))
+  end
+
+  def self.node_double(value)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_double(value))
+  end
+
+  def self.node_true
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_true)
+  end
+
+  def self.node_false
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_false)
+  end
+
+  def self.node_comment_hash(text)
+    Hocon::Impl::ConfigNodeComment.new(token_comment_hash(text))
+  end
+
+  def self.node_comment_double_slash(text)
+    Hocon::Impl::ConfigNodeComment.new(token_comment_double_slash(text))
+  end
+
+  def self.node_unquoted_text(text)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_unquoted(text))
+  end
+
+  def self.node_null
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_null)
+  end
+
+  def self.node_key_substitution(s)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_key_substitution(s))
+  end
+
+  def self.node_optional_substitution(*expression)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_optional_substitution(*expression))
+  end
+
+  def self.node_substitution(*expression)
+    Hocon::Impl::ConfigNodeSimpleValue.new(token_substitution(*expression))
+  end
+
+  def self.fake_origin
+    Hocon::Impl::SimpleConfigOrigin.new_simple("fake origin")
+  end
+
+  def self.token_line(line_number)
+    Tokens.new_line(fake_origin.with_line_number(line_number))
+  end
+
+  def self.token_true
+    Tokens.new_boolean(fake_origin, true)
+  end
+
+  def self.token_false
+    Tokens.new_boolean(fake_origin, false)
+  end
+
+  def self.token_null
+    Tokens.new_null(fake_origin)
+  end
+
+  def self.token_unquoted(value)
+    Tokens.new_unquoted_text(fake_origin, value)
+  end
+
+  def self.token_comment_double_slash(value)
+    Tokens.new_comment_double_slash(fake_origin, value)
+  end
+
+  def self.token_comment_hash(value)
+    Tokens.new_comment_hash(fake_origin, value)
+  end
+
+  def self.token_whitespace(value)
+    Tokens.new_ignored_whitespace(fake_origin, value)
+  end
+
+  def self.token_string(value)
+    Tokens.new_string(fake_origin, value, "\"#{value}\"")
+  end
+
+  def self.token_double(value)
+    Tokens.new_double(fake_origin, value, "#{value}")
+  end
+
+  def self.token_int(value)
+    Tokens.new_int(fake_origin, value, "#{value}")
+  end
+
+  def self.token_maybe_optional_substitution(optional, token_list)
+    Tokens.new_substitution(fake_origin, optional, token_list)
+  end
+
+  def self.token_substitution(*token_list)
+    token_maybe_optional_substitution(false, token_list)
+  end
+
+  def self.token_optional_substitution(*token_list)
+    token_maybe_optional_substitution(true, token_list)
+  end
+
+  def self.token_key_substitution(value)
+    token_substitution(token_string(value))
+  end
+
+  def self.parse_object(s)
+    parse_config(s).root
+  end
+
+  def self.parse_config(s)
+    options = Hocon::ConfigParseOptions.defaults.
+                  set_origin_description("test string").
+                  set_syntax(Hocon::ConfigSyntax::CONF)
+    Hocon::ConfigFactory.parse_string(s, options)
+  end
+
+  ##################
+  # ConfigValue helpers
+  ##################
+  def self.int_value(value)
+    ConfigInt.new(fake_origin, value, nil)
+  end
+
+  def self.double_value(value)
+    ConfigDouble.new(fake_origin, value, nil)
+  end
+
+  def self.string_value(value)
+    ConfigString::Quoted.new(fake_origin, value)
+  end
+
+  def self.null_value
+    ConfigNull.new(fake_origin)
+  end
+
+  def self.bool_value(value)
+    ConfigBoolean.new(fake_origin, value)
+  end
+
+  def self.config_map(input_map)
+    # Turns {String: Int} maps into {String: ConfigInt} maps
+    Hash[ input_map.map { |k, v| [k, int_value(v)] } ]
+  end
+
+  def self.subst(ref, optional = false)
+    path = Path.new_path(ref)
+    ConfigReference.new(fake_origin, SubstitutionExpression.new(path, optional))
+  end
+
+  def self.subst_in_string(ref, optional = false)
+    pieces = [string_value("start<"), subst(ref, optional), string_value(">end")]
+    ConfigConcatenation.new(fake_origin, pieces)
+  end
+
+  ##################
+  # Token Functions
+  ##################
+  class NotEqualToAnythingElse
+    def ==(other)
+      other.is_a? NotEqualToAnythingElse
+    end
+
+    def hash
+      971
+    end
+  end
+
+  ##################
+  # Path Functions
+  ##################
+  def self.path(*elements)
+    # this is importantly NOT using Path.newPath, which relies on
+    # the parser; in the test suite we are often testing the parser,
+    # so we don't want to use the parser to build the expected result.
+    Path.from_string_list(elements)
+  end
+
+  RESOURCE_DIR = "spec/fixtures/test_utils/resources"
+
+  def self.resource_file(filename)
+    File.join(RESOURCE_DIR, filename)
+  end
+
+  def self.json_quoted_resource_file(filename)
+    quote_json_string(resource_file(filename).to_s)
+  end
+
+  def self.quote_json_string(s)
+    Hocon::Impl::ConfigImplUtil.render_json_string(s)
+  end
+
+  ##################
+  # RSpec Tests
+  ##################
+  def self.check_equal_objects(first_object, second_object)
+    it "should find the two objects to be equal" do
+      not_equal_to_anything_else = TestUtils::NotEqualToAnythingElse.new
+
+      # Equality
+      expect(first_object).to eq(second_object)
+      expect(second_object).to eq(first_object)
+
+      # Hashes
+      expect(first_object.hash).to eq(second_object.hash)
+
+      # Other random object
+      expect(first_object).not_to eq(not_equal_to_anything_else)
+      expect(not_equal_to_anything_else).not_to eq(first_object)
+
+      expect(second_object).not_to eq(not_equal_to_anything_else)
+      expect(not_equal_to_anything_else).not_to eq(second_object)
+    end
+  end
+
+  def self.check_not_equal_objects(first_object, second_object)
+
+    it "should find the two objects to be not equal" do
+      not_equal_to_anything_else = TestUtils::NotEqualToAnythingElse.new
+
+      # Equality
+      expect(first_object).not_to eq(second_object)
+      expect(second_object).not_to eq(first_object)
+
+      # Hashes
+      # hashcode inequality isn't guaranteed, but
+      # as long as it happens to work it might
+      # detect a bug (if hashcodes are equal,
+      # check if it's due to a bug or correct
+      # before you remove this)
+      expect(first_object.hash).not_to eq(second_object.hash)
+
+      # Other random object
+      expect(first_object).not_to eq(not_equal_to_anything_else)
+      expect(not_equal_to_anything_else).not_to eq(first_object)
+
+      expect(second_object).not_to eq(not_equal_to_anything_else)
+      expect(not_equal_to_anything_else).not_to eq(second_object)
+    end
+  end
+end
+
+
+##################
+# RSpec Shared Examples
+##################
+
+# Examples for comparing an object that won't equal anything but itself
+# Used in the object_equality examples below
+shared_examples_for "not_equal_to_other_random_thing" do
+  let(:not_equal_to_anything_else) { TestUtils::NotEqualToAnythingElse.new }
+
+  it "should find the first object not equal to a random other thing" do
+    expect(first_object).not_to eq(not_equal_to_anything_else)
+    expect(not_equal_to_anything_else).not_to eq(first_object)
+  end
+
+  it "should find the second object not equal to a random other thing" do
+    expect(second_object).not_to eq(not_equal_to_anything_else)
+    expect(not_equal_to_anything_else).not_to eq(second_object)
+  end
+end
+
+# Examples for making sure two objects are equal
+shared_examples_for "object_equality" do
+
+  it "should find the first object to be equal to the second object" do
+    expect(first_object).to eq(second_object)
+  end
+
+  it "should find the second object to be equal to the first object" do
+    expect(second_object).to eq(first_object)
+  end
+
+  it "should find the hash codes of the two objects to be equal" do
+    expect(first_object.hash).to eq(second_object.hash)
+  end
+
+  include_examples "not_equal_to_other_random_thing"
+end
+
+# Examples for making sure two objects are not equal
+shared_examples_for "object_inequality" do
+
+  it "should find the first object to not be equal to the second object" do
+    expect(first_object).not_to eq(second_object)
+  end
+
+  it "should find the second object to not be equal to the first object" do
+    expect(second_object).not_to eq(first_object)
+  end
+
+  it "should find the hash codes of the two objects to not be equal" do
+    # hashcode inequality isn't guaranteed, but
+    # as long as it happens to work it might
+    # detect a bug (if hashcodes are equal,
+    # check if it's due to a bug or correct
+    # before you remove this)
+    expect(first_object.hash).not_to eq(second_object.hash)
+  end
+
+  include_examples "not_equal_to_other_random_thing"
+end
+
+
+shared_examples_for "path_render_test" do
+  it "should find the expected rendered text equal to the rendered path" do
+    expect(path.render).to eq(expected)
+  end
+
+  it "should find the path equal to the parsed expected text" do
+    expect(Hocon::Impl::PathParser.parse_path(expected)).to eq(path)
+  end
+
+  it "should find the path equal to the parsed text that came from the rendered path" do
+    expect(Hocon::Impl::PathParser.parse_path(path.render)).to eq(path)
+  end
+end
diff --git a/spec/unit/cli/cli_spec.rb b/spec/unit/cli/cli_spec.rb
new file mode 100644
index 0000000..0e9b5b9
--- /dev/null
+++ b/spec/unit/cli/cli_spec.rb
@@ -0,0 +1,157 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'test_utils'
+
+
+describe Hocon::CLI do
+  ####################
+  # Argument Parsing
+  ####################
+  context 'argument parsing' do
+    it 'should find all the flags and arguments' do
+      args = %w(-i foo -o bar set some.path some_value --json)
+      expected_options = {
+        in_file: 'foo',
+        out_file: 'bar',
+        subcommand: 'set',
+        path: 'some.path',
+        new_value: 'some_value',
+        json: true
+      }
+      expect(Hocon::CLI.parse_args(args)).to eq(expected_options)
+    end
+
+    it 'should set -i and -o to -f if given' do
+      args = %w(-f foo set some.path some_value)
+      expected_options = {
+        file: 'foo',
+        in_file: 'foo',
+        out_file: 'foo',
+        subcommand: 'set',
+        path: 'some.path',
+        new_value: 'some_value'
+      }
+      expect(Hocon::CLI.parse_args(args)).to eq(expected_options)
+    end
+  end
+
+  context 'subcommands' do
+    hocon_text =
+'foo.bar {
+  baz = 42
+  array = [1, 2, 3]
+  hash: {key: value}
+}'
+
+    context 'do_get()' do
+      it 'should get simple values' do
+        options = {path: 'foo.bar.baz'}
+        expect(Hocon::CLI.do_get(options, hocon_text)).to eq('42')
+      end
+
+      it 'should work with arrays' do
+        options = {path: 'foo.bar.array'}
+        expected = "[\n    1,\n    2,\n    3\n]"
+        expect(Hocon::CLI.do_get(options, hocon_text)).to eq(expected)
+      end
+
+      it 'should work with hashes' do
+        options = {path: 'foo.bar.hash'}
+        expected = "key: value\n"
+        expect(Hocon::CLI.do_get(options, hocon_text)).to eq(expected)
+      end
+
+      it 'should output json if specified' do
+        options = {path: 'foo.bar.hash', json: true}
+
+        # Note that this is valid json, while the test above is not
+        expected = "{\n    \"key\": \"value\"\n}\n"
+        expect(Hocon::CLI.do_get(options, hocon_text)).to eq(expected)
+      end
+
+      it 'should throw a MissingPathError if the path does not exist' do
+        options = {path: 'not.a.path'}
+        expect {Hocon::CLI.do_get(options, hocon_text)}
+            .to raise_error(Hocon::CLI::MissingPathError)
+      end
+
+      it 'should throw a MissingPathError if the path leads into an array' do
+        options = {path: 'foo.array.1'}
+        expect {Hocon::CLI.do_get(options, hocon_text)}
+            .to raise_error(Hocon::CLI::MissingPathError)
+      end
+
+      it 'should throw a MissingPathError if the path leads into a string' do
+        options = {path: 'foo.hash.key.value'}
+        expect {Hocon::CLI.do_get(options, hocon_text)}
+            .to raise_error(Hocon::CLI::MissingPathError)
+      end
+    end
+
+    context 'do_set()' do
+      it 'should overwrite values' do
+        options = {path: 'foo.bar.baz', new_value: 'pi'}
+        expected = hocon_text.sub(/42/, 'pi')
+        expect(Hocon::CLI.do_set(options, hocon_text)).to eq(expected)
+      end
+
+      it 'should create new nested values' do
+        options = {path: 'new.nested.path', new_value: 'hello'}
+        expected = "new: {\n  nested: {\n    path: hello\n  }\n}"
+        # No config is supplied, so it will need to add new nested hashes
+        expect(Hocon::CLI.do_set(options, '')).to eq(expected)
+      end
+
+      it 'should allow arrays to be set' do
+        options = {path: 'my_array', new_value: '[1, 2, 3]'}
+        expected = 'my_array: [1, 2, 3]'
+        expect(Hocon::CLI.do_set(options, '')).to eq(expected)
+      end
+
+      it 'should allow arrays in strings to be set as strings' do
+        options = {path: 'my_array', new_value: '"[1, 2, 3]"'}
+        expected = 'my_array: "[1, 2, 3]"'
+        expect(Hocon::CLI.do_set(options, '')).to eq(expected)
+      end
+
+      it 'should allow hashes to be set' do
+        do_set_options = {path: 'my_hash', new_value: '{key: value}'}
+        do_set_expected = 'my_hash: {key: value}'
+        do_set_result = Hocon::CLI.do_set(do_set_options, '')
+        expect(do_set_result).to eq(do_set_expected)
+
+        # Make sure it can be parsed again and be seen as a real hash
+        do_get_options = {path: 'my_hash.key'}
+        do_get_expected = 'value'
+        expect(Hocon::CLI.do_get(do_get_options, do_set_result)).to eq(do_get_expected)
+      end
+
+      it 'should allow hashes to be set as strings' do
+        do_set_options = {path: 'my_hash', new_value: '"{key: value}"'}
+        do_set_expected = 'my_hash: "{key: value}"'
+        do_set_result = Hocon::CLI.do_set(do_set_options, '')
+        expect(do_set_result).to eq(do_set_expected)
+
+        # Make sure it can't be parsed again and be seen as a real hash
+        do_get_options = {path: 'my_hash.key'}
+        expect{Hocon::CLI.do_get(do_get_options, do_set_result)}
+            .to raise_error(Hocon::CLI::MissingPathError)
+      end
+    end
+
+    context 'do_unset()' do
+      it 'should remove values' do
+        options = {path: 'foo.bar.baz'}
+        expected = hocon_text.sub(/baz = 42/, '')
+        expect(Hocon::CLI.do_unset(options, hocon_text)).to eq(expected)
+      end
+
+      it 'should throw a MissingPathError if the path does not exist' do
+        options = {path: 'fake.path'}
+        expect{Hocon::CLI.do_unset(options, hocon_text)}
+            .to raise_error(Hocon::CLI::MissingPathError)
+      end
+    end
+  end
+end
diff --git a/spec/unit/hocon/README.md b/spec/unit/hocon/README.md
new file mode 100644
index 0000000..b86b594
--- /dev/null
+++ b/spec/unit/hocon/README.md
@@ -0,0 +1,7 @@
+## RUBY-SPECIFIC TESTS
+
+This directory should only contain tests that are specific to the Ruby library/API.
+Tests ported from the upstream Java library should live in spec/typesafe/config.
+
+Where possible it would be good to avoid sharing fixtures between the two types
+of tests as well.
\ No newline at end of file
diff --git a/spec/unit/hocon/hocon_spec.rb b/spec/unit/hocon/hocon_spec.rb
new file mode 100644
index 0000000..a192c6e
--- /dev/null
+++ b/spec/unit/hocon/hocon_spec.rb
@@ -0,0 +1,114 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'hocon/config_render_options'
+require 'hocon/config_error'
+require 'hocon/config_syntax'
+
+ConfigParseError = Hocon::ConfigError::ConfigParseError
+ConfigWrongTypeError = Hocon::ConfigError::ConfigWrongTypeError
+
+describe Hocon do
+  let(:render_options) { Hocon::ConfigRenderOptions.defaults }
+
+  before do
+    render_options.origin_comments = false
+    render_options.json = false
+  end
+
+  RSpec.shared_examples "hocon_parsing" do
+
+    it "should make the config data available as a map" do
+      expect(conf).to eq(expected)
+    end
+
+  end
+
+  [EXAMPLE1, EXAMPLE2].each do |example|
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    let(:output_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/output.conf" }
+    let(:output) { File.read("#{output_file}") }
+    let(:output_nocomments_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/output_nocomments.conf" }
+    let(:output_nocomments) { File.read("#{output_nocomments_file}") }
+    let(:expected) { example[:hash] }
+    # TODO 'reparsed' appears to be unused
+    let(:reparsed) { Hocon::ConfigFactory.parse_file("#{FIXTURE_DIR}/parse_render/#{example[:name]}/output.conf") }
+
+    context "loading a HOCON file" do
+      let(:conf) { Hocon.load(input_file) }
+      include_examples "hocon_parsing"
+    end
+
+    context "parsing a HOCON string" do
+      let(:string) { File.open(input_file).read }
+      let(:conf) { Hocon.parse(string) }
+      include_examples "hocon_parsing"
+    end
+  end
+
+  it "should fail to parse an array" do
+    puts 
+    expect{(Hocon.parse('[1,2,3]'))}.
+      to raise_error(ConfigWrongTypeError)
+  end
+
+  it "should fail to parse an array" do
+    expect{(Hocon.parse('["one", "two" "three"]'))}.
+      to raise_error(ConfigWrongTypeError)
+  end
+
+  context "loading a HOCON file with a substitution" do
+    conf = Hocon.load("#{FIXTURE_DIR}/parse_render/#{EXAMPLE3[:name]}/input.conf")
+    expected = EXAMPLE3[:hash]
+    it "should successfully resolve the substitution" do
+      expect(conf).to eq(expected)
+    end
+  end
+
+  context "loading a file with an unknown extension" do
+    context "without specifying the config format" do
+      it "should raise an error" do
+        expect {
+          Hocon.load("#{FIXTURE_DIR}/hocon/by_extension/cat.test")
+        }.to raise_error(ConfigParseError, /Unrecognized file extension '.test'/)
+      end
+    end
+
+    context "while specifying the config format" do
+      it "should parse properly if the config format is correct" do
+        expect(Hocon.load("#{FIXTURE_DIR}/hocon/by_extension/cat.test",
+                          {:syntax => Hocon::ConfigSyntax::HOCON})).
+            to eq({"meow" => "cats"})
+        expect(Hocon.load("#{FIXTURE_DIR}/hocon/by_extension/cat.test-json",
+                          {:syntax => Hocon::ConfigSyntax::HOCON})).
+            to eq({"meow" => "cats"})
+      end
+      it "should parse properly if the config format is compatible" do
+        expect(Hocon.load("#{FIXTURE_DIR}/hocon/by_extension/cat.test-json",
+                          {:syntax => Hocon::ConfigSyntax::JSON})).
+            to eq({"meow" => "cats"})
+      end
+      it "should raise an error if the config format is incompatible" do
+        expect {
+          Hocon.load("#{FIXTURE_DIR}/hocon/by_extension/cat.test",
+                     {:syntax => Hocon::ConfigSyntax::JSON})
+        }.to raise_error(ConfigParseError, /Document must have an object or array at root/)
+      end
+    end
+  end
+
+  context "loading config that includes substitutions" do
+    it "should be able to `load` from a file" do
+      expect(Hocon.load("#{FIXTURE_DIR}/hocon/with_substitution/subst.conf")).
+          to eq({"a" => true, "b" => true, "c" => ["foo", "bar", "baz"]})
+    end
+    it "should be able to `parse` from a string" do
+      expect(Hocon.parse(File.read("#{FIXTURE_DIR}/hocon/with_substitution/subst.conf"))).
+          to eq({"a" => true, "b" => true, "c" => ["foo", "bar", "baz"]})
+    end
+  end
+
+
+end
+
diff --git a/spec/unit/typesafe/config/README.md b/spec/unit/typesafe/config/README.md
new file mode 100644
index 0000000..6140c03
--- /dev/null
+++ b/spec/unit/typesafe/config/README.md
@@ -0,0 +1,4 @@
+## TESTS PORTED FROM UPSTREAM
+
+This directory should only contain tests that are ported from the upstream
+Java library.
\ No newline at end of file
diff --git a/spec/unit/typesafe/config/concatenation_spec.rb b/spec/unit/typesafe/config/concatenation_spec.rb
new file mode 100644
index 0000000..bfd7567
--- /dev/null
+++ b/spec/unit/typesafe/config/concatenation_spec.rb
@@ -0,0 +1,417 @@
+# encoding: utf-8
+
+require 'test_utils'
+
+describe "concatenation" do
+
+  it "string concat, no substitutions" do
+    conf = TestUtils.parse_config(' a :  true "xyz" 123 foo  ').resolve
+    expect(conf.get_string("a")).to eq("true xyz 123 foo")
+  end
+
+  it "trivial string concat" do
+    conf = TestUtils.parse_config(" a : ${x}foo, x = 1 ").resolve
+    expect(conf.get_string("a")).to eq("1foo")
+  end
+
+  it "two substitutions and string concat" do
+    conf = TestUtils.parse_config(" a : ${x}foo${x}, x = 1 ").resolve
+    expect(conf.get_string("a")).to eq("1foo1")
+  end
+
+  it "string concat cannot span lines" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      TestUtils.parse_config(" a : ${x}
+        foo, x = 1 ")
+    }
+    expect(e.message).to include("not be followed")
+    expect(e.message).to include("','")
+  end
+
+  it "no objects in string concat" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(" a : abc { x : y } ")
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("abc")
+    expect(e.message).to include('{"x":"y"}')
+  end
+
+  it "no object concat with nil" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(" a : null { x : y } ")
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("null")
+    expect(e.message).to include('{"x":"y"}')
+  end
+
+  it "no arrays in string concat" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(" a : abc [1, 2] ")
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("abc")
+    expect(e.message).to include("[1,2]")
+  end
+
+  it "no objects substituted in string concat" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(" a : abc ${x}, x : { y : z } ").resolve
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("abc")
+  end
+
+  it "no arrays substituted in string concat" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(" a : abc ${x}, x : [1,2] ").resolve
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("abc")
+  end
+
+  it "no substitutions in list concat" do
+    conf = TestUtils.parse_config(" a :  [1,2] [3,4]  ")
+    expect([1, 2, 3, 4]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "list concat with substitutions" do
+    conf = TestUtils.parse_config(" a :  ${x} [3,4] ${y}, x : [1,2], y : [5,6]  ").resolve
+    expect([1, 2, 3, 4, 5, 6]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "list concat self referential" do
+    conf = TestUtils.parse_config(" a : [1, 2], a : ${a} [3,4], a : ${a} [5,6]  ").resolve
+    expect([1, 2, 3, 4, 5, 6]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "no substitutions in list concat cannot span lines" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      TestUtils.parse_config(" a :  [1,2]
+                [3,4]  ")
+    }
+    expect(e.message).to include("expecting")
+    expect(e.message).to include("'['")
+  end
+
+  it "list concat can span lines inside brackets" do
+    conf = TestUtils.parse_config(" a :  [1,2
+               ] [3,4]  ")
+    expect([1, 2, 3, 4]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "no substitutions object concat" do
+    conf = TestUtils.parse_config(" a : { b : c } { x : y }  ")
+    expect({"b" => "c", "x" => "y"}).to eq(conf.get_object("a").unwrapped)
+  end
+
+  it "object concat merge order" do
+    conf = TestUtils.parse_config(" a : { b : 1 } { b : 2 } { b : 3 } { b : 4 } ")
+    expect(4).to eq(conf.get_int("a.b"))
+  end
+
+  it "object concat with substitutions" do
+    conf = TestUtils.parse_config(" a : ${x} { b : 1 } ${y}, x : { a : 0 }, y : { c : 2 } ").resolve
+    expect({"a" => 0, "b" => 1, "c" => 2}).to eq(conf.get_object("a").unwrapped)
+  end
+
+  it "object concat self referential" do
+    conf = TestUtils.parse_config(" a : { a : 0 }, a : ${a} { b : 1 }, a : ${a} { c : 2 } ").resolve
+    expect({"a" => 0, "b" => 1, "c" => 2}).to eq(conf.get_object("a").unwrapped)
+  end
+
+  it "object concat self referential override" do
+    conf = TestUtils.parse_config(" a : { b : 3 }, a : { b : 2 } ${a} ").resolve
+    expect({"b" => 3}).to eq(conf.get_object("a").unwrapped)
+  end
+
+  it "no substitutions object concat cannot span lines" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      TestUtils.parse_config(" a :  { b : c }
+                    { x : y }")
+    }
+    expect(e.message).to include("expecting")
+    expect(e.message).to include("'{'")
+  end
+
+  it "object concat can span lines inside braces" do
+    conf = TestUtils.parse_config(" a :  { b : c
+      } { x : y }  ")
+    expect({"b" => "c", "x" => "y"}).to eq(conf.get_object("a").unwrapped)
+  end
+
+  it "string concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ foo bar 10 ] ")
+    expect(["foo bar 10"]).to eq(conf.get_string_list("a"))
+  end
+
+  it "string non concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ foo
+                bar
+                10 ] ")
+    expect(["foo", "bar", "10"]).to eq(conf.get_string_list("a"))
+  end
+
+  it "object concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ { b : c } { x : y } ] ")
+    expect([{"b" => "c", "x" => "y"}]).to eq(conf.get_object_list("a").map { |x| x.unwrapped })
+  end
+
+  it "object non concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ { b : c }
+                { x : y } ] ")
+    expect([{"b" => "c"}, {"x" => "y"}]).to eq(conf.get_object_list("a").map { |x| x.unwrapped })
+  end
+
+  it "list concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ [1, 2] [3, 4] ] ")
+    expect([[1,2,3,4]]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "list non concat inside array value" do
+    conf = TestUtils.parse_config(" a : [ [1, 2]
+                [3, 4] ] ")
+    expect([[1, 2], [3, 4]]).to eq(conf.get_list("a").unwrapped)
+  end
+
+  it "string concats are keys" do
+    conf = TestUtils.parse_config(' 123 foo : "value" ')
+    expect("value").to eq(conf.get_string("123 foo"))
+  end
+
+  it "objects are not keys" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      TestUtils.parse_config('{ { a : 1 } : "value" }')
+    }
+    expect(e.message).to include("expecting a close")
+    expect(e.message).to include("'{'")
+  end
+
+  it "arrays are not keys" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      TestUtils.parse_config('{ [ "a" ] : "value" }')
+    }
+    expect(e.message).to include("expecting a close")
+    expect(e.message).to include("'['")
+  end
+
+  it "empty array plus equals" do
+    conf = TestUtils.parse_config(' a = [], a += 2 ').resolve
+    expect([2]).to eq(conf.get_int_list("a"))
+  end
+
+  it "missing array plus equals" do
+    conf = TestUtils.parse_config(' a += 2 ').resolve
+    expect([2]).to eq(conf.get_int_list("a"))
+  end
+
+  it "short array plus equals" do
+    conf = TestUtils.parse_config(' a = [1], a += 2 ').resolve
+    expect([1, 2]).to eq(conf.get_int_list("a"))
+  end
+
+  it "number plus equals" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(' a = 10, a += 2 ').resolve
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("10")
+    expect(e.message).to include("[2]")
+  end
+
+  it "string plus equals" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(' a = abc, a += 2 ').resolve
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("abc")
+    expect(e.message).to include("[2]")
+  end
+
+  it "objects plus equals" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      TestUtils.parse_config(' a = { x : y }, a += 2 ').resolve
+    }
+    expect(e.message).to include("Cannot concatenate")
+    expect(e.message).to include("\"x\":\"y\"")
+    expect(e.message).to include("[2]")
+  end
+
+  it "plus equals nested path" do
+    conf = TestUtils.parse_config(' a.b.c = [1], a.b.c += 2 ').resolve
+    expect([1, 2]).to eq(conf.get_int_list("a.b.c"))
+  end
+
+  it "plus equals nested objects" do
+    conf = TestUtils.parse_config(' a : { b : { c : [1] } }, a : { b : { c += 2 } }').resolve
+    expect([1, 2]).to eq(conf.get_int_list("a.b.c"))
+  end
+
+  it "plus equals single nested object" do
+    conf = TestUtils.parse_config(' a : { b : { c : [1], c += 2 } }').resolve
+    expect([1, 2]).to eq(conf.get_int_list("a.b.c"))
+  end
+
+  it "substitution plus equals substitution" do
+    conf = TestUtils.parse_config(' a = ${x}, a += ${y}, x = [1], y = 2 ').resolve
+    expect([1, 2]).to eq(conf.get_int_list("a"))
+  end
+
+  it "plus equals multiple times" do
+    conf = TestUtils.parse_config(' a += 1, a += 2, a += 3 ').resolve
+    expect([1, 2, 3]).to eq(conf.get_int_list("a"))
+  end
+
+  it "plus equals multiple times nested" do
+    conf = TestUtils.parse_config(' x { a += 1, a += 2, a += 3 } ').resolve
+    expect([1, 2, 3]).to eq(conf.get_int_list("x.a"))
+  end
+
+  it "plus equals an object multiple times" do
+    conf = TestUtils.parse_config(' a += { b: 1 }, a += { b: 2 }, a += { b: 3 } ').resolve
+    expect([1, 2, 3]).to eq(conf.get_object_list("a").map { |x| x.to_config.get_int("b")})
+  end
+
+  it "plus equals an object multiple times nested" do
+    conf = TestUtils.parse_config(' x { a += { b: 1 }, a += { b: 2 }, a += { b: 3 } } ').resolve
+    expect([1, 2, 3]).to eq(conf.get_object_list("x.a").map { |x| x.to_config.get_int("b") })
+  end
+
+  # We would ideally make this case NOT throw an exception but we need to do some work
+  # to get there, see https://github.com/typesafehub/config/issues/160
+  it "plus equals multiple times nested in array" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      conf = TestUtils.parse_config('x = [ { a += 1, a += 2, a += 3 } ] ').resolve
+      expect([1, 2, 3]).to eq(conf.get_object_list("x").to_config.get_int_list("a"))
+    }
+    expect(e.message).to include("limitation")
+  end
+
+  # We would ideally make this case NOT throw an exception but we need to do some work
+  # to get there, see https://github.com/typesafehub/config/issues/160
+  it "plus equals multiple times nested in plus equals" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      conf = TestUtils.parse_config('x += { a += 1, a += 2, a += 3 } ').resolve
+      expect([1, 2, 3]).to eq(conf.get_object_list("x").to_config.get_int_list("a"))
+    }
+    expect(e.message).to include("limitation")
+  end
+
+  # from https://github.com/typesafehub/config/issues/177
+  it "array concatenation in double nested delayed merge" do
+    unresolved = TestUtils.parse_config("d { x = [] }, c : ${d}, c { x += 1, x += 2 }")
+    conf = unresolved.resolve
+    expect([1,2]).to eq(conf.get_int_list("c.x"))
+  end
+
+  # from https://github.com/typesafehub/config/issues/177
+  it "array concatenation as part of delayed merge" do
+    unresolved = TestUtils.parse_config(" c { x: [], x : ${c.x}[1], x : ${c.x}[2] }")
+    conf = unresolved.resolve
+    expect([1,2]).to eq(conf.get_int_list("c.x"))
+  end
+
+  # from https://github.com/typesafehub/config/issues/177
+  it "array concatenation in double nested delayed merge 2" do
+    unresolved = TestUtils.parse_config("d { x = [] }, c : ${d}, c { x : ${c.x}[1], x : ${c.x}[2] }")
+    conf = unresolved.resolve
+    expect([1,2]).to eq(conf.get_int_list("c.x"))
+  end
+
+  # from https://github.com/typesafehub/config/issues/177
+  it "array concatenation in triple nested delayed merge" do
+    unresolved = TestUtils.parse_config("{ r: { d.x=[] }, q: ${r}, q : { d { x = [] }, c : ${q.d}, c { x : ${q.c.x}[1], x : ${q.c.x}[2] } } }")
+    conf = unresolved.resolve
+    expect([1,2]).to eq(conf.get_int_list("q.c.x"))
+  end
+
+  it "concat undefined substitution with string" do
+    conf = TestUtils.parse_config("a = foo${?bar}").resolve
+    expect("foo").to eq(conf.get_string("a"))
+  end
+
+  it "concat defined optional substitution with string" do
+    conf = TestUtils.parse_config("bar=bar, a = foo${?bar}").resolve
+    expect("foobar").to eq(conf.get_string("a"))
+  end
+
+  it "concat defined substitution with array" do
+    conf = TestUtils.parse_config("a = [1] ${?bar}").resolve
+    expect([1]).to eq(conf.get_int_list("a"))
+  end
+
+  it "concat defined optional substitution with array" do
+    conf = TestUtils.parse_config("bar=[2], a = [1] ${?bar}").resolve
+    expect([1, 2]).to eq(conf.get_int_list("a"))
+  end
+
+  it "concat undefined substitution with object" do
+    conf = TestUtils.parse_config('a = { x : "foo" } ${?bar}').resolve
+    expect('foo').to eq(conf.get_string("a.x"))
+  end
+
+  it "concat defined optional substitution with object" do
+    conf = TestUtils.parse_config('bar={ y : 42 }, a = { x : "foo" } ${?bar}').resolve
+    expect('foo').to eq(conf.get_string("a.x"))
+    expect(42).to eq(conf.get_int("a.y"))
+  end
+
+  it "concat two undefined substitutions" do
+    conf = TestUtils.parse_config("a = ${?foo}${?bar}").resolve
+    expect(conf.has_path?("a")).to be_falsey
+  end
+
+  it "concat several undefined substitutions" do
+    conf = TestUtils.parse_config("a = ${?foo}${?bar}${?baz}${?woooo}").resolve
+    expect(conf.has_path?("a")).to be_falsey
+  end
+
+  it "concat two undefined substitutions with a space" do
+    conf = TestUtils.parse_config("a = ${?foo} ${?bar}").resolve
+    expect(conf.get_string("a")).to eq(" ")
+  end
+
+  it "concat two defined substitutions with a space" do
+    conf = TestUtils.parse_config("foo=abc, bar=def, a = ${foo} ${bar}").resolve
+    expect(conf.get_string("a")).to eq("abc def")
+  end
+
+  it "concat two undefined substitutions with empty string" do
+    conf = TestUtils.parse_config('a = ""${?foo}${?bar}').resolve
+    expect(conf.get_string("a")).to eq("")
+  end
+
+  it "concat substitutions that are objects with no space" do
+    conf = TestUtils.parse_config('foo = { a : 1}, bar = { b : 2 }, x = ${foo}${bar}').resolve
+    expect(1).to eq(conf.get_int("x.a"))
+    expect(2).to eq(conf.get_int("x.b"))
+  end
+
+  # whitespace is insignificant if substitutions don't turn out to be a string
+  it "concat substitutions that are objects with space" do
+    conf = TestUtils.parse_config('foo = { a : 1}, bar = { b : 2 }, x = ${foo} ${bar}').resolve
+    expect(1).to eq(conf.get_int("x.a"))
+    expect(2).to eq(conf.get_int("x.b"))
+  end
+
+  # whitespace is insignificant if substitutions don't turn out to be a string
+  it "concat substitutions that are lists with space" do
+    conf = TestUtils.parse_config('foo = [1], bar = [2], x = ${foo} ${bar}').resolve
+    expect([1,2]).to eq(conf.get_int_list("x"))
+  end
+
+  # but quoted whitespace should be an error
+  it "concat substitutions that are objects with quoted space" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      conf = TestUtils.parse_config('foo = { a : 1}, bar = { b : 2 }, x = ${foo}"  "${bar}').resolve
+    }
+  end
+
+  # but quoted whitespace should be an error
+  it "concat substitutions that are lists with quoted space" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigWrongTypeError) {
+      conf = TestUtils.parse_config('foo = [1], bar = [2], x = ${foo}"  "${bar}').resolve
+    }
+  end
+end
diff --git a/spec/unit/typesafe/config/conf_parser_spec.rb b/spec/unit/typesafe/config/conf_parser_spec.rb
new file mode 100644
index 0000000..25a3361
--- /dev/null
+++ b/spec/unit/typesafe/config/conf_parser_spec.rb
@@ -0,0 +1,831 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'test_utils'
+require 'hocon/config_parse_options'
+require 'hocon/config_syntax'
+require 'hocon/impl/abstract_config_object'
+require 'hocon/impl/resolve_context'
+require 'hocon/config_resolve_options'
+require 'hocon/config_error'
+require 'hocon/impl/simple_config_origin'
+require 'hocon/config_list'
+require 'hocon/impl/config_reference'
+require 'hocon/impl/path_parser'
+require 'hocon/impl/parseable'
+require 'hocon/config_factory'
+
+def parse_without_resolving(s)
+  options = Hocon::ConfigParseOptions.defaults.
+              set_origin_description("test conf string").
+              set_syntax(Hocon::ConfigSyntax::CONF)
+  Hocon::Impl::Parseable.new_string(s, options).parse_value
+end
+
+def parse(s)
+  tree = parse_without_resolving(s)
+
+  if tree.is_a?(Hocon::Impl::AbstractConfigObject)
+    Hocon::Impl::ResolveContext.resolve(tree, tree,
+      Hocon::ConfigResolveOptions.no_system)
+  else
+    tree
+  end
+end
+
+
+describe "Config Parser" do
+  context "invalid_conf_throws" do
+    TestUtils.whitespace_variations(TestUtils::InvalidConf, false).each do |invalid|
+      it "should raise an error for invalid config string '#{invalid.test}'" do
+        TestUtils.add_offending_json_to_exception("config", invalid.test) {
+          TestUtils.intercept(Hocon::ConfigError) {
+            parse(invalid.test)
+          }
+        }
+      end
+    end
+  end
+
+  context "valid_conf_works" do
+    TestUtils.whitespace_variations(TestUtils::ValidConf, true).each do |valid|
+      it "should successfully parse config string '#{valid.test}'" do
+        our_ast = TestUtils.add_offending_json_to_exception("config-conf", valid.test) {
+          parse(valid.test)
+        }
+        # let's also check round-trip rendering
+        rendered = our_ast.render
+        reparsed = TestUtils.add_offending_json_to_exception("config-conf-reparsed", rendered) {
+          parse(rendered)
+        }
+        expect(our_ast).to eq(reparsed)
+      end
+    end
+  end
+end
+
+def parse_path(s)
+  first_exception = nil
+  second_exception = nil
+  # parser first by wrapping into a whole document and using the regular parser
+  result =
+      begin
+        tree = parse_without_resolving("[${#{s}}]")
+        if tree.is_a?(Hocon::ConfigList)
+          ref = tree[0]
+          if ref.is_a?(Hocon::Impl::ConfigReference)
+            ref.expression.path
+          end
+        end
+      rescue Hocon::ConfigError => e
+        first_exception = e
+        nil
+      end
+
+  # also parse with the standalone path parser and be sure the outcome is the same
+  begin
+    should_be_same = Hocon::Impl::PathParser.parse_path(s)
+    unless result == should_be_same
+      raise "expected '#{result}' to equal '#{should_be_same}'"
+    end
+  rescue Hocon::ConfigError => e
+    second_exception = e
+  end
+
+  if first_exception.nil? && (!second_exception.nil?)
+    raise "only the standalone path parser threw: #{second_exception}"
+  end
+
+  if (!first_exception.nil?) && second_exception.nil?
+    raise "only the whole-document parser threw: #{first_exception}"
+  end
+
+  if !first_exception.nil?
+    raise first_exception
+  end
+  if !second_exception.nil?
+    raise "wtf, should have thrown because not equal"
+  end
+
+  result
+end
+
+def test_path_parsing(first, second)
+  it "'#{first}' should parse to same path as '#{second}'" do
+    expect(TestUtils.path(*first)).to eq(parse_path(second))
+  end
+end
+
+describe "Config Parser" do
+  context "path_parsing" do
+    test_path_parsing(["a"], "a")
+    test_path_parsing(["a", "b"], "a.b")
+    test_path_parsing(["a.b"], "\"a.b\"")
+    test_path_parsing(["a."], "\"a.\"")
+    test_path_parsing([".b"], "\".b\"")
+    test_path_parsing(["true"], "true")
+    test_path_parsing(["a"], " a ")
+    test_path_parsing(["a ", "b"], " a .b")
+    test_path_parsing(["a ", " b"], " a . b")
+    test_path_parsing(["a  b"], " a  b")
+    test_path_parsing(["a", "b.c", "d"], "a.\"b.c\".d")
+    test_path_parsing(["3", "14"], "3.14")
+    test_path_parsing(["3", "14", "159"], "3.14.159")
+    test_path_parsing(["a3", "14"], "a3.14")
+    test_path_parsing([""], "\"\"")
+    test_path_parsing(["a", "", "b"], "a.\"\".b")
+    test_path_parsing(["a", ""], "a.\"\"")
+    test_path_parsing(["", "b"], "\"\".b")
+    test_path_parsing(["", "", ""], ' "".""."" ')
+    test_path_parsing(["a-c"], "a-c")
+    test_path_parsing(["a_c"], "a_c")
+    test_path_parsing(["-"], "\"-\"")
+    test_path_parsing(["-"], "-")
+    test_path_parsing(["-foo"], "-foo")
+    test_path_parsing(["-10"], "-10")
+
+    # here 10.0 is part of an unquoted string
+    test_path_parsing(["foo10", "0"], "foo10.0")
+    # here 10.0 is a number that gets value-concatenated
+    test_path_parsing(["10", "0foo"], "10.0foo")
+    # just a number
+    test_path_parsing(["10", "0"], "10.0")
+    # multiple-decimal number
+    test_path_parsing(["1", "2", "3", "4"], "1.2.3.4")
+
+    ["", " ", "  \n   \n  ", "a.", ".b", "a..b", "a${b}c", "\"\".", ".\"\""].each do |invalid|
+      begin
+        it "should raise a ConfigBadPathError for '#{invalid}'" do
+          TestUtils.intercept(Hocon::ConfigError::ConfigBadPathError) {
+            parse_path(invalid)
+          }
+        end
+      rescue => e
+        $stderr.puts("failed on '#{invalid}'")
+        raise e
+      end
+    end
+  end
+
+  it "should allow the last instance to win when duplicate keys are found" do
+    obj = TestUtils.parse_config('{ "a" : 10, "a" : 11 } ')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_int("a")).to eq(11)
+  end
+
+  it "should merge maps when duplicate keys are found" do
+    obj = TestUtils.parse_config('{ "a" : { "x" : 1, "y" : 2 }, "a" : { "x" : 42, "z" : 100 } }')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_object("a").size).to eq(3)
+    expect(obj.get_int("a.x")).to eq(42)
+    expect(obj.get_int("a.y")).to eq(2)
+    expect(obj.get_int("a.z")).to eq(100)
+  end
+
+  it "should merge maps recursively when duplicate keys are found" do
+    obj = TestUtils.parse_config('{ "a" : { "b" : { "x" : 1, "y" : 2 } }, "a" : { "b" : { "x" : 42, "z" : 100 } } }')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_object("a").size).to eq(1)
+    expect(obj.get_object("a.b").size).to eq(3)
+    expect(obj.get_int("a.b.x")).to eq(42)
+    expect(obj.get_int("a.b.y")).to eq(2)
+    expect(obj.get_int("a.b.z")).to eq(100)
+  end
+
+  it "should merge maps recursively when three levels of duplicate keys are found" do
+    obj = TestUtils.parse_config('{ "a" : { "b" : { "c" : { "x" : 1, "y" : 2 } } }, "a" : { "b" : { "c" : { "x" : 42, "z" : 100 } } } }')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_object("a").size).to eq(1)
+    expect(obj.get_object("a.b").size).to eq(1)
+    expect(obj.get_object("a.b.c").size).to eq(3)
+    expect(obj.get_int("a.b.c.x")).to eq(42)
+    expect(obj.get_int("a.b.c.y")).to eq(2)
+    expect(obj.get_int("a.b.c.z")).to eq(100)
+  end
+
+  it "should 'reset' a key when a null is found" do
+    obj = TestUtils.parse_config('{ a : { b : 1 }, a : null, a : { c : 2 } }')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_object("a").size).to eq(1)
+    expect(obj.get_int("a.c")).to eq(2)
+  end
+
+  it "should 'reset' a map key when a scalar is found" do
+    obj = TestUtils.parse_config('{ a : { b : 1 }, a : 42, a : { c : 2 } }')
+
+    expect(obj.root.size).to eq(1)
+    expect(obj.get_object("a").size).to eq(1)
+    expect(obj.get_int("a.c")).to eq(2)
+  end
+end
+
+def drop_curlies(s)
+  # drop the outside curly braces
+  first = s.index('{')
+  last = s.rindex('}')
+  "#{s.slice(0..first)}#{s.slice(first+1..last)}#{s.slice(last + 1)}"
+end
+
+describe "Config Parser" do
+  context "implied_comma_handling" do
+    valids = ['
+// one line
+{
+  a : y, b : z, c : [ 1, 2, 3 ]
+}', '
+// multiline but with all commas
+{
+  a : y,
+  b : z,
+  c : [
+    1,
+    2,
+    3,
+  ],
+}
+', '
+// multiline with no commas
+{
+  a : y
+  b : z
+  c : [
+    1
+    2
+    3
+  ]
+}
+']
+
+    changes =   [
+        Proc.new { |s| s },
+        Proc.new { |s| s.gsub("\n", "\n\n") },
+        Proc.new { |s| s.gsub("\n", "\n\n\n") },
+        Proc.new { |s| s.gsub(",\n", "\n,\n")},
+        Proc.new { |s| s.gsub(",\n", "\n\n,\n\n") },
+        Proc.new { |s| s.gsub("\n", " \n ") },
+        Proc.new { |s| s.gsub(",\n", "  \n  \n  ,  \n  \n  ") },
+        Proc.new { |s| drop_curlies(s) }
+    ]
+
+    tested = 0
+    changes.each do |change|
+      valids.each do |v|
+        tested += 1
+        s = change.call(v)
+        it "should handle commas and whitespaces properly for string '#{s}'" do
+          obj = TestUtils.parse_config(s)
+          expect(obj.root.size).to eq(3)
+          expect(obj.get_string("a")).to eq("y")
+          expect(obj.get_string("b")).to eq("z")
+          expect(obj.get_int_list("c")).to eq([1,2,3])
+        end
+      end
+    end
+
+    it "should have run one test per change per valid string" do
+      expect(tested).to eq(changes.length * valids.length)
+    end
+
+    context "should concatenate values when there is no newline or comma" do
+      it "with no newline in array" do
+        expect(TestUtils.parse_config(" { c : [ 1 2 3 ] } ").
+                   get_string_list("c")).to eq (["1 2 3"])
+      end
+
+      it "with no newline in array with quoted strings" do
+        expect(TestUtils.parse_config(' { c : [ "4" "5" "6" ] } ').
+                   get_string_list("c")).to eq (["4 5 6"])
+      end
+
+      it "with no newline in object" do
+        expect(TestUtils.parse_config(' { a : b c } ').
+                   get_string("a")).to eq ("b c")
+      end
+
+      it "with no newline at end" do
+        expect(TestUtils.parse_config('a: b').
+                   get_string("a")).to eq ("b")
+      end
+
+      it "errors when no newline between keys" do
+        TestUtils.intercept(Hocon::ConfigError) {
+          TestUtils.parse_config('{ a : y b : z }')
+        }
+      end
+
+      it "errors when no newline between quoted keys" do
+        TestUtils.intercept(Hocon::ConfigError) {
+          TestUtils.parse_config('{ "a" : "y" "b" : "z" }')
+        }
+      end
+    end
+  end
+
+  it "should support keys with slashes" do
+    obj = TestUtils.parse_config('/a/b/c=42, x/y/z : 32')
+    expect(obj.get_int("/a/b/c")).to eq(42)
+    expect(obj.get_int("x/y/z")).to eq(32)
+  end
+end
+
+def line_number_test(num, text)
+  it "should include the line number #{num} in the error message for invalid string '#{text}'" do
+    e = TestUtils.intercept(Hocon::ConfigError) {
+      TestUtils.parse_config(text)
+    }
+    if ! (e.message.include?("#{num}:"))
+      raise "error message did not contain line '#{num}' '#{text.gsub("\n", "\\n")}' (#{e})"
+    end
+  end
+end
+
+describe "Config Parser" do
+  context "line_numbers_in_errors" do
+    # error is at the last char
+    line_number_test(1, "}")
+    line_number_test(2, "\n}")
+    line_number_test(3, "\n\n}")
+
+    # error is before a final newline
+    line_number_test(1, "}\n")
+    line_number_test(2, "\n}\n")
+    line_number_test(3, "\n\n}\n")
+
+    # with unquoted string
+    line_number_test(1, "foo")
+    line_number_test(2, "\nfoo")
+    line_number_test(3, "\n\nfoo")
+
+    # with quoted string
+    line_number_test(1, "\"foo\"")
+    line_number_test(2, "\n\"foo\"")
+    line_number_test(3, "\n\n\"foo\"")
+
+    # newlines in triple-quoted string should not hose up the numbering
+    line_number_test(1, "a : \"\"\"foo\"\"\"}")
+    line_number_test(2, "a : \"\"\"foo\n\"\"\"}")
+    line_number_test(3, "a : \"\"\"foo\nbar\nbaz\"\"\"}")
+    #   newlines after the triple quoted string
+    line_number_test(5, "a : \"\"\"foo\nbar\nbaz\"\"\"\n\n}")
+    #   triple quoted string ends in a newline
+    line_number_test(6, "a : \"\"\"foo\nbar\nbaz\n\"\"\"\n\n}")
+    #   end in the middle of triple-quoted string
+    line_number_test(5, "a : \"\"\"foo\n\n\nbar\n")
+  end
+
+  context "to_string_for_parseables" do
+    # just to be sure the to_string don't throw, to get test coverage
+    options = Hocon::ConfigParseOptions.defaults
+    it "should allow to_s on File Parseable" do
+      Hocon::Impl::Parseable.new_file("foo", options).to_s
+    end
+
+    it "should allow to_s on Resources Parseable" do
+      Hocon::Impl::Parseable.new_resources("foo", options).to_s
+    end
+
+    it "should allow to_s on Resources Parseable" do
+      Hocon::Impl::Parseable.new_string("foo", options).to_s
+    end
+
+    # NOTE: Skipping 'newURL', 'newProperties', 'newReader' tests here
+    # because we don't implement them
+  end
+end
+
+def assert_comments(comments, conf)
+  it "should have comments #{comments} at root" do
+    expect(conf.root.origin.comments).to eq(comments)
+  end
+end
+
+def assert_comments_at_path(comments, conf, path)
+  it "should have comments #{comments} at path #{path}" do
+    expect(conf.get_value(path).origin.comments).to eq(comments)
+  end
+end
+
+def assert_comments_at_path_index(comments, conf, path, index)
+  it "should have comments #{comments} at path #{path} and index #{index}" do
+    expect(conf.get_list(path).get(index).origin.comments).to eq(comments)
+  end
+end
+
+describe "Config Parser" do
+  context "track_comments_for_single_field" do
+    # no comments
+    conf0 = TestUtils.parse_config('
+                {
+                foo=10 }
+                ')
+    assert_comments_at_path([], conf0, "foo")
+
+    # comment in front of a field is used
+    conf1 = TestUtils.parse_config('
+                { # Before
+                foo=10 }
+                ')
+    assert_comments_at_path([" Before"], conf1, "foo")
+
+    # comment with a blank line after is dropped
+    conf2 = TestUtils.parse_config('
+                { # BlankAfter
+
+                foo=10 }
+                ')
+    assert_comments_at_path([], conf2, "foo")
+
+    # comment in front of a field is used with no root {}
+    conf3 = TestUtils.parse_config('
+                # BeforeNoBraces
+                foo=10
+                ')
+    assert_comments_at_path([" BeforeNoBraces"], conf3, "foo")
+
+    # comment with a blank line after is dropped with no root {}
+    conf4 = TestUtils.parse_config('
+                # BlankAfterNoBraces
+
+                foo=10
+                ')
+    assert_comments_at_path([], conf4, "foo")
+
+    # comment same line after field is used
+    conf5 = TestUtils.parse_config('
+                {
+                foo=10 # SameLine
+                }
+                ')
+    assert_comments_at_path([" SameLine"], conf5, "foo")
+
+    # comment before field separator is used
+    conf6 = TestUtils.parse_config('
+                {
+                foo # BeforeSep
+                =10
+                }
+                ')
+    assert_comments_at_path([" BeforeSep"], conf6, "foo")
+
+    # comment after field separator is used
+    conf7 = TestUtils.parse_config('
+                {
+                foo= # AfterSep
+                10
+                }
+                ')
+    assert_comments_at_path([" AfterSep"], conf7, "foo")
+
+    # comment on next line is NOT used
+    conf8 = TestUtils.parse_config('
+                {
+                foo=10
+                # NextLine
+                }
+                ')
+    assert_comments_at_path([], conf8, "foo")
+
+    # comment before field separator on new line
+    conf9 = TestUtils.parse_config('
+                {
+                foo
+                # BeforeSepOwnLine
+                =10
+                }
+                ')
+    assert_comments_at_path([" BeforeSepOwnLine"], conf9, "foo")
+
+    # comment after field separator on its own line
+    conf10 = TestUtils.parse_config('
+                {
+                foo=
+                # AfterSepOwnLine
+                10
+                }
+                ')
+    assert_comments_at_path([" AfterSepOwnLine"], conf10, "foo")
+
+    # comments comments everywhere
+    conf11 = TestUtils.parse_config('
+                {# Before
+                foo
+                # BeforeSep
+                = # AfterSepSameLine
+                # AfterSepNextLine
+                10 # AfterValue
+                # AfterValueNewLine (should NOT be used)
+                }
+                ')
+    assert_comments_at_path([" Before", " BeforeSep", " AfterSepSameLine", " AfterSepNextLine", " AfterValue"], conf11, "foo")
+
+    # empty object
+    conf12 = TestUtils.parse_config('# BeforeEmpty
+                {} #AfterEmpty
+                # NewLine
+                ')
+    assert_comments([" BeforeEmpty", "AfterEmpty"], conf12)
+
+    # empty array
+    conf13 = TestUtils.parse_config('
+                foo=
+                # BeforeEmptyArray
+                  [] #AfterEmptyArray
+                # NewLine
+                ')
+    assert_comments_at_path([" BeforeEmptyArray", "AfterEmptyArray"], conf13, "foo")
+
+    # array element
+    conf14 = TestUtils.parse_config('
+                foo=[
+                # BeforeElement
+                10 # AfterElement
+                ]
+                ')
+    assert_comments_at_path_index(
+        [" BeforeElement", " AfterElement"], conf14, "foo", 0)
+
+    # field with comma after it
+    conf15 = TestUtils.parse_config('
+                foo=10, # AfterCommaField
+                ')
+    assert_comments_at_path([" AfterCommaField"], conf15, "foo")
+
+    # element with comma after it
+    conf16 = TestUtils.parse_config('
+                foo=[10, # AfterCommaElement
+                ]
+                ')
+    assert_comments_at_path_index([" AfterCommaElement"], conf16, "foo", 0)
+
+    # field with comma after it but comment isn't on the field's line, so not used
+    conf17 = TestUtils.parse_config('
+                foo=10
+                , # AfterCommaFieldNotUsed
+                ')
+    assert_comments_at_path([], conf17, "foo")
+
+    # element with comma after it but comment isn't on the field's line, so not used
+    conf18 = TestUtils.parse_config('
+                foo=[10
+                , # AfterCommaElementNotUsed
+                ]
+                ')
+    assert_comments_at_path_index([], conf18, "foo", 0)
+
+    # comment on new line, before comma, should not be used
+    conf19 = TestUtils.parse_config('
+                foo=10
+                # BeforeCommaFieldNotUsed
+                ,
+                ')
+    assert_comments_at_path([], conf19, "foo")
+
+    # comment on new line, before comma, should not be used
+    conf20 = TestUtils.parse_config('
+                foo=[10
+                # BeforeCommaElementNotUsed
+                ,
+                ]
+                ')
+    assert_comments_at_path_index([], conf20, "foo", 0)
+
+    # comment on same line before comma
+    conf21 = TestUtils.parse_config('
+                foo=10 # BeforeCommaFieldSameLine
+                ,
+                ')
+    assert_comments_at_path([" BeforeCommaFieldSameLine"], conf21, "foo")
+
+    # comment on same line before comma
+    conf22 = TestUtils.parse_config('
+                foo=[10 # BeforeCommaElementSameLine
+                ,
+                ]
+                ')
+    assert_comments_at_path_index([" BeforeCommaElementSameLine"], conf22, "foo", 0)
+  end
+
+  context "track_comments_for_multiple_fields" do
+    # nested objects
+    conf5 = TestUtils.parse_config('
+             # Outside
+             bar {
+                # Ignore me
+
+                # Middle
+                # two lines
+                baz {
+                    # Inner
+                    foo=10 # AfterInner
+                    # This should be ignored
+                } # AfterMiddle
+                # ignored
+             } # AfterOutside
+             # ignored!
+             ')
+    assert_comments_at_path([" Inner", " AfterInner"], conf5, "bar.baz.foo")
+    assert_comments_at_path([" Middle", " two lines", " AfterMiddle"], conf5, "bar.baz")
+    assert_comments_at_path([" Outside", " AfterOutside"], conf5, "bar")
+
+    # multiple fields
+    conf6 = TestUtils.parse_config('{
+                # this is not with a field
+
+                # this is field A
+                a : 10,
+                # this is field B
+                b : 12 # goes with field B which has no comma
+                # this is field C
+                c : 14, # goes with field C after comma
+                # not used
+                # this is not used
+                # nor is this
+                # multi-line block
+
+                # this is with field D
+                # this is with field D also
+                d : 16
+
+                # this is after the fields
+    }')
+    assert_comments_at_path([" this is field A"], conf6, "a")
+    assert_comments_at_path([" this is field B", " goes with field B which has no comma"], conf6, "b")
+    assert_comments_at_path([" this is field C", " goes with field C after comma"], conf6, "c")
+    assert_comments_at_path([" this is with field D", " this is with field D also"], conf6, "d")
+
+    # array
+    conf7 = TestUtils.parse_config('
+                # before entire array
+                array = [
+                # goes with 0
+                0,
+                # goes with 1
+                1, # with 1 after comma
+                # goes with 2
+                2 # no comma after 2
+                # not with anything
+                ] # after entire array
+                ')
+    assert_comments_at_path_index([" goes with 0"], conf7, "array", 0)
+    assert_comments_at_path_index([" goes with 1", " with 1 after comma"], conf7, "array", 1)
+    assert_comments_at_path_index([" goes with 2", " no comma after 2"], conf7, "array", 2)
+    assert_comments_at_path([" before entire array", " after entire array"], conf7, "array")
+
+    # properties-like syntax
+    conf8 = TestUtils.parse_config('
+                # ignored comment
+                
+                # x.y comment
+                x.y = 10
+                # x.z comment
+                x.z = 11
+                # x.a comment
+                x.a = 12
+                # a.b comment
+                a.b = 14
+                a.c = 15
+                a.d = 16 # a.d comment
+                # ignored comment
+                ')
+
+    assert_comments_at_path([" x.y comment"], conf8, "x.y")
+    assert_comments_at_path([" x.z comment"], conf8, "x.z")
+    assert_comments_at_path([" x.a comment"], conf8, "x.a")
+    assert_comments_at_path([" a.b comment"], conf8, "a.b")
+    assert_comments_at_path([], conf8, "a.c")
+    assert_comments_at_path([" a.d comment"], conf8, "a.d")
+    # here we're concerned that comments apply only to leaf
+    # nodes, not to parent objects.
+    assert_comments_at_path([], conf8, "x")
+    assert_comments_at_path([], conf8, "a")
+  end
+
+  context "loading unicode file paths" do
+    it "should be able to parse files with unicode file paths" do
+      expect(Hocon.load("#{FIXTURE_DIR}/test_utils/resources/ᚠᛇᚻ.conf")).to eq({'ᚠᛇᚻ' => '᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢ'})
+    end
+  end
+
+  it "includeFile" do
+    conf = Hocon::ConfigFactory.parse_string("include file(" +
+              TestUtils.json_quoted_resource_file("test01") + ")")
+
+    # should have loaded conf, json... skipping properties
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+    expect(conf.get_int("fromJson1")).to eq(1)
+  end
+
+  it "includeFileWithExtension" do
+    conf = Hocon::ConfigFactory.parse_string("include file(" +
+              TestUtils.json_quoted_resource_file("test01.conf") + ")")
+
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+    expect(conf.has_path?("fromJson1")).to eq(false)
+    expect(conf.has_path?("fromProps.abc")).to eq(false)
+  end
+
+  it "includeFileWhitespaceInsideParens" do
+    conf = Hocon::ConfigFactory.parse_string("include file(  \n  " +
+              TestUtils.json_quoted_resource_file("test01") + "  \n  )")
+
+    # should have loaded conf, json... NOT properties
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+    expect(conf.get_int("fromJson1")).to eq(1)
+  end
+
+  it "includeFileNoWhitespaceOutsideParens" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      Hocon::ConfigFactory.parse_string("include file (" +
+        TestUtils.json_quoted_resource_file("test01") + ")")
+    }
+    expect(e.message.include?("expecting include parameter")).to eq(true)
+  end
+
+  it "includeFileNotQuoted" do
+    # this test cannot work on Windows
+    f = TestUtils.resource_file("test01")
+    if (f.to_s.include?("\\"))
+      $stderr.puts("includeFileNotQuoted test skipped on Windows")
+    else
+      e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+        Hocon::ConfigFactory.parse_string("include file(" + f + ")")
+      }
+      expect(e.message.include?("expecting include parameter")).to eq(true)
+    end
+  end
+
+  it "includeFileNotQuotedAndSpecialChar" do
+    f = TestUtils.resource_file("test01")
+    if (f.to_s.include?("\\"))
+      $stderr.puts("includeFileNotQuotedAndSpecialChar test skipped on Windows")
+    else
+      e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+        Hocon::ConfigFactory.parse_string("include file(:" + f + ")")
+      }
+      expect(e.message.include?("expecting a quoted string")).to eq(true)
+    end
+
+  end
+
+  it "includeFileUnclosedParens" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) {
+      Hocon::ConfigFactory.parse_string("include file(" + TestUtils.json_quoted_resource_file("test01") + " something")
+    }
+    expect(e.message.include?("expecting a close paren")).to eq(true)
+  end
+
+  # Skipping 'includeURLBasename' because we don't support URLs
+  # Skipping 'includeURLWithExtension' because we don't support URLs
+  # Skipping 'includeURLInvalid' because we don't support URLs
+  # Skipping 'includeResources' because we don't support classpath resources
+  # Skipping 'includeURLHeuristically' because we don't support URLs
+  # Skipping 'includeURLBasenameHeuristically' because we don't support URLs
+
+  it "acceptsUTF8FileContents" do
+    # utf8.conf is UTF-8 with no BOM
+    rune_utf8 = "\u16EB\u16D2\u16E6\u16A6\u16EB\u16A0\u16B1\u16A9\u16A0\u16A2"
+    conf = Hocon::ConfigFactory.parse_file(TestUtils.resource_file("utf8.conf"))
+    expect(conf.get_string("\u16A0\u16C7\u16BB")).to eq(rune_utf8)
+  end
+
+  it "shouldacceptUTF16FileContents" do
+    skip('supporting UTF-16 requires appropriate BOM detection during parsing') do
+      # utf16.conf is UTF-16LE with a BOM
+      expect { Hocon::ConfigFactory.parse_file(TestUtils.resource_file("utf16.conf")) }.to raise_error
+    end
+  end
+
+  it "acceptBOMStartingFile" do
+    # BOM at start of file should be ignored
+    conf = Hocon::ConfigFactory.parse_file(TestUtils.resource_file("bom.conf"))
+    expect(conf.get_string("foo")).to eq("bar")
+  end
+
+  it "acceptBOMInStringValue" do
+    # BOM inside quotes should be preserved, just as other whitespace would be
+    conf = Hocon::ConfigFactory.parse_string("foo=\"\uFEFF\uFEFF\"")
+    expect(conf.get_string("foo")).to eq("\uFEFF\uFEFF")
+  end
+
+  it "acceptBOMWhitespace" do
+    skip("BOM not parsing properly yet; not fixing this now because it most likely only affects windows") do
+      # BOM here should be treated like other whitespace (ignored, since no quotes)
+      conf = Hocon::ConfigFactory.parse_string("foo= \uFEFFbar\uFEFF")
+      expect(conf.get_string("foo")).to eq("bar")
+    end
+  end
+
+  it "acceptMultiPeriodNumericPath" do
+    conf1 = Hocon::ConfigFactory.parse_string("0.1.2.3=foobar1")
+    expect(conf1.get_string("0.1.2.3")).to eq("foobar1")
+    conf2 = Hocon::ConfigFactory.parse_string("0.1.2.3.ABC=foobar2")
+    expect(conf2.get_string("0.1.2.3.ABC")).to eq("foobar2")
+    conf3 = Hocon::ConfigFactory.parse_string("ABC.0.1.2.3=foobar3")
+    expect(conf3.get_string("ABC.0.1.2.3")).to eq("foobar3")
+  end
+end
diff --git a/spec/unit/typesafe/config/config_document_parser_spec.rb b/spec/unit/typesafe/config/config_document_parser_spec.rb
new file mode 100644
index 0000000..3ffd956
--- /dev/null
+++ b/spec/unit/typesafe/config/config_document_parser_spec.rb
@@ -0,0 +1,494 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'hocon/impl/config_document_parser'
+require 'test_utils'
+
+describe "ConfigDocumentParser" do
+  ConfigDocumentParser = Hocon::Impl::ConfigDocumentParser
+  ConfigParseOptions = Hocon::ConfigParseOptions
+  ConfigSyntax = Hocon::ConfigSyntax
+  shared_examples_for "parse test" do
+    it "should correctly render the parsed node" do
+      node = ConfigDocumentParser.parse(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(orig_text)
+    end
+  end
+
+  shared_examples_for "parse JSON failures test" do
+    it "should throw an exception when parsing invalid JSON" do
+      e = TestUtils.intercept(Hocon::ConfigError) {
+        ConfigDocumentParser.parse(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      }
+      expect(e.message).to include(contains_message)
+    end
+  end
+
+  shared_examples_for "parse simple value test" do
+    it "should correctly parse and render the original text as CONF" do
+      expected_rendered_text = final_text.nil? ? orig_text : final_text
+      node = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(expected_rendered_text)
+      expect(node).to be_a(Hocon::Impl::ConfigNodeSimpleValue)
+    end
+
+    it "should correctly parse and render the original text as JSON" do
+      expected_rendered_text = final_text.nil? ? orig_text : final_text
+      nodeJSON = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      expect(nodeJSON.render).to eq(expected_rendered_text)
+      expect(nodeJSON).to be_a(Hocon::Impl::ConfigNodeSimpleValue)
+    end
+  end
+
+  shared_examples_for "parse complex value test" do
+    it "should correctly parse and render the original text as CONF" do
+      node = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(orig_text)
+      expect(node).to be_a(Hocon::Impl::ConfigNodeComplexValue)
+    end
+
+    it "should correctly parse and render the original text as JSON" do
+      nodeJSON = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      expect(nodeJSON.render).to eq(orig_text)
+      expect(nodeJSON).to be_a(Hocon::Impl::ConfigNodeComplexValue)
+    end
+  end
+
+  shared_examples_for "parse single value invalid JSON test" do
+    it "should correctly parse and render the original text as CONF" do
+      node = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(orig_text)
+    end
+
+    it "should throw an exception when parsing the original text as JSON" do
+      e = TestUtils.intercept(Hocon::ConfigError) {
+        ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      }
+      expect(e.message).to include(contains_message)
+    end
+  end
+
+  shared_examples_for "parse leading trailing failure" do
+    it "should throw an exception when parsing an invalid single value" do
+      e = TestUtils.intercept(Hocon::ConfigError) {
+        ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      }
+      expect(e.message).to include("The value from setValue cannot have leading or trailing newlines, whitespace, or comments")
+    end
+  end
+
+  context "parse_success" do
+    context "simple map with no braces" do
+      let (:orig_text) { "foo:bar" }
+      include_examples "parse test"
+    end
+
+    context "simple map with no braces and whitespace" do
+      let (:orig_text) { " foo : bar " }
+      include_examples "parse test"
+    end
+
+    context "include with no braces" do
+      let (:orig_text) { 'include "foo.conf" ' }
+      include_examples "parse test"
+    end
+
+    context "simple map with no braces and newlines" do
+      let (:orig_text) { "   \nfoo:bar\n    " }
+      include_examples "parse test"
+    end
+
+    context "map with no braces and all simple types" do
+      let (:orig_text) { '
+        aUnquoted : bar
+        aString = "qux"
+        aNum:123
+        aDouble=123.456
+        aTrue=true
+        aFalse=false
+        aNull=null
+        aSub =  ${a.b}
+        include "foo.conf"
+        ' }
+      include_examples "parse test"
+    end
+
+    context "empty map" do
+      let (:orig_text) { "{}" }
+      include_examples "parse test"
+    end
+
+    context "simple map with braces" do
+      let (:orig_text) { "{foo:bar}" }
+      include_examples "parse test"
+    end
+
+    context "simple map with braces and whitespace" do
+      let (:orig_text) { "{  foo  :  bar  }" }
+      include_examples "parse test"
+    end
+
+    context "simple map with braces and trailing whitespace" do
+      let (:orig_text) { "{foo:bar}     " }
+      include_examples "parse test"
+    end
+
+    context "simple map with braces and include" do
+      let (:orig_text) { '{include "foo.conf"}' }
+      include_examples "parse test"
+    end
+
+    context "simple map with braces and leading/trailing newlines" do
+      let (:orig_text) { "\n{foo:bar}\n" }
+      include_examples "parse test"
+    end
+
+    context "map with braces and all simple types" do
+      let (:orig_text) { '{
+          aUnquoted : bar
+          aString = "qux"
+          aNum:123
+          aDouble=123.456
+          aTrue=true
+          aFalse=false
+          aNull=null
+          aSub =  ${a.b}
+          include "foo.conf"
+          }' }
+      include_examples "parse test"
+    end
+
+    context "maps can be nested within other maps" do
+      let(:orig_text) {
+        '
+          foo.bar.baz : {
+            qux : "abcdefg"
+            "abc".def."ghi" : 123
+            abc = { foo:bar }
+          }
+          qux = 123.456
+          '}
+      include_examples "parse test"
+    end
+
+    context "comments can be parsed in maps" do
+      let(:orig_text) {
+        '{
+          foo: bar
+          // This is a comment
+          baz:qux // This is another comment
+         }'}
+      include_examples "parse test"
+    end
+
+    context "empty array" do
+      let (:orig_text) { "[]" }
+      include_examples "parse test"
+    end
+
+    context "single-element array" do
+      let (:orig_text) { "[foo]" }
+      include_examples "parse test"
+    end
+
+    context "trailing comma" do
+      let (:orig_text) { "[foo,]" }
+      include_examples "parse test"
+    end
+
+    context "trailing comma and whitespace" do
+      let (:orig_text) { "[foo,]     " }
+      include_examples "parse test"
+    end
+
+    context "leading and trailing whitespace" do
+      let (:orig_text) { "   \n[]\n   " }
+      include_examples "parse test"
+    end
+
+    context "array with all simple types" do
+      let (:orig_text) { '[foo, bar,"qux", 123,123.456, true,false, null, ${a.b}]' }
+      include_examples "parse test"
+    end
+
+    context "array with all simple types and weird whitespace" do
+      let (:orig_text) { '[foo,   bar,"qux"    , 123 ,  123.456, true,false, null,   ${a.b}   ]' }
+      include_examples "parse test"
+    end
+
+    context "basic concatenation inside an array" do
+      let (:orig_text) { "[foo bar baz qux]" }
+      include_examples "parse test"
+    end
+
+    context "basic concatenation inside a map" do
+      let (:orig_text) { "{foo: foo bar baz qux}" }
+      include_examples "parse test"
+    end
+
+    context "complex concatenation in an array with multiple elements" do
+      let (:orig_text) { "[abc 123 123.456 null true false [1, 2, 3] {a:b}, 2]" }
+      include_examples "parse test"
+    end
+
+    context "complex node with all types" do
+      let (:orig_text) {
+        '{
+          foo: bar baz    qux    ernie
+          // The above was a concatenation
+
+          baz   =   [ abc 123, {a:12
+                                b: {
+                                  c: 13
+                                  d: {
+                                    a: 22
+                                    b: "abcdefg" # this is a comment
+                                    c: [1, 2, 3]
+                                  }
+                                }
+                                }, # this was an object in an array
+                                //The above value is a map containing a map containing a map, all in an array
+                                22,
+                                // The below value is an array contained in another array
+                                [1,2,3]]
+          // This is a map with some nested maps and arrays within it, as well as some concatenations
+          qux {
+            baz: abc 123
+            bar: {
+              baz: abcdefg
+              bar: {
+                a: null
+                b: true
+                c: [true false 123, null, [1, 2, 3]]
+              }
+            }
+          }
+        // Did I cover everything?
+        }'
+      }
+      include_examples "parse test"
+    end
+
+    context "can correctly parse a JSON string" do
+      it "should correctly parse and render a JSON string" do
+        orig_text =
+            '{
+              "foo": "bar",
+              "baz": 123,
+              "qux": true,
+              "array": [
+                {"a": true,
+                 "c": false},
+                12
+              ]
+           }
+        '
+        node = ConfigDocumentParser.parse(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+        expect(node.render).to eq(orig_text)
+      end
+    end
+  end
+
+  context "parse JSON failures" do
+    context "JSON does not support concatenations" do
+      let (:orig_text) { '{ "foo": 123 456 789 } ' }
+      let (:contains_message) { "Expecting close brace } or a comma" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON must begin with { or [" do
+      let (:orig_text) { '"a": 123, "b": 456' }
+      let (:contains_message) { "Document must have an object or array at root" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support unquoted text" do
+      let (:orig_text) { '{"foo": unquotedtext}' }
+      let (:contains_message) { "Token not allowed in valid JSON" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support substitutions" do
+      let (:orig_text) { '{"foo": ${"a.b"}}' }
+      let (:contains_message) { "Substitutions (${} syntax) not allowed in JSON" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support multi-element paths" do
+      let (:orig_text) { '{"foo"."bar": 123}' }
+      let (:contains_message) { "Token not allowed in valid JSON" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support =" do
+      let (:orig_text) { '{"foo"=123}' }
+      let (:contains_message) { "Key '\"foo\"' may not be followed by token: '='" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support +=" do
+      let (:orig_text) { '{"foo" += "bar"}' }
+      let (:contains_message) { "Key '\"foo\"' may not be followed by token: '+='" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support duplicate keys" do
+      let (:orig_text) { '{"foo" : 123, "foo": 456}' }
+      let (:contains_message) { "JSON does not allow duplicate fields" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support trailing commas" do
+      let (:orig_text) { '{"foo" : 123,}' }
+      let (:contains_message) { "expecting a field name after a comma, got a close brace } instead" }
+      include_examples "parse JSON failures test"
+    end
+
+    context "JSON does not support empty documents" do
+      let (:orig_text) { '' }
+      let (:contains_message) { "Empty document" }
+      include_examples "parse JSON failures test"
+    end
+  end
+
+  context "parse single values" do
+    let (:final_text) { nil }
+
+    context "parse a single integer" do
+      let (:orig_text) { "123" }
+      include_examples "parse simple value test"
+    end
+
+    context "parse a single double" do
+      let (:orig_text) { "123.456" }
+      include_examples "parse simple value test"
+    end
+
+    context "parse a single string" do
+      let (:orig_text) { '"a string"' }
+      include_examples "parse simple value test"
+    end
+
+    context "parse true" do
+      let (:orig_text) { "true" }
+      include_examples "parse simple value test"
+    end
+
+    context "parse false" do
+      let (:orig_text) { "false" }
+      include_examples "parse simple value test"
+    end
+
+    context "parse null" do
+      let (:orig_text) { "null" }
+      include_examples "parse simple value test"
+    end
+
+    context "parse a map" do
+      let (:orig_text) { '{"a": "b"}' }
+      include_examples "parse complex value test"
+    end
+
+    context "parse an array" do
+      let (:orig_text) { '[1, 2, 3]' }
+      include_examples "parse complex value test"
+    end
+
+    it "should parse concatenations when using CONF syntax" do
+      orig_text = "123 456 \"abc\""
+      node = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(orig_text)
+    end
+
+    it "should parse keys with no separators and object values with CONF parsing" do
+      orig_text = '{"foo" { "bar" : 12 } }'
+      node = ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.render).to eq(orig_text)
+    end
+  end
+
+  context "parse single values failures" do
+    context "throws on leading whitespace" do
+      let (:orig_text) { "   123" }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on trailing whitespace" do
+      let (:orig_text) { "123   " }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on leading and trailing whitespace" do
+      let (:orig_text) { " 123 " }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on leading newline" do
+      let (:orig_text) { "\n123" }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on trailing newline" do
+      let (:orig_text) { "123\n" }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on leading and trailing newline" do
+      let (:orig_text) { "\n123\n" }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on leading and trailing comments" do
+      let (:orig_text) { "#thisisacomment\n123#comment" }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on whitespace after a concatenation" do
+      let (:orig_text) { "123 456 789   " }
+      include_examples "parse leading trailing failure"
+    end
+
+    context "throws on unquoted text in JSON" do
+      let (:orig_text) { "unquotedtext" }
+      let (:contains_message) { "Token not allowed in valid JSON" }
+      include_examples("parse single value invalid JSON test")
+    end
+
+    context "throws on substitutions in JSON" do
+      let (:orig_text) { "${a.b}" }
+      let (:contains_message) { "Substitutions (${} syntax) not allowed in JSON" }
+      include_examples("parse single value invalid JSON test")
+    end
+
+    it "should throw an error when parsing concatenations in JSON" do
+      orig_text = "123 456 \"abc\""
+      e = TestUtils.intercept(Hocon::ConfigError) {
+        ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      }
+      expect(e.message).to include("Parsing JSON and the value set in setValue was either a concatenation or had trailing whitespace, newlines, or comments")
+    end
+
+    it "should throw an error when parsing keys with no separators in JSON" do
+      orig_text = '{"foo" { "bar" : 12 } }'
+      e = TestUtils.intercept(Hocon::ConfigError) {
+        ConfigDocumentParser.parse_value(TestUtils.tokenize_from_s(orig_text), TestUtils.fake_origin, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      }
+      expect(e.message).to include("Key '\"foo\"' may not be followed by token: '{'")
+    end
+  end
+
+  context "parse empty document" do
+    it "should parse an empty document with CONF syntax" do
+      node = ConfigDocumentParser.parse(TestUtils.tokenize_from_s(""), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.value).to be_a(Hocon::Impl::ConfigNodeObject)
+      expect(node.value.children.empty?).to be_truthy
+    end
+
+    it "should parse a document with only comments and whitespace with CONF syntax" do
+      node = ConfigDocumentParser.parse(TestUtils.tokenize_from_s("#comment\n#comment\n\n"), TestUtils.fake_origin, ConfigParseOptions.defaults)
+      expect(node.value).to be_a(Hocon::Impl::ConfigNodeObject)
+    end
+
+  end
+end
\ No newline at end of file
diff --git a/spec/unit/typesafe/config/config_document_spec.rb b/spec/unit/typesafe/config/config_document_spec.rb
new file mode 100644
index 0000000..a4c42cc
--- /dev/null
+++ b/spec/unit/typesafe/config/config_document_spec.rb
@@ -0,0 +1,576 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'hocon/parser/config_document_factory'
+require 'hocon/config_value_factory'
+require 'test_utils'
+
+describe "ConfigDocument" do
+  ConfigDocumentFactory = Hocon::Parser::ConfigDocumentFactory
+  ConfigParseOptions = Hocon::ConfigParseOptions
+  ConfigSyntax = Hocon::ConfigSyntax
+  SimpleConfigDocument = Hocon::Impl::SimpleConfigDocument
+  ConfigValueFactory = Hocon::ConfigValueFactory
+
+  shared_examples_for "config document replace JSON test" do
+    let (:config_document) { ConfigDocumentFactory.parse_string(orig_text, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON)) }
+    it "should correctly render the parsed JSON document" do
+      expect(config_document.render).to eq(orig_text)
+    end
+
+    it "should perform a successful replacement on the parsed JSON document" do
+      new_document = config_document.set_value(replace_path, new_value)
+      #expect(new_document).to be_a(SimpleConfigDocument)
+      expect(new_document.render).to eq(final_text)
+    end
+  end
+
+  shared_examples_for "config document replace CONF test" do
+    let (:config_document) { ConfigDocumentFactory.parse_string(orig_text) }
+    it "should correctly render the parsed CONF document" do
+      expect(config_document.render).to eq(orig_text)
+    end
+
+    it "should perform a successful replacement on the parsed CONF document" do
+      new_document = config_document.set_value(replace_path, new_value)
+      #expect(new_document).to be_a(SimpleConfigDocument)
+      expect(new_document.render).to eq(final_text)
+    end
+  end
+
+  context "ConfigDocument replace" do
+    let (:orig_text) {
+      '{
+              "a":123,
+              "b": 123.456,
+              "c": true,
+              "d": false,
+              "e": null,
+              "f": "a string",
+              "g": [1,2,3,4,5],
+              "h": {
+                "a": 123,
+                "b": {
+                  "a": 12
+                },
+                "c": [1, 2, 3, {"a": "b"}, [1,2,3]]
+              }
+             }'
+    }
+    context "parsing/replacement with a very simple map" do
+      let(:orig_text) { '{"a":1}' }
+      let(:final_text) { '{"a":2}' }
+      let (:new_value) { "2" }
+      let (:replace_path) { "a" }
+      include_examples "config document replace CONF test"
+      include_examples "config document replace JSON test"
+    end
+
+    context "parsing/replacement with a map without surrounding braces" do
+      let (:orig_text) { "a: b\nc = d" }
+      let (:final_text) { "a: b\nc = 12" }
+      let (:new_value) { "12" }
+      let (:replace_path) { "c" }
+      include_examples "config document replace CONF test"
+    end
+
+    context "parsing/replacement with a complicated map" do
+      let (:final_text) {
+        '{
+              "a":123,
+              "b": 123.456,
+              "c": true,
+              "d": false,
+              "e": null,
+              "f": "a string",
+              "g": [1,2,3,4,5],
+              "h": {
+                "a": 123,
+                "b": {
+                  "a": "i am now a string"
+                },
+                "c": [1, 2, 3, {"a": "b"}, [1,2,3]]
+              }
+             }'
+      }
+      let (:new_value) { '"i am now a string"' }
+      let (:replace_path) { "h.b.a" }
+      include_examples "config document replace CONF test"
+      include_examples "config document replace JSON test"
+    end
+
+    context "replacing values with maps" do
+      let (:final_text) {
+        '{
+              "a":123,
+              "b": 123.456,
+              "c": true,
+              "d": false,
+              "e": null,
+              "f": "a string",
+              "g": [1,2,3,4,5],
+              "h": {
+                "a": 123,
+                "b": {
+                  "a": {"a":"b", "c":"d"}
+                },
+                "c": [1, 2, 3, {"a": "b"}, [1,2,3]]
+              }
+             }' }
+      let (:new_value) { '{"a":"b", "c":"d"}' }
+      let (:replace_path) { "h.b.a" }
+      include_examples "config document replace CONF test"
+      include_examples "config document replace JSON test"
+    end
+
+    context "replacing values with arrays" do
+      let (:final_text) {
+        '{
+              "a":123,
+              "b": 123.456,
+              "c": true,
+              "d": false,
+              "e": null,
+              "f": "a string",
+              "g": [1,2,3,4,5],
+              "h": {
+                "a": 123,
+                "b": {
+                  "a": [1,2,3,4,5]
+                },
+                "c": [1, 2, 3, {"a": "b"}, [1,2,3]]
+              }
+             }' }
+      let (:new_value) { "[1,2,3,4,5]" }
+      let (:replace_path) { "h.b.a" }
+      include_examples "config document replace CONF test"
+      include_examples "config document replace JSON test"
+    end
+
+    context "replacing values with concatenations" do
+      let (:final_text) {
+        '{
+              "a":123,
+              "b": 123.456,
+              "c": true,
+              "d": false,
+              "e": null,
+              "f": "a string",
+              "g": [1,2,3,4,5],
+              "h": {
+                "a": 123,
+                "b": {
+                  "a": this is a concatenation 123 456 {a:b} [1,2,3] {a: this is another 123 concatenation null true}
+                },
+                "c": [1, 2, 3, {"a": "b"}, [1,2,3]]
+              }
+             }' }
+      let (:new_value) { "this is a concatenation 123 456 {a:b} [1,2,3] {a: this is another 123 concatenation null true}" }
+      let (:replace_path) { "h.b.a" }
+      include_examples "config document replace CONF test"
+    end
+  end
+
+  context "config document multi element duplicates removed" do
+    it "should remove all duplicates when setting a value" do
+      orig_text = "{a: b, a.b.c: d, a: e}"
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_doc.set_value("a", "2").render).to eq("{a: 2}")
+    end
+
+    it "should keep a trailing comma if succeeding elements were removed in CONF" do
+      orig_text = "{a: b, a: e, a.b.c: d}"
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_doc.set_value("a", "2").render).to eq("{a: 2, }")
+    end
+
+    it "should add the setting if only a multi-element duplicate exists" do
+      orig_text = "{a.b.c: d}"
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_doc.set_value("a", "2").render).to eq("{ a: 2}")
+    end
+  end
+
+  context "config document set new value brace root" do
+    let (:orig_text) { "{\n\t\"a\":\"b\",\n\t\"c\":\"d\"\n}" }
+    let (:new_value) { "\"f\"" }
+    let (:replace_path) { "\"e\"" }
+
+    context "set a new value in CONF" do
+      let (:final_text) { "{\n\t\"a\":\"b\",\n\t\"c\":\"d\"\n\t\"e\": \"f\"\n}" }
+      include_examples "config document replace CONF test"
+    end
+
+    context "set a new value in JSON" do
+      let (:final_text) { "{\n\t\"a\":\"b\",\n\t\"c\":\"d\",\n\t\"e\": \"f\"\n}" }
+      include_examples "config document replace JSON test"
+    end
+  end
+
+  context "config document set new value no braces" do
+    let (:orig_text) { "\"a\":\"b\",\n\"c\":\"d\"\n" }
+    let (:final_text) { "\"a\":\"b\",\n\"c\":\"d\"\n\"e\": \"f\"\n" }
+    let (:new_value) { "\"f\"" }
+    let (:replace_path) { "\"e\"" }
+
+    include_examples "config document replace CONF test"
+  end
+
+  context "config document set new value multi level CONF" do
+    let (:orig_text) { "a:b\nc:d" }
+    let (:final_text) { "a:b\nc:d\ne: {\n  f: {\n    g: 12\n  }\n}" }
+    let (:new_value) { "12" }
+    let (:replace_path) { "e.f.g" }
+
+    include_examples "config document replace CONF test"
+  end
+
+  context "config document set new value multi level JSON" do
+    let (:orig_text) { "{\"a\":\"b\",\n\"c\":\"d\"}" }
+    let (:final_text) { "{\"a\":\"b\",\n\"c\":\"d\",\n  \"e\": {\n    \"f\": {\n      \"g\": 12\n    }\n  }}" }
+    let (:new_value) { "12" }
+    let (:replace_path) { "e.f.g" }
+
+    include_examples "config document replace JSON test"
+  end
+
+  context "config document set new config value" do
+    let (:orig_text) { "{\"a\": \"b\"}" }
+    let (:final_text) { "{\"a\": 12}" }
+    let (:config_doc_hocon) { ConfigDocumentFactory.parse_string(orig_text) }
+    let (:config_doc_json) { ConfigDocumentFactory.parse_string(orig_text, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON)) }
+    let (:new_value) { ConfigValueFactory.from_any_ref(12) }
+
+    it "should successfuly render the original text from both documents" do
+      expect(config_doc_hocon.render).to eq(orig_text)
+      expect(config_doc_json.render).to eq(orig_text)
+    end
+
+    it "should succesfully set a new value on both documents" do
+      expect(config_doc_hocon.set_config_value("a", new_value).render).to eq(final_text)
+      expect(config_doc_json.set_config_value("a", new_value).render).to eq(final_text)
+    end
+  end
+
+  context "config document has value" do
+    let (:orig_text) { "{a: b, a.b.c.d: e, c: {a: {b: c}}}" }
+    let (:config_doc) { ConfigDocumentFactory.parse_string(orig_text) }
+
+    it "should return true on paths that exist in the document" do
+      expect(config_doc.has_value?("a")).to be_truthy
+      expect(config_doc.has_value?("a.b.c")).to be_truthy
+      expect(config_doc.has_value?("c.a.b")).to be_truthy
+    end
+
+    it "should return false on paths that don't exist in the document" do
+      expect(config_doc.has_value?("c.a.b.c")).to be_falsey
+      expect(config_doc.has_value?("a.b.c.d.e")).to be_falsey
+      expect(config_doc.has_value?("this.does.not.exist")).to be_falsey
+    end
+  end
+
+  context "config document remove value" do
+    let (:orig_text) { "{a: b, a.b.c.d: e, c: {a: {b: c}}}" }
+    let (:config_doc) { ConfigDocumentFactory.parse_string(orig_text) }
+
+    it "should remove a top-level setting with a simple value" do
+      expect(config_doc.remove_value("a").render).to eq("{c: {a: {b: c}}}")
+    end
+
+    it "should remove a top-level setting with a complex value" do
+      expect(config_doc.remove_value("c").render).to eq("{a: b, a.b.c.d: e, }")
+    end
+
+    it "should do nothing if the setting does not exist" do
+      expect(config_doc.remove_value("this.does.not.exist")).to eq(config_doc)
+    end
+  end
+
+  context "config document remove value JSON" do
+    it "should not leave a trailing comma when removing a value in JSON" do
+      orig_text = '{"a": "b", "c": "d"}'
+      config_doc = ConfigDocumentFactory.parse_string(orig_text, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+      expect(config_doc.remove_value("c").render).to eq('{"a": "b" }')
+    end
+  end
+
+  context "config document remove multiple" do
+    it "should remove duplicate nested keys" do
+      orig_text = "a { b: 42 }, a.b = 43, a { b: { c: 44 } }"
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      removed = config_doc.remove_value("a.b")
+      expect(removed.render).to eq("a { }, a { }")
+    end
+  end
+
+  context "config document remove overridden" do
+    it "should remove all instances of keys even if overridden by a top-level key/value pair" do
+      orig_text = "a { b: 42 }, a.b = 43, a { b: { c: 44 } }, a: 57 "
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      removed = config_doc.remove_value("a.b")
+      expect(removed.render).to eq("a { }, a { }, a: 57 ")
+    end
+  end
+
+  context "config document remove nested" do
+    it "should remove nested keys if specified" do
+      orig_text = "a { b: 42 }, a.b = 43, a { b: { c: 44 } }"
+      config_doc = ConfigDocumentFactory.parse_string(orig_text)
+      removed = config_doc.remove_value("a.b.c")
+      expect(removed.render).to eq("a { b: 42 }, a.b = 43, a { b: { } }")
+    end
+  end
+
+  context "config document array failures" do
+    let (:orig_text) { "[1, 2, 3, 4, 5]" }
+    let (:document) { ConfigDocumentFactory.parse_string(orig_text) }
+
+    it "should throw when set_value is called and there is an array at the root" do
+      e = TestUtils.intercept(Hocon::ConfigError) { document.set_value("a", "1") }
+      expect(e.message).to include("ConfigDocument had an array at the root level")
+    end
+
+    it "should throw when has_value is called and there is an array at the root" do
+      e = TestUtils.intercept(Hocon::ConfigError) { document.has_value?("a") }
+      expect(e.message).to include("ConfigDocument had an array at the root level")
+    end
+
+    it "should throw when remove_value is called and there is an array at the root" do
+      e = TestUtils.intercept(Hocon::ConfigError) { document.remove_value("a") }
+      expect(e.message).to include("ConfigDocument had an array at the root level")
+    end
+  end
+
+  context "config document JSON replace failure" do
+    it "should fail when trying to replace with a value using HOCON syntax in JSON" do
+      orig_text = "{\"foo\": \"bar\", \"baz\": \"qux\"}"
+      document = ConfigDocumentFactory.parse_string(orig_text, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+
+      e = TestUtils.intercept(Hocon::ConfigError) { document.set_value("foo", "unquoted") }
+      expect(e.message).to include("Token not allowed in valid JSON")
+    end
+  end
+
+  context "config document JSON replace with concatenation failure" do
+    it "should fail when trying to add a concatenation into a JSON document" do
+      orig_text = "{\"foo\": \"bar\", \"baz\": \"qux\"}"
+      document = ConfigDocumentFactory.parse_string(orig_text, ConfigParseOptions.defaults.set_syntax(ConfigSyntax::JSON))
+
+      e = TestUtils.intercept(Hocon::ConfigError) { document.set_value("foo", "1 2 3 concatenation") }
+      expect(e.message).to include("Parsing JSON and the value set in setValue was either a concatenation or had trailing whitespace, newlines, or comments")
+    end
+  end
+
+  context "config document file parse" do
+    let (:config_document) { ConfigDocumentFactory.parse_file(TestUtils.resource_file("test01.conf")) }
+    let (:file_text) {
+      file = File.open(TestUtils.resource_file("test01.conf"), "rb")
+      contents = file.read
+      file.close
+      contents
+    }
+
+    it "should correctly parse from a file" do
+      expect(config_document.render).to eq(file_text)
+    end
+  end
+
+  # skipping reader parsing, since we don't support that in ruby hocon
+
+  context "config document indentation single line object" do
+    it "should properly indent a value in a single-line map" do
+      orig_text = "a { b: c }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.d", "e").render).to eq("a { b: c, d: e }")
+    end
+
+    it "should properly indent a value in the top-level when it is on a single line" do
+      orig_text = "a { b: c }, d: e"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("f", "g").render).to eq("a { b: c }, d: e, f: g")
+    end
+
+    it "should not preserve trailing commas" do
+      orig_text = "a { b: c }, d: e,"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("f", "g").render).to eq("a { b: c }, d: e, f: g")
+    end
+
+    it "should add necessary keys along the path to the value and properly space them" do
+      orig_text = "a { b: c }, d: e,"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("f.g.h", "i").render).to eq("a { b: c }, d: e, f: { g: { h: i } }")
+    end
+
+    it "should properly indent keys added to the top-level with curly braces" do
+      orig_text = "{a { b: c }, d: e}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("f", "g").render).to eq("{a { b: c }, d: e, f: g}")
+    end
+
+    it "should add necessary keys along the path to the value and properly space them when the root has braces" do
+      orig_text = "{a { b: c }, d: e}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("f.g.h", "i").render).to eq("{a { b: c }, d: e, f: { g: { h: i } }}")
+    end
+  end
+
+  context "config document indentation multi line object" do
+    context "document with no trailing newlines" do
+      let (:orig_text) { "a {\n  b: c\n}" }
+      let (:config_document) { ConfigDocumentFactory.parse_string(orig_text) }
+
+      it "should properly indent a value in a multi-line map" do
+        expect(config_document.set_value("a.e", "f").render).to eq("a {\n  b: c\n  e: f\n}")
+      end
+
+      it "should properly add/indent any necessary objects along the way to the value" do
+        expect(config_document.set_value("a.d.e.f", "g").render).to eq("a {\n  b: c\n  d: {\n    e: {\n      f: g\n    }\n  }\n}")
+      end
+    end
+
+    context "document with multi-line root" do
+      let (:orig_text) { "a {\n b: c\n}\n" }
+      let (:config_document) { ConfigDocumentFactory.parse_string(orig_text) }
+
+      it "should properly indent a value at the root with multiple lines" do
+        expect(config_document.set_value("d", "e").render).to eq("a {\n b: c\n}\nd: e\n")
+      end
+
+      it "should properly add/indent any necessary objects along the way to the value" do
+        expect(config_document.set_value("d.e.f", "g").render).to eq("a {\n b: c\n}\nd: {\n  e: {\n    f: g\n  }\n}\n")
+      end
+    end
+  end
+
+  context "config document indentation nested" do
+    it "should properly space a new key/value pair in a nested map in a single-line document" do
+      orig_text = "a { b { c { d: e } } }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c.f", "g").render).to eq("a { b { c { d: e, f: g } } }")
+    end
+
+    it "should properly space a new key/value pair in a nested map in a multi-line document" do
+      orig_text = "a {\n  b {\n    c {\n      d: e\n    }\n  }\n}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c.f", "g").render).to eq("a {\n  b {\n    c {\n      d: e\n      f: g\n    }\n  }\n}")
+    end
+  end
+
+  context "config document indentation empty object" do
+    it "should properly space a new key/value pair in a single-line empty object" do
+      orig_text = "a { }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b", "c").render).to eq("a { b: c }")
+    end
+
+    it "should properly indent a new key/value pair in a multi-line empty object" do
+      orig_text = "a {\n  b {\n  }\n}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c", "d").render).to eq("a {\n  b {\n    c: d\n  }\n}")
+    end
+  end
+
+  context "config document indentation multi line value" do
+    let (:orig_text) { "a {\n  b {\n    c {\n      d: e\n    }\n  }\n}" }
+    let (:config_document) { ConfigDocumentFactory.parse_string(orig_text) }
+
+    it "should successfully insert and indent a multi-line object" do
+      expect(config_document.set_value("a.b.c.f", "{\n  g: h\n  i: j\n  k: {\n    l: m\n  }\n}").render
+            ).to eq("a {\n  b {\n    c {\n      d: e\n      f: {\n        g: h\n        i: j\n        k: {\n          l: m\n        }\n      }\n    }\n  }\n}")
+    end
+
+    it "should successfully insert a concatenation with a multi-line array" do
+      expect(config_document.set_value("a.b.c.f", "12 13 [1,\n2,\n3,\n{\n  a:b\n}]").render
+            ).to eq("a {\n  b {\n    c {\n      d: e\n      f: 12 13 [1,\n      2,\n      3,\n      {\n        a:b\n      }]\n    }\n  }\n}")
+    end
+  end
+
+  context "config document indentation multi line value single line object" do
+    it "should get weird indentation when adding a multi-line value to a single-line object" do
+      orig_text = "a { b { } }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c", "{\n  c:d\n}").render).to eq("a { b { c: {\n   c:d\n } } }")
+    end
+  end
+
+  context "config document indentation single line object containing multi line value" do
+    it "should treat an object with no new-lines outside of its values as a single-line object" do
+      orig_text = "a { b {\n  c: d\n} }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.e", "f").render).to eq("a { b {\n  c: d\n}, e: f }")
+    end
+  end
+
+  context "config document indentation replacing with multi line value" do
+    it "should properly indent a multi-line value when replacing a single-line value" do
+      orig_text = "a {\n  b {\n    c: 22\n  }\n}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c", "{\n  d:e\n}").render).to eq("a {\n  b {\n    c: {\n      d:e\n    }\n  }\n}")
+    end
+
+    it "should properly indent a multi-line value when replacing a single-line value in an object with multiple keys" do
+      orig_text = "a {\n  b {\n                f: 10\n    c: 22\n  }\n}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.c", "{\n  d:e\n}").render).to eq("a {\n  b {\n                f: 10\n    c: {\n      d:e\n    }\n  }\n}")
+    end
+  end
+
+  context "config document indentation value with include" do
+    it "should indent an include node" do
+      orig_text = "a {\n  b {\n    c: 22\n  }\n}"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b.d", "{\n  include \"foo\"\n  e:f\n}").render
+            ).to eq("a {\n  b {\n    c: 22\n    d: {\n      include \"foo\"\n      e:f\n    }\n  }\n}")
+    end
+  end
+
+  context "config document indentation based on include node" do
+    it "should indent properly when only an include node is present in the object in which the value is inserted" do
+      orig_text = "a: b\n      include \"foo\"\n"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("c", "d").render).to eq("a: b\n      include \"foo\"\n      c: d\n")
+    end
+  end
+
+  context "insertion into an empty document" do
+    it "should successfully insert a value into an empty document" do
+      orig_text = ""
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a", "1").render).to eq("a: 1")
+    end
+
+    it "should successfully insert a multi-line object into an empty document" do
+      orig_text = ""
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      expect(config_document.set_value("a.b", "1").render).to eq("a: {\n  b: 1\n}")
+    end
+
+    it "should successfully insert a hash into an empty document" do
+      orig_text = ""
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      map_val = ConfigValueFactory.from_any_ref({"a" => 1, "b" => 2})
+
+      expect(config_document.set_config_value("a", map_val).render).to eq("a: {\n    \"a\": 1,\n    \"b\": 2\n}")
+    end
+
+    it "should successfully insert an array into an empty document" do
+      orig_text = ""
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+      array_val = ConfigValueFactory.from_any_ref([1,2])
+
+      expect(config_document.set_config_value("a", array_val).render).to eq("a: [\n    1,\n    2\n]")
+    end
+  end
+
+  context "can insert a map parsed with ConfigValueFactory" do
+    it "should successfully insert a map into a document" do
+      orig_text = "{ a: b }"
+      config_document = ConfigDocumentFactory.parse_string(orig_text)
+
+      map = ConfigValueFactory.from_any_ref({"a" => 1, "b" => 2})
+      expect(config_document.set_config_value("a", map).render).to eq("{ a: {\n     \"a\": 1,\n     \"b\": 2\n } }")
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/config_factory_spec.rb b/spec/unit/typesafe/config/config_factory_spec.rb
new file mode 100644
index 0000000..d3258c5
--- /dev/null
+++ b/spec/unit/typesafe/config/config_factory_spec.rb
@@ -0,0 +1,120 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon/config_factory'
+require 'hocon/config_render_options'
+require 'hocon/config_error'
+
+def get_comment_config_hash(config_string)
+  split_config_string = config_string.split("\n")
+  r = Regexp.new('^\s*#')
+
+  previous_string_comment = false
+  hash = {}
+  comment_list = []
+
+  split_config_string.each do |s|
+    if r.match(s)
+      comment_list << s
+      previous_string_comment = true
+    else
+      if previous_string_comment
+        hash[s] = comment_list
+        comment_list = []
+      end
+      previous_string_comment = false
+    end
+  end
+  return hash
+end
+
+describe Hocon::ConfigFactory do
+  let(:render_options) { Hocon::ConfigRenderOptions.defaults }
+
+  before do
+    render_options.origin_comments = false
+    render_options.json = false
+  end
+
+  shared_examples_for "config_factory_parsing" do
+    let(:input_file)  { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input#{extension}" }
+    let(:output_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/output.conf" }
+    let(:expected)    { example[:hash] }
+    let(:reparsed)    { Hocon::ConfigFactory.parse_file("#{output_file}") }
+    let(:output)      { File.read("#{output_file}") }
+
+    it "should make the config data available as a map" do
+      expect(conf.root.unwrapped).to eq(expected)
+    end
+
+    it "should render the config data to a string with comments intact" do
+      rendered_conf = conf.root.render(render_options)
+      rendered_conf_comment_hash = get_comment_config_hash(rendered_conf)
+      output_comment_hash = get_comment_config_hash(output)
+
+      expect(rendered_conf_comment_hash).to eq(output_comment_hash)
+    end
+
+    it "should generate the same conf data via re-parsing the rendered output" do
+      expect(reparsed.root.unwrapped).to eq(expected)
+    end
+  end
+
+  context "example1" do
+    let(:example) { EXAMPLE1 }
+    let (:extension) { ".conf" }
+
+    context "parsing a HOCON string" do
+      let(:string) { File.open(input_file).read }
+      let(:conf) { Hocon::ConfigFactory.parse_string(string) }
+      include_examples "config_factory_parsing"
+    end
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_factory_parsing"
+    end
+  end
+
+  context "example2" do
+    let(:example) { EXAMPLE2 }
+    let (:extension) { ".conf" }
+
+    context "parsing a HOCON string" do
+      let(:string) { File.open(input_file).read }
+      let(:conf) { Hocon::ConfigFactory.parse_string(string) }
+      include_examples "config_factory_parsing"
+    end
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_factory_parsing"
+    end
+  end
+
+  context "example3" do
+    let (:example) { EXAMPLE3 }
+    let (:extension) { ".conf" }
+
+    context "loading a HOCON file with substitutions" do
+      let(:conf) { Hocon::ConfigFactory.load_file(input_file) }
+      include_examples "config_factory_parsing"
+    end
+  end
+
+  context "example4" do
+    let(:example) { EXAMPLE4 }
+    let (:extension) { ".json" }
+
+    context "parsing a .json file" do
+      let (:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_factory_parsing"
+    end
+  end
+
+  context "example5" do
+    it "should raise a ConfigParseError when given an invalid .conf file" do
+      expect{Hocon::ConfigFactory.parse_string("abcdefg")}.to raise_error(Hocon::ConfigError::ConfigParseError)
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/config_node_spec.rb b/spec/unit/typesafe/config/config_node_spec.rb
new file mode 100644
index 0000000..12b7341
--- /dev/null
+++ b/spec/unit/typesafe/config/config_node_spec.rb
@@ -0,0 +1,552 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+
+describe Hocon::Parser::ConfigNode do
+  Tokens = Hocon::Impl::Tokens
+
+  shared_examples_for "single token node test" do
+    it "should render the node with the text of the token" do
+      node = TestUtils.config_node_single_token(token)
+      expect(node.render).to eq(token.token_text)
+    end
+  end
+
+  shared_examples_for "key node test" do
+    it "should render the node with the text of the path" do
+      node = TestUtils.config_node_key(path)
+      expect(path).to eq(node.render)
+    end
+  end
+
+  shared_examples_for "simple value node test" do
+    it "should render the original token text" do
+      node = TestUtils.config_node_simple_value(token)
+      expect(node.render).to eq(token.token_text)
+    end
+  end
+
+  shared_examples_for "field node test" do
+    it "should properly replace the value of a field node" do
+      key_val_node = TestUtils.node_key_value_pair(key, value)
+      expect(key_val_node.render).to eq("#{key.render}: #{value.render}")
+      expect(key_val_node.path.render).to eq(key.render)
+      expect(key_val_node.value.render).to eq(value.render)
+
+      new_key_val_node = key_val_node.replace_value(new_value)
+      expect(new_key_val_node.render).to eq("#{key.render}: #{new_value.render}")
+      expect(new_key_val_node.value.render).to eq(new_value.render)
+    end
+  end
+
+  shared_examples_for "top level value replace test" do
+    it "should replace a value in a ConfigNodeObject" do
+      complex_node_children = [TestUtils.node_open_brace,
+                               TestUtils.node_key_value_pair(TestUtils.config_node_key(key), value),
+                               TestUtils.node_close_brace]
+      complex_node = TestUtils.config_node_object(complex_node_children)
+      new_node = complex_node.set_value_on_path(key, new_value)
+      orig_text = "{#{key}: #{value.render}}"
+      final_text = "{#{key}: #{new_value.render}}"
+
+      expect(complex_node.render).to eq(orig_text)
+      expect(new_node.render).to eq(final_text)
+    end
+  end
+
+  shared_examples_for "replace duplicates test" do
+    it "should remove duplicates of a key when setting a value" do
+      key = TestUtils.config_node_key('foo')
+      key_val_pair_1 = TestUtils.node_key_value_pair(key, value1)
+      key_val_pair_2 = TestUtils.node_key_value_pair(key, value2)
+      key_val_pair_3 = TestUtils.node_key_value_pair(key, value3)
+      complex_node = TestUtils.config_node_object([key_val_pair_1, key_val_pair_2, key_val_pair_3])
+      orig_text = "#{key_val_pair_1.render}#{key_val_pair_2.render}#{key_val_pair_3.render}"
+      final_text = "#{key.render}: 15"
+
+      expect(complex_node.render).to eq(orig_text)
+      expect(complex_node.set_value_on_path("foo", TestUtils.node_int(15)).render).to eq(final_text)
+    end
+  end
+
+  shared_examples_for "non existent path test" do
+    it "should properly add a key/value pair if the key does not exist in the object" do
+      node = TestUtils.config_node_object([TestUtils.node_key_value_pair(TestUtils.config_node_key("bar"), TestUtils.node_int(15))])
+      expect(node.render).to eq('bar: 15')
+      new_node = node.set_value_on_path('foo', value)
+      final_text = "bar: 15, foo: #{value.render}"
+      expect(new_node.render).to eq(final_text)
+    end
+  end
+
+  ########################
+  # ConfigNodeSingleToken
+  ########################
+  context "create basic config node" do
+    # Ensure a ConfigNodeSingleToken can handle all its required token types
+    context "start of file" do
+      let(:token) { Tokens::START }
+      include_examples "single token node test"
+    end
+
+    context "end of file" do
+      let(:token) { Tokens::EOF }
+      include_examples "single token node test"
+    end
+
+    context "{" do
+      let (:token) { Tokens::OPEN_CURLY }
+      include_examples "single token node test"
+    end
+
+    context "}" do
+      let (:token) { Tokens::CLOSE_CURLY }
+      include_examples "single token node test"
+    end
+
+    context "[" do
+      let (:token) { Tokens::OPEN_SQUARE }
+      include_examples "single token node test"
+    end
+
+    context "]" do
+      let (:token) { Tokens::CLOSE_SQUARE }
+      include_examples "single token node test"
+    end
+
+    context "," do
+      let (:token) { Tokens::COMMA }
+      include_examples "single token node test"
+    end
+
+    context "=" do
+      let (:token) { Tokens::EQUALS }
+      include_examples "single token node test"
+    end
+
+    context ":" do
+      let (:token) { Tokens::COLON }
+      include_examples "single token node test"
+    end
+
+    context "+=" do
+      let (:token) { Tokens::PLUS_EQUALS }
+      include_examples "single token node test"
+    end
+
+    context "unquoted text" do
+      let (:token) { TestUtils.token_unquoted('             ') }
+      include_examples "single token node test"
+    end
+
+    context "ignored whitespace" do
+      let (:token) { TestUtils.token_whitespace('             ') }
+      include_examples "single token node test"
+    end
+
+    context '\n' do
+      let (:token) { TestUtils.token_line(1) }
+      include_examples "single token node test"
+    end
+
+    context "double slash comment" do
+      let (:token) { TestUtils.token_comment_double_slash(" this is a double slash comment  ") }
+      include_examples "single token node test"
+    end
+
+    context "hash comment" do
+      let (:token) { TestUtils.token_comment_hash(" this is a hash comment  ") }
+      include_examples "single token node test"
+    end
+  end
+
+  ####################
+  # ConfigNodeSetting
+  ####################
+  context "create config node setting" do
+    # Ensure a ConfigNodeSetting can handle the normal key types
+    context "unquoted key" do
+      let (:path) { "foo" }
+      include_examples "key node test"
+    end
+
+    context "quoted_key" do
+      let (:path) { "\"Hello I am a key how are you today\"" }
+      include_examples "key node test"
+    end
+  end
+
+  context "path node subpath" do
+    it "should produce correct subpaths of path nodes with subpath method" do
+      orig_path = 'a.b.c."@$%#@!@#$"."".1234.5678'
+      path_node = TestUtils.config_node_key(orig_path)
+
+      expect(path_node.render).to eq(orig_path)
+      expect(path_node.sub_path(2).render).to eq('c."@$%#@!@#$"."".1234.5678')
+      expect(path_node.sub_path(6).render).to eq('5678')
+    end
+  end
+
+  ########################
+  # ConfigNodeSimpleValue
+  ########################
+  context "create config node simple value" do
+    context "integer" do
+      let (:token) { TestUtils.token_int(10) }
+      include_examples "simple value node test"
+    end
+
+    context "double" do
+      let (:token) { TestUtils.token_double(3.14159) }
+      include_examples "simple value node test"
+    end
+
+    context "false" do
+      let (:token) { TestUtils.token_false }
+      include_examples "simple value node test"
+    end
+
+    context "true" do
+      let (:token) { TestUtils.token_true }
+      include_examples "simple value node test"
+    end
+
+    context "null" do
+      let (:token) { TestUtils.token_null }
+      include_examples "simple value node test"
+    end
+
+    context "quoted text" do
+      let (:token) { TestUtils.token_string("Hello my name is string") }
+      include_examples "simple value node test"
+    end
+
+    context "unquoted text" do
+      let (:token) { TestUtils.token_unquoted("mynameisunquotedstring") }
+      include_examples "simple value node test"
+    end
+
+    context "key substitution" do
+      let (:token) { TestUtils.token_key_substitution("c.d") }
+      include_examples "simple value node test"
+    end
+
+    context "optional substitution" do
+      let (:token) { TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")) }
+      include_examples "simple value node test"
+    end
+
+    context "substitution" do
+      let (:token) { TestUtils.token_substitution(TestUtils.token_unquoted("a.b")) }
+      include_examples "simple value node test"
+    end
+  end
+
+  ####################
+  # ConfigNodeField
+  ####################
+  context "create ConfigNodeField" do
+    let (:key) { TestUtils.config_node_key('"abc"') }
+    let (:value) { TestUtils.node_int(123) }
+
+    context "supports quoted keys" do
+      let (:new_value) { TestUtils.node_int(245) }
+      include_examples "field node test"
+    end
+
+    context "supports unquoted keys" do
+      let (:key) { TestUtils.config_node_key('abc') }
+      let (:new_value) { TestUtils.node_int(245) }
+      include_examples "field node test"
+    end
+
+    context "can replace a simple value with a different type of simple value" do
+      let (:new_value) { TestUtils.node_string('I am a string') }
+      include_examples "field node test"
+    end
+
+    context "can replace a simple value with a complex value" do
+      let (:new_value) { TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_close_brace]) }
+      include_examples "field node test"
+    end
+  end
+
+  ####################
+  # Node Replacement
+  ####################
+  context "replace nodes" do
+    let (:key) { "foo" }
+    array = TestUtils.config_node_array([TestUtils.node_open_bracket, TestUtils.node_int(10), TestUtils.node_space, TestUtils.node_comma,
+                                         TestUtils.node_space, TestUtils.node_int(15), TestUtils.node_close_bracket])
+    nested_map = TestUtils.config_node_object([TestUtils.node_open_brace,
+                                               TestUtils.node_key_value_pair(TestUtils.config_node_key("abc"),
+                                                                             TestUtils.config_node_simple_value(TestUtils.token_string("a string"))),
+                                               TestUtils.node_close_brace])
+
+    context "replace an integer with an integer" do
+      let (:value) { TestUtils.node_int(10) }
+      let (:new_value) { TestUtils.node_int(15) }
+      include_examples "top level value replace test"
+    end
+
+    context "replace a double with an integer" do
+      let (:value) { TestUtils.node_double(3.14159) }
+      let (:new_value) { TestUtils.node_int(10000) }
+      include_examples "top level value replace test"
+    end
+
+    context "replace false with true" do
+      let (:value) { TestUtils.node_false }
+      let (:new_value) { TestUtils.node_true }
+      include_examples "top level value replace test"
+    end
+
+    context "replace true with null" do
+      let (:value) { TestUtils.node_true }
+      let (:new_value) { TestUtils.node_null }
+      include_examples "top level value replace test"
+    end
+
+    context "replace null with a string" do
+      let (:value) { TestUtils.node_null }
+      let (:new_value) { TestUtils.node_string("Hello my name is string") }
+      include_examples "top level value replace test"
+    end
+
+    context "replace a string with unquoted text" do
+      let (:value) { TestUtils.node_string("Hello my name is string") }
+      let (:new_value) { TestUtils.node_unquoted_text("mynameisunquotedstring") }
+      include_examples "top level value replace test"
+    end
+
+    context "replace unquoted text with a key substitution" do
+      let (:value) { TestUtils.node_unquoted_text("mynameisunquotedstring") }
+      let (:new_value) { TestUtils.node_key_substitution("c.d") }
+      include_examples "top level value replace test"
+    end
+
+    context "replace int with an optional substitution" do
+      let (:value) { TestUtils.node_int(10) }
+      let (:new_value) { TestUtils.node_optional_substitution(TestUtils.token_unquoted("x.y")) }
+      include_examples "top level value replace test"
+    end
+
+    context "replace int with a substitution" do
+      let (:value) { TestUtils.node_int(10) }
+      let (:new_value) { TestUtils.node_substitution(TestUtils.token_unquoted("a.b")) }
+      include_examples "top level value replace test"
+    end
+
+    context "replace substitution with an int" do
+      let (:value) { TestUtils.node_substitution(TestUtils.token_unquoted("a.b")) }
+      let (:new_value) { TestUtils.node_int(10) }
+      include_examples "top level value replace test"
+    end
+
+    context "ensure arrays can be replaced" do
+      context "can replace a simple value with an array" do
+        let (:value) { TestUtils.node_int(10) }
+        let (:new_value) { array }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an array with a simple value" do
+        let (:value) { array }
+        let (:new_value) { TestUtils.node_int(10) }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an array with another complex value" do
+        let (:value) { array }
+        let (:new_value) { TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_close_brace])}
+        include_examples "top level value replace test"
+      end
+    end
+
+    context "ensure objects can be replaced" do
+      context "can replace an object with a simple value" do
+        let (:value) { nested_map }
+        let (:new_value) { TestUtils.node_int(10) }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace a simple value with an object" do
+        let (:value) { TestUtils.node_int(10) }
+        let (:new_value) { nested_map }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an array with an object" do
+        let (:value) { array }
+        let (:new_value) { nested_map }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an object with an array" do
+        let (:value) { nested_map }
+        let (:new_value) { array }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an object with an empty object" do
+        let (:value) { nested_map }
+        let (:new_value) { TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_close_brace]) }
+        include_examples "top level value replace test"
+      end
+    end
+
+    context "ensure concatenations can be replaced" do
+      concatenation = TestUtils.config_node_concatenation([TestUtils.node_int(10), TestUtils.node_space, TestUtils.node_string("Hello")])
+
+      context "can replace a concatenation with a simple value" do
+        let (:value) { concatenation }
+        let (:new_value) { TestUtils.node_int(12) }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace a simple value with a concatenation" do
+        let (:value) { TestUtils.node_int(12) }
+        let (:new_value) { concatenation }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an object with a concatenation" do
+        let (:value) { nested_map }
+        let (:new_value) { concatenation }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace a concatenation with an object" do
+        let (:value) { concatenation }
+        let (:new_value) { nested_map }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace an array with a concatenation" do
+        let (:value) { array }
+        let (:new_value) { concatenation }
+        include_examples "top level value replace test"
+      end
+
+      context "can replace a concatenation with an array" do
+        let (:value) { concatenation }
+        let (:new_value) { array }
+        include_examples "top level value replace test"
+      end
+    end
+
+    context 'ensure a key with format "a.b" will be properly replaced' do
+      let (:key) { 'foo.bar' }
+      let (:value) { TestUtils.node_int(10) }
+      let (:new_value) { nested_map }
+      include_examples "top level value replace test"
+    end
+  end
+
+  ####################
+  # Duplicate Removal
+  ####################
+  context "remove duplicates" do
+    empty_map_node = TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_close_brace])
+    empty_array_node = TestUtils.config_node_array([TestUtils.node_open_bracket, TestUtils.node_close_bracket])
+
+    context "duplicates containing simple values will all be removed" do
+      let (:value1) { TestUtils.node_int(10) }
+      let (:value2) { TestUtils.node_true }
+      let (:value3) { TestUtils.node_null }
+      include_examples "replace duplicates test"
+    end
+
+    context "duplicates containing objects will be removed" do
+      let (:value1) { empty_map_node }
+      let (:value2) { empty_map_node }
+      let (:value3) { empty_map_node }
+      include_examples "replace duplicates test"
+    end
+
+    context "duplicates containing arrays will be removed" do
+      let (:value1) { empty_array_node }
+      let (:value2) { empty_array_node }
+      let (:value3) { empty_array_node }
+      include_examples "replace duplicates test"
+    end
+
+    context "duplicates containing a mix of value types will be removed" do
+      let (:value1) { TestUtils.node_int(10) }
+      let (:value2) { empty_map_node }
+      let (:value3) { empty_array_node }
+      include_examples "replace duplicates test"
+    end
+  end
+
+  #################################
+  # Addition of non-existent paths
+  #################################
+  context "add non existent paths" do
+    context "adding an integer" do
+      let (:value) { TestUtils.node_int(10) }
+      include_examples "non existent path test"
+    end
+
+    context "adding an array" do
+      let (:value) { TestUtils.config_node_array([TestUtils.node_open_bracket, TestUtils.node_int(15), TestUtils.node_close_bracket]) }
+      include_examples "non existent path test"
+    end
+
+    context "adding an object" do
+      let (:value) { TestUtils.config_node_object([TestUtils.node_open_brace,
+                                                   TestUtils.node_key_value_pair(TestUtils.config_node_key('foo'),
+                                                                                 TestUtils.node_double(3.14)),
+                                                   TestUtils.node_close_brace]) }
+      include_examples "non existent path test"
+    end
+  end
+
+  #################################
+  # Replacement of nested nodes
+  #################################
+  context "replace nested nodes" do
+    orig_text = "foo: bar\nbaz: {\n\t\"abc.def\": 123\n\t//This is a comment about the below setting\n\n\tabc: {\n\t\t" +
+        "def: \"this is a string\"\n\t\tghi: ${\"a.b\"}\n\t}\n}\nbaz.abc.ghi: 52\nbaz.abc.ghi: 53\n}"
+    lowest_level_map = TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_line(6), TestUtils.node_whitespace("\t\t"),
+                                                     TestUtils.node_key_value_pair(TestUtils.config_node_key("def"), TestUtils.config_node_simple_value(TestUtils.token_string("this is a string"))),
+                                                     TestUtils.node_line(7), TestUtils.node_whitespace("\t\t"),
+                                                     TestUtils.node_key_value_pair(TestUtils.config_node_key("ghi"), TestUtils.config_node_simple_value(TestUtils.token_key_substitution("a.b"))),
+                                                     TestUtils.node_line(8), TestUtils.node_whitespace("\t"), TestUtils.node_close_brace])
+    higher_level_map = TestUtils.config_node_object([TestUtils.node_open_brace, TestUtils.node_line(2), TestUtils.node_whitespace("\t"),
+                                                     TestUtils.node_key_value_pair(TestUtils.config_node_key('"abc.def"'), TestUtils.config_node_simple_value(TestUtils.token_int(123))),
+                                                     TestUtils.node_line(3), TestUtils.node_whitespace("\t"), TestUtils.node_comment_double_slash("This is a comment about the below setting"),
+                                                     TestUtils.node_line(4), TestUtils.node_line(5), TestUtils.node_whitespace("\t"),
+                                                     TestUtils.node_key_value_pair(TestUtils.config_node_key("abc"), lowest_level_map), TestUtils.node_line(9), TestUtils.node_close_brace])
+    orig_node = TestUtils.config_node_object([TestUtils.node_key_value_pair(TestUtils.config_node_key("foo"), TestUtils.config_node_simple_value(TestUtils.token_unquoted("bar"))),
+                                              TestUtils.node_line(1), TestUtils.node_key_value_pair(TestUtils.config_node_key('baz'), higher_level_map), TestUtils.node_line(10),
+                                              TestUtils.node_key_value_pair(TestUtils.config_node_key('baz.abc.ghi'), TestUtils.config_node_simple_value(TestUtils.token_int(52))),
+                                              TestUtils.node_line(11),
+                                              TestUtils.node_key_value_pair(TestUtils.config_node_key('baz.abc.ghi'), TestUtils.config_node_simple_value(TestUtils.token_int(53))),
+                                              TestUtils.node_line(12), TestUtils.node_close_brace])
+    it "should properly render the original node" do
+      expect(orig_node.render).to eq(orig_text)
+    end
+
+    it "should properly replace values in the original node" do
+      final_text = "foo: bar\nbaz: {\n\t\"abc.def\": true\n\t//This is a comment about the below setting\n\n\tabc: {\n\t\t" +
+          "def: false\n\t\t\n\t\t\"this.does.not.exist@@@+$#\": {\n\t\t  end: doesnotexist\n\t\t}\n\t}\n}\n\nbaz.abc.ghi: randomunquotedString\n}"
+
+      # Paths with quotes in the name are treated as a single Path, rather than multiple sub-paths
+      new_node = orig_node.set_value_on_path('baz."abc.def"', TestUtils.config_node_simple_value(TestUtils.token_true))
+      new_node = new_node.set_value_on_path('baz.abc.def', TestUtils.config_node_simple_value(TestUtils.token_false))
+
+      # Repeats are removed from nested maps
+      new_node = new_node.set_value_on_path('baz.abc.ghi', TestUtils.config_node_simple_value(TestUtils.token_unquoted('randomunquotedString')))
+
+      # Missing paths are added to the top level if they don't appear anywhere, including in nested maps
+      new_node = new_node.set_value_on_path('baz.abc."this.does.not.exist@@@+$#".end', TestUtils.config_node_simple_value(TestUtils.token_unquoted('doesnotexist')))
+
+      # The above operations cause the resultant map to be rendered properly
+      expect(new_node.render).to eq(final_text)
+    end
+  end
+
+end
diff --git a/spec/unit/typesafe/config/config_value_factory_spec.rb b/spec/unit/typesafe/config/config_value_factory_spec.rb
new file mode 100644
index 0000000..340846d
--- /dev/null
+++ b/spec/unit/typesafe/config/config_value_factory_spec.rb
@@ -0,0 +1,85 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon/config_value_factory'
+require 'hocon/config_render_options'
+require 'hocon/config_error'
+
+describe Hocon::ConfigValueFactory do
+  let(:render_options) { Hocon::ConfigRenderOptions.defaults }
+
+  before do
+    render_options.origin_comments = false
+    render_options.json = false
+  end
+
+  context "converting objects to ConfigValue using ConfigValueFactory" do
+    it "should convert true into a ConfigBoolean" do
+      value = Hocon::ConfigValueFactory.from_any_ref(true, nil)
+      expect(value).to be_instance_of(Hocon::Impl::ConfigBoolean)
+      expect(value.unwrapped).to eql(true)
+    end
+
+    it "should convert false into a ConfigBoolean" do
+      value = Hocon::ConfigValueFactory.from_any_ref(false, nil)
+      expect(value).to be_instance_of(Hocon::Impl::ConfigBoolean)
+      expect(value.unwrapped).to eql(false)
+    end
+
+    it "should convert nil into a ConfigNull object" do
+      value = Hocon::ConfigValueFactory.from_any_ref(nil, nil)
+      expect(value).to be_instance_of(Hocon::Impl::ConfigNull)
+      expect(value.unwrapped).to be_nil
+    end
+
+    it "should convert a string into a ConfigString object" do
+      value = Hocon::ConfigValueFactory.from_any_ref("Hello, World!", nil)
+      expect(value).to be_a(Hocon::Impl::ConfigString)
+      expect(value.unwrapped).to eq("Hello, World!")
+    end
+
+    it "should convert an integer into a ConfigInt object" do
+      value = Hocon::ConfigValueFactory.from_any_ref(123, nil)
+      expect(value).to be_instance_of(Hocon::Impl::ConfigInt)
+      expect(value.unwrapped).to eq(123)
+    end
+
+    it "should convert a double into a ConfigDouble object" do
+      value = Hocon::ConfigValueFactory.from_any_ref(123.456, nil)
+      expect(value).to be_instance_of(Hocon::Impl::ConfigDouble)
+      expect(value.unwrapped).to eq(123.456)
+    end
+
+    it "should convert a map into a SimpleConfigObject" do
+      map = {"a" => 1, "b" => 2, "c" => 3}
+      value = Hocon::ConfigValueFactory.from_any_ref(map, nil)
+      expect(value).to be_instance_of(Hocon::Impl::SimpleConfigObject)
+      expect(value.unwrapped).to eq(map)
+    end
+
+    it "should convert symbol keys in a map to string keys" do
+      orig_map = {a: 1, b: 2, c: {a: 1, b: 2, c: {a: 1}}}
+      map = {"a" => 1, "b" => 2, "c"=>{"a"=>1, "b"=>2, "c"=>{"a"=>1}}}
+      value = Hocon::ConfigValueFactory.from_any_ref(orig_map, nil)
+      expect(value).to be_instance_of(Hocon::Impl::SimpleConfigObject)
+      expect(value.unwrapped).to eq(map)
+
+      value = Hocon::ConfigValueFactory.from_map(orig_map, nil)
+      expect(value).to be_instance_of(Hocon::Impl::SimpleConfigObject)
+      expect(value.unwrapped).to eq(map)
+    end
+
+    it "should not parse maps with non-string and non-symbol keys" do
+      map = {1 => "a", 2 => "b"}
+      expect{ Hocon::ConfigValueFactory.from_any_ref(map, nil) }.to raise_error(Hocon::ConfigError::ConfigBugOrBrokenError)
+    end
+
+    it "should convert an Enumerable into a SimpleConfigList" do
+      list = [1, 2, 3, 4, 5]
+      value = Hocon::ConfigValueFactory.from_any_ref(list, nil)
+      expect(value).to be_instance_of(Hocon::Impl::SimpleConfigList)
+      expect(value.unwrapped).to eq(list)
+    end
+  end
+
+end
diff --git a/spec/unit/typesafe/config/config_value_spec.rb b/spec/unit/typesafe/config/config_value_spec.rb
new file mode 100644
index 0000000..dc1565b
--- /dev/null
+++ b/spec/unit/typesafe/config/config_value_spec.rb
@@ -0,0 +1,959 @@
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+
+require 'hocon/impl/config_delayed_merge'
+require 'hocon/impl/config_delayed_merge_object'
+require 'hocon/config_error'
+require 'hocon/impl/unsupported_operation_error'
+require 'hocon/config_value_factory'
+require 'hocon/config_render_options'
+
+
+
+SimpleConfigOrigin = Hocon::Impl::SimpleConfigOrigin
+SimpleConfigObject = Hocon::Impl::SimpleConfigObject
+SimpleConfigList = Hocon::Impl::SimpleConfigList
+SubstitutionExpression = Hocon::Impl::SubstitutionExpression
+ConfigReference = Hocon::Impl::ConfigReference
+ConfigConcatenation = Hocon::Impl::ConfigConcatenation
+ConfigDelayedMerge = Hocon::Impl::ConfigDelayedMerge
+ConfigDelayedMergeObject = Hocon::Impl::ConfigDelayedMergeObject
+ConfigNotResolvedError = Hocon::ConfigError::ConfigNotResolvedError
+UnresolvedSubstitutionError = Hocon::ConfigError::UnresolvedSubstitutionError
+ConfigBugOrBrokenError = Hocon::ConfigError::ConfigBugOrBrokenError
+AbstractConfigObject = Hocon::Impl::AbstractConfigObject
+ConfigValueFactory = Hocon::ConfigValueFactory
+ConfigFactory = Hocon::ConfigFactory
+UnsupportedOperationError = Hocon::Impl::UnsupportedOperationError
+ConfigNumber = Hocon::Impl::ConfigNumber
+ConfigRenderOptions = Hocon::ConfigRenderOptions
+
+describe "SimpleConfigOrigin equality" do
+  context "different origins with the same name should be equal" do
+    let(:a) { SimpleConfigOrigin.new_simple("foo") }
+    let(:same_as_a) { SimpleConfigOrigin.new_simple("foo") }
+    let(:b) { SimpleConfigOrigin.new_simple("bar") }
+
+    context "a equals a" do
+      let(:first_object) { a }
+      let(:second_object) { a }
+      include_examples "object_equality"
+    end
+
+    context "a equals same_as_a" do
+      let(:first_object) { a }
+      let(:second_object) { same_as_a }
+      include_examples "object_equality"
+    end
+
+    context "a does not equal b" do
+      let(:first_object) { a }
+      let(:second_object) { b }
+      include_examples "object_inequality"
+    end
+  end
+end
+
+describe "ConfigInt equality" do
+  context "different ConfigInts with the same value should be equal" do
+    a = TestUtils.int_value(42)
+    same_as_a = TestUtils.int_value(42)
+    b = TestUtils.int_value(43)
+
+    context "a equals a" do
+      let(:first_object) { a }
+      let(:second_object) { a }
+      include_examples "object_equality"
+    end
+
+    context "a equals same_as_a" do
+      let(:first_object) { a }
+      let(:second_object) { same_as_a }
+      include_examples "object_equality"
+    end
+
+    context "a does not equal b" do
+      let(:first_object) { a }
+      let(:second_object) { b }
+      include_examples "object_inequality"
+    end
+  end
+end
+
+describe "ConfigFloat equality" do
+  context "different ConfigFloats with the same value should be equal" do
+    a = TestUtils.double_value(3.14)
+    same_as_a = TestUtils.double_value(3.14)
+    b = TestUtils.double_value(4.14)
+
+    context "a equals a" do
+      let(:first_object) { a }
+      let(:second_object) { a }
+      include_examples "object_equality"
+    end
+
+    context "a equals same_as_a" do
+      let(:first_object) { a }
+      let(:second_object) { same_as_a }
+      include_examples "object_equality"
+    end
+
+    context "a does not equal b" do
+      let(:first_object) { a }
+      let(:second_object) { b }
+      include_examples "object_inequality"
+    end
+  end
+end
+
+describe "ConfigFloat and ConfigInt equality" do
+  context "a ConfigFloat and a ConfigInt with the same value should be equal" do
+    double_val = TestUtils.double_value(3.0)
+    int_value = TestUtils.int_value(3)
+    double_value_b = TestUtils.double_value(4.0)
+    int_value_b = TestUtils.double_value(4)
+
+    context "int equals double" do
+      let(:first_object) { double_val }
+      let(:second_object) { int_value }
+      include_examples "object_equality"
+    end
+
+    context "ConfigFloat made from int equals double" do
+      let(:first_object) { double_value_b }
+      let(:second_object) { int_value_b }
+      include_examples "object_equality"
+    end
+
+    context "3 doesn't equal 4.0" do
+      let(:first_object) { int_value }
+      let(:second_object) { double_value_b }
+      include_examples "object_inequality"
+    end
+
+    context "4.0 doesn't equal 3.0" do
+      let(:first_object) { int_value_b }
+      let(:second_object) { double_val }
+      include_examples "object_inequality"
+    end
+  end
+end
+
+describe "SimpleConfigObject equality" do
+  context "SimpleConfigObjects made from hash maps" do
+    a_map = TestUtils.config_map({a: 1, b: 2, c: 3})
+    same_as_a_map = TestUtils.config_map({a: 1, b: 2, c: 3})
+    b_map = TestUtils.config_map({a: 3, b: 4, c: 5})
+
+    # different keys is a different case in the equals implementation
+    c_map = TestUtils.config_map({x: 3, y: 4, z: 5})
+
+    a = SimpleConfigObject.new(TestUtils.fake_origin, a_map)
+    same_as_a = SimpleConfigObject.new(TestUtils.fake_origin, same_as_a_map)
+    b = SimpleConfigObject.new(TestUtils.fake_origin, b_map)
+    c = SimpleConfigObject.new(TestUtils.fake_origin, c_map)
+
+    # the config for an equal object is also equal
+    config = a.to_config
+
+    context "a equals a" do
+      let(:first_object) { a }
+      let(:second_object) { a }
+      include_examples "object_equality"
+    end
+
+    context "a equals same_as_a" do
+      let(:first_object) { a }
+      let(:second_object) { same_as_a }
+      include_examples "object_equality"
+    end
+
+    context "b equals b" do
+      let(:first_object) { b }
+      let(:second_object) { b }
+      include_examples "object_equality"
+    end
+
+    context "c equals c" do
+      let(:first_object) { c }
+      let(:second_object) { c }
+      include_examples "object_equality"
+    end
+
+    context "a doesn't equal b" do
+      let(:first_object) { a }
+      let(:second_object) { b }
+      include_examples "object_inequality"
+    end
+
+    context "a doesn't equal c" do
+      let(:first_object) { a }
+      let(:second_object) { c }
+      include_examples "object_inequality"
+    end
+
+    context "b doesn't equal c" do
+      let(:first_object) { b }
+      let(:second_object) { c }
+      include_examples "object_inequality"
+    end
+
+    context "a's config equals a's config" do
+      let(:first_object) { config }
+      let(:second_object) { config }
+      include_examples "object_equality"
+    end
+
+    context "a's config equals same_as_a's config" do
+      let(:first_object) { config }
+      let(:second_object) { same_as_a.to_config }
+      include_examples "object_equality"
+    end
+
+    context "a's config equals a's config computed again" do
+      let(:first_object) { config }
+      let(:second_object) { a.to_config }
+      include_examples "object_equality"
+    end
+
+    context "a's config doesn't equal b's config" do
+      let(:first_object) { config }
+      let(:second_object) { b.to_config }
+      include_examples "object_inequality"
+    end
+
+    context "a's config doesn't equal c's config" do
+      let(:first_object) { config }
+      let(:second_object) { c.to_config }
+      include_examples "object_inequality"
+    end
+
+    context "a doesn't equal a's config" do
+      let(:first_object) { a }
+      let(:second_object) { config }
+      include_examples "object_inequality"
+    end
+
+    context "b doesn't equal b's config" do
+      let(:first_object) { b }
+      let(:second_object) { b.to_config }
+      include_examples "object_inequality"
+    end
+  end
+end
+
+describe "SimpleConfigList equality" do
+  a_values = [1, 2, 3].map { |i| TestUtils.int_value(i) }
+  a_list = SimpleConfigList.new(TestUtils.fake_origin, a_values)
+
+  same_as_a_values = [1, 2, 3].map { |i| TestUtils.int_value(i) }
+  same_as_a_list = SimpleConfigList.new(TestUtils.fake_origin, same_as_a_values)
+
+  b_values = [4, 5, 6].map { |i| TestUtils.int_value(i) }
+  b_list = SimpleConfigList.new(TestUtils.fake_origin, b_values)
+
+  context "a_list equals a_list" do
+    let(:first_object) { a_list }
+    let(:second_object) { a_list }
+    include_examples "object_equality"
+  end
+
+  context "a_list equals same_as_a_list" do
+    let(:first_object) { a_list }
+    let(:second_object) { same_as_a_list }
+    include_examples "object_equality"
+  end
+
+  context "a_list doesn't equal b_list" do
+    let(:first_object) { a_list }
+    let(:second_object) { b_list }
+    include_examples "object_inequality"
+  end
+end
+
+describe "ConfigReference equality" do
+  a = TestUtils.subst("foo")
+  same_as_a = TestUtils.subst("foo")
+  b = TestUtils.subst("bar")
+  c = TestUtils.subst("foo", true)
+
+  specify "testing values are of the right type" do
+    expect(a).to be_instance_of(ConfigReference)
+    expect(b).to be_instance_of(ConfigReference)
+    expect(c).to be_instance_of(ConfigReference)
+  end
+
+  context "a equals a" do
+    let(:first_object) { a }
+    let(:second_object) { a }
+    include_examples "object_equality"
+  end
+
+  context "a equals same_as_a" do
+    let(:first_object) { a }
+    let(:second_object) { same_as_a }
+    include_examples "object_equality"
+  end
+
+  context "a doesn't equal b" do
+    let(:first_object) { a }
+    let(:second_object) { b }
+    include_examples "object_inequality"
+  end
+
+  context "a doesn't equal c, an optional substitution" do
+    let(:first_object) { a }
+    let(:second_object) { c }
+    include_examples "object_inequality"
+  end
+end
+
+describe "ConfigConcatenation equality" do
+  a = TestUtils.subst_in_string("foo")
+  same_as_a = TestUtils.subst_in_string("foo")
+  b = TestUtils.subst_in_string("bar")
+  c = TestUtils.subst_in_string("foo", true)
+
+  specify "testing values are of the right type" do
+    expect(a).to be_instance_of(ConfigConcatenation)
+    expect(b).to be_instance_of(ConfigConcatenation)
+    expect(c).to be_instance_of(ConfigConcatenation)
+  end
+
+  context "a equals a" do
+    let(:first_object) { a }
+    let(:second_object) { a }
+    include_examples "object_equality"
+  end
+
+  context "a equals same_as_a" do
+    let(:first_object) { a }
+    let(:second_object) { same_as_a }
+    include_examples "object_equality"
+  end
+
+  context "a doesn't equal b" do
+    let(:first_object) { a }
+    let(:second_object) { b }
+    include_examples "object_inequality"
+  end
+
+  context "a doesn't equal c, an optional substitution" do
+    let(:first_object) { a }
+    let(:second_object) { c }
+    include_examples "object_inequality"
+  end
+end
+
+describe "ConfigDelayedMerge equality" do
+  s1 = TestUtils.subst("foo")
+  s2 = TestUtils.subst("bar")
+  a = ConfigDelayedMerge.new(TestUtils.fake_origin, [s1, s2])
+  same_as_a = ConfigDelayedMerge.new(TestUtils.fake_origin, [s1, s2])
+  b = ConfigDelayedMerge.new(TestUtils.fake_origin, [s2, s1])
+
+  context "a equals a" do
+    let(:first_object) { a }
+    let(:second_object) { a }
+    include_examples "object_equality"
+  end
+
+  context "a equals same_as_a" do
+    let(:first_object) { a }
+    let(:second_object) { same_as_a }
+    include_examples "object_equality"
+  end
+
+  context "a doesn't equal b" do
+    let(:first_object) { a }
+    let(:second_object) { b }
+    include_examples "object_inequality"
+  end
+end
+
+describe "ConfigDelayedMergeObject equality" do
+  empty = SimpleConfigObject.empty
+  s1 = TestUtils.subst("foo")
+  s2 = TestUtils.subst("bar")
+  a = ConfigDelayedMergeObject.new(TestUtils.fake_origin, [empty, s1, s2])
+  same_as_a = ConfigDelayedMergeObject.new(TestUtils.fake_origin, [empty, s1, s2])
+  b = ConfigDelayedMergeObject.new(TestUtils.fake_origin, [empty, s2, s1])
+
+  context "a equals a" do
+    let(:first_object) { a }
+    let(:second_object) { a }
+    include_examples "object_equality"
+  end
+
+  context "a equals same_as_a" do
+    let(:first_object) { a }
+    let(:second_object) { same_as_a }
+    include_examples "object_equality"
+  end
+
+  context "a doesn't equal b" do
+    let(:first_object) { a }
+    let(:second_object) { b }
+    include_examples "object_inequality"
+  end
+end
+
+describe "Values' to_s methods" do
+  # just check that these don't throw, the exact output
+  # isn't super important since it's just for debugging
+
+  specify "to_s doesn't throw error" do
+    TestUtils.int_value(10).to_s
+    TestUtils.double_value(3.14).to_s
+    TestUtils.string_value("hi").to_s
+    TestUtils.null_value.to_s
+    TestUtils.bool_value(true).to_s
+    empty_object = SimpleConfigObject.empty
+    empty_object.to_s
+
+    SimpleConfigList.new(TestUtils.fake_origin, []).to_s
+    TestUtils.subst("a").to_s
+    TestUtils.subst_in_string("b").to_s
+    dm = ConfigDelayedMerge.new(TestUtils.fake_origin, [TestUtils.subst("a"), TestUtils.subst("b")])
+    dm.to_s
+
+    dmo = ConfigDelayedMergeObject.new(TestUtils.fake_origin, [empty_object, TestUtils.subst("a"), TestUtils.subst("b")])
+    dmo.to_s
+
+    TestUtils.fake_origin.to_s
+  end
+end
+
+describe "ConfigObject" do
+  specify "should unwrap correctly" do
+    m = SimpleConfigObject.new(TestUtils.fake_origin, TestUtils.config_map({a: 1, b: 2, c: 3}))
+
+    expect({a: 1, b: 2, c: 3}).to eq(m.unwrapped)
+  end
+
+  specify "should implement read only map" do
+    m = SimpleConfigObject.new(TestUtils.fake_origin, TestUtils.config_map({a: 1, b: 2, c: 3}))
+
+    expect(TestUtils.int_value(1)).to eq(m[:a])
+    expect(TestUtils.int_value(2)).to eq(m[:b])
+    expect(TestUtils.int_value(3)).to eq(m[:c])
+    expect(m[:d]).to be_nil
+    # [] can take a non-string
+    expect(m[[]]).to be_nil
+
+    expect(m.has_key? :a).to be_truthy
+    expect(m.has_key? :z).to be_falsey
+    # has_key? can take a non-string
+    expect(m.has_key? []).to be_falsey
+
+    expect(m.has_value? TestUtils.int_value(1)).to be_truthy
+    expect(m.has_value? TestUtils.int_value(10)).to be_falsey
+    # has_value? can take a non-string
+    expect(m.has_value? []).to be_falsey
+
+    expect(m.empty?).to be_falsey
+
+    expect(m.size).to eq(3)
+
+    values = [TestUtils.int_value(1), TestUtils.int_value(2), TestUtils.int_value(3)]
+    expect(values).to eq(m.values)
+
+    keys = [:a, :b, :c]
+    expect(keys).to eq(m.keys)
+
+    expect { m["hello"] = TestUtils.int_value(41) }.to raise_error(UnsupportedOperationError)
+    expect { m.delete(:a) }.to raise_error(UnsupportedOperationError)
+  end
+end
+
+describe "ConfigList" do
+  specify "should implement read only list" do
+    values = ["a", "b", "c"].map { |i| TestUtils.string_value(i) }
+    l = SimpleConfigList.new(TestUtils.fake_origin, values)
+
+    expect(values[0]).to eq(l[0])
+    expect(values[1]).to eq(l[1])
+    expect(values[2]).to eq(l[2])
+
+    expect(l.include? TestUtils.string_value("a")).to be_truthy
+    expect(l.include_all?([TestUtils.string_value("a")])).to be_truthy
+    expect(l.include_all?([TestUtils.string_value("b")])).to be_truthy
+    expect(l.include_all?(values)).to be_truthy
+
+    expect(l.index(values[1])).to eq(1)
+
+    expect(l.empty?).to be_falsey
+
+    expect(l.map { |v| v }).to eq(values.map { |v| v })
+
+    expect(l.rindex(values[1])).to eq(1)
+
+    expect(l.size).to eq(3)
+
+    expect { l.push(TestUtils.int_value(3)) }.to raise_error(UnsupportedOperationError)
+    expect { l << TestUtils.int_value(3) }.to raise_error(UnsupportedOperationError)
+    expect { l.clear }.to raise_error(UnsupportedOperationError)
+    expect { l.delete(TestUtils.int_value(2)) }.to raise_error(UnsupportedOperationError)
+    expect { l.delete(1) }.to raise_error(UnsupportedOperationError)
+    expect { l[0] = TestUtils.int_value(42) }.to raise_error(UnsupportedOperationError)
+  end
+end
+
+describe "Objects throwing ConfigNotResolvedError" do
+  context "ConfigSubstitution" do
+    specify "should throw ConfigNotResolvedError" do
+      expect{ TestUtils.subst("foo").value_type }.to raise_error(ConfigNotResolvedError)
+      expect{ TestUtils.subst("foo").unwrapped }.to raise_error(ConfigNotResolvedError)
+    end
+  end
+
+  context "ConfigDelayedMerge" do
+    let(:dm) { ConfigDelayedMerge.new(TestUtils.fake_origin, [TestUtils.subst("a"), TestUtils.subst("b")]) }
+
+    specify "should throw ConfigNotResolvedError" do
+      expect{ dm.value_type }.to raise_error(ConfigNotResolvedError)
+      expect{ dm.unwrapped }.to raise_error(ConfigNotResolvedError)
+    end
+  end
+
+  context "ConfigDelayedMergeObject" do
+    empty_object = SimpleConfigObject.empty
+    objects = [empty_object, TestUtils.subst("a"), TestUtils.subst("b")]
+
+    let(:dmo) { ConfigDelayedMergeObject.new(TestUtils.fake_origin, objects) }
+
+    specify "should have value type of OBJECT" do
+      expect(dmo.value_type).to eq(Hocon::ConfigValueType::OBJECT)
+    end
+
+    specify "should throw ConfigNotResolvedError" do
+      expect{ dmo.unwrapped }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo["foo"] }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.has_key?(nil) }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.has_value?(nil) }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.each }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.empty? }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.keys }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.size }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.values }.to raise_error(ConfigNotResolvedError)
+      expect{ dmo.to_config.get_int("foo") }.to raise_error(ConfigNotResolvedError)
+    end
+  end
+end
+
+describe "Round tripping numbers through parse_string" do
+  specify "should get the same numbers back out" do
+    # formats rounded off with E notation
+    a = "132454454354353245.3254652656454808909932874873298473298472"
+    # formats as 100000.0
+    b = "1e6"
+    # formats as 5.0E-5
+    c = "0.00005"
+    # formats as 1E100 (capital E)
+    d = "1e100"
+
+    object = TestUtils.parse_config("{ a : #{a}, b : #{b}, c : #{c}, d : #{d}}")
+    expect([a, b, c, d]).to eq(["a", "b", "c", "d"].map { |x| object.get_string(x) })
+
+    object2 = TestUtils.parse_config("{ a : xx #{a} yy, b : xx #{b} yy, c : xx #{c} yy, d : xx #{d} yy}")
+    expected2 = [a, b, c, d].map { |x| "xx #{x} yy"}
+    expect(["a", "b", "c", "d"].map { |x| object2.get_string(x) }).to eq(expected2)
+  end
+end
+
+
+
+describe "AbstractConfigObject#merge_origins" do
+  def o(desc, empty)
+    values = {}
+
+    if !empty
+      values["hello"] = TestUtils.int_value(37)
+    end
+
+    SimpleConfigObject.new(SimpleConfigOrigin.new_simple(desc), values)
+  end
+
+  def m(*values)
+    AbstractConfigObject.merge_origins(values).description
+  end
+
+  specify "should merge origins correctly" do
+    # simplest case
+    expect(m(o("a", false), o("b", false))).to eq("merge of a,b")
+
+    # combine duplicate "merge of"
+    expect(m(o("a", false), o("merge of x,y", false))).to eq("merge of a,x,y")
+    expect(m(o("merge of a,b", false), o("merge of x,y", false))).to eq("merge of a,b,x,y")
+    # ignore empty objects
+    expect(m(o("foo", true), o("a", false))).to eq("a")
+    # unless they are all empty, pick the first one
+    expect(m(o("foo", true), o("a", true))).to eq("foo")
+    # merge just one
+    expect(m(o("foo", false))).to eq("foo")
+    # merge three
+    expect(m(o("a", false), o("b", false), o("c", false))).to eq("merge of a,b,c")
+  end
+end
+
+describe "SimpleConfig#has_path?" do
+  specify "should work in various contexts" do
+    empty = TestUtils.parse_config("{}")
+
+    expect(empty.has_path?("foo")).to be_falsey
+
+    object = TestUtils.parse_config("a=null, b.c.d=11, foo=bar")
+
+    # returns true for the non-null values
+    expect(object.has_path?("foo")).to be_truthy
+    expect(object.has_path?("b.c.d")).to be_truthy
+    expect(object.has_path?("b.c")).to be_truthy
+    expect(object.has_path?("b")).to be_truthy
+
+    # has_path is false for null values but contains_key is true
+    expect(object.root["a"]).to eq(TestUtils.null_value)
+    expect(object.root.has_key?("a")).to be_truthy
+    expect(object.has_path?("a")).to be_falsey
+
+    # false for totally absent values
+    expect(object.root.has_key?("notinhere")).to be_falsey
+    expect(object.has_path?("notinhere")).to be_falsey
+
+    # throws proper exceptions
+    expect { empty.has_path?("a.") }.to raise_error(Hocon::ConfigError::ConfigBadPathError)
+    expect { empty.has_path?("..") }.to raise_error(Hocon::ConfigError::ConfigBadPathError)
+  end
+end
+
+describe "ConfigNumber::new_number" do
+  specify "should create new objects correctly" do
+    def n(v)
+      ConfigNumber.new_number(TestUtils.fake_origin, v, nil)
+    end
+
+    expect(n(3.14).unwrapped).to eq(3.14)
+    expect(n(1).unwrapped).to eq(1)
+    expect(n(1).unwrapped).to eq(1.0)
+  end
+end
+
+describe "Boolean conversions" do
+  specify "true, yes, and on all convert to true" do
+    trues = TestUtils.parse_object("{ a=true, b=yes, c=on }").to_config
+    ["a", "b", "c"].map { |x| expect(trues.get_boolean(x)).to be true }
+
+    falses = TestUtils.parse_object("{ a=false, b=no, c=off }").to_config
+    ["a", "b", "c"].map { |x| expect(falses.get_boolean(x)).to be false }
+  end
+end
+
+describe "SimpleConfigOrigin" do
+  let(:has_filename) { SimpleConfigOrigin.new_file("foo") }
+  let(:no_filename) { SimpleConfigOrigin.new_simple("bar") }
+  let(:filename_with_line) { has_filename.with_line_number(3) }
+  let(:no_filename_with_line) { no_filename.with_line_number(4) }
+
+  specify "filename matches what was specified" do
+    expect(has_filename.filename).to eq("foo")
+    expect(filename_with_line.filename).to eq("foo")
+    expect(no_filename.filename).to be nil
+    expect(no_filename_with_line.filename).to be nil
+  end
+
+  specify "description matches correctly" do
+    expect(has_filename.description).to eq("foo")
+    expect(no_filename.description).to eq("bar")
+    expect(filename_with_line.description).to eq("foo: 3")
+    expect(no_filename_with_line.description).to eq("bar: 4")
+  end
+
+  specify "origins with no line number should have line number of -1" do
+    expect(has_filename.line_number).to eq(-1)
+    expect(no_filename.line_number).to eq(-1)
+  end
+
+  specify "line_number returns the right line number" do
+    expect(filename_with_line.line_number).to eq(3)
+    expect(no_filename_with_line.line_number).to eq(4)
+  end
+
+  # Note: skipping tests related to URLs since we aren't implementing that
+end
+
+
+describe "Config#with_only_key and with_only_path" do
+  context "should keep the correct data" do
+    object = TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4, c.d.z=5 }")
+
+    it "should keep only a" do
+      expect(object.with_only_key("a")).to eq(TestUtils.parse_object("{ a=1 }"))
+    end
+
+    it "should keep only e" do
+      expect(object.with_only_key("e")).to eq(TestUtils.parse_object("{ e.f.g=4 }"))
+    end
+
+    it "should keep only c.d" do
+      expect(object.to_config.with_only_path("c.d").root).to eq(TestUtils.parse_object("{ c.d.y=3, c.d.z=5 }"))
+    end
+
+    it "should keep only c.d.z" do
+      expect(object.to_config.with_only_path("c.d.z").root).to eq(TestUtils.parse_object("{ c.d.z=5 }"))
+    end
+
+    it "should keep nonexistent key" do
+      expect(object.with_only_key("nope")).to eq(TestUtils.parse_object("{ }"))
+    end
+
+    it "should keep nonexistent path" do
+      expect(object.to_config.with_only_path("q.w.e.r.t.y").root).to eq(TestUtils.parse_object("{ }"))
+    end
+
+    it "should keep only nonexistent underneath non-object" do
+      expect(object.to_config.with_only_path("a.nonextistent").root).to eq(TestUtils.parse_object("{ }"))
+    end
+
+    it "should keep only nonexistent underneath nested non-object" do
+      expect(object.to_config.with_only_path("c.d.z.nonexistent").root).to eq(TestUtils.parse_object("{ }"))
+    end
+  end
+
+  specify "should handle unresolved correctly" do
+    object = TestUtils.parse_object("{ a = {}, a=${x}, b=${y}, b=${z}, x={asf:1}, y=2, z=3 }")
+
+    expect(object.to_config.resolve.with_only_path("a.asf").root).to eq(TestUtils.parse_object("{ a={asf:1} }"))
+
+    TestUtils.intercept(UnresolvedSubstitutionError) do
+      object.with_only_key("a").to_config.resolve
+    end
+
+    TestUtils.intercept(UnresolvedSubstitutionError) do
+      object.with_only_key("b").to_config.resolve
+    end
+
+    expect(object.resolve_status).to eq(Hocon::Impl::ResolveStatus::UNRESOLVED)
+    expect(object.with_only_key("z").resolve_status).to eq(Hocon::Impl::ResolveStatus::RESOLVED)
+  end
+end
+
+describe "Config#without_key/path" do
+
+  context "should remove keys correctly" do
+    object = TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4, c.d.z=5 }")
+
+    it "should not have a" do
+      expect(object.without_key("a")).to eq(TestUtils.parse_object("{ b=2, c.d.y=3, e.f.g=4, c.d.z=5 }"))
+    end
+
+    it "should not have c" do
+      expect(object.without_key("c")).to eq(TestUtils.parse_object("{ a=1, b=2, e.f.g=4 }"))
+    end
+
+    it "should not have c.d" do
+      expect(object.to_config.without_path("c.d").root).to eq(TestUtils.parse_object("{ a=1, b=2, e.f.g=4, c={} }"))
+    end
+
+    it "should not have c.d.z" do
+      expect(object.to_config.without_path("c.d.z").root).to eq(TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4 }"))
+    end
+
+    it "should not change without nonexistent key" do
+      expect(object.without_key("nonexistent")).to eq(TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4, c.d.z=5 }"))
+    end
+
+    it "should not change without nonexistent path" do
+      expect(object.to_config.without_path("q.w.e.r.t.y").root).to eq(TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4, c.d.z=5 }"))
+    end
+
+    it "should not change without nonexistent path with existing prefix" do
+      expect(object.to_config.without_path("a.foo").root).to eq(TestUtils.parse_object("{ a=1, b=2, c.d.y=3, e.f.g=4, c.d.z=5 }"))
+    end
+  end
+end
+
+describe "Config#without_key/path involving unresolved" do
+
+  specify "should handle unresolved correctly" do
+    object = TestUtils.parse_object("{ a = {}, a=${x}, b=${y}, b=${z}, x={asf:1}, y=2, z=3 }")
+
+    expect(object.to_config.resolve.without_path("a.asf").root).to eq(TestUtils.parse_object("{ a={}, b=3, x={asf:1}, y=2, z=3 }"))
+
+    TestUtils.intercept(UnresolvedSubstitutionError) do
+      object.without_key("x").to_config.resolve
+    end
+
+    TestUtils.intercept(UnresolvedSubstitutionError) do
+      object.without_key("z").to_config.resolve
+    end
+
+    expect(object.resolve_status).to eq(Hocon::Impl::ResolveStatus::UNRESOLVED)
+    expect(object.without_key("a").resolve_status).to eq(Hocon::Impl::ResolveStatus::UNRESOLVED)
+    expect(object.without_key("a").without_key("b").resolve_status).to eq(Hocon::Impl::ResolveStatus::RESOLVED)
+  end
+end
+
+describe "Config#at_path" do
+  specify "works with one element" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = v.at_path("a")
+
+    expect(config).to eq(TestUtils.parse_config("a=42"))
+    expect(v).to eq(config.get_value("a"))
+    expect(config.origin.description).to include("at_path")
+  end
+
+  specify "works with two elements" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = v.at_path("a.b")
+
+    expect(config).to eq(TestUtils.parse_config("a.b=42"))
+    expect(v).to eq(config.get_value("a.b"))
+    expect(config.origin.description).to include("at_path")
+  end
+
+  specify "works with four elements" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = v.at_path("a.b.c.d")
+
+    expect(config).to eq(TestUtils.parse_config("a.b.c.d=42"))
+    expect(v).to eq(config.get_value("a.b.c.d"))
+    expect(config.origin.description).to include("at_path")
+  end
+end
+
+describe "Config#at_key" do
+  specify "at_key works" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = v.at_key("a")
+
+    expect(config).to eq(TestUtils.parse_config("a=42"))
+    expect(v).to eq(config.get_value("a"))
+    expect(config.origin.description).to include("at_key")
+  end
+
+  specify "works with value depth 1 from empty" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = ConfigFactory.empty.with_value("a", v)
+
+    expect(config).to eq(TestUtils.parse_config("a=42"))
+    expect(v).to eq(config.get_value("a"))
+  end
+
+  specify "works with value depth 2 from empty" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = ConfigFactory.empty.with_value("a.b", v)
+
+    expect(config).to eq(TestUtils.parse_config("a.b=42"))
+    expect(v).to eq(config.get_value("a.b"))
+  end
+
+  specify "works with value depth 3 from empty" do
+    v = ConfigValueFactory.from_any_ref(42)
+    config = ConfigFactory.empty.with_value("a.b.c", v)
+
+    expect(config).to eq(TestUtils.parse_config("a.b.c=42"))
+    expect(v).to eq(config.get_value("a.b.c"))
+  end
+
+  specify "with value depth 1 overwrites existing" do
+    v = ConfigValueFactory.from_any_ref(47)
+    old = v.at_path("a")
+    config = old.with_value("a", ConfigValueFactory.from_any_ref(42))
+
+    expect(config).to eq(TestUtils.parse_config("a=42"))
+    expect(config.get_int("a")).to eq(42)
+  end
+
+  specify "with value depth 2 overwrites existing" do
+    v = ConfigValueFactory.from_any_ref(47)
+    old = v.at_path("a.b")
+    config = old.with_value("a.b", ConfigValueFactory.from_any_ref(42))
+
+    expect(config).to eq(TestUtils.parse_config("a.b=42"))
+    expect(config.get_int("a.b")).to eq(42)
+  end
+
+  specify "with value inside existing object" do
+    v = ConfigValueFactory.from_any_ref(47)
+    old = v.at_path("a.c")
+    config = old.with_value("a.b", ConfigValueFactory.from_any_ref(42))
+
+    expect(config).to eq(TestUtils.parse_config("a.b=42,a.c=47"))
+    expect(config.get_int("a.b")).to eq(42)
+    expect(config.get_int("a.c")).to eq(47)
+  end
+
+  specify "with value build complex config" do
+    v1 = ConfigValueFactory.from_any_ref(1)
+    v2 = ConfigValueFactory.from_any_ref(2)
+    v3 = ConfigValueFactory.from_any_ref(3)
+    v4 = ConfigValueFactory.from_any_ref(4)
+
+    config = ConfigFactory.empty.with_value("a", v1)
+      .with_value("b.c", v2)
+      .with_value("b.d", v3)
+      .with_value("x.y.z", v4)
+
+    expect(config).to eq(TestUtils.parse_config("a=1,b.c=2,b.d=3,x.y.z=4"))
+  end
+end
+
+describe "#render" do
+  context "has newlines in description" do
+    v = ConfigValueFactory.from_any_ref(89, "this is a description\nwith some\nnewlines")
+
+    list = SimpleConfigList.new(SimpleConfigOrigin.new_simple("\n5\n6\n7\n"), [v])
+
+    conf = ConfigFactory.empty.with_value("bar", list)
+
+    rendered = conf.root.render
+
+    specify "rendered config should have all the lines that were added, with newlines" do
+      expect(rendered).to include("is a description\n")
+      expect(rendered).to include("with some\n")
+      expect(rendered).to include("newlines\n")
+      expect(rendered).to include("#\n")
+      expect(rendered).to include("5\n")
+      expect(rendered).to include("6\n")
+      expect(rendered).to include("7\n")
+    end
+
+    specify "the rendered config should give back the original config" do
+      parsed = ConfigFactory.parse_string(rendered)
+
+      expect(parsed).to eq(conf)
+    end
+  end
+
+  specify "should sort properly" do
+    config = TestUtils.parse_config('0=a,1=b,2=c,3=d,10=e,20=f,30=g')
+    rendered = config.root.render(ConfigRenderOptions.concise)
+
+    expect(rendered).to eq('{"0":"a","1":"b","2":"c","3":"d","10":"e","20":"f","30":"g"}')
+  end
+
+  context "RenderOptions.key_value_separator" do
+    specify "should use colons when set to :colon" do
+      conf = Hocon::ConfigValueFactory.from_any_ref({foo: {bar: 'baz'}})
+      expected = "foo: {\n    bar: baz\n}\n"
+      render_options = ConfigRenderOptions.defaults
+      render_options.json = false
+      render_options.key_value_separator = :colon
+      render_options.origin_comments = false
+
+      expect(conf.render(render_options)).to eq(expected)
+    end
+
+    specify "should use equals signs when set to :equals" do
+      conf = Hocon::ConfigValueFactory.from_any_ref({foo: {bar: 'baz'}})
+      expected = "foo={\n    bar=baz\n}\n"
+      render_options = ConfigRenderOptions.defaults
+      render_options.json = false
+      render_options.origin_comments = false
+      render_options.key_value_separator = :equals
+
+      expect(conf.render(render_options)).to eq(expected)
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/path_spec.rb b/spec/unit/typesafe/config/path_spec.rb
new file mode 100644
index 0000000..102e6f0
--- /dev/null
+++ b/spec/unit/typesafe/config/path_spec.rb
@@ -0,0 +1,261 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+
+
+describe Hocon::Impl::Path do
+  Path = Hocon::Impl::Path
+
+  ####################
+  # Path Equality
+  ####################
+  context "Check path equality" do
+    # note: foo.bar is a single key here
+    let(:key_a) { Path.new_key("foo.bar") }
+    let(:same_as_key_a) { Path.new_key("foo.bar") }
+    let(:different_key) { Path.new_key("hello") }
+
+    # Here foo.bar is two elements
+    let(:two_elements) { Path.new_path("foo.bar") }
+    let(:same_as_two_elements) { Path.new_path("foo.bar") }
+
+    context "key_a equals a path of the same name" do
+      let(:first_object) { key_a }
+      let(:second_object) { TestUtils.path("foo.bar") }
+      include_examples "object_equality"
+    end
+
+    context "two_elements equals a path with those two elements" do
+      let(:first_object) { two_elements}
+      let(:second_object) { TestUtils.path("foo", "bar") }
+      include_examples "object_equality"
+    end
+
+    context "key_a equals key_a" do
+      let(:first_object) { key_a }
+      let(:second_object) { key_a }
+      include_examples "object_equality"
+    end
+
+    context "key_a equals same_as_key_a" do
+      let(:first_object) { key_a }
+      let(:second_object) { same_as_key_a }
+      include_examples "object_equality"
+    end
+
+    context "key_a not equal to different_key" do
+      let(:first_object) { key_a }
+      let(:second_object) { different_key }
+      include_examples "object_inequality"
+    end
+
+    context "key_a not equal to the two_elements path" do
+      let(:first_object) { key_a }
+      let(:second_object) { two_elements }
+      include_examples "object_inequality"
+    end
+
+    context "two_elements path equals same_as_two_elements path" do
+      let(:first_object) { two_elements}
+      let(:second_object) { same_as_two_elements }
+      include_examples "object_equality"
+    end
+  end
+
+  ####################
+  # Testing to_s
+  ####################
+  context "testing to_s" do
+    it "should find to_s returning the correct strings" do
+      expect("Path(foo)").to eq(TestUtils.path("foo").to_s)
+      expect("Path(foo.bar)").to eq(TestUtils.path("foo", "bar").to_s)
+      expect('Path(foo."bar*")').to eq(TestUtils.path("foo", "bar*").to_s)
+      expect('Path("foo.bar")').to eq(TestUtils.path("foo.bar").to_s)
+    end
+  end
+
+  ####################
+  # Render
+  ####################
+  context "testing .render" do
+    context "rendering simple one element case" do
+      let(:expected) { "foo" }
+      let(:path) { TestUtils.path("foo") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering simple two element case" do
+      let(:expected) { "foo.bar" }
+      let(:path) { TestUtils.path("foo", "bar") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering non safe char in an element" do
+      let(:expected) { 'foo."bar*"' }
+      let(:path) { TestUtils.path("foo", "bar*") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering period in an element" do
+      let(:expected) { '"foo.bar"' }
+      let(:path) { TestUtils.path("foo.bar") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering hyphen in element" do
+      let(:expected) { "foo-bar" }
+      let(:path) { TestUtils.path("foo-bar") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering hyphen in element" do
+      let(:expected) { "foo_bar" }
+      let(:path) { TestUtils.path("foo_bar") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering element starting with a hyphen" do
+      let(:expected) { "-foo" }
+      let(:path) { TestUtils.path("-foo") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering element starting with a number" do
+      let(:expected) { "10foo" }
+      let(:path) { TestUtils.path("10foo") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering empty elements" do
+      let(:expected) { '"".""' }
+      let(:path) { TestUtils.path("", "") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering element with internal space" do
+      let(:expected) { '"foo bar"' }
+      let(:path) { TestUtils.path("foo bar") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering leading and trailing spaces" do
+      let(:expected) { '" foo "' }
+      let(:path) { TestUtils.path(" foo ") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering trailing space only" do
+      let(:expected) { '"foo "' }
+      let(:path) { TestUtils.path("foo ") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering number with decimal point" do
+      let(:expected) { "1.2" }
+      let(:path) { TestUtils.path("1", "2") }
+      include_examples "path_render_test"
+    end
+
+    context "rendering number with multiple decimal points" do
+      let(:expected) { "1.2.3.4" }
+      let(:path) { TestUtils.path("1", "2", "3", "4") }
+      include_examples "path_render_test"
+    end
+  end
+
+  context "test that paths made from a list of Path objects equal paths made from a list of strings" do
+    it "should find a path made from a list of one path equal to a path from one string" do
+      path_from_path_list = Path.from_path_list([TestUtils.path("foo")])
+      expected_path = TestUtils.path("foo")
+
+      expect(path_from_path_list).to eq(expected_path)
+    end
+
+    it "should find a path made from a list of multiple paths equal to that list of strings" do
+      path_from_path_list = Path.from_path_list([TestUtils.path("foo", "bar"),
+                                                 TestUtils.path("baz", "boo")])
+      expected_path = TestUtils.path("foo", "bar", "baz", "boo")
+
+      expect(path_from_path_list).to eq(expected_path)
+    end
+  end
+
+  context "prepending paths" do
+    it "should find prepending a single path works" do
+      prepended_path = TestUtils.path("bar").prepend(TestUtils.path("foo"))
+      expected_path = TestUtils.path("foo", "bar")
+
+      expect(prepended_path).to eq(expected_path)
+    end
+
+    it "should find prepending multiple paths works" do
+      prepended_path = TestUtils.path("c", "d").prepend(TestUtils.path("a", "b"))
+      expected_path = TestUtils.path("a", "b", "c", "d")
+
+      expect(prepended_path).to eq(expected_path)
+    end
+  end
+
+  context "path length" do
+    it "should find length of single part path to be 1" do
+      path = TestUtils.path("food")
+      expect(path.length).to eq(1)
+    end
+
+    it "should find length of two part path to be 2" do
+      path = TestUtils.path("foo", "bar")
+      expect(path.length).to eq(2)
+
+    end
+  end
+
+  context "parent paths" do
+    it "should find parent of single level path to be nil" do
+      path = TestUtils.path("a")
+
+      expect(path.parent).to be_nil
+    end
+
+    it "should find parent of a.b to be a" do
+      path = TestUtils.path("a", "b")
+      parent = TestUtils.path("a")
+
+      expect(path.parent).to eq(parent)
+    end
+
+    it "should find parent of a.b.c to be a.b" do
+      path = TestUtils.path("a", "b", "c")
+      parent = TestUtils.path("a", "b")
+
+      expect(path.parent).to eq(parent)
+    end
+  end
+
+  context "path last method" do
+    it "should find last of single level path to be itself" do
+      path = TestUtils.path("a")
+
+      expect(path.last).to eq("a")
+    end
+
+    it "should find last of a.b to be b" do
+      path = TestUtils.path("a", "b")
+
+      expect(path.last).to eq("b")
+    end
+  end
+
+  context "invalid paths" do
+    it "should catch exception from empty path" do
+      bad_path = ""
+      expect { Path.new_path(bad_path) }.to raise_error(Hocon::ConfigError::ConfigBadPathError)
+    end
+
+    it "should catch exception from path '..'" do
+      bad_path = ".."
+      expect { Path.new_path(bad_path) }.to raise_error(Hocon::ConfigError::ConfigBadPathError)
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/public_api_spec.rb b/spec/unit/typesafe/config/public_api_spec.rb
new file mode 100644
index 0000000..435d814
--- /dev/null
+++ b/spec/unit/typesafe/config/public_api_spec.rb
@@ -0,0 +1,520 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'test_utils'
+require 'hocon'
+require 'hocon/config_factory'
+require 'hocon/config_value_factory'
+require 'hocon/impl/config_delayed_merge_object'
+require 'hocon/impl/replaceable_merge_stack'
+require 'hocon/config_util'
+
+# Note: Skipping many tests that rely on java's System.getProperties functionality,
+#   which lets you access things like "os.name", "java.vendor", and "user.home"
+# Also skipping
+
+ConfigFactory = Hocon::ConfigFactory
+ConfigValueFactory = Hocon::ConfigValueFactory
+SimpleConfigObject = Hocon::Impl::SimpleConfigObject
+SimpleConfigList = Hocon::Impl::SimpleConfigList
+ConfigUtil = Hocon::ConfigUtil
+
+shared_examples_for "test_from_value" do
+  default_value_description = "hardcoded value"
+
+  specify "create_from made into a config value should equal the expected value" do
+    expect(Hocon::ConfigValueFactory.from_any_ref(create_from)).to eq(expected_value)
+  end
+
+  specify "create_from made into a config value with origin description should equal the expected value" do
+    expect(Hocon::ConfigValueFactory.from_any_ref(create_from, "foo")).to eq(expected_value)
+  end
+
+  specify "descriptions match" do
+    if create_from.is_a?(Hocon::ConfigValue)
+      # description is ignored for createFrom that is already a ConfigValue
+      expect(Hocon::ConfigValueFactory.from_any_ref(create_from).origin.description).to eq(create_from.origin.description)
+    else
+      expect(Hocon::ConfigValueFactory.from_any_ref(create_from).origin.description).to eq(default_value_description)
+      expect(Hocon::ConfigValueFactory.from_any_ref(create_from, "foo").origin.description).to eq("foo")
+    end
+  end
+end
+
+describe "basic load and get" do
+  conf = ConfigFactory.load_file(TestUtils.resource_file("test01"))
+
+  specify "should be able to see some values in the config object" do
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+    child = conf.get_config("ints")
+
+    expect(child.get_int("fortyTwo")).to eq(42)
+  end
+end
+
+describe "loading JSON only" do
+  options = Hocon::ConfigParseOptions.defaults.set_syntax(Hocon::ConfigSyntax::JSON)
+  conf = ConfigFactory.load_file_with_parse_options(TestUtils.resource_file("test01"), options)
+
+  specify "should be missing value specific to CONF files" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.get_int("ints.fortyTwo")
+    end
+  end
+
+  specify "should find value specific to the JSON file" do
+    expect(conf.get_int("fromJson1")).to eq(1)
+  end
+end
+
+describe "loading CONF only" do
+  options = Hocon::ConfigParseOptions.defaults.set_syntax(Hocon::ConfigSyntax::CONF)
+  conf = ConfigFactory.load_file_with_parse_options(TestUtils.resource_file("test01"), options)
+
+  specify "should be missing value specific to JSON files" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.get_int("fromJson1")
+    end
+
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.get_int("fromProps.one")
+    end
+  end
+
+  specify "should find value specific to the CONF file" do
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+  end
+end
+
+describe "ConfigFactory#load_file_with_resolve_options" do
+  options = Hocon::ConfigResolveOptions.defaults
+  conf = ConfigFactory.load_file_with_resolve_options(TestUtils.resource_file("test01"), options)
+
+  specify "sanity check to make sure load_file_with_resolve_options doesn't act strange" do
+    expect(conf.get_int("ints.fortyTwo")).to eq(42)
+  end
+end
+
+describe "empty configs" do
+  empty = ConfigFactory.empty
+  empty_foo = ConfigFactory.empty("foo")
+
+  specify "empty config is empty" do
+    expect(empty.empty?).to be true
+  end
+
+  specify "empty config's origin should be 'empty config'" do
+    expect(empty.origin.description).to eq("empty config")
+  end
+
+  specify "empty config with origin description is empty" do
+    expect(empty_foo.empty?).to be true
+  end
+
+  specify "empty config with origin description 'foo' is having its description set" do
+    expect(empty_foo.origin.description).to eq("foo")
+  end
+end
+
+describe "Creating objects with ConfigValueFactory" do
+  context "from true" do
+    let(:expected_value) { TestUtils.bool_value(true) }
+    let(:create_from) { true }
+
+    include_examples "test_from_value"
+  end
+
+  context "from false" do
+    let(:expected_value) { TestUtils.bool_value(false) }
+    let(:create_from) { false }
+
+    include_examples "test_from_value"
+  end
+
+  context "from nil" do
+    let(:expected_value) { TestUtils.null_value }
+    let(:create_from) { nil }
+
+    include_examples "test_from_value"
+  end
+
+  context "from int" do
+    let(:expected_value) { TestUtils.int_value(5) }
+    let(:create_from) { 5 }
+
+    include_examples "test_from_value"
+  end
+
+  context "from float" do
+    let(:expected_value) { TestUtils.double_value(3.14) }
+    let(:create_from) { 3.14 }
+
+    include_examples "test_from_value"
+  end
+
+  context "from string" do
+    let(:expected_value) { TestUtils.string_value("hello world") }
+    let(:create_from) { "hello world" }
+
+    include_examples "test_from_value"
+  end
+
+  context "from empty hash" do
+    let(:expected_value) { SimpleConfigObject.new(TestUtils.fake_origin, {}) }
+    let(:create_from) { {} }
+
+    include_examples "test_from_value"
+  end
+
+  context "from populated hash" do
+    value_hash = TestUtils.config_map({"a" => 1, "b" => 2, "c" => 3})
+
+    let(:expected_value) { SimpleConfigObject.new(TestUtils.fake_origin, value_hash) }
+    let(:create_from) { {"a" => 1, "b" => 2, "c" => 3} }
+
+    include_examples "test_from_value"
+
+    specify "from_map should also work" do
+      # from_map is just a wrapper around from_any_ref
+      expect(ConfigValueFactory.from_map({"a" => 1, "b" => 2, "c" => 3}).origin.description).to eq("hardcoded value")
+      expect(ConfigValueFactory.from_map({"a" => 1, "b" => 2, "c" => 3}, "foo").origin.description).to eq("foo")
+    end
+  end
+
+  context "from empty array" do
+    let(:expected_value) { SimpleConfigList.new(TestUtils.fake_origin, []) }
+    let(:create_from) { [] }
+
+    include_examples "test_from_value"
+  end
+
+  context "from populated array" do
+    value_array = [1, 2, 3].map { |v| TestUtils.int_value(v) }
+
+    let(:expected_value) { SimpleConfigList.new(TestUtils.fake_origin, value_array) }
+    let(:create_from) { [1, 2, 3] }
+
+    include_examples "test_from_value"
+  end
+
+  # Omitting tests that involve trees and iterators
+  # Omitting tests using units (memory size, duration, etc)
+
+  context "from existing Config values" do
+    context "from int" do
+      let(:expected_value) { TestUtils.int_value(1000) }
+      let(:create_from) { TestUtils.int_value(1000) }
+
+      include_examples "test_from_value"
+    end
+
+    context "from string" do
+      let(:expected_value) { TestUtils.string_value("foo") }
+      let(:create_from) { TestUtils.string_value("foo") }
+
+      include_examples "test_from_value"
+    end
+
+    context "from hash" do
+      int_map = {"a" => 1, "b" => 2, "c" => 3}
+      let(:expected_value) { SimpleConfigObject.new(TestUtils.fake_origin, TestUtils.config_map(int_map)) }
+      let(:create_from) { SimpleConfigObject.new(TestUtils.fake_origin, TestUtils.config_map(int_map)) }
+
+      include_examples "test_from_value"
+    end
+  end
+
+  context "from existing list of Config values" do
+    int_list = [1, 2, 3].map { |v| TestUtils.int_value(v) }
+
+    let(:expected_value) { SimpleConfigList.new(TestUtils.fake_origin, int_list) }
+    let(:create_from) { int_list }
+
+    include_examples "test_from_value"
+  end
+end
+
+describe "round tripping unwrap" do
+  conf = ConfigFactory.load_file(TestUtils.resource_file("test01"))
+
+  unwrapped = conf.root.unwrapped
+
+  rewrapped = ConfigValueFactory.from_map(unwrapped, conf.origin.description)
+  reunwrapped = rewrapped.unwrapped
+
+  specify "conf has a lot of stuff in it" do
+    expect(conf.root.size).to be > 4
+  end
+
+  specify "rewrapped conf equals conf" do
+    expect(rewrapped).to eq(conf.root)
+  end
+
+  specify "reunwrapped conf equals unwrapped conf" do
+    expect(unwrapped).to eq(reunwrapped)
+  end
+end
+
+# Omitting Tests (and functionality) for ConfigFactory.parse_map until I know if it's
+# a priority
+
+describe "default parse options" do
+  def check_not_found(e)
+    ["No such", "not found", "were found"].any? { |string| e.message.include?(string)}
+  end
+
+  let(:defaults) { Hocon::ConfigParseOptions::defaults }
+
+  specify "allow missing == true" do
+    expect(defaults.allow_missing?).to be true
+  end
+
+  specify "includer == nil" do
+    expect(defaults.includer).to be_nil
+  end
+
+  specify "origin description == nil" do
+    expect(defaults.origin_description).to be_nil
+  end
+
+  specify "syntax == nil" do
+    expect(defaults.syntax).to be_nil
+  end
+
+  context "allow missing with ConfigFactory#parse_file" do
+    specify "nonexistent conf throws error when allow_missing? == false" do
+      allow_missing_false = Hocon::ConfigParseOptions::defaults.set_allow_missing(false)
+
+      e = TestUtils.intercept(Hocon::ConfigError::ConfigIOError) do
+        ConfigFactory.parse_file(TestUtils.resource_file("nonexistant.conf"), allow_missing_false)
+      end
+
+      expect(check_not_found(e)).to be true
+    end
+
+    specify "nonexistent conf returns empty conf when allow_missing? == true" do
+      allow_missing_true = Hocon::ConfigParseOptions::defaults.set_allow_missing(true)
+
+      conf = ConfigFactory.parse_file(TestUtils.resource_file("nonexistant.conf"), allow_missing_true)
+
+      expect(conf.empty?).to be true
+    end
+  end
+
+  context "allow missing with ConfigFactory#parse_file_any_syntax" do
+    specify "nonexistent conf throws error when allow_missing? == false" do
+      allow_missing_false = Hocon::ConfigParseOptions::defaults.set_allow_missing(false)
+
+      e = TestUtils.intercept(Hocon::ConfigError::ConfigIOError) do
+        ConfigFactory.parse_file_any_syntax(TestUtils.resource_file("nonexistant"), allow_missing_false)
+      end
+
+      expect(check_not_found(e)).to be true
+    end
+
+    specify "nonexistent conf returns empty conf when allow_missing? == true" do
+      allow_missing_true = Hocon::ConfigParseOptions::defaults.set_allow_missing(true)
+
+      conf = ConfigFactory.parse_file_any_syntax(TestUtils.resource_file("nonexistant"), allow_missing_true)
+
+      expect(conf.empty?).to be true
+    end
+  end
+
+  # Omitting ConfigFactory.parse_resources_any_syntax since we're not supporting it
+  context "allow missing shouldn't mess up includes" do
+    # test03.conf contains some nonexistent includes. check that
+    # setAllowMissing on the file (which is not missing) doesn't
+    # change that the includes are allowed to be missing.
+    # This can break because some options might "propagate" through
+    # to includes, but we don't want them all to do so.
+
+    allow_missing_true = Hocon::ConfigParseOptions::defaults.set_allow_missing(true)
+    allow_missing_false = Hocon::ConfigParseOptions::defaults.set_allow_missing(false)
+
+    conf = ConfigFactory.parse_file(TestUtils.resource_file("test03.conf"), allow_missing_false)
+    conf2 = ConfigFactory.parse_file(TestUtils.resource_file("test03.conf"), allow_missing_true)
+
+    specify "conf should have stuff from test01.conf" do
+      expect(conf.get_int("test01.booleans")).to eq(42)
+    end
+
+    specify "both confs should be equal regardless of allow_missing being true or false" do
+      expect(conf).to eq(conf2)
+    end
+  end
+end
+
+# Omitting test that creates a subclass of ConfigIncluder to record everything that's
+# included by a .conf file. It's complex and we've decided the functionality is well
+# tested elsewhere and right now it isn't worth the effort.
+
+describe "string parsing" do
+  specify "should parse correctly" do
+    conf = ConfigFactory.parse_string("{ a : b }", Hocon::ConfigParseOptions.defaults)
+
+    expect(conf.get_string("a")).to eq("b")
+  end
+end
+
+
+# Omitting tests for parse_file_any_syntax in the interests of time since this has already
+# been tested above
+
+# Omitting classpath tests
+
+describe "config_utils" do
+  # This is to test the public wrappers around ConfigImplUtils
+
+  specify "can join and split paths" do
+    expect(ConfigUtil.join_path("", "a", "b", "$")).to eq("\"\".a.b.\"$\"")
+    expect(ConfigUtil.join_path_from_list(["", "a", "b", "$"])).to eq("\"\".a.b.\"$\"")
+    expect(ConfigUtil.split_path("\"\".a.b.\"$\"")).to eq(["", "a", "b", "$"])
+  end
+
+  specify "should throw errors on invalid paths" do
+    TestUtils.intercept(Hocon::ConfigError) do
+      ConfigUtil.split_path("$")
+    end
+
+    TestUtils.intercept(Hocon::ConfigError) do
+      # no args
+      ConfigUtil.join_path
+    end
+
+    TestUtils.intercept(Hocon::ConfigError) do
+      # empty list
+      ConfigUtil.join_path_from_list([])
+    end
+  end
+
+  specify "should quote strings correctly" do
+    expect(ConfigUtil.quote_string("")).to eq("\"\"")
+    expect(ConfigUtil.quote_string("a")).to eq("\"a\"")
+    expect(ConfigUtil.quote_string("\n")).to eq("\"\\n\"")
+  end
+end
+
+# Omitting tests that use class loaders
+
+describe "detecting cycles" do
+  specify "should detect a cycle" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) do
+      ConfigFactory.load_file(TestUtils.resource_file("cycle.conf"))
+    end
+
+    # Message mentioning cycle
+    expect(e.message).to include("include statements nested")
+  end
+end
+
+describe "including from list" do
+  # We would ideally make this case NOT throw an exception but we need to do some work
+  # to get there, see https://github.com/typesafehub/config/issues/160
+  specify "should throw error when trying to include from list" do
+    e = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) do
+      ConfigFactory.load_file(TestUtils.resource_file("include-from-list.conf"))
+    end
+
+    # Message mentioning current implementation limitations
+    expect(e.message).to include("limitation")
+  end
+end
+
+# Omitting tests using System.getProperty since it's java specific
+
+# Omitting serialization tests since we aren't supporting it
+
+describe "using some values without resolving" do
+  conf = ConfigFactory.parse_string("a=42,b=${NOPE}")
+
+  specify "should be able to use some values without resolving" do
+    expect(conf.get_int("a")).to eq(42)
+  end
+
+  specify "unresolved value should throw error" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigNotResolvedError) do
+      conf.get_int("b")
+    end
+  end
+end
+
+describe "include file statements" do
+  conf = ConfigFactory.parse_file(TestUtils.resource_file("file-include.conf"))
+
+  specify "should find values from each included file" do
+    expect(conf.get_int("base")).to eq(41)
+    expect(conf.get_int("foo")).to eq(42)
+    expect(conf.get_int("bar")).to eq(43)
+    # these two do not work right now, because we do not
+    # treat the filename as relative to the including file
+    # if file() is specified, so `include file("bar-file.conf")`
+    # fails.
+    #assertEquals("got bar-file.conf", 44, conf.getInt("bar-file"))
+    #assertEquals("got subdir/baz.conf", 45, conf.getInt("baz"))
+  end
+
+  specify "should not find certain paths" do
+    expect(conf.has_path?("bar-file")).to be false
+    expect(conf.has_path?("baz")).to be false
+  end
+end
+
+describe "Config#has_path_or_null" do
+  conf = ConfigFactory.parse_string("x.a=null,x.b=42")
+
+  specify "has_path_or_null returns correctly" do
+    # hasPath says false for null
+    expect(conf.has_path?("x.a")).to be false
+    # hasPathOrNull says true for null
+    expect(conf.has_path_or_null?("x.a")).to be true
+
+    # hasPath says true for non-null
+    expect(conf.has_path?("x.b")).to be true
+    # hasPathOrNull says true for non-null
+    expect(conf.has_path_or_null?("x.b")).to be true
+
+    # hasPath says false for missing
+    expect(conf.has_path?("x.c")).to be false
+    # hasPathOrNull says false for missing
+    expect(conf.has_path_or_null?("x.c")).to be false
+
+    # hasPath says false for missing under null
+    expect(conf.has_path?("x.a.y")).to be false
+    # hasPathOrNull says false for missing under null
+    expect(conf.has_path_or_null?("x.a.y")).to be false
+
+    # hasPath says false for missing under missing
+    expect(conf.has_path?("x.c.y")).to be false
+    # hasPathOrNull says false for missing under missing
+    expect(conf.has_path_or_null?("x.c.y")).to be false
+
+  end
+end
+
+describe "Config#get_is_null" do
+  conf = ConfigFactory.parse_string("x.a=null,x.b=42")
+
+  specify "should return whether or not values are null correctly" do
+    expect(conf.is_null?("x.a")).to be true
+    expect(conf.is_null?("x.b")).to be false
+  end
+
+  specify "should throw error for missing values" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.is_null?("x.c")
+    end
+  end
+
+  specify "should throw error for missing underneath null" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.is_null?("x.a.y")
+    end
+  end
+
+  specify "should throw error for missing underneath missing" do
+    TestUtils.intercept(Hocon::ConfigError::ConfigMissingError) do
+      conf.is_null?("x.c.y")
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/simple_config_spec.rb b/spec/unit/typesafe/config/simple_config_spec.rb
new file mode 100644
index 0000000..32709c3
--- /dev/null
+++ b/spec/unit/typesafe/config/simple_config_spec.rb
@@ -0,0 +1,112 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon/config_factory'
+require 'hocon/config_render_options'
+require 'hocon/config_value_factory'
+
+describe Hocon::Impl::SimpleConfig do
+  let(:render_options) { Hocon::ConfigRenderOptions.defaults }
+
+  before do
+    render_options.origin_comments = false
+    render_options.json = false
+  end
+
+  shared_examples_for "config_value_retrieval_single_value" do
+    let(:input_file)  { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should allow you to get a value for a specific configuration setting" do
+      expect(conf.get_value(setting).transform_to_string).to eq(expected_setting)
+    end
+  end
+
+  shared_examples_for "config_value_retrieval_config_list" do
+    let(:input_file)  { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should allow you to get a value for a setting whose value is a data structure" do
+      expect(conf.get_value(setting).
+                 render_value_to_sb(StringIO.new, 2, nil,
+                                    Hocon::ConfigRenderOptions.new(false, false, false, false)).
+                 string).to eq(expected_setting)
+    end
+  end
+
+  shared_examples_for "has_path_check" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should return true if a path exists" do
+      expect(conf.has_path?(setting)).to eql(true)
+    end
+
+    it "should return false if a path does not exist" do
+      expect(conf.has_path?(false_setting)).to eq(false)
+    end
+  end
+
+  shared_examples_for "add_value_to_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should add desired setting with desired value" do
+      modified_conf = conf.with_value(setting_to_add, value_to_add)
+      expect(modified_conf.get_value(setting_to_add)).to eq(value_to_add)
+    end
+  end
+
+  shared_examples_for "add_data_structures_to_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should add a nested map to a config" do
+      map = Hocon::ConfigValueFactory.from_any_ref({"a" => "b", "c" => {"d" => "e"}}, nil)
+      modified_conf = conf.with_value(setting_to_add, map)
+      expect(modified_conf.get_value(setting_to_add)).to eq(map)
+    end
+
+    it "should add an array to a config" do
+      array = Hocon::ConfigValueFactory.from_any_ref([1,2,3,4,5], nil)
+      modified_conf = conf.with_value(setting_to_add, array)
+      expect(modified_conf.get_value(setting_to_add)).to eq(array)
+    end
+  end
+
+  shared_examples_for "remove_value_from_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should remove desired setting" do
+      modified_conf = conf.without_path(setting_to_remove)
+      expect(modified_conf.has_path?(setting_to_remove)).to be false
+    end
+  end
+
+  context "example1" do
+    let(:example) { EXAMPLE1 }
+    let(:setting) { "foo.bar.yahoo" }
+    let(:expected_setting) { "yippee" }
+    let(:false_setting) { "non-existent" }
+    let(:setting_to_add) { "foo.bar.test" }
+    let(:value_to_add) { Hocon::Impl::ConfigString.new(nil, "This is a test string") }
+    let(:setting_to_remove) { "foo.bar" }
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_value_retrieval_single_value"
+      include_examples "has_path_check"
+      include_examples "add_value_to_config"
+      include_examples "add_data_structures_to_config"
+      include_examples "remove_value_from_config"
+    end
+  end
+
+  context "example2" do
+    let(:example) { EXAMPLE2 }
+    let(:setting) { "jruby-puppet.jruby-pools" }
+    let(:expected_setting) { "[{environment=production}]" }
+    let(:false_setting) { "jruby-puppet-false" }
+    let(:setting_to_add) { "top" }
+    let(:value_to_add) { Hocon::Impl::ConfigInt.new(nil, 12345, "12345") }
+    let(:setting_to_remove) { "jruby-puppet.master-conf-dir" }
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_value_retrieval_config_list"
+      include_examples "has_path_check"
+      include_examples "add_value_to_config"
+      include_examples "add_data_structures_to_config"
+      include_examples "remove_value_from_config"
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/token_spec.rb b/spec/unit/typesafe/config/token_spec.rb
new file mode 100644
index 0000000..0d17350
--- /dev/null
+++ b/spec/unit/typesafe/config/token_spec.rb
@@ -0,0 +1,188 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+require 'pp'
+
+
+describe Hocon::Impl::Token do
+  Tokens = Hocon::Impl::Tokens
+
+  ####################
+  # Equality
+  ####################
+  context "check token equality" do
+    context "syntax tokens" do
+      let(:first_object) { Tokens::START }
+      let(:second_object) { Tokens::START }
+
+      include_examples "object_equality"
+    end
+
+    context "integer tokens" do
+      let(:first_object) { TestUtils.token_int(42) }
+      let(:second_object) { TestUtils.token_int(42) }
+
+      include_examples "object_equality"
+    end
+
+    context "truth tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_true }
+
+      include_examples "object_equality"
+    end
+
+    context "int and double of the same value" do
+      let(:first_object) { TestUtils.token_int(10) }
+      let(:second_object) { TestUtils.token_double(10.0) }
+
+      include_examples "object_equality"
+    end
+
+    context "double tokens" do
+      let(:first_object) { TestUtils.token_double(3.14) }
+      let(:second_object) { TestUtils.token_double(3.14) }
+
+      include_examples "object_equality"
+    end
+
+    context "quoted string tokens" do
+      let(:first_object) { TestUtils.token_string("foo") }
+      let(:second_object) { TestUtils.token_string("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "unquoted string tokens" do
+      let(:first_object) { TestUtils.token_unquoted("foo") }
+      let(:second_object) { TestUtils.token_unquoted("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "key substitution tokens" do
+      let(:first_object) { TestUtils.token_key_substitution("foo") }
+      let(:second_object) { TestUtils.token_key_substitution("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "null tokens" do
+      let(:first_object) { TestUtils.token_null }
+      let(:second_object) { TestUtils.token_null }
+
+      include_examples "object_equality"
+    end
+
+    context "newline tokens" do
+      let(:first_object) { TestUtils.token_line(10) }
+      let(:second_object) { TestUtils.token_line(10) }
+
+      include_examples "object_equality"
+    end
+  end
+
+
+  ####################
+  # Inequality
+  ####################
+  context "check token inequality" do
+    context "syntax tokens" do
+      let(:first_object) { Tokens::START }
+      let(:second_object) { Tokens::OPEN_CURLY }
+
+      include_examples "object_inequality"
+    end
+
+    context "integer tokens" do
+      let(:first_object) { TestUtils.token_int(42) }
+      let(:second_object) { TestUtils.token_int(43) }
+
+      include_examples "object_inequality"
+    end
+
+    context "double tokens" do
+      let(:first_object) { TestUtils.token_double(3.14) }
+      let(:second_object) { TestUtils.token_double(4.14) }
+
+      include_examples "object_inequality"
+    end
+
+    context "truth tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_false }
+
+      include_examples "object_inequality"
+    end
+
+    context "quoted string tokens" do
+      let(:first_object) { TestUtils.token_string("foo") }
+      let(:second_object) { TestUtils.token_string("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "unquoted string tokens" do
+      let(:first_object) { TestUtils.token_unquoted("foo") }
+      let(:second_object) { TestUtils.token_unquoted("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "key substitution tokens" do
+      let(:first_object) { TestUtils.token_key_substitution("foo") }
+      let(:second_object) { TestUtils.token_key_substitution("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "newline tokens" do
+      let(:first_object) { TestUtils.token_line(10) }
+      let(:second_object) { TestUtils.token_line(11) }
+
+      include_examples "object_inequality"
+    end
+
+    context "true and int tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_int(1) }
+
+      include_examples "object_inequality"
+    end
+
+    context "string 'true' and true tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_string("true") }
+
+      include_examples "object_inequality"
+    end
+
+    context "int and double of slightly different values" do
+      let(:first_object) { TestUtils.token_int(10) }
+      let(:second_object) { TestUtils.token_double(10.000001) }
+
+      include_examples "object_inequality"
+    end
+  end
+
+  context "Check that to_s doesn't throw exception" do
+    it "shouldn't throw an exception" do
+      # just be sure to_s doesn't throw an exception. It's for debugging
+      # so its exact output doesn't matter a lot
+      TestUtils.token_true.to_s
+      TestUtils.token_false.to_s
+      TestUtils.token_int(42).to_s
+      TestUtils.token_double(3.14).to_s
+      TestUtils.token_null.to_s
+      TestUtils.token_unquoted("foo").to_s
+      TestUtils.token_string("bar").to_s
+      TestUtils.token_key_substitution("a").to_s
+      TestUtils.token_line(10).to_s
+      Tokens::START.to_s
+      Tokens::EOF.to_s
+      Tokens::COLON.to_s
+    end
+  end
+end
diff --git a/spec/unit/typesafe/config/tokenizer_spec.rb b/spec/unit/typesafe/config/tokenizer_spec.rb
new file mode 100644
index 0000000..ffa7fa9
--- /dev/null
+++ b/spec/unit/typesafe/config/tokenizer_spec.rb
@@ -0,0 +1,801 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+require 'pp'
+
+
+describe Hocon::Impl::Tokenizer do
+  Tokens = Hocon::Impl::Tokens
+
+  shared_examples_for "token_matching" do
+    it "should match the tokenized string to the list of expected tokens" do
+      tokenized_from_string = TestUtils.tokenize_as_list(test_string)
+      tokenized_as_string = TestUtils.tokenize_as_string(test_string)
+
+      # Add START and EOF tokens
+      wrapped_tokens = TestUtils.wrap_tokens(expected_tokens)
+
+      # Compare the two lists of tokens
+      expect(tokenized_from_string).to eq(wrapped_tokens)
+      expect(tokenized_as_string).to eq(test_string)
+    end
+  end
+
+  shared_examples_for "strings_with_problems" do
+    it "should find a problem when tokenizing" do
+      token_list = TestUtils.tokenize_as_list(test_string)
+      expect(token_list.map { |token| Tokens.problem?(token) }).to include(true)
+    end
+  end
+
+  ####################
+  # Whitespace
+  ####################
+  context "tokenizing whitespace" do
+    context "tokenize empty string" do
+      let(:test_string) { "" }
+      let(:expected_tokens) { [] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize newlines" do
+      let(:test_string) { "\n\n" }
+      let(:expected_tokens) { [TestUtils.token_line(1),
+                               TestUtils.token_line(2)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text should keep spaces" do
+      let(:test_string) { "    foo     \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace("    "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_whitespace("     "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text with internal spaces should keep spaces" do
+      let(:test_string) { "    foo bar baz   \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace("    "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("bar"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("baz"),
+                               TestUtils.token_whitespace("   "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Booleans and Null
+  ####################
+  context "tokenizing booleans and null" do
+    context "tokenize true and unquoted text" do
+      let(:test_string) { "truefoo" }
+      let(:expected_tokens) { [TestUtils.token_true,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize false and unquoted text" do
+      let(:test_string) { "falsefoo" }
+      let(:expected_tokens) { [TestUtils.token_false,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize null and unquoted text" do
+      let(:test_string) { "nullfoo" }
+      let(:expected_tokens) { [TestUtils.token_null,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text containing true" do
+      let(:test_string) { "footrue" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("footrue")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text containing space and true" do
+      let(:test_string) { "foo true" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_true] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize true and space and unquoted text" do
+      let(:test_string) { "true foo" }
+      let(:expected_tokens) { [TestUtils.token_true,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Slashes
+  ####################
+  context "tokenizing slashes" do
+    context "tokenize unquoted text containing slash" do
+      let(:test_string) { "a/b/c/" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("a/b/c/")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash" do
+      let(:test_string) { "/" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("/")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash space slash" do
+      let(:test_string) { "/ /" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("/"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("/")] }
+
+      include_examples "token_matching"
+    end
+
+    ####################
+    # Quotes
+    ####################
+    context "tokenize mixed unquoted and quoted" do
+      let(:test_string) { "    foo\"bar\"baz   \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace("    "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted("baz"),
+                               TestUtils.token_whitespace("   "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize empty triple quoted string" do
+      let(:test_string) { '""""""' }
+      let(:expected_tokens) { [TestUtils.token_string("")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize trivial triple quoted string" do
+      let(:test_string) { '"""bar"""' }
+      let(:expected_tokens) { [TestUtils.token_string("bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize no escapes in triple quoted string" do
+      let(:test_string) { '"""\n"""' }
+      let(:expected_tokens) { [TestUtils.token_string('\n')] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize trailing quotes in triple quoted string" do
+      let(:test_string) { '"""""""""' }
+      let(:expected_tokens) { [TestUtils.token_string('"""')] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize new line in triple quoted string" do
+      let(:test_string) { '"""foo\nbar"""' }
+      let(:expected_tokens) { [TestUtils.token_string('foo\nbar')] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Find problems when tokenizing
+  ####################
+  context "finding problems when tokenizing" do
+    context "nothing after backslash" do
+      let(:test_string) { ' "\" ' }
+      include_examples "strings_with_problems"
+    end
+
+    context "there is no \q escape sequence" do
+      let(:test_string) { ' "\q" ' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing a byte" do
+      let(:test_string) { '"\u123"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing two bytes" do
+      let(:test_string) { '"\u12"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing three bytes" do
+      let(:test_string) { '"\u1"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte missing" do
+      let(:test_string) { '"\u"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "just a single quote" do
+      let(:test_string) { '"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "no end quote" do
+      let(:test_string) { ' "abcdefg' }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a backslash" do
+      let(:test_string) { '\"\\' }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a $" do
+      let(:test_string) { "$" }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a ${" do
+      let(:test_string) { "${" }
+      include_examples "strings_with_problems"
+    end
+  end
+
+  ####################
+  # Numbers
+  ####################
+  context "tokenizing numbers" do
+    context "parse positive float" do
+      let(:test_string) { "1.2" }
+      let(:expected_tokens) { [TestUtils.token_double(1.2)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative float" do
+      let(:test_string) { "-1.2" }
+      let(:expected_tokens) { [TestUtils.token_double(-1.2)] }
+      include_examples "token_matching"
+    end
+
+    context "parse exponent notation" do
+      let(:test_string) { "1e6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative exponent" do
+      let(:test_string) { "1e-6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e-6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse exponent with capital E" do
+      let(:test_string) { "1E-6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e-6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative int" do
+      let(:test_string) { "-1" }
+      let(:expected_tokens) { [TestUtils.token_int(-1)] }
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Comments
+  ####################
+  context "tokenizing comments" do
+    context "tokenize two slashes as comment" do
+      let(:test_string) { "//" }
+      let(:expected_tokens) { [TestUtils.token_comment_double_slash("")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize two slashes in string as string" do
+      let(:test_string) { '"//bar"' }
+      let(:expected_tokens) { [TestUtils.token_string("//bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash in string as string" do
+      let(:test_string) { '"#bar"' }
+      let(:expected_tokens) { [TestUtils.token_string("#bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after unquoted text" do
+      let(:test_string) { "bar//comment" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("bar"),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after unquoted text" do
+      let(:test_string) { "bar#comment" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("bar"),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after int" do
+      let(:test_string) { "10//comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after int" do
+      let(:test_string) { "10#comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after int" do
+      let(:test_string) { "10#comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after float" do
+      let(:test_string) { "3.14//comment" }
+      let(:expected_tokens) { [TestUtils.token_double(3.14),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after float" do
+      let(:test_string) { "3.14#comment" }
+      let(:expected_tokens) { [TestUtils.token_double(3.14),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment with newline" do
+      let(:test_string) { "10//comment\n12" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_double_slash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_int(12)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment with newline" do
+      let(:test_string) { "10#comment\n12" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_int(12)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comments on two consecutive lines" do
+      let(:test_string) { "//comment\n//comment2" }
+      let(:expected_tokens) { [TestUtils.token_comment_double_slash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_comment_double_slash("comment2")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comments on two consecutive lines" do
+      let(:test_string) { "#comment\n#comment2" }
+      let(:expected_tokens) { [TestUtils.token_comment_hash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_comment_hash("comment2")] }
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comments on multiple lines with whitespace" do
+      let(:test_string) { "        //comment\r\n        //comment2        \n//comment3        \n\n//comment4" }
+      let(:expected_tokens) { [TestUtils.token_whitespace("        "),
+                               TestUtils.token_comment_double_slash("comment\r"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace("        "),
+                               TestUtils.token_comment_double_slash("comment2        "),
+                               TestUtils.token_line(2),
+                               TestUtils.token_comment_double_slash("comment3        "),
+                               TestUtils.token_line(3),
+                               TestUtils.token_line(4),
+                               TestUtils.token_comment_double_slash("comment4")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comments on multiple lines with whitespace" do
+      let(:test_string) { "        #comment\r\n        #comment2        \n#comment3        \n\n#comment4" }
+      let(:expected_tokens) { [TestUtils.token_whitespace("        "),
+                               TestUtils.token_comment_hash("comment\r"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace("        "),
+                               TestUtils.token_comment_hash("comment2        "),
+                               TestUtils.token_line(2),
+                               TestUtils.token_comment_hash("comment3        "),
+                               TestUtils.token_line(3),
+                               TestUtils.token_line(4),
+                               TestUtils.token_comment_hash("comment4")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Brackets, braces
+  ####################
+  context "tokenizing brackets and braces" do
+    context "tokenize open curly braces" do
+      let(:test_string) { "{{" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::OPEN_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize close curly braces" do
+      let(:test_string) { "}}" }
+      let(:expected_tokens) { [Tokens::CLOSE_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close curly braces" do
+      let(:test_string) { "{}" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close curly braces" do
+      let(:test_string) { "{}" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open square brackets" do
+      let(:test_string) { "[[" }
+      let(:expected_tokens) { [Tokens::OPEN_SQUARE, Tokens::OPEN_SQUARE] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize close square brackets" do
+      let(:test_string) { "]]" }
+      let(:expected_tokens) { [Tokens::CLOSE_SQUARE, Tokens::CLOSE_SQUARE] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close square brackets" do
+      let(:test_string) { "[]" }
+      let(:expected_tokens) { [Tokens::OPEN_SQUARE, Tokens::CLOSE_SQUARE] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # comma, colon, equals, plus equals
+  ####################
+  context "tokenizing comma, colon, equals, and plus equals" do
+    context "tokenize comma" do
+      let(:test_string) { "," }
+      let(:expected_tokens) { [Tokens::COMMA] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize colon" do
+      let(:test_string) { ":" }
+      let(:expected_tokens) { [Tokens::COLON] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize equals" do
+      let(:test_string) { "=" }
+      let(:expected_tokens) { [Tokens::EQUALS] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize plus equals" do
+      let(:test_string) { "+=" }
+      let(:expected_tokens) { [Tokens::PLUS_EQUALS] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize comma, colon, plus equals, and equals together" do
+      let(:test_string) { "=:,+=" }
+      let(:expected_tokens) { [Tokens::EQUALS,
+                               Tokens::COLON,
+                               Tokens::COMMA,
+                               Tokens::PLUS_EQUALS] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Substitutions
+  ####################
+  context "tokenizing substitutions" do
+    context "tokenize substitution" do
+      let(:test_string) { "${a.b}" }
+      let(:expected_tokens) { [TestUtils.token_substitution(TestUtils.token_unquoted("a.b"))] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize optional substitution" do
+      let(:test_string) { "${?x.y}" }
+      let(:expected_tokens) { [TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y"))] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize key substitution" do
+      let(:test_string) { '${"c.d"}' }
+      let(:expected_tokens) { [TestUtils.token_key_substitution("c.d")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Unicode and escape characters
+  ####################
+  context "tokenizing unicode and escape characters" do
+    context "tokenize unicode infinity symbol" do
+      let(:test_string) { '"\u221E"' }
+      let(:expected_tokens) { [TestUtils.token_string("\u{221E}")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize null byte" do
+      let(:test_string) { ' "\u0000" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("\u0000"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize various escape codes" do
+      let(:test_string) { ' "\"\\\/\b\f\n\r\t" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                              TestUtils.token_string("\"\\/\b\f\n\r\t"),
+                              TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unicode F" do
+      let(:test_string) { ' "\u0046" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("F"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize two unicode Fs" do
+      let(:test_string) { ' "\u0046\u0046" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("FF"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Reserved Characters
+  ####################
+  context "Finding problems with using reserved characters" do
+    context "problem with reserved character +" do
+      let(:test_string) { "+" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character `" do
+      let(:test_string) { "`" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character ^" do
+      let(:test_string) { "^" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character ?" do
+      let(:test_string) { "?" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character !" do
+      let(:test_string) { "!" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character @" do
+      let(:test_string) { "@" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character *" do
+      let(:test_string) { "*" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character &" do
+      let(:test_string) { "&" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character \\" do
+      let(:test_string) { "\\" }
+      include_examples "strings_with_problems"
+    end
+  end
+
+  ####################
+  # Combine all types
+  ####################
+  context "Tokenizing all types together" do
+    context "tokenize all types no spaces" do
+      let(:test_string) { ',:=}{][+="foo""""bar"""true3.14false42null${a.b}${?x.y}${"c.d"}' + "\n" }
+      let(:expected_tokens) { [Tokens::COMMA,
+                               Tokens::COLON,
+                               Tokens::EQUALS,
+                               Tokens::CLOSE_CURLY,
+                               Tokens::OPEN_CURLY,
+                               Tokens::CLOSE_SQUARE,
+                               Tokens::OPEN_SQUARE,
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_true,
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_false,
+                               TestUtils.token_int(42),
+                               TestUtils.token_null,
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    # Tokenizer round-trip check: one input string containing every token
+    # type, separated by single spaces, must tokenize to exactly the
+    # expected sequence. The shared examples "token_matching" perform the
+    # comparison using the test_string/expected_tokens lets.
+    #
+    # NOTE(review): spacing around structural tokens (comma, colon, equals,
+    # braces, brackets, +=) is expected as token_whitespace, while spacing
+    # between value tokens (strings, numbers, booleans, null,
+    # substitutions) is expected as token_unquoted — presumably because
+    # whitespace adjacent to values can participate in HOCON value
+    # concatenation; confirm against the tokenizer implementation.
+    context "tokenize all types single spaces" do
+      let(:test_string) { ' , : = } { ] [ += "foo" """bar""" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} ' + "\n " }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               Tokens::COMMA,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::COLON,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_unquoted(" "),
+                               # """bar""" is a triple-quoted (multi-line) string literal
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_int(42),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_true,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_false,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_null,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_unquoted(" "),
+                               # ${?x.y} — optional substitution (no error if undefined)
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_unquoted(" "),
+                               # ${"c.d"} — quoted key, so the dot is literal, not a path separator
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_whitespace(" "),
+                               # trailing "\n " in test_string: the newline yields a line
+                               # token (argument presumably the line number — confirm),
+                               # followed by the next line's leading whitespace.
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    # Same all-token-types round-trip as the single-space case, but with
+    # three-space separators, checking that multi-character whitespace runs
+    # are kept intact as single tokens rather than split or collapsed.
+    #
+    # NOTE(review): spacing around structural tokens is expected as
+    # token_whitespace while spacing between value tokens is expected as
+    # token_unquoted — presumably because whitespace adjacent to values can
+    # participate in HOCON value concatenation; confirm against the
+    # tokenizer implementation.
+    context "tokenize all types multiple spaces" do
+      let(:test_string) { '   ,   :   =   }   {   ]   [   +=   "foo"   """bar"""   42   true   3.14   false   null   ${a.b}   ${?x.y}   ${"c.d"}   ' + "\n   " }
+      let(:expected_tokens) { [TestUtils.token_whitespace("   "),
+                               Tokens::COMMA,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::COLON,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::EQUALS,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::CLOSE_CURLY,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::OPEN_CURLY,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::CLOSE_SQUARE,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::OPEN_SQUARE,
+                               TestUtils.token_whitespace("   "),
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_whitespace("   "),
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_unquoted("   "),
+                               # """bar""" is a triple-quoted (multi-line) string literal
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_int(42),
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_true,
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_false,
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_null,
+                               TestUtils.token_unquoted("   "),
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_unquoted("   "),
+                               # ${?x.y} — optional substitution (no error if undefined)
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_unquoted("   "),
+                               # ${"c.d"} — quoted key, so the dot is literal, not a path separator
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_whitespace("   "),
+                               # trailing "\n   " in test_string: the newline yields a line
+                               # token (argument presumably the line number — confirm),
+                               # followed by the next line's leading whitespace.
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace("   ")] }
+
+      include_examples "token_matching"
+    end
+  end
+end

Debdiff

File lists identical (after any substitutions)

No differences were encountered in the control files

More details

Full run details