diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index dccc901..90b6bc3 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -4,6 +4,14 @@ ### Features +* Upgrade to support RethinkDB version 1.15. [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173) & [Issue #171](https://github.com/mfenniak/rethinkdb-net/issues/171) + + * The new Group method can group on a secondary index, or on one to three key selector expressions. + + * The Count aggregate now accepts a predicate so that only matching rows are counted. + + * The Max, Min, Avg, Sum, Count, and Contains aggregates are now fully supported; previously only Avg and Sum were available. + * Support for serializing and deserializing TimeSpan data types, which was added to the Newtonsoft serializer but not the basic serialization implementation. [PR #152](https://github.com/mfenniak/rethinkdb-net/issues/152) * Expressions now support the addition of DateTime and TimeSpan objects, as well as DateTime and DateTimeOffset's Add methods (eg. AddHours, AddDays). [PR #152](https://github.com/mfenniak/rethinkdb-net/issues/152), [Issue #158](https://github.com/mfenniak/rethinkdb-net/issues/158) Note, AddMonths is not supported. @@ -12,6 +20,18 @@ * Support for OrderBy on indexes. [Issue #162](https://github.com/mfenniak/rethinkdb-net/issues/162) +### Breaking Changes + +* [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173) contains a number of breaking changes, made to maintain consistency with RethinkDB drivers on other platforms and to remove functionality that RethinkDB no longer supports. + + * The base parameter has been removed from Reduce(), matching its removal from RethinkDB; reducing an empty sequence now raises an error, and reducing a single-element sequence returns its only element. Part of [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173). + + * UpdateAndReturnValue, InsertAndReturnValues, ReplaceAndReturnValue, and DeleteAndReturnValue have been renamed to "...ReturnChanges", and their return type has changed to support returning multiple changes, for consistency with other RethinkDB drivers. Part of [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173). + + * GroupedMapReduce has been removed for consistency with other RethinkDB drivers; use .Group(...).Map(...).Reduce(...) as an alternative. Part of [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173). + + * GroupBy and its prebuilt aggregates have been removed for consistency with other RethinkDB drivers; use .Group() followed by an aggregate instead (see the migration sketch below). Part of [PR #173](https://github.com/mfenniak/rethinkdb-net/issues/173).
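For anyone migrating off the removed GroupedMapReduce/GroupBy APIs, the following is a rough sketch of how the reworked grouping and aggregation calls read. It is pieced together from the integration tests further down in this diff and is illustrative only, not part of the changeset; the `connection`, `testTable`, and `TestObject` names are assumed to be set up as in those tests.

```csharp
// Illustrative sketch only -- assumes a TestObject class (Name, SomeNumber fields)
// and an open connection, as set up in the GroupingTests integration tests in this diff.
var testTable = Query.Db("test").Table<TestObject>("table");

// Removed APIs (no longer compile):
//   testTable.GroupedMapReduce(to => to.Name, to => 1.0, (l, r) => l + r);
//   testTable.GroupBy(Query.Sum(to => to.SomeNumber), to => new { name = to.Name });

// Replacement: Group(...) followed by an aggregate; the result enumerates as one
// key/value pair per group.
var countsByName = connection.Run(testTable.Group(to => to.Name).Count());
var sumsByIndex = connection.Run(testTable.Group("name").Sum(to => to.SomeNumber)); // "name" is a secondary index
foreach (var group in sumsByIndex)
    Console.WriteLine("{0}: {1}", group.Key, group.Value);

// Ungrouped aggregates over a whole sequence, including Count with a predicate:
var total = connection.Run(testTable.Sum(to => to.SomeNumber));
var matching = connection.Run(testTable.Count(to => to.SomeNumber > 1));

// GroupedMapReduce equivalent, now expressed as Group + Map + Reduce:
var sums = connection.Run(testTable.Group(to => to.Name).Map(to => to.SomeNumber).Reduce((l, r) => l + r));
```

On the wire, grouped results come back as a `$reql_type$ = GROUPED_DATA` object whose `data` member is an array of `[key, value]` pairs; the GroupingDictionaryDatumConverterFactory added in this diff converts that shape into the dictionary-like result shown above.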
+ ## 0.7.0.0 (2013-11-02) diff --git a/rethinkdb-net-newtonsoft-test/Integration/ComplexObjectTests.cs b/rethinkdb-net-newtonsoft-test/Integration/ComplexObjectTests.cs index ed479fc..b4cfd51 100644 --- a/rethinkdb-net-newtonsoft-test/Integration/ComplexObjectTests.cs +++ b/rethinkdb-net-newtonsoft-test/Integration/ComplexObjectTests.cs @@ -105,30 +105,33 @@ private async Task DoReplace() [Test] public void ReplaceAndReturnValue() { - var resp = connection.Run(testTable.Get(insertedObject.Id).ReplaceAndReturnValue(new ComplexObject() {Id = insertedObject.Id, Name = "Jack Black"})); + var resp = connection.Run(testTable.Get(insertedObject.Id).ReplaceAndReturnChanges(new ComplexObject() {Id = insertedObject.Id, Name = "Jack Black"})); Assert.That(resp, Is.Not.Null); Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Replaced, Is.EqualTo(1)); Assert.That(resp.GeneratedKeys, Is.Null); - Assert.That(resp.OldValue, Is.Not.Null); - Assert.That(resp.OldValue.Name, Is.EqualTo("Brian Chavez")); - Assert.That(resp.NewValue, Is.Not.Null); - Assert.That(resp.NewValue.Name, Is.EqualTo("Jack Black")); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Name, Is.EqualTo("Brian Chavez")); + Assert.That(resp.Changes[0].NewValue, Is.Not.Null); + Assert.That(resp.Changes[0].NewValue.Name, Is.EqualTo("Jack Black")); } [Test] public void UpdateAndReturnValue() { - var resp = connection.Run(testTable.Get(insertedObject.Id).UpdateAndReturnValue(o => new ComplexObject() {Name = "Hello " + o.Id + "!"})); + var resp = connection.Run(testTable.Get(insertedObject.Id).UpdateAndReturnChanges(o => new ComplexObject() {Name = "Hello " + o.Id + "!"})); Assert.That(resp, Is.Not.Null); Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Replaced, Is.EqualTo(1)); - Assert.That(resp.NewValue, Is.Not.Null); - Assert.That(resp.OldValue, Is.Not.Null); - - Assert.That(resp.OldValue.Name, Is.EqualTo("Brian Chavez")); - Assert.That(resp.NewValue.Name, Is.EqualTo("Hello " + resp.OldValue.Id + "!")); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].NewValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Name, Is.EqualTo("Brian Chavez")); + Assert.That(resp.Changes[0].NewValue.Name, Is.EqualTo("Hello " + resp.Changes[0].OldValue.Id + "!")); } [Test] @@ -149,14 +152,16 @@ private async Task DoDelete() [Test] public void DeleteAndReturnValues() { - var resp = connection.Run(testTable.Get(insertedObject.Id).DeleteAndReturnValue()); + var resp = connection.Run(testTable.Get(insertedObject.Id).DeleteAndReturnChanges()); Assert.That(resp, Is.Not.Null); Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Deleted, Is.EqualTo(1)); Assert.That(resp.GeneratedKeys, Is.Null); - Assert.That(resp.OldValue, Is.Not.Null); - Assert.That(resp.OldValue.Id, Is.EqualTo(insertedObject.Id)); - Assert.That(resp.NewValue, Is.Null); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Id, Is.EqualTo(insertedObject.Id)); + Assert.That(resp.Changes[0].NewValue, Is.Null); } [Test] diff --git a/rethinkdb-net-newtonsoft/Configuration/NewtonsoftDatumConverterFactory.cs b/rethinkdb-net-newtonsoft/Configuration/NewtonsoftDatumConverterFactory.cs index 
9fcba51..18f23bf 100644 --- a/rethinkdb-net-newtonsoft/Configuration/NewtonsoftDatumConverterFactory.cs +++ b/rethinkdb-net-newtonsoft/Configuration/NewtonsoftDatumConverterFactory.cs @@ -9,6 +9,10 @@ public class NewtonsoftDatumConverterFactory : AbstractDatumConverterFactory public override bool TryGet(IDatumConverterFactory rootDatumConverterFactory, out IDatumConverter datumConverter) { + // Use rethinkdb-net's support for $reql_type$=GROUPED_DATA return values. + if (GroupingDictionaryDatumConverterFactory.Instance.TryGet(rootDatumConverterFactory, out datumConverter)) + return true; + //I guess we could have some more specific checks here //but if we get here last in the NewtonsoftSerializer order, then //I suppose we can handle it if no preceding converters could handle it. diff --git a/rethinkdb-net-test/Integration/GroupingTests.cs b/rethinkdb-net-test/Integration/GroupingTests.cs index f543eb8..f601bcc 100644 --- a/rethinkdb-net-test/Integration/GroupingTests.cs +++ b/rethinkdb-net-test/Integration/GroupingTests.cs @@ -1,11 +1,8 @@ using System; using NUnit.Framework; using RethinkDb; -using System.Net; using System.Linq; -using System.Threading.Tasks; using System.Collections.Generic; -using System.Linq.Expressions; namespace RethinkDb.Test.Integration { @@ -17,20 +14,17 @@ public class GroupingTests : TestBase public override void TestFixtureSetUp() { base.TestFixtureSetUp(); - connection.RunAsync(Query.DbCreate("test")).Wait(); - connection.RunAsync(Query.Db("test").TableCreate("table")).Wait(); - } + connection.Run(Query.DbCreate("test")); + connection.Run(Query.Db("test").TableCreate("table")); - [SetUp] - public virtual void SetUp() - { testTable = Query.Db("test").Table("table"); - connection.RunAsync(testTable.Insert(new TestObject[] { + connection.Run(testTable.Insert(new TestObject[] + { new TestObject() { Name = "1", SomeNumber = 1 }, new TestObject() { Name = "1", SomeNumber = 1 }, - new TestObject() { Name = "2", SomeNumber = 2 }, + new TestObject() { Name = "2", SomeNumber = 2, Tags = new string[] { "A", "B" } }, new TestObject() { Name = "2", SomeNumber = 200 }, - new TestObject() { Name = "2", SomeNumber = 2 }, + new TestObject() { Name = "2", SomeNumber = 2, Tags = new string[] { "A", "C" } }, new TestObject() { Name = "3", SomeNumber = 3 }, new TestObject() { Name = "3", SomeNumber = 3 }, new TestObject() { Name = "4", SomeNumber = 4 }, @@ -38,44 +32,75 @@ public virtual void SetUp() new TestObject() { Name = "6", SomeNumber = 6 }, new TestObject() { Name = "6", SomeNumber = 6 }, new TestObject() { Name = "7", SomeNumber = 7 }, - })).Wait(); + })); + connection.Run(testTable.IndexCreate("name", to => to.Name)); } - [TearDown] - public virtual void TearDown() + [Test] + public void GroupByIndex() { - connection.RunAsync(testTable.Delete()).Wait(); + var query = testTable.Group("name"); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupName = record.Key; + var objects = record.Value; + + switch (groupName) + { + case "1": + case "3": + case "6": + Assert.That(objects.Count(), Is.EqualTo(2)); + break; + case "2": + Assert.That(objects.Count(), Is.EqualTo(3)); + break; + case "4": + case "5": + case "7": + Assert.That(objects.Count(), Is.EqualTo(1)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(7)); } [Test] - public void GroupedMapReduce() + public void GroupByOneField() { - var query = testTable.GroupedMapReduce( - to => to.Name, // group 
- to => 1.0, // map - (leftCount, rightCount) => leftCount + rightCount // reduce - ); + var query = testTable.Group(to => to.Name); int count = 0; foreach (var record in connection.Run(query)) { - var groupName = record.Item1; - var reduceCount = record.Item2; + var groupName = record.Key; + var objects = record.Value; switch (groupName) { case "1": case "3": case "6": - Assert.That(reduceCount, Is.EqualTo(2)); + Assert.That(objects.Count(), Is.EqualTo(2)); break; case "2": - Assert.That(reduceCount, Is.EqualTo(3)); + Assert.That(objects.Count(), Is.EqualTo(3)); break; case "4": case "5": case "7": - Assert.That(reduceCount, Is.EqualTo(1)); + Assert.That(objects.Count(), Is.EqualTo(1)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); break; } @@ -86,31 +111,134 @@ public void GroupedMapReduce() } [Test] - public void GroupByCount() + public void GroupByTwoFields() + { + var query = testTable.Group( + to => to.Name, + to => to.SomeNumber + ); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupKey = record.Key; + var objects = record.Value; + + if (groupKey.Equals(Tuple.Create("1", 1d)) || + groupKey.Equals(Tuple.Create("2", 2d)) || + groupKey.Equals(Tuple.Create("3", 3d)) || + groupKey.Equals(Tuple.Create("6", 6d))) + { + Assert.That(objects.Count(), Is.EqualTo(2)); + } + else if ( + groupKey.Equals(Tuple.Create("2", 200d)) || + groupKey.Equals(Tuple.Create("4", 4d)) || + groupKey.Equals(Tuple.Create("5", 5d)) || + groupKey.Equals(Tuple.Create("7", 7d))) + { + Assert.That(objects.Count(), Is.EqualTo(1)); + } + else + { + Assert.Fail("Unexpected group key: {0}", groupKey); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(8)); + } + + [Test] + public void GroupByThreeFields() { - // Same query and results as GroupedMapReduce test - var query = testTable.GroupBy(Query.Count(), to => new { name = to.Name }); + var query = testTable.Group( + to => to.Name, + to => to.SomeNumber, + to => to.Tags + ); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupKey = record.Key; + var objects = record.Value; + + if (groupKey.Item1 == "2" && Math.Abs(groupKey.Item2 - 2d) <= Double.Epsilon) + { + if (groupKey.Item3.SequenceEqual(new string[] { "A", "B" }) || + groupKey.Item3.SequenceEqual(new string[] { "A", "C" })) + { + Assert.That(objects.Count(), Is.EqualTo(1)); + } + else + { + Assert.Fail("Unexpected Tags on (2, 2): {0}", groupKey.Item3); + } + } + else if (groupKey.Equals(Tuple.Create("1", 1d, null)) || + groupKey.Equals(Tuple.Create("3", 3d, null)) || + groupKey.Equals(Tuple.Create("6", 6d, null))) + { + Assert.That(objects.Count(), Is.EqualTo(2)); + } + else if (groupKey.Equals(Tuple.Create("2", 200d, null)) || + groupKey.Equals(Tuple.Create("4", 4d, null)) || + groupKey.Equals(Tuple.Create("5", 5d, null)) || + groupKey.Equals(Tuple.Create("7", 7d, null))) + { + Assert.That(objects.Count(), Is.EqualTo(1)); + } + else + { + Assert.Fail("Unexpected group key: {0}", groupKey); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(9)); + } + + [Test] + public void GroupAndMaxAggregate() + { + var query = testTable.Group("name").Max(to => to.SomeNumber); int count = 0; foreach (var record in connection.Run(query)) { - var groupName = record.Item1.name; - var reduceCount = record.Item2; + var groupName = record.Key; + var testObject = record.Value; switch (groupName) { case "1": - case "3": - case "6": - Assert.That(reduceCount, Is.EqualTo(2)); + Assert.That(testObject.SomeNumber, 
Is.EqualTo(1)); break; case "2": - Assert.That(reduceCount, Is.EqualTo(3)); + Assert.That(testObject.SomeNumber, Is.EqualTo(200)); + break; + case "3": + Assert.That(testObject.SomeNumber, Is.EqualTo(3)); break; case "4": + Assert.That(testObject.SomeNumber, Is.EqualTo(4)); + break; case "5": + Assert.That(testObject.SomeNumber, Is.EqualTo(5)); + break; + case "6": + Assert.That(testObject.SomeNumber, Is.EqualTo(6)); + break; case "7": - Assert.That(reduceCount, Is.EqualTo(1)); + Assert.That(testObject.SomeNumber, Is.EqualTo(7)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); break; } @@ -121,76 +249,155 @@ public void GroupByCount() } [Test] - public void GroupByTwoParams() + public void MaxAggregate() { - var query = testTable.GroupBy(Query.Count(), to => new { name = to.Name, number = to.SomeNumber }); + var query = testTable.Max(to => to.SomeNumber); + var testObject = connection.Run(query); + Assert.That(testObject.SomeNumber, Is.EqualTo(200)); + } + + [Test] + public void GroupAndMinAggregate() + { + var query = testTable.Group("name").Min(to => to.SomeNumber); int count = 0; foreach (var record in connection.Run(query)) { - var groupName = record.Item1.name; - var someNumber = record.Item1.number; - var reduceCount = record.Item2; + var groupName = record.Key; + var testObject = record.Value; switch (groupName) { case "1": + Assert.That(testObject.SomeNumber, Is.EqualTo(1)); + break; + case "2": + Assert.That(testObject.SomeNumber, Is.EqualTo(2)); + break; case "3": + Assert.That(testObject.SomeNumber, Is.EqualTo(3)); + break; + case "4": + Assert.That(testObject.SomeNumber, Is.EqualTo(4)); + break; + case "5": + Assert.That(testObject.SomeNumber, Is.EqualTo(5)); + break; case "6": - Assert.That(reduceCount, Is.EqualTo(2)); + Assert.That(testObject.SomeNumber, Is.EqualTo(6)); + break; + case "7": + Assert.That(testObject.SomeNumber, Is.EqualTo(7)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(7)); + } + + [Test] + public void MinAggregate() + { + var query = testTable.Min(to => to.SomeNumber); + var testObject = connection.Run(query); + Assert.That(testObject.SomeNumber, Is.EqualTo(1)); + } + + [Test] + public void GroupAndAverageAggregate() + { + var query = testTable.Group("name").Avg(to => to.SomeNumber); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupName = record.Key; + var average = record.Value; + + switch (groupName) + { + case "1": + Assert.That(average, Is.EqualTo(1)); break; case "2": - if (someNumber == 2) - Assert.That(reduceCount, Is.EqualTo(2)); - else if (someNumber == 200) - Assert.That(reduceCount, Is.EqualTo(1)); + Assert.That(average, Is.EqualTo(68)); + break; + case "3": + Assert.That(average, Is.EqualTo(3)); break; case "4": + Assert.That(average, Is.EqualTo(4)); + break; case "5": + Assert.That(average, Is.EqualTo(5)); + break; + case "6": + Assert.That(average, Is.EqualTo(6)); + break; case "7": - Assert.That(reduceCount, Is.EqualTo(1)); + Assert.That(average, Is.EqualTo(7)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); break; } ++count; } - Assert.That(count, Is.EqualTo(8)); + Assert.That(count, Is.EqualTo(7)); } [Test] - public void GroupBySum() + public void AverageAggregate() { - var query = testTable.GroupBy(Query.Sum(to => to.SomeNumber), to => new { name = to.Name }); + var query = testTable.Avg(to => to.SomeNumber); + var average = connection.Run(query); + 
Assert.That(average, Is.EqualTo(20.0d)); + } + + [Test] + public void GroupAndSumAggregate() + { + var query = testTable.Group("name").Sum(to => to.SomeNumber); int count = 0; foreach (var record in connection.Run(query)) { - var groupName = record.Item1.name; - var reduceSum = record.Item2; + var groupName = record.Key; + var average = record.Value; switch (groupName) { case "1": - Assert.That(reduceSum, Is.EqualTo(2)); + Assert.That(average, Is.EqualTo(2)); break; case "2": - Assert.That(reduceSum, Is.EqualTo(204)); + Assert.That(average, Is.EqualTo(204)); break; case "3": - Assert.That(reduceSum, Is.EqualTo(6)); + Assert.That(average, Is.EqualTo(6)); break; case "4": - Assert.That(reduceSum, Is.EqualTo(4)); + Assert.That(average, Is.EqualTo(4)); break; case "5": - Assert.That(reduceSum, Is.EqualTo(5)); + Assert.That(average, Is.EqualTo(5)); break; case "6": - Assert.That(reduceSum, Is.EqualTo(12)); + Assert.That(average, Is.EqualTo(12)); break; case "7": - Assert.That(reduceSum, Is.EqualTo(7)); + Assert.That(average, Is.EqualTo(7)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); break; } @@ -201,38 +408,164 @@ public void GroupBySum() } [Test] - public void GroupByAvg() + public void SumAggregate() { - var query = testTable.GroupBy(Query.Avg(to => to.SomeNumber), to => new { Name = to.Name }); + var query = testTable.Sum(to => to.SomeNumber); + var average = connection.Run(query); + Assert.That(average, Is.EqualTo(240)); + } + + [Test] + public void GroupAndCountAggregate() + { + var query = testTable.Group(to => to.Name).Count(to => to.SomeNumber > 1); int count = 0; foreach (var record in connection.Run(query)) { - var groupName = record.Item1.Name; - var reduceSum = record.Item2; + var groupName = record.Key; + var objectCount = record.Value; + + switch (groupName) + { + // Surprisingly missing; https://groups.google.com/forum/#!topic/rethinkdb/HXCHeTthF64 + //case "1": + // Assert.That(objectCount, Is.EqualTo(0)); + // break; + case "3": + case "6": + Assert.That(objectCount, Is.EqualTo(2)); + break; + case "2": + Assert.That(objectCount, Is.EqualTo(3)); + break; + case "4": + case "5": + case "7": + Assert.That(objectCount, Is.EqualTo(1)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(6)); + } + + [Test] + public void CountAggregate() + { + var count = connection.Run(testTable.Count(to => to.SomeNumber > 1)); + Assert.That(count, Is.EqualTo(10)); + } + + [Test] + public void GroupAndContainsAggregate() + { + var query = testTable.Group(to => to.Name).Contains(to => to.SomeNumber > 1); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupName = record.Key; + var predicateResult = record.Value; switch (groupName) { case "1": - Assert.That(reduceSum, Is.EqualTo(1)); + Assert.That(predicateResult, Is.False); break; + case "3": case "2": - Assert.That(reduceSum, Is.EqualTo(68)); + case "4": + case "5": + case "6": + case "7": + Assert.That(predicateResult, Is.True); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); + break; + } + + ++count; + } + + Assert.That(count, Is.EqualTo(7)); + } + + [Test] + public void ContainsAggregate() + { + var contains = connection.Run(testTable.Contains(to => to.SomeNumber > 1)); + Assert.That(contains, Is.True); + contains = connection.Run(testTable.Contains(to => to.SomeNumber > 1000)); + Assert.That(contains, Is.False); + } + + [Test] + public void Ungroup() + { + var query = 
testTable + .Group(to => to.Name) + .Count() + .Ungroup() + .OrderBy(t => t.Reduction).ThenBy(t => t.Group); + + var result = connection.Run(query).ToArray(); + Assert.That(result, Has.Length.EqualTo(7)); + Assert.That(result[0].Group, Is.EqualTo("4")); Assert.That(result[0].Reduction, Is.EqualTo(1)); + Assert.That(result[1].Group, Is.EqualTo("5")); Assert.That(result[1].Reduction, Is.EqualTo(1)); + Assert.That(result[2].Group, Is.EqualTo("7")); Assert.That(result[2].Reduction, Is.EqualTo(1)); + Assert.That(result[3].Group, Is.EqualTo("1")); Assert.That(result[3].Reduction, Is.EqualTo(2)); + Assert.That(result[4].Group, Is.EqualTo("3")); Assert.That(result[4].Reduction, Is.EqualTo(2)); + Assert.That(result[5].Group, Is.EqualTo("6")); Assert.That(result[5].Reduction, Is.EqualTo(2)); + Assert.That(result[6].Group, Is.EqualTo("2")); Assert.That(result[6].Reduction, Is.EqualTo(3)); + } + + [Test] + public void GroupedMapReduce() + { + // This is functionally the same as .Group().Sum(), but tests that Map and Reduce work on grouping queries. + var query = testTable + .Group(to => to.Name) + .Map(to => to.SomeNumber) + .Reduce((l, r) => l + r); + + int count = 0; + foreach (var record in connection.Run(query)) + { + var groupName = record.Key; + var average = record.Value; + + switch (groupName) + { + case "1": + Assert.That(average, Is.EqualTo(2)); + break; + case "2": + Assert.That(average, Is.EqualTo(204)); break; case "3": - Assert.That(reduceSum, Is.EqualTo(3)); + Assert.That(average, Is.EqualTo(6)); break; case "4": - Assert.That(reduceSum, Is.EqualTo(4)); + Assert.That(average, Is.EqualTo(4)); break; case "5": - Assert.That(reduceSum, Is.EqualTo(5)); + Assert.That(average, Is.EqualTo(5)); break; case "6": - Assert.That(reduceSum, Is.EqualTo(6)); + Assert.That(average, Is.EqualTo(12)); break; case "7": - Assert.That(reduceSum, Is.EqualTo(7)); + Assert.That(average, Is.EqualTo(7)); + break; + default: + Assert.Fail("Unexpected group name: {0}", groupName); break; } diff --git a/rethinkdb-net-test/Integration/MultiObjectTests.cs b/rethinkdb-net-test/Integration/MultiObjectTests.cs index 7961a47..a500a6c 100644 --- a/rethinkdb-net-test/Integration/MultiObjectTests.cs +++ b/rethinkdb-net-test/Integration/MultiObjectTests.cs @@ -721,16 +721,13 @@ public void ConcatMap_OnSimpleDataType_CanUseParameterExpressionForQuery() { var query = testTable .ConcatMap(to => to.Tags) - .GroupedMapReduce( - tag => tag, - tag => 1, - (l, r) => l+r); + .Group(tag => tag) + .Count(); var enumerable = connection.Run(query); - Assert.That(enumerable.Count(), Is.EqualTo(2)); - Assert.That(enumerable, Has.Member(Tuple.Create("even", 3))); - Assert.That(enumerable, Has.Member(Tuple.Create("odd", 4))); + Assert.That(enumerable["even"], Is.EqualTo(3)); + Assert.That(enumerable["odd"], Is.EqualTo(4)); } [Test] diff --git a/rethinkdb-net-test/Integration/SingleObjectTests.cs b/rethinkdb-net-test/Integration/SingleObjectTests.cs index 128f0b8..31875aa 100644 --- a/rethinkdb-net-test/Integration/SingleObjectTests.cs +++ b/rethinkdb-net-test/Integration/SingleObjectTests.cs @@ -86,30 +86,33 @@ private async Task DoReplace() [Test] public void ReplaceAndReturnValue() { - var resp = connection.Run(testTable.Get(insertedObject.Id).ReplaceAndReturnValue(new TestObject() { Id = insertedObject.Id, Name = "Jack Black" })); + var resp = connection.Run(testTable.Get(insertedObject.Id).ReplaceAndReturnChanges(new TestObject() { Id = insertedObject.Id, Name = "Jack Black" })); Assert.That(resp, Is.Not.Null); 
Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Replaced, Is.EqualTo(1)); Assert.That(resp.GeneratedKeys, Is.Null); - Assert.That(resp.OldValue, Is.Not.Null); - Assert.That(resp.OldValue.Name, Is.EqualTo("Jim Brown")); - Assert.That(resp.NewValue, Is.Not.Null); - Assert.That(resp.NewValue.Name, Is.EqualTo("Jack Black")); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Name, Is.EqualTo("Jim Brown")); + Assert.That(resp.Changes[0].NewValue, Is.Not.Null); + Assert.That(resp.Changes[0].NewValue.Name, Is.EqualTo("Jack Black")); } [Test] public void UpdateAndReturnValue() { - var resp = connection.Run(testTable.Get(insertedObject.Id).UpdateAndReturnValue(o => new TestObject() { Name = "Hello " + o.Id + "!" })); + var resp = connection.Run(testTable.Get(insertedObject.Id).UpdateAndReturnChanges(o => new TestObject() { Name = "Hello " + o.Id + "!" })); Assert.That(resp, Is.Not.Null); Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Replaced, Is.EqualTo(1)); - Assert.That(resp.NewValue, Is.Not.Null); - Assert.That(resp.OldValue, Is.Not.Null); - - Assert.That(resp.OldValue.Name, Is.EqualTo("Jim Brown")); - Assert.That(resp.NewValue.Name, Is.EqualTo("Hello " + resp.OldValue.Id + "!")); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].NewValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Name, Is.EqualTo("Jim Brown")); + Assert.That(resp.Changes[0].NewValue.Name, Is.EqualTo("Hello " + resp.Changes[0].OldValue.Id + "!")); } [Test] @@ -130,14 +133,16 @@ private async Task DoDelete() [Test] public void DeleteAndReturnValues() { - var resp = connection.Run(testTable.Get(insertedObject.Id).DeleteAndReturnValue()); + var resp = connection.Run(testTable.Get(insertedObject.Id).DeleteAndReturnChanges()); Assert.That(resp, Is.Not.Null); Assert.That(resp.FirstError, Is.Null); Assert.That(resp.Deleted, Is.EqualTo(1)); Assert.That(resp.GeneratedKeys, Is.Null); - Assert.That(resp.OldValue, Is.Not.Null); - Assert.That(resp.OldValue.Id, Is.EqualTo(insertedObject.Id)); - Assert.That(resp.NewValue, Is.Null); + Assert.That(resp.Changes, Is.Not.Null); + Assert.That(resp.Changes, Has.Length.EqualTo(1)); + Assert.That(resp.Changes[0].OldValue, Is.Not.Null); + Assert.That(resp.Changes[0].OldValue.Id, Is.EqualTo(insertedObject.Id)); + Assert.That(resp.Changes[0].NewValue, Is.Null); } [Test] diff --git a/rethinkdb-net-test/Integration/TableTests.cs b/rethinkdb-net-test/Integration/TableTests.cs index b2a676f..a4b23b3 100644 --- a/rethinkdb-net-test/Integration/TableTests.cs +++ b/rethinkdb-net-test/Integration/TableTests.cs @@ -139,27 +139,17 @@ private async Task DoMultiInsertWithIds() } [Test] - public void Reduce() + public void ReduceEmptyTable() { - DoReduce().Wait(); - } - - private async Task DoReduce() - { - var resp = await connection.RunAsync(testTable.Reduce((acc, val) => new TestObject() { SomeNumber = acc.SomeNumber + val.SomeNumber }, new TestObject() { SomeNumber = -1 })); - Assert.That(resp.SomeNumber, Is.EqualTo(-1)); - } - - [Test] - public void ReduceToPrimitive() - { - DoReduceToPrimitive().Wait(); - } - - private async Task DoReduceToPrimitive() - { - var resp = await connection.RunAsync(testTable.Map(o => o.SomeNumber).Reduce((acc, val) => acc + val, -1.0)); - Assert.That(resp, Is.EqualTo(-1.0)); + try + { + 
connection.Run(testTable.Reduce((acc, val) => new TestObject() { SomeNumber = acc.SomeNumber + val.SomeNumber })); + Assert.Fail("Expected exception"); + } + catch (AggregateException ex) + { + Assert.That(ex.InnerException is RethinkDbRuntimeException); + } } [Test] diff --git a/rethinkdb-net.nuspec b/rethinkdb-net.nuspec index b4dad13..3ba1e82 100644 --- a/rethinkdb-net.nuspec +++ b/rethinkdb-net.nuspec @@ -12,7 +12,7 @@ - rethinkdb-net compatible with RethinkDB v1.10, see https://github.com/mfenniak/rethinkdb-net/blob/master/RELEASE-NOTES.md for detailed release notes. + rethinkdb-net compatible with RethinkDB v1.15, see https://github.com/mfenniak/rethinkdb-net/blob/master/RELEASE-NOTES.md for detailed release notes. diff --git a/rethinkdb-net.sln b/rethinkdb-net.sln index e287098..71eafdf 100644 --- a/rethinkdb-net.sln +++ b/rethinkdb-net.sln @@ -12,6 +12,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution rethinkdb-net.nuspec = rethinkdb-net.nuspec LICENSE.txt = LICENSE.txt RELEASE-NOTES.md = RELEASE-NOTES.md + rethinkdb-net-newtonsoft.nuspec = rethinkdb-net-newtonsoft.nuspec EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Examples", "Examples", "{9F8AB94D-246B-420B-91C7-145B94D75FA3}" diff --git a/rethinkdb-net/Connection.cs b/rethinkdb-net/Connection.cs index be6b718..c79a200 100644 --- a/rethinkdb-net/Connection.cs +++ b/rethinkdb-net/Connection.cs @@ -41,7 +41,8 @@ public Connection() EnumDatumConverterFactory.Instance, NullableDatumConverterFactory.Instance, ListDatumConverterFactory.Instance, - TimeSpanDatumConverterFactory.Instance + TimeSpanDatumConverterFactory.Instance, + GroupingDictionaryDatumConverterFactory.Instance ); ConnectTimeout = QueryTimeout = TimeSpan.FromSeconds(30); } diff --git a/rethinkdb-net/DatumConverters/GroupingDictionaryDatumConverterFactory.cs b/rethinkdb-net/DatumConverters/GroupingDictionaryDatumConverterFactory.cs new file mode 100644 index 0000000..cf06eb8 --- /dev/null +++ b/rethinkdb-net/DatumConverters/GroupingDictionaryDatumConverterFactory.cs @@ -0,0 +1,104 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using RethinkDb.Spec; + +namespace RethinkDb.DatumConverters +{ + // A special datum converter to support RethinkDB's $reql_type$ = GROUPED_DATA return values. 
+ public class GroupingDictionaryDatumConverterFactory : AbstractDatumConverterFactory + { + public static readonly GroupingDictionaryDatumConverterFactory Instance = new GroupingDictionaryDatumConverterFactory(); + + private GroupingDictionaryDatumConverterFactory() + { + } + + public override bool TryGet(IDatumConverterFactory rootDatumConverterFactory, out IDatumConverter datumConverter) + { + datumConverter = null; + if (rootDatumConverterFactory == null) + throw new ArgumentNullException("rootDatumConverterFactory"); + + if (typeof(T).IsGenericType && typeof(T).GetGenericTypeDefinition() == typeof(IGroupingDictionary<,>)) + { + Type converterType = typeof(GroupingDictionaryDatumConverter<,>).MakeGenericType(typeof(T).GetGenericArguments()); + datumConverter = (IDatumConverter)Activator.CreateInstance(converterType, rootDatumConverterFactory); + return true; + } + else + return false; + } + } + + public class GroupingDictionaryDatumConverter : AbstractReferenceTypeDatumConverter> + { + private readonly IDatumConverter keyTypeConverter; + private readonly IDatumConverter valueTypeConverter; + + public GroupingDictionaryDatumConverter(IDatumConverterFactory rootDatumConverterFactory) + { + this.keyTypeConverter = rootDatumConverterFactory.Get(); + this.valueTypeConverter = rootDatumConverterFactory.Get(); + } + + #region IDatumConverter Members + + public override IGroupingDictionary ConvertDatum(Datum datum) + { + if (datum.type == Datum.DatumType.R_NULL) + { + return null; + } + else if (datum.type == Datum.DatumType.R_OBJECT) + { + var keys = datum.r_object.ToDictionary(kvp => kvp.key, kvp => kvp.val); + + Datum typeDatum; + if (!keys.TryGetValue("$reql_type$", out typeDatum)) + throw new NotSupportedException("Object without $reql_type$ key cannot be converted to a dictionary"); + if (typeDatum.type != Datum.DatumType.R_STR || typeDatum.r_str != "GROUPED_DATA") + throw new NotSupportedException("Object without $reql_type$ = GROUPED_DATA cannot be converted to a dictionary"); + + Datum dataDatum; + if (!keys.TryGetValue("data", out dataDatum)) + throw new NotSupportedException("Object without data key cannot be converted to a dictionary"); + if (dataDatum.type != Datum.DatumType.R_ARRAY) + throw new NotSupportedException("Object's data key must be an array type"); + + var retval = new GroupingDictionary(dataDatum.r_array.Count); + foreach (var item in dataDatum.r_array) + { + if (item.type != Datum.DatumType.R_ARRAY || item.r_array.Count != 2) + throw new NotSupportedException("GROUPED_DATA data is expected to contain array elements of two items, a key and a value"); + var key = keyTypeConverter.ConvertDatum(item.r_array[0]); + var value = valueTypeConverter.ConvertDatum(item.r_array[1]); + retval[key] = value; + } + + return retval; + } + else + { + throw new NotSupportedException("Attempted to cast Datum to array, but Datum was unsupported type " + datum.type); + } + } + + public override Spec.Datum ConvertObject(IGroupingDictionary dictionary) + { + //if (dictionary == null) + // return new Spec.Datum() { type = Spec.Datum.DatumType.R_NULL }; + throw new NotImplementedException("IGroupingDictionary objects are only currently supported for reading Group results"); + } + + #endregion + } + + class GroupingDictionary : Dictionary, IGroupingDictionary + { + public GroupingDictionary(int capacity) + : base(capacity) + { + } + } +} diff --git a/rethinkdb-net/DmlResponse.cs b/rethinkdb-net/DmlResponse.cs index 1358329..27c8f1c 100644 --- a/rethinkdb-net/DmlResponse.cs +++ 
b/rethinkdb-net/DmlResponse.cs @@ -38,6 +38,13 @@ public class DmlResponse [DataContract] public class DmlResponse : DmlResponse + { + [DataMember(Name = "changes")] + public DmlResponseChange[] Changes; + } + + [DataContract] + public class DmlResponseChange { [DataMember(Name = "old_val")] public T OldValue; diff --git a/rethinkdb-net/Interfaces/IGroupByReduction.cs b/rethinkdb-net/Interfaces/IGroupByReduction.cs deleted file mode 100644 index bc34df1..0000000 --- a/rethinkdb-net/Interfaces/IGroupByReduction.cs +++ /dev/null @@ -1,13 +0,0 @@ -using System; -using System.ComponentModel; -using RethinkDb.Spec; - -namespace RethinkDb -{ - [ImmutableObject(true)] - public interface IGroupByReduction - { - Term GenerateReductionObject(IDatumConverterFactory datumConverterFactory); - } -} - diff --git a/rethinkdb-net/Interfaces/IGroupingDictionary.cs b/rethinkdb-net/Interfaces/IGroupingDictionary.cs new file mode 100644 index 0000000..902ced5 --- /dev/null +++ b/rethinkdb-net/Interfaces/IGroupingDictionary.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; + +namespace RethinkDb +{ + // This interface is used to support $reql_type$=GROUPED_DATA being returned from the server. In order to use + // grouping capabilities (eg. .Group() queries), a datum converter must always be registered to read this type + // from the server. GroupingDictionaryDatumConverterFactory is provided to support this. + public interface IGroupingDictionary : IDictionary + { + } +} diff --git a/rethinkdb-net/Interfaces/IGroupingQuery.cs b/rethinkdb-net/Interfaces/IGroupingQuery.cs new file mode 100644 index 0000000..ef3d19f --- /dev/null +++ b/rethinkdb-net/Interfaces/IGroupingQuery.cs @@ -0,0 +1,8 @@ +using System; + +namespace RethinkDb +{ + public interface IGroupingQuery : IScalarQuery> + { + } +} diff --git a/rethinkdb-net/Query.cs b/rethinkdb-net/Query.cs index 3288ce3..8d7499e 100644 --- a/rethinkdb-net/Query.cs +++ b/rethinkdb-net/Query.cs @@ -119,7 +119,7 @@ public static IWriteQuery Update(this IMutableSingleObjectQuery< return new UpdateQuery(target, updateExpression, nonAtomic); } - public static IWriteQuery> UpdateAndReturnValue(this IMutableSingleObjectQuery target, Expression> updateExpression, bool nonAtomic = false) + public static IWriteQuery> UpdateAndReturnChanges(this IMutableSingleObjectQuery target, Expression> updateExpression, bool nonAtomic = false) { return new UpdateAndReturnValueQuery(target, updateExpression, nonAtomic); } @@ -134,7 +134,7 @@ public static IWriteQuery Delete(this IMutableSingleObjectQuery< return new DeleteQuery(target); } - public static IWriteQuery> DeleteAndReturnValue(this IMutableSingleObjectQuery target) + public static IWriteQuery> DeleteAndReturnChanges(this IMutableSingleObjectQuery target) { return new DeleteAndReturnValueQuery(target); } @@ -144,7 +144,7 @@ public static IWriteQuery Replace(this IMutableSingleObjectQuery return new ReplaceQuery(target, newObject, nonAtomic); } - public static IWriteQuery> ReplaceAndReturnValue(this IMutableSingleObjectQuery target, T newObject, bool nonAtomic = false) + public static IWriteQuery> ReplaceAndReturnChanges(this IMutableSingleObjectQuery target, T newObject, bool nonAtomic = false) { return new ReplaceAndReturnValueQuery(target, newObject, nonAtomic); } @@ -154,11 +154,6 @@ public static ISequenceQuery Between(this ISequenceQ return new BetweenQuery(target, leftKey, rightKey, indexName, leftBound, rightBound); } - public static ISingleObjectQuery Count(this ISequenceQuery target) - { - return new 
CountQuery(target); - } - public static ISingleObjectQuery Expr(T @object) { return new ExprQuery(@object); @@ -270,11 +265,6 @@ public static ISingleObjectQuery Reduce(this ISequenceQuery sequenceQue return new ReduceQuery(sequenceQuery, reduceFunction); } - public static ISingleObjectQuery Reduce(this ISequenceQuery sequenceQuery, Expression> reduceFunction, T @base) - { - return new ReduceQuery(sequenceQuery, reduceFunction, @base); - } - public static ISingleObjectQuery Nth(this ISequenceQuery sequenceQuery, int index) { return new NthQuery(sequenceQuery, index); @@ -285,16 +275,6 @@ public static ISequenceQuery Distinct(this ISequenceQuery sequenceQuery return new DistinctQuery(sequenceQuery); } - public static ISequenceQuery> GroupedMapReduce(this ISequenceQuery sequenceQuery, Expression> grouping, Expression> mapping, Expression> reduction) - { - return new GroupedMapReduceQuery(sequenceQuery, grouping, mapping, reduction); - } - - public static ISequenceQuery> GroupedMapReduce(this ISequenceQuery sequenceQuery, Expression> grouping, Expression> mapping, Expression> reduction, TMap @base) - { - return new GroupedMapReduceQuery(sequenceQuery, grouping, mapping, reduction, @base); - } - public static ISequenceQuery ConcatMap(this ISequenceQuery sequenceQuery, Expression>> mapping) { return new ConcatMapQuery(sequenceQuery, mapping); @@ -310,11 +290,6 @@ public static ISingleObjectQuery Now() return new NowQuery(); } - public static ISequenceQuery> GroupBy(this ISequenceQuery sequenceQuery, IGroupByReduction reductionObject, Expression> groupKeyConstructor) - { - return new GroupByQuery(sequenceQuery, reductionObject, groupKeyConstructor); - } - public static ISequenceQuery Sample(this ISequenceQuery target, int count) { return new SampleQuery(target, count); @@ -330,24 +305,160 @@ public static ISingleObjectQuery HasFields(this ISingleObjectQuery t return new HasFieldsSingleObjectQuery(target, fields); } - #endregion - #region Prebuilt GroupBy reductions + #region Grouping and Aggregation - public static IGroupByReduction Count() + public static IGroupingQuery Group( + // Can only use indexName on Group on a TABLE, not any arbitrary sequence + this ITableQuery table, + string indexName + ) { - return CountReduction.Instance; + return new GroupByIndexQuery(table, indexName); } - public static IGroupByReduction Sum(Expression> numericMemberReference) + public static IGroupingQuery Group( + this ISequenceQuery sequenceQuery, + Expression> key + ) { - return new SumReduction(numericMemberReference); + return new GroupByFunctionQuery(sequenceQuery, key); } - public static IGroupByReduction Avg(Expression> numericMemberReference) + public static IGroupingQuery, TRecord[]> Group( + this ISequenceQuery sequenceQuery, + Expression> key1, + Expression> key2 + ) { - return new AvgReduction(numericMemberReference); + return new GroupByFunctionQuery(sequenceQuery, key1, key2); } + public static IGroupingQuery, TRecord[]> Group( + this ISequenceQuery sequenceQuery, + Expression> key1, + Expression> key2, + Expression> key3 + ) + { + return new GroupByFunctionQuery(sequenceQuery, key1, key2, key3); + } + + public static ISequenceQuery> Ungroup(this IGroupingQuery groupingQuery) + { + return new UngroupQuery(groupingQuery); + } + + public static IGroupingQuery Map( + this IGroupingQuery groupingQuery, + Expression> mapExpression) + { + return new MapGroupQuery(groupingQuery, mapExpression); + } + + public static IGroupingQuery Reduce( + this IGroupingQuery groupingQuery, + Expression> reduceFunction) 
+ { + return new ReduceGroupQuery(groupingQuery, reduceFunction); + } + + public static IGroupingQuery Min( + this IGroupingQuery groupingQuery, + Expression> field = null + ) + { + return new MinGroupAggregateQuery(groupingQuery, field); + } + + public static ISingleObjectQuery Min( + this ISequenceQuery sequenceQuery, + Expression> field = null + ) + { + return new MinAggregateQuery(sequenceQuery, field); + } + + public static IGroupingQuery Max( + this IGroupingQuery groupingQuery, + Expression> field = null + ) + { + return new MaxGroupAggregateQuery(groupingQuery, field); + } + + public static ISingleObjectQuery Max( + this ISequenceQuery sequenceQuery, + Expression> field = null + ) + { + return new MaxAggregateQuery(sequenceQuery, field); + } + + public static IGroupingQuery Avg( + this IGroupingQuery groupingQuery, + Expression> field = null + ) + { + return new AvgGroupAggregateQuery(groupingQuery, field); + } + + public static ISingleObjectQuery Avg( + this ISequenceQuery sequenceQuery, + Expression> field = null + ) + { + return new AvgAggregateQuery(sequenceQuery, field); + } + + public static IGroupingQuery Sum( + this IGroupingQuery groupingQuery, + Expression> field = null + ) + { + return new SumGroupAggregateQuery(groupingQuery, field); + } + + public static ISingleObjectQuery Sum( + this ISequenceQuery sequenceQuery, + Expression> field = null + ) + { + return new SumAggregateQuery(sequenceQuery, field); + } + + public static IGroupingQuery Count( + this IGroupingQuery groupingQuery, + Expression> predicate = null + ) + { + return new CountGroupAggregateQuery(groupingQuery, predicate); + } + + public static ISingleObjectQuery Count( + this ISequenceQuery target, + Expression> predicate = null + ) + { + return new CountAggregateQuery(target, predicate); + } + + public static IGroupingQuery Contains( + this IGroupingQuery groupingQuery, + Expression> predicate = null + ) + { + return new ContainsGroupAggregateQuery(groupingQuery, predicate); + } + + public static ISingleObjectQuery Contains( + this ISequenceQuery target, + Expression> predicate = null + ) + { + return new ContainsAggregateQuery(target, predicate); + } + + #endregion #endregion } } diff --git a/rethinkdb-net/QueryTerm/AvgAggregateQuery.cs b/rethinkdb-net/QueryTerm/AvgAggregateQuery.cs new file mode 100644 index 0000000..2b2d415 --- /dev/null +++ b/rethinkdb-net/QueryTerm/AvgAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class AvgAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> field; + + public AvgAggregateQuery(ISequenceQuery sequenceQuery, Expression> field) + { + this.sequenceQuery = sequenceQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.AVG, + }; + term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/AvgGroupAggregateQuery.cs b/rethinkdb-net/QueryTerm/AvgGroupAggregateQuery.cs new file mode 100644 index 0000000..d9b303f --- /dev/null +++ b/rethinkdb-net/QueryTerm/AvgGroupAggregateQuery.cs @@ -0,0 
+1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class AvgGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> field; + + public AvgGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> field) + { + this.groupingQuery = groupingQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.AVG, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/AvgReduction.cs b/rethinkdb-net/QueryTerm/AvgReduction.cs deleted file mode 100644 index 8583214..0000000 --- a/rethinkdb-net/QueryTerm/AvgReduction.cs +++ /dev/null @@ -1,60 +0,0 @@ -using System; -using System.Linq.Expressions; -using RethinkDb.DatumConverters; -using RethinkDb.Spec; - -namespace RethinkDb.QueryTerm -{ - public class AvgReduction : IGroupByReduction - { - private readonly Expression> numericMemberReference; - - public AvgReduction(Expression> numericMemberReference) - { - this.numericMemberReference = numericMemberReference; - } - - public Term GenerateReductionObject(IDatumConverterFactory datumConverterFactory) - { - var retval = new Term() { - type = Term.TermType.MAKE_OBJ - }; - retval.optargs.Add(new Term.AssocPair() { - key = "AVG", - val = new Term() { - type = Term.TermType.DATUM, - datum = new Datum() { - type = Datum.DatumType.R_STR, - r_str = GetMemberName(datumConverterFactory) - } - } - }); - return retval; - } - - private string GetMemberName(IDatumConverterFactory datumConverterFactory) - { - var datumConverter = datumConverterFactory.Get(); - var fieldConverter = datumConverter as IObjectDatumConverter; - if (fieldConverter == null) - throw new NotSupportedException("Cannot map member access into ReQL without implementing IObjectDatumConverter"); - - if (numericMemberReference.NodeType != ExpressionType.Lambda) - throw new NotSupportedException("Unsupported expression type " + numericMemberReference.Type + "; expected Lambda"); - - var body = ((LambdaExpression)numericMemberReference).Body; - MemberExpression memberExpr; - - if (body.NodeType == ExpressionType.MemberAccess) - memberExpr = (MemberExpression)body; - else - throw new NotSupportedException("Unsupported expression type " + body.NodeType + "; expected MemberAccess"); - - if (memberExpr.Expression.NodeType != ExpressionType.Parameter) - throw new NotSupportedException("Unrecognized member access pattern"); - - return fieldConverter.GetDatumFieldName(memberExpr.Member); - } - } -} - diff --git a/rethinkdb-net/QueryTerm/ConcatMapQuery.cs b/rethinkdb-net/QueryTerm/ConcatMapQuery.cs index efd6952..4bb0b18 100644 --- a/rethinkdb-net/QueryTerm/ConcatMapQuery.cs +++ b/rethinkdb-net/QueryTerm/ConcatMapQuery.cs @@ -20,7 +20,7 @@ public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) { var mapTerm = new Term() { - type = Term.TermType.CONCATMAP, + type = Term.TermType.CONCAT_MAP, }; mapTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); mapTerm.args.Add(ExpressionUtils.CreateFunctionTerm>(datumConverterFactory, mapExpression)); diff --git 
a/rethinkdb-net/QueryTerm/ContainsAggregateQuery.cs b/rethinkdb-net/QueryTerm/ContainsAggregateQuery.cs new file mode 100644 index 0000000..74e04b1 --- /dev/null +++ b/rethinkdb-net/QueryTerm/ContainsAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class ContainsAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> predicate; + + public ContainsAggregateQuery(ISequenceQuery sequenceQuery, Expression> predicate = null) + { + this.sequenceQuery = sequenceQuery; + this.predicate = predicate; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var countTerm = new Term() + { + type = Term.TermType.CONTAINS, + }; + countTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (predicate != null) + { + if (predicate.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + countTerm.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, predicate)); + } + return countTerm; + } + } +} diff --git a/rethinkdb-net/QueryTerm/ContainsGroupAggregateQuery.cs b/rethinkdb-net/QueryTerm/ContainsGroupAggregateQuery.cs new file mode 100644 index 0000000..fb6cfc2 --- /dev/null +++ b/rethinkdb-net/QueryTerm/ContainsGroupAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class ContainsGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> predicate; + + public ContainsGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> predicate) + { + this.groupingQuery = groupingQuery; + this.predicate = predicate; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.CONTAINS, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (predicate != null) + { + if (predicate.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, predicate)); + } + return term; + } + } +} diff --git a/rethinkdb-net/QueryTerm/CountAggregateQuery.cs b/rethinkdb-net/QueryTerm/CountAggregateQuery.cs new file mode 100644 index 0000000..62da6ac --- /dev/null +++ b/rethinkdb-net/QueryTerm/CountAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class CountAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> predicate; + + public CountAggregateQuery(ISequenceQuery sequenceQuery, Expression> predicate = null) + { + this.sequenceQuery = sequenceQuery; + this.predicate = predicate; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var countTerm = new Term() + { + type = Term.TermType.COUNT, + }; + countTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (predicate != null) + { + if (predicate.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + countTerm.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, predicate)); + } + return countTerm; + } + } +} diff --git a/rethinkdb-net/QueryTerm/CountGroupAggregateQuery.cs 
b/rethinkdb-net/QueryTerm/CountGroupAggregateQuery.cs new file mode 100644 index 0000000..e921547 --- /dev/null +++ b/rethinkdb-net/QueryTerm/CountGroupAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class CountGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> predicate; + + public CountGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> predicate) + { + this.groupingQuery = groupingQuery; + this.predicate = predicate; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.COUNT, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (predicate != null) + { + if (predicate.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, predicate)); + } + return term; + } + } +} diff --git a/rethinkdb-net/QueryTerm/CountQuery.cs b/rethinkdb-net/QueryTerm/CountQuery.cs deleted file mode 100644 index f75e735..0000000 --- a/rethinkdb-net/QueryTerm/CountQuery.cs +++ /dev/null @@ -1,25 +0,0 @@ -using RethinkDb.Spec; -using System; - -namespace RethinkDb.QueryTerm -{ - public class CountQuery : ISingleObjectQuery - { - private readonly ISequenceQuery sequenceQuery; - - public CountQuery(ISequenceQuery sequenceQuery) - { - this.sequenceQuery = sequenceQuery; - } - - public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) - { - var countTerm = new Term() - { - type = Term.TermType.COUNT, - }; - countTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); - return countTerm; - } - } -} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/CountReduction.cs b/rethinkdb-net/QueryTerm/CountReduction.cs deleted file mode 100644 index 5744b48..0000000 --- a/rethinkdb-net/QueryTerm/CountReduction.cs +++ /dev/null @@ -1,39 +0,0 @@ -using System; -using RethinkDb.Spec; - -namespace RethinkDb.QueryTerm -{ - public class CountReduction : IGroupByReduction - { - public static readonly CountReduction Instance = new CountReduction(); - private Term retval; - - private CountReduction() - { - } - - public Term GenerateReductionObject(IDatumConverterFactory datumConverterFactory) - { - if (retval == null) - { - var newValue = new Term() { - type = Term.TermType.MAKE_OBJ - }; - newValue.optargs.Add(new Term.AssocPair() { - key = "COUNT", - val = new Term() { - type = Term.TermType.DATUM, - datum = new Datum() { - type = Datum.DatumType.R_BOOL, - r_bool = true - } - } - }); - retval = newValue; - } - - return retval; - } - } -} - diff --git a/rethinkdb-net/QueryTerm/DeleteAndReturnValueQuery.cs b/rethinkdb-net/QueryTerm/DeleteAndReturnValueQuery.cs index a8a3b2b..a129141 100644 --- a/rethinkdb-net/QueryTerm/DeleteAndReturnValueQuery.cs +++ b/rethinkdb-net/QueryTerm/DeleteAndReturnValueQuery.cs @@ -12,7 +12,7 @@ public DeleteAndReturnValueQuery(IMutableSingleObjectQuery getTerm) protected override void AddOptionalArguments(Term updateTerm) { updateTerm.optargs.Add(new Term.AssocPair() { - key = "return_vals", + key = "return_changes", val = new Term() { type = Term.TermType.DATUM, datum = new Datum() { diff --git a/rethinkdb-net/QueryTerm/GroupByFunctionQuery.cs b/rethinkdb-net/QueryTerm/GroupByFunctionQuery.cs new file mode 100644 index 0000000..d322575 --- /dev/null +++ 
b/rethinkdb-net/QueryTerm/GroupByFunctionQuery.cs @@ -0,0 +1,104 @@ +using System; +using System.Linq; +using System.Linq.Expressions; +using System.Collections.Generic; +using RethinkDb.DatumConverters; +using RethinkDb.Spec; + +namespace RethinkDb.QueryTerm +{ + public abstract class GroupByFunctionQueryBase : IGroupingQuery + { + private ISequenceQuery sequenceQuery; + + protected GroupByFunctionQueryBase(ISequenceQuery sequenceQuery) + { + this.sequenceQuery = sequenceQuery; + } + + protected abstract void GenerateFunctionTerms(Term term, IDatumConverterFactory datumConverterFactory); + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.GROUP, + }; + term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + GenerateFunctionTerms(term, datumConverterFactory); + return term; + } + } + + public class GroupByFunctionQuery : GroupByFunctionQueryBase + { + private Expression> keyExpression; + + public GroupByFunctionQuery(ISequenceQuery sequenceQuery, Expression> keyExpression) + : base(sequenceQuery) + { + this.keyExpression = keyExpression; + } + + protected override void GenerateFunctionTerms(Term term, IDatumConverterFactory datumConverterFactory) + { + if (keyExpression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, keyExpression)); + } + } + + public class GroupByFunctionQuery : GroupByFunctionQueryBase> + { + private Expression> key1Expression; + private Expression> key2Expression; + + public GroupByFunctionQuery(ISequenceQuery sequenceQuery, Expression> key1Expression, Expression> key2Expression) + : base(sequenceQuery) + { + this.key1Expression = key1Expression; + this.key2Expression = key2Expression; + } + + protected override void GenerateFunctionTerms(Term term, IDatumConverterFactory datumConverterFactory) + { + if (key1Expression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, key1Expression)); + + if (key2Expression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, key2Expression)); + } + } + + public class GroupByFunctionQuery : GroupByFunctionQueryBase> + { + private Expression> key1Expression; + private Expression> key2Expression; + private Expression> key3Expression; + + public GroupByFunctionQuery(ISequenceQuery sequenceQuery, Expression> key1Expression, Expression> key2Expression, Expression> key3Expression) + : base(sequenceQuery) + { + this.key1Expression = key1Expression; + this.key2Expression = key2Expression; + this.key3Expression = key3Expression; + } + + protected override void GenerateFunctionTerms(Term term, IDatumConverterFactory datumConverterFactory) + { + if (key1Expression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, key1Expression)); + + if (key2Expression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, key2Expression)); + + if (key3Expression.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + 
term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, key3Expression)); + } + } +} diff --git a/rethinkdb-net/QueryTerm/GroupByIndexQuery.cs b/rethinkdb-net/QueryTerm/GroupByIndexQuery.cs new file mode 100644 index 0000000..4667237 --- /dev/null +++ b/rethinkdb-net/QueryTerm/GroupByIndexQuery.cs @@ -0,0 +1,37 @@ +using RethinkDb.Spec; + +namespace RethinkDb.QueryTerm +{ + public class GroupByIndexQuery : IGroupingQuery + { + private ITableQuery tableQuery; + private string indexName; + + public GroupByIndexQuery(ITableQuery tableQuery, string indexName) + { + this.tableQuery = tableQuery; + this.indexName = indexName; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.GROUP, + }; + term.args.Add(tableQuery.GenerateTerm(datumConverterFactory)); + term.args.Add( + new Term() + { + type = Term.TermType.DATUM, + datum = new Datum() + { + type = Datum.DatumType.R_STR, + r_str = indexName, + } + } + ); + return term; + } + } +} diff --git a/rethinkdb-net/QueryTerm/GroupByQuery.cs b/rethinkdb-net/QueryTerm/GroupByQuery.cs deleted file mode 100644 index 972b65d..0000000 --- a/rethinkdb-net/QueryTerm/GroupByQuery.cs +++ /dev/null @@ -1,97 +0,0 @@ -using System; -using System.Linq; -using System.Linq.Expressions; -using System.Collections.Generic; -using RethinkDb.DatumConverters; -using RethinkDb.Spec; - -namespace RethinkDb.QueryTerm -{ - public abstract class GroupByQueryBase - { - private readonly ISequenceQuery sequenceQuery; - private readonly IGroupByReduction reductionObject; - private readonly Expression groupKeyConstructor; - - protected GroupByQueryBase(ISequenceQuery sequenceQuery, IGroupByReduction reductionObject, Expression groupKeyConstructor) - { - this.sequenceQuery = sequenceQuery; - this.reductionObject = reductionObject; - this.groupKeyConstructor = groupKeyConstructor; - } - - public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) - { - var term = new Term() - { - type = Term.TermType.GROUPBY, - }; - term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); - - var propertyTerm = new Term() { - type = Term.TermType.MAKE_ARRAY - }; - - if (groupKeyConstructor.NodeType != ExpressionType.Lambda) - throw new NotSupportedException("Unsupported expression type " + groupKeyConstructor.NodeType + "; expected Lambda"); - - var body = ((LambdaExpression)groupKeyConstructor).Body; - if (body.NodeType != ExpressionType.New) - throw new NotSupportedException("GroupByQuery expects an expression in the form of: new { key1 = ...[, keyN = ...] 
}"); - - var newExpression = (NewExpression)body; - if (!AnonymousTypeDatumConverterFactory.Instance.IsTypeSupported(newExpression.Type)) - throw new NotSupportedException(String.Format("Unsupported type in New expression: {0}; only anonymous types are supported", newExpression.Type)); - - foreach (var property in newExpression.Type.GetProperties().Select((p, i) => new { Property = p, Index = i })) - { - var key = property.Property.Name; - var value = GetMemberName(newExpression.Arguments[property.Index], datumConverterFactory); - if (!String.Equals(key, value.r_str, StringComparison.InvariantCultureIgnoreCase)) - throw new Exception(String.Format("Anonymous type property name ({0}) must equal the member name ({1})", key, value.r_str)); - propertyTerm.args.Add(new Term() { - type = Term.TermType.DATUM, - datum = value, - }); - } - term.args.Add(propertyTerm); - - term.args.Add(reductionObject.GenerateReductionObject(datumConverterFactory)); - - return term; - } - - private Datum GetMemberName(Expression memberReference, IDatumConverterFactory datumConverterFactory) - { - var datumConverter = datumConverterFactory.Get(); - var fieldConverter = datumConverter as IObjectDatumConverter; - if (fieldConverter == null) - throw new NotSupportedException("Cannot map member access into ReQL without implementing IObjectDatumConverter"); - - MemberExpression memberExpr; - if (memberReference.NodeType == ExpressionType.MemberAccess) - memberExpr = (MemberExpression)memberReference; - else - throw new NotSupportedException("Unsupported expression type " + memberReference.NodeType + "; expected MemberAccess"); - - if (memberExpr.Expression.NodeType != ExpressionType.Parameter) - throw new NotSupportedException("Unrecognized member access pattern"); - - return new Datum() { - type = Datum.DatumType.R_STR, - r_str = fieldConverter.GetDatumFieldName(memberExpr.Member) - }; - } - } - - public class GroupByQuery - : GroupByQueryBase, ISequenceQuery> - { - public GroupByQuery(ISequenceQuery sequenceQuery, - IGroupByReduction reductionObject, - Expression> groupKeyConstructor) - : base(sequenceQuery, reductionObject, groupKeyConstructor) - { - } - } -} diff --git a/rethinkdb-net/QueryTerm/GroupedMapReduceQuery.cs b/rethinkdb-net/QueryTerm/GroupedMapReduceQuery.cs deleted file mode 100644 index 8c56373..0000000 --- a/rethinkdb-net/QueryTerm/GroupedMapReduceQuery.cs +++ /dev/null @@ -1,57 +0,0 @@ -using System; -using System.Linq.Expressions; -using RethinkDb.DatumConverters; -using RethinkDb.Spec; - -namespace RethinkDb.QueryTerm -{ - public class GroupedMapReduceQuery : ISequenceQuery> - { - private readonly ISequenceQuery sequenceQuery; - private readonly Expression> grouping; - private readonly Expression> mapping; - private readonly Expression> reduction; - private readonly bool baseProvided; - private readonly TMap @base; - - public GroupedMapReduceQuery(ISequenceQuery sequenceQuery, Expression> grouping, Expression> mapping, Expression> reduction) - { - this.sequenceQuery = sequenceQuery; - this.grouping = grouping; - this.mapping = mapping; - this.reduction = reduction; - } - - public GroupedMapReduceQuery(ISequenceQuery sequenceQuery, Expression> grouping, Expression> mapping, Expression> reduction, TMap @base) - : this(sequenceQuery, grouping, mapping, reduction) - { - this.baseProvided = true; - this.@base = @base; - } - - public Term GenerateTerm (IDatumConverterFactory datumConverterFactory) - { - var retval = new Term() { - type = Term.TermType.GROUPED_MAP_REDUCE, - }; - 
retval.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); - retval.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, grouping)); - retval.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, mapping)); - retval.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, reduction)); - - if (this.baseProvided) - { - retval.optargs.Add(new Term.AssocPair() { - key = "base", - val = new Term() { - type = Term.TermType.DATUM, - datum = datumConverterFactory.Get().ConvertObject(@base) - } - }); - } - - return retval; - } - } -} - diff --git a/rethinkdb-net/QueryTerm/MapGroupQuery.cs b/rethinkdb-net/QueryTerm/MapGroupQuery.cs new file mode 100644 index 0000000..367c311 --- /dev/null +++ b/rethinkdb-net/QueryTerm/MapGroupQuery.cs @@ -0,0 +1,29 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class MapGroupQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> mapExpression; + + public MapGroupQuery(IGroupingQuery groupingQuery, Expression> mapExpression) + { + this.groupingQuery = groupingQuery; + this.mapExpression = mapExpression; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var mapTerm = new Term() + { + type = Term.TermType.MAP, + }; + mapTerm.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + mapTerm.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, mapExpression)); + return mapTerm; + } + } +} diff --git a/rethinkdb-net/QueryTerm/MaxAggregateQuery.cs b/rethinkdb-net/QueryTerm/MaxAggregateQuery.cs new file mode 100644 index 0000000..125f36e --- /dev/null +++ b/rethinkdb-net/QueryTerm/MaxAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class MaxAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> field; + + public MaxAggregateQuery(ISequenceQuery sequenceQuery, Expression> field) + { + this.sequenceQuery = sequenceQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.MAX, + }; + term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/MaxGroupAggregateQuery.cs b/rethinkdb-net/QueryTerm/MaxGroupAggregateQuery.cs new file mode 100644 index 0000000..0e8ff82 --- /dev/null +++ b/rethinkdb-net/QueryTerm/MaxGroupAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class MaxGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> field; + + public MaxGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> field) + { + this.groupingQuery = groupingQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.MAX, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if 
(field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/MinAggregateQuery.cs b/rethinkdb-net/QueryTerm/MinAggregateQuery.cs new file mode 100644 index 0000000..1cd50cc --- /dev/null +++ b/rethinkdb-net/QueryTerm/MinAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class MinAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> field; + + public MinAggregateQuery(ISequenceQuery sequenceQuery, Expression> field) + { + this.sequenceQuery = sequenceQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.MIN, + }; + term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/MinGroupAggregateQuery.cs b/rethinkdb-net/QueryTerm/MinGroupAggregateQuery.cs new file mode 100644 index 0000000..44dfa09 --- /dev/null +++ b/rethinkdb-net/QueryTerm/MinGroupAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class MinGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> field; + + public MinGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> field) + { + this.groupingQuery = groupingQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.MIN, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/OrderByQuery.cs b/rethinkdb-net/QueryTerm/OrderByQuery.cs index 304ddd9..d1e80a0 100644 --- a/rethinkdb-net/QueryTerm/OrderByQuery.cs +++ b/rethinkdb-net/QueryTerm/OrderByQuery.cs @@ -34,7 +34,7 @@ public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) var orderByTerm = new Term() { - type = Term.TermType.ORDERBY, + type = Term.TermType.ORDER_BY, }; orderByTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); orderByTerm.args.AddRange(GetMembers(datumConverterFactory, out indexOrderBy)); diff --git a/rethinkdb-net/QueryTerm/ReduceGroupQuery.cs b/rethinkdb-net/QueryTerm/ReduceGroupQuery.cs new file mode 100644 index 0000000..7657f60 --- /dev/null +++ b/rethinkdb-net/QueryTerm/ReduceGroupQuery.cs @@ -0,0 +1,30 @@ +using System; +using System.Linq.Expressions; +using RethinkDb.DatumConverters; +using RethinkDb.Spec; + +namespace RethinkDb.QueryTerm +{ + public class ReduceGroupQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly 
Expression> reduceFunction; + + public ReduceGroupQuery(IGroupingQuery groupingQuery, Expression> reduceFunction) + { + this.groupingQuery = groupingQuery; + this.reduceFunction = reduceFunction; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var reduceTerm = new Term() + { + type = Term.TermType.REDUCE, + }; + reduceTerm.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + reduceTerm.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, reduceFunction)); + return reduceTerm; + } + } +} diff --git a/rethinkdb-net/QueryTerm/ReduceQuery.cs b/rethinkdb-net/QueryTerm/ReduceQuery.cs index 5a38ebb..49f8a3b 100644 --- a/rethinkdb-net/QueryTerm/ReduceQuery.cs +++ b/rethinkdb-net/QueryTerm/ReduceQuery.cs @@ -9,21 +9,11 @@ public class ReduceQuery : ISingleObjectQuery { private readonly ISequenceQuery sequenceQuery; private readonly Expression> reduceFunction; - private readonly bool baseProvided; - private readonly T @base; public ReduceQuery(ISequenceQuery sequenceQuery, Expression> reduceFunction) { this.sequenceQuery = sequenceQuery; this.reduceFunction = reduceFunction; - this.baseProvided = false; - } - - public ReduceQuery(ISequenceQuery sequenceQuery, Expression> reduceFunction, T @base) - : this (sequenceQuery, reduceFunction) - { - this.baseProvided = true; - this.@base = @base; } public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) @@ -34,18 +24,6 @@ public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) }; reduceTerm.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); reduceTerm.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, reduceFunction)); - - if (this.baseProvided) - { - reduceTerm.optargs.Add(new Term.AssocPair() { - key = "base", - val = new Term() { - type = Term.TermType.DATUM, - datum = datumConverterFactory.Get().ConvertObject(@base) - } - }); - } - return reduceTerm; } } diff --git a/rethinkdb-net/QueryTerm/ReplaceAndReturnValueQuery.cs b/rethinkdb-net/QueryTerm/ReplaceAndReturnValueQuery.cs index 01186e0..c22631c 100644 --- a/rethinkdb-net/QueryTerm/ReplaceAndReturnValueQuery.cs +++ b/rethinkdb-net/QueryTerm/ReplaceAndReturnValueQuery.cs @@ -12,7 +12,7 @@ public ReplaceAndReturnValueQuery(IMutableSingleObjectQuery getTerm, T newObj protected override void AddOptionalArguments(Term updateTerm) { updateTerm.optargs.Add(new Term.AssocPair() { - key = "return_vals", + key = "return_changes", val = new Term() { type = Term.TermType.DATUM, datum = new Datum() { diff --git a/rethinkdb-net/QueryTerm/SumAggregateQuery.cs b/rethinkdb-net/QueryTerm/SumAggregateQuery.cs new file mode 100644 index 0000000..9655579 --- /dev/null +++ b/rethinkdb-net/QueryTerm/SumAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class SumAggregateQuery : ISingleObjectQuery + { + private readonly ISequenceQuery sequenceQuery; + private readonly Expression> field; + + public SumAggregateQuery(ISequenceQuery sequenceQuery, Expression> field) + { + this.sequenceQuery = sequenceQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.SUM, + }; + term.args.Add(sequenceQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + 
term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/SumGroupAggregateQuery.cs b/rethinkdb-net/QueryTerm/SumGroupAggregateQuery.cs new file mode 100644 index 0000000..f36d46c --- /dev/null +++ b/rethinkdb-net/QueryTerm/SumGroupAggregateQuery.cs @@ -0,0 +1,34 @@ +using RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class SumGroupAggregateQuery : IGroupingQuery + { + private readonly IGroupingQuery groupingQuery; + private readonly Expression> field; + + public SumGroupAggregateQuery(IGroupingQuery groupingQuery, Expression> field) + { + this.groupingQuery = groupingQuery; + this.field = field; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.SUM, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + if (field != null) + { + if (field.NodeType != ExpressionType.Lambda) + throw new NotSupportedException("Unsupported expression type"); + term.args.Add(ExpressionUtils.CreateFunctionTerm(datumConverterFactory, field)); + } + return term; + } + } +} \ No newline at end of file diff --git a/rethinkdb-net/QueryTerm/SumReduction.cs b/rethinkdb-net/QueryTerm/SumReduction.cs deleted file mode 100644 index a27346f..0000000 --- a/rethinkdb-net/QueryTerm/SumReduction.cs +++ /dev/null @@ -1,60 +0,0 @@ -using System; -using System.Linq.Expressions; -using RethinkDb.DatumConverters; -using RethinkDb.Spec; - -namespace RethinkDb.QueryTerm -{ - public class SumReduction : IGroupByReduction - { - private readonly Expression> numericMemberReference; - - public SumReduction(Expression> numericMemberReference) - { - this.numericMemberReference = numericMemberReference; - } - - public Term GenerateReductionObject(IDatumConverterFactory datumConverterFactory) - { - var retval = new Term() { - type = Term.TermType.MAKE_OBJ - }; - retval.optargs.Add(new Term.AssocPair() { - key = "SUM", - val = new Term() { - type = Term.TermType.DATUM, - datum = new Datum() { - type = Datum.DatumType.R_STR, - r_str = GetMemberName(datumConverterFactory) - } - } - }); - return retval; - } - - private string GetMemberName(IDatumConverterFactory datumConverterFactory) - { - var datumConverter = datumConverterFactory.Get(); - var fieldConverter = datumConverter as IObjectDatumConverter; - if (fieldConverter == null) - throw new NotSupportedException("Cannot map member access into ReQL without implementing IObjectDatumConverter"); - - if (numericMemberReference.NodeType != ExpressionType.Lambda) - throw new NotSupportedException("Unsupported expression type " + numericMemberReference.Type + "; expected Lambda"); - - var body = ((LambdaExpression)numericMemberReference).Body; - MemberExpression memberExpr; - - if (body.NodeType == ExpressionType.MemberAccess) - memberExpr = (MemberExpression)body; - else - throw new NotSupportedException("Unsupported expression type " + body.NodeType + "; expected MemberAccess"); - - if (memberExpr.Expression.NodeType != ExpressionType.Parameter) - throw new NotSupportedException("Unrecognized member access pattern"); - - return fieldConverter.GetDatumFieldName(memberExpr.Member); - } - } -} - diff --git a/rethinkdb-net/QueryTerm/UngroupQuery.cs b/rethinkdb-net/QueryTerm/UngroupQuery.cs new file mode 100644 index 0000000..c420e0b --- /dev/null +++ b/rethinkdb-net/QueryTerm/UngroupQuery.cs @@ -0,0 +1,26 @@ +using 
RethinkDb.Spec; +using System; +using System.Linq.Expressions; + +namespace RethinkDb.QueryTerm +{ + public class UngroupQuery : ISequenceQuery> + { + private readonly IGroupingQuery groupingQuery; + + public UngroupQuery(IGroupingQuery groupingQuery) + { + this.groupingQuery = groupingQuery; + } + + public Term GenerateTerm(IDatumConverterFactory datumConverterFactory) + { + var term = new Term() + { + type = Term.TermType.UNGROUP, + }; + term.args.Add(groupingQuery.GenerateTerm(datumConverterFactory)); + return term; + } + } +} diff --git a/rethinkdb-net/QueryTerm/UpdateAndReturnValueQuery.cs b/rethinkdb-net/QueryTerm/UpdateAndReturnValueQuery.cs index d519010..09875ad 100644 --- a/rethinkdb-net/QueryTerm/UpdateAndReturnValueQuery.cs +++ b/rethinkdb-net/QueryTerm/UpdateAndReturnValueQuery.cs @@ -14,7 +14,7 @@ public UpdateAndReturnValueQuery(IMutableSingleObjectQuery singleObjectTerm, protected override void AddOptionalArguments(Term updateTerm) { updateTerm.optargs.Add(new Term.AssocPair() { - key = "return_vals", + key = "return_changes", val = new Term() { type = Term.TermType.DATUM, datum = new Datum() { diff --git a/rethinkdb-net/UngroupObject.cs b/rethinkdb-net/UngroupObject.cs new file mode 100644 index 0000000..0fb9f5a --- /dev/null +++ b/rethinkdb-net/UngroupObject.cs @@ -0,0 +1,15 @@ +using System; +using System.Runtime.Serialization; + +namespace RethinkDb +{ + [DataContract] + public class UngroupObject + { + [DataMember(Name = "group")] + public TGroup Group; + + [DataMember(Name = "reduction")] + public TReduction Reduction; + } +} diff --git a/rethinkdb-net/rethinkdb-net.csproj b/rethinkdb-net/rethinkdb-net.csproj index 57624f0..4303113 100644 --- a/rethinkdb-net/rethinkdb-net.csproj +++ b/rethinkdb-net/rethinkdb-net.csproj @@ -71,7 +71,6 @@ - @@ -100,14 +99,9 @@ - - - - - @@ -147,7 +141,6 @@ - @@ -172,6 +165,27 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/rethinkdb-net/rethinkdb_spec.cs b/rethinkdb-net/rethinkdb_spec.cs index 798dbc0..85ae92c 100644 --- a/rethinkdb-net/rethinkdb_spec.cs +++ b/rethinkdb-net/rethinkdb_spec.cs @@ -23,7 +23,21 @@ public enum Version V0_1 = 1063369270, [global::ProtoBuf.ProtoEnum(Name=@"V0_2", Value=1915781601)] - V0_2 = 1915781601 + V0_2 = 1915781601, + + [global::ProtoBuf.ProtoEnum(Name=@"V0_3", Value=1601562686)] + V0_3 = 1601562686 + } + + [global::ProtoBuf.ProtoContract(Name=@"Protocol")] + public enum Protocol + { + + [global::ProtoBuf.ProtoEnum(Name=@"PROTOBUF", Value=656407617)] + PROTOBUF = 656407617, + + [global::ProtoBuf.ProtoEnum(Name=@"JSON", Value=2120839367)] + JSON = 2120839367 } private global::ProtoBuf.IExtension extensionObject; @@ -72,6 +86,15 @@ public bool OBSOLETE_noreply get { return _OBSOLETE_noreply; } set { _OBSOLETE_noreply = value; } } + + private bool _accepts_r_json = (bool)false; + [global::ProtoBuf.ProtoMember(5, IsRequired = false, Name=@"accepts_r_json", DataFormat = global::ProtoBuf.DataFormat.Default)] + [global::System.ComponentModel.DefaultValue((bool)false)] + public bool accepts_r_json + { + get { return _accepts_r_json; } + set { _accepts_r_json = value; } + } private readonly global::System.Collections.Generic.List _global_optargs = new global::System.Collections.Generic.List(); [global::ProtoBuf.ProtoMember(6, Name=@"global_optargs", DataFormat = global::ProtoBuf.DataFormat.Default)] public global::System.Collections.Generic.List global_optargs @@ -118,7 +141,10 @@ public enum QueryType CONTINUE = 2, [global::ProtoBuf.ProtoEnum(Name=@"STOP", Value=3)] - STOP = 3 + STOP = 3, + + 
[global::ProtoBuf.ProtoEnum(Name=@"NOREPLY_WAIT", Value=4)] + NOREPLY_WAIT = 4 } private global::ProtoBuf.IExtension extensionObject; @@ -230,6 +256,15 @@ public Backtrace backtrace get { return _backtrace; } set { _backtrace = value; } } + + private Datum _profile = null; + [global::ProtoBuf.ProtoMember(5, IsRequired = false, Name=@"profile", DataFormat = global::ProtoBuf.DataFormat.Default)] + [global::System.ComponentModel.DefaultValue(null)] + public Datum profile + { + get { return _profile; } + set { _profile = value; } + } [global::ProtoBuf.ProtoContract(Name=@"ResponseType")] public enum ResponseType { @@ -243,6 +278,12 @@ public enum ResponseType [global::ProtoBuf.ProtoEnum(Name=@"SUCCESS_PARTIAL", Value=3)] SUCCESS_PARTIAL = 3, + [global::ProtoBuf.ProtoEnum(Name=@"SUCCESS_FEED", Value=5)] + SUCCESS_FEED = 5, + + [global::ProtoBuf.ProtoEnum(Name=@"WAIT_COMPLETE", Value=4)] + WAIT_COMPLETE = 4, + [global::ProtoBuf.ProtoEnum(Name=@"CLIENT_ERROR", Value=16)] CLIENT_ERROR = 16, @@ -361,7 +402,10 @@ public enum DatumType R_ARRAY = 5, [global::ProtoBuf.ProtoEnum(Name=@"R_OBJECT", Value=6)] - R_OBJECT = 6 + R_OBJECT = 6, + + [global::ProtoBuf.ProtoEnum(Name=@"R_JSON", Value=7)] + R_JSON = 7 } private global::ProtoBuf.IExtension extensionObject; @@ -453,6 +497,12 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"JAVASCRIPT", Value=11)] JAVASCRIPT = 11, + [global::ProtoBuf.ProtoEnum(Name=@"UUID", Value=169)] + UUID = 169, + + [global::ProtoBuf.ProtoEnum(Name=@"HTTP", Value=153)] + HTTP = 153, + [global::ProtoBuf.ProtoEnum(Name=@"ERROR", Value=12)] ERROR = 12, @@ -549,6 +599,9 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"KEYS", Value=94)] KEYS = 94, + [global::ProtoBuf.ProtoEnum(Name=@"OBJECT", Value=143)] + OBJECT = 143, + [global::ProtoBuf.ProtoEnum(Name=@"HAS_FIELDS", Value=32)] HAS_FIELDS = 32, @@ -576,11 +629,11 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"FILTER", Value=39)] FILTER = 39, - [global::ProtoBuf.ProtoEnum(Name=@"CONCATMAP", Value=40)] - CONCATMAP = 40, + [global::ProtoBuf.ProtoEnum(Name=@"CONCAT_MAP", Value=40)] + CONCAT_MAP = 40, - [global::ProtoBuf.ProtoEnum(Name=@"ORDERBY", Value=41)] - ORDERBY = 41, + [global::ProtoBuf.ProtoEnum(Name=@"ORDER_BY", Value=41)] + ORDER_BY = 41, [global::ProtoBuf.ProtoEnum(Name=@"DISTINCT", Value=42)] DISTINCT = 42, @@ -597,11 +650,8 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"NTH", Value=45)] NTH = 45, - [global::ProtoBuf.ProtoEnum(Name=@"GROUPED_MAP_REDUCE", Value=46)] - GROUPED_MAP_REDUCE = 46, - - [global::ProtoBuf.ProtoEnum(Name=@"GROUPBY", Value=47)] - GROUPBY = 47, + [global::ProtoBuf.ProtoEnum(Name=@"BRACKET", Value=170)] + BRACKET = 170, [global::ProtoBuf.ProtoEnum(Name=@"INNER_JOIN", Value=48)] INNER_JOIN = 48, @@ -615,6 +665,9 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"ZIP", Value=72)] ZIP = 72, + [global::ProtoBuf.ProtoEnum(Name=@"RANGE", Value=173)] + RANGE = 173, + [global::ProtoBuf.ProtoEnum(Name=@"INSERT_AT", Value=82)] INSERT_AT = 82, @@ -630,8 +683,8 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"COERCE_TO", Value=51)] COERCE_TO = 51, - [global::ProtoBuf.ProtoEnum(Name=@"TYPEOF", Value=52)] - TYPEOF = 52, + [global::ProtoBuf.ProtoEnum(Name=@"TYPE_OF", Value=52)] + TYPE_OF = 52, [global::ProtoBuf.ProtoEnum(Name=@"UPDATE", Value=53)] UPDATE = 53, @@ -663,6 +716,9 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"TABLE_LIST", Value=62)] TABLE_LIST = 62, + [global::ProtoBuf.ProtoEnum(Name=@"SYNC", Value=138)] + SYNC = 138, + 
[global::ProtoBuf.ProtoEnum(Name=@"INDEX_CREATE", Value=75)] INDEX_CREATE = 75, @@ -672,6 +728,15 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"INDEX_LIST", Value=77)] INDEX_LIST = 77, + [global::ProtoBuf.ProtoEnum(Name=@"INDEX_STATUS", Value=139)] + INDEX_STATUS = 139, + + [global::ProtoBuf.ProtoEnum(Name=@"INDEX_WAIT", Value=140)] + INDEX_WAIT = 140, + + [global::ProtoBuf.ProtoEnum(Name=@"INDEX_RENAME", Value=156)] + INDEX_RENAME = 156, + [global::ProtoBuf.ProtoEnum(Name=@"FUNCALL", Value=64)] FUNCALL = 64, @@ -684,8 +749,8 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"ALL", Value=67)] ALL = 67, - [global::ProtoBuf.ProtoEnum(Name=@"FOREACH", Value=68)] - FOREACH = 68, + [global::ProtoBuf.ProtoEnum(Name=@"FOR_EACH", Value=68)] + FOR_EACH = 68, [global::ProtoBuf.ProtoEnum(Name=@"FUNC", Value=69)] FUNC = 69, @@ -702,6 +767,12 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"MATCH", Value=97)] MATCH = 97, + [global::ProtoBuf.ProtoEnum(Name=@"UPCASE", Value=141)] + UPCASE = 141, + + [global::ProtoBuf.ProtoEnum(Name=@"DOWNCASE", Value=142)] + DOWNCASE = 142, + [global::ProtoBuf.ProtoEnum(Name=@"SAMPLE", Value=81)] SAMPLE = 81, @@ -711,6 +782,9 @@ public enum TermType [global::ProtoBuf.ProtoEnum(Name=@"JSON", Value=98)] JSON = 98, + [global::ProtoBuf.ProtoEnum(Name=@"TO_JSON_STRING", Value=172)] + TO_JSON_STRING = 172, + [global::ProtoBuf.ProtoEnum(Name=@"ISO8601", Value=99)] ISO8601 = 99, @@ -826,7 +900,79 @@ public enum TermType DECEMBER = 125, [global::ProtoBuf.ProtoEnum(Name=@"LITERAL", Value=137)] - LITERAL = 137 + LITERAL = 137, + + [global::ProtoBuf.ProtoEnum(Name=@"GROUP", Value=144)] + GROUP = 144, + + [global::ProtoBuf.ProtoEnum(Name=@"SUM", Value=145)] + SUM = 145, + + [global::ProtoBuf.ProtoEnum(Name=@"AVG", Value=146)] + AVG = 146, + + [global::ProtoBuf.ProtoEnum(Name=@"MIN", Value=147)] + MIN = 147, + + [global::ProtoBuf.ProtoEnum(Name=@"MAX", Value=148)] + MAX = 148, + + [global::ProtoBuf.ProtoEnum(Name=@"SPLIT", Value=149)] + SPLIT = 149, + + [global::ProtoBuf.ProtoEnum(Name=@"UNGROUP", Value=150)] + UNGROUP = 150, + + [global::ProtoBuf.ProtoEnum(Name=@"RANDOM", Value=151)] + RANDOM = 151, + + [global::ProtoBuf.ProtoEnum(Name=@"CHANGES", Value=152)] + CHANGES = 152, + + [global::ProtoBuf.ProtoEnum(Name=@"ARGS", Value=154)] + ARGS = 154, + + [global::ProtoBuf.ProtoEnum(Name=@"BINARY", Value=155)] + BINARY = 155, + + [global::ProtoBuf.ProtoEnum(Name=@"GEOJSON", Value=157)] + GEOJSON = 157, + + [global::ProtoBuf.ProtoEnum(Name=@"TO_GEOJSON", Value=158)] + TO_GEOJSON = 158, + + [global::ProtoBuf.ProtoEnum(Name=@"POINT", Value=159)] + POINT = 159, + + [global::ProtoBuf.ProtoEnum(Name=@"LINE", Value=160)] + LINE = 160, + + [global::ProtoBuf.ProtoEnum(Name=@"POLYGON", Value=161)] + POLYGON = 161, + + [global::ProtoBuf.ProtoEnum(Name=@"DISTANCE", Value=162)] + DISTANCE = 162, + + [global::ProtoBuf.ProtoEnum(Name=@"INTERSECTS", Value=163)] + INTERSECTS = 163, + + [global::ProtoBuf.ProtoEnum(Name=@"INCLUDES", Value=164)] + INCLUDES = 164, + + [global::ProtoBuf.ProtoEnum(Name=@"CIRCLE", Value=165)] + CIRCLE = 165, + + [global::ProtoBuf.ProtoEnum(Name=@"GET_INTERSECTING", Value=166)] + GET_INTERSECTING = 166, + + [global::ProtoBuf.ProtoEnum(Name=@"FILL", Value=167)] + FILL = 167, + + [global::ProtoBuf.ProtoEnum(Name=@"GET_NEAREST", Value=168)] + GET_NEAREST = 168, + + [global::ProtoBuf.ProtoEnum(Name=@"POLYGON_SUB", Value=171)] + POLYGON_SUB = 171 } private global::ProtoBuf.IExtension extensionObject; diff --git a/rethinkdb-net/rethinkdb_spec.proto 
b/rethinkdb-net/rethinkdb_spec.proto index 4942b05..a5ba1ef 100644 --- a/rethinkdb-net/rethinkdb_spec.proto +++ b/rethinkdb-net/rethinkdb_spec.proto @@ -3,7 +3,7 @@ //////////////////////////////////////////////////////////////////////////////// // Process: When you first open a connection, send the magic number -// for the version of the protobuf you're targetting (in the [Version] +// for the version of the protobuf you're targeting (in the [Version] // enum). This should **NOT** be sent as a protobuf; just send the // little-endian 32-bit integer over the wire raw. This number should // only be sent once per connection. @@ -11,10 +11,16 @@ // The magic number shall be followed by an authorization key. The // first 4 bytes are the length of the key to be sent as a little-endian // 32-bit integer, followed by the key string. Even if there is no key, -// an empty string should be sent (length 0 and no data). The server will -// then respond with a NULL-terminated string response. "SUCCESS" indicates -// that the connection has been accepted. Any other response indicates an -// error, and the response string should describe the error. +// an empty string should be sent (length 0 and no data). + +// Following the authorization key, the client shall send a magic number +// for the communication protocol they want to use (in the [Protocol] +// enum). This shall be a little-endian 32-bit integer. + +// The server will then respond with a NULL-terminated string response. +// "SUCCESS" indicates that the connection has been accepted. Any other +// response indicates an error, and the response string should describe +// the error. // Next, for each query you want to send, construct a [Query] protobuf // and serialize it to a binary blob. Send the blob's size to the @@ -33,13 +39,20 @@ // token to get more results from the original query. //////////////////////////////////////////////////////////////////////////////// -// This enum contains the magic numbers for your version. See **THE HIGH-LEVEL -// VIEW** for what to do with it. message VersionDummy { // We need to wrap it like this for some // non-conforming protobuf libraries + // This enum contains the magic numbers for your version. See **THE HIGH-LEVEL + // VIEW** for what to do with it. enum Version { - V0_1 = 0x3f61ba36; - V0_2 = 0x723081e1; + V0_1 = 0x3f61ba36; + V0_2 = 0x723081e1; // Authorization key during handshake + V0_3 = 0x5f75e83e; // Authorization key and protocol during handshake + } + + // The protocol to use after the handshake, specified in V0_3 + enum Protocol { + PROTOBUF = 0x271ffc41; + JSON = 0x7e6970c7; } } @@ -48,12 +61,16 @@ message VersionDummy { // We need to wrap it like this for some // * A [CONTINUE] query with the same token as a [START] query that returned // [SUCCESS_PARTIAL] in its [Response]. // * A [STOP] query with the same token as a [START] query that you want to stop. +// * A [NOREPLY_WAIT] query with a unique per-connection token. The server answers +// with a [WAIT_COMPLETE] [Response]. message Query { enum QueryType { START = 1; // Start a new query. CONTINUE = 2; // Continue a query that returned [SUCCESS_PARTIAL] // (see [Response]). STOP = 3; // Stop a query partway through executing. + NOREPLY_WAIT = 4; + // Wait for noreply operations to finish. } optional QueryType type = 1; // A [Term] is how we represent the operations we want a query to perform. @@ -64,6 +81,11 @@ message Query { // either true or false). 
optional bool OBSOLETE_noreply = 4 [default = false]; + // If this is set to [true], then [Datum] values will sometimes be + // of [DatumType] [R_JSON] (see below). This can provide enormous + // speedups in languages with poor protobuf libraries. + optional bool accepts_r_json = 5 [default = false]; + message AssocPair { optional string key = 1; optional Term val = 2; @@ -96,6 +118,8 @@ message Response { // the same token as this response, you will get // more of the sequence. Keep sending [CONTINUE] // queries until you get back [SUCCESS_SEQUENCE]. + SUCCESS_FEED = 5; // Like [SUCCESS_PARTIAL] but for feeds. + WAIT_COMPLETE = 4; // A [NOREPLY_WAIT] query completed. // These response types indicate failure. CLIENT_ERROR = 16; // Means the client is buggy. An example is if the @@ -125,8 +149,17 @@ message Response { // specifies either the index of a positional argument or the name of an // optional argument. (Those words will make more sense if you look at the // [Term] message below.) - optional Backtrace backtrace = 4; // Contains n [Frame]s when you get back an error. + + // If the [global_optargs] in the [Query] that this [Response] is a + // response to contains a key "profile" which maps to a static value of + // true then [profile] will contain a [Datum] which provides profiling + // information about the execution of the query. This field should be + // returned to the user along with the result that would normally be + // returned (a datum or a cursor). In official drivers this is accomplished + // by putting them inside of an object with "value" mapping to the return + // value and "profile" mapping to the profile object. + optional Datum profile = 5; } // A [Datum] is a chunk of data that can be serialized to disk or returned to @@ -140,6 +173,10 @@ message Datum { R_STR = 4; R_ARRAY = 5; R_OBJECT = 6; + // This [DatumType] will only be used if [accepts_r_json] is + // set to [true] in [Query]. [r_str] will be filled with a + // JSON encoding of the [Datum]. + R_JSON = 7; // uses r_str } optional DatumType type = 1; optional bool r_bool = 2; @@ -209,16 +246,35 @@ message Term { MAKE_OBJ = 3; // {...} -> OBJECT // * Compound types + // Takes an integer representing a variable and returns the value stored // in that variable. It's the responsibility of the client to translate - // from their local representation of a variable to a unique integer for - // that variable. (We do it this way instead of letting clients provide - // variable names as strings to discourage variable-capturing client - // libraries, and because it's more efficient on the wire.) + // from their local representation of a variable to a unique _non-negative_ + // integer for that variable. (We do it this way instead of letting + // clients provide variable names as strings to discourage + // variable-capturing client libraries, and because it's more efficient + // on the wire.) VAR = 10; // !NUMBER -> DATUM // Takes some javascript code and executes it. JAVASCRIPT = 11; // STRING {timeout: !NUMBER} -> DATUM | // STRING {timeout: !NUMBER} -> Function(*) + UUID = 169; // () -> DATUM + + // Takes an HTTP URL and gets it. 
If the get succeeds and + // returns valid JSON, it is converted into a DATUM + HTTP = 153; // STRING {data: OBJECT | STRING, + // timeout: !NUMBER, + // method: STRING, + // params: OBJECT, + // header: OBJECT | ARRAY, + // attempts: NUMBER, + // redirects: NUMBER, + // verify: BOOL, + // page: FUNC | STRING, + // page_limit: NUMBER, + // auth: OBJECT, + // result_format: STRING, + // } -> STRING | STREAM // Takes a string and throws an error with that message. // Inside of a `default` block, you can omit the first @@ -284,6 +340,8 @@ message Term { // | Sequence, STRING -> Sequence // Return an array containing the keys of the object. KEYS = 94; // OBJECT -> ARRAY + // Creates an object + OBJECT = 143; // STRING, DATUM, ... -> OBJECT // Check whether an object contains all the specified fields, // or filters a sequence so that all objects inside of it // contain all the specified fields. @@ -301,8 +359,11 @@ message Term { // Sequence Ops // Get all elements of a sequence between two values. - BETWEEN = 36; // StreamSelection, DATUM, DATUM, {:index:!STRING} -> StreamSelection - REDUCE = 37; // Sequence, Function(2), {base:DATUM} -> DATUM + // Half-open by default, but the openness of either side can be + // changed by passing 'closed' or 'open for `right_bound` or + // `left_bound`. + BETWEEN = 36; // StreamSelection, DATUM, DATUM, {index:!STRING, right_bound:STRING, left_bound:STRING} -> StreamSelection + REDUCE = 37; // Sequence, Function(2) -> DATUM MAP = 38; // Sequence, Function(1) -> Sequence // Filter a sequence with either a function or a shortcut @@ -315,9 +376,9 @@ message Term { FILTER = 39; // Sequence, Function(1), {default:DATUM} -> Sequence | // Sequence, OBJECT, {default:DATUM} -> Sequence // Map a function over a sequence and then concatenate the results together. - CONCATMAP = 40; // Sequence, Function(1) -> Sequence + CONCAT_MAP = 40; // Sequence, Function(1) -> Sequence // Order a sequence based on one or more attributes. - ORDERBY = 41; // Sequence, (!STRING | Ordering)... -> Sequence + ORDER_BY = 41; // Sequence, (!STRING | Ordering)... -> Sequence // Get all distinct elements of a sequence (like `uniq`). DISTINCT = 42; // Sequence -> Sequence // Count the number of elements in a sequence, or only the elements that match @@ -328,24 +389,19 @@ message Term { UNION = 44; // Sequence... -> Sequence // Get the Nth element of a sequence. NTH = 45; // Sequence, NUMBER -> DATUM - // Takes a sequence, and three functions: - // - A function to group the sequence by. - // - A function to map over the groups. - // - A reduction to apply to each of the groups. - GROUPED_MAP_REDUCE = 46; // Sequence, Function(1), Function(1), Function(2), {base:DATUM} -> ARRAY - // Groups a sequence by one or more attributes, and then applies a reduction. - // The third argument is a special object literal giving the kind of operation to be - // performed and any necessary arguments. 
- // At present, GROUPBY suports the following operations - // * {'COUNT': } - count the size of the group - // * {'SUM': attr} - sum the values of the given attribute across the group - // * {'AVG': attr} - average the values of the given attribute across the group - GROUPBY = 47; // Sequence, ARRAY, !GROUP_BY_OBJECT -> Sequence + // do NTH or GET_FIELD depending on target object + BRACKET = 170; // Sequence | OBJECT, NUMBER | STRING -> DATUM + // OBSOLETE_GROUPED_MAPREDUCE = 46; + // OBSOLETE_GROUPBY = 47; + INNER_JOIN = 48; // Sequence, Sequence, Function(2) -> Sequence OUTER_JOIN = 49; // Sequence, Sequence, Function(2) -> Sequence // An inner-join that does an equality comparison on two attributes. EQ_JOIN = 50; // Sequence, !STRING, Sequence, {index:!STRING} -> Sequence ZIP = 72; // Sequence -> Sequence + RANGE = 173; // -> Sequence [0, +inf) + // NUMBER -> Sequence [0, a) + // NUMBER, NUMBER -> Sequence [a, b) // Array Ops // Insert an element in to an array at a given index. @@ -363,25 +419,27 @@ message Term { // If you previously used `stream_to_array`, you should use this instead // with the type "array". COERCE_TO = 51; // Top, STRING -> Top - // Returns the named type of a datum (e.g. TYPEOF(true) = "BOOL") - TYPEOF = 52; // Top -> STRING + // Returns the named type of a datum (e.g. TYPE_OF(true) = "BOOL") + TYPE_OF = 52; // Top -> STRING // * Write Ops (the OBJECTs contain data about number of errors etc.) // Updates all the rows in a selection. Calls its Function with the row // to be updated, and then merges the result of that call. - UPDATE = 53; // StreamSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT | - // SingleSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT | - // StreamSelection, OBJECT, {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT | - // SingleSelection, OBJECT, {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT + UPDATE = 53; // StreamSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT | + // SingleSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT | + // StreamSelection, OBJECT, {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT | + // SingleSelection, OBJECT, {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT // Deletes all the rows in a selection. - DELETE = 54; // StreamSelection, {durability:STRING, return_vals:BOOL} -> OBJECT | SingleSelection -> OBJECT + DELETE = 54; // StreamSelection, {durability:STRING, return_changes:BOOL} -> OBJECT | SingleSelection -> OBJECT // Replaces all the rows in a selection. Calls its Function with the row // to be replaced, and then discards it and stores the result of that // call. - REPLACE = 55; // StreamSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT | SingleSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT - // Inserts into a table. If `upsert` is true, overwrites entries with - // the same primary key (otherwise errors). 
- INSERT = 56; // Table, OBJECT, {upsert:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT | Table, Sequence, {upsert:BOOL, durability:STRING, return_vals:BOOL} -> OBJECT + REPLACE = 55; // StreamSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT | SingleSelection, Function(1), {non_atomic:BOOL, durability:STRING, return_changes:BOOL} -> OBJECT + // Inserts into a table. If `conflict` is replace, overwrites + // entries with the same primary key. If `conflict` is + // update, does an update on the entry. If `conflict` is + // error, or is omitted, conflicts will trigger an error. + INSERT = 56; // Table, OBJECT, {conflict:STRING, durability:STRING, return_changes:BOOL} -> OBJECT | Table, Sequence, {conflict:STRING, durability:STRING, return_changes:BOOL} -> OBJECT // * Administrative OPs // Creates a database with a particular name. @@ -393,8 +451,8 @@ message Term { // Creates a table with a particular name in a particular // database. (You may omit the first argument to use the // default database.) - TABLE_CREATE = 60; // Database, STRING, {datacenter:STRING, primary_key:STRING, cache_size:NUMBER, durability:STRING} -> OBJECT - // STRING, {datacenter:STRING, primary_key:STRING, cache_size:NUMBER, durability:STRING} -> OBJECT + TABLE_CREATE = 60; // Database, STRING, {datacenter:STRING, primary_key:STRING, durability:STRING} -> OBJECT + // STRING, {datacenter:STRING, primary_key:STRING, durability:STRING} -> OBJECT // Drops a table with a particular name from a particular // database. (You may omit the first argument to use the // default database.) @@ -404,14 +462,26 @@ message Term { // omit the first argument to use the default database.) TABLE_LIST = 62; // Database -> ARRAY // -> ARRAY + // Ensures that previously issued soft-durability writes are complete and + // written to disk. + SYNC = 138; // Table -> OBJECT // * Secondary indexes OPs // Creates a new secondary index with a particular name and definition. - INDEX_CREATE = 75; // Table, STRING, Function(1) -> OBJECT + INDEX_CREATE = 75; // Table, STRING, Function(1), {multi:BOOL} -> OBJECT // Drops a secondary index with a particular name from the specified table. INDEX_DROP = 76; // Table, STRING -> OBJECT // Lists all secondary indexes on a particular table. INDEX_LIST = 77; // Table -> ARRAY + // Gets information about whether or not a set of indexes are ready to + // be accessed. Returns a list of objects that look like this: + // {index:STRING, ready:BOOL[, blocks_processed:NUMBER, blocks_total:NUMBER]} + INDEX_STATUS = 139; // Table, STRING... -> ARRAY + // Blocks until a set of indexes are ready to be accessed. Returns the + // same values INDEX_STATUS. + INDEX_WAIT = 140; // Table, STRING... -> ARRAY + // Renames the given index to a new name + INDEX_RENAME = 156; // Table, STRING, STRING, {overwrite:BOOL} -> OBJECT // * Control Operators // Calls a function on data @@ -428,7 +498,7 @@ message Term { ALL = 67; // BOOL... -> BOOL // Calls its Function with each entry in the sequence // and executes the array of terms that Function returns. - FOREACH = 68; // Sequence, Function(1) -> OBJECT + FOR_EACH = 68; // Sequence, Function(1) -> OBJECT //////////////////////////////////////////////////////////////////////////////// ////////// Special Terms @@ -484,6 +554,10 @@ message Term { // matches the regular expression `b`. MATCH = 97; // STRING, STRING -> DATUM + // Change the case of a string. 
+ UPCASE = 141; // STRING -> STRING + DOWNCASE = 142; // STRING -> STRING + // Select a number of elements from sequence with uniform distribution. SAMPLE = 81; // Sequence, NUMBER -> Sequence @@ -500,6 +574,11 @@ message Term { // Parses its first argument as a json string and returns it as a // datum. JSON = 98; // STRING -> DATUM + // Returns the datum as a JSON string. + // N.B.: we would really prefer this be named TO_JSON and that exists as + // an alias in Python and JavaScript drivers; however it conflicts with the + // standard `to_json` method defined by Ruby's standard json library. + TO_JSON_STRING = 172; // DATUM -> STRING // Parses its first arguments as an ISO 8601 time and returns it as a // datum. @@ -567,6 +646,44 @@ message Term { // Indicates to MERGE to replace the other object rather than merge it. LITERAL = 137; // JSON -> Merging + + // SEQUENCE, STRING -> GROUPED_SEQUENCE | SEQUENCE, FUNCTION -> GROUPED_SEQUENCE + GROUP = 144; + SUM = 145; + AVG = 146; + MIN = 147; + MAX = 148; + + // `str.split()` splits on whitespace + // `str.split(" ")` splits on spaces only + // `str.split(" ", 5)` splits on spaces with at most 5 results + // `str.split(nil, 5)` splits on whitespace with at most 5 results + SPLIT = 149; // STRING -> ARRAY | STRING, STRING -> ARRAY | STRING, STRING, NUMBER -> ARRAY | STRING, NULL, NUMBER -> ARRAY + + UNGROUP = 150; // GROUPED_DATA -> ARRAY + + // Takes a range of numbers and returns a random number within the range + RANDOM = 151; // NUMBER, NUMBER {float:BOOL} -> DATUM + + CHANGES = 152; // TABLE -> STREAM + ARGS = 154; // ARRAY -> SPECIAL (used to splice arguments) + + // BINARY is client-only at the moment, it is not supported on the server + BINARY = 155; // STRING -> PSEUDOTYPE(BINARY) + + GEOJSON = 157; // OBJECT -> PSEUDOTYPE(GEOMETRY) + TO_GEOJSON = 158; // PSEUDOTYPE(GEOMETRY) -> OBJECT + POINT = 159; // NUMBER, NUMBER -> PSEUDOTYPE(GEOMETRY) + LINE = 160; // (ARRAY | PSEUDOTYPE(GEOMETRY))... -> PSEUDOTYPE(GEOMETRY) + POLYGON = 161; // (ARRAY | PSEUDOTYPE(GEOMETRY))... -> PSEUDOTYPE(GEOMETRY) + DISTANCE = 162; // PSEUDOTYPE(GEOMETRY), PSEUDOTYPE(GEOMETRY) {geo_system:STRING, unit:STRING} -> NUMBER + INTERSECTS = 163; // PSEUDOTYPE(GEOMETRY), PSEUDOTYPE(GEOMETRY) -> BOOL + INCLUDES = 164; // PSEUDOTYPE(GEOMETRY), PSEUDOTYPE(GEOMETRY) -> BOOL + CIRCLE = 165; // PSEUDOTYPE(GEOMETRY), NUMBER {num_vertices:NUMBER, geo_system:STRING, unit:STRING, fill:BOOL} -> PSEUDOTYPE(GEOMETRY) + GET_INTERSECTING = 166; // TABLE, PSEUDOTYPE(GEOMETRY) {index:!STRING} -> StreamSelection + FILL = 167; // PSEUDOTYPE(GEOMETRY) -> PSEUDOTYPE(GEOMETRY) + GET_NEAREST = 168; // TABLE, PSEUDOTYPE(GEOMETRY) {index:!STRING, max_results:NUM, max_dist:NUM, geo_system:STRING, unit:STRING} -> ARRAY + POLYGON_SUB = 171; // PSEUDOTYPE(GEOMETRY), PSEUDOTYPE(GEOMETRY) -> PSEUDOTYPE(GEOMETRY) } optional TermType type = 1;
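
For readers tracing how the new grouping query terms fit together, the following is a minimal sketch of the term tree that a Group-then-Max-then-Ungroup chain produces, based on the GenerateTerm implementations added above. It is an illustration only, not code from this change: the tableTerm and keyFunction parameters are placeholders for the terms that GroupByFunctionQuery and ExpressionUtils.CreateFunctionTerm would generate.

using RethinkDb.Spec;

static class GroupTermSketch
{
    // Builds GROUP -> MAX -> UNGROUP, mirroring the wiring in
    // GroupByFunctionQuery, MaxGroupAggregateQuery, and UngroupQuery above.
    public static Term BuildGroupMaxUngroup(Term tableTerm, Term keyFunction)
    {
        var group = new Term() { type = Term.TermType.GROUP };
        group.args.Add(tableTerm);      // the sequence being grouped
        group.args.Add(keyFunction);    // the grouping key function

        var max = new Term() { type = Term.TermType.MAX };
        max.args.Add(group);            // MAX applied to each group

        var ungroup = new Term() { type = Term.TermType.UNGROUP };
        ungroup.args.Add(max);          // flatten back into {group, reduction} rows
        return ungroup;
    }
}

The rows returned by UNGROUP are the shape that the new UngroupObject type (with its "group" and "reduction" data members) is deserialized into.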
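The V0_3 handshake described in the updated rethinkdb_spec.proto comments (version magic number, auth key length plus key, protocol magic number, then a NULL-terminated response) can be sketched roughly as below. This is an illustration under stated assumptions, not the driver's actual connection code: the connected stream, the absence of an auth key, and the choice of the PROTOBUF protocol constant are all assumptions.

using System;
using System.IO;
using System.Text;

static class HandshakeSketch
{
    // Rough sketch of the V0_3 handshake per the spec comments above.
    public static void DoHandshake(Stream stream)
    {
        var writer = new BinaryWriter(stream, Encoding.ASCII, true);
        writer.Write(0x5f75e83e);   // VersionDummy.Version.V0_3, raw little-endian 32-bit int
        writer.Write(0);            // auth key length = 0 (empty key), so no key bytes follow
        writer.Write(0x271ffc41);   // VersionDummy.Protocol.PROTOBUF, raw little-endian 32-bit int
        writer.Flush();

        // The server answers with a NULL-terminated string; "SUCCESS" means accepted.
        var response = new StringBuilder();
        int b;
        while ((b = stream.ReadByte()) > 0)
            response.Append((char)b);
        if (response.ToString() != "SUCCESS")
            throw new InvalidOperationException("Handshake rejected: " + response);
    }
}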