diff --git a/BuildVersion.cs b/BuildVersion.cs index c7e5579..edd6347 100644 --- a/BuildVersion.cs +++ b/BuildVersion.cs @@ -1,6 +1,6 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -[assembly: AssemblyVersion("3.0.0.0")] -[assembly: AssemblyFileVersion("3.2.12.0")] \ No newline at end of file +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.2.13.0")] \ No newline at end of file diff --git a/GenericPointerHelpers/GenericPointerHelper.cs b/GenericPointerHelpers/GenericPointerHelper.cs new file mode 100644 index 0000000..6cb4a62 --- /dev/null +++ b/GenericPointerHelpers/GenericPointerHelper.cs @@ -0,0 +1,21 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GenericPointerHelpers +{ + public unsafe static class GenericPointerHelper + { + public static T Read(void* ptr) + { + throw new NotImplementedException(); + } + + public static void Write(void* ptr, T value) + { + throw new NotImplementedException(); + } + } +} diff --git a/GenericPointerHelpers/GenericPointerHelpers.csproj b/GenericPointerHelpers/GenericPointerHelpers.csproj new file mode 100644 index 0000000..08a3d2b --- /dev/null +++ b/GenericPointerHelpers/GenericPointerHelpers.csproj @@ -0,0 +1,84 @@ + + + + + Debug + AnyCPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B} + Library + Properties + GenericPointerHelpers + GenericPointerHelpers + v4.0 + 512 + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + true + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + true + + + true + bin\x64\Debug\ + DEBUG;TRACE + true + full + x64 + prompt + MinimumRecommendedRules.ruleset + + + bin\x64\Release\ + TRACE + true + true + pdbonly + x64 + prompt + MinimumRecommendedRules.ruleset + + + + + + + + + + + + + + + + + + + + + + 
+ + + \ No newline at end of file diff --git a/GenericPointerHelpers/GenericPointerHelpers.dll b/GenericPointerHelpers/GenericPointerHelpers.dll new file mode 100644 index 0000000..55bf670 Binary files /dev/null and b/GenericPointerHelpers/GenericPointerHelpers.dll differ diff --git a/GenericPointerHelpers/GenericPointerHelpers.il b/GenericPointerHelpers/GenericPointerHelpers.il new file mode 100644 index 0000000..9945dff --- /dev/null +++ b/GenericPointerHelpers/GenericPointerHelpers.il @@ -0,0 +1,169 @@ + +// Microsoft (R) .NET Framework IL Disassembler. Version 4.0.30319.33440 +// Copyright (c) Microsoft Corporation. All rights reserved. + + + +// Metadata version: v4.0.30319 +.assembly extern mscorlib +{ + .publickeytoken = (B7 7A 5C 56 19 34 E0 89 ) // .z\V.4.. + .ver 4:0:0:0 +} +.assembly GenericPointerHelpers +{ + .custom instance void [mscorlib]System.Runtime.CompilerServices.CompilationRelaxationsAttribute::.ctor(int32) = ( 01 00 08 00 00 00 00 00 ) + .custom instance void [mscorlib]System.Runtime.CompilerServices.RuntimeCompatibilityAttribute::.ctor() = ( 01 00 01 00 54 02 16 57 72 61 70 4E 6F 6E 45 78 // ....T..WrapNonEx + 63 65 70 74 69 6F 6E 54 68 72 6F 77 73 01 ) // ceptionThrows. + + // --- The following custom attribute is added automatically, do not uncomment ------- + // .custom instance void [mscorlib]System.Diagnostics.DebuggableAttribute::.ctor(valuetype [mscorlib]System.Diagnostics.DebuggableAttribute/DebuggingModes) = ( 01 00 02 00 00 00 00 00 ) + + .custom instance void [mscorlib]System.Reflection.AssemblyTitleAttribute::.ctor(string) = ( 01 00 15 47 65 6E 65 72 69 63 50 6F 69 6E 74 65 // ...GenericPointe + 72 48 65 6C 70 65 72 73 00 00 ) // rHelpers.. 
+ .custom instance void [mscorlib]System.Reflection.AssemblyDescriptionAttribute::.ctor(string) = ( 01 00 00 00 00 ) + .custom instance void [mscorlib]System.Reflection.AssemblyConfigurationAttribute::.ctor(string) = ( 01 00 00 00 00 ) + .custom instance void [mscorlib]System.Reflection.AssemblyCompanyAttribute::.ctor(string) = ( 01 00 00 00 00 ) + .custom instance void [mscorlib]System.Reflection.AssemblyProductAttribute::.ctor(string) = ( 01 00 15 47 65 6E 65 72 69 63 50 6F 69 6E 74 65 // ...GenericPointe + 72 48 65 6C 70 65 72 73 00 00 ) // rHelpers.. + .custom instance void [mscorlib]System.Reflection.AssemblyCopyrightAttribute::.ctor(string) = ( 01 00 12 43 6F 70 79 72 69 67 68 74 20 C2 A9 20 // ...Copyright .. + 20 32 30 31 35 00 00 ) // 2015.. + .custom instance void [mscorlib]System.Reflection.AssemblyTrademarkAttribute::.ctor(string) = ( 01 00 00 00 00 ) + .custom instance void [mscorlib]System.Runtime.InteropServices.ComVisibleAttribute::.ctor(bool) = ( 01 00 00 00 00 ) + .custom instance void [mscorlib]System.Runtime.InteropServices.GuidAttribute::.ctor(string) = ( 01 00 24 66 66 36 36 63 61 63 62 2D 65 36 36 65 // ..$ff66cacb-e66e + 2D 34 64 62 36 2D 38 36 66 36 2D 34 66 36 39 64 // -4db6-86f6-4f69d + 35 65 34 30 62 32 62 00 00 ) // 5e40b2b.. + .custom instance void [mscorlib]System.Reflection.AssemblyFileVersionAttribute::.ctor(string) = ( 01 00 07 31 2E 30 2E 30 2E 30 00 00 ) // ...1.0.0.0.. 
+ .custom instance void [mscorlib]System.Runtime.Versioning.TargetFrameworkAttribute::.ctor(string) = ( 01 00 1A 2E 4E 45 54 46 72 61 6D 65 77 6F 72 6B // ....NETFramework + 2C 56 65 72 73 69 6F 6E 3D 76 34 2E 30 01 00 54 // ,Version=v4.0..T + 0E 14 46 72 61 6D 65 77 6F 72 6B 44 69 73 70 6C // ..FrameworkDispl + 61 79 4E 61 6D 65 10 2E 4E 45 54 20 46 72 61 6D // ayName..NET Fram + 65 77 6F 72 6B 20 34 ) // ework 4 + .permissionset reqmin + = {[mscorlib]System.Security.Permissions.SecurityPermissionAttribute = {property bool 'SkipVerification' = bool(true)}} + .hash algorithm 0x00008004 + .ver 1:0:0:0 +} +.module GenericPointerHelpers.dll +// MVID: {EC5C576B-46D7-4E96-BA9F-6FD95CA1EDD9} +.custom instance void [mscorlib]System.Security.UnverifiableCodeAttribute::.ctor() = ( 01 00 00 00 ) +.imagebase 0x10000000 +.file alignment 0x00000200 +.stackreserve 0x00100000 +.subsystem 0x0003 // WINDOWS_CUI +.corflags 0x00000001 // ILONLY +// Image base: 0x02E80000 + + +// =============== CLASS MEMBERS DECLARATION =================== + +.class public abstract auto ansi sealed beforefieldinit GenericPointerHelpers.GenericPointerHelper + extends [mscorlib]System.Object +{ + .method public hidebysig static !!T Read(void* ptr) cil managed aggressiveinlining + { + .maxstack 2 + ldarg.0 + ldobj !!T + ret + } + + .method public hidebysig static !!T ReadLimited(void* 'ptr', unsigned int32 'index', unsigned int32 'length') cil managed aggressiveinlining + { + .maxstack 8 + .locals init ( + [0] !!T + ) + + ldloca.s 0 + dup + initobj !!T + ldarg.1 + add + ldarg.0 + ldarg.2 + unaligned. 
1 + cpblk + ldloc.0 + ret + } + + + .method public hidebysig static !!T* AddrOf(!!T& 'refvalue') cil managed aggressiveinlining + { + .maxstack 1 + ldarg.0 + ret + } + + .method public hidebysig static void Write(void* ptr, + !!T 'value') cil managed aggressiveinlining + { + .maxstack 2 + ldarg.0 + ldarg.1 + stobj !!T + ret + } + + .method public hidebysig static void WriteLimited(void* ptr, !!T 'value', unsigned int32 'index', unsigned int32 'length') cil managed aggressiveinlining + { + .maxstack 8 + ldarg.0 + ldarga.s 1 + ldarg.2 + add + ldarg.3 + unaligned. 1 + cpblk + ret + } + + .method public hidebysig static int32 SizeOf() cil managed aggressiveinlining + { + .maxstack 8 + IL_0000: sizeof !!T + IL_0003: ret + } + + + .method public hidebysig static void CopyBytes(void* from, void* 'to', unsigned int32 'count') cil managed aggressiveinlining + { + .maxstack 8 + ldarg.1 + ldarg.0 + ldarg.2 + unaligned. 1 + cpblk + ret + } + + .method public hidebysig static void CopyBytesAlligned(void* from, void* 'to', unsigned int32 'count') cil managed aggressiveinlining + { + .maxstack 8 + ldarg.1 + ldarg.0 + ldarg.2 + cpblk + ret + } + + .method public hidebysig static void InitMemory(void* 'ptr', unsigned int32 'count', unsigned int8 'value') cil managed aggressiveinlining + { + .maxstack 8 + ldarg.0 + ldarg.2 + ldarg.1 + unaligned. 
1 + initblk + ret + } + + +} // end of class GenericPointerHelpers.GenericPointerHelper + + +// ============================================================= + +// *********** DISASSEMBLY COMPLETE *********************** +// WARNING: Created Win32 resource file C:\Users\exyi\Source\Repos\RaptorDB-Document\GenericPointerHelpers\assembly2.res diff --git a/GenericPointerHelpers/GenericPointerHelpers.res b/GenericPointerHelpers/GenericPointerHelpers.res new file mode 100644 index 0000000..52b393b Binary files /dev/null and b/GenericPointerHelpers/GenericPointerHelpers.res differ diff --git a/GenericPointerHelpers/Properties/AssemblyInfo.cs b/GenericPointerHelpers/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..a132012 --- /dev/null +++ b/GenericPointerHelpers/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("GenericPointerHelpers")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("GenericPointerHelpers")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("ff66cacb-e66e-4db6-86f6-4f69d5e40b2b")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/LICENSE b/LICENSE index c3a5242..715961d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,22 +1,22 @@ -The MIT License (MIT) - -Copyright (c) 2014 Mehdi Gholam - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- +The MIT License (MIT) + +Copyright (c) 2014 Mehdi Gholam + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/Performance1.psess b/Performance1.psess new file mode 100644 index 0000000..7b88f27 --- /dev/null +++ b/Performance1.psess @@ -0,0 +1,21 @@ + + + Sampling + None + true + Timestamp + Cycles + 10000000 + 10 + 10 + + false + + + + false + + + false + + diff --git a/README.md b/README.md index 46e29f8..97b7d2b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -RaptorDB Document -================= - -NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication) - -see the article here : [http://www.codeproject.com/Articles/375413/RaptorDB-the-Document-Store] (http://www.codeproject.com/Articles/375413/RaptorDB-the-Document-Store) +RaptorDB Document +================= + +NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication) + +see the article here : [http://www.codeproject.com/Articles/375413/RaptorDB-the-Document-Store] (http://www.codeproject.com/Articles/375413/RaptorDB-the-Document-Store) diff --git a/RaptorDB.Common/BitHelper.cs b/RaptorDB.Common/BitHelper.cs new file mode 100644 index 0000000..ee8c378 --- /dev/null +++ b/RaptorDB.Common/BitHelper.cs @@ -0,0 +1,116 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace RaptorDB.Common +{ + static class BitHelper + { + public static void FillArray(T[] arr, T value) + { + for (int i = 0; i < arr.Length; i++) + { + arr[i] = value; + } + } + public static T[] Repeat(T value, int len) + { + var a = new T[len]; + FillArray(a, value); + return a; + } + public static int BitCount(uint n) + { // 
32-bit recursive reduction using SWAR + n -= ((n >> 1) & 0x55555555); + n = (((n >> 2) & 0x33333333) + (n & 0x33333333)); + n = (((n >> 4) + n) & 0x0f0f0f0f); + return (int)((n * 0x01010101) >> 24); + } + public static void BitmapSet(uint[] bitmap, int index, bool val) + { + int pointer = index >> 5; + uint mask = (uint)1 << (31 - (index % 32)); + + if (val) + bitmap[pointer] |= mask; + else + bitmap[pointer] &= ~mask; + } + public static unsafe void BitmapSet(void* bitmap, int index, bool val) + { + int pointer = index / 8; + byte mask = (byte)(1 << (7 - (index % 8))); + + if (val) + *((byte*)bitmap + pointer) |= mask; + else + *((byte*)bitmap + pointer) &= (byte)~mask; + } + + public static bool BitmapGet(uint[] bitmap, int index) + { + int pointer = index >> 5; + uint mask = (uint)1 << (31 - (index % 32)); + + if (pointer < bitmap.Length) + return (bitmap[pointer] & mask) != 0; + else + return false; + } + + public static void AndArray(uint[] a, uint[] b) + { + var len = Math.Min(a.Length, b.Length); + for (int i = 0; i < len; i++) + { + a[i] &= b[i]; + } + } + + public static void AndNotArray(uint[] a, uint[] b) + { + var len = Math.Min(a.Length, b.Length); + for (int i = 0; i < len; i++) + { + a[i] &= ~b[i]; + } + } + + public static void OrArray(uint[] a, uint[] b) + { + var len = Math.Min(a.Length, b.Length); + for (int i = 0; i < len; i++) + { + a[i] |= b[i]; + } + } + + public static void XorArray(uint[] a, uint[] b) + { + var len = Math.Min(a.Length, b.Length); + for (int i = 0; i < len; i++) + { + a[i] ^= b[i]; + } + } + + public static IEnumerable GetBitIndexes(uint[] bitmap) + { + for (int i = 0; i < bitmap.Length; i++) + { + var w = bitmap[i]; + if (w > 0) + { + for (int j = 0; j < 32; j++) + { + if ((w & 1) > 0) + yield return (i << 5) + j; + w >>= 1; + } + } + } + } + } +} diff --git a/RaptorDB.Common/DataTypes.cs b/RaptorDB.Common/DataTypes.cs index 2194a40..d91b97c 100644 --- a/RaptorDB.Common/DataTypes.cs +++ b/RaptorDB.Common/DataTypes.cs @@ 
-1,59 +1,70 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using RaptorDB.Common; - -namespace RaptorDB -{ - /// - /// Result of queries - /// OK : T = Query with data, F = EX has the exception - /// Rows : query rows - /// - public class Result - { - public Result() - { - - } - public Result(bool ok) - { - OK = ok; - } - public Result(bool ok, Exception ex) - { - OK = ok; - EX = ex; - } - /// - /// T=Values return, F=exceptions occurred - /// - public bool OK { get; set; } - public Exception EX { get; set; } - /// - /// Total number of rows of the query - /// - public int TotalCount { get; set; } - /// - /// Rows returned - /// - public int Count { get; set; } - public List Rows { get; set; } - - - // FEATURE : data pending in results - ///// - ///// Data is being indexed, so results will not reflect all documents - ///// - //public bool DataPending { get; set; } - } - - /// - /// Base for row schemas : implements a docid property and is bindable - /// - public abstract class RDBSchema : BindableFields - { - public Guid docid; - } -} +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using RaptorDB.Common; + +namespace RaptorDB +{ + public interface IResult + { + bool OK { get; } + Exception EX { get; } + int TotalCount { get; } + int Count { get; } + IList Rows { get; } + } + /// + /// Result of queries + /// OK : T = Query with data, F = EX has the exception + /// Rows : query rows + /// + public class Result: IResult + { + public Result() + { + + } + public Result(bool ok) + { + OK = ok; + } + public Result(bool ok, Exception ex) + { + OK = ok; + EX = ex; + } + /// + /// T=Values return, F=exceptions occurred + /// + public bool OK { get; set; } + public Exception EX { get; set; } + /// + /// Total number of rows of the query + /// + public int TotalCount { get; set; } + /// + /// Rows returned + /// + public int Count { get; set; } + + IList 
IResult.Rows { get { return Rows; } } + public List Rows { get; set; } + + + // FEATURE : data pending in results + ///// + ///// Data is being indexed, so results will not reflect all documents + ///// + //public bool DataPending { get; set; } + } + + /// + /// Base for row schemas : implements a docid property and is bindable + /// + public abstract class RDBSchema : BindableFields + { + public Guid docid; + } +} diff --git a/RaptorDB.Common/ExpressionHelper.cs b/RaptorDB.Common/ExpressionHelper.cs new file mode 100644 index 0000000..19cbeba --- /dev/null +++ b/RaptorDB.Common/ExpressionHelper.cs @@ -0,0 +1,21 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Text; + +namespace RaptorDB.Common +{ + public static class ExpressionHelper + { + /// + /// Gets name of property in member expression + /// + public static string GetPropertyName(System.Linq.Expressions.Expression> lambda) + { + var m = lambda.Body as MemberExpression; + if (m == null) throw new ArgumentException("lambda is not MemberExpression"); + return m.Member.Name; + } + } +} diff --git a/RaptorDB.Common/FieldDescriptor.cs b/RaptorDB.Common/FieldDescriptor.cs index 124c654..9702195 100644 --- a/RaptorDB.Common/FieldDescriptor.cs +++ b/RaptorDB.Common/FieldDescriptor.cs @@ -1,164 +1,164 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Reflection; -using System.ComponentModel; - -namespace RaptorDB -{ - internal class FieldPropertyDescriptor : PropertyDescriptor - { - private FieldInfo _field; - - public FieldPropertyDescriptor(FieldInfo field) : - base(field.Name, (Attribute[])field.GetCustomAttributes(typeof(Attribute), true)) - { - _field = field; - } - - public FieldInfo Field { get { return _field; } } - - public override bool Equals(object obj) - { - FieldPropertyDescriptor other = obj as FieldPropertyDescriptor; - return other != null && other._field.Equals(_field); - } 
- - public override int GetHashCode() { return _field.GetHashCode(); } - - public override bool IsReadOnly { get { return false; } } - public override void ResetValue(object component) { } - public override bool CanResetValue(object component) { return false; } - public override bool ShouldSerializeValue(object component) { return true; } - - public override Type ComponentType { get { return _field.DeclaringType; } } - public override Type PropertyType { get { return _field.FieldType; } } - - public override object GetValue(object component) { return _field.GetValue(component); } - - public override void SetValue(object component, object value) - { - _field.SetValue(component, value); - OnValueChanged(component, EventArgs.Empty); - } - } - - public abstract class BindableFields : ICustomTypeDescriptor - { - object ICustomTypeDescriptor.GetPropertyOwner(PropertyDescriptor pd) - { - return this; - } - - AttributeCollection ICustomTypeDescriptor.GetAttributes() - { - return TypeDescriptor.GetAttributes(this, true); - } - - string ICustomTypeDescriptor.GetClassName() - { - return TypeDescriptor.GetClassName(this, true); - } - - string ICustomTypeDescriptor.GetComponentName() - { - return TypeDescriptor.GetComponentName(this, true); - } - - TypeConverter ICustomTypeDescriptor.GetConverter() - { - return TypeDescriptor.GetConverter(this, true); - } - - EventDescriptor ICustomTypeDescriptor.GetDefaultEvent() - { - return TypeDescriptor.GetDefaultEvent(this, true); - } - - PropertyDescriptor ICustomTypeDescriptor.GetDefaultProperty() - { - return TypeDescriptor.GetDefaultProperty(this, true); - } - - object ICustomTypeDescriptor.GetEditor(Type editorBaseType) - { - return TypeDescriptor.GetEditor(this, editorBaseType, true); - } - - EventDescriptorCollection ICustomTypeDescriptor.GetEvents(Attribute[] attributes) - { - return TypeDescriptor.GetEvents(this, attributes, true); - } - - EventDescriptorCollection ICustomTypeDescriptor.GetEvents() - { - return 
TypeDescriptor.GetEvents(this, true); - } - - private PropertyDescriptorCollection _propCache; - private FilterCache _filterCache; - - PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties() - { - return ((ICustomTypeDescriptor)this).GetProperties(null); - } - - PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties(Attribute[] attributes) - { - bool filtering = (attributes != null && attributes.Length > 0); - PropertyDescriptorCollection props = _propCache; - FilterCache cache = _filterCache; - - // Use a cached version if possible - if (filtering && cache != null && cache.IsValid(attributes)) - { - return cache.FilteredProperties; - } - else if (!filtering && props != null) - { - return props; - } - - // Create the property collection and filter - props = new PropertyDescriptorCollection(null); - foreach (PropertyDescriptor prop in TypeDescriptor.GetProperties(this, attributes, true)) - { - props.Add(prop); - } - foreach (FieldInfo field in GetType().GetFields()) - { - FieldPropertyDescriptor fieldDesc = new FieldPropertyDescriptor(field); - if (!filtering || fieldDesc.Attributes.Contains(attributes)) props.Add(fieldDesc); - } - - // Store the computed properties - if (filtering) - { - cache = new FilterCache(); - cache.Attributes = attributes; - cache.FilteredProperties = props; - _filterCache = cache; - } - else _propCache = props; - - return props; - } - - private class FilterCache - { - public Attribute[] Attributes; - public PropertyDescriptorCollection FilteredProperties; - public bool IsValid(Attribute[] other) - { - if (other == null || Attributes == null) return false; - if (Attributes.Length != other.Length) return false; - for (int i = 0; i < other.Length; i++) - { - if (!Attributes[i].Match(other[i])) return false; - } - return true; - } - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Reflection; +using System.ComponentModel; + +namespace RaptorDB.Common +{ + 
internal class FieldPropertyDescriptor : PropertyDescriptor + { + private FieldInfo _field; + + public FieldPropertyDescriptor(FieldInfo field) : + base(field.Name, (Attribute[])field.GetCustomAttributes(typeof(Attribute), true)) + { + _field = field; + } + + public FieldInfo Field { get { return _field; } } + + public override bool Equals(object obj) + { + FieldPropertyDescriptor other = obj as FieldPropertyDescriptor; + return other != null && other._field.Equals(_field); + } + + public override int GetHashCode() { return _field.GetHashCode(); } + + public override bool IsReadOnly { get { return false; } } + public override void ResetValue(object component) { } + public override bool CanResetValue(object component) { return false; } + public override bool ShouldSerializeValue(object component) { return true; } + + public override Type ComponentType { get { return _field.DeclaringType; } } + public override Type PropertyType { get { return _field.FieldType; } } + + public override object GetValue(object component) { return _field.GetValue(component); } + + public override void SetValue(object component, object value) + { + _field.SetValue(component, value); + OnValueChanged(component, EventArgs.Empty); + } + } + + public abstract class BindableFields : ICustomTypeDescriptor + { + object ICustomTypeDescriptor.GetPropertyOwner(PropertyDescriptor pd) + { + return this; + } + + AttributeCollection ICustomTypeDescriptor.GetAttributes() + { + return TypeDescriptor.GetAttributes(this, true); + } + + string ICustomTypeDescriptor.GetClassName() + { + return TypeDescriptor.GetClassName(this, true); + } + + string ICustomTypeDescriptor.GetComponentName() + { + return TypeDescriptor.GetComponentName(this, true); + } + + TypeConverter ICustomTypeDescriptor.GetConverter() + { + return TypeDescriptor.GetConverter(this, true); + } + + EventDescriptor ICustomTypeDescriptor.GetDefaultEvent() + { + return TypeDescriptor.GetDefaultEvent(this, true); + } + + PropertyDescriptor 
ICustomTypeDescriptor.GetDefaultProperty() + { + return TypeDescriptor.GetDefaultProperty(this, true); + } + + object ICustomTypeDescriptor.GetEditor(Type editorBaseType) + { + return TypeDescriptor.GetEditor(this, editorBaseType, true); + } + + EventDescriptorCollection ICustomTypeDescriptor.GetEvents(Attribute[] attributes) + { + return TypeDescriptor.GetEvents(this, attributes, true); + } + + EventDescriptorCollection ICustomTypeDescriptor.GetEvents() + { + return TypeDescriptor.GetEvents(this, true); + } + + private PropertyDescriptorCollection _propCache; + private FilterCache _filterCache; + + PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties() + { + return ((ICustomTypeDescriptor)this).GetProperties(null); + } + + PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties(Attribute[] attributes) + { + bool filtering = (attributes != null && attributes.Length > 0); + PropertyDescriptorCollection props = _propCache; + FilterCache cache = _filterCache; + + // Use a cached version if possible + if (filtering && cache != null && cache.IsValid(attributes)) + { + return cache.FilteredProperties; + } + else if (!filtering && props != null) + { + return props; + } + + // Create the property collection and filter + props = new PropertyDescriptorCollection(null); + foreach (PropertyDescriptor prop in TypeDescriptor.GetProperties(this, attributes, true)) + { + props.Add(prop); + } + foreach (FieldInfo field in GetType().GetFields()) + { + FieldPropertyDescriptor fieldDesc = new FieldPropertyDescriptor(field); + if (!filtering || fieldDesc.Attributes.Contains(attributes)) props.Add(fieldDesc); + } + + // Store the computed properties + if (filtering) + { + cache = new FilterCache(); + cache.Attributes = attributes; + cache.FilteredProperties = props; + _filterCache = cache; + } + else _propCache = props; + + return props; + } + + private class FilterCache + { + public Attribute[] Attributes; + public PropertyDescriptorCollection FilteredProperties; + 
public bool IsValid(Attribute[] other) + { + if (other == null || Attributes == null) return false; + if (Attributes.Length != other.Length) return false; + for (int i = 0; i < other.Length; i++) + { + if (!Attributes[i].Match(other[i])) return false; + } + return true; + } + } + } +} diff --git a/RaptorDB.Common/LINQString.cs b/RaptorDB.Common/LINQString.cs index fba9887..a43a970 100644 --- a/RaptorDB.Common/LINQString.cs +++ b/RaptorDB.Common/LINQString.cs @@ -1,166 +1,166 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Linq; -using System.Linq.Expressions; -using System.Reflection; -using System.Text; - -namespace RaptorDB -{ - public class LINQString : ExpressionVisitor - { - public LINQString() - { - } - public StringBuilder sb = new StringBuilder(); - - protected override Expression VisitBinary(BinaryExpression b) - { - sb.Append("("); - this.Visit(b.Left); - ExpressionType t = b.NodeType; - - switch (b.NodeType) - { - //case ExpressionType.Not: - // sb.Append(" NOT "); - // break; - case ExpressionType.AndAlso: - case ExpressionType.And: - sb.Append(" AND "); - break; - case ExpressionType.OrElse: - case ExpressionType.Or: - sb.Append(" OR "); - break; - case ExpressionType.Equal: - sb.Append(" = "); - break; - //case ExpressionType.NotEqual: - // sb.Append(" <> "); - // break; - case ExpressionType.LessThan: - sb.Append(" < "); - break; - case ExpressionType.LessThanOrEqual: - sb.Append(" <= "); - break; - case ExpressionType.GreaterThan: - sb.Append(" > "); - break; - case ExpressionType.GreaterThanOrEqual: - sb.Append(" >= "); - break; - } - - this.Visit(b.Right); - sb.Append(")"); - return b; - } - - protected override Expression VisitMethodCall(MethodCallExpression m) - { - string s = m.ToString(); - sb.Append(s.Substring(s.IndexOf('.') + 1)); - return m; - } - - Stack _stack = new Stack(); - protected override Expression VisitMember(MemberExpression m) - { - var e = base.VisitMember(m); - var c 
= m.Expression as ConstantExpression; - if (c != null) - { - Type t = c.Value.GetType(); - var x = t.InvokeMember(m.Member.Name, BindingFlags.GetField | - BindingFlags.GetProperty | - BindingFlags.Public | - BindingFlags.NonPublic | - BindingFlags.Instance | - BindingFlags.Static, null, c.Value, null); - - if (x is string) - sb.Append("\"").Append(x).Append("\""); - else if (!x.GetType().IsClass) - { // check for complex structs - if (x is DateTime || x is Guid) - sb.Append("\"").Append(x).Append("\""); - else // numbers - sb.Append(x); - } - else - _stack.Push(x); - } - - if (m.Expression != null) - { - if (m.Expression.NodeType == ExpressionType.Parameter) // property - sb.Append(m.Member.Name); - else if (m.Expression.NodeType == ExpressionType.MemberAccess) // obj.property - { - Type t = m.Expression.Type; - var f = m.Expression as MemberExpression; - var val = t.InvokeMember(m.Member.Name, BindingFlags.GetField | - BindingFlags.GetProperty | - BindingFlags.Public | - BindingFlags.NonPublic | - BindingFlags.Instance | - BindingFlags.Static, null, _stack.Pop(), null); - - if (val is string) - sb.Append("\"").Append(val).Append("\""); - else if (!val.GetType().IsClass) - { // check for complex structs - if (val is DateTime || val is Guid) - sb.Append("\"").Append(val).Append("\""); - else // numbers - sb.Append(val); - } - else - _stack.Push(val); - } - } - return e; - } - - protected override Expression VisitConstant(ConstantExpression c) - { - IQueryable q = c.Value as IQueryable; - if (q != null) - { - sb.Append(q.ElementType.Name); - } - else if (c.Value == null) - { - sb.Append("NULL"); - } - else - { - //_stack.Push(c.Value); - //if (Type.GetTypeCode(c.Value.GetType()) == TypeCode.Object) - // _stack.Pop(); - - switch (Type.GetTypeCode(c.Value.GetType())) - { - case TypeCode.Boolean: - //sb.Append(((bool)c.Value) ? 1 : 0); - sb.Append(((bool)c.Value) ? 
"True" : "False"); - break; - case TypeCode.String: - sb.Append("\""); - sb.Append(c.Value); - sb.Append("\""); - break; - case TypeCode.Object: - break; - default: - sb.Append(c.Value); - break; - } - } - return c; - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Text; + +namespace RaptorDB +{ + public class LINQString : ExpressionVisitor + { + public LINQString() + { + } + public StringBuilder sb = new StringBuilder(); + + protected override Expression VisitBinary(BinaryExpression b) + { + sb.Append("("); + this.Visit(b.Left); + ExpressionType t = b.NodeType; + + switch (b.NodeType) + { + //case ExpressionType.Not: + // sb.Append(" NOT "); + // break; + case ExpressionType.AndAlso: + case ExpressionType.And: + sb.Append(" AND "); + break; + case ExpressionType.OrElse: + case ExpressionType.Or: + sb.Append(" OR "); + break; + case ExpressionType.Equal: + sb.Append(" = "); + break; + //case ExpressionType.NotEqual: + // sb.Append(" <> "); + // break; + case ExpressionType.LessThan: + sb.Append(" < "); + break; + case ExpressionType.LessThanOrEqual: + sb.Append(" <= "); + break; + case ExpressionType.GreaterThan: + sb.Append(" > "); + break; + case ExpressionType.GreaterThanOrEqual: + sb.Append(" >= "); + break; + } + + this.Visit(b.Right); + sb.Append(")"); + return b; + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + string s = m.ToString(); + sb.Append(s.Substring(s.IndexOf('.') + 1)); + return m; + } + + Stack _stack = new Stack(); + protected override Expression VisitMember(MemberExpression m) + { + var e = base.VisitMember(m); + var c = m.Expression as ConstantExpression; + if (c != null) + { + Type t = c.Value.GetType(); + var x = t.InvokeMember(m.Member.Name, BindingFlags.GetField | + BindingFlags.GetProperty | + BindingFlags.Public | + BindingFlags.NonPublic | + BindingFlags.Instance 
| + BindingFlags.Static, null, c.Value, null); + + if (x is string) + sb.Append("\"").Append(x).Append("\""); + else if (!x.GetType().IsClass) + { // check for complex structs + if (x is DateTime || x is Guid) + sb.Append("\"").Append(x).Append("\""); + else // numbers + sb.Append(x); + } + else + _stack.Push(x); + } + + if (m.Expression != null) + { + if (m.Expression.NodeType == ExpressionType.Parameter) // property + sb.Append(m.Member.Name); + else if (m.Expression.NodeType == ExpressionType.MemberAccess) // obj.property + { + Type t = m.Expression.Type; + var f = m.Expression as MemberExpression; + var val = t.InvokeMember(m.Member.Name, BindingFlags.GetField | + BindingFlags.GetProperty | + BindingFlags.Public | + BindingFlags.NonPublic | + BindingFlags.Instance | + BindingFlags.Static, null, _stack.Pop(), null); + + if (val is string) + sb.Append("\"").Append(val).Append("\""); + else if (!val.GetType().IsClass) + { // check for complex structs + if (val is DateTime || val is Guid) + sb.Append("\"").Append(val).Append("\""); + else // numbers + sb.Append(val); + } + else + _stack.Push(val); + } + } + return e; + } + + protected override Expression VisitConstant(ConstantExpression c) + { + IQueryable q = c.Value as IQueryable; + if (q != null) + { + sb.Append(q.ElementType.Name); + } + else if (c.Value == null) + { + sb.Append("NULL"); + } + else + { + //_stack.Push(c.Value); + //if (Type.GetTypeCode(c.Value.GetType()) == TypeCode.Object) + // _stack.Pop(); + + switch (Type.GetTypeCode(c.Value.GetType())) + { + case TypeCode.Boolean: + //sb.Append(((bool)c.Value) ? 1 : 0); + sb.Append(((bool)c.Value) ? 
"True" : "False"); + break; + case TypeCode.String: + sb.Append("\""); + sb.Append(c.Value); + sb.Append("\""); + break; + case TypeCode.Object: + break; + default: + sb.Append(c.Value); + break; + } + } + return c; + } + } +} diff --git a/RaptorDB.Common/Logger.cs b/RaptorDB.Common/Logger.cs index 902d3d4..3c96371 100644 --- a/RaptorDB.Common/Logger.cs +++ b/RaptorDB.Common/Logger.cs @@ -1,257 +1,257 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Collections; -using System.Threading; -using System.IO; - -namespace RaptorDB -{ - public interface ILog - { - void Debug(object msg, params object[] objs); - void Error(object msg, params object[] objs); - void Info(object msg, params object[] objs); - void Warn(object msg, params object[] objs); - void Fatal(object msg, params object[] objs); - } - - internal class FileLogger - { - // Sinlgeton pattern 4 from : http://csharpindepth.com/articles/general/singleton.aspx - private static readonly FileLogger instance = new FileLogger(); - // Explicit static constructor to tell C# compiler - // not to mark type as beforefieldinit - static FileLogger() - { - } - private FileLogger() - { - } - public static FileLogger Instance { get { return instance; } } - - private Queue _que = new Queue(); - private StreamWriter _output; - private string _filename; - private int _sizeLimit = 0; - private long _lastSize = 0; - private DateTime _lastFileDate; - private bool _showMethodName = false; - private string _FilePath = ""; - System.Timers.Timer _saveTimer; - - public bool ShowMethodNames - { - get { return _showMethodName; } - } - - public void Init(string filename, int sizelimitKB, bool showmethodnames) - { - if (_output != null) - return; - _que = new Queue(); - _showMethodName = showmethodnames; - _sizeLimit = sizelimitKB; - _filename = filename; - // handle folder names as well -> create dir etc. 
- _FilePath = Path.GetDirectoryName(filename); - if (_FilePath != "") - { - _FilePath = Directory.CreateDirectory(_FilePath).FullName; - if (_FilePath.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) - _FilePath += Path.DirectorySeparatorChar.ToString(); - } - _output = new StreamWriter(filename, true); - FileInfo fi = new FileInfo(filename); - _lastSize = fi.Length; - _lastFileDate = fi.LastWriteTime; - - _saveTimer = new System.Timers.Timer(500); - _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); - _saveTimer.Enabled = true; - _saveTimer.AutoReset = true; - } - - void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - WriteData(); - } - - public void ShutDown() - { - _saveTimer.Enabled = false; - WriteData(); - if (_output != null) - { - _output.Flush(); - _output.Close(); - _output = null; - } - } - - private void WriteData() - { - if (_output == null) - return; - lock (_que) - { - while (_que.Count > 0) - { - object o = _que.Dequeue(); - if (_output != null && o != null) - { - if (_sizeLimit > 0) - { - // implement size limited logs - // implement rolling logs - #region [ rolling size limit ] - _lastSize += ("" + o).Length; - if (_lastSize > _sizeLimit * 1000) - { - _output.Flush(); - _output.Close(); - int count = 1; - while (File.Exists(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." + count.ToString("0000"))) - count++; - - File.Move(_filename, - _FilePath + - Path.GetFileNameWithoutExtension(_filename) + - "." + count.ToString("0000")); - _output = new StreamWriter(_filename, true); - _lastSize = 0; - } - #endregion - } - if (DateTime.Now.Subtract(_lastFileDate).Days > 0) - { - // implement date logs - #region [ rolling dates ] - _output.Flush(); - _output.Close(); - int count = 1; - while (File.Exists(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." + count.ToString("0000"))) - { - File.Move(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." 
+ count.ToString("0000"), - _FilePath + - Path.GetFileNameWithoutExtension(_filename) + - "." + count.ToString("0000") + - "." + _lastFileDate.ToString("yyyy-MM-dd")); - count++; - } - File.Move(_filename, - _FilePath + - Path.GetFileNameWithoutExtension(_filename) + - "." + count.ToString("0000") + - "." + _lastFileDate.ToString("yyyy-MM-dd")); - - _output = new StreamWriter(_filename, true); - _lastFileDate = DateTime.Now; - _lastSize = 0; - #endregion - } - _output.Write(o); - } - } - if (_output != null) - _output.Flush(); - } - } - - private string FormatLog(string log, string type, string meth, string msg, object[] objs) - { - StringBuilder sb = new StringBuilder(); - sb.Append(DateTime.Now.ToString("yyyy-MM-dd hh:mm:ss")); - sb.Append("|"); - sb.Append(log); - sb.Append("|"); - sb.Append(Thread.CurrentThread.ManagedThreadId.ToString()); - sb.Append("|"); - sb.Append(type); - sb.Append("|"); - sb.Append(meth); - sb.Append("| "); - sb.AppendLine(msg); - - if (objs != null) - foreach (object o in objs) - sb.AppendLine("" + o); - - return sb.ToString(); - } - - public void Log(string logtype, string type, string meth, string msg, params object[] objs) - { - lock (_que) - _que.Enqueue(FormatLog(logtype, type, meth, msg, objs)); - } - } - - internal class logger : ILog - { - public logger(Type type) - { - typename = type.Namespace + "." 
+ type.Name; - } - - private string typename = ""; - - private void log(string logtype, string msg, params object[] objs) - { - string meth = ""; - if (FileLogger.Instance.ShowMethodNames) - { - System.Diagnostics.StackTrace st = new System.Diagnostics.StackTrace(2); - System.Diagnostics.StackFrame sf = st.GetFrame(0); - meth = sf.GetMethod().Name; - } - FileLogger.Instance.Log(logtype, typename, meth, msg, objs); - } - - #region ILog Members - - public void Debug(object msg, params object[] objs) - { - log("DEBUG", "" + msg, objs); - } - - public void Error(object msg, params object[] objs) - { - log("ERROR", "" + msg, objs); - } - - public void Info(object msg, params object[] objs) - { - log("INFO", "" + msg, objs); - } - - public void Warn(object msg, params object[] objs) - { - log("WARN", "" + msg, objs); - } - - public void Fatal(object msg, params object[] objs) - { - log("FATAL", "" + msg, objs); - } - #endregion - } - - public static class LogManager - { - public static ILog GetLogger(Type obj) - { - return new logger(obj); - } - - public static void Configure(string filename, int sizelimitKB, bool showmethodnames) - { - FileLogger.Instance.Init(filename, sizelimitKB, showmethodnames); - } - - public static void Shutdown() - { - FileLogger.Instance.ShutDown(); - } - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.Collections; +using System.Threading; +using System.IO; + +namespace RaptorDB +{ + public interface ILog + { + void Debug(object msg, params object[] objs); + void Error(object msg, params object[] objs); + void Info(object msg, params object[] objs); + void Warn(object msg, params object[] objs); + void Fatal(object msg, params object[] objs); + } + + internal class FileLogger + { + // Sinlgeton pattern 4 from : http://csharpindepth.com/articles/general/singleton.aspx + private static readonly FileLogger instance = new FileLogger(); + // Explicit static constructor to tell C# compiler + // not to mark 
type as beforefieldinit + static FileLogger() + { + } + private FileLogger() + { + } + public static FileLogger Instance { get { return instance; } } + + private Queue _que = new Queue(); + private StreamWriter _output; + private string _filename; + private int _sizeLimit = 0; + private long _lastSize = 0; + private DateTime _lastFileDate; + private bool _showMethodName = false; + private string _FilePath = ""; + System.Timers.Timer _saveTimer; + + public bool ShowMethodNames + { + get { return _showMethodName; } + } + + public void Init(string filename, int sizelimitKB, bool showmethodnames) + { + if (_output != null) + return; + _que = new Queue(); + _showMethodName = showmethodnames; + _sizeLimit = sizelimitKB; + _filename = filename; + // handle folder names as well -> create dir etc. + _FilePath = Path.GetDirectoryName(filename); + if (_FilePath != "") + { + _FilePath = Directory.CreateDirectory(_FilePath).FullName; + if (_FilePath.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) + _FilePath += Path.DirectorySeparatorChar.ToString(); + } + _output = new StreamWriter(filename, true); + FileInfo fi = new FileInfo(filename); + _lastSize = fi.Length; + _lastFileDate = fi.LastWriteTime; + + _saveTimer = new System.Timers.Timer(500); + _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); + _saveTimer.Enabled = true; + _saveTimer.AutoReset = true; + } + + void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + WriteData(); + } + + public void ShutDown() + { + _saveTimer.Enabled = false; + WriteData(); + if (_output != null) + { + _output.Flush(); + _output.Close(); + _output = null; + } + } + + private void WriteData() + { + if (_output == null) + return; + lock (_que) + { + while (_que.Count > 0) + { + object o = _que.Dequeue(); + if (_output != null && o != null) + { + if (_sizeLimit > 0) + { + // implement size limited logs + // implement rolling logs + #region [ rolling size limit ] + _lastSize += ("" + 
o).Length; + if (_lastSize > _sizeLimit * 1000) + { + _output.Flush(); + _output.Close(); + int count = 1; + while (File.Exists(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." + count.ToString("0000"))) + count++; + + File.Move(_filename, + _FilePath + + Path.GetFileNameWithoutExtension(_filename) + + "." + count.ToString("0000")); + _output = new StreamWriter(_filename, true); + _lastSize = 0; + } + #endregion + } + if (DateTime.Now.Subtract(_lastFileDate).Days > 0) + { + // implement date logs + #region [ rolling dates ] + _output.Flush(); + _output.Close(); + int count = 1; + while (File.Exists(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." + count.ToString("0000"))) + { + File.Move(_FilePath + Path.GetFileNameWithoutExtension(_filename) + "." + count.ToString("0000"), + _FilePath + + Path.GetFileNameWithoutExtension(_filename) + + "." + count.ToString("0000") + + "." + _lastFileDate.ToString("yyyy-MM-dd")); + count++; + } + File.Move(_filename, + _FilePath + + Path.GetFileNameWithoutExtension(_filename) + + "." + count.ToString("0000") + + "." 
+ _lastFileDate.ToString("yyyy-MM-dd")); + + _output = new StreamWriter(_filename, true); + _lastFileDate = DateTime.Now; + _lastSize = 0; + #endregion + } + _output.Write(o); + } + } + if (_output != null) + _output.Flush(); + } + } + + private string FormatLog(string log, string type, string meth, string msg, object[] objs) + { + StringBuilder sb = new StringBuilder(); + sb.Append(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss")); + sb.Append("|"); + sb.Append(log); + sb.Append("|"); + sb.Append(Thread.CurrentThread.ManagedThreadId.ToString()); + sb.Append("|"); + sb.Append(type); + sb.Append("|"); + sb.Append(meth); + sb.Append("| "); + sb.AppendLine(msg); + + if (objs != null) + foreach (object o in objs) + sb.AppendLine("" + o); + + return sb.ToString(); + } + + public void Log(string logtype, string type, string meth, string msg, params object[] objs) + { + lock (_que) + _que.Enqueue(FormatLog(logtype, type, meth, msg, objs)); + } + } + + internal class logger : ILog + { + public logger(Type type) + { + typename = type.Namespace + "." 
+ type.Name; + } + + private string typename = ""; + + private void log(string logtype, string msg, params object[] objs) + { + string meth = ""; + if (FileLogger.Instance.ShowMethodNames) + { + System.Diagnostics.StackTrace st = new System.Diagnostics.StackTrace(2); + System.Diagnostics.StackFrame sf = st.GetFrame(0); + meth = sf.GetMethod().Name; + } + FileLogger.Instance.Log(logtype, typename, meth, msg, objs); + } + + #region ILog Members + + public void Debug(object msg, params object[] objs) + { + log("DEBUG", "" + msg, objs); + } + + public void Error(object msg, params object[] objs) + { + log("ERROR", "" + msg, objs); + } + + public void Info(object msg, params object[] objs) + { + log("INFO", "" + msg, objs); + } + + public void Warn(object msg, params object[] objs) + { + log("WARN", "" + msg, objs); + } + + public void Fatal(object msg, params object[] objs) + { + log("FATAL", "" + msg, objs); + } + #endregion + } + + public static class LogManager + { + public static ILog GetLogger(Type obj) + { + return new logger(obj); + } + + public static void Configure(string filename, int sizelimitKB, bool showmethodnames) + { + FileLogger.Instance.Init(filename, sizelimitKB, showmethodnames); + } + + public static void Shutdown() + { + FileLogger.Instance.ShutDown(); + } + } +} diff --git a/RaptorDB.Common/MiniLZO.cs b/RaptorDB.Common/MiniLZO.cs index 8185d6e..042d9ff 100644 --- a/RaptorDB.Common/MiniLZO.cs +++ b/RaptorDB.Common/MiniLZO.cs @@ -1,690 +1,690 @@ -/** - * - * Modifications by Simon Hewitt - * - change constructors/methods to return byte[] - * - append original source size at the end of the destination buffer - * - add support for MemoryStream internal buffer usage - * - * - * ManagedLZO.MiniLZO - * - * Minimalistic reimplementation of minilzo in C# - * - * @author Shane Eric Bryldt, Copyright (C) 2006, All Rights Reserved - * @note Uses unsafe/fixed pointer contexts internally - * @liscence Bound by same liscence as minilzo as below, see file 
COPYING - */ - -/* Based on minilzo.c -- mini subset of the LZO real-time data compression library - - This file is part of the LZO real-time data compression library. - - Copyright (C) 2005 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 2004 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 2003 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 2002 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 2001 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 2000 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 1999 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 1998 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 1997 Markus Franz Xaver Johannes Oberhumer - Copyright (C) 1996 Markus Franz Xaver Johannes Oberhumer - All Rights Reserved. - - The LZO library is free software; you can redistribute it and/or - modify it under the terms of the GNU General Public License, - version 2, as published by the Free Software Foundation. - - The LZO library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with the LZO library; see the file COPYING. - If not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - Markus F.X.J. 
Oberhumer - - http://www.oberhumer.com/opensource/lzo/ - */ - -/* - * NOTE: - * the full LZO package can be found at - * http://www.oberhumer.com/opensource/lzo/ - */ - -using System; -using System.Diagnostics; -using System.IO; - -namespace RaptorDB -{ - public class MiniLZO - { - private const uint M2_MAX_LEN = 8; - private const uint M4_MAX_LEN = 9; - private const byte M3_MARKER = 32; - private const byte M4_MARKER = 16; - private const uint M2_MAX_OFFSET = 0x0800; - private const uint M3_MAX_OFFSET = 0x4000; - private const uint M4_MAX_OFFSET = 0xbfff; - private const byte BITS = 14; - private const uint D_MASK = (1 << BITS) - 1; - private static uint DICT_SIZE = 65536 + 3; - - static MiniLZO() - { - if (IntPtr.Size == 8) - DICT_SIZE = (65536 + 3) * 2; - } - public static byte[] Compress(byte[] src) { return Compress(src, 0, src.Length); } - public static byte[] Compress(byte[] src, int srcCount) { return Compress(src, 0, srcCount); } - public static byte[] Compress(byte[] src, int srcStart, int srcLength) - { - byte[] workMem = new byte[DICT_SIZE]; - uint dstlen = (uint)(srcLength + (srcLength / 16) + 64 + 3 + 4); - byte[] dst = new byte[dstlen]; - - uint compressedSize = Compress(src, (uint)srcStart, (uint)srcLength, dst, 0, dstlen, workMem, 0); - - if (dst.Length != compressedSize) - { - byte[] final = new byte[compressedSize]; - Buffer.BlockCopy(dst, 0, final, 0, (int)compressedSize); - dst = final; - } - - return dst; - - } - - public static byte[] Compress(MemoryStream source) - { - byte[] destinationBuffer; - byte[] workspaceBuffer; - uint sourceOffset; - uint workspaceOffset; - uint sourceLength; - uint destinationLength; - - byte[] sourceBuffer = source.GetBuffer(); - uint sourceCapacity = (uint)source.Capacity; - sourceLength = (uint)source.Length; - destinationLength = sourceLength + (sourceLength / 16) + 64 + 3 + 4; - - uint unusedSpace = sourceCapacity - sourceLength; - uint inplaceOverhead = Math.Min(sourceLength, M4_MAX_OFFSET) + sourceLength / 
64 + 16 + 3 + 4; - - if (unusedSpace < inplaceOverhead) - { - sourceOffset = 0; - destinationBuffer = new byte[destinationLength]; - } - else - { - sourceOffset = inplaceOverhead; - source.SetLength(sourceLength + inplaceOverhead); - destinationBuffer = sourceBuffer; - Buffer.BlockCopy(destinationBuffer, 0, destinationBuffer, (int)inplaceOverhead, (int)sourceLength); - unusedSpace -= inplaceOverhead; - } - - if (unusedSpace < DICT_SIZE) - { - workspaceBuffer = new byte[DICT_SIZE]; - workspaceOffset = 0; - } - else - { - workspaceBuffer = sourceBuffer; - workspaceOffset = sourceCapacity - DICT_SIZE; - } - - uint compressedSize = Compress(sourceBuffer, sourceOffset, sourceLength, destinationBuffer, 0, destinationLength, workspaceBuffer, workspaceOffset); - - if (destinationBuffer == sourceBuffer) - { - source.SetLength(compressedSize); - source.Capacity = (int)compressedSize; - return source.GetBuffer(); - } - else - { - byte[] final = new byte[compressedSize]; - Buffer.BlockCopy(destinationBuffer, 0, final, 0, (int)compressedSize); - return final; - } - - } - - private static unsafe uint Compress(byte[] src, uint srcstart, uint srcLength, byte[] dst, uint dststart, uint dstlen, byte[] workmem, uint workmemstart) - { - uint tmp; - if (srcLength <= M2_MAX_LEN + 5) - { - tmp = (uint)srcLength; - dstlen = 0; - } - else - { - fixed (byte* work = &workmem[workmemstart], input = &src[srcstart], output = &dst[dststart]) - { - byte** dict = (byte**)work; - byte* in_end = input + srcLength; - byte* ip_end = input + srcLength - M2_MAX_LEN - 5; - byte* ii = input; - byte* ip = input + 4; - byte* op = output; - bool literal = false; - bool match = false; - uint offset; - uint length; - uint index; - byte* pos; - - for (; ; ) - { - offset = 0; - index = D_INDEX1(ip); - pos = ip - (ip - dict[index]); - if (pos < input || (offset = (uint)(ip - pos)) <= 0 || offset > M4_MAX_OFFSET) - literal = true; - else if (offset <= M2_MAX_OFFSET || pos[3] == ip[3]) { } - else - { - index = 
D_INDEX2(index); - pos = ip - (ip - dict[index]); - if (pos < input || (offset = (uint)(ip - pos)) <= 0 || offset > M4_MAX_OFFSET) - literal = true; - else if (offset <= M2_MAX_OFFSET || pos[3] == ip[3]) { } - else - literal = true; - } - - if (!literal) - { - if (*((ushort*)pos) == *((ushort*)ip) && pos[2] == ip[2]) - match = true; - } - - literal = false; - if (!match) - { - dict[index] = ip; - ++ip; - if (ip >= ip_end) - break; - continue; - } - match = false; - dict[index] = ip; - if (ip - ii > 0) - { - uint t = (uint)(ip - ii); - if (t <= 3) - { - //Debug.Assert(op - 2 > output); - op[-2] |= (byte)(t); - } - else if (t <= 18) - *op++ = (byte)(t - 3); - else - { - uint tt = t - 18; - *op++ = 0; - while (tt > 255) - { - tt -= 255; - *op++ = 0; - } - //Debug.Assert(tt > 0); - *op++ = (byte)(tt); - } - do - { - *op++ = *ii++; - } while (--t > 0); - } - //Debug.Assert(ii == ip); - ip += 3; - if (pos[3] != *ip++ || pos[4] != *ip++ || pos[5] != *ip++ - || pos[6] != *ip++ || pos[7] != *ip++ || pos[8] != *ip++) - { - --ip; - length = (uint)(ip - ii); - //Debug.Assert(length >= 3); - //Debug.Assert(length <= M2_MAX_LEN); - if (offset <= M2_MAX_OFFSET) - { - --offset; - *op++ = (byte)(((length - 1) << 5) | ((offset & 7) << 2)); - *op++ = (byte)(offset >> 3); - } - else if (offset <= M3_MAX_OFFSET) - { - --offset; - *op++ = (byte)(M3_MARKER | (length - 2)); - *op++ = (byte)((offset & 63) << 2); - *op++ = (byte)(offset >> 6); - } - else - { - offset -= 0x4000; - //Debug.Assert(offset > 0); - //Debug.Assert(offset <= 0x7FFF); - *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11) | (length - 2)); - *op++ = (byte)((offset & 63) << 2); - *op++ = (byte)(offset >> 6); - } - } - else - { - byte* m = pos + M2_MAX_LEN + 1; - while (ip < in_end && *m == *ip) - { - ++m; - ++ip; - } - length = (uint)(ip - ii); - //Debug.Assert(length > M2_MAX_LEN); - if (offset <= M3_MAX_OFFSET) - { - --offset; - if (length <= 33) - *op++ = (byte)(M3_MARKER | (length - 2)); - else - { - length -= 33; 
- *op++ = M3_MARKER | 0; - while (length > 255) - { - length -= 255; - *op++ = 0; - } - //Debug.Assert(length > 0); - *op++ = (byte)(length); - } - } - else - { - offset -= 0x4000; - //Debug.Assert(offset > 0); - //Debug.Assert(offset <= 0x7FFF); - if (length <= M4_MAX_LEN) - *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11) | (length - 2)); - else - { - length -= M4_MAX_LEN; - *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11)); - while (length > 255) - { - length -= 255; - *op++ = 0; - } - //Debug.Assert(length > 0); - *op++ = (byte)(length); - } - } - *op++ = (byte)((offset & 63) << 2); - *op++ = (byte)(offset >> 6); - } - ii = ip; - if (ip >= ip_end) - break; - } - dstlen = (uint)(op - output); - tmp = (uint)(in_end - ii); - } - } - if (tmp > 0) - { - uint ii = (uint)srcLength - tmp + srcstart; - if (dstlen == 0 && tmp <= 238) - { - dst[dstlen++] = (byte)(17 + tmp); - } - else if (tmp <= 3) - { - dst[dstlen - 2] |= (byte)(tmp); - } - else if (tmp <= 18) - { - dst[dstlen++] = (byte)(tmp - 3); - } - else - { - uint tt = tmp - 18; - dst[dstlen++] = 0; - while (tt > 255) - { - tt -= 255; - dst[dstlen++] = 0; - } - //Debug.Assert(tt > 0); - dst[dstlen++] = (byte)(tt); - } - do - { - dst[dstlen++] = src[ii++]; - } while (--tmp > 0); - } - dst[dstlen++] = M4_MARKER | 1; - dst[dstlen++] = 0; - dst[dstlen++] = 0; - - // Append the source count - dst[dstlen++] = (byte)srcLength; - dst[dstlen++] = (byte)(srcLength >> 8); - dst[dstlen++] = (byte)(srcLength >> 16); - dst[dstlen++] = (byte)(srcLength >> 24); - - return dstlen; - } - - public static unsafe byte[] Decompress(byte[] src) - { - byte[] dst = new byte[(src[src.Length - 4] | (src[src.Length - 3] << 8) | (src[src.Length - 2] << 16 | src[src.Length - 1] << 24))]; - - uint t = 0; - fixed (byte* input = src, output = dst) - { - byte* pos = null; - byte* ip_end = input + src.Length - 4; - byte* op_end = output + dst.Length; - byte* ip = input; - byte* op = output; - bool match = false; - bool match_next = false; - 
bool match_done = false; - bool copy_match = false; - bool first_literal_run = false; - bool eof_found = false; - - if (*ip > 17) - { - t = (uint)(*ip++ - 17); - if (t < 4) - match_next = true; - else - { - //Debug.Assert(t > 0); - if ((op_end - op) < t) - throw new OverflowException("Output Overrun"); - if ((ip_end - ip) < t + 1) - throw new OverflowException("Input Overrun"); - do - { - *op++ = *ip++; - } while (--t > 0); - first_literal_run = true; - } - } - while (!eof_found && ip < ip_end) - { - if (!match_next && !first_literal_run) - { - t = *ip++; - if (t >= 16) - match = true; - else - { - if (t == 0) - { - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - while (*ip == 0) - { - t += 255; - ++ip; - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - } - t += (uint)(15 + *ip++); - } - //Debug.Assert(t > 0); - if ((op_end - op) < t + 3) - throw new OverflowException("Output Overrun"); - if ((ip_end - ip) < t + 4) - throw new OverflowException("Input Overrun"); - for (int x = 0; x < 4; ++x, ++op, ++ip) - *op = *ip; - if (--t > 0) - { - if (t >= 4) - { - do - { - for (int x = 0; x < 4; ++x, ++op, ++ip) - *op = *ip; - t -= 4; - } while (t >= 4); - if (t > 0) - { - do - { - *op++ = *ip++; - } while (--t > 0); - } - } - else - { - do - { - *op++ = *ip++; - } while (--t > 0); - } - } - } - } - if (!match && !match_next) - { - first_literal_run = false; - - t = *ip++; - if (t >= 16) - match = true; - else - { - pos = op - (1 + M2_MAX_OFFSET); - pos -= t >> 2; - pos -= *ip++ << 2; - if (pos < output || pos >= op) - throw new OverflowException("Lookbehind Overrun"); - if ((op_end - op) < 3) - throw new OverflowException("Output Overrun"); - *op++ = *pos++; - *op++ = *pos++; - *op++ = *pos++; - match_done = true; - } - } - match = false; - do - { - if (t >= 64) - { - pos = op - 1; - pos -= (t >> 2) & 7; - pos -= *ip++ << 3; - t = (t >> 5) - 1; - if (pos < output || pos >= op) - throw new OverflowException("Lookbehind 
Overrun"); - if ((op_end - op) < t + 2) - throw new OverflowException("Output Overrun"); - copy_match = true; - } - else if (t >= 32) - { - t &= 31; - if (t == 0) - { - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - while (*ip == 0) - { - t += 255; - ++ip; - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - } - t += (uint)(31 + *ip++); - } - pos = op - 1; - pos -= (*(ushort*)ip) >> 2; - ip += 2; - } - else if (t >= 16) - { - pos = op; - pos -= (t & 8) << 11; - - t &= 7; - if (t == 0) - { - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - while (*ip == 0) - { - t += 255; - ++ip; - if ((ip_end - ip) < 1) - throw new OverflowException("Input Overrun"); - } - t += (uint)(7 + *ip++); - } - pos -= (*(ushort*)ip) >> 2; - ip += 2; - if (pos == op) - eof_found = true; - else - pos -= 0x4000; - } - else - { - pos = op - 1; - pos -= t >> 2; - pos -= *ip++ << 2; - if (pos < output || pos >= op) - throw new OverflowException("Lookbehind Overrun"); - if ((op_end - op) < 2) - throw new OverflowException("Output Overrun"); - *op++ = *pos++; - *op++ = *pos++; - match_done = true; - } - if (!eof_found && !match_done && !copy_match) - { - if (pos < output || pos >= op) - throw new OverflowException("Lookbehind Overrun"); - //Debug.Assert(t > 0); - if ((op_end - op) < t + 2) - throw new OverflowException("Output Overrun"); - } - if (!eof_found && t >= 2 * 4 - 2 && (op - pos) >= 4 && !match_done && !copy_match) - { - for (int x = 0; x < 4; ++x, ++op, ++pos) - *op = *pos; - t -= 2; - do - { - for (int x = 0; x < 4; ++x, ++op, ++pos) - *op = *pos; - t -= 4; - } while (t >= 4); - if (t > 0) - { - do - { - *op++ = *pos++; - } while (--t > 0); - } - } - else if (!eof_found && !match_done) - { - copy_match = false; - - *op++ = *pos++; - *op++ = *pos++; - do - { - *op++ = *pos++; - } while (--t > 0); - } - - if (!eof_found && !match_next) - { - match_done = false; - - t = (uint)(ip[-2] & 3); - if (t == 0) - break; - } - 
if (!eof_found) - { - match_next = false; - //Debug.Assert(t > 0); - //Debug.Assert(t < 4); - if ((op_end - op) < t) - throw new OverflowException("Output Overrun"); - if ((ip_end - ip) < t + 1) - throw new OverflowException("Input Overrun"); - *op++ = *ip++; - if (t > 1) - { - *op++ = *ip++; - if (t > 2) - *op++ = *ip++; - } - t = *ip++; - } - } while (!eof_found && ip < ip_end); - } - if (!eof_found) - throw new OverflowException("EOF Marker Not Found"); - else - { - //Debug.Assert(t == 1); - if (ip > ip_end) - throw new OverflowException("Input Overrun"); - else if (ip < ip_end) - throw new OverflowException("Input Not Consumed"); - } - } - - return dst; - } - - private unsafe static uint D_INDEX1(byte* input) - { - return D_MS(D_MUL(0x21, D_X3(input, 5, 5, 6)) >> 5, 0); - } - - private static uint D_INDEX2(uint idx) - { - return (idx & (D_MASK & 0x7FF)) ^ (((D_MASK >> 1) + 1) | 0x1F); - } - - private static uint D_MS(uint v, byte s) - { - return (v & (D_MASK >> s)) << s; - } - - private static uint D_MUL(uint a, uint b) - { - return a * b; - } - - private unsafe static uint D_X2(byte* input, byte s1, byte s2) - { - return (uint)((((input[2] << s2) ^ input[1]) << s1) ^ input[0]); - } - - private unsafe static uint D_X3(byte* input, byte s1, byte s2, byte s3) - { - return (D_X2(input + 1, s2, s3) << s1) ^ input[0]; - } - } +/** + * + * Modifications by Simon Hewitt + * - change constructors/methods to return byte[] + * - append original source size at the end of the destination buffer + * - add support for MemoryStream internal buffer usage + * + * + * ManagedLZO.MiniLZO + * + * Minimalistic reimplementation of minilzo in C# + * + * @author Shane Eric Bryldt, Copyright (C) 2006, All Rights Reserved + * @note Uses unsafe/fixed pointer contexts internally + * @liscence Bound by same liscence as minilzo as below, see file COPYING + */ + +/* Based on minilzo.c -- mini subset of the LZO real-time data compression library + + This file is part of the LZO real-time data 
compression library. + + Copyright (C) 2005 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 2004 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 2003 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 2002 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 2001 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 2000 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 1999 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 1998 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 1997 Markus Franz Xaver Johannes Oberhumer + Copyright (C) 1996 Markus Franz Xaver Johannes Oberhumer + All Rights Reserved. + + The LZO library is free software; you can redistribute it and/or + modify it under the terms of the GNU General Public License, + version 2, as published by the Free Software Foundation. + + The LZO library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with the LZO library; see the file COPYING. + If not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + Markus F.X.J. 
Oberhumer + + http://www.oberhumer.com/opensource/lzo/ + */ + +/* + * NOTE: + * the full LZO package can be found at + * http://www.oberhumer.com/opensource/lzo/ + */ + +using System; +using System.Diagnostics; +using System.IO; + +namespace RaptorDB +{ + public class MiniLZO + { + private const uint M2_MAX_LEN = 8; + private const uint M4_MAX_LEN = 9; + private const byte M3_MARKER = 32; + private const byte M4_MARKER = 16; + private const uint M2_MAX_OFFSET = 0x0800; + private const uint M3_MAX_OFFSET = 0x4000; + private const uint M4_MAX_OFFSET = 0xbfff; + private const byte BITS = 14; + private const uint D_MASK = (1 << BITS) - 1; + private static uint DICT_SIZE = 65536 + 3; + + static MiniLZO() + { + if (IntPtr.Size == 8) + DICT_SIZE = (65536 + 3) * 2; + } + public static byte[] Compress(byte[] src) { return Compress(src, 0, src.Length); } + public static byte[] Compress(byte[] src, int srcCount) { return Compress(src, 0, srcCount); } + public static byte[] Compress(byte[] src, int srcStart, int srcLength) + { + byte[] workMem = new byte[DICT_SIZE]; + uint dstlen = (uint)(srcLength + (srcLength / 16) + 64 + 3 + 4); + byte[] dst = new byte[dstlen]; + + uint compressedSize = Compress(src, (uint)srcStart, (uint)srcLength, dst, 0, dstlen, workMem, 0); + + if (dst.Length != compressedSize) + { + byte[] final = new byte[compressedSize]; + Buffer.BlockCopy(dst, 0, final, 0, (int)compressedSize); + dst = final; + } + + return dst; + + } + + public static byte[] Compress(MemoryStream source) + { + byte[] destinationBuffer; + byte[] workspaceBuffer; + uint sourceOffset; + uint workspaceOffset; + uint sourceLength; + uint destinationLength; + + byte[] sourceBuffer = source.GetBuffer(); + uint sourceCapacity = (uint)source.Capacity; + sourceLength = (uint)source.Length; + destinationLength = sourceLength + (sourceLength / 16) + 64 + 3 + 4; + + uint unusedSpace = sourceCapacity - sourceLength; + uint inplaceOverhead = Math.Min(sourceLength, M4_MAX_OFFSET) + sourceLength / 
64 + 16 + 3 + 4; + + if (unusedSpace < inplaceOverhead) + { + sourceOffset = 0; + destinationBuffer = new byte[destinationLength]; + } + else + { + sourceOffset = inplaceOverhead; + source.SetLength(sourceLength + inplaceOverhead); + destinationBuffer = sourceBuffer; + Buffer.BlockCopy(destinationBuffer, 0, destinationBuffer, (int)inplaceOverhead, (int)sourceLength); + unusedSpace -= inplaceOverhead; + } + + if (unusedSpace < DICT_SIZE) + { + workspaceBuffer = new byte[DICT_SIZE]; + workspaceOffset = 0; + } + else + { + workspaceBuffer = sourceBuffer; + workspaceOffset = sourceCapacity - DICT_SIZE; + } + + uint compressedSize = Compress(sourceBuffer, sourceOffset, sourceLength, destinationBuffer, 0, destinationLength, workspaceBuffer, workspaceOffset); + + if (destinationBuffer == sourceBuffer) + { + source.SetLength(compressedSize); + source.Capacity = (int)compressedSize; + return source.GetBuffer(); + } + else + { + byte[] final = new byte[compressedSize]; + Buffer.BlockCopy(destinationBuffer, 0, final, 0, (int)compressedSize); + return final; + } + + } + + private static unsafe uint Compress(byte[] src, uint srcstart, uint srcLength, byte[] dst, uint dststart, uint dstlen, byte[] workmem, uint workmemstart) + { + uint tmp; + if (srcLength <= M2_MAX_LEN + 5) + { + tmp = (uint)srcLength; + dstlen = 0; + } + else + { + fixed (byte* work = &workmem[workmemstart], input = &src[srcstart], output = &dst[dststart]) + { + byte** dict = (byte**)work; + byte* in_end = input + srcLength; + byte* ip_end = input + srcLength - M2_MAX_LEN - 5; + byte* ii = input; + byte* ip = input + 4; + byte* op = output; + bool literal = false; + bool match = false; + uint offset; + uint length; + uint index; + byte* pos; + + for (; ; ) + { + offset = 0; + index = D_INDEX1(ip); + pos = ip - (ip - dict[index]); + if (pos < input || (offset = (uint)(ip - pos)) <= 0 || offset > M4_MAX_OFFSET) + literal = true; + else if (offset <= M2_MAX_OFFSET || pos[3] == ip[3]) { } + else + { + index = 
D_INDEX2(index); + pos = ip - (ip - dict[index]); + if (pos < input || (offset = (uint)(ip - pos)) <= 0 || offset > M4_MAX_OFFSET) + literal = true; + else if (offset <= M2_MAX_OFFSET || pos[3] == ip[3]) { } + else + literal = true; + } + + if (!literal) + { + if (*((ushort*)pos) == *((ushort*)ip) && pos[2] == ip[2]) + match = true; + } + + literal = false; + if (!match) + { + dict[index] = ip; + ++ip; + if (ip >= ip_end) + break; + continue; + } + match = false; + dict[index] = ip; + if (ip - ii > 0) + { + uint t = (uint)(ip - ii); + if (t <= 3) + { + //Debug.Assert(op - 2 > output); + op[-2] |= (byte)(t); + } + else if (t <= 18) + *op++ = (byte)(t - 3); + else + { + uint tt = t - 18; + *op++ = 0; + while (tt > 255) + { + tt -= 255; + *op++ = 0; + } + //Debug.Assert(tt > 0); + *op++ = (byte)(tt); + } + do + { + *op++ = *ii++; + } while (--t > 0); + } + //Debug.Assert(ii == ip); + ip += 3; + if (pos[3] != *ip++ || pos[4] != *ip++ || pos[5] != *ip++ + || pos[6] != *ip++ || pos[7] != *ip++ || pos[8] != *ip++) + { + --ip; + length = (uint)(ip - ii); + //Debug.Assert(length >= 3); + //Debug.Assert(length <= M2_MAX_LEN); + if (offset <= M2_MAX_OFFSET) + { + --offset; + *op++ = (byte)(((length - 1) << 5) | ((offset & 7) << 2)); + *op++ = (byte)(offset >> 3); + } + else if (offset <= M3_MAX_OFFSET) + { + --offset; + *op++ = (byte)(M3_MARKER | (length - 2)); + *op++ = (byte)((offset & 63) << 2); + *op++ = (byte)(offset >> 6); + } + else + { + offset -= 0x4000; + //Debug.Assert(offset > 0); + //Debug.Assert(offset <= 0x7FFF); + *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11) | (length - 2)); + *op++ = (byte)((offset & 63) << 2); + *op++ = (byte)(offset >> 6); + } + } + else + { + byte* m = pos + M2_MAX_LEN + 1; + while (ip < in_end && *m == *ip) + { + ++m; + ++ip; + } + length = (uint)(ip - ii); + //Debug.Assert(length > M2_MAX_LEN); + if (offset <= M3_MAX_OFFSET) + { + --offset; + if (length <= 33) + *op++ = (byte)(M3_MARKER | (length - 2)); + else + { + length -= 33; 
+ *op++ = M3_MARKER | 0; + while (length > 255) + { + length -= 255; + *op++ = 0; + } + //Debug.Assert(length > 0); + *op++ = (byte)(length); + } + } + else + { + offset -= 0x4000; + //Debug.Assert(offset > 0); + //Debug.Assert(offset <= 0x7FFF); + if (length <= M4_MAX_LEN) + *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11) | (length - 2)); + else + { + length -= M4_MAX_LEN; + *op++ = (byte)(M4_MARKER | ((offset & 0x4000) >> 11)); + while (length > 255) + { + length -= 255; + *op++ = 0; + } + //Debug.Assert(length > 0); + *op++ = (byte)(length); + } + } + *op++ = (byte)((offset & 63) << 2); + *op++ = (byte)(offset >> 6); + } + ii = ip; + if (ip >= ip_end) + break; + } + dstlen = (uint)(op - output); + tmp = (uint)(in_end - ii); + } + } + if (tmp > 0) + { + uint ii = (uint)srcLength - tmp + srcstart; + if (dstlen == 0 && tmp <= 238) + { + dst[dstlen++] = (byte)(17 + tmp); + } + else if (tmp <= 3) + { + dst[dstlen - 2] |= (byte)(tmp); + } + else if (tmp <= 18) + { + dst[dstlen++] = (byte)(tmp - 3); + } + else + { + uint tt = tmp - 18; + dst[dstlen++] = 0; + while (tt > 255) + { + tt -= 255; + dst[dstlen++] = 0; + } + //Debug.Assert(tt > 0); + dst[dstlen++] = (byte)(tt); + } + do + { + dst[dstlen++] = src[ii++]; + } while (--tmp > 0); + } + dst[dstlen++] = M4_MARKER | 1; + dst[dstlen++] = 0; + dst[dstlen++] = 0; + + // Append the source count + dst[dstlen++] = (byte)srcLength; + dst[dstlen++] = (byte)(srcLength >> 8); + dst[dstlen++] = (byte)(srcLength >> 16); + dst[dstlen++] = (byte)(srcLength >> 24); + + return dstlen; + } + + public static unsafe byte[] Decompress(byte[] src) + { + byte[] dst = new byte[(src[src.Length - 4] | (src[src.Length - 3] << 8) | (src[src.Length - 2] << 16 | src[src.Length - 1] << 24))]; + + uint t = 0; + fixed (byte* input = src, output = dst) + { + byte* pos = null; + byte* ip_end = input + src.Length - 4; + byte* op_end = output + dst.Length; + byte* ip = input; + byte* op = output; + bool match = false; + bool match_next = false; + 
bool match_done = false; + bool copy_match = false; + bool first_literal_run = false; + bool eof_found = false; + + if (*ip > 17) + { + t = (uint)(*ip++ - 17); + if (t < 4) + match_next = true; + else + { + //Debug.Assert(t > 0); + if ((op_end - op) < t) + throw new OverflowException("Output Overrun"); + if ((ip_end - ip) < t + 1) + throw new OverflowException("Input Overrun"); + do + { + *op++ = *ip++; + } while (--t > 0); + first_literal_run = true; + } + } + while (!eof_found && ip < ip_end) + { + if (!match_next && !first_literal_run) + { + t = *ip++; + if (t >= 16) + match = true; + else + { + if (t == 0) + { + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + while (*ip == 0) + { + t += 255; + ++ip; + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + } + t += (uint)(15 + *ip++); + } + //Debug.Assert(t > 0); + if ((op_end - op) < t + 3) + throw new OverflowException("Output Overrun"); + if ((ip_end - ip) < t + 4) + throw new OverflowException("Input Overrun"); + for (int x = 0; x < 4; ++x, ++op, ++ip) + *op = *ip; + if (--t > 0) + { + if (t >= 4) + { + do + { + for (int x = 0; x < 4; ++x, ++op, ++ip) + *op = *ip; + t -= 4; + } while (t >= 4); + if (t > 0) + { + do + { + *op++ = *ip++; + } while (--t > 0); + } + } + else + { + do + { + *op++ = *ip++; + } while (--t > 0); + } + } + } + } + if (!match && !match_next) + { + first_literal_run = false; + + t = *ip++; + if (t >= 16) + match = true; + else + { + pos = op - (1 + M2_MAX_OFFSET); + pos -= t >> 2; + pos -= *ip++ << 2; + if (pos < output || pos >= op) + throw new OverflowException("Lookbehind Overrun"); + if ((op_end - op) < 3) + throw new OverflowException("Output Overrun"); + *op++ = *pos++; + *op++ = *pos++; + *op++ = *pos++; + match_done = true; + } + } + match = false; + do + { + if (t >= 64) + { + pos = op - 1; + pos -= (t >> 2) & 7; + pos -= *ip++ << 3; + t = (t >> 5) - 1; + if (pos < output || pos >= op) + throw new OverflowException("Lookbehind 
Overrun"); + if ((op_end - op) < t + 2) + throw new OverflowException("Output Overrun"); + copy_match = true; + } + else if (t >= 32) + { + t &= 31; + if (t == 0) + { + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + while (*ip == 0) + { + t += 255; + ++ip; + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + } + t += (uint)(31 + *ip++); + } + pos = op - 1; + pos -= (*(ushort*)ip) >> 2; + ip += 2; + } + else if (t >= 16) + { + pos = op; + pos -= (t & 8) << 11; + + t &= 7; + if (t == 0) + { + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + while (*ip == 0) + { + t += 255; + ++ip; + if ((ip_end - ip) < 1) + throw new OverflowException("Input Overrun"); + } + t += (uint)(7 + *ip++); + } + pos -= (*(ushort*)ip) >> 2; + ip += 2; + if (pos == op) + eof_found = true; + else + pos -= 0x4000; + } + else + { + pos = op - 1; + pos -= t >> 2; + pos -= *ip++ << 2; + if (pos < output || pos >= op) + throw new OverflowException("Lookbehind Overrun"); + if ((op_end - op) < 2) + throw new OverflowException("Output Overrun"); + *op++ = *pos++; + *op++ = *pos++; + match_done = true; + } + if (!eof_found && !match_done && !copy_match) + { + if (pos < output || pos >= op) + throw new OverflowException("Lookbehind Overrun"); + //Debug.Assert(t > 0); + if ((op_end - op) < t + 2) + throw new OverflowException("Output Overrun"); + } + if (!eof_found && t >= 2 * 4 - 2 && (op - pos) >= 4 && !match_done && !copy_match) + { + for (int x = 0; x < 4; ++x, ++op, ++pos) + *op = *pos; + t -= 2; + do + { + for (int x = 0; x < 4; ++x, ++op, ++pos) + *op = *pos; + t -= 4; + } while (t >= 4); + if (t > 0) + { + do + { + *op++ = *pos++; + } while (--t > 0); + } + } + else if (!eof_found && !match_done) + { + copy_match = false; + + *op++ = *pos++; + *op++ = *pos++; + do + { + *op++ = *pos++; + } while (--t > 0); + } + + if (!eof_found && !match_next) + { + match_done = false; + + t = (uint)(ip[-2] & 3); + if (t == 0) + break; + } + 
if (!eof_found) + { + match_next = false; + //Debug.Assert(t > 0); + //Debug.Assert(t < 4); + if ((op_end - op) < t) + throw new OverflowException("Output Overrun"); + if ((ip_end - ip) < t + 1) + throw new OverflowException("Input Overrun"); + *op++ = *ip++; + if (t > 1) + { + *op++ = *ip++; + if (t > 2) + *op++ = *ip++; + } + t = *ip++; + } + } while (!eof_found && ip < ip_end); + } + if (!eof_found) + throw new OverflowException("EOF Marker Not Found"); + else + { + //Debug.Assert(t == 1); + if (ip > ip_end) + throw new OverflowException("Input Overrun"); + else if (ip < ip_end) + throw new OverflowException("Input Not Consumed"); + } + } + + return dst; + } + + private unsafe static uint D_INDEX1(byte* input) + { + return D_MS(D_MUL(0x21, D_X3(input, 5, 5, 6)) >> 5, 0); + } + + private static uint D_INDEX2(uint idx) + { + return (idx & (D_MASK & 0x7FF)) ^ (((D_MASK >> 1) + 1) | 0x1F); + } + + private static uint D_MS(uint v, byte s) + { + return (v & (D_MASK >> s)) << s; + } + + private static uint D_MUL(uint a, uint b) + { + return a * b; + } + + private unsafe static uint D_X2(byte* input, byte s1, byte s2) + { + return (uint)((((input[2] << s2) ^ input[1]) << s1) ^ input[0]); + } + + private unsafe static uint D_X3(byte* input, byte s1, byte s2, byte s3) + { + return (D_X2(input + 1, s2, s3) << s1) ^ input[0]; + } + } } \ No newline at end of file diff --git a/RaptorDB.Common/MurMurHash2.cs b/RaptorDB.Common/MurMurHash2.cs index 2cce57c..121a5ef 100644 --- a/RaptorDB.Common/MurMurHash2.cs +++ b/RaptorDB.Common/MurMurHash2.cs @@ -1,160 +1,164 @@ -using System; -using System.Collections.Generic; -using System.Text; - -namespace RaptorDB.Common -{ - //internal static class murmur3 - //{ - // private static uint seed = 7878; - - // public static uint MurmurHash3(byte[] data) - // { - // const uint c1 = 0xcc9e2d51; - // const uint c2 = 0x1b873593; - - // int curLength = data.Length; /* Current position in byte array */ - // int length = curLength; /* the const 
length we need to fix tail */ - // uint h1 = seed; - // uint k1 = 0; - - // /* body, eat stream a 32-bit int at a time */ - // int currentIndex = 0; - // while (curLength >= 4) - // { - // /* Get four bytes from the input into an UInt32 */ - // k1 = (uint)(data[currentIndex++] - // | data[currentIndex++] << 8 - // | data[currentIndex++] << 16 - // | data[currentIndex++] << 24); - - // /* bitmagic hash */ - // k1 *= c1; - // k1 = rotl32(k1, 15); - // k1 *= c2; - - // h1 ^= k1; - // h1 = rotl32(h1, 13); - // h1 = h1 * 5 + 0xe6546b64; - // curLength -= 4; - // } - - // /* tail, the reminder bytes that did not make it to a full int */ - // /* (this switch is slightly more ugly than the C++ implementation - // * because we can't fall through) */ - // switch (curLength) - // { - // case 3: - // k1 = (UInt32)(data[currentIndex++] - // | data[currentIndex++] << 8 - // | data[currentIndex++] << 16); - // k1 *= c1; - // k1 = rotl32(k1, 15); - // k1 *= c2; - // h1 ^= k1; - // break; - // case 2: - // k1 = (UInt32)(data[currentIndex++] - // | data[currentIndex++] << 8); - // k1 *= c1; - // k1 = rotl32(k1, 15); - // k1 *= c2; - // h1 ^= k1; - // break; - // case 1: - // k1 = (UInt32)(data[currentIndex++]); - // k1 *= c1; - // k1 = rotl32(k1, 15); - // k1 *= c2; - // h1 ^= k1; - // break; - // }; - - // // finalization, magic chants to wrap it all up - // h1 ^= (uint)length; - // h1 = fmix(h1); - - // unchecked - // { - // return (uint)h1; - // } - // } - // private static uint rotl32(uint x, byte r) - // { - // return (x << r) | (x >> (32 - r)); - // } - - // private static uint fmix(uint h) - // { - // h ^= h >> 16; - // h *= 0x85ebca6b; - // h ^= h >> 13; - // h *= 0xc2b2ae35; - // h ^= h >> 16; - // return h; - // } - //} - - public class MurmurHash2Unsafe - { - public UInt32 Hash(Byte[] data) - { - return Hash(data, 0xc58f1a7b); - } - const UInt32 m = 0x5bd1e995; - const Int32 r = 24; - - public unsafe UInt32 Hash(Byte[] data, UInt32 seed) - { - Int32 length = data.Length; 
- if (length == 0) - return 0; - UInt32 h = seed ^ (UInt32)length; - Int32 remainingBytes = length & 3; // mod 4 - Int32 numberOfLoops = length >> 2; // div 4 - fixed (byte* firstByte = &(data[0])) - { - UInt32* realData = (UInt32*)firstByte; - while (numberOfLoops != 0) - { - UInt32 k = *realData; - k *= m; - k ^= k >> r; - k *= m; - - h *= m; - h ^= k; - numberOfLoops--; - realData++; - } - switch (remainingBytes) - { - case 3: - h ^= (UInt16)(*realData); - h ^= ((UInt32)(*(((Byte*)(realData)) + 2))) << 16; - h *= m; - break; - case 2: - h ^= (UInt16)(*realData); - h *= m; - break; - case 1: - h ^= *((Byte*)realData); - h *= m; - break; - default: - break; - } - } - - // Do a few final mixes of the hash to ensure the last few - // bytes are well-incorporated. - - h ^= h >> 13; - h *= m; - h ^= h >> 15; - - return h; - } - } -} +using System; +using System.Collections.Generic; +using System.Text; + +namespace RaptorDB.Common +{ + //internal static class murmur3 + //{ + // private static uint seed = 7878; + + // public static uint MurmurHash3(byte[] data) + // { + // const uint c1 = 0xcc9e2d51; + // const uint c2 = 0x1b873593; + + // int curLength = data.Length; /* Current position in byte array */ + // int length = curLength; /* the const length we need to fix tail */ + // uint h1 = seed; + // uint k1 = 0; + + // /* body, eat stream a 32-bit int at a time */ + // int currentIndex = 0; + // while (curLength >= 4) + // { + // /* Get four bytes from the input into an UInt32 */ + // k1 = (uint)(data[currentIndex++] + // | data[currentIndex++] << 8 + // | data[currentIndex++] << 16 + // | data[currentIndex++] << 24); + + // /* bitmagic hash */ + // k1 *= c1; + // k1 = rotl32(k1, 15); + // k1 *= c2; + + // h1 ^= k1; + // h1 = rotl32(h1, 13); + // h1 = h1 * 5 + 0xe6546b64; + // curLength -= 4; + // } + + // /* tail, the reminder bytes that did not make it to a full int */ + // /* (this switch is slightly more ugly than the C++ implementation + // * because we can't fall 
through) */ + // switch (curLength) + // { + // case 3: + // k1 = (UInt32)(data[currentIndex++] + // | data[currentIndex++] << 8 + // | data[currentIndex++] << 16); + // k1 *= c1; + // k1 = rotl32(k1, 15); + // k1 *= c2; + // h1 ^= k1; + // break; + // case 2: + // k1 = (UInt32)(data[currentIndex++] + // | data[currentIndex++] << 8); + // k1 *= c1; + // k1 = rotl32(k1, 15); + // k1 *= c2; + // h1 ^= k1; + // break; + // case 1: + // k1 = (UInt32)(data[currentIndex++]); + // k1 *= c1; + // k1 = rotl32(k1, 15); + // k1 *= c2; + // h1 ^= k1; + // break; + // }; + + // // finalization, magic chants to wrap it all up + // h1 ^= (uint)length; + // h1 = fmix(h1); + + // unchecked + // { + // return (uint)h1; + // } + // } + // private static uint rotl32(uint x, byte r) + // { + // return (x << r) | (x >> (32 - r)); + // } + + // private static uint fmix(uint h) + // { + // h ^= h >> 16; + // h *= 0x85ebca6b; + // h ^= h >> 13; + // h *= 0xc2b2ae35; + // h ^= h >> 16; + // return h; + // } + //} + + public class MurmurHash2Unsafe + { + public uint Hash(byte[] data) + { + return Hash(data, 0xc58f1a7b); + } + + public unsafe uint Hash(byte[] data, uint seed) + { + fixed (byte* firstByte = &data[0]) + { + return Hash(firstByte, data.Length, seed); + } + } + + const uint m = 0x5bd1e995; + const int r = 24; + public unsafe uint Hash(byte* firstByte, int length, uint seed) + { + if (length == 0) + return 0; + uint h = seed ^ (uint)length; + int remainingBytes = length & 3; // mod 4 + int numberOfLoops = length >> 2; // div 4 + uint* realData = (uint*)firstByte; + while (numberOfLoops != 0) + { + uint k = *realData; + k *= m; + k ^= k >> r; + k *= m; + + h *= m; + h ^= k; + numberOfLoops--; + realData++; + } + switch (remainingBytes) + { + case 3: + h ^= (ushort)(*realData); + h ^= ((uint)(*(((byte*)(realData)) + 2))) << 16; + h *= m; + break; + case 2: + h ^= (*(ushort*)realData); + h *= m; + break; + case 1: + h ^= *((byte*)realData); + h *= m; + break; + default: + break; + } 
+ + // Do a few final mixes of the hash to ensure the last few + // bytes are well-incorporated. + + h ^= h >> 13; + h *= m; + h ^= h >> 15; + + return h; + } + } +} diff --git a/RaptorDB.Common/NetworkClient.cs b/RaptorDB.Common/NetworkClient.cs index 05338a7..6ff0d99 100644 --- a/RaptorDB.Common/NetworkClient.cs +++ b/RaptorDB.Common/NetworkClient.cs @@ -1,195 +1,194 @@ -using System; -using System.IO; -using System.Net.Sockets; -using System.Runtime.Serialization.Formatters.Binary; -using System.Threading; - -using System.Collections; -using System.Net; -using System.Threading.Tasks; - -namespace RaptorDB.Common -{ - // - // Header bits format : 0 - json = 1 , bin = 0 - // 1 - binaryjson = 1 , text json = 0 - // 2 - compressed = 1 , uncompressed = 0 - - public class NetworkClient - { - internal static class Config - { - public static int BufferSize = 32 * 1024; - public static int LogDataSizesOver = 1000000; - public static int CompressDataOver = 1000000; - } - - public NetworkClient(string server, int port) - { - _server = server; - _port = port; - } - private TcpClient _client; - private string _server; - private int _port; - - public bool UseBJSON = true; - - public void Connect() - { - _client = new TcpClient(_server, _port); - _client.SendBufferSize = Config.BufferSize; - _client.ReceiveBufferSize = _client.SendBufferSize; - } - - public object Send(object data) - { - CheckConnection(); - - byte[] hdr = new byte[5]; - hdr[0] = (UseBJSON ? 
(byte)3 : (byte)0); - byte[] dat = fastBinaryJSON.BJSON.ToBJSON(data); - byte[] len = Helper.GetBytes(dat.Length, false); - Array.Copy(len, 0, hdr, 1, 4); - _client.Client.Send(hdr); - _client.Client.Send(dat); - - byte[] rechdr = new byte[5]; - using (NetworkStream n = new NetworkStream(_client.Client)) - { - n.Read(rechdr, 0, 5); - int c = Helper.ToInt32(rechdr, 1); - byte[] recd = new byte[c]; - int bytesRead = 0; - int chunksize = 1; - while (bytesRead < c && chunksize > 0) - bytesRead += - chunksize = n.Read - (recd, bytesRead, c - bytesRead); - if ((rechdr[0] & (byte)4) == (byte)4) - recd = MiniLZO.Decompress(recd); - if ((rechdr[0] & (byte)3) == (byte)3) - return fastBinaryJSON.BJSON.ToObject(recd); - } - return null; - } - - private void CheckConnection() - { - // check connected state before sending - - if (_client == null || !_client.Connected) - Connect(); - } - - public void Close() - { - if (_client != null) - _client.Close(); - } - } - - public class NetworkServer - { - public delegate object ProcessPayload(object data); - - private ILog log = RaptorDB.LogManager.GetLogger(typeof(NetworkServer)); - ProcessPayload _handler; - private bool _run = true; - private int count = 0; - private int _port; - - public void Start(int port, ProcessPayload handler) - { - _handler = handler; - _port = port; - ThreadPool.SetMinThreads(50, 50); - System.Timers.Timer t = new System.Timers.Timer(1000); - t.AutoReset = true; - t.Start(); - t.Elapsed += new System.Timers.ElapsedEventHandler(t_Elapsed); - Task.Factory.StartNew(() => Run(), TaskCreationOptions.AttachedToParent); - } - - private void Run() - { - TcpListener listener = new TcpListener(IPAddress.Any, _port); - listener.Start(); - while (_run) - { - try - { - TcpClient c = listener.AcceptTcpClient(); - Task.Factory.StartNew(() => Accept(c)); - } - catch (Exception ex) { log.Error(ex); } - } - } - - void t_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - if (count > 0) - log.Debug("tcp connects/sec 
= " + count); - count = 0; - } - - public void Stop() - { - _run = false; - } - - void Accept(TcpClient client) - { - using (NetworkStream n = client.GetStream()) - { - while (client.Connected) - { - this.count++; - byte[] c = new byte[5]; - n.Read(c, 0, 5); - int count = BitConverter.ToInt32(c, 1); - byte[] data = new byte[count]; - int bytesRead = 0; - int chunksize = 1; - while (bytesRead < count && chunksize > 0) - bytesRead += - chunksize = n.Read - (data, bytesRead, count - bytesRead); - - object o = fastBinaryJSON.BJSON.ToObject(data); - - object r = _handler(o); - bool compressed = false; - data = fastBinaryJSON.BJSON.ToBJSON(r); - if (data.Length > RaptorDB.Common.NetworkClient.Config.CompressDataOver) - { - log.Debug("compressing data over limit : " + data.Length.ToString("#,#")); - compressed = true; - data = MiniLZO.Compress(data); - log.Debug("new size : " + data.Length.ToString("#,#")); - } - if (data.Length > RaptorDB.Common.NetworkClient.Config.LogDataSizesOver) - log.Debug("data size (bytes) = " + data.Length.ToString("#,#")); - - byte[] b = BitConverter.GetBytes(data.Length); - byte[] hdr = new byte[5]; - hdr[0] = (byte)(3 + (compressed ? 
4 : 0)); - Array.Copy(b, 0, hdr, 1, 4); - n.Write(hdr, 0, 5); - n.Write(data, 0, data.Length); - - int wait = 0; - while (n.DataAvailable == false) - { - wait++; - if (wait < 10000) // kludge : for insert performance - Thread.Sleep(0); - else - Thread.Sleep(1); - // FEATURE : if wait > 10 min -> close connection - } - } - } - } - } -} +using System; +using System.IO; +using System.Net.Sockets; +using System.Runtime.Serialization.Formatters.Binary; +using System.Threading; + +using System.Collections; +using System.Net; +using System.Threading.Tasks; + +namespace RaptorDB.Common +{ + // + // Header bits format : 0 - json = 1 , bin = 0 + // 1 - binaryjson = 1 , text json = 0 + // 2 - compressed = 1 , uncompressed = 0 + + public class NetworkClient + { + internal static class Config + { + public static int BufferSize = 32 * 1024; + public static int LogDataSizesOver = 1000000; + public static int CompressDataOver = 1000000; + } + + public NetworkClient(string server, int port) + { + _server = server; + _port = port; + } + private TcpClient _client; + private string _server; + private int _port; + + public bool UseBJSON = true; + + public void Connect() + { + _client = new TcpClient(_server, _port); + _client.SendBufferSize = Config.BufferSize; + _client.ReceiveBufferSize = _client.SendBufferSize; + } + + public object Send(object data) + { + CheckConnection(); + + byte[] hdr = new byte[5]; + hdr[0] = (UseBJSON ? 
(byte)3 : (byte)0); + byte[] dat = fastBinaryJSON.BJSON.ToBJSON(data); + byte[] len = Helper.GetBytes(dat.Length, false); + Array.Copy(len, 0, hdr, 1, 4); + _client.Client.Send(hdr); + _client.Client.Send(dat); + + byte[] rechdr = new byte[5]; + using (NetworkStream n = new NetworkStream(_client.Client)) + { + n.Read(rechdr, 0, 5); + int c = Helper.ToInt32(rechdr, 1); + byte[] recd = new byte[c]; + int bytesRead = 0; + int chunksize = 1; + while (bytesRead < c && chunksize > 0) + bytesRead += + chunksize = n.Read + (recd, bytesRead, c - bytesRead); + if ((rechdr[0] & (byte)4) == (byte)4) + recd = MiniLZO.Decompress(recd); + if ((rechdr[0] & (byte)3) == (byte)3) + return fastBinaryJSON.BJSON.ToObject(recd); + } + return null; + } + + private void CheckConnection() + { + // check connected state before sending + if (_client == null || !_client.Connected) + Connect(); + } + + public void Close() + { + if (_client != null) + _client.Close(); + } + } + + public class NetworkServer + { + public delegate object ProcessPayload(object data); + + private ILog log = RaptorDB.LogManager.GetLogger(typeof(NetworkServer)); + ProcessPayload _handler; + private bool _run = true; + private int count = 0; + private int _port; + + public void Start(int port, ProcessPayload handler) + { + _handler = handler; + _port = port; + ThreadPool.SetMinThreads(50, 50); + System.Timers.Timer t = new System.Timers.Timer(1000); + t.AutoReset = true; + t.Start(); + t.Elapsed += new System.Timers.ElapsedEventHandler(t_Elapsed); + Task.Factory.StartNew(() => Run(), TaskCreationOptions.AttachedToParent); + } + + private void Run() + { + TcpListener listener = new TcpListener(IPAddress.Any, _port); + listener.Start(); + while (_run) + { + try + { + TcpClient c = listener.AcceptTcpClient(); + Task.Factory.StartNew(() => Accept(c)); + } + catch (Exception ex) { log.Error(ex); } + } + } + + void t_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + if (count > 0) + log.Debug("tcp connects/sec = 
" + count); + count = 0; + } + + public void Stop() + { + _run = false; + } + + void Accept(TcpClient client) + { + using (NetworkStream n = client.GetStream()) + { + while (client.Connected) + { + this.count++; + byte[] c = new byte[5]; + n.Read(c, 0, 5); + int count = BitConverter.ToInt32(c, 1); + byte[] data = new byte[count]; + int bytesRead = 0; + int chunksize = 1; + while (bytesRead < count && chunksize > 0) + bytesRead += + chunksize = n.Read + (data, bytesRead, count - bytesRead); + + object o = fastBinaryJSON.BJSON.ToObject(data); + + object r = _handler(o); + bool compressed = false; + data = fastBinaryJSON.BJSON.ToBJSON(r); + if (data.Length > RaptorDB.Common.NetworkClient.Config.CompressDataOver) + { + log.Debug("compressing data over limit : " + data.Length.ToString("#,#")); + compressed = true; + data = MiniLZO.Compress(data); + log.Debug("new size : " + data.Length.ToString("#,#")); + } + if (data.Length > RaptorDB.Common.NetworkClient.Config.LogDataSizesOver) + log.Debug("data size (bytes) = " + data.Length.ToString("#,#")); + + byte[] b = BitConverter.GetBytes(data.Length); + byte[] hdr = new byte[5]; + hdr[0] = (byte)(3 + (compressed ? 
4 : 0)); + Array.Copy(b, 0, hdr, 1, 4); + n.Write(hdr, 0, 5); + n.Write(data, 0, data.Length); + + int wait = 0; + while (n.DataAvailable == false) + { + wait++; + if (wait < 10000) // kludge : for insert performance + Thread.Sleep(0); + else + Thread.Sleep(1); + // FEATURE : if wait > 10 min -> close connection + } + } + } + } + } +} diff --git a/RaptorDB.Common/Packets.cs b/RaptorDB.Common/Packets.cs index cbd5bee..4411b48 100644 --- a/RaptorDB.Common/Packets.cs +++ b/RaptorDB.Common/Packets.cs @@ -1,48 +1,87 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace RaptorDB.Common -{ - public class Packet - { - public Packet() - { - OrderBy = ""; - } - public string Username { get; set; } - public string PasswordHash { get; set; } - //public int Token { get; set; } - //public int Session { get; set; } - public string Command { get; set; } - public object Data { get; set; } - public Guid Docid { get; set; } - public string Viewname { get; set; } - public int Start { get; set; } - public int Count { get; set; } - public string OrderBy { get; set; } - } - - public class ReturnPacket - { - public ReturnPacket() - { - - } - public ReturnPacket(bool ok) - { - OK = ok; - } - public ReturnPacket(bool ok, string err) - { - OK = ok; - Error = err; - } - public string Error { get; set; } - public bool OK { get; set; } - //public int Token { get; set; } - //public int Session { get; set; } - public object Data { get; set; } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Common +{ + public class Packet + { + public Packet() + { + OrderBy = ""; + } + public string Username { get; set; } + public string PasswordHash { get; set; } + //public int Token { get; set; } + //public int Session { get; set; } + public PacketCommand Command { get; set; } + public object Data { get; set; } + public Guid Docid { get; set; } + public string Viewname { get; set; } + public 
int Start { get; set; } + public int Count { get; set; } + public string OrderBy { get; set; } + } + + public class ReturnPacket + { + public ReturnPacket() + { + + } + public ReturnPacket(bool ok) + { + OK = ok; + } + public ReturnPacket(bool ok, string err) + { + OK = ok; + Error = err; + } + public string Error { get; set; } + public bool OK { get; set; } + //public int Token { get; set; } + //public int Session { get; set; } + public object Data { get; set; } + } + + public enum PacketCommand: int + { + Save, + SaveBytes, + QueryType, + QueryStr, + Fetch, + FetchBytes, + Backup, + Delete, + DeleteBytes, + Restore, + AddUser, + ServerSide, + FullText, + CountType, + CountStr, + GCount, + DocHistory, + FileHistory, + FetchVersion, + FetchFileVersion, + CheckAssembly, + FetchHistoryInfo, + FetchByteHistoryInfo, + ViewDelete, + ViewDelete_t, + ViewInsert, + ViewInsert_t, + DocCount, + GetObjectHF, + SetObjectHF, + DeleteKeyHF, + CountHF, + ContainsHF, + GetKeysHF, + CompactStorageHF + } +} diff --git a/RaptorDB.Common/PageHashTable.cs b/RaptorDB.Common/PageHashTable.cs new file mode 100644 index 0000000..eda0e1d --- /dev/null +++ b/RaptorDB.Common/PageHashTable.cs @@ -0,0 +1,1001 @@ +using GenericPointerHelpers; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection.Emit; +using System.Runtime.InteropServices; +using System.Text; +using static RaptorDB.Common.PageHashTableHelper; +using System.Collections; +using System.Threading; +using System.Runtime.CompilerServices; +using System.Diagnostics; + +namespace RaptorDB.Common +{ + public unsafe class PageHashTableBase : IDisposable, IEnumerable> + { + + public readonly long Capacity; + public readonly int KeySize; + public readonly IPageSerializer KeySerializer; + public readonly int ValueSize; + public readonly IPageSerializer ValueSerializer; + public readonly int EntrySize; + public readonly int ClusterSize; + public readonly uint Seed = 
0xc58f1a7b; + public readonly byte* StartPointer; + public readonly bool AllowDuplicates; + public readonly ReaderWriterLockSlim rwlock = new ReaderWriterLockSlim(); + protected readonly bool dealloc; + protected int count = 0; + // protected int uniqCount = 0; + + // sets are not thread safe, so one buffer is enough + protected byte[] setKeyBuffer; + + protected SimpleBufferManager readKeyBufferManager; + + + public PageHashTableBase(long capacity, + IPageSerializer keySerializer, + IPageSerializer valueSerializer, + byte* startPointer, + int clusterSize, + bool allowDups, + int count = 0) + { + // TODO: smarter capacity to be sure that nothing wrong will happen + + if (capacity % clusterSize != 0) throw new ArgumentException(string.Format("capacity ({0}) must be divisible by cluster size ({1})", capacity, clusterSize)); + + this.Capacity = capacity; + this.KeySerializer = keySerializer; + this.ValueSerializer = valueSerializer; + + KeySize = keySerializer == null ? GenericPointerHelper.SizeOf() : keySerializer.Size; + ValueSize = valueSerializer == null ? 
GenericPointerHelper.SizeOf() : valueSerializer.Size; + EntrySize = KeySize + ValueSize + 1; + ClusterSize = clusterSize; + AllowDuplicates = allowDups; + + if (startPointer == null) + { + StartPointer = (byte*)Marshal.AllocHGlobal(new IntPtr(capacity * EntrySize)).ToPointer(); + dealloc = true; + } + else + { + StartPointer = startPointer; + GC.SuppressFinalize(this); + } + + this.count = count; + + setKeyBuffer = new byte[KeySize]; + readKeyBufferManager = new SimpleBufferManager(KeySize); + } + + public long FindEntry(byte[] key, bool stopOnDeleted) + { + if (key.Length != KeySize) throw new ArgumentException("wrong key length"); + fixed (byte* keyPtr = key) + { + var hash = Helper.MurMur.Hash(keyPtr, KeySize, Seed); + byte* resultPtr; + return FindEntry(keyPtr, hash, stopOnDeleted, out resultPtr); + } + } + + protected void WriteKey(byte* pointer, TKey value) + { + if (KeySerializer != null) + KeySerializer.Save(pointer, value); + else GenericPointerHelper.Write(pointer, value); + } + + protected TValue ReadValue(byte* pointer) + { + if (ValueSerializer != null) + return ValueSerializer.Read(pointer); + else return GenericPointerHelper.Read(pointer); + } + + protected TKey ReadKey(byte* pointer) + { + if (KeySerializer != null) + return KeySerializer.Read(pointer); + else return GenericPointerHelper.Read(pointer); + } + + protected long FindEntry(byte* key, uint hash, bool insert, out byte* pointer) + { + // on insert: stop on first entry without value + // on lookup: stop on first entry without continuation + var stopMap = insert ? 1u : 3u; + // on insert & AllowDuplicates: return only on the last value + var returnMap = (insert && AllowDuplicates) ? 
0u : 2u; + var hashMap = ((hash & 0xfc) | 1 | returnMap); + long hashIndex = ((int)hash & 0x7fffffff) % Capacity; + var clusterOffset = hashIndex % ClusterSize; + var clusterIndex = hashIndex / ClusterSize; + int diffPlus = -1; + do + { + pointer = StartPointer + (EntrySize * hashIndex); + for (int i = 0; i < ClusterSize; i++) + { + if (hashIndex + i == Capacity) pointer = StartPointer; + uint flags = *pointer; + // flags: + // [0] (1b): value + // [1] (1b): continue search => empty = 00, (last) value = 01, deleted = 10, value (and not last) = 11 + // [2] (6b): first 6 hash bits + if ((flags & stopMap) == 0) + { + return -(((hashIndex + i) % Capacity) + 1); + } + if ((flags | returnMap) == hashMap && Helper.Cmp(pointer + 1, key, KeySize)) + { + return hashIndex + i % Capacity; + } + pointer += EntrySize; + } + if (diffPlus == -1) + diffPlus = (int)((hash * 41) % Capacity) | 1; + clusterIndex = (clusterIndex + diffPlus) % (Capacity / ClusterSize); + hashIndex = clusterIndex * ClusterSize + clusterOffset; + } + while (true); + } + + protected long FindNextEntry(byte* key, uint hash, ref byte* pointer) + { + byte hashMap = (byte)(hash | 3); + var clusterOffset = ((int)hash & 0x7fffffff) % ClusterSize; + var startingAt = (int)(((pointer - StartPointer) / EntrySize - clusterOffset) % ClusterSize); + var hashIndex = (pointer - StartPointer) / EntrySize - startingAt; + var clusterIndex = hashIndex / ClusterSize; + int diffPlus = -1; + + + startingAt++; + pointer += EntrySize; + do + { + for (var i = startingAt; i < ClusterSize; i++) + { + if (hashIndex + i == Capacity) + pointer = StartPointer; + byte flags = *pointer; + // flags: + // [0] (1b): value + // [1] (1b): continue search => empty = 00, (last) value = 01, deleted = 10, value (and not last) = 11 + // [2] (6b): first 6 hash bits + if (flags == 0) + { + return -(((hashIndex + i) % Capacity) + 1); + } + else if ((flags | 2) == hashMap && Helper.Cmp(pointer + 1, key, KeySize)) + { + return hashIndex + i % Capacity; + 
} + + pointer += EntrySize; + } + if (diffPlus == -1) + diffPlus = (int)((hash * 41) % Capacity) | 1; + clusterIndex = (clusterIndex + diffPlus) % (Capacity / ClusterSize); + hashIndex = clusterIndex * ClusterSize + clusterOffset; + pointer = StartPointer + (EntrySize * hashIndex); + startingAt = 0; + } + while (true); + } + + protected long FindEntry(byte* key, uint hash, TValue value, out byte* pointer) + { + // on insert & AllowDuplicates: return only on the last value + var hashMap = ((hash & 0xfc) | 3); + long hashIndex = ((int)hash & 0x7fffffff) % Capacity; + var clusterOffset = hashIndex % ClusterSize; + var clusterIndex = hashIndex / ClusterSize; + int diffPlus = -1; + do + { + pointer = StartPointer + (EntrySize * hashIndex); + for (int i = 0; i < ClusterSize; i++) + { + if (hashIndex + i == Capacity) pointer = StartPointer; + uint flags = *pointer; + // flags: + // [0] (1b): value + // [1] (1b): continue search => empty = 00, (last) value = 01, deleted = 10, value (and not last) = 11 + // [2] (6b): first 6 hash bits + if ((flags & 3u) == 0) + { + return -(((hashIndex + i) % Capacity) + 1); + } + if ((flags | 2u) == hashMap && Helper.Cmp(pointer + 1, key, KeySize)) + { + TValue val; + if (ValueSerializer != null) val = ValueSerializer.Read(pointer + KeySize + 1); + else val = GenericPointerHelper.Read(pointer); + if (value.Equals(val)) + return hashIndex + i % Capacity; + } + pointer += EntrySize; + } + if (diffPlus == -1) + diffPlus = (int)((hash * 41) % Capacity) | 1; + clusterIndex = (clusterIndex + diffPlus) % (Capacity / ClusterSize); + hashIndex = clusterIndex * ClusterSize + clusterOffset; + } + while (true); + } + + protected void SetEntry(byte* ptr, uint keyHash, byte* key, TValue value) + { + *ptr = (byte)((keyHash & ~3) | 1); + ptr++; + GenericPointerHelper.CopyBytes(key, ptr, (uint)KeySize); + if (ValueSerializer != null) ValueSerializer.Save(ptr + KeySize, value); + else GenericPointerHelper.Write(ptr + KeySize, value); + } + + public TValue 
FirstOrDefault(TKey key) + { + TValue value; + TryGetValue(key, out value); + return value; + } + + public bool TryGetValue(TKey key, out TValue value) + { + var buffer = readKeyBufferManager.GetBuffer(); + fixed (byte* kptr = buffer) + { + WriteKey(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + bool ret; + if (ret = FindEntry(kptr, hash, false, out pointer) >= 0) + { + Debug.Assert(ReadKey(pointer + 1).Equals(key), "found entry key is not equal to the searched one"); + value = ReadValue(pointer + 1 + KeySize); + } + else + { + value = default(TValue); + } + readKeyBufferManager.ReleaseBuffer(buffer); + return ret; + } + } + + /// + /// Sets the value + /// + /// if something was replaced + public bool Set(TKey key, TValue value, bool allowOverwrite = true) + { + fixed (byte* kptr = setKeyBuffer) + { + if (KeySerializer != null) + KeySerializer.Save(kptr, key); + else GenericPointerHelper.Write(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + var index = FindEntry(kptr, hash, true, out pointer); + + Debug.Assert(index < 0 || ReadKey(pointer + 1).Equals(key)); + + if (index < 0) + { + count++; + } + else if (AllowDuplicates) + { + Debug.Assert((*pointer & 2) == 0, "FindEntry should find last value in chain BUT it has continue flag"); + // mark as with value and continue + *pointer |= 2; + index = FindNextEntry(kptr, hash, ref pointer); + Debug.Assert(index < 0, "FindEntry should find last value in chain BUT the next one exist"); + count++; + } + else if (!allowOverwrite) throw new ArgumentException("An item with the same key already exists"); + + SetEntry(pointer, hash, kptr, value); + + // replaced if FindEntry returned positive index + return index >= 0; + } + } + + private void Remove(byte* ptr) + { + *ptr = 2; + GenericPointerHelper.InitMemory(ptr + 1, (uint)(KeySize + ValueSize), 0); + } + + void ReadEntry(byte* ptr, out TKey key, out TValue value) + { + if (KeySerializer != null) + key = 
KeySerializer.Read(ptr + 1); + else key = GenericPointerHelper.Read(ptr + 1); + + if (ValueSerializer != null) + value = ValueSerializer.Read(ptr + 1 + KeySize); + else value = GenericPointerHelper.Read(ptr + 1 + KeySize); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (dealloc) + Marshal.FreeHGlobal(new IntPtr(StartPointer)); + } + + public int CopyTo(TKey[] keys, TValue[] values) + { + var ptr = StartPointer; + int i = 0; + for (int hIndex = 0; hIndex < Capacity & (keys != null | values != null); hIndex++) + { + if ((*ptr & 1) == 1) + { + TKey key; + TValue value; + ReadEntry(ptr, out key, out value); + if (values != null) + { + values[i] = value; + if (values.Length == i + 1) values = null; + } + if (keys != null) + { + keys[i] = key; + if (keys.Length == i + 1) keys = null; + } + i++; + } + ptr++; + } + return i; + } + + public ICollection Keys + { + get + { + var keys = new TKey[count]; + CopyTo(keys, null); + return keys; + } + } + + public ICollection Values + { + get + { + var values = new TValue[count]; + CopyTo(null, values); + return values; + } + } + + public int Count + { + get + { + return count; + } + } + + public bool IsReadOnly + { + get + { + return false; + } + } + + public bool Contains(TKey key) + { + var buffer = readKeyBufferManager.GetBuffer(); + fixed (byte* kptr = buffer) + { + if (KeySerializer != null) + KeySerializer.Save(kptr, key); + else + GenericPointerHelper.Write(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + var index = FindEntry(kptr, hash, false, out pointer); + readKeyBufferManager.ReleaseBuffer(buffer); + return index >= 0; + } + } + + public void Add(TKey key, TValue value) + { + Set(key, value, false); + } + + public bool RemoveFirst(TKey key) + { + fixed (byte* kptr = setKeyBuffer) + { + if (KeySerializer != null) KeySerializer.Save(kptr, key); + else GenericPointerHelper.Write(kptr, key); 
+ var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + if (FindEntry(kptr, hash, false, out pointer) >= 0) + { + Debug.Assert(ReadKey(pointer + 1).Equals(key), "found entry key is not equal to the searched one"); + Remove(pointer); + count--; + return true; + } + } + return false; + } + + public int RemoveAll(TKey key) + { + fixed (byte* kptr = setKeyBuffer) + { + if (KeySerializer != null) KeySerializer.Save(kptr, key); + else GenericPointerHelper.Write(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + + byte* pointer; + if (FindEntry(kptr, hash, true, out pointer) >= 0) + { + byte flags = *pointer; + Debug.Assert(ReadKey(pointer + 1).Equals(key), "found entry key is not equal to the searched one"); + Debug.Assert(flags != 0); + Remove(pointer); + var c = 1; + while ((flags & 2) != 0) + { + var index = FindNextEntry(kptr, hash, ref pointer); + Debug.Assert(index >= 0); + Debug.Assert(ReadKey(pointer + 1).Equals(key), "found entry key is not equal to the searched one"); + flags = *pointer; + Remove(pointer); + c++; + } + count -= c; + return c; + } + } + return 0; + } + + public bool RemoveFirst(TKey key, TValue value) + { + fixed (byte* kptr = setKeyBuffer) + { + if (KeySerializer != null) KeySerializer.Save(kptr, key); + else GenericPointerHelper.Write(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + if (FindEntry(kptr, hash, value, out pointer) >= 0) + { + Debug.Assert(ReadKey(pointer + 1).Equals(key), "found entry key is not equal to the searched one"); + Debug.Assert(ReadValue(pointer + 1 + KeySize).Equals(value), "found entry value is not equal to the searched one"); + Remove(pointer); + count--; + return true; + } + } + return false; + } + + public void Add(KeyValuePair item) + { + Set(item.Key, item.Value, false); + } + + public void Clear() + { + // TODO: clean only flags ? 
+ GenericPointerHelper.InitMemory(StartPointer, (uint)(Capacity * EntrySize), 0); + count = 0; + } + + public bool Contains(TKey key, TValue value) + { + var buffer = readKeyBufferManager.GetBuffer(); + fixed (byte* kptr = buffer) + { + if (KeySerializer != null) + KeySerializer.Save(kptr, key); + else + GenericPointerHelper.Write(kptr, key); + var hash = Helper.MurMur.Hash(kptr, KeySize, Seed); + byte* pointer; + var index = FindEntry(kptr, hash, value, out pointer); + readKeyBufferManager.ReleaseBuffer(buffer); + return index >= 0; + } + } + + public int CopyTo(KeyValuePair[] array, int arrayIndex) + { + var ptr = StartPointer; + int i = arrayIndex; + for (int hIndex = 0; hIndex < Capacity && i < array.Length; hIndex++) + { + if ((*ptr & 1) == 1) + { + TKey key; + TValue value; + ReadEntry(ptr, out key, out value); + array[i] = new KeyValuePair(key, value); + i++; + } + ptr += EntrySize; + } + return i; + } + + public bool Recount() + { + int count = 0; + byte* pointer = StartPointer; + for (int i = 0; i < Capacity; i++) + { + count += *pointer & 1; + pointer += EntrySize; + } + if (this.count != count) + { + this.count = count; + return false; + } + return true; + } + + public BitArray GetBlockUsageBitmap() + { + var bm = new BitArray(checked((int)Capacity)); + byte* pointer = StartPointer; + for (int i = 0; i < Capacity; i++) + { + bm.Set(i, *pointer != 0); + pointer += EntrySize; + } + return bm; + } + + public IEnumerator> GetEnumerator() + { + var array = new KeyValuePair[count]; + CopyTo(array, 0); + return ((IEnumerable>)array).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + [Flags] + enum EntryFlags : byte + { + Empty = 0, + Value = 1, + ContinueSearch = 2, + ValueAndContinue = 3 + } + + ~PageHashTableBase() + { + Dispose(false); + } + + protected struct SimpleBufferManager + { + // TODO: multiple buffers + public readonly int Length; + byte[] buffer; + + public SimpleBufferManager(int length) + { + 
this.Length = length; + this.buffer = null; + } + + public byte[] GetBuffer() + { + return Interlocked.Exchange(ref buffer, null) ?? new byte[Length]; + } + + public void ReleaseBuffer(byte[] buffer) + { + // set only if null. To not keep new object to next GC generation + Interlocked.CompareExchange(ref this.buffer, buffer, null); + } + } + } + + public unsafe class PageHashTable : PageHashTableBase, IDictionary + { + public PageHashTable(long capacity, + IPageSerializer keySerializer, + IPageSerializer valueSerializer, + byte* startPointer = null, + int clusterSize = 16) + : base(capacity, keySerializer, valueSerializer, startPointer, clusterSize, false) + { } + + public TValue this[TKey key] + { + get + { + TValue value; + if (TryGetValue(key, out value)) + return value; + throw new KeyNotFoundException("key not found"); + } + + set + { + Set(key, value, true); + } + } + + public bool Contains(KeyValuePair item) + { + TValue value; + return TryGetValue(item.Key, out value) && item.Value.Equals(value); + } + + public bool ContainsKey(TKey key) + { + return base.Contains(key); + } + + public bool Remove(TKey key) + { + return RemoveFirst(key); + } + + public bool Remove(KeyValuePair item) + { + if (Contains(item)) + return Remove(item.Key); + return false; + } + + void ICollection>.CopyTo(KeyValuePair[] array, int arrayIndex) + { + this.CopyTo(array, arrayIndex); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } + + public unsafe class PageMultiValueHashTable : PageHashTableBase, ILookup + { + public PageMultiValueHashTable( + long capacity, + IPageSerializer keySerializer, + IPageSerializer valueSerializer, + byte* startPointer = null, + int clusterSize = 32, + int count = 0) + : base(capacity, keySerializer, valueSerializer, startPointer, clusterSize, true, count) + { } + + public IEnumerable this[TKey key] + { + get + { + return new Grouping(key, this); + } + } + + IEnumerator> IEnumerable>.GetEnumerator() + { + // TODO: 
performance :) + var array = new KeyValuePair[count]; + CopyTo(array, 0); + var group = array.GroupBy(a => a.Key, a => a.Value); + return group.GetEnumerator(); + } + + public TValue[] GetValues(TKey key, int maxsize) + { + //var keyBuffer = new byte[KeySize]; + //fixed (byte* kptr = keyBuffer) + //{ + // if (KeySerializer == null) GenericPointerHelper.Write(kptr, key); + // else KeySerializer.Save(kptr, key); + + //} + return this[key].ToArray(); + } + + struct KeyEnumerator : IEnumerator + { + private PageMultiValueHashTable hashtable; + private byte* pointer; + private byte[] key; + private uint keyHash; + + public TValue Current { get; private set; } + + object IEnumerator.Current + { + get + { + return Current; + } + } + + public void Dispose() + { + key = null; + hashtable = null; + } + + public bool MoveNext() + { + if (pointer == null) + { + Reset(); + return pointer != null; + } + long index; + fixed (byte* kptr = key) + { + index = hashtable.FindNextEntry(kptr, keyHash, ref pointer); + } + + if (index >= 0) + { + if (hashtable.ValueSerializer != null) Current = hashtable.ValueSerializer.Read(pointer + 1 + hashtable.KeySize); + else Current = GenericPointerHelper.Read(pointer + 1 + hashtable.KeySize); + return true; + } + return false; + } + + public void Reset() + { + fixed (byte* kptr = key) + { + hashtable.FindEntry(kptr, keyHash, false, out pointer); + Current = hashtable.ReadValue(pointer + 1 + hashtable.KeySize); + } + } + + public KeyEnumerator(PageMultiValueHashTable hashtable, byte[] key, uint keyHash) + { + this.hashtable = hashtable; + this.key = key; + this.keyHash = keyHash; + this.pointer = null; + this.Current = default(TValue); + } + } + // TODO: measure struct performance + struct Grouping : IGrouping + { + private PageMultiValueHashTable hashtable; + public TKey Key { get; } + + public Grouping(TKey key, PageMultiValueHashTable hashtable) + { + this.Key = key; + this.hashtable = hashtable; + } + + public IEnumerator GetEnumerator() + { + 
// TODO: cache buffer + var keyBuffer = new byte[hashtable.KeySize]; + uint hash; + fixed (byte* kptr = keyBuffer) + { + hashtable.WriteKey(kptr, Key); + hash = Helper.MurMur.Hash(kptr, hashtable.KeySize, hashtable.Seed); + } + return new KeyEnumerator(hashtable, keyBuffer, hash); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } + } + public unsafe interface IPageSerializer + { + int Size { get; } + T Read(byte* ptr); + void Save(byte* ptr, T value); + } + public unsafe static class PageHashTableHelper + { + public unsafe class StructPageSerializer : IPageSerializer where T : struct + { + private readonly int size = GenericPointerHelper.SizeOf(); + public int Size { get { return size; } } + + public T Read(byte* ptr) + { + return GenericPointerHelper.Read(ptr); + } + + public void Save(byte* ptr, T value) + { + GenericPointerHelper.Write(ptr, value); + //Marshal.StructureToPtr(value, new IntPtr(ptr), false); + } + } + + public unsafe class StringPageSerializer : IPageSerializer + { + public StringPageSerializer(int size, Encoding encoding = null) + { + this.size = size; + this.StringEncoding = encoding ?? 
Encoding.UTF8; + } + private readonly int size; + public int Size { get { return size; } } + public Encoding StringEncoding; + + public string Read(byte* ptr) + { + int length = 0; + while (length < size) + { + if (*(ptr + length) == 0) break; + length++; + } + return new string((sbyte*)ptr, 0, length, StringEncoding); + } + + public void Save(byte* ptr, string value) + { + fixed (char* strptr = value) + { + var count = StringEncoding.GetBytes(strptr, value.Length, ptr, Size); + if (count < size) *(ptr + count) = 0; + } + } + } + + + public unsafe class UnicodeStringPageSerializer : IPageSerializer + { + public UnicodeStringPageSerializer(int size) + { + this.size = size; + } + private readonly int size; + public int Size { get { return size * 2; } } + public Encoding StringEncoding; + + public string Read(byte* ptr) + { + return new string((sbyte*)ptr, 0, size, StringEncoding); + } + + public void Save(byte* ptr, string value) + { + char* ptrchar = (char*)ptr; + fixed (char* strptrFix = value) + { + char* strptr = strptrFix; + for (int i = 0; i < size; i++) + { + *ptrchar++ = *strptr++; + } + } + } + } + + public static PageHashTable CreateStringKey(int count, int stringLength, byte* startPointer = null) + where TStructValue : struct + { + return new PageHashTable(count, + new StringPageSerializer(stringLength), + null, + startPointer); + } + + public static PageHashTable CreateStructStruct(int count, byte* startPointer = null) + where TValue : struct + where TKey : struct + { + return new PageHashTable(count, + //new StructPageSerializer(), new StructPageSerializer(), + null, null, + startPointer); + } + + public static PageHashTable CreateStringString(int count, int keyLength, int valueLength, byte* startPointer = null) + where TStructValue : struct + { + return new PageHashTable(count, + new StringPageSerializer(keyLength), + new StringPageSerializer(valueLength), + startPointer); + } + + public static PageMultiValueHashTable CreateStructStructMulti(int count, 
byte* startPointer = null) + where TValue : struct + where TKey : struct + { + return new PageMultiValueHashTable(count, + //new StructPageSerializer(), new StructPageSerializer(), + null, null, + startPointer); + } + + public static int GetEntrySize(IPageSerializer key, IPageSerializer value) + { + return 1 + ((key == null) ? GenericPointerHelper.SizeOf() : key.Size) + + ((value == null) ? GenericPointerHelper.SizeOf() : value.Size); + } + + //public static class TypeHelper + //{ + // private static Func CreateSizeOfFunction() + // { + // var method = new DynamicMethod("_", typeof(int), new Type[0]); + // var il = method.GetILGenerator(); + // il.Emit(OpCodes.Sizeof, typeof(T)); + // il.Emit(OpCodes.Ret); + // return (Func)method.CreateDelegate(typeof(Func)); + // } + + // private static BinaryWriter CreateWriter() + // { + // var method = new DynamicMethod("_", typeof(void), new Type[] { typeof(byte*), typeof(T) }); + // var il = method.GetILGenerator(); + // il.Emit(OpCodes.Nop); + // il.Emit(OpCodes.Ldarg_0); + // il.Emit(OpCodes.Ldarg_1); + // if(typeof(T) == typeof(int)) + // il.Emit(OpCodes.Stind_I4); + // else il.Emit(OpCodes.Stobj, typeof(T)); + // il.Emit(OpCodes.Ret); + // return (BinaryWriter)method.CreateDelegate(typeof(BinaryWriter<>).MakeGenericType(typeof(T))); + // } + + // private static BinaryReader CreateReader() + // { + // var method = new DynamicMethod("_", typeof(T), new Type[] { typeof(byte*)}); + // var il = method.GetILGenerator(); + // il.Emit(OpCodes.Ldarg_0); + // if (typeof(T) == typeof(int)) + // il.Emit(OpCodes.Ldind_I4); + // else il.Emit(OpCodes.Ldobj, typeof(T)); + // il.Emit(OpCodes.Ret); + // return (BinaryReader)method.CreateDelegate(typeof(BinaryReader)); + // } + + // public static readonly int SizeOf = CreateSizeOfFunction()(); + // public static readonly int MarshalSizeOf = Marshal.SizeOf(typeof(T)); + // public static readonly BinaryWriter Writer = CreateWriter(); + // public static readonly BinaryReader Reader = 
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;

namespace RaptorDB.Common
{
    /// <summary>
    /// Extension methods that expose a <see cref="ReaderWriterLockSlim"/> as
    /// IDisposable scopes, so callers can write
    /// <c>using (rwl.Reading()) { ... }</c> instead of explicit try/finally pairs.
    /// </summary>
    public static class RWLockExtensions
    {
        /// <summary>Scope that holds a read lock until disposed.</summary>
        class ReadingDisposableLock : IDisposable
        {
            private readonly ReaderWriterLockSlim rwl;
            // Dispose must be safe to call more than once (CA1063);
            // without this guard a second Dispose calls ExitReadLock while
            // the lock is no longer held and throws SynchronizationLockException.
            private bool disposed;

            public ReadingDisposableLock(ReaderWriterLockSlim rwl)
            {
                this.rwl = rwl;
                this.rwl.EnterReadLock();
            }

            public void Dispose()
            {
                if (disposed) return;
                disposed = true;
                rwl.ExitReadLock();
            }
        }

        /// <summary>Scope that holds the write lock until disposed.</summary>
        class WritingDisposableLock : IDisposable
        {
            private readonly ReaderWriterLockSlim rwl;
            private bool disposed; // idempotent-Dispose guard, see ReadingDisposableLock

            public WritingDisposableLock(ReaderWriterLockSlim rwl)
            {
                this.rwl = rwl;
                this.rwl.EnterWriteLock();
            }

            public void Dispose()
            {
                if (disposed) return;
                disposed = true;
                rwl.ExitWriteLock();
            }
        }

        /// <summary>Scope that holds an upgradeable read lock until disposed.</summary>
        class UpgradeableReadingDisposableLock : IDisposable
        {
            private readonly ReaderWriterLockSlim rwl;
            private bool disposed; // idempotent-Dispose guard, see ReadingDisposableLock

            public UpgradeableReadingDisposableLock(ReaderWriterLockSlim rwl)
            {
                this.rwl = rwl;
                this.rwl.EnterUpgradeableReadLock();
            }

            public void Dispose()
            {
                if (disposed) return;
                disposed = true;
                rwl.ExitUpgradeableReadLock();
            }
        }

        /// <summary>Enters the read lock; disposing the returned scope exits it.</summary>
        public static IDisposable Reading(this ReaderWriterLockSlim rwl)
        {
            return new ReadingDisposableLock(rwl);
        }

        /// <summary>Enters the upgradeable read lock; disposing the returned scope exits it.</summary>
        public static IDisposable UpgradeableReading(this ReaderWriterLockSlim rwl)
        {
            return new UpgradeableReadingDisposableLock(rwl);
        }

        /// <summary>Enters the write lock; disposing the returned scope exits it.</summary>
        public static IDisposable Writing(this ReaderWriterLockSlim rwl)
        {
            return new WritingDisposableLock(rwl);
        }
    }
}
..\GenericPointerHelpers\GenericPointerHelpers.dll + + + + + + + + BuildVersion.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + md "$(SolutionDir)nuget\net40" +copy "$(TargetPath)" "$(SolutionDir)nuget\net40\$(TargetFileName)" + + \ No newline at end of file diff --git a/RaptorDB.Common/SafeDictionary.cs b/RaptorDB.Common/SafeDictionary.cs index e33c109..c9e8736 100644 --- a/RaptorDB.Common/SafeDictionary.cs +++ b/RaptorDB.Common/SafeDictionary.cs @@ -1,264 +1,329 @@ -using System; -using System.Collections.Generic; -using System.Runtime.InteropServices; -using System.Text; - -namespace RaptorDB.Common -{ - public class SafeDictionary - { - private readonly object _Padlock = new object(); - private readonly Dictionary _Dictionary; - - public SafeDictionary(int capacity) - { - _Dictionary = new Dictionary(capacity); - } - - public SafeDictionary() - { - _Dictionary = new Dictionary(); - } - - public bool TryGetValue(TKey key, out TValue value) - { - lock (_Padlock) - return _Dictionary.TryGetValue(key, out value); - } - - public TValue this[TKey key] - { - get - { - lock (_Padlock) - return _Dictionary[key]; - } - set - { - lock (_Padlock) - _Dictionary[key] = value; - } - } - - public int Count - { - get { lock (_Padlock) return _Dictionary.Count; } - } - - public ICollection> GetList() - { - return (ICollection>)_Dictionary; - } - - public IEnumerator> GetEnumerator() - { - return ((ICollection>)_Dictionary).GetEnumerator(); - } - - public void Add(TKey key, TValue value) - { - lock (_Padlock) - { - if (_Dictionary.ContainsKey(key) == false) - _Dictionary.Add(key, value); - else - _Dictionary[key] = value; - } - } - - public TKey[] Keys() - { - lock (_Padlock) - { - TKey[] keys = new TKey[_Dictionary.Keys.Count]; - _Dictionary.Keys.CopyTo(keys, 0); - return keys; - } - } - - public bool Remove(TKey key) - { - lock (_Padlock) - { - return _Dictionary.Remove(key); - } - } - } - - public class SafeSortedList - { - private object _padlock = new 
object(); - SortedList _list = new SortedList(); - - public int Count - { - get { lock(_padlock) return _list.Count; } - } - - public void Add(T key, V val) - { - lock (_padlock) - _list.Add(key, val); - } - - public void Remove(T key) - { - if (key == null) - return; - lock (_padlock) - _list.Remove(key); - } - - public T GetKey(int index) - { - lock (_padlock) return _list.Keys[index]; - } - - public V GetValue(int index) - { - lock (_padlock) return _list.Values[index]; - } - } - - //------------------------------------------------------------------------------------------------------------------ - - public static class FastDateTime - { - public static TimeSpan LocalUtcOffset; - - public static DateTime Now - { - get { return DateTime.UtcNow + LocalUtcOffset; } - } - - static FastDateTime() - { - LocalUtcOffset = TimeZone.CurrentTimeZone.GetUtcOffset(DateTime.Now); - } - } - - - //------------------------------------------------------------------------------------------------------------------ - - public static class Helper - { - public static MurmurHash2Unsafe MurMur = new MurmurHash2Unsafe(); - public static int CompareMemCmp(byte[] left, byte[] right) - { - int c = left.Length; - if (c > right.Length) - c = right.Length; - return memcmp(left, right, c); - } - - [DllImport("msvcrt.dll", CallingConvention = CallingConvention.Cdecl)] - private static extern int memcmp(byte[] arr1, byte[] arr2, int cnt); - - public static int ToInt32(byte[] value, int startIndex, bool reverse) - { - if (reverse) - { - byte[] b = new byte[4]; - Buffer.BlockCopy(value, startIndex, b, 0, 4); - Array.Reverse(b); - return ToInt32(b, 0); - } - - return ToInt32(value, startIndex); - } - - public static unsafe int ToInt32(byte[] value, int startIndex) - { - fixed (byte* numRef = &(value[startIndex])) - { - return *((int*)numRef); - } - } - - public static long ToInt64(byte[] value, int startIndex, bool reverse) - { - if (reverse) - { - byte[] b = new byte[8]; - Buffer.BlockCopy(value, 
startIndex, b, 0, 8); - Array.Reverse(b); - return ToInt64(b, 0); - } - return ToInt64(value, startIndex); - } - - public static unsafe long ToInt64(byte[] value, int startIndex) - { - fixed (byte* numRef = &(value[startIndex])) - { - return *(((long*)numRef)); - } - } - - public static short ToInt16(byte[] value, int startIndex, bool reverse) - { - if (reverse) - { - byte[] b = new byte[2]; - Buffer.BlockCopy(value, startIndex, b, 0, 2); - Array.Reverse(b); - return ToInt16(b, 0); - } - return ToInt16(value, startIndex); - } - - public static unsafe short ToInt16(byte[] value, int startIndex) - { - fixed (byte* numRef = &(value[startIndex])) - { - return *(((short*)numRef)); - } - } - - public static unsafe byte[] GetBytes(long num, bool reverse) - { - byte[] buffer = new byte[8]; - fixed (byte* numRef = buffer) - { - *((long*)numRef) = num; - } - if (reverse) - Array.Reverse(buffer); - return buffer; - } - - public static unsafe byte[] GetBytes(int num, bool reverse) - { - byte[] buffer = new byte[4]; - fixed (byte* numRef = buffer) - { - *((int*)numRef) = num; - } - if (reverse) - Array.Reverse(buffer); - return buffer; - } - - public static unsafe byte[] GetBytes(short num, bool reverse) - { - byte[] buffer = new byte[2]; - fixed (byte* numRef = buffer) - { - *((short*)numRef) = num; - } - if (reverse) - Array.Reverse(buffer); - return buffer; - } - - public static byte[] GetBytes(string s) - { - return Encoding.UTF8.GetBytes(s); - } - - public static string GetString(byte[] buffer, int index, short length) - { - return Encoding.UTF8.GetString(buffer, index, length); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using System.Runtime.InteropServices; +using System.Text; +using System.Linq; + +namespace RaptorDB.Common +{ + public class SafeDictionary + { + private readonly object _Padlock = new object(); + private readonly Dictionary _Dictionary; + + public SafeDictionary(int capacity) + { + _Dictionary = new 
Dictionary(capacity); + } + + public SafeDictionary() + { + _Dictionary = new Dictionary(); + } + + public bool TryGetValue(TKey key, out TValue value) + { + lock (_Padlock) + return _Dictionary.TryGetValue(key, out value); + } + + public TValue this[TKey key] + { + get + { + lock (_Padlock) + return _Dictionary[key]; + } + set + { + lock (_Padlock) + _Dictionary[key] = value; + } + } + + public int Count + { + get { lock (_Padlock) return _Dictionary.Count; } + } + + public ICollection> GetList() + { + return (ICollection>)_Dictionary; + } + + public IEnumerator> GetEnumerator() + { + return ((ICollection>)_Dictionary).GetEnumerator(); + } + + public void Add(TKey key, TValue value) + { + lock (_Padlock) + { + if (_Dictionary.ContainsKey(key) == false) + _Dictionary.Add(key, value); + else + _Dictionary[key] = value; + } + } + + public TKey[] Keys() + { + lock (_Padlock) + { + TKey[] keys = new TKey[_Dictionary.Keys.Count]; + _Dictionary.Keys.CopyTo(keys, 0); + return keys; + } + } + + public bool Remove(TKey key) + { + lock (_Padlock) + { + return _Dictionary.Remove(key); + } + } + } + + public class SafeSortedList + { + private object _padlock = new object(); + SortedList _list = new SortedList(); + + public int Count + { + get { lock (_padlock) return _list.Count; } + } + + public void Add(T key, V val) + { + lock (_padlock) + _list.Add(key, val); + } + + public void Remove(T key) + { + if (key == null) + return; + lock (_padlock) + _list.Remove(key); + } + + public T GetKey(int index) + { + lock (_padlock) return _list.Keys[index]; + } + + public V GetValue(int index) + { + lock (_padlock) return _list.Values[index]; + } + } + + //------------------------------------------------------------------------------------------------------------------ + + public static class FastDateTime + { + public static TimeSpan LocalUtcOffset; + + public static DateTime Now + { + get { return DateTime.UtcNow + LocalUtcOffset; } + } + + static FastDateTime() + { + LocalUtcOffset = 
TimeZone.CurrentTimeZone.GetUtcOffset(DateTime.Now); + } + } + + + //------------------------------------------------------------------------------------------------------------------ + + public static class Helper + { + public static MurmurHash2Unsafe MurMur = new MurmurHash2Unsafe(); + + public static unsafe bool Cmp(byte[] a, byte[] b) + { + if (a.Length != b.Length) + return false; + fixed (byte* aptr = a) + { + fixed (byte* bptr = b) + { + return Cmp(aptr, bptr, a.Length); + } + } + } + + public static unsafe bool Cmp(byte* ptra, byte* ptrb, int len) + { + while (len >= 8) + { + if (*(long*)ptra != *(long*)ptrb) + return false; + ptra += 8; + ptrb += 8; + len -= 8; + } + while (len >= 4) + { + if (*(int*)ptra != *(int*)ptrb) + return false; + ptra += 4; + ptrb += 4; + len -= 4; + } + while (len > 0) + { + if (*ptra != *ptrb) + return false; + ptrb++; + ptra++; + len--; + } + return true; + } + + + public static int Log2(int n) + { + int bits = 0; + if (n > 32767) + { + n >>= 16; + bits += 16; + } + if (n > 127) + { + n >>= 8; + bits += 8; + } + if (n > 7) + { + n >>= 4; + bits += 4; + } + if (n > 1) + { + n >>= 2; + bits += 2; + } + if (n > 0) + { + bits++; + } + return bits; + } + + public static int ToInt32(byte[] value, int startIndex, bool reverse) + { + if (reverse) + { + byte[] b = new byte[4]; + Buffer.BlockCopy(value, startIndex, b, 0, 4); + Array.Reverse(b); + return ToInt32(b, 0); + } + + return ToInt32(value, startIndex); + } + + public static unsafe int ToInt32(byte[] value, int startIndex) + { + fixed (byte* numRef = &(value[startIndex])) + { + return *((int*)numRef); + } + } + + public static long ToInt64(byte[] value, int startIndex, bool reverse) + { + if (reverse) + { + byte[] b = new byte[8]; + Buffer.BlockCopy(value, startIndex, b, 0, 8); + Array.Reverse(b); + return ToInt64(b, 0); + } + return ToInt64(value, startIndex); + } + + public static unsafe long ToInt64(byte[] value, int startIndex) + { + fixed (byte* numRef = &(value[startIndex])) 
+ { + return *(((long*)numRef)); + } + } + + public static short ToInt16(byte[] value, int startIndex, bool reverse) + { + if (reverse) + { + byte[] b = new byte[2]; + Buffer.BlockCopy(value, startIndex, b, 0, 2); + Array.Reverse(b); + return ToInt16(b, 0); + } + return ToInt16(value, startIndex); + } + + public static unsafe short ToInt16(byte[] value, int startIndex) + { + fixed (byte* numRef = &(value[startIndex])) + { + return *(((short*)numRef)); + } + } + + public static unsafe byte[] GetBytes(long num, bool reverse) + { + byte[] buffer = new byte[8]; + fixed (byte* numRef = buffer) + { + *((long*)numRef) = num; + } + if (reverse) + Array.Reverse(buffer); + return buffer; + } + + public static unsafe byte[] GetBytes(int num, bool reverse) + { + byte[] buffer = new byte[4]; + fixed (byte* numRef = buffer) + { + *((int*)numRef) = num; + } + if (reverse) + Array.Reverse(buffer); + return buffer; + } + + public static unsafe byte[] GetBytes(short num, bool reverse) + { + byte[] buffer = new byte[2]; + fixed (byte* numRef = buffer) + { + *((short*)numRef) = num; + } + if (reverse) + Array.Reverse(buffer); + return buffer; + } + + public static byte[] GetBytes(string s) + { + return Encoding.UTF8.GetBytes(s); + } + + public static string GetString(byte[] buffer, int index, int length) + { + return Encoding.UTF8.GetString(buffer, index, length); + } + } +} diff --git a/RaptorDB.Common/View.cs b/RaptorDB.Common/View.cs deleted file mode 100644 index ed40538..0000000 --- a/RaptorDB.Common/View.cs +++ /dev/null @@ -1,129 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Xml.Serialization; - -namespace RaptorDB -{ - public abstract class ViewBase - { - public delegate void MapFunctionDelgate(IMapAPI api, Guid docid, V doc); - /// - /// Increment this when you change view definitions so the engine can rebuild the contents - /// - public int Version { get; set; } - - /// - /// Name of the view will be used for 
foldernames and filename and generated code - /// - public string Name { get; set;} - - /// - /// A text for describing this views purpose for other developers - /// - public string Description { get; set; } - - /// - /// Column definitions for the view storage - /// - public Type Schema { get; set; } - - /// - /// Is this the primary list and will be populated synchronously - /// - public bool isPrimaryList { get; set; } - - /// - /// Is this view active and will recieve data - /// - public bool isActive { get; set; } - - /// - /// Delete items on DocID before inserting new rows (default = true) - /// - public bool DeleteBeforeInsert { get; set; } - - /// - /// Index in the background : better performance but reads might not have all the data - /// - public bool BackgroundIndexing { get; set; } - - /// - /// Save documents to this view in the save process, like primary views - /// - public bool ConsistentSaveToThisView { get; set; } - - /// - /// Apply to a Primary View and all the mappings of all views will be done in a transaction. - /// You can use Rollback for failures. 
- /// - public bool TransactionMode { get; set; } - - /// - /// When defining your own schema and you don't want dependancies to RaptorDB to propogate through your code - /// define your full text columns here - /// - public List FullTextColumns; - - /// - /// When defining your own schems and you don't want dependancies to RaptorDB to propogate through your code - /// define your case insensitive columns here - /// - public List CaseInsensitiveColumns; - - public Dictionary StringIndexLength; - - /// - /// Columns that you don't want to index - /// - public List NoIndexingColumns; - } - - - public class View : ViewBase - { - public View() - { - isActive = true; - DeleteBeforeInsert = true; - BackgroundIndexing = true; - FullTextColumns = new List(); - CaseInsensitiveColumns = new List(); - StringIndexLength = new Dictionary(); - NoIndexingColumns = new List(); - } - - /// - /// Inline delegate for the mapper function used for quick applications - /// - [XmlIgnore] - public MapFunctionDelgate Mapper { get; set; } - - public Result Verify() - { - if (Name == null || Name == "") - throw new Exception("Name must be given"); - if (Schema == null) - throw new Exception("Schema must be defined"); - if (Schema.IsSubclassOf(typeof(RDBSchema)) == false) - { - var pi = Schema.GetProperty("docid"); - if (pi == null || pi.PropertyType != typeof(Guid)) - { - var fi = Schema.GetField("docid"); - if( fi == null || fi.FieldType != typeof(Guid)) - throw new Exception("The schema must be derived from RaptorDB.RDBSchema or must contain a 'docid' Guid field or property"); - } - } - if (Mapper == null) - throw new Exception("A map function must be defined"); - - if (TransactionMode == true && isPrimaryList == false) - throw new Exception("Transaction mode can only be enabled on Primary Views"); - - // FEATURE : add more verifications - return new Result(true); - } - } -} diff --git a/RaptorDB.Common/WAHBitarray2.cs b/RaptorDB.Common/WAHBitarray2.cs new file mode 100644 index 
0000000..cdb9e89 --- /dev/null +++ b/RaptorDB.Common/WAHBitarray2.cs @@ -0,0 +1,757 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Collections; +using System.Runtime.CompilerServices; +using System.Diagnostics; +using System.Linq; +using System.Diagnostics.Contracts; + +namespace RaptorDB.Common +{ + public enum WahBitArrayState : byte + { + Bitarray = 0, + Wah = 1, + Indexes = 2, + Index = 3, + } + public class WahBitArray + { + public const int WahPerformanceRatio = 5; + public const int BitmapPerformanceRatio = 1; + public WahBitArray(int index = -1) + { + state = WahBitArrayState.Index; + singleIndex = index; + } + + public WahBitArray(WahBitArrayState type, uint[] ints) + { + state = type & (WahBitArrayState)4; + def = ((byte)type & 4) != 0; + switch (state) + { + case WahBitArrayState.Wah: + wah = ints; + break; + case WahBitArrayState.Bitarray: + bitmap = ints; + break; + case WahBitArrayState.Indexes: + offsets = new HashSet(ints); + break; + case WahBitArrayState.Index: + this.singleIndex = (int)ints[0]; + break; + } + } + + /// + /// Creates new instance of WahBitArray in 'Indexes' state with specified values + /// + public static WahBitArray FromIndexes(int[] ints, bool def = false) + { + var wah = new WahBitArray(); + wah.def = def; + wah.offsets = new HashSet(); + foreach (var i in ints) wah.offsets.Add((uint)i); + return wah; + } + + private HashSet offsets; + private uint[] wah; + private uint[] bitmap; + private uint currentMax = 0; + private int singleIndex; + private WahBitArrayState state; + private bool def = false; + public bool isDirty = false; + + /// + /// Clones the bitarray + /// + public WahBitArray Copy() + { + if (state == WahBitArrayState.Wah) + { + var c = new uint[wah.Length]; + Array.Copy(wah, c, c.Length); + return new WahBitArray(WahBitArrayState.Wah, c) { def = def }; + } + else if (state == WahBitArrayState.Bitarray) + { + var c = new uint[bitmap.Length]; + Array.Copy(bitmap, c, c.Length); 
+ return new WahBitArray(WahBitArrayState.Bitarray, c) { def = def }; + } + else if (state == WahBitArrayState.Indexes) + { + return new WahBitArray(WahBitArrayState.Indexes, new uint[0]) + { + offsets = new HashSet(offsets), + def = def + }; + } + else if (state == WahBitArrayState.Index) + { + return new WahBitArray(singleIndex) { def = def }; + } + else throw new NotSupportedException("invalid bitarray state"); + } + + public int GetFirstIndex() + { + if (state == WahBitArrayState.Indexes) + { + return (int)offsets.Min(); + } + return GetBitIndexes().First(); + } + + /// + /// Gets bit value at specified index + /// if in state the array is decompressed + /// + public bool Get(int index) + { + if (state == WahBitArrayState.Indexes) + return offsets.Contains((uint)index) != def; + else if (state == WahBitArrayState.Index) return (singleIndex == index) != def; + + DecompressWah(); + + return BitHelper.BitmapGet(bitmap, index) != def; + } + + /// + /// Sets bit value at specified index + /// if in state the array is decompressed and switched to bitmap state + /// + public void Set(int index, bool val) + { + if (state == WahBitArrayState.Indexes) + { + if (val != def) + { + isDirty |= offsets.Add((uint)index); + // set max + if (index > currentMax) + currentMax = (uint)index; + } + else + { + isDirty |= offsets.Remove((uint)index); + } + + ChangeTypeIfNeeded(); + return; + } + if (state == WahBitArrayState.Index) + { + if (index == singleIndex && val == def) + { + singleIndex = -1; + isDirty = true; + } + else if (index != singleIndex && val != def) + { + isDirty = true; + state = WahBitArrayState.Indexes; + offsets = new HashSet() { (uint)index, (uint)singleIndex }; + } + } + if (state != WahBitArrayState.Bitarray) + DecompressWah(); + isDirty = true; + if (index > bitmap.Length * 32) + { + if (val == def) return; + ResizeBitmap(index); + } + BitHelper.BitmapSet(bitmap, index, val); + } + + [Obsolete] + public int Length + { + get + { + if (state == 
WahBitArrayState.Index) + { + return singleIndex + 1; + } + else if (state == WahBitArrayState.Indexes) + { + return (int)currentMax; + } + DecompressWah(); + return bitmap.Length / 32; + } + } + + #region bit operations + + [Pure] + public static uint[] CloneArray(uint[] arr) + { + var na = new uint[arr.Length]; + Buffer.BlockCopy(arr, 0, na, 0, arr.Length * 4); + return na; + } + + public static WahBitArray GenericBitOp(WahBitArray a, WahBitArray b, + Func op, + Action bitBitOp, + Action bitWahOp = null, + Func wahWahOp = null, + Action> bitIndexOp = null, + Action, HashSet> indexIndexOp = null, + Func, HashSet, HashSet> indexIndexImmutableOp = null, + bool inPlace = false) + { + var sta = a.state; + var stb = b.state; + uint[] result = null; + WahBitArrayState resultSt = WahBitArrayState.Bitarray; + + if (sta == WahBitArrayState.Wah && stb == WahBitArrayState.Wah && wahWahOp != null) + { + // Wah + Wah + result = wahWahOp(a.wah, b.wah); + resultSt = WahBitArrayState.Wah; + goto Finalize; + } + if (sta == WahBitArrayState.Bitarray && stb == WahBitArrayState.Wah && bitWahOp != null) + { + // Bitamp + Wah + result = inPlace ? a.bitmap : CloneArray(a.bitmap); + bitWahOp(a.bitmap, b.wah); + goto Finalize; + } + if (stb == WahBitArrayState.Bitarray && sta == WahBitArrayState.Wah && bitWahOp != null) + { + // Wah + Bitmap + result = inPlace ? b.bitmap : CloneArray(b.bitmap); + bitWahOp(result, a.wah); + goto Finalize; + } + if (sta == WahBitArrayState.Bitarray && stb == WahBitArrayState.Indexes && bitIndexOp != null) + { + result = inPlace ? a.bitmap : CloneArray(a.bitmap); + bitIndexOp(result, b.offsets); + goto Finalize; + } + if (stb == WahBitArrayState.Bitarray && sta == WahBitArrayState.Indexes && bitIndexOp != null) + { + result = inPlace ? 
b.bitmap : CloneArray(b.bitmap); + bitIndexOp(result, a.offsets); + goto Finalize; + } + if(sta == WahBitArrayState.Indexes && sta == WahBitArrayState.Indexes) + { + if(indexIndexOp != null) + { + resultSt = WahBitArrayState.Indexes; + + } + } + + Finalize: + // T+ + // TODO: implement + throw new NotImplementedException(); + } + + public WahBitArray And(WahBitArray op, bool inPlace = false) + { + return GenericBitOp(this, op, + (a, b) => a & b, + BitHelper.AndArray, + wahWahOp: WahHelper.WahAnd); + // TODO: WAH + } + + public WahBitArray AndNot(WahBitArray op, bool inPlace = false) + { + return GenericBitOp(this, op, + (a, b) => a & !b, + BitHelper.AndNotArray); + } + + public WahBitArray Or(WahBitArray op, bool inPlace = false) + { + return GenericBitOp(this, op, + (a, b) => a | b, + BitHelper.OrArray); + } + + public WahBitArray Xor(WahBitArray op, bool inPlace = false) + { + return GenericBitOp(this, op, + (a, b) => a ^ b, + BitHelper.XorArray); + } + + public WahBitArray Not(bool cloneBitmap = true) + { + if (cloneBitmap) + { + var c = Copy(); + c.def = !def; + return c; + } + else + { + switch (state) + { + case WahBitArrayState.Bitarray: + return new WahBitArray(state, bitmap) { def = !def }; + case WahBitArrayState.Wah: + return new WahBitArray(state, wah) { def = !def }; + case WahBitArrayState.Indexes: + return new WahBitArray() { state = WahBitArrayState.Indexes, offsets = offsets, def = !def }; + case WahBitArrayState.Index: + return new WahBitArray(singleIndex) { def = !def }; + default: + throw new NotSupportedException(); + } + } + } + + #endregion + + /// + /// Counts all ones (!= def) in the bitmap + /// + public long CountOnes() + { + if (state == WahBitArrayState.Index) + { + return singleIndex < 0 ? 
0 : 1; + } + if (state == WahBitArrayState.Indexes) + { + return offsets.Count; + } + if (state == WahBitArrayState.Wah) + { + int c = 0; + foreach (var i in wah) + c += WahHelper.BitCount(i); + return c; + } + if (state == WahBitArrayState.Bitarray) + { + long c = 0; + foreach (uint i in bitmap) + c += BitHelper.BitCount(i); + return c; + } + throw new NotSupportedException(); + } + + /// + /// If the state is the bitmap is compressed + /// + public void CompressBitmap() + { + if (state == WahBitArrayState.Bitarray) + { + if (bitmap != null) + { + wah = Compress(bitmap); + bitmap = null; + state = WahBitArrayState.Wah; + } + } + } + + /// + /// Decompress wah bitmap and assigns it to 'bitmap' + /// State is switched to bitmap only if 'bitmap.Length * bitmapRatio < wah.Length * wahRatio' + /// + /// if the state was switched + public bool CompressAndSwitchBetterState(int wahRatio = WahPerformanceRatio, int bitmapRatio = BitmapPerformanceRatio) + { + if (state != WahBitArrayState.Bitarray) throw new InvalidOperationException("operation valid only in Bitmap state"); + + bitmap = Uncompress(wah); + if (bitmap.Length * bitmapRatio < wah.Length * wahRatio) + { + state = WahBitArrayState.Bitarray; + return true; + } + return false; + } + + /// + /// Gets compressed array using the best method + /// + public uint[] GetCompressed(out WahBitArrayState type) + { + type = WahBitArrayState.Wah; + + if (state == WahBitArrayState.Indexes) + { + if (offsets.Count * 32 > currentMax) + { + type = WahBitArrayState.Wah; + return WahHelper.FromIndexes(GetOffsets()); + } + else + { + type = WahBitArrayState.Indexes; + return GetOffsets(); + } + } + else if (state == WahBitArrayState.Wah) + { + return wah; + } + // TODO: cache the wah bitmap + return Compress(bitmap); + } + + /// + /// Gets indices of bits not equal to def + /// + public IEnumerable GetBitIndexes() + { + if (state == WahBitArrayState.Index && singleIndex >= 0) + { + return new[] { singleIndex }; + } + else if (state == 
WahBitArrayState.Indexes) + { + return GetOffsets().Cast(); + } + else if (state == WahBitArrayState.Wah) + { + return WahHelper.BitIndexes(wah); + } + else if (state == WahBitArrayState.Bitarray) + { + return BitHelper.GetBitIndexes(bitmap); + } + throw new NotSupportedException(); + } + + /// + /// Gets ordered offsets from 'offsets' + /// + public uint[] GetOffsets(bool sorted = true) + { + Debug.Assert(state == WahBitArrayState.Indexes); + var k = new uint[offsets.Count]; + offsets.CopyTo(k, 0); + if (sorted) Array.Sort(k); + return k; + } + + #region [ P R I V A T E ] + + [Obsolete] + private void prelogic(WahBitArray op, out uint[] left, out uint[] right) + { + this.DecompressWah(); + + left = this.GetBitArray(); + right = op.GetBitArray(); + int ic = left.Length; + int uc = right.Length; + if (ic > uc) + { + uint[] ar = new uint[ic]; + right.CopyTo(ar, 0); + right = ar; + } + else if (ic < uc) + { + uint[] ar = new uint[uc]; + left.CopyTo(ar, 0); + left = ar; + } + } + + /// + /// Gets pure bitarray + /// + public uint[] GetBitArray() + { + if (state == WahBitArrayState.Indexes) + return UnpackOffsets(offsets, (int)currentMax); + + DecompressWah(switchState: false); + uint[] ui = new uint[bitmap.Length]; + bitmap.CopyTo(ui, 0); + + return ui; + } + + /// + /// returns offsets unpacked as bitarray + /// + public static uint[] UnpackOffsets(IEnumerable offsets, int len) + { + + uint[] bitmap = new uint[(len + 31) / 32]; + foreach (int index in offsets) + { + if (index < len) + { + BitHelper.BitmapSet(bitmap, index, true); + } + } + + return bitmap; + } + + public const int BitmapOffsetSwitchOverCount = 10; + /// + /// Changes type to bitarray from offsets if it is good idea + /// + private void ChangeTypeIfNeeded() + { + if (state != WahBitArrayState.Indexes) + return; + + var bitmapLength = (int)(currentMax / 32) + 1; + int c = offsets.Count; + if (c > bitmapLength && c > BitmapOffsetSwitchOverCount) + { + state = WahBitArrayState.Bitarray; + if (bitmap == 
null || bitmap.Length < bitmapLength) bitmap = new uint[bitmapLength]; + else Array.Clear(bitmap, 0, bitmap.Length); + // populate bitmap + foreach (var i in offsets) + BitHelper.BitmapSet(bitmap, (int)i, true); + // clear list + offsets = null; + } + } + + /// + /// Resizes a bitmap array to size 'length * (2 ^ k)' >= required to store 'index' (for lowest integer 'k') + /// + private void ResizeBitmap(int index) + { + Debug.Assert(state == WahBitArrayState.Bitarray); + if (bitmap == null) + { + bitmap = new uint[index >> 5]; + } + else + { + var len = bitmap.Length; + while (len * 32 < index) len *= 2; + if (len > bitmap.Length) + { + uint[] ar = new uint[index >> 5]; + bitmap.CopyTo(ar, 0); + bitmap = ar; + } + } + } + + /// + /// decompresses to BitArray state from WAH + /// + private void DecompressWah(bool switchState = true) + { + if (state == WahBitArrayState.Bitarray) + return; + + if (state == WahBitArrayState.Wah) + { + bitmap = Uncompress(wah); + if (switchState) + { + state = WahBitArrayState.Bitarray; + wah = null; + } + } + } + #endregion + + #region compress / uncompress + /// + /// Takes 31 bit block at bit index + /// + public static uint Take31Bits(uint[] data, int index) + { + ulong l1 = 0; + ulong l2 = 0; + ulong l = 0; + ulong ret = 0; + int off = (index % 32); + int pointer = index >> 5; + + l1 = data[pointer]; + pointer++; + if (pointer < data.Length) + l2 = data[pointer]; + + l = (l1 << 32) + l2; + ret = (l >> (33 - off)) & 0x7fffffff; + + return (uint)ret; + } + + public static uint[] Compress(uint[] data) + { + List compressed = new List(); + uint zeros = 0; + uint ones = 0; + int count = data.Length << 5; + for (int i = 0; i < count;) + { + uint num = Take31Bits(data, i); + i += 31; + if (num == 0) // all zero + { + // FIX: 31WAH + zeros += 1; + FlushOnes(compressed, ref ones); + } + else if (num == 0x7fffffff) // all ones + { + // FIX: 31WAH + ones += 1; + FlushZeros(compressed, ref zeros); + } + else // literal + { + 
FlushOnes(compressed, ref ones); + FlushZeros(compressed, ref zeros); + compressed.Add(num); + } + } + FlushOnes(compressed, ref ones); + FlushZeros(compressed, ref zeros); + return compressed.ToArray(); + } + + public static uint[] Uncompress(uint[] data) + { + int index = 0; + List list = new List(); + if (data == null) + return null; + + foreach (uint ci in data) + { + if ((ci & 0x80000000) == 0) // literal + { + Write31Bits(list, index, ci); + // FIX: 31WAH + index += 31; + } + else + { + uint count = (ci & 0x3fffffff) * 31; + if ((ci & 0x40000000) > 0) // ones count + WriteOnes(list, index, count); + + index += (int)count; + } + } + if (list.Count * 32 < index) list.AddRange(new uint[index / 32 - list.Count]); + return list.ToArray(); + } + + private static void FlushOnes(List compressed, ref uint ones) + { + if (ones > 0) + { + uint n = 0xc0000000 | ones; + ones = 0; + compressed.Add(n); + } + } + + private static void FlushZeros(List compressed, ref uint zeros) + { + if (zeros > 0) + { + uint n = 0x80000000 | zeros; + zeros = 0; + compressed.Add(n); + } + } + + private static void EnsureLength(List l, int index) + { + if (l.Count * 31 < index) l.AddRange(new uint[index / 31 - l.Count + 1]); + } + + + private static void Write31Bits(List list, int index, uint val) + { + EnsureLength(list, index + 31); + + int off = (index % 32); + int pointer = index >> 5; + + if (pointer >= list.Count - 1) + list.Add(0); + + ulong l = ((ulong)list[pointer] << 32) + list[pointer + 1]; + l |= (ulong)val << (33 - off); + + list[pointer] = (uint)(l >> 32); + list[pointer + 1] = (uint)(l & 0xffffffff); + } + + private static void WriteOnes(List list, int index, uint count) + { + if (list.Count * 32 < index) list.AddRange(new uint[list.Count - index / 32]); + + int off = index % 32; + int pointer = index >> 5; + int ccount = (int)count; + int indx = index; + int x = 32 - off; + + if (pointer >= list.Count) + list.Add(0); + + if (ccount > x || x == 32) //current pointer + { + 
list[pointer] |= (uint)((0xffffffff >> off)); + ccount -= x; + indx += x; + } + else + { + list[pointer] |= (uint)((0xffffffff << ccount) >> off); + ccount = 0; + } + + bool checklast = true; + while (ccount >= 32)//full ints + { + if (checklast && list[list.Count - 1] == 0) + { + list.RemoveAt(list.Count - 1); + checklast = false; + } + + list.Add(0xffffffff); + ccount -= 32; + indx += 32; + } + int p = indx >> 5; + off = indx % 32; + if (ccount > 0) + { + uint i = 0xffffffff << (32 - ccount); + if (p > (list.Count - 1)) //remaining + list.Add(i); + else + list[p] |= (uint)(i >> off); + } + } + #endregion + } +} diff --git a/RaptorDB.Common/WahHelper.cs b/RaptorDB.Common/WahHelper.cs new file mode 100644 index 0000000..19e5888 --- /dev/null +++ b/RaptorDB.Common/WahHelper.cs @@ -0,0 +1,234 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading.Tasks; + +namespace RaptorDB.Common +{ + + static class WahHelper + { + public const uint SecondBitMask = 1u << 30; + public const uint FirstBitMask = 1u << 31; + public const uint CountBitMask = ~(3 << 30); + + struct IterState + { + public uint[] Arr; + public int Index; + public int Count; + public uint Value; + public uint TakeWord() + { + if (Count == 0) + { + uint ai = Arr[Index++]; + if ((ai >> 31) == 1) + { + Count = (int)((ai & 0x3fffffff) - 1); + Value = (ai & SecondBitMask >> 30) * 0x7fffffff; + } + else + { + Value = ai; + } + } + else Count--; + return Value; + } + public void Skip(int len) + { + Count -= len; + while (Count < 0) + { + var ai = Arr[Index++]; + if ((ai & FirstBitMask) != 0) + { + Count += (int)(ai & 0x3fffffff - 1); + if (Count >= 0) Value = (ai & SecondBitMask >> 30) * 0x7fffffff; + } + else + { + Count++; + Value = ai; + } + } + } + public void CopyTo(WahWriter w, int len) + { + while (len > 0) + { + var ai = Arr[Index++]; + if ((ai >> 31) == 1) + { + Count = (int)(ai & 0x3fffffff); + len -= 
Count; + Value = (ai & SecondBitMask >> 30); + w.WriteSum((uint)Count, Value); + Value *= 0x7fffffff; + } + else + { + Count = 0; + len--; + w.WriteLit(Value = ai); + } + } + Count += len; + } + public IterState(uint[] arr) + { + this.Arr = arr; + Index = 0; + Count = 0; + Value = 0; + } + } + + public static uint[] WahNot(uint[] bitmap) + { + var result = new uint[bitmap.Length]; + for (int i = 0; i < bitmap.Length; i++) + { + if ((bitmap[i] & FirstBitMask) == 0) + result[i] = bitmap[i] ^ 0x7fffffffu; + else result[i] = bitmap[i] ^ SecondBitMask; + } + return result; + } + + public static void InPlaceNot(uint[] wah) + { + for (int i = 0; i < wah.Length; i++) + { + if ((wah[i] & FirstBitMask) == 0) + wah[i] ^= 0x7fffffffu; + else wah[i] ^= SecondBitMask; + } + } + + public static uint[] WahAnd(uint[] ap, uint[] bp) + { + var a = new IterState(ap); + var b = new IterState(bp); + var w = new WahWriter(ap.Length); + while (a.Index < ap.Length && b.Index < bp.Length) + { + w.WriteLit(a.TakeWord() & b.TakeWord()); + if (a.Count > 0) + { + if (a.Value == 0) b.Skip(a.Count); + else b.CopyTo(w, a.Count); + a.Count = 0; + } + if (b.Count > 0) + { + if (b.Value == 0) a.Skip(b.Count); + else a.CopyTo(w, b.Count); + b.Count = 0; + } + } + return w.ToArray(); + } + + public static uint[] Compress(uint[] arr) + { + var w = new WahWriter(4096); + var len = arr.Length; + var i = 0; + bool last; + do + { + w.WriteLit(Take31Bits(arr, len, i, out last)); + i++; + } while (!last); + return w.ToArray(); + } + + public static IEnumerable BitIndexes(uint[] wah) + { + var index = 0; + for (int i = 0; i < wah.Length; i++) + { + var w = wah[i]; + if ((w & FirstBitMask) > 0) // wah word with ones + { + var count = (int)(w & CountBitMask) * 31; + index += count; + if ((w & SecondBitMask) > 0) + { + for (var j = 0; j < count; j++) + { + yield return j + index; + } + } + } + else + { + for (int j = 0; j < 31; j++) + { + if ((w & 1) > 0) + yield return index + j; + w >>= 1; + } + index += 31; + 
} + } + } + + /// + /// Takes 31 bit block at block index + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static uint Take31Bits(uint[] arr, int len, int index, out bool last) + { + // index from the end of word + // if (!last & arr[wordIndex] == 0 && arr[wordIndex + 1] == 0) return 0; + var wordIndex = index - ((index + 31) >> 5); + var bitIndex = 33 - ((32 - index) & 31); + last = wordIndex + 1 == len; + if (!last) + { + long words = ((long)arr[wordIndex] << 32) | arr[wordIndex + 1]; + return (uint)((words >> bitIndex) & 0x7fffffffu); + } + else + { + if (bitIndex == 33) last = false; + return (arr[wordIndex] << 32 - bitIndex) & 0x7fffffffu; + } + } + + public static uint[] FromIndexes(uint[] offsets) + { + var w = new WahWriter(4096); + uint index = 0; + uint map = 0; + for (int i = 0; i < offsets.Length; i++) + { + var o = offsets[i]; + if (o / 32 != index) + { + if (map != 0) { w.WriteLit(map); index++; } + if (index < o) + { + w.WriteSum(o - index, 0); + index = o; + } + map = 0; + } + map |= (uint)1 << (int)(31 - (index % 32)); + } + return w.ToArray(); + } + + public static int BitCount(uint element) + { + if ((element & FirstBitMask) == 0) return BitHelper.BitCount(element); + if ((element & SecondBitMask) == 0) return 0; + else return (int)(element & CountBitMask) * 32; + } + } +} diff --git a/RaptorDB.Common/WahWriter.cs b/RaptorDB.Common/WahWriter.cs new file mode 100644 index 0000000..c16caa1 --- /dev/null +++ b/RaptorDB.Common/WahWriter.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading.Tasks; + +namespace RaptorDB.Common +{ + struct WahWriter + { + List bits; + int index; + uint[] arr; + readonly int len; + int lastSumVal; + public WahWriter(int capacity) + { + lastSumVal = -1; + len = capacity; + arr = new uint[len]; + index = -1; + bits = null; + } + void AddChunk() + { + index = 0; + if (bits == null) bits 
= new List(); + bits.Add(arr); + arr = new uint[len]; + } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + void Add(uint val) + { + index++; + if (len == index) AddChunk(); + arr[index] = val; + } + public void WriteLit(uint val) + { + if (val == 0) + { + if (lastSumVal == 0) arr[index]++; + else + { + Add((1u << 31) | (0u << 30) | 1u); + lastSumVal = 0; + } + } + else if (val == 0x7fffffff) + { + if (lastSumVal == 1) arr[index]++; + else + { + Add((1u << 31) | (1u << 30) | 1u); + lastSumVal = 1; + } + } + else + { + lastSumVal = -1; + Add(val); + } + } + /// 0/1 + public void WriteSum(uint len, uint value) + { + if (lastSumVal != value) + { + Add((1u << 31) | (value << 30) | len); + lastSumVal = (int)value; + } + else + { + arr[index] += len; + } + } + public uint[] ToArray() + { + int bc = 0; + uint[] wharr = null; + if (bits != null) + { + bc = bits.Count * len; + wharr = new uint[bc + index + 1]; + for (int i = 0; i < bits.Count; i++) + { + Buffer.BlockCopy(bits[i], 0, wharr, i * len * 4, len * 4); + } + } + wharr = wharr ?? 
new uint[index + 1]; + Array.Copy(arr, 0, wharr, bc, index + 1); + return wharr; + } + } +} diff --git a/RaptorDB.Common/fastBinaryJSON/BJSON.cs b/RaptorDB.Common/fastBinaryJSON/BJSON.cs index 09e90c0..3dd5bd3 100644 --- a/RaptorDB.Common/fastBinaryJSON/BJSON.cs +++ b/RaptorDB.Common/fastBinaryJSON/BJSON.cs @@ -1,780 +1,780 @@ -using System; -using System.Collections; -using System.Collections.Generic; -#if !SILVERLIGHT -using System.Data; -#endif -using System.Globalization; -using System.IO; -using System.Reflection; -using System.Reflection.Emit; -using System.Xml; -using System.Text; -using fastJSON; -using RaptorDB.Common; -using System.Collections.Specialized; - -namespace fastBinaryJSON -{ - public sealed class TOKENS - { - public const byte DOC_START = 1; - public const byte DOC_END = 2; - public const byte ARRAY_START = 3; - public const byte ARRAY_END = 4; - public const byte COLON = 5; - public const byte COMMA = 6; - public const byte NAME = 7; - public const byte STRING = 8; - public const byte BYTE = 9; - public const byte INT = 10; - public const byte UINT = 11; - public const byte LONG = 12; - public const byte ULONG = 13; - public const byte SHORT = 14; - public const byte USHORT = 15; - public const byte DATETIME = 16; - public const byte GUID = 17; - public const byte DOUBLE = 18; - public const byte FLOAT = 19; - public const byte DECIMAL = 20; - public const byte CHAR = 21; - public const byte BYTEARRAY = 22; - public const byte NULL = 23; - public const byte TRUE = 24; - public const byte FALSE = 25; - public const byte UNICODE_STRING = 26; - } - - //public delegate string Serialize(object data); - //public delegate object Deserialize(string data); - - public sealed class BJSONParameters - { - /// - /// Optimize the schema for Datasets (default = True) - /// - public bool UseOptimizedDatasetSchema = true; - /// - /// Serialize readonly properties (default = False) - /// - public bool ShowReadOnlyProperties = false; - /// - /// Use global types 
$types for more compact size when using a lot of classes (default = True) - /// - public bool UsingGlobalTypes = true; - /// - /// Use Unicode strings = T (faster), Use UTF8 strings = F (smaller) (default = True) - /// - public bool UseUnicodeStrings = true; - /// - /// Serialize Null values to the output (default = False) - /// - public bool SerializeNulls = false; - /// - /// Enable fastBinaryJSON extensions $types, $type, $map (default = True) - /// - public bool UseExtensions = true; - /// - /// Anonymous types have read only properties - /// - public bool EnableAnonymousTypes = false; - /// - /// Use the UTC date format (default = False) - /// - public bool UseUTCDateTime = false; - /// - /// Ignore attributes to check for (default : XmlIgnoreAttribute) - /// - public List IgnoreAttributes = new List { typeof(System.Xml.Serialization.XmlIgnoreAttribute) }; - /// - /// If you have parametric and no default constructor for you classes (default = False) - /// - /// IMPORTANT NOTE : If True then all initial values within the class will be ignored and will be not set - /// - public bool ParametricConstructorOverride = false; - /// - /// Maximum depth the serializer will go to to avoid loops (default = 20 levels) - /// - public short SerializerMaxDepth = 20; - - public void FixValues() - { - if (UseExtensions == false) // disable conflicting params - UsingGlobalTypes = false; - - if (EnableAnonymousTypes) - ShowReadOnlyProperties = true; - } - } - - public static class BJSON - { - /// - /// Globally set-able parameters for controlling the serializer - /// - public static BJSONParameters Parameters = new BJSONParameters(); - /// - /// Parse a json and generate a Dictionary<string,object> or List<object> structure - /// - /// - /// - public static object Parse(byte[] json) - { - return new BJsonParser(json, Parameters.UseUTCDateTime).Decode(); - } - /// - /// Create a .net4 dynamic object from the binary json byte array - /// - /// - /// - public static dynamic 
ToDynamic(byte[] json) - { - return new DynamicJson(json); - } - /// - /// Register custom type handlers for your own types not natively handled by fastBinaryJSON - /// - /// - /// - /// - public static void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer) - { - Reflection.Instance.RegisterCustomType(type, serializer, deserializer); - } - /// - /// Create a binary json representation for an object - /// - /// - /// - public static byte[] ToBJSON(object obj) - { - return ToBJSON(obj, Parameters); - } - /// - /// Create a binary json representation for an object with parameter override on this call - /// - /// - /// - /// - public static byte[] ToBJSON(object obj, BJSONParameters param) - { - param.FixValues(); - Type t = null; - if (obj == null) - return new byte[] { TOKENS.NULL }; - if (obj.GetType().IsGenericType) - t = Reflection.Instance.GetGenericTypeDefinition(obj.GetType());// obj.GetType().GetGenericTypeDefinition(); - if (t == typeof(Dictionary<,>) || t == typeof(List<>)) - param.UsingGlobalTypes = false; - // FEATURE : enable extensions when you can deserialize anon types - if (param.EnableAnonymousTypes) { param.UseExtensions = false; param.UsingGlobalTypes = false; } - - return new BJSONSerializer(param).ConvertToBJSON(obj); - } - /// - /// Fill a given object with the binary json represenation - /// - /// - /// - /// - public static object FillObject(object input, byte[] json) - { - return new deserializer(Parameters).FillObject(input, json); - } - /// - /// Create a generic object from the json - /// - /// - /// - /// - public static T ToObject(byte[] json) - { - return new deserializer(Parameters).ToObject(json); - } - /// - /// Create a generic object from the json with parameter override on this call - /// - /// - /// - /// - /// - public static T ToObject(byte[] json, BJSONParameters param) - { - return new deserializer(param).ToObject(json); - } - /// - /// Create an object from the json - /// - /// - /// - public 
static object ToObject(byte[] json) - { - return new deserializer(Parameters).ToObject(json, null); - } - /// - /// Create an object from the json with parameter override on this call - /// - /// - /// - /// - public static object ToObject(byte[] json, BJSONParameters param) - { - param.FixValues(); - return new deserializer(param).ToObject(json, null); - } - /// - /// Create a typed object from the json - /// - /// - /// - /// - public static object ToObject(byte[] json, Type type) - { - return new deserializer(Parameters).ToObject(json, type); - } - /// - /// Clear the internal reflection cache so you can start from new (you will loose performance) - /// - public static void ClearReflectionCache() - { - Reflection.Instance.ClearReflectionCache(); - } - /// - /// Deep copy an object i.e. clone to a new object - /// - /// - /// - public static object DeepCopy(object obj) - { - return new deserializer(Parameters).ToObject(ToBJSON(obj)); - } - } - - internal class deserializer - { - public deserializer(BJSONParameters param) - { - _params = param; - } - - private BJSONParameters _params; - private Dictionary _circobj = new Dictionary(); - private Dictionary _cirrev = new Dictionary(); - - public T ToObject(byte[] json) - { - return (T)ToObject(json, typeof(T)); - } - - public object ToObject(byte[] json) - { - return ToObject(json, null); - } - - public object ToObject(byte[] json, Type type) - { - _params.FixValues(); - Type t = null; - if (type != null && type.IsGenericType) - t = Reflection.Instance.GetGenericTypeDefinition(type);// type.GetGenericTypeDefinition(); - if (t == typeof(Dictionary<,>) || t == typeof(List<>)) - _params.UsingGlobalTypes = false; - _globalTypes = _params.UsingGlobalTypes; - - var o = new BJsonParser(json, _params.UseUTCDateTime).Decode(); -#if !SILVERLIGHT - if (type != null && type == typeof(DataSet)) - return CreateDataset(o as Dictionary, null); - - if (type != null && type == typeof(DataTable)) - return CreateDataTable(o as 
Dictionary, null); -#endif - if (o is IDictionary) - { - if (type != null && t == typeof(Dictionary<,>)) // deserialize a dictionary - return RootDictionary(o, type); - else // deserialize an object - return ParseDictionary(o as Dictionary, null, type, null); - } - - if (o is List) - { - if (type != null && t == typeof(Dictionary<,>)) // kv format - return RootDictionary(o, type); - - if (type != null && t == typeof(List<>)) // deserialize to generic list - return RootList(o, type); - - if (type == typeof(Hashtable)) - return RootHashTable((List)o); - else - return (o as List).ToArray(); - } - - return o; - } - - public object FillObject(object input, byte[] json) - { - _params.FixValues(); - Dictionary ht = new BJsonParser(json, _params.UseUTCDateTime).Decode() as Dictionary; - if (ht == null) return null; - return ParseDictionary(ht, null, input.GetType(), input); - } - - - private object RootHashTable(List o) - { - Hashtable h = new Hashtable(); - - foreach (Dictionary values in o) - { - object key = values["k"]; - object val = values["v"]; - if (key is Dictionary) - key = ParseDictionary((Dictionary)key, null, typeof(object), null); - - if (val is Dictionary) - val = ParseDictionary((Dictionary)val, null, typeof(object), null); - - h.Add(key, val); - } - - return h; - } - - private object RootList(object parse, Type type) - { - Type[] gtypes = Reflection.Instance.GetGenericArguments(type);// type.GetGenericArguments(); - IList o = (IList)Reflection.Instance.FastCreateInstance(type); - foreach (var k in (IList)parse) - { - _globalTypes = false; - object v = k; - if (k is Dictionary) - v = ParseDictionary(k as Dictionary, null, gtypes[0], null); - else - v = k; - - o.Add(v); - } - return o; - } - - private object RootDictionary(object parse, Type type) - { - Type[] gtypes = Reflection.Instance.GetGenericArguments(type); - Type t1 = null; - Type t2 = null; - if (gtypes != null) - { - t1 = gtypes[0]; - t2 = gtypes[1]; - } - if (parse is Dictionary) - { - 
IDictionary o = (IDictionary)Reflection.Instance.FastCreateInstance(type); - - foreach (var kv in (Dictionary)parse) - { - _globalTypes = false; - object v; - object k = kv.Key; - - if (kv.Value is Dictionary) - v = ParseDictionary(kv.Value as Dictionary, null, gtypes[1], null); - - else if (gtypes != null && t2.IsArray) - v = CreateArray((List)kv.Value, t2, t2.GetElementType(), null); - - else if (kv.Value is IList) - v = CreateGenericList((List)kv.Value, t2, t1, null); - - else - v = kv.Value; - - o.Add(k, v); - } - - return o; - } - if (parse is List) - return CreateDictionary(parse as List, type, gtypes, null); - - return null; - } - - private bool _globalTypes = false; - private object ParseDictionary(Dictionary d, Dictionary globaltypes, Type type, object input) - { - object tn = ""; - if (type == typeof(NameValueCollection)) - return CreateNV(d); - if (type == typeof(StringDictionary)) - return CreateSD(d); - - if (d.TryGetValue("$i", out tn)) - { - object v = null; - _cirrev.TryGetValue((int)tn, out v); - return v; - } - - if (d.TryGetValue("$types", out tn)) - { - _globalTypes = true; - if (globaltypes == null) - globaltypes = new Dictionary(); - foreach (var kv in (Dictionary)tn) - { - globaltypes.Add((string)kv.Key, kv.Value); - } - } - - bool found = d.TryGetValue("$type", out tn); -#if !SILVERLIGHT - if (found == false && type == typeof(System.Object)) - { - return d; // CreateDataset(d, globaltypes); - } -#endif - if (found) - { - if (_globalTypes && globaltypes != null) - { - object tname = ""; - if (globaltypes != null && globaltypes.TryGetValue((string)tn, out tname)) - tn = tname; - } - type = Reflection.Instance.GetTypeFromCache((string)tn); - } - - if (type == null) - throw new Exception("Cannot determine type"); - - string typename = type.FullName; - object o = input; - if (o == null) - { - if (_params.ParametricConstructorOverride) - o = System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); - else - o = 
Reflection.Instance.FastCreateInstance(type); - } - - int circount = 0; - if (_circobj.TryGetValue(o, out circount) == false) - { - circount = _circobj.Count + 1; - _circobj.Add(o, circount); - _cirrev.Add(circount, o); - } - - Dictionary props = Reflection.Instance.Getproperties(type, typename, Reflection.Instance.IsTypeRegistered(type)); - foreach (var kv in d) - { - var n = kv.Key; - var v = kv.Value; - string name = n.ToLower(); - myPropInfo pi; - if (props.TryGetValue(name, out pi) == false) - continue; - if (pi.CanWrite) - { - //object v = d[n]; - - if (v != null) - { - object oset = v; - - switch (pi.Type) - { -#if !SILVERLIGHT - case myPropInfoType.DataSet: - oset = CreateDataset((Dictionary)v, globaltypes); - break; - case myPropInfoType.DataTable: - oset = CreateDataTable((Dictionary)v, globaltypes); - break; -#endif - case myPropInfoType.Custom : - oset = Reflection.Instance.CreateCustom((string)v, pi.pt); - break; - case myPropInfoType.Enum: - oset = CreateEnum(pi.pt, v); - break; - case myPropInfoType.StringKeyDictionary: - oset = CreateStringKeyDictionary((Dictionary)v, pi.pt, pi.GenericTypes, globaltypes); - break; - case myPropInfoType.Hashtable: - case myPropInfoType.Dictionary: - oset = CreateDictionary((List)v, pi.pt, pi.GenericTypes, globaltypes); - break; - case myPropInfoType.NameValue: oset = CreateNV((Dictionary)v); break; - case myPropInfoType.StringDictionary: oset = CreateSD((Dictionary)v); break; - case myPropInfoType.Array: - oset = CreateArray((List)v, pi.pt, pi.bt, globaltypes); - break; - default: - { - if (pi.IsGenericType && pi.IsValueType == false) - oset = CreateGenericList((List)v, pi.pt, pi.bt, globaltypes); - else if ((pi.IsClass || pi.IsStruct) && v is Dictionary) - oset = ParseDictionary((Dictionary)v, globaltypes, pi.pt, input); - - else if (v is List) - oset = CreateArray((List)v, pi.pt, typeof(object), globaltypes); - break; - } - } - - o = pi.setter(o, oset); - } - } - } - return o; - } - - private StringDictionary 
CreateSD(Dictionary d) - { - StringDictionary nv = new StringDictionary(); - - foreach (var o in d) - nv.Add(o.Key, (string)o.Value); - - return nv; - } - - private NameValueCollection CreateNV(Dictionary d) - { - NameValueCollection nv = new NameValueCollection(); - - foreach (var o in d) - nv.Add(o.Key, (string)o.Value); - - return nv; - } - - private object CreateEnum(Type pt, object v) - { - // FEATURE : optimize create enum -#if !SILVERLIGHT - return Enum.Parse(pt, v.ToString()); -#else - return Enum.Parse(pt, v, true); -#endif - } - - private object CreateArray(List data, Type pt, Type bt, Dictionary globalTypes) - { - Array col = Array.CreateInstance(bt, data.Count); - // create an array of objects - for (int i = 0; i < data.Count; i++)// each (object ob in data) - { - object ob = data[i]; - if (ob == null) - { - continue; - } - if (ob is IDictionary) - col.SetValue(ParseDictionary((Dictionary)ob, globalTypes, bt, null), i); - else if (ob is ICollection) - col.SetValue(CreateArray((List)ob, bt, bt.GetElementType(), globalTypes), i); - else - col.SetValue(ob, i); - } - - return col; - } - - private object CreateGenericList(List data, Type pt, Type bt, Dictionary globalTypes) - { - IList col = (IList)Reflection.Instance.FastCreateInstance(pt); - // create an array of objects - foreach (object ob in data) - { - if (ob is IDictionary) - col.Add(ParseDictionary((Dictionary)ob, globalTypes, bt, null)); - - else if (ob is List) - { - if (bt.IsGenericType) - col.Add((List)ob); - else - col.Add(((List)ob).ToArray()); - } - - else - col.Add(ob); - } - return col; - } - - private object CreateStringKeyDictionary(Dictionary reader, Type pt, Type[] types, Dictionary globalTypes) - { - var col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); - Type t1 = null; - Type t2 = null; - if (types != null) - { - t1 = types[0]; - t2 = types[1]; - } - - foreach (KeyValuePair values in reader) - { - var key = values.Key; - object val = null; - - if (values.Value is 
Dictionary) - val = ParseDictionary((Dictionary)values.Value, globalTypes, t2, null); - - else if (types != null && t2.IsArray) - { - if (values.Value is Array) - val = values.Value; - else - val = CreateArray((List)values.Value, t2, t2.GetElementType(), globalTypes); - } - else if (values.Value is IList) - val = CreateGenericList((List)values.Value, t2, t1, globalTypes); - - else - val = values.Value; - - col.Add(key, val); - } - - return col; - } - - private object CreateDictionary(List reader, Type pt, Type[] types, Dictionary globalTypes) - { - IDictionary col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); - Type t1 = null; - Type t2 = null; - if (types != null) - { - t1 = types[0]; - t2 = types[1]; - } - - foreach (Dictionary values in reader) - { - object key = values["k"]; - object val = values["v"]; - - if (key is Dictionary) - key = ParseDictionary((Dictionary)key, globalTypes, t1, null); - - if (val is Dictionary) - val = ParseDictionary((Dictionary)val, globalTypes, t2, null); - - col.Add(key, val); - } - - return col; - } - -#if !SILVERLIGHT - private DataSet CreateDataset(Dictionary reader, Dictionary globalTypes) - { - DataSet ds = new DataSet(); - ds.EnforceConstraints = false; - ds.BeginInit(); - - // read dataset schema here - var schema = reader["$schema"]; - - if (schema is string) - { - TextReader tr = new StringReader((string)schema); - ds.ReadXmlSchema(tr); - } - else - { - DatasetSchema ms = (DatasetSchema)ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); - ds.DataSetName = ms.Name; - for (int i = 0; i < ms.Info.Count; i += 3) - { - if (ds.Tables.Contains(ms.Info[i]) == false) - ds.Tables.Add(ms.Info[i]); - ds.Tables[ms.Info[i]].Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); - } - } - - foreach (KeyValuePair pair in reader) - { - if (pair.Key == "$type" || pair.Key == "$schema") continue; - - List rows = (List)pair.Value; - if (rows == null) continue; - - DataTable dt = ds.Tables[pair.Key]; 
- ReadDataTable(rows, dt); - } - - ds.EndInit(); - - return ds; - } - - private void ReadDataTable(List rows, DataTable dt) - { - dt.BeginInit(); - dt.BeginLoadData(); - - foreach (List row in rows) - { - object[] v = new object[row.Count]; - row.CopyTo(v, 0); - dt.Rows.Add(v); - } - - dt.EndLoadData(); - dt.EndInit(); - } - - DataTable CreateDataTable(Dictionary reader, Dictionary globalTypes) - { - var dt = new DataTable(); - - // read dataset schema here - var schema = reader["$schema"]; - - if (schema is string) - { - TextReader tr = new StringReader((string)schema); - dt.ReadXmlSchema(tr); - } - else - { - var ms = (DatasetSchema)this.ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); - dt.TableName = ms.Info[0]; - for (int i = 0; i < ms.Info.Count; i += 3) - { - dt.Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); - } - } - - foreach (var pair in reader) - { - if (pair.Key == "$type" || pair.Key == "$schema") - continue; - - var rows = (List)pair.Value; - if (rows == null) - continue; - - if (!dt.TableName.Equals(pair.Key, StringComparison.InvariantCultureIgnoreCase)) - continue; - - ReadDataTable(rows, dt); - } - - return dt; - } -#endif - } +using System; +using System.Collections; +using System.Collections.Generic; +#if !SILVERLIGHT +using System.Data; +#endif +using System.Globalization; +using System.IO; +using System.Reflection; +using System.Reflection.Emit; +using System.Xml; +using System.Text; +using fastJSON; +using RaptorDB.Common; +using System.Collections.Specialized; + +namespace fastBinaryJSON +{ + public sealed class TOKENS + { + public const byte DOC_START = 1; + public const byte DOC_END = 2; + public const byte ARRAY_START = 3; + public const byte ARRAY_END = 4; + public const byte COLON = 5; + public const byte COMMA = 6; + public const byte NAME = 7; + public const byte STRING = 8; + public const byte BYTE = 9; + public const byte INT = 10; + public const byte UINT = 11; + public const byte LONG = 12; 
+ public const byte ULONG = 13; + public const byte SHORT = 14; + public const byte USHORT = 15; + public const byte DATETIME = 16; + public const byte GUID = 17; + public const byte DOUBLE = 18; + public const byte FLOAT = 19; + public const byte DECIMAL = 20; + public const byte CHAR = 21; + public const byte BYTEARRAY = 22; + public const byte NULL = 23; + public const byte TRUE = 24; + public const byte FALSE = 25; + public const byte UNICODE_STRING = 26; + } + + //public delegate string Serialize(object data); + //public delegate object Deserialize(string data); + + public sealed class BJSONParameters + { + /// + /// Optimize the schema for Datasets (default = True) + /// + public bool UseOptimizedDatasetSchema = true; + /// + /// Serialize readonly properties (default = False) + /// + public bool ShowReadOnlyProperties = false; + /// + /// Use global types $types for more compact size when using a lot of classes (default = True) + /// + public bool UsingGlobalTypes = true; + /// + /// Use Unicode strings = T (faster), Use UTF8 strings = F (smaller) (default = True) + /// + public bool UseUnicodeStrings = true; + /// + /// Serialize Null values to the output (default = False) + /// + public bool SerializeNulls = false; + /// + /// Enable fastBinaryJSON extensions $types, $type, $map (default = True) + /// + public bool UseExtensions = true; + /// + /// Anonymous types have read only properties + /// + public bool EnableAnonymousTypes = false; + /// + /// Use the UTC date format (default = False) + /// + public bool UseUTCDateTime = false; + /// + /// Ignore attributes to check for (default : XmlIgnoreAttribute) + /// + public List IgnoreAttributes = new List { typeof(System.Xml.Serialization.XmlIgnoreAttribute) }; + /// + /// If you have parametric and no default constructor for you classes (default = False) + /// + /// IMPORTANT NOTE : If True then all initial values within the class will be ignored and will be not set + /// + public bool 
ParametricConstructorOverride = false; + /// + /// Maximum depth the serializer will go to to avoid loops (default = 20 levels) + /// + public short SerializerMaxDepth = 20; + + public void FixValues() + { + if (UseExtensions == false) // disable conflicting params + UsingGlobalTypes = false; + + if (EnableAnonymousTypes) + ShowReadOnlyProperties = true; + } + } + + public static class BJSON + { + /// + /// Globally set-able parameters for controlling the serializer + /// + public static BJSONParameters Parameters = new BJSONParameters(); + /// + /// Parse a json and generate a Dictionary<string,object> or List<object> structure + /// + /// + /// + public static object Parse(byte[] json) + { + return new BJsonParser(json, Parameters.UseUTCDateTime).Decode(); + } + /// + /// Create a .net4 dynamic object from the binary json byte array + /// + /// + /// + public static dynamic ToDynamic(byte[] json) + { + return new DynamicJson(json); + } + /// + /// Register custom type handlers for your own types not natively handled by fastBinaryJSON + /// + /// + /// + /// + public static void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer) + { + Reflection.Instance.RegisterCustomType(type, serializer, deserializer); + } + /// + /// Create a binary json representation for an object + /// + /// + /// + public static byte[] ToBJSON(object obj) + { + return ToBJSON(obj, Parameters); + } + /// + /// Create a binary json representation for an object with parameter override on this call + /// + /// + /// + /// + public static byte[] ToBJSON(object obj, BJSONParameters param) + { + param.FixValues(); + Type t = null; + if (obj == null) + return new byte[] { TOKENS.NULL }; + if (obj.GetType().IsGenericType) + t = Reflection.Instance.GetGenericTypeDefinition(obj.GetType());// obj.GetType().GetGenericTypeDefinition(); + if (t == typeof(Dictionary<,>) || t == typeof(List<>)) + param.UsingGlobalTypes = false; + // FEATURE : enable extensions when you can 
deserialize anon types + if (param.EnableAnonymousTypes) { param.UseExtensions = false; param.UsingGlobalTypes = false; } + + return new BJSONSerializer(param).ConvertToBJSON(obj); + } + /// + /// Fill a given object with the binary json represenation + /// + /// + /// + /// + public static object FillObject(object input, byte[] json) + { + return new deserializer(Parameters).FillObject(input, json); + } + /// + /// Create a generic object from the json + /// + /// + /// + /// + public static T ToObject(byte[] json) + { + return new deserializer(Parameters).ToObject(json); + } + /// + /// Create a generic object from the json with parameter override on this call + /// + /// + /// + /// + /// + public static T ToObject(byte[] json, BJSONParameters param) + { + return new deserializer(param).ToObject(json); + } + /// + /// Create an object from the json + /// + /// + /// + public static object ToObject(byte[] json) + { + return new deserializer(Parameters).ToObject(json, null); + } + /// + /// Create an object from the json with parameter override on this call + /// + /// + /// + /// + public static object ToObject(byte[] json, BJSONParameters param) + { + param.FixValues(); + return new deserializer(param).ToObject(json, null); + } + /// + /// Create a typed object from the json + /// + /// + /// + /// + public static object ToObject(byte[] json, Type type) + { + return new deserializer(Parameters).ToObject(json, type); + } + /// + /// Clear the internal reflection cache so you can start from new (you will loose performance) + /// + public static void ClearReflectionCache() + { + Reflection.Instance.ClearReflectionCache(); + } + /// + /// Deep copy an object i.e. 
clone to a new object + /// + /// + /// + public static object DeepCopy(object obj) + { + return new deserializer(Parameters).ToObject(ToBJSON(obj)); + } + } + + internal class deserializer + { + public deserializer(BJSONParameters param) + { + _params = param; + } + + private BJSONParameters _params; + private Dictionary _circobj = new Dictionary(); + private Dictionary _cirrev = new Dictionary(); + + public T ToObject(byte[] json) + { + return (T)ToObject(json, typeof(T)); + } + + public object ToObject(byte[] json) + { + return ToObject(json, null); + } + + public object ToObject(byte[] json, Type type) + { + _params.FixValues(); + Type t = null; + if (type != null && type.IsGenericType) + t = Reflection.Instance.GetGenericTypeDefinition(type);// type.GetGenericTypeDefinition(); + if (t == typeof(Dictionary<,>) || t == typeof(List<>)) + _params.UsingGlobalTypes = false; + _globalTypes = _params.UsingGlobalTypes; + + var o = new BJsonParser(json, _params.UseUTCDateTime).Decode(); +#if !SILVERLIGHT + if (type != null && type == typeof(DataSet)) + return CreateDataset(o as Dictionary, null); + + if (type != null && type == typeof(DataTable)) + return CreateDataTable(o as Dictionary, null); +#endif + if (o is IDictionary) + { + if (type != null && t == typeof(Dictionary<,>)) // deserialize a dictionary + return RootDictionary(o, type); + else // deserialize an object + return ParseDictionary(o as Dictionary, null, type, null); + } + + if (o is List) + { + if (type != null && t == typeof(Dictionary<,>)) // kv format + return RootDictionary(o, type); + + if (type != null && t == typeof(List<>)) // deserialize to generic list + return RootList(o, type); + + if (type == typeof(Hashtable)) + return RootHashTable((List)o); + else + return (o as List).ToArray(); + } + + return o; + } + + public object FillObject(object input, byte[] json) + { + _params.FixValues(); + Dictionary ht = new BJsonParser(json, _params.UseUTCDateTime).Decode() as Dictionary; + if (ht == null) 
return null; + return ParseDictionary(ht, null, input.GetType(), input); + } + + + private object RootHashTable(List o) + { + Hashtable h = new Hashtable(); + + foreach (Dictionary values in o) + { + object key = values["k"]; + object val = values["v"]; + if (key is Dictionary) + key = ParseDictionary((Dictionary)key, null, typeof(object), null); + + if (val is Dictionary) + val = ParseDictionary((Dictionary)val, null, typeof(object), null); + + h.Add(key, val); + } + + return h; + } + + private object RootList(object parse, Type type) + { + Type[] gtypes = Reflection.Instance.GetGenericArguments(type);// type.GetGenericArguments(); + IList o = (IList)Reflection.Instance.FastCreateInstance(type); + foreach (var k in (IList)parse) + { + _globalTypes = false; + object v = k; + if (k is Dictionary) + v = ParseDictionary(k as Dictionary, null, gtypes[0], null); + else + v = k; + + o.Add(v); + } + return o; + } + + private object RootDictionary(object parse, Type type) + { + Type[] gtypes = Reflection.Instance.GetGenericArguments(type); + Type t1 = null; + Type t2 = null; + if (gtypes != null) + { + t1 = gtypes[0]; + t2 = gtypes[1]; + } + if (parse is Dictionary) + { + IDictionary o = (IDictionary)Reflection.Instance.FastCreateInstance(type); + + foreach (var kv in (Dictionary)parse) + { + _globalTypes = false; + object v; + object k = kv.Key; + + if (kv.Value is Dictionary) + v = ParseDictionary(kv.Value as Dictionary, null, gtypes[1], null); + + else if (gtypes != null && t2.IsArray) + v = CreateArray((List)kv.Value, t2, t2.GetElementType(), null); + + else if (kv.Value is IList) + v = CreateGenericList((List)kv.Value, t2, t1, null); + + else + v = kv.Value; + + o.Add(k, v); + } + + return o; + } + if (parse is List) + return CreateDictionary(parse as List, type, gtypes, null); + + return null; + } + + private bool _globalTypes = false; + private object ParseDictionary(Dictionary d, Dictionary globaltypes, Type type, object input) + { + object tn = ""; + if (type == 
typeof(NameValueCollection)) + return CreateNV(d); + if (type == typeof(StringDictionary)) + return CreateSD(d); + + if (d.TryGetValue("$i", out tn)) + { + object v = null; + _cirrev.TryGetValue((int)tn, out v); + return v; + } + + if (d.TryGetValue("$types", out tn)) + { + _globalTypes = true; + if (globaltypes == null) + globaltypes = new Dictionary(); + foreach (var kv in (Dictionary)tn) + { + globaltypes.Add((string)kv.Key, kv.Value); + } + } + + bool found = d.TryGetValue("$type", out tn); +#if !SILVERLIGHT + if (found == false && type == typeof(System.Object)) + { + return d; // CreateDataset(d, globaltypes); + } +#endif + if (found) + { + if (_globalTypes && globaltypes != null) + { + object tname = ""; + if (globaltypes != null && globaltypes.TryGetValue((string)tn, out tname)) + tn = tname; + } + type = Reflection.Instance.GetTypeFromCache((string)tn); + } + + if (type == null) + throw new Exception("Cannot determine type"); + + string typename = type.FullName; + object o = input; + if (o == null) + { + if (_params.ParametricConstructorOverride) + o = System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); + else + o = Reflection.Instance.FastCreateInstance(type); + } + + int circount = 0; + if (_circobj.TryGetValue(o, out circount) == false) + { + circount = _circobj.Count + 1; + _circobj.Add(o, circount); + _cirrev.Add(circount, o); + } + + Dictionary props = Reflection.Instance.Getproperties(type, typename, Reflection.Instance.IsTypeRegistered(type)); + foreach (var kv in d) + { + var n = kv.Key; + var v = kv.Value; + string name = n.ToLower(); + myPropInfo pi; + if (props.TryGetValue(name, out pi) == false) + continue; + if (pi.CanWrite) + { + //object v = d[n]; + + if (v != null) + { + object oset = v; + + switch (pi.Type) + { +#if !SILVERLIGHT + case myPropInfoType.DataSet: + oset = CreateDataset((Dictionary)v, globaltypes); + break; + case myPropInfoType.DataTable: + oset = CreateDataTable((Dictionary)v, globaltypes); + break; 
+#endif + case myPropInfoType.Custom : + oset = Reflection.Instance.CreateCustom((string)v, pi.pt); + break; + case myPropInfoType.Enum: + oset = CreateEnum(pi.pt, v); + break; + case myPropInfoType.StringKeyDictionary: + oset = CreateStringKeyDictionary((Dictionary)v, pi.pt, pi.GenericTypes, globaltypes); + break; + case myPropInfoType.Hashtable: + case myPropInfoType.Dictionary: + oset = CreateDictionary((List)v, pi.pt, pi.GenericTypes, globaltypes); + break; + case myPropInfoType.NameValue: oset = CreateNV((Dictionary)v); break; + case myPropInfoType.StringDictionary: oset = CreateSD((Dictionary)v); break; + case myPropInfoType.Array: + oset = CreateArray((List)v, pi.pt, pi.bt, globaltypes); + break; + default: + { + if (pi.IsGenericType && pi.IsValueType == false) + oset = CreateGenericList((List)v, pi.pt, pi.bt, globaltypes); + else if ((pi.IsClass || pi.IsStruct) && v is Dictionary) + oset = ParseDictionary((Dictionary)v, globaltypes, pi.pt, input); + + else if (v is List) + oset = CreateArray((List)v, pi.pt, typeof(object), globaltypes); + break; + } + } + + o = pi.setter(o, oset); + } + } + } + return o; + } + + private StringDictionary CreateSD(Dictionary d) + { + StringDictionary nv = new StringDictionary(); + + foreach (var o in d) + nv.Add(o.Key, (string)o.Value); + + return nv; + } + + private NameValueCollection CreateNV(Dictionary d) + { + NameValueCollection nv = new NameValueCollection(); + + foreach (var o in d) + nv.Add(o.Key, (string)o.Value); + + return nv; + } + + private object CreateEnum(Type pt, object v) + { + // FEATURE : optimize create enum +#if !SILVERLIGHT + return Enum.Parse(pt, v.ToString()); +#else + return Enum.Parse(pt, v, true); +#endif + } + + private object CreateArray(List data, Type pt, Type bt, Dictionary globalTypes) + { + Array col = Array.CreateInstance(bt, data.Count); + // create an array of objects + for (int i = 0; i < data.Count; i++)// each (object ob in data) + { + object ob = data[i]; + if (ob == null) + { + 
continue; + } + if (ob is IDictionary) + col.SetValue(ParseDictionary((Dictionary)ob, globalTypes, bt, null), i); + else if (ob is ICollection) + col.SetValue(CreateArray((List)ob, bt, bt.GetElementType(), globalTypes), i); + else + col.SetValue(ob, i); + } + + return col; + } + + private object CreateGenericList(List data, Type pt, Type bt, Dictionary globalTypes) + { + IList col = (IList)Reflection.Instance.FastCreateInstance(pt); + // create an array of objects + foreach (object ob in data) + { + if (ob is IDictionary) + col.Add(ParseDictionary((Dictionary)ob, globalTypes, bt, null)); + + else if (ob is List) + { + if (bt.IsGenericType) + col.Add((List)ob); + else + col.Add(((List)ob).ToArray()); + } + + else + col.Add(ob); + } + return col; + } + + private object CreateStringKeyDictionary(Dictionary reader, Type pt, Type[] types, Dictionary globalTypes) + { + var col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); + Type t1 = null; + Type t2 = null; + if (types != null) + { + t1 = types[0]; + t2 = types[1]; + } + + foreach (KeyValuePair values in reader) + { + var key = values.Key; + object val = null; + + if (values.Value is Dictionary) + val = ParseDictionary((Dictionary)values.Value, globalTypes, t2, null); + + else if (types != null && t2.IsArray) + { + if (values.Value is Array) + val = values.Value; + else + val = CreateArray((List)values.Value, t2, t2.GetElementType(), globalTypes); + } + else if (values.Value is IList) + val = CreateGenericList((List)values.Value, t2, t1, globalTypes); + + else + val = values.Value; + + col.Add(key, val); + } + + return col; + } + + private object CreateDictionary(List reader, Type pt, Type[] types, Dictionary globalTypes) + { + IDictionary col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); + Type t1 = null; + Type t2 = null; + if (types != null) + { + t1 = types[0]; + t2 = types[1]; + } + + foreach (Dictionary values in reader) + { + object key = values["k"]; + object val = values["v"]; + + if 
(key is Dictionary) + key = ParseDictionary((Dictionary)key, globalTypes, t1, null); + + if (val is Dictionary) + val = ParseDictionary((Dictionary)val, globalTypes, t2, null); + + col.Add(key, val); + } + + return col; + } + +#if !SILVERLIGHT + private DataSet CreateDataset(Dictionary reader, Dictionary globalTypes) + { + DataSet ds = new DataSet(); + ds.EnforceConstraints = false; + ds.BeginInit(); + + // read dataset schema here + var schema = reader["$schema"]; + + if (schema is string) + { + TextReader tr = new StringReader((string)schema); + ds.ReadXmlSchema(tr); + } + else + { + DatasetSchema ms = (DatasetSchema)ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); + ds.DataSetName = ms.Name; + for (int i = 0; i < ms.Info.Count; i += 3) + { + if (ds.Tables.Contains(ms.Info[i]) == false) + ds.Tables.Add(ms.Info[i]); + ds.Tables[ms.Info[i]].Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); + } + } + + foreach (KeyValuePair pair in reader) + { + if (pair.Key == "$type" || pair.Key == "$schema") continue; + + List rows = (List)pair.Value; + if (rows == null) continue; + + DataTable dt = ds.Tables[pair.Key]; + ReadDataTable(rows, dt); + } + + ds.EndInit(); + + return ds; + } + + private void ReadDataTable(List rows, DataTable dt) + { + dt.BeginInit(); + dt.BeginLoadData(); + + foreach (List row in rows) + { + object[] v = new object[row.Count]; + row.CopyTo(v, 0); + dt.Rows.Add(v); + } + + dt.EndLoadData(); + dt.EndInit(); + } + + DataTable CreateDataTable(Dictionary reader, Dictionary globalTypes) + { + var dt = new DataTable(); + + // read dataset schema here + var schema = reader["$schema"]; + + if (schema is string) + { + TextReader tr = new StringReader((string)schema); + dt.ReadXmlSchema(tr); + } + else + { + var ms = (DatasetSchema)this.ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); + dt.TableName = ms.Info[0]; + for (int i = 0; i < ms.Info.Count; i += 3) + { + dt.Columns.Add(ms.Info[i + 1], 
Type.GetType(ms.Info[i + 2])); + } + } + + foreach (var pair in reader) + { + if (pair.Key == "$type" || pair.Key == "$schema") + continue; + + var rows = (List)pair.Value; + if (rows == null) + continue; + + if (!dt.TableName.Equals(pair.Key, StringComparison.InvariantCultureIgnoreCase)) + continue; + + ReadDataTable(rows, dt); + } + + return dt; + } +#endif + } } \ No newline at end of file diff --git a/RaptorDB.Common/fastBinaryJSON/BJsonParser.cs b/RaptorDB.Common/fastBinaryJSON/BJsonParser.cs index 8474ac6..1852cc7 100644 --- a/RaptorDB.Common/fastBinaryJSON/BJsonParser.cs +++ b/RaptorDB.Common/fastBinaryJSON/BJsonParser.cs @@ -1,290 +1,292 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.Globalization; -using System.Text; -using RaptorDB.Common; -using fastJSON; - -namespace fastBinaryJSON -{ - internal sealed class BJsonParser - { - readonly byte[] json; - int index; - bool _useUTC = true; - - internal BJsonParser(byte[] json, bool useUTC) - { - this.json = json; - _useUTC = useUTC; - } - - public object Decode() - { - bool b = false; - return ParseValue(out b); - } - - private Dictionary ParseObject() - { - Dictionary dic = new Dictionary(); - bool breakparse = false; - while (!breakparse) - { - byte t = GetToken(); - if (t == TOKENS.COMMA) - continue; - if (t == TOKENS.DOC_END) - break; - string key = ""; - if (t != TOKENS.NAME) - throw new Exception("excpecting a name field"); - key = ParseName(); - t = GetToken(); - if (t != TOKENS.COLON) - throw new Exception("expecting a colon"); - object val = ParseValue(out breakparse); - - if (breakparse == false) - { - dic.Add(key, val); - } - } - return dic; - } - - private string ParseName() - { - byte c = json[index++]; - string s = Reflection.Instance.utf8.GetString(json, index, c); - index += c; - return s; - } - - private List ParseArray() - { - List array = new List(); - - bool breakparse = false; - while (!breakparse) - { - object o = ParseValue(out breakparse); 
- byte t = 0; - if (breakparse == false) - { - array.Add(o); - t = GetToken(); - } - else t = (byte)o; - - if (t == TOKENS.COMMA) - continue; - if (t == TOKENS.ARRAY_END) - break; - } - return array; - } - - private object ParseValue(out bool breakparse) - { - byte t = GetToken(); - breakparse = false; - switch (t) - { - case TOKENS.BYTE: - return ParseByte(); - case TOKENS.BYTEARRAY: - return ParseByteArray(); - case TOKENS.CHAR: - return ParseChar(); - case TOKENS.DATETIME: - return ParseDateTime(); - case TOKENS.DECIMAL: - return ParseDecimal(); - case TOKENS.DOUBLE: - return ParseDouble(); - case TOKENS.FLOAT: - return ParseFloat(); - case TOKENS.GUID: - return ParseGuid(); - case TOKENS.INT: - return ParseInt(); - case TOKENS.LONG: - return ParseLong(); - case TOKENS.SHORT: - return ParseShort(); - //case TOKENS.SINGLE: - // return ParseSingle(); - case TOKENS.UINT: - return ParseUint(); - case TOKENS.ULONG: - return ParseULong(); - case TOKENS.USHORT: - return ParseUShort(); - case TOKENS.UNICODE_STRING: - return ParseUnicodeString(); - case TOKENS.STRING: - return ParseString(); - case TOKENS.DOC_START: - return ParseObject(); - case TOKENS.ARRAY_START: - return ParseArray(); - case TOKENS.TRUE: - return true; - case TOKENS.FALSE: - return false; - case TOKENS.NULL: - return null; - case TOKENS.ARRAY_END: - breakparse = true; - return TOKENS.ARRAY_END; - case TOKENS.DOC_END: - breakparse = true; - return TOKENS.DOC_END; - case TOKENS.COMMA: - breakparse = true; - return TOKENS.COMMA; - } - - throw new Exception("Unrecognized token at index = " + index); - } - - private object ParseChar() - { - throw new NotImplementedException(); - } - - private Guid ParseGuid() - { - byte[] b = new byte[16]; - Buffer.BlockCopy(json, index, b, 0, 16); - index += 16; - return new Guid(b); - } - - private float ParseFloat() - { - float f = BitConverter.ToSingle(json, index); - index += 4; - return f; - } - - private ushort ParseUShort() - { - ushort u = 
(ushort)Helper.ToInt16(json, index); - index += 2; - return u; - } - - private ulong ParseULong() - { - ulong u = (ulong)Helper.ToInt64(json, index); - index += 8; - return u; - } - - private uint ParseUint() - { - uint u = (uint)Helper.ToInt32(json, index); - index += 4; - return u; - } - - private short ParseShort() - { - short u = (short)Helper.ToInt16(json, index); - index += 2; - return u; - } - - private long ParseLong() - { - long u = (long)Helper.ToInt64(json, index); - index += 8; - return u; - } - - private int ParseInt() - { - int u = (int)Helper.ToInt32(json, index); - index += 4; - return u; - } - - private double ParseDouble() - { - double d = BitConverter.ToDouble(json, index); - index += 8; - return d; - } - - private object ParseUnicodeString() - { - int c = Helper.ToInt32(json, index); - index += 4; - - string s = Reflection.Instance.unicode.GetString(json, index, c); - index += c; - return s; - } - - private string ParseString() - { - int c = Helper.ToInt32(json, index); - index += 4; - - string s = Reflection.Instance.utf8.GetString(json, index, c); - index += c; - return s; - } - - private decimal ParseDecimal() - { - int[] i = new int[4]; - i[0] = Helper.ToInt32(json, index); - index += 4; - i[1] = Helper.ToInt32(json, index); - index += 4; - i[2] = Helper.ToInt32(json, index); - index += 4; - i[3] = Helper.ToInt32(json, index); - index += 4; - - return new decimal(i); - } - - private DateTime ParseDateTime() - { - long l = Helper.ToInt64(json, index); - index += 8; - - DateTime dt = new DateTime(l); - if (_useUTC) - dt = dt.ToLocalTime(); // to local time - - return dt; - } - - private byte[] ParseByteArray() - { - int c = Helper.ToInt32(json, index); - index += 4; - byte[] b = new byte[c]; - Buffer.BlockCopy(json, index, b, 0, c); - index += c; - return b; - } - - private byte ParseByte() - { - return json[index++]; - } - - private byte GetToken() - { - byte b = json[index++]; - return b; - } - } -} +using System; +using System.Collections; 
+using System.Collections.Generic; +using System.Globalization; +using System.Text; +using RaptorDB.Common; +using fastJSON; + +namespace fastBinaryJSON +{ + internal sealed class BJsonParser + { + readonly byte[] json; + int index; + bool _useUTC = true; + + internal BJsonParser(byte[] json, bool useUTC) + { + this.json = json; + _useUTC = useUTC; + } + + public object Decode() + { + bool b = false; + return ParseValue(out b); + } + + private Dictionary ParseObject() + { + Dictionary dic = new Dictionary(); + bool breakparse = false; + while (!breakparse) + { + byte t = GetToken(); + if (t == TOKENS.COMMA) + continue; + if (t == TOKENS.DOC_END) + break; + string key = ""; + if (t != TOKENS.NAME) + throw new Exception("excpecting a name field"); + key = ParseName(); + t = GetToken(); + if (t != TOKENS.COLON) + throw new Exception("expecting a colon"); + object val = ParseValue(out breakparse); + + if (breakparse == false) + { + dic.Add(key, val); + } + } + return dic; + } + + private string ParseName() + { + byte c = json[index++]; + string s = Reflection.Instance.utf8.GetString(json, index, c); + index += c; + return s; + } + + private List ParseArray() + { + List array = new List(); + + bool breakparse = false; + while (!breakparse) + { + object o = ParseValue(out breakparse); + byte t = 0; + if (breakparse == false) + { + array.Add(o); + t = GetToken(); + } + else t = (byte)o; + + if (t == TOKENS.COMMA) + continue; + if (t == TOKENS.ARRAY_END) + break; + } + return array; + } + + private object ParseValue(out bool breakparse) + { + byte t = GetToken(); + breakparse = false; + switch (t) + { + case TOKENS.BYTE: + return ParseByte(); + case TOKENS.BYTEARRAY: + return ParseByteArray(); + case TOKENS.CHAR: + return ParseChar(); + case TOKENS.DATETIME: + return ParseDateTime(); + case TOKENS.DECIMAL: + return ParseDecimal(); + case TOKENS.DOUBLE: + return ParseDouble(); + case TOKENS.FLOAT: + return ParseFloat(); + case TOKENS.GUID: + return ParseGuid(); + case 
TOKENS.INT: + return ParseInt(); + case TOKENS.LONG: + return ParseLong(); + case TOKENS.SHORT: + return ParseShort(); + //case TOKENS.SINGLE: + // return ParseSingle(); + case TOKENS.UINT: + return ParseUint(); + case TOKENS.ULONG: + return ParseULong(); + case TOKENS.USHORT: + return ParseUShort(); + case TOKENS.UNICODE_STRING: + return ParseUnicodeString(); + case TOKENS.STRING: + return ParseString(); + case TOKENS.DOC_START: + return ParseObject(); + case TOKENS.ARRAY_START: + return ParseArray(); + case TOKENS.TRUE: + return true; + case TOKENS.FALSE: + return false; + case TOKENS.NULL: + return null; + case TOKENS.ARRAY_END: + breakparse = true; + return TOKENS.ARRAY_END; + case TOKENS.DOC_END: + breakparse = true; + return TOKENS.DOC_END; + case TOKENS.COMMA: + breakparse = true; + return TOKENS.COMMA; + } + + throw new Exception("Unrecognized token at index = " + index); + } + + private object ParseChar() + { + short u = (short)Helper.ToInt16(json, index); + index += 2; + return u; + } + + private Guid ParseGuid() + { + byte[] b = new byte[16]; + Buffer.BlockCopy(json, index, b, 0, 16); + index += 16; + return new Guid(b); + } + + private float ParseFloat() + { + float f = BitConverter.ToSingle(json, index); + index += 4; + return f; + } + + private ushort ParseUShort() + { + ushort u = (ushort)Helper.ToInt16(json, index); + index += 2; + return u; + } + + private ulong ParseULong() + { + ulong u = (ulong)Helper.ToInt64(json, index); + index += 8; + return u; + } + + private uint ParseUint() + { + uint u = (uint)Helper.ToInt32(json, index); + index += 4; + return u; + } + + private short ParseShort() + { + short u = (short)Helper.ToInt16(json, index); + index += 2; + return u; + } + + private long ParseLong() + { + long u = (long)Helper.ToInt64(json, index); + index += 8; + return u; + } + + private int ParseInt() + { + int u = (int)Helper.ToInt32(json, index); + index += 4; + return u; + } + + private double ParseDouble() + { + double d = 
BitConverter.ToDouble(json, index); + index += 8; + return d; + } + + private object ParseUnicodeString() + { + int c = Helper.ToInt32(json, index); + index += 4; + + string s = Reflection.Instance.unicode.GetString(json, index, c); + index += c; + return s; + } + + private string ParseString() + { + int c = Helper.ToInt32(json, index); + index += 4; + + string s = Reflection.Instance.utf8.GetString(json, index, c); + index += c; + return s; + } + + private decimal ParseDecimal() + { + int[] i = new int[4]; + i[0] = Helper.ToInt32(json, index); + index += 4; + i[1] = Helper.ToInt32(json, index); + index += 4; + i[2] = Helper.ToInt32(json, index); + index += 4; + i[3] = Helper.ToInt32(json, index); + index += 4; + + return new decimal(i); + } + + private DateTime ParseDateTime() + { + long l = Helper.ToInt64(json, index); + index += 8; + + DateTime dt = new DateTime(l); + if (_useUTC) + dt = dt.ToLocalTime(); // to local time + + return dt; + } + + private byte[] ParseByteArray() + { + int c = Helper.ToInt32(json, index); + index += 4; + byte[] b = new byte[c]; + Buffer.BlockCopy(json, index, b, 0, c); + index += c; + return b; + } + + private byte ParseByte() + { + return json[index++]; + } + + private byte GetToken() + { + byte b = json[index++]; + return b; + } + } +} diff --git a/RaptorDB.Common/fastBinaryJSON/BJsonSerializer.cs b/RaptorDB.Common/fastBinaryJSON/BJsonSerializer.cs index 0ba25df..57827f8 100644 --- a/RaptorDB.Common/fastBinaryJSON/BJsonSerializer.cs +++ b/RaptorDB.Common/fastBinaryJSON/BJsonSerializer.cs @@ -1,631 +1,629 @@ -using System; -using System.Collections; -using System.Collections.Generic; -#if SILVERLIGHT - -#else -using System.Data; -#endif -using System.Globalization; -using System.IO; -using System.Text; -using fastJSON; -using RaptorDB.Common; -using System.Collections.Specialized; - -namespace fastBinaryJSON -{ - internal sealed class BJSONSerializer : IDisposable - { - private MemoryStream _output = new MemoryStream(); - private 
MemoryStream _before = new MemoryStream(); - private int _MAX_DEPTH = 20; - int _current_depth = 0; - private Dictionary _globalTypes = new Dictionary(); - private Dictionary _cirobj = new Dictionary(); - private BJSONParameters _params; - - private void Dispose(bool disposing) - { - if (disposing) - { - // dispose managed resources - _output.Close(); - _before.Close(); - } - // free native resources - } - - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } - - internal BJSONSerializer(BJSONParameters param) - { - _params = param; - _MAX_DEPTH = param.SerializerMaxDepth; - } - - internal byte[] ConvertToBJSON(object obj) - { - WriteValue(obj); - - // add $types - if (_params.UsingGlobalTypes && _globalTypes != null && _globalTypes.Count > 0) - { - byte[] after = _output.ToArray(); - _output = _before; - WriteName("$types"); - WriteColon(); - WriteTypes(_globalTypes); - WriteComma(); - _output.Write(after, 0, after.Length); - - return _output.ToArray(); - } - - return _output.ToArray(); - } - - private void WriteTypes(Dictionary dic) - { - _output.WriteByte(TOKENS.DOC_START); - - bool pendingSeparator = false; - - foreach (var entry in dic) - { - if (pendingSeparator) WriteComma(); - - WritePair(entry.Value.ToString(), entry.Key); - - pendingSeparator = true; - } - _output.WriteByte(TOKENS.DOC_END); - } - - private void WriteValue(object obj) - { - if (obj == null || obj is DBNull) - WriteNull(); - - else if (obj is string) - WriteString((string)obj); - - else if (obj is char) - WriteChar((char)obj); - - else if (obj is Guid) - WriteGuid((Guid)obj); - - else if (obj is bool) - WriteBool((bool)obj); - - else if (obj is int) - WriteInt((int)obj); - - else if (obj is uint) - WriteUInt((uint)obj); - - else if (obj is long) - WriteLong((long)obj); - - else if (obj is ulong) - WriteULong((ulong)obj); - - else if (obj is decimal) - WriteDecimal((decimal)obj); - - else if (obj is byte) - WriteByte((byte)obj); - - else if (obj is double) - 
WriteDouble((double)obj); - - else if (obj is float) - WriteFloat((float)obj); - - else if (obj is short) - WriteShort((short)obj); - - else if (obj is ushort) - WriteUShort((ushort)obj); - - else if (obj is DateTime) - WriteDateTime((DateTime)obj); - - else if (obj is IDictionary && obj.GetType().IsGenericType && obj.GetType().GetGenericArguments()[0] == typeof(string)) - WriteStringDictionary((IDictionary)obj); - - else if (obj is IDictionary) - WriteDictionary((IDictionary)obj); -#if !SILVERLIGHT - else if (obj is DataSet) - WriteDataset((DataSet)obj); - - else if (obj is DataTable) - this.WriteDataTable((DataTable)obj); -#endif - else if (obj is byte[]) - WriteBytes((byte[])obj); - - else if (obj is StringDictionary) - WriteSD((StringDictionary)obj); - - else if (obj is NameValueCollection) - WriteNV((NameValueCollection)obj); - - else if (obj is IEnumerable) - WriteArray((IEnumerable)obj); - - else if (obj is Enum) - WriteEnum((Enum)obj); - - else if (Reflection.Instance.IsTypeRegistered(obj.GetType())) - WriteCustom(obj); - - else - WriteObject(obj); - } - - private void WriteNV(NameValueCollection nameValueCollection) - { - _output.WriteByte(TOKENS.DOC_START); - - bool pendingSeparator = false; - - foreach (string key in nameValueCollection) - { - if (pendingSeparator) _output.WriteByte(TOKENS.COMMA); - - WritePair(key, nameValueCollection[key]); - - pendingSeparator = true; - } - _output.WriteByte(TOKENS.DOC_END); - } - - private void WriteSD(StringDictionary stringDictionary) - { - _output.WriteByte(TOKENS.DOC_START); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in stringDictionary) - { - if (pendingSeparator) _output.WriteByte(TOKENS.COMMA); - - WritePair((string)entry.Key, entry.Value); - - pendingSeparator = true; - } - _output.WriteByte(TOKENS.DOC_END); - } - - private void WriteUShort(ushort p) - { - _output.WriteByte(TOKENS.USHORT); - _output.Write(Helper.GetBytes(p, false), 0, 2); - } - - private void WriteShort(short p) - { 
- _output.WriteByte(TOKENS.SHORT); - _output.Write(Helper.GetBytes(p, false), 0, 2); - } - - private void WriteFloat(float p) - { - _output.WriteByte(TOKENS.FLOAT); - byte[] b = BitConverter.GetBytes(p); - _output.Write(b, 0, b.Length); - } - - private void WriteDouble(double p) - { - _output.WriteByte(TOKENS.DOUBLE); - var b = BitConverter.GetBytes(p); - _output.Write(b, 0, b.Length); - } - - private void WriteByte(byte p) - { - _output.WriteByte(TOKENS.BYTE); - _output.WriteByte(p); - } - - private void WriteDecimal(decimal p) - { - _output.WriteByte(TOKENS.DECIMAL); - var b = decimal.GetBits(p); - foreach (var c in b) - _output.Write(Helper.GetBytes(c, false), 0, 4); - } - - private void WriteULong(ulong p) - { - _output.WriteByte(TOKENS.ULONG); - _output.Write(Helper.GetBytes((long)p, false), 0, 8); - } - - private void WriteUInt(uint p) - { - _output.WriteByte(TOKENS.UINT); - _output.Write(Helper.GetBytes(p, false), 0, 4); - } - - private void WriteLong(long p) - { - _output.WriteByte(TOKENS.LONG); - _output.Write(Helper.GetBytes(p, false), 0, 8); - } - - private void WriteChar(char p) - { - // TODO : - //_output.WriteByte(TOKENS.CHAR); - //_output.Write(Helper.GetBytes( - throw new Exception("char not implemented yet"); - } - - private void WriteBytes(byte[] p) - { - _output.WriteByte(TOKENS.BYTEARRAY); - _output.Write(Helper.GetBytes(p.Length, false), 0, 4); - _output.Write(p, 0, p.Length); - } - - private void WriteBool(bool p) - { - if (p) - _output.WriteByte(TOKENS.TRUE); - else - _output.WriteByte(TOKENS.FALSE); - } - - private void WriteNull() - { - _output.WriteByte(TOKENS.NULL); - } - - - private void WriteCustom(object obj) - { - Serialize s; - Reflection.Instance._customSerializer.TryGetValue(obj.GetType(), out s); - WriteString(s(obj)); - } - - private void WriteColon() - { - _output.WriteByte(TOKENS.COLON); - } - - private void WriteComma() - { - _output.WriteByte(TOKENS.COMMA); - } - - private void WriteEnum(Enum e) - { - 
WriteString(e.ToString()); - } - - private void WriteInt(int i) - { - _output.WriteByte(TOKENS.INT); - _output.Write(Helper.GetBytes(i, false), 0, 4); - } - - private void WriteGuid(Guid g) - { - _output.WriteByte(TOKENS.GUID); - _output.Write(g.ToByteArray(), 0, 16); - } - - private void WriteDateTime(DateTime dateTime) - { - DateTime dt = dateTime; - if (_params.UseUTCDateTime) - dt = dateTime.ToUniversalTime(); - - _output.WriteByte(TOKENS.DATETIME); - byte[] b = Helper.GetBytes(dt.Ticks, false); - _output.Write(b, 0, b.Length); - } - -#if !SILVERLIGHT - private DatasetSchema GetSchema(DataTable ds) - { - if (ds == null) return null; - - DatasetSchema m = new DatasetSchema(); - m.Info = new List(); - m.Name = ds.TableName; - - foreach (DataColumn c in ds.Columns) - { - m.Info.Add(ds.TableName); - m.Info.Add(c.ColumnName); - m.Info.Add(c.DataType.ToString()); - } - // FEATURE : serialize relations and constraints here - - return m; - } - - private DatasetSchema GetSchema(DataSet ds) - { - if (ds == null) return null; - - DatasetSchema m = new DatasetSchema(); - m.Info = new List(); - m.Name = ds.DataSetName; - - foreach (DataTable t in ds.Tables) - { - foreach (DataColumn c in t.Columns) - { - m.Info.Add(t.TableName); - m.Info.Add(c.ColumnName); - m.Info.Add(c.DataType.ToString()); - } - } - // FEATURE : serialize relations and constraints here - - return m; - } - - private string GetXmlSchema(DataTable dt) - { - using (var writer = new StringWriter()) - { - dt.WriteXmlSchema(writer); - return dt.ToString(); - } - } - - private void WriteDataset(DataSet ds) - { - _output.WriteByte(TOKENS.DOC_START); - { - WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)GetSchema(ds) : ds.GetXmlSchema()); - WriteComma(); - } - bool tablesep = false; - foreach (DataTable table in ds.Tables) - { - if (tablesep) WriteComma(); - tablesep = true; - WriteDataTableData(table); - } - // end dataset - _output.WriteByte(TOKENS.DOC_END); - } - - private void WriteDataTableData(DataTable table) - { - WriteName(table.TableName); - WriteColon(); - _output.WriteByte(TOKENS.ARRAY_START); - DataColumnCollection cols = table.Columns; - bool rowseparator = false; - foreach (DataRow row in table.Rows) - { - if (rowseparator) WriteComma(); - rowseparator = true; - _output.WriteByte(TOKENS.ARRAY_START); - - bool pendingSeperator = false; - foreach (DataColumn column in cols) - { - if (pendingSeperator) WriteComma(); - WriteValue(row[column]); - pendingSeperator = true; - } - _output.WriteByte(TOKENS.ARRAY_END); - } - - _output.WriteByte(TOKENS.ARRAY_END); - } - - void WriteDataTable(DataTable dt) - { - _output.WriteByte(TOKENS.DOC_START); - //if (this.useExtension) - { - this.WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)this.GetSchema(dt) : this.GetXmlSchema(dt)); - WriteComma(); - } - - WriteDataTableData(dt); - - // end datatable - _output.WriteByte(TOKENS.DOC_END); - } -#endif - bool _TypesWritten = false; - - private void WriteObject(object obj) - { - int i = 0; - if (_cirobj.TryGetValue(obj, out i) == false) - _cirobj.Add(obj, _cirobj.Count + 1); - else - { - if (_current_depth > 0) - { - //_circular = true; - _output.WriteByte(TOKENS.DOC_START); - WriteName("$i"); - WriteColon(); - WriteValue(i); - _output.WriteByte(TOKENS.DOC_END); - return; - } - } - if (_params.UsingGlobalTypes == false) - _output.WriteByte(TOKENS.DOC_START); - else - { - if (_TypesWritten == false) - { - _output.WriteByte(TOKENS.DOC_START); - _before = _output; - _output = new MemoryStream(); - } - else - _output.WriteByte(TOKENS.DOC_START); - - } - _TypesWritten = true; - _current_depth++; - if (_current_depth > _MAX_DEPTH) - throw new Exception("Serializer encountered maximum depth of " + _MAX_DEPTH); - - Type t = obj.GetType(); - bool append = false; - if (_params.UseExtensions) - { - if (_params.UsingGlobalTypes == false) - WritePairFast("$type", Reflection.Instance.GetTypeAssemblyName(t)); - else - { - int dt = 0; - string ct = Reflection.Instance.GetTypeAssemblyName(t); - if (_globalTypes.TryGetValue(ct, out dt) == false) - { - dt = _globalTypes.Count + 1; - _globalTypes.Add(ct, dt); - } - WritePairFast("$type", dt.ToString()); - } - append = true; - } - - Getters[] g = Reflection.Instance.GetGetters(t, _params.ShowReadOnlyProperties, _params.IgnoreAttributes); - int c = g.Length; - for (int ii = 0; ii < c; ii++) - { - var p = g[ii]; - var o = p.Getter(obj); - if (_params.SerializeNulls == false && (o == null || o is DBNull)) - { - - } - else - { - if (append) - WriteComma(); - WritePair(p.Name, o); - append = true; - } - } - _output.WriteByte(TOKENS.DOC_END); - _current_depth--; - } - - private void WritePairFast(string name, string value) - { - if ( _params.SerializeNulls == false && 
(value == null)) - return; - WriteName(name); - - WriteColon(); - - WriteString(value); - } - - private void WritePair(string name, object value) - { - if (_params.SerializeNulls == false && (value == null || value is DBNull)) - return; - WriteName(name); - - WriteColon(); - - WriteValue(value); - } - - private void WriteArray(IEnumerable array) - { - _output.WriteByte(TOKENS.ARRAY_START); - - bool pendingSeperator = false; - - foreach (object obj in array) - { - if (pendingSeperator) WriteComma(); - - WriteValue(obj); - - pendingSeperator = true; - } - _output.WriteByte(TOKENS.ARRAY_END); - } - - private void WriteStringDictionary(IDictionary dic) - { - _output.WriteByte(TOKENS.DOC_START); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in dic) - { - if (pendingSeparator) WriteComma(); - - WritePair((string)entry.Key, entry.Value); - - pendingSeparator = true; - } - _output.WriteByte(TOKENS.DOC_END); - } - - private void WriteDictionary(IDictionary dic) - { - _output.WriteByte(TOKENS.ARRAY_START); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in dic) - { - if (pendingSeparator) WriteComma(); - _output.WriteByte(TOKENS.DOC_START); - WritePair("k", entry.Key); - WriteComma(); - WritePair("v", entry.Value); - _output.WriteByte(TOKENS.DOC_END); - - pendingSeparator = true; - } - _output.WriteByte(TOKENS.ARRAY_END); - } - - private void WriteName(string s) - { - _output.WriteByte(TOKENS.NAME); - byte[] b = Reflection.Instance.utf8.GetBytes(s); - _output.WriteByte((byte)b.Length); - _output.Write(b, 0, b.Length % 256); - } - - private void WriteString(string s) - { - byte[] b = null; - if (_params.UseUnicodeStrings) - { - _output.WriteByte(TOKENS.UNICODE_STRING); - b = Reflection.Instance.unicode.GetBytes(s); - } - else - { - _output.WriteByte(TOKENS.STRING); - b = Reflection.Instance.utf8.GetBytes(s); - } - _output.Write(Helper.GetBytes(b.Length, false), 0, 4); - _output.Write(b, 0, b.Length); - } - } -} +using System; 
+using System.Collections; +using System.Collections.Generic; +#if SILVERLIGHT + +#else +using System.Data; +#endif +using System.Globalization; +using System.IO; +using System.Text; +using fastJSON; +using RaptorDB.Common; +using System.Collections.Specialized; + +namespace fastBinaryJSON +{ + internal sealed class BJSONSerializer : IDisposable + { + private MemoryStream _output = new MemoryStream(); + private MemoryStream _before = new MemoryStream(); + private int _MAX_DEPTH = 20; + int _current_depth = 0; + private Dictionary _globalTypes = new Dictionary(); + private Dictionary _cirobj = new Dictionary(); + private BJSONParameters _params; + + private void Dispose(bool disposing) + { + if (disposing) + { + // dispose managed resources + _output.Close(); + _before.Close(); + } + // free native resources + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + internal BJSONSerializer(BJSONParameters param) + { + _params = param; + _MAX_DEPTH = param.SerializerMaxDepth; + } + + internal byte[] ConvertToBJSON(object obj) + { + WriteValue(obj); + + // add $types + if (_params.UsingGlobalTypes && _globalTypes != null && _globalTypes.Count > 0) + { + byte[] after = _output.ToArray(); + _output = _before; + WriteName("$types"); + WriteColon(); + WriteTypes(_globalTypes); + WriteComma(); + _output.Write(after, 0, after.Length); + + return _output.ToArray(); + } + + return _output.ToArray(); + } + + private void WriteTypes(Dictionary dic) + { + _output.WriteByte(TOKENS.DOC_START); + + bool pendingSeparator = false; + + foreach (var entry in dic) + { + if (pendingSeparator) WriteComma(); + + WritePair(entry.Value.ToString(), entry.Key); + + pendingSeparator = true; + } + _output.WriteByte(TOKENS.DOC_END); + } + + private void WriteValue(object obj) + { + if (obj == null || obj is DBNull) + WriteNull(); + + else if (obj is string) + WriteString((string)obj); + + else if (obj is char) + WriteChar((char)obj); + + else if (obj is Guid) + 
WriteGuid((Guid)obj); + + else if (obj is bool) + WriteBool((bool)obj); + + else if (obj is int) + WriteInt((int)obj); + + else if (obj is uint) + WriteUInt((uint)obj); + + else if (obj is long) + WriteLong((long)obj); + + else if (obj is ulong) + WriteULong((ulong)obj); + + else if (obj is decimal) + WriteDecimal((decimal)obj); + + else if (obj is byte) + WriteByte((byte)obj); + + else if (obj is double) + WriteDouble((double)obj); + + else if (obj is float) + WriteFloat((float)obj); + + else if (obj is short) + WriteShort((short)obj); + + else if (obj is ushort) + WriteUShort((ushort)obj); + + else if (obj is DateTime) + WriteDateTime((DateTime)obj); + + else if (obj is IDictionary && obj.GetType().IsGenericType && obj.GetType().GetGenericArguments()[0] == typeof(string)) + WriteStringDictionary((IDictionary)obj); + + else if (obj is IDictionary) + WriteDictionary((IDictionary)obj); +#if !SILVERLIGHT + else if (obj is DataSet) + WriteDataset((DataSet)obj); + + else if (obj is DataTable) + this.WriteDataTable((DataTable)obj); +#endif + else if (obj is byte[]) + WriteBytes((byte[])obj); + + else if (obj is StringDictionary) + WriteSD((StringDictionary)obj); + + else if (obj is NameValueCollection) + WriteNV((NameValueCollection)obj); + + else if (obj is IEnumerable) + WriteArray((IEnumerable)obj); + + else if (obj is Enum) + WriteEnum((Enum)obj); + + else if (Reflection.Instance.IsTypeRegistered(obj.GetType())) + WriteCustom(obj); + + else + WriteObject(obj); + } + + private void WriteNV(NameValueCollection nameValueCollection) + { + _output.WriteByte(TOKENS.DOC_START); + + bool pendingSeparator = false; + + foreach (string key in nameValueCollection) + { + if (pendingSeparator) _output.WriteByte(TOKENS.COMMA); + + WritePair(key, nameValueCollection[key]); + + pendingSeparator = true; + } + _output.WriteByte(TOKENS.DOC_END); + } + + private void WriteSD(StringDictionary stringDictionary) + { + _output.WriteByte(TOKENS.DOC_START); + + bool pendingSeparator = false; 
+ + foreach (DictionaryEntry entry in stringDictionary) + { + if (pendingSeparator) _output.WriteByte(TOKENS.COMMA); + + WritePair((string)entry.Key, entry.Value); + + pendingSeparator = true; + } + _output.WriteByte(TOKENS.DOC_END); + } + + private void WriteUShort(ushort p) + { + _output.WriteByte(TOKENS.USHORT); + _output.Write(Helper.GetBytes(p, false), 0, 2); + } + + private void WriteShort(short p) + { + _output.WriteByte(TOKENS.SHORT); + _output.Write(Helper.GetBytes(p, false), 0, 2); + } + + private void WriteFloat(float p) + { + _output.WriteByte(TOKENS.FLOAT); + byte[] b = BitConverter.GetBytes(p); + _output.Write(b, 0, b.Length); + } + + private void WriteDouble(double p) + { + _output.WriteByte(TOKENS.DOUBLE); + var b = BitConverter.GetBytes(p); + _output.Write(b, 0, b.Length); + } + + private void WriteByte(byte p) + { + _output.WriteByte(TOKENS.BYTE); + _output.WriteByte(p); + } + + private void WriteDecimal(decimal p) + { + _output.WriteByte(TOKENS.DECIMAL); + var b = decimal.GetBits(p); + foreach (var c in b) + _output.Write(Helper.GetBytes(c, false), 0, 4); + } + + private void WriteULong(ulong p) + { + _output.WriteByte(TOKENS.ULONG); + _output.Write(Helper.GetBytes((long)p, false), 0, 8); + } + + private void WriteUInt(uint p) + { + _output.WriteByte(TOKENS.UINT); + _output.Write(Helper.GetBytes(p, false), 0, 4); + } + + private void WriteLong(long p) + { + _output.WriteByte(TOKENS.LONG); + _output.Write(Helper.GetBytes(p, false), 0, 8); + } + + private void WriteChar(char p) + { + _output.WriteByte(TOKENS.CHAR); + _output.Write(Helper.GetBytes((short)p, false), 0, 2); + } + + private void WriteBytes(byte[] p) + { + _output.WriteByte(TOKENS.BYTEARRAY); + _output.Write(Helper.GetBytes(p.Length, false), 0, 4); + _output.Write(p, 0, p.Length); + } + + private void WriteBool(bool p) + { + if (p) + _output.WriteByte(TOKENS.TRUE); + else + _output.WriteByte(TOKENS.FALSE); + } + + private void WriteNull() + { + _output.WriteByte(TOKENS.NULL); + } + + + 
private void WriteCustom(object obj) + { + Serialize s; + Reflection.Instance._customSerializer.TryGetValue(obj.GetType(), out s); + WriteString(s(obj)); + } + + private void WriteColon() + { + _output.WriteByte(TOKENS.COLON); + } + + private void WriteComma() + { + _output.WriteByte(TOKENS.COMMA); + } + + private void WriteEnum(Enum e) + { + WriteString(e.ToString()); + } + + private void WriteInt(int i) + { + _output.WriteByte(TOKENS.INT); + _output.Write(Helper.GetBytes(i, false), 0, 4); + } + + private void WriteGuid(Guid g) + { + _output.WriteByte(TOKENS.GUID); + _output.Write(g.ToByteArray(), 0, 16); + } + + private void WriteDateTime(DateTime dateTime) + { + DateTime dt = dateTime; + if (_params.UseUTCDateTime) + dt = dateTime.ToUniversalTime(); + + _output.WriteByte(TOKENS.DATETIME); + byte[] b = Helper.GetBytes(dt.Ticks, false); + _output.Write(b, 0, b.Length); + } + +#if !SILVERLIGHT + private DatasetSchema GetSchema(DataTable ds) + { + if (ds == null) return null; + + DatasetSchema m = new DatasetSchema(); + m.Info = new List(); + m.Name = ds.TableName; + + foreach (DataColumn c in ds.Columns) + { + m.Info.Add(ds.TableName); + m.Info.Add(c.ColumnName); + m.Info.Add(c.DataType.ToString()); + } + // FEATURE : serialize relations and constraints here + + return m; + } + + private DatasetSchema GetSchema(DataSet ds) + { + if (ds == null) return null; + + DatasetSchema m = new DatasetSchema(); + m.Info = new List(); + m.Name = ds.DataSetName; + + foreach (DataTable t in ds.Tables) + { + foreach (DataColumn c in t.Columns) + { + m.Info.Add(t.TableName); + m.Info.Add(c.ColumnName); + m.Info.Add(c.DataType.ToString()); + } + } + // FEATURE : serialize relations and constraints here + + return m; + } + + private string GetXmlSchema(DataTable dt) + { + using (var writer = new StringWriter()) + { + dt.WriteXmlSchema(writer); + return dt.ToString(); + } + } + + private void WriteDataset(DataSet ds) + { + _output.WriteByte(TOKENS.DOC_START); + { + 
WritePair("$schema", _params.UseOptimizedDatasetSchema ? (object)GetSchema(ds) : ds.GetXmlSchema()); + WriteComma(); + } + bool tablesep = false; + foreach (DataTable table in ds.Tables) + { + if (tablesep) WriteComma(); + tablesep = true; + WriteDataTableData(table); + } + // end dataset + _output.WriteByte(TOKENS.DOC_END); + } + + private void WriteDataTableData(DataTable table) + { + WriteName(table.TableName); + WriteColon(); + _output.WriteByte(TOKENS.ARRAY_START); + DataColumnCollection cols = table.Columns; + bool rowseparator = false; + foreach (DataRow row in table.Rows) + { + if (rowseparator) WriteComma(); + rowseparator = true; + _output.WriteByte(TOKENS.ARRAY_START); + + bool pendingSeperator = false; + foreach (DataColumn column in cols) + { + if (pendingSeperator) WriteComma(); + WriteValue(row[column]); + pendingSeperator = true; + } + _output.WriteByte(TOKENS.ARRAY_END); + } + + _output.WriteByte(TOKENS.ARRAY_END); + } + + void WriteDataTable(DataTable dt) + { + _output.WriteByte(TOKENS.DOC_START); + //if (this.useExtension) + { + this.WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)this.GetSchema(dt) : this.GetXmlSchema(dt)); + WriteComma(); + } + + WriteDataTableData(dt); + + // end datatable + _output.WriteByte(TOKENS.DOC_END); + } +#endif + bool _TypesWritten = false; + + private void WriteObject(object obj) + { + int i = 0; + if (_cirobj.TryGetValue(obj, out i) == false) + _cirobj.Add(obj, _cirobj.Count + 1); + else + { + if (_current_depth > 0) + { + //_circular = true; + _output.WriteByte(TOKENS.DOC_START); + WriteName("$i"); + WriteColon(); + WriteValue(i); + _output.WriteByte(TOKENS.DOC_END); + return; + } + } + if (_params.UsingGlobalTypes == false) + _output.WriteByte(TOKENS.DOC_START); + else + { + if (_TypesWritten == false) + { + _output.WriteByte(TOKENS.DOC_START); + _before = _output; + _output = new MemoryStream(); + } + else + _output.WriteByte(TOKENS.DOC_START); + + } + _TypesWritten = true; + _current_depth++; + if (_current_depth > _MAX_DEPTH) + throw new Exception("Serializer encountered maximum depth of " + _MAX_DEPTH); + + Type t = obj.GetType(); + bool append = false; + if (_params.UseExtensions) + { + if (_params.UsingGlobalTypes == false) + WritePairFast("$type", Reflection.Instance.GetTypeAssemblyName(t)); + else + { + int dt = 0; + string ct = Reflection.Instance.GetTypeAssemblyName(t); + if (_globalTypes.TryGetValue(ct, out dt) == false) + { + dt = _globalTypes.Count + 1; + _globalTypes.Add(ct, dt); + } + WritePairFast("$type", dt.ToString()); + } + append = true; + } + + Getters[] g = Reflection.Instance.GetGetters(t, _params.ShowReadOnlyProperties, _params.IgnoreAttributes); + int c = g.Length; + for (int ii = 0; ii < c; ii++) + { + var p = g[ii]; + var o = p.Getter(obj); + if (_params.SerializeNulls == false && (o == null || o is DBNull)) + { + + } + else + { + if (append) + WriteComma(); + WritePair(p.Name, o); + append = true; + } + } + _output.WriteByte(TOKENS.DOC_END); + _current_depth--; + } + + private void WritePairFast(string name, string value) + { + if ( _params.SerializeNulls == false && 
(value == null)) + return; + WriteName(name); + + WriteColon(); + + WriteString(value); + } + + private void WritePair(string name, object value) + { + if (_params.SerializeNulls == false && (value == null || value is DBNull)) + return; + WriteName(name); + + WriteColon(); + + WriteValue(value); + } + + private void WriteArray(IEnumerable array) + { + _output.WriteByte(TOKENS.ARRAY_START); + + bool pendingSeperator = false; + + foreach (object obj in array) + { + if (pendingSeperator) WriteComma(); + + WriteValue(obj); + + pendingSeperator = true; + } + _output.WriteByte(TOKENS.ARRAY_END); + } + + private void WriteStringDictionary(IDictionary dic) + { + _output.WriteByte(TOKENS.DOC_START); + + bool pendingSeparator = false; + + foreach (DictionaryEntry entry in dic) + { + if (pendingSeparator) WriteComma(); + + WritePair((string)entry.Key, entry.Value); + + pendingSeparator = true; + } + _output.WriteByte(TOKENS.DOC_END); + } + + private void WriteDictionary(IDictionary dic) + { + _output.WriteByte(TOKENS.ARRAY_START); + + bool pendingSeparator = false; + + foreach (DictionaryEntry entry in dic) + { + if (pendingSeparator) WriteComma(); + _output.WriteByte(TOKENS.DOC_START); + WritePair("k", entry.Key); + WriteComma(); + WritePair("v", entry.Value); + _output.WriteByte(TOKENS.DOC_END); + + pendingSeparator = true; + } + _output.WriteByte(TOKENS.ARRAY_END); + } + + private void WriteName(string s) + { + _output.WriteByte(TOKENS.NAME); + byte[] b = Reflection.Instance.utf8.GetBytes(s); + _output.WriteByte((byte)b.Length); + _output.Write(b, 0, b.Length % 256); + } + + private void WriteString(string s) + { + byte[] b = null; + if (_params.UseUnicodeStrings) + { + _output.WriteByte(TOKENS.UNICODE_STRING); + b = Reflection.Instance.unicode.GetBytes(s); + } + else + { + _output.WriteByte(TOKENS.STRING); + b = Reflection.Instance.utf8.GetBytes(s); + } + _output.Write(Helper.GetBytes(b.Length, false), 0, 4); + _output.Write(b, 0, b.Length); + } + } +} diff --git 
a/RaptorDB.Common/fastBinaryJSON/dynamic.cs b/RaptorDB.Common/fastBinaryJSON/dynamic.cs index 1d13b83..9c9ee5e 100644 --- a/RaptorDB.Common/fastBinaryJSON/dynamic.cs +++ b/RaptorDB.Common/fastBinaryJSON/dynamic.cs @@ -1,67 +1,74 @@ -#if net4 -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Dynamic; - -namespace fastBinaryJSON -{ - internal class DynamicJson : DynamicObject - { - private IDictionary _dictionary { get; set; } - private List _list { get; set; } - - public DynamicJson(byte[] json) - { - var parse = fastBinaryJSON.BJSON.Parse(json); - - if (parse is IDictionary) - _dictionary = (IDictionary)parse; - else - _list = (List)parse; - } - - private DynamicJson(object dictionary) - { - if (dictionary is IDictionary) - _dictionary = (IDictionary)dictionary; - } - - public override bool TryGetIndex(GetIndexBinder binder, Object[] indexes, out Object result) - { - int index = (int)indexes[0]; - result = _list[index]; - if (result is IDictionary) - result = new DynamicJson(result as IDictionary); - return true; - } - - public override bool TryGetMember(GetMemberBinder binder, out object result) - { - if (_dictionary.TryGetValue(binder.Name, out result) == false) - if (_dictionary.TryGetValue(binder.Name.ToLower(), out result) == false) - return false;// throw new Exception("property not found " + binder.Name); - - if (result is IDictionary) - { - result = new DynamicJson(result as IDictionary); - } - else if (result is List) - { - List list = new List(); - foreach (object item in (List)result) - { - if (item is IDictionary) - list.Add(new DynamicJson(item as IDictionary)); - else - list.Add(item); - } - result = list; - } - - return _dictionary.ContainsKey(binder.Name); - } - } -} +#if net4 +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Dynamic; + +namespace fastBinaryJSON +{ + internal class DynamicJson : DynamicObject + { + private IDictionary 
_dictionary { get; set; } + private List _list { get; set; } + + public DynamicJson(byte[] json) + { + var parse = fastBinaryJSON.BJSON.Parse(json); + + if (parse is IDictionary) + _dictionary = (IDictionary)parse; + else + _list = (List)parse; + } + + private DynamicJson(object dictionary) + { + if (dictionary is IDictionary) + _dictionary = (IDictionary)dictionary; + } + + public override bool TryGetIndex(GetIndexBinder binder, Object[] indexes, out Object result) + { + var index = indexes[0]; + if (index is int) + { + result = _list[(int)index]; + } + else + { + result = _dictionary[(string)index]; + } + if (result is IDictionary) + result = new DynamicJson(result as IDictionary); + return true; + } + + public override bool TryGetMember(GetMemberBinder binder, out object result) + { + if (_dictionary.TryGetValue(binder.Name, out result) == false) + if (_dictionary.TryGetValue(binder.Name.ToLower(), out result) == false) + return false;// throw new Exception("property not found " + binder.Name); + + if (result is IDictionary) + { + result = new DynamicJson(result as IDictionary); + } + else if (result is List) + { + List list = new List(); + foreach (object item in (List)result) + { + if (item is IDictionary) + list.Add(new DynamicJson(item as IDictionary)); + else + list.Add(item); + } + result = list; + } + + return _dictionary.ContainsKey(binder.Name); + } + } +} #endif \ No newline at end of file diff --git a/RaptorDB.Common/fastJSON/Formatter.cs b/RaptorDB.Common/fastJSON/Formatter.cs index c20c30c..ac5c390 100644 --- a/RaptorDB.Common/fastJSON/Formatter.cs +++ b/RaptorDB.Common/fastJSON/Formatter.cs @@ -1,74 +1,74 @@ -using System.Collections.Generic; -using System.Text; - -namespace fastJSON -{ - internal static class Formatter - { - public static string Indent = " "; - - public static void AppendIndent(StringBuilder sb, int count) - { - for (; count > 0; --count) sb.Append(Indent); - } - - public static string PrettyPrint(string input) - { - var output = 
new StringBuilder(); - int depth = 0; - int len = input.Length; - char[] chars = input.ToCharArray(); - for (int i = 0; i < len; ++i) - { - char ch = chars[i]; - - if (ch == '\"') // found string span - { - bool str = true; - while (str) - { - output.Append(ch); - ch = chars[++i]; - if (ch == '\\') - { - output.Append(ch); - ch = chars[++i]; - } - else if (ch == '\"') - str = false; - } - } - - switch (ch) - { - case '{': - case '[': - output.Append(ch); - output.AppendLine(); - AppendIndent(output, ++depth); - break; - case '}': - case ']': - output.AppendLine(); - AppendIndent(output, --depth); - output.Append(ch); - break; - case ',': - output.Append(ch); - output.AppendLine(); - AppendIndent(output, depth); - break; - case ':': - output.Append(" : "); - break; - default: - if (!char.IsWhiteSpace(ch)) - output.Append(ch); - break; - } - } - - return output.ToString(); - } - } +using System.Collections.Generic; +using System.Text; + +namespace fastJSON +{ + internal static class Formatter + { + public static string Indent = " "; + + public static void AppendIndent(StringBuilder sb, int count) + { + for (; count > 0; --count) sb.Append(Indent); + } + + public static string PrettyPrint(string input) + { + var output = new StringBuilder(); + int depth = 0; + int len = input.Length; + char[] chars = input.ToCharArray(); + for (int i = 0; i < len; ++i) + { + char ch = chars[i]; + + if (ch == '\"') // found string span + { + bool str = true; + while (str) + { + output.Append(ch); + ch = chars[++i]; + if (ch == '\\') + { + output.Append(ch); + ch = chars[++i]; + } + else if (ch == '\"') + str = false; + } + } + + switch (ch) + { + case '{': + case '[': + output.Append(ch); + output.AppendLine(); + AppendIndent(output, ++depth); + break; + case '}': + case ']': + output.AppendLine(); + AppendIndent(output, --depth); + output.Append(ch); + break; + case ',': + output.Append(ch); + output.AppendLine(); + AppendIndent(output, depth); + break; + case ':': + output.Append(" : 
"); + break; + default: + if (!char.IsWhiteSpace(ch)) + output.Append(ch); + break; + } + } + + return output.ToString(); + } + } } \ No newline at end of file diff --git a/RaptorDB.Common/fastJSON/Getters.cs b/RaptorDB.Common/fastJSON/Getters.cs index aba9e10..4901775 100644 --- a/RaptorDB.Common/fastJSON/Getters.cs +++ b/RaptorDB.Common/fastJSON/Getters.cs @@ -1,11 +1,11 @@ -using System; -using System.Collections.Generic; - -namespace fastJSON -{ - public sealed class DatasetSchema - { - public List Info ;//{ get; set; } - public string Name ;//{ get; set; } - } -} +using System; +using System.Collections.Generic; + +namespace fastJSON +{ + public sealed class DatasetSchema + { + public List Info ;//{ get; set; } + public string Name ;//{ get; set; } + } +} diff --git a/RaptorDB.Common/fastJSON/JSON.cs b/RaptorDB.Common/fastJSON/JSON.cs index 32d2149..10aaefb 100644 --- a/RaptorDB.Common/fastJSON/JSON.cs +++ b/RaptorDB.Common/fastJSON/JSON.cs @@ -1,1007 +1,1007 @@ -using System; -using System.Collections; -using System.Collections.Generic; -#if !SILVERLIGHT -using System.Data; -#endif -using System.Globalization; -using System.IO; -using System.Reflection; -using System.Reflection.Emit; -using RaptorDB.Common; -using System.Collections.Specialized; - -namespace fastJSON -{ - public delegate string Serialize(object data); - public delegate object Deserialize(string data); - - public sealed class JSONParameters - { - /// - /// Use the optimized fast Dataset Schema format (default = True) - /// - public bool UseOptimizedDatasetSchema = true; - /// - /// Use the fast GUID format (default = True) - /// - public bool UseFastGuid = true; - /// - /// Serialize null values to the output (default = True) - /// - public bool SerializeNullValues = true; - /// - /// Use the UTC date format (default = True) - /// - public bool UseUTCDateTime = true; - /// - /// Show the readonly properties of types in the output (default = False) - /// - public bool ShowReadOnlyProperties = 
false; - /// - /// Use the $types extension to optimise the output json (default = True) - /// - public bool UsingGlobalTypes = true; - /// - /// Ignore case when processing json and deserializing - /// - [Obsolete("Not needed anymore and will always match")] - public bool IgnoreCaseOnDeserialize = false; - /// - /// Anonymous types have read only properties - /// - public bool EnableAnonymousTypes = false; - /// - /// Enable fastJSON extensions $types, $type, $map (default = True) - /// - public bool UseExtensions = true; - /// - /// Use escaped unicode i.e. \uXXXX format for non ASCII characters (default = True) - /// - public bool UseEscapedUnicode = true; - /// - /// Output string key dictionaries as "k"/"v" format (default = False) - /// - public bool KVStyleStringDictionary = false; - /// - /// Output Enum values instead of names (default = False) - /// - public bool UseValuesOfEnums = false; - /// - /// Ignore attributes to check for (default : XmlIgnoreAttribute) - /// - public List IgnoreAttributes = new List { typeof(System.Xml.Serialization.XmlIgnoreAttribute) }; - /// - /// If you have parametric and no default constructor for you classes (default = False) - /// - /// IMPORTANT NOTE : If True then all initial values within the class will be ignored and will be not set - /// - public bool ParametricConstructorOverride = false; - /// - /// Serialize DateTime milliseconds i.e. 
yyyy-MM-dd HH:mm:ss.nnn (default = false) - /// - public bool DateTimeMilliseconds = false; - /// - /// Maximum depth for circular references in inline mode (default = 20) - /// - public byte SerializerMaxDepth = 20; - /// - /// Inline circular or already seen objects instead of replacement with $i (default = False) - /// - public bool InlineCircularReferences = false; - /// - /// Save property/field names as lowercase (default = false) - /// - public bool SerializeToLowerCaseNames = false; - - public void FixValues() - { - if (UseExtensions == false) // disable conflicting params - { - UsingGlobalTypes = false; - InlineCircularReferences = true; - } - if (EnableAnonymousTypes) - ShowReadOnlyProperties = true; - } - } - - public static class JSON - { - /// - /// Globally set-able parameters for controlling the serializer - /// - public static JSONParameters Parameters = new JSONParameters(); - /// - /// Create a formatted json string (beautified) from an object - /// - /// - /// - /// - public static string ToNiceJSON(object obj, JSONParameters param) - { - string s = ToJSON(obj, param); - - return Beautify(s); - } - /// - /// Create a json representation for an object - /// - /// - /// - public static string ToJSON(object obj) - { - return ToJSON(obj, JSON.Parameters); - } - /// - /// Create a json representation for an object with parameter override on this call - /// - /// - /// - /// - public static string ToJSON(object obj, JSONParameters param) - { - param.FixValues(); - Type t = null; - - if (obj == null) - return "null"; - - if (obj.GetType().IsGenericType) - t = Reflection.Instance.GetGenericTypeDefinition(obj.GetType()); - if (t == typeof(Dictionary<,>) || t == typeof(List<>)) - param.UsingGlobalTypes = false; - - // FEATURE : enable extensions when you can deserialize anon types - if (param.EnableAnonymousTypes) { param.UseExtensions = false; param.UsingGlobalTypes = false; } - return new JSONSerializer(param).ConvertToJSON(obj); - } - /// - /// Parse a 
json string and generate a Dictionary<string,object> or List<object> structure - /// - /// - /// - public static object Parse(string json) - { - return new JsonParser(json).Decode(); - } -#if net4 - /// - /// Create a .net4 dynamic object from the json string - /// - /// - /// - public static dynamic ToDynamic(string json) - { - return new DynamicJson(json); - } -#endif - /// - /// Create a typed generic object from the json - /// - /// - /// - /// - public static T ToObject(string json) - { - return new deserializer(Parameters).ToObject(json); - } - /// - /// Create a typed generic object from the json with parameter override on this call - /// - /// - /// - /// - /// - public static T ToObject(string json, JSONParameters param) - { - return new deserializer(param).ToObject(json); - } - /// - /// Create an object from the json - /// - /// - /// - public static object ToObject(string json) - { - return new deserializer(Parameters).ToObject(json, null); - } - /// - /// Create an object from the json with parameter override on this call - /// - /// - /// - /// - public static object ToObject(string json, JSONParameters param) - { - return new deserializer(param).ToObject(json, null); - } - /// - /// Create an object of type from the json - /// - /// - /// - /// - public static object ToObject(string json, Type type) - { - return new deserializer(Parameters).ToObject(json, type); - } - /// - /// Fill a given object with the json represenation - /// - /// - /// - /// - public static object FillObject(object input, string json) - { - Dictionary ht = new JsonParser(json).Decode() as Dictionary; - if (ht == null) return null; - return new deserializer(Parameters).ParseDictionary(ht, null, input.GetType(), input); - } - /// - /// Deep copy an object i.e. 
clone to a new object - /// - /// - /// - public static object DeepCopy(object obj) - { - return new deserializer(Parameters).ToObject(ToJSON(obj)); - } - /// - /// - /// - /// - /// - /// - public static T DeepCopy(T obj) - { - return new deserializer(Parameters).ToObject(ToJSON(obj)); - } - - /// - /// Create a human readable string from the json - /// - /// - /// - public static string Beautify(string input) - { - return Formatter.PrettyPrint(input); - } - /// - /// Register custom type handlers for your own types not natively handled by fastJSON - /// - /// - /// - /// - public static void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer) - { - Reflection.Instance.RegisterCustomType(type, serializer, deserializer); - } - /// - /// Clear the internal reflection cache so you can start from new (you will loose performance) - /// - public static void ClearReflectionCache() - { - Reflection.Instance.ClearReflectionCache(); - } - - internal static long CreateLong(out long num, string s, int index, int count) - { - num = 0; - bool neg = false; - for (int x = 0; x < count; x++, index++) - { - char cc = s[index]; - - if (cc == '-') - neg = true; - else if (cc == '+') - neg = false; - else - { - num *= 10; - num += (int)(cc - '0'); - } - } - if (neg) num = -num; - - return num; - } - } - - internal class deserializer - { - public deserializer(JSONParameters param) - { - _params = param; - } - - private JSONParameters _params; - private bool _usingglobals = false; - private Dictionary _circobj = new Dictionary(); - private Dictionary _cirrev = new Dictionary(); - - public T ToObject(string json) - { - Type t = typeof(T); - var o = ToObject(json, t); - - if (t.IsArray) - { - if ((o as ICollection).Count == 0) // edge case for "[]" -> T[] - { - Type tt = t.GetElementType(); - object oo = Array.CreateInstance(tt, 0); - return (T)oo; - } - else - return (T)o; - } - else - return (T)o; - } - - public object ToObject(string json) - { - return 
ToObject(json, null); - } - - public object ToObject(string json, Type type) - { - //_params = Parameters; - _params.FixValues(); - Type t = null; - if (type != null && type.IsGenericType) - t = Reflection.Instance.GetGenericTypeDefinition(type); - if (t == typeof(Dictionary<,>) || t == typeof(List<>)) - _params.UsingGlobalTypes = false; - _usingglobals = _params.UsingGlobalTypes; - - object o = new JsonParser(json).Decode(); - if (o == null) - return null; -#if !SILVERLIGHT - if (type != null && type == typeof(DataSet)) - return CreateDataset(o as Dictionary, null); - else if (type != null && type == typeof(DataTable)) - return CreateDataTable(o as Dictionary, null); -#endif - if (o is IDictionary) - { - if (type != null && t == typeof(Dictionary<,>)) // deserialize a dictionary - return RootDictionary(o, type); - else // deserialize an object - return ParseDictionary(o as Dictionary, null, type, null); - } - else if (o is List) - { - if (type != null && t == typeof(Dictionary<,>)) // kv format - return RootDictionary(o, type); - else if (type != null && t == typeof(List<>)) // deserialize to generic list - return RootList(o, type); - else if (type == typeof(Hashtable)) - return RootHashTable((List)o); - else - return (o as List).ToArray(); - } - else if (type != null && o.GetType() != type) - return ChangeType(o, type); - - return o; - } - - #region [ p r i v a t e m e t h o d s ] - private object RootHashTable(List o) - { - Hashtable h = new Hashtable(); - - foreach (Dictionary values in o) - { - object key = values["k"]; - object val = values["v"]; - if (key is Dictionary) - key = ParseDictionary((Dictionary)key, null, typeof(object), null); - - if (val is Dictionary) - val = ParseDictionary((Dictionary)val, null, typeof(object), null); - - h.Add(key, val); - } - - return h; - } - - private object ChangeType(object value, Type conversionType) - { - if (conversionType == typeof(int)) - return (int)((long)value); - - else if (conversionType == typeof(long)) - 
return (long)value; - - else if (conversionType == typeof(string)) - return (string)value; - - else if (conversionType.IsEnum) - return CreateEnum(conversionType, value); - - else if (conversionType == typeof(DateTime)) - return CreateDateTime((string)value); - - else if (Reflection.Instance.IsTypeRegistered(conversionType)) - return Reflection.Instance.CreateCustom((string)value, conversionType); - - // 8-30-2014 - James Brooks - Added code for nullable types. - if (IsNullable(conversionType)) - { - if (value == null) - { - return value; - } - conversionType = UnderlyingTypeOf(conversionType); - } - - // 8-30-2014 - James Brooks - Nullable Guid is a special case so it was moved after the "IsNullable" check. - if (conversionType == typeof(Guid)) - return CreateGuid((string)value); - - return Convert.ChangeType(value, conversionType, CultureInfo.InvariantCulture); - } - - private bool IsNullable(Type t) - { - if (!t.IsGenericType) return false; - Type g = t.GetGenericTypeDefinition(); - return (g.Equals(typeof(Nullable<>))); - } - - private Type UnderlyingTypeOf(Type t) - { - return t.GetGenericArguments()[0]; - } - - private object RootList(object parse, Type type) - { - Type[] gtypes = Reflection.Instance.GetGenericArguments(type); - IList o = (IList)Reflection.Instance.FastCreateInstance(type); - foreach (var k in (IList)parse) - { - _usingglobals = false; - object v = k; - if (k is Dictionary) - v = ParseDictionary(k as Dictionary, null, gtypes[0], null); - else - v = ChangeType(k, gtypes[0]); - - o.Add(v); - } - return o; - } - - private object RootDictionary(object parse, Type type) - { - Type[] gtypes = Reflection.Instance.GetGenericArguments(type); - Type t1 = null; - Type t2 = null; - if (gtypes != null) - { - t1 = gtypes[0]; - t2 = gtypes[1]; - } - if (parse is Dictionary) - { - IDictionary o = (IDictionary)Reflection.Instance.FastCreateInstance(type); - - foreach (var kv in (Dictionary)parse) - { - object v; - object k = ChangeType(kv.Key, t1); - - if 
(kv.Value is Dictionary) - v = ParseDictionary(kv.Value as Dictionary, null, t2, null); - - else if (t2.IsArray) - v = CreateArray((List)kv.Value, t2, t2.GetElementType(), null); - - else if (kv.Value is IList) - v = CreateGenericList((List)kv.Value, t2, t1, null); - - else - v = ChangeType(kv.Value, t2); - - o.Add(k, v); - } - - return o; - } - if (parse is List) - return CreateDictionary(parse as List, type, gtypes, null); - - return null; - } - - internal object ParseDictionary(Dictionary d, Dictionary globaltypes, Type type, object input) - { - object tn = ""; - if (type == typeof(NameValueCollection)) - return CreateNV(d); - if (type == typeof(StringDictionary)) - return CreateSD(d); - - if (d.TryGetValue("$i", out tn)) - { - object v = null; - _cirrev.TryGetValue((int)(long)tn, out v); - return v; - } - - if (d.TryGetValue("$types", out tn)) - { - _usingglobals = true; - globaltypes = new Dictionary(); - foreach (var kv in (Dictionary)tn) - { - globaltypes.Add((string)kv.Value, kv.Key); - } - } - - bool found = d.TryGetValue("$type", out tn); -#if !SILVERLIGHT - if (found == false && type == typeof(System.Object)) - { - return d; // CreateDataset(d, globaltypes); - } -#endif - if (found) - { - if (_usingglobals) - { - object tname = ""; - if (globaltypes != null && globaltypes.TryGetValue((string)tn, out tname)) - tn = tname; - } - type = Reflection.Instance.GetTypeFromCache((string)tn); - } - - if (type == null) - throw new Exception("Cannot determine type"); - - string typename = type.FullName; - object o = input; - if (o == null) - { - if (_params.ParametricConstructorOverride) - o = System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); - else - o = Reflection.Instance.FastCreateInstance(type); - } - int circount = 0; - if (_circobj.TryGetValue(o, out circount) == false) - { - circount = _circobj.Count + 1; - _circobj.Add(o, circount); - _cirrev.Add(circount, o); - } - - Dictionary props = Reflection.Instance.Getproperties(type, 
typename, Reflection.Instance.IsTypeRegistered(type)); - foreach (var kv in d) - { - var n = kv.Key; - var v = kv.Value; - string name = n.ToLower(); - if (name == "$map") - { - ProcessMap(o, props, (Dictionary)d[name]); - continue; - } - myPropInfo pi; - if (props.TryGetValue(name, out pi) == false) - continue; - if (pi.CanWrite) - { - //object v = d[n]; - - if (v != null) - { - object oset = null; - - switch (pi.Type) - { - case myPropInfoType.Int: oset = (int)((long)v); break; - case myPropInfoType.Long: oset = (long)v; break; - case myPropInfoType.String: oset = (string)v; break; - case myPropInfoType.Bool: oset = (bool)v; break; - case myPropInfoType.DateTime: oset = CreateDateTime((string)v); break; - case myPropInfoType.Enum: oset = CreateEnum(pi.pt, v); break; - case myPropInfoType.Guid: oset = CreateGuid((string)v); break; - - case myPropInfoType.Array: - if (!pi.IsValueType) - oset = CreateArray((List)v, pi.pt, pi.bt, globaltypes); - // what about 'else'? - break; - case myPropInfoType.ByteArray: oset = Convert.FromBase64String((string)v); break; -#if !SILVERLIGHT - case myPropInfoType.DataSet: oset = CreateDataset((Dictionary)v, globaltypes); break; - case myPropInfoType.DataTable: oset = CreateDataTable((Dictionary)v, globaltypes); break; - case myPropInfoType.Hashtable: // same case as Dictionary -#endif - case myPropInfoType.Dictionary: oset = CreateDictionary((List)v, pi.pt, pi.GenericTypes, globaltypes); break; - case myPropInfoType.StringKeyDictionary: oset = CreateStringKeyDictionary((Dictionary)v, pi.pt, pi.GenericTypes, globaltypes); break; - case myPropInfoType.NameValue: oset = CreateNV((Dictionary)v); break; - case myPropInfoType.StringDictionary: oset = CreateSD((Dictionary)v); break; - case myPropInfoType.Custom: oset = Reflection.Instance.CreateCustom((string)v, pi.pt); break; - default: - { - if (pi.IsGenericType && pi.IsValueType == false && v is List) - oset = CreateGenericList((List)v, pi.pt, pi.bt, globaltypes); - - else if 
((pi.IsClass || pi.IsStruct) && v is Dictionary) - oset = ParseDictionary((Dictionary)v, globaltypes, pi.pt, pi.getter(o)); - - else if (v is List) - oset = CreateArray((List)v, pi.pt, typeof(object), globaltypes); - - else if (pi.IsValueType) - oset = ChangeType(v, pi.changeType); - - else - oset = v; - } - break; - } - - o = pi.setter(o, oset); - } - } - } - return o; - } - - private StringDictionary CreateSD(Dictionary d) - { - StringDictionary nv = new StringDictionary(); - - foreach (var o in d) - nv.Add(o.Key, (string)o.Value); - - return nv; - } - - private NameValueCollection CreateNV(Dictionary d) - { - NameValueCollection nv = new NameValueCollection(); - - foreach (var o in d) - nv.Add(o.Key, (string)o.Value); - - return nv; - } - - private void ProcessMap(object obj, Dictionary props, Dictionary dic) - { - foreach (KeyValuePair kv in dic) - { - myPropInfo p = props[kv.Key]; - object o = p.getter(obj); - Type t = Type.GetType((string)kv.Value); - if (t == typeof(Guid)) - p.setter(obj, CreateGuid((string)o)); - } - } - - private int CreateInteger(string s, int index, int count) - { - int num = 0; - bool neg = false; - for (int x = 0; x < count; x++, index++) - { - char cc = s[index]; - - if (cc == '-') - neg = true; - else if (cc == '+') - neg = false; - else - { - num *= 10; - num += (int)(cc - '0'); - } - } - if (neg) num = -num; - - return num; - } - - private object CreateEnum(Type pt, object v) - { - // FEATURE : optimize create enum -#if !SILVERLIGHT - return Enum.Parse(pt, v.ToString()); -#else - return Enum.Parse(pt, v, true); -#endif - } - - private Guid CreateGuid(string s) - { - if (s.Length > 30) - return new Guid(s); - else - return new Guid(Convert.FromBase64String(s)); - } - - private DateTime CreateDateTime(string value) - { - bool utc = false; - // 0123456789012345678 9012 9/3 - // datetime format = yyyy-MM-ddTHH:mm:ss .nnn Z - int year; - int month; - int day; - int hour; - int min; - int sec; - int ms = 0; - - year = 
CreateInteger(value, 0, 4); - month = CreateInteger(value, 5, 2); - day = CreateInteger(value, 8, 2); - hour = CreateInteger(value, 11, 2); - min = CreateInteger(value, 14, 2); - sec = CreateInteger(value, 17, 2); - if (value.Length > 21 && value[19] == '.') - ms = CreateInteger(value, 20, 3); - - if (value[value.Length - 1] == 'Z') - utc = true; - - if (_params.UseUTCDateTime == false && utc == false) - return new DateTime(year, month, day, hour, min, sec, ms); - else - return new DateTime(year, month, day, hour, min, sec, ms, DateTimeKind.Utc).ToLocalTime(); - } - - private object CreateArray(List data, Type pt, Type bt, Dictionary globalTypes) - { - Array col = Array.CreateInstance(bt, data.Count); - // create an array of objects - for (int i = 0; i < data.Count; i++) - { - object ob = data[i]; - if (ob == null) - { - continue; - } - if (ob is IDictionary) - col.SetValue(ParseDictionary((Dictionary)ob, globalTypes, bt, null), i); - else if (ob is ICollection) - col.SetValue(CreateArray((List)ob, bt, bt.GetElementType(), globalTypes), i); - else - col.SetValue(ChangeType(ob, bt), i); - } - - return col; - } - - - private object CreateGenericList(List data, Type pt, Type bt, Dictionary globalTypes) - { - IList col = (IList)Reflection.Instance.FastCreateInstance(pt); - // create an array of objects - foreach (object ob in data) - { - if (ob is IDictionary) - col.Add(ParseDictionary((Dictionary)ob, globalTypes, bt, null)); - - else if (ob is List) - { - if (bt.IsGenericType) - col.Add((List)ob);//).ToArray()); - else - col.Add(((List)ob).ToArray()); - } - else - col.Add(ChangeType(ob, bt)); - } - return col; - } - - private object CreateStringKeyDictionary(Dictionary reader, Type pt, Type[] types, Dictionary globalTypes) - { - var col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); - Type t1 = null; - Type t2 = null; - if (types != null) - { - t1 = types[0]; - t2 = types[1]; - } - - foreach (KeyValuePair values in reader) - { - var key = values.Key; - 
object val = null; - - if (values.Value is Dictionary) - val = ParseDictionary((Dictionary)values.Value, globalTypes, t2, null); - - else if (types != null && t2.IsArray) - { - if (values.Value is Array) - val = values.Value; - else - val = CreateArray((List)values.Value, t2, t2.GetElementType(), globalTypes); - } - else if (values.Value is IList) - val = CreateGenericList((List)values.Value, t2, t1, globalTypes); - - else - val = ChangeType(values.Value, t2); - - col.Add(key, val); - } - - return col; - } - - private object CreateDictionary(List reader, Type pt, Type[] types, Dictionary globalTypes) - { - IDictionary col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); - Type t1 = null; - Type t2 = null; - if (types != null) - { - t1 = types[0]; - t2 = types[1]; - } - - foreach (Dictionary values in reader) - { - object key = values["k"]; - object val = values["v"]; - - if (key is Dictionary) - key = ParseDictionary((Dictionary)key, globalTypes, t1, null); - else - key = ChangeType(key, t1); - - if (val is Dictionary) - val = ParseDictionary((Dictionary)val, globalTypes, t2, null); - else - val = ChangeType(val, t2); - - col.Add(key, val); - } - - return col; - } - -#if !SILVERLIGHT - private DataSet CreateDataset(Dictionary reader, Dictionary globalTypes) - { - DataSet ds = new DataSet(); - ds.EnforceConstraints = false; - ds.BeginInit(); - - // read dataset schema here - var schema = reader["$schema"]; - - if (schema is string) - { - TextReader tr = new StringReader((string)schema); - ds.ReadXmlSchema(tr); - } - else - { - DatasetSchema ms = (DatasetSchema)ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); - ds.DataSetName = ms.Name; - for (int i = 0; i < ms.Info.Count; i += 3) - { - if (ds.Tables.Contains(ms.Info[i]) == false) - ds.Tables.Add(ms.Info[i]); - ds.Tables[ms.Info[i]].Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); - } - } - - foreach (KeyValuePair pair in reader) - { - if (pair.Key == "$type" || 
pair.Key == "$schema") continue; - - List rows = (List)pair.Value; - if (rows == null) continue; - - DataTable dt = ds.Tables[pair.Key]; - ReadDataTable(rows, dt); - } - - ds.EndInit(); - - return ds; - } - - private void ReadDataTable(List rows, DataTable dt) - { - dt.BeginInit(); - dt.BeginLoadData(); - List guidcols = new List(); - List datecol = new List(); - - foreach (DataColumn c in dt.Columns) - { - if (c.DataType == typeof(Guid) || c.DataType == typeof(Guid?)) - guidcols.Add(c.Ordinal); - if (_params.UseUTCDateTime && (c.DataType == typeof(DateTime) || c.DataType == typeof(DateTime?))) - datecol.Add(c.Ordinal); - } - - foreach (List row in rows) - { - object[] v = new object[row.Count]; - row.CopyTo(v, 0); - foreach (int i in guidcols) - { - string s = (string)v[i]; - if (s != null && s.Length < 36) - v[i] = new Guid(Convert.FromBase64String(s)); - } - if (_params.UseUTCDateTime) - { - foreach (int i in datecol) - { - string s = (string)v[i]; - if (s != null) - v[i] = CreateDateTime(s); - } - } - dt.Rows.Add(v); - } - - dt.EndLoadData(); - dt.EndInit(); - } - - DataTable CreateDataTable(Dictionary reader, Dictionary globalTypes) - { - var dt = new DataTable(); - - // read dataset schema here - var schema = reader["$schema"]; - - if (schema is string) - { - TextReader tr = new StringReader((string)schema); - dt.ReadXmlSchema(tr); - } - else - { - var ms = (DatasetSchema)this.ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); - dt.TableName = ms.Info[0]; - for (int i = 0; i < ms.Info.Count; i += 3) - { - dt.Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); - } - } - - foreach (var pair in reader) - { - if (pair.Key == "$type" || pair.Key == "$schema") - continue; - - var rows = (List)pair.Value; - if (rows == null) - continue; - - if (!dt.TableName.Equals(pair.Key, StringComparison.InvariantCultureIgnoreCase)) - continue; - - ReadDataTable(rows, dt); - } - - return dt; - } -#endif - #endregion - } - +using System; +using 
System.Collections; +using System.Collections.Generic; +#if !SILVERLIGHT +using System.Data; +#endif +using System.Globalization; +using System.IO; +using System.Reflection; +using System.Reflection.Emit; +using RaptorDB.Common; +using System.Collections.Specialized; + +namespace fastJSON +{ + public delegate string Serialize(object data); + public delegate object Deserialize(string data); + + public sealed class JSONParameters + { + /// + /// Use the optimized fast Dataset Schema format (default = True) + /// + public bool UseOptimizedDatasetSchema = true; + /// + /// Use the fast GUID format (default = True) + /// + public bool UseFastGuid = true; + /// + /// Serialize null values to the output (default = True) + /// + public bool SerializeNullValues = true; + /// + /// Use the UTC date format (default = True) + /// + public bool UseUTCDateTime = true; + /// + /// Show the readonly properties of types in the output (default = False) + /// + public bool ShowReadOnlyProperties = false; + /// + /// Use the $types extension to optimise the output json (default = True) + /// + public bool UsingGlobalTypes = true; + /// + /// Ignore case when processing json and deserializing + /// + [Obsolete("Not needed anymore and will always match")] + public bool IgnoreCaseOnDeserialize = false; + /// + /// Anonymous types have read only properties + /// + public bool EnableAnonymousTypes = false; + /// + /// Enable fastJSON extensions $types, $type, $map (default = True) + /// + public bool UseExtensions = true; + /// + /// Use escaped unicode i.e. 
\uXXXX format for non ASCII characters (default = True) + /// + public bool UseEscapedUnicode = true; + /// + /// Output string key dictionaries as "k"/"v" format (default = False) + /// + public bool KVStyleStringDictionary = false; + /// + /// Output Enum values instead of names (default = False) + /// + public bool UseValuesOfEnums = false; + /// + /// Ignore attributes to check for (default : XmlIgnoreAttribute) + /// + public List IgnoreAttributes = new List { typeof(System.Xml.Serialization.XmlIgnoreAttribute) }; + /// + /// If you have parametric and no default constructor for you classes (default = False) + /// + /// IMPORTANT NOTE : If True then all initial values within the class will be ignored and will be not set + /// + public bool ParametricConstructorOverride = false; + /// + /// Serialize DateTime milliseconds i.e. yyyy-MM-dd HH:mm:ss.nnn (default = false) + /// + public bool DateTimeMilliseconds = false; + /// + /// Maximum depth for circular references in inline mode (default = 20) + /// + public byte SerializerMaxDepth = 20; + /// + /// Inline circular or already seen objects instead of replacement with $i (default = False) + /// + public bool InlineCircularReferences = false; + /// + /// Save property/field names as lowercase (default = false) + /// + public bool SerializeToLowerCaseNames = false; + + public void FixValues() + { + if (UseExtensions == false) // disable conflicting params + { + UsingGlobalTypes = false; + InlineCircularReferences = true; + } + if (EnableAnonymousTypes) + ShowReadOnlyProperties = true; + } + } + + public static class JSON + { + /// + /// Globally set-able parameters for controlling the serializer + /// + public static JSONParameters Parameters = new JSONParameters(); + /// + /// Create a formatted json string (beautified) from an object + /// + /// + /// + /// + public static string ToNiceJSON(object obj, JSONParameters param) + { + string s = ToJSON(obj, param); + + return Beautify(s); + } + /// + /// Create a 
json representation for an object + /// + /// + /// + public static string ToJSON(object obj) + { + return ToJSON(obj, JSON.Parameters); + } + /// + /// Create a json representation for an object with parameter override on this call + /// + /// + /// + /// + public static string ToJSON(object obj, JSONParameters param) + { + param.FixValues(); + Type t = null; + + if (obj == null) + return "null"; + + if (obj.GetType().IsGenericType) + t = Reflection.Instance.GetGenericTypeDefinition(obj.GetType()); + if (t == typeof(Dictionary<,>) || t == typeof(List<>)) + param.UsingGlobalTypes = false; + + // FEATURE : enable extensions when you can deserialize anon types + if (param.EnableAnonymousTypes) { param.UseExtensions = false; param.UsingGlobalTypes = false; } + return new JSONSerializer(param).ConvertToJSON(obj); + } + /// + /// Parse a json string and generate a Dictionary<string,object> or List<object> structure + /// + /// + /// + public static object Parse(string json) + { + return new JsonParser(json).Decode(); + } +#if net4 + /// + /// Create a .net4 dynamic object from the json string + /// + /// + /// + public static dynamic ToDynamic(string json) + { + return new DynamicJson(json); + } +#endif + /// + /// Create a typed generic object from the json + /// + /// + /// + /// + public static T ToObject(string json) + { + return new deserializer(Parameters).ToObject(json); + } + /// + /// Create a typed generic object from the json with parameter override on this call + /// + /// + /// + /// + /// + public static T ToObject(string json, JSONParameters param) + { + return new deserializer(param).ToObject(json); + } + /// + /// Create an object from the json + /// + /// + /// + public static object ToObject(string json) + { + return new deserializer(Parameters).ToObject(json, null); + } + /// + /// Create an object from the json with parameter override on this call + /// + /// + /// + /// + public static object ToObject(string json, JSONParameters param) + { + return 
new deserializer(param).ToObject(json, null); + } + /// + /// Create an object of type from the json + /// + /// + /// + /// + public static object ToObject(string json, Type type) + { + return new deserializer(Parameters).ToObject(json, type); + } + /// + /// Fill a given object with the json represenation + /// + /// + /// + /// + public static object FillObject(object input, string json) + { + Dictionary ht = new JsonParser(json).Decode() as Dictionary; + if (ht == null) return null; + return new deserializer(Parameters).ParseDictionary(ht, null, input.GetType(), input); + } + /// + /// Deep copy an object i.e. clone to a new object + /// + /// + /// + public static object DeepCopy(object obj) + { + return new deserializer(Parameters).ToObject(ToJSON(obj)); + } + /// + /// + /// + /// + /// + /// + public static T DeepCopy(T obj) + { + return new deserializer(Parameters).ToObject(ToJSON(obj)); + } + + /// + /// Create a human readable string from the json + /// + /// + /// + public static string Beautify(string input) + { + return Formatter.PrettyPrint(input); + } + /// + /// Register custom type handlers for your own types not natively handled by fastJSON + /// + /// + /// + /// + public static void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer) + { + Reflection.Instance.RegisterCustomType(type, serializer, deserializer); + } + /// + /// Clear the internal reflection cache so you can start from new (you will loose performance) + /// + public static void ClearReflectionCache() + { + Reflection.Instance.ClearReflectionCache(); + } + + internal static long CreateLong(out long num, string s, int index, int count) + { + num = 0; + bool neg = false; + for (int x = 0; x < count; x++, index++) + { + char cc = s[index]; + + if (cc == '-') + neg = true; + else if (cc == '+') + neg = false; + else + { + num *= 10; + num += (int)(cc - '0'); + } + } + if (neg) num = -num; + + return num; + } + } + + internal class deserializer + { + public 
deserializer(JSONParameters param) + { + _params = param; + } + + private JSONParameters _params; + private bool _usingglobals = false; + private Dictionary _circobj = new Dictionary(); + private Dictionary _cirrev = new Dictionary(); + + public T ToObject(string json) + { + Type t = typeof(T); + var o = ToObject(json, t); + + if (t.IsArray) + { + if ((o as ICollection).Count == 0) // edge case for "[]" -> T[] + { + Type tt = t.GetElementType(); + object oo = Array.CreateInstance(tt, 0); + return (T)oo; + } + else + return (T)o; + } + else + return (T)o; + } + + public object ToObject(string json) + { + return ToObject(json, null); + } + + public object ToObject(string json, Type type) + { + //_params = Parameters; + _params.FixValues(); + Type t = null; + if (type != null && type.IsGenericType) + t = Reflection.Instance.GetGenericTypeDefinition(type); + if (t == typeof(Dictionary<,>) || t == typeof(List<>)) + _params.UsingGlobalTypes = false; + _usingglobals = _params.UsingGlobalTypes; + + object o = new JsonParser(json).Decode(); + if (o == null) + return null; +#if !SILVERLIGHT + if (type != null && type == typeof(DataSet)) + return CreateDataset(o as Dictionary, null); + else if (type != null && type == typeof(DataTable)) + return CreateDataTable(o as Dictionary, null); +#endif + if (o is IDictionary) + { + if (type != null && t == typeof(Dictionary<,>)) // deserialize a dictionary + return RootDictionary(o, type); + else // deserialize an object + return ParseDictionary(o as Dictionary, null, type, null); + } + else if (o is List) + { + if (type != null && t == typeof(Dictionary<,>)) // kv format + return RootDictionary(o, type); + else if (type != null && t == typeof(List<>)) // deserialize to generic list + return RootList(o, type); + else if (type == typeof(Hashtable)) + return RootHashTable((List)o); + else + return (o as List).ToArray(); + } + else if (type != null && o.GetType() != type) + return ChangeType(o, type); + + return o; + } + + #region [ p r i 
v a t e m e t h o d s ] + private object RootHashTable(List o) + { + Hashtable h = new Hashtable(); + + foreach (Dictionary values in o) + { + object key = values["k"]; + object val = values["v"]; + if (key is Dictionary) + key = ParseDictionary((Dictionary)key, null, typeof(object), null); + + if (val is Dictionary) + val = ParseDictionary((Dictionary)val, null, typeof(object), null); + + h.Add(key, val); + } + + return h; + } + + private object ChangeType(object value, Type conversionType) + { + if (conversionType == typeof(int)) + return (int)((long)value); + + else if (conversionType == typeof(long)) + return (long)value; + + else if (conversionType == typeof(string)) + return (string)value; + + else if (conversionType.IsEnum) + return CreateEnum(conversionType, value); + + else if (conversionType == typeof(DateTime)) + return CreateDateTime((string)value); + + else if (Reflection.Instance.IsTypeRegistered(conversionType)) + return Reflection.Instance.CreateCustom((string)value, conversionType); + + // 8-30-2014 - James Brooks - Added code for nullable types. + if (IsNullable(conversionType)) + { + if (value == null) + { + return value; + } + conversionType = UnderlyingTypeOf(conversionType); + } + + // 8-30-2014 - James Brooks - Nullable Guid is a special case so it was moved after the "IsNullable" check. 
+ if (conversionType == typeof(Guid)) + return CreateGuid((string)value); + + return Convert.ChangeType(value, conversionType, CultureInfo.InvariantCulture); + } + + private bool IsNullable(Type t) + { + if (!t.IsGenericType) return false; + Type g = t.GetGenericTypeDefinition(); + return (g.Equals(typeof(Nullable<>))); + } + + private Type UnderlyingTypeOf(Type t) + { + return t.GetGenericArguments()[0]; + } + + private object RootList(object parse, Type type) + { + Type[] gtypes = Reflection.Instance.GetGenericArguments(type); + IList o = (IList)Reflection.Instance.FastCreateInstance(type); + foreach (var k in (IList)parse) + { + _usingglobals = false; + object v = k; + if (k is Dictionary) + v = ParseDictionary(k as Dictionary, null, gtypes[0], null); + else + v = ChangeType(k, gtypes[0]); + + o.Add(v); + } + return o; + } + + private object RootDictionary(object parse, Type type) + { + Type[] gtypes = Reflection.Instance.GetGenericArguments(type); + Type t1 = null; + Type t2 = null; + if (gtypes != null) + { + t1 = gtypes[0]; + t2 = gtypes[1]; + } + if (parse is Dictionary) + { + IDictionary o = (IDictionary)Reflection.Instance.FastCreateInstance(type); + + foreach (var kv in (Dictionary)parse) + { + object v; + object k = ChangeType(kv.Key, t1); + + if (kv.Value is Dictionary) + v = ParseDictionary(kv.Value as Dictionary, null, t2, null); + + else if (t2.IsArray) + v = CreateArray((List)kv.Value, t2, t2.GetElementType(), null); + + else if (kv.Value is IList) + v = CreateGenericList((List)kv.Value, t2, t1, null); + + else + v = ChangeType(kv.Value, t2); + + o.Add(k, v); + } + + return o; + } + if (parse is List) + return CreateDictionary(parse as List, type, gtypes, null); + + return null; + } + + internal object ParseDictionary(Dictionary d, Dictionary globaltypes, Type type, object input) + { + object tn = ""; + if (type == typeof(NameValueCollection)) + return CreateNV(d); + if (type == typeof(StringDictionary)) + return CreateSD(d); + + if 
(d.TryGetValue("$i", out tn)) + { + object v = null; + _cirrev.TryGetValue((int)(long)tn, out v); + return v; + } + + if (d.TryGetValue("$types", out tn)) + { + _usingglobals = true; + globaltypes = new Dictionary(); + foreach (var kv in (Dictionary)tn) + { + globaltypes.Add((string)kv.Value, kv.Key); + } + } + + bool found = d.TryGetValue("$type", out tn); +#if !SILVERLIGHT + if (found == false && type == typeof(System.Object)) + { + return d; // CreateDataset(d, globaltypes); + } +#endif + if (found) + { + if (_usingglobals) + { + object tname = ""; + if (globaltypes != null && globaltypes.TryGetValue((string)tn, out tname)) + tn = tname; + } + type = Reflection.Instance.GetTypeFromCache((string)tn); + } + + if (type == null) + throw new Exception("Cannot determine type"); + + string typename = type.FullName; + object o = input; + if (o == null) + { + if (_params.ParametricConstructorOverride) + o = System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); + else + o = Reflection.Instance.FastCreateInstance(type); + } + int circount = 0; + if (_circobj.TryGetValue(o, out circount) == false) + { + circount = _circobj.Count + 1; + _circobj.Add(o, circount); + _cirrev.Add(circount, o); + } + + Dictionary props = Reflection.Instance.Getproperties(type, typename, Reflection.Instance.IsTypeRegistered(type)); + foreach (var kv in d) + { + var n = kv.Key; + var v = kv.Value; + string name = n.ToLower(); + if (name == "$map") + { + ProcessMap(o, props, (Dictionary)d[name]); + continue; + } + myPropInfo pi; + if (props.TryGetValue(name, out pi) == false) + continue; + if (pi.CanWrite) + { + //object v = d[n]; + + if (v != null) + { + object oset = null; + + switch (pi.Type) + { + case myPropInfoType.Int: oset = (int)((long)v); break; + case myPropInfoType.Long: oset = (long)v; break; + case myPropInfoType.String: oset = (string)v; break; + case myPropInfoType.Bool: oset = (bool)v; break; + case myPropInfoType.DateTime: oset = CreateDateTime((string)v); 
break; + case myPropInfoType.Enum: oset = CreateEnum(pi.pt, v); break; + case myPropInfoType.Guid: oset = CreateGuid((string)v); break; + + case myPropInfoType.Array: + if (!pi.IsValueType) + oset = CreateArray((List)v, pi.pt, pi.bt, globaltypes); + // what about 'else'? + break; + case myPropInfoType.ByteArray: oset = Convert.FromBase64String((string)v); break; +#if !SILVERLIGHT + case myPropInfoType.DataSet: oset = CreateDataset((Dictionary)v, globaltypes); break; + case myPropInfoType.DataTable: oset = CreateDataTable((Dictionary)v, globaltypes); break; + case myPropInfoType.Hashtable: // same case as Dictionary +#endif + case myPropInfoType.Dictionary: oset = CreateDictionary((List)v, pi.pt, pi.GenericTypes, globaltypes); break; + case myPropInfoType.StringKeyDictionary: oset = CreateStringKeyDictionary((Dictionary)v, pi.pt, pi.GenericTypes, globaltypes); break; + case myPropInfoType.NameValue: oset = CreateNV((Dictionary)v); break; + case myPropInfoType.StringDictionary: oset = CreateSD((Dictionary)v); break; + case myPropInfoType.Custom: oset = Reflection.Instance.CreateCustom((string)v, pi.pt); break; + default: + { + if (pi.IsGenericType && pi.IsValueType == false && v is List) + oset = CreateGenericList((List)v, pi.pt, pi.bt, globaltypes); + + else if ((pi.IsClass || pi.IsStruct) && v is Dictionary) + oset = ParseDictionary((Dictionary)v, globaltypes, pi.pt, pi.getter(o)); + + else if (v is List) + oset = CreateArray((List)v, pi.pt, typeof(object), globaltypes); + + else if (pi.IsValueType) + oset = ChangeType(v, pi.changeType); + + else + oset = v; + } + break; + } + + o = pi.setter(o, oset); + } + } + } + return o; + } + + private StringDictionary CreateSD(Dictionary d) + { + StringDictionary nv = new StringDictionary(); + + foreach (var o in d) + nv.Add(o.Key, (string)o.Value); + + return nv; + } + + private NameValueCollection CreateNV(Dictionary d) + { + NameValueCollection nv = new NameValueCollection(); + + foreach (var o in d) + nv.Add(o.Key, 
(string)o.Value); + + return nv; + } + + private void ProcessMap(object obj, Dictionary props, Dictionary dic) + { + foreach (KeyValuePair kv in dic) + { + myPropInfo p = props[kv.Key]; + object o = p.getter(obj); + Type t = Type.GetType((string)kv.Value); + if (t == typeof(Guid)) + p.setter(obj, CreateGuid((string)o)); + } + } + + private int CreateInteger(string s, int index, int count) + { + int num = 0; + bool neg = false; + for (int x = 0; x < count; x++, index++) + { + char cc = s[index]; + + if (cc == '-') + neg = true; + else if (cc == '+') + neg = false; + else + { + num *= 10; + num += (int)(cc - '0'); + } + } + if (neg) num = -num; + + return num; + } + + private object CreateEnum(Type pt, object v) + { + // FEATURE : optimize create enum +#if !SILVERLIGHT + return Enum.Parse(pt, v.ToString()); +#else + return Enum.Parse(pt, v, true); +#endif + } + + private Guid CreateGuid(string s) + { + if (s.Length > 30) + return new Guid(s); + else + return new Guid(Convert.FromBase64String(s)); + } + + private DateTime CreateDateTime(string value) + { + bool utc = false; + // 0123456789012345678 9012 9/3 + // datetime format = yyyy-MM-ddTHH:mm:ss .nnn Z + int year; + int month; + int day; + int hour; + int min; + int sec; + int ms = 0; + + year = CreateInteger(value, 0, 4); + month = CreateInteger(value, 5, 2); + day = CreateInteger(value, 8, 2); + hour = CreateInteger(value, 11, 2); + min = CreateInteger(value, 14, 2); + sec = CreateInteger(value, 17, 2); + if (value.Length > 21 && value[19] == '.') + ms = CreateInteger(value, 20, 3); + + if (value[value.Length - 1] == 'Z') + utc = true; + + if (_params.UseUTCDateTime == false && utc == false) + return new DateTime(year, month, day, hour, min, sec, ms); + else + return new DateTime(year, month, day, hour, min, sec, ms, DateTimeKind.Utc).ToLocalTime(); + } + + private object CreateArray(List data, Type pt, Type bt, Dictionary globalTypes) + { + Array col = Array.CreateInstance(bt, data.Count); + // create an array 
of objects + for (int i = 0; i < data.Count; i++) + { + object ob = data[i]; + if (ob == null) + { + continue; + } + if (ob is IDictionary) + col.SetValue(ParseDictionary((Dictionary)ob, globalTypes, bt, null), i); + else if (ob is ICollection) + col.SetValue(CreateArray((List)ob, bt, bt.GetElementType(), globalTypes), i); + else + col.SetValue(ChangeType(ob, bt), i); + } + + return col; + } + + + private object CreateGenericList(List data, Type pt, Type bt, Dictionary globalTypes) + { + IList col = (IList)Reflection.Instance.FastCreateInstance(pt); + // create an array of objects + foreach (object ob in data) + { + if (ob is IDictionary) + col.Add(ParseDictionary((Dictionary)ob, globalTypes, bt, null)); + + else if (ob is List) + { + if (bt.IsGenericType) + col.Add((List)ob);//).ToArray()); + else + col.Add(((List)ob).ToArray()); + } + else + col.Add(ChangeType(ob, bt)); + } + return col; + } + + private object CreateStringKeyDictionary(Dictionary reader, Type pt, Type[] types, Dictionary globalTypes) + { + var col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); + Type t1 = null; + Type t2 = null; + if (types != null) + { + t1 = types[0]; + t2 = types[1]; + } + + foreach (KeyValuePair values in reader) + { + var key = values.Key; + object val = null; + + if (values.Value is Dictionary) + val = ParseDictionary((Dictionary)values.Value, globalTypes, t2, null); + + else if (types != null && t2.IsArray) + { + if (values.Value is Array) + val = values.Value; + else + val = CreateArray((List)values.Value, t2, t2.GetElementType(), globalTypes); + } + else if (values.Value is IList) + val = CreateGenericList((List)values.Value, t2, t1, globalTypes); + + else + val = ChangeType(values.Value, t2); + + col.Add(key, val); + } + + return col; + } + + private object CreateDictionary(List reader, Type pt, Type[] types, Dictionary globalTypes) + { + IDictionary col = (IDictionary)Reflection.Instance.FastCreateInstance(pt); + Type t1 = null; + Type t2 = null; + if 
(types != null) + { + t1 = types[0]; + t2 = types[1]; + } + + foreach (Dictionary values in reader) + { + object key = values["k"]; + object val = values["v"]; + + if (key is Dictionary) + key = ParseDictionary((Dictionary)key, globalTypes, t1, null); + else + key = ChangeType(key, t1); + + if (val is Dictionary) + val = ParseDictionary((Dictionary)val, globalTypes, t2, null); + else + val = ChangeType(val, t2); + + col.Add(key, val); + } + + return col; + } + +#if !SILVERLIGHT + private DataSet CreateDataset(Dictionary reader, Dictionary globalTypes) + { + DataSet ds = new DataSet(); + ds.EnforceConstraints = false; + ds.BeginInit(); + + // read dataset schema here + var schema = reader["$schema"]; + + if (schema is string) + { + TextReader tr = new StringReader((string)schema); + ds.ReadXmlSchema(tr); + } + else + { + DatasetSchema ms = (DatasetSchema)ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); + ds.DataSetName = ms.Name; + for (int i = 0; i < ms.Info.Count; i += 3) + { + if (ds.Tables.Contains(ms.Info[i]) == false) + ds.Tables.Add(ms.Info[i]); + ds.Tables[ms.Info[i]].Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); + } + } + + foreach (KeyValuePair pair in reader) + { + if (pair.Key == "$type" || pair.Key == "$schema") continue; + + List rows = (List)pair.Value; + if (rows == null) continue; + + DataTable dt = ds.Tables[pair.Key]; + ReadDataTable(rows, dt); + } + + ds.EndInit(); + + return ds; + } + + private void ReadDataTable(List rows, DataTable dt) + { + dt.BeginInit(); + dt.BeginLoadData(); + List guidcols = new List(); + List datecol = new List(); + + foreach (DataColumn c in dt.Columns) + { + if (c.DataType == typeof(Guid) || c.DataType == typeof(Guid?)) + guidcols.Add(c.Ordinal); + if (_params.UseUTCDateTime && (c.DataType == typeof(DateTime) || c.DataType == typeof(DateTime?))) + datecol.Add(c.Ordinal); + } + + foreach (List row in rows) + { + object[] v = new object[row.Count]; + row.CopyTo(v, 0); + foreach 
(int i in guidcols) + { + string s = (string)v[i]; + if (s != null && s.Length < 36) + v[i] = new Guid(Convert.FromBase64String(s)); + } + if (_params.UseUTCDateTime) + { + foreach (int i in datecol) + { + string s = (string)v[i]; + if (s != null) + v[i] = CreateDateTime(s); + } + } + dt.Rows.Add(v); + } + + dt.EndLoadData(); + dt.EndInit(); + } + + DataTable CreateDataTable(Dictionary reader, Dictionary globalTypes) + { + var dt = new DataTable(); + + // read dataset schema here + var schema = reader["$schema"]; + + if (schema is string) + { + TextReader tr = new StringReader((string)schema); + dt.ReadXmlSchema(tr); + } + else + { + var ms = (DatasetSchema)this.ParseDictionary((Dictionary)schema, globalTypes, typeof(DatasetSchema), null); + dt.TableName = ms.Info[0]; + for (int i = 0; i < ms.Info.Count; i += 3) + { + dt.Columns.Add(ms.Info[i + 1], Type.GetType(ms.Info[i + 2])); + } + } + + foreach (var pair in reader) + { + if (pair.Key == "$type" || pair.Key == "$schema") + continue; + + var rows = (List)pair.Value; + if (rows == null) + continue; + + if (!dt.TableName.Equals(pair.Key, StringComparison.InvariantCultureIgnoreCase)) + continue; + + ReadDataTable(rows, dt); + } + + return dt; + } +#endif + #endregion + } + } \ No newline at end of file diff --git a/RaptorDB.Common/fastJSON/JsonParser.cs b/RaptorDB.Common/fastJSON/JsonParser.cs index 863a7c8..6972a9a 100644 --- a/RaptorDB.Common/fastJSON/JsonParser.cs +++ b/RaptorDB.Common/fastJSON/JsonParser.cs @@ -1,429 +1,429 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.Globalization; -using System.Text; - -namespace fastJSON -{ - /// - /// This class encodes and decodes JSON strings. - /// Spec. 
details, see http://www.json.org/ - /// - internal sealed class JsonParser - { - enum Token - { - None = -1, // Used to denote no Lookahead available - Curly_Open, - Curly_Close, - Squared_Open, - Squared_Close, - Colon, - Comma, - String, - Number, - True, - False, - Null - } - - readonly string json; - readonly StringBuilder s = new StringBuilder(); - Token lookAheadToken = Token.None; - int index; - - internal JsonParser(string json) - { - this.json = json; - } - - public object Decode() - { - return ParseValue(); - } - - private Dictionary ParseObject() - { - Dictionary table = new Dictionary(); - - ConsumeToken(); // { - - while (true) - { - switch (LookAhead()) - { - - case Token.Comma: - ConsumeToken(); - break; - - case Token.Curly_Close: - ConsumeToken(); - return table; - - default: - { - // name - string name = ParseString(); - - // : - if (NextToken() != Token.Colon) - { - throw new Exception("Expected colon at index " + index); - } - - // value - object value = ParseValue(); - - table[name] = value; - } - break; - } - } - } - - private List ParseArray() - { - List array = new List(); - ConsumeToken(); // [ - - while (true) - { - switch (LookAhead()) - { - case Token.Comma: - ConsumeToken(); - break; - - case Token.Squared_Close: - ConsumeToken(); - return array; - - default: - array.Add(ParseValue()); - break; - } - } - } - - private object ParseValue() - { - switch (LookAhead()) - { - case Token.Number: - return ParseNumber(); - - case Token.String: - return ParseString(); - - case Token.Curly_Open: - return ParseObject(); - - case Token.Squared_Open: - return ParseArray(); - - case Token.True: - ConsumeToken(); - return true; - - case Token.False: - ConsumeToken(); - return false; - - case Token.Null: - ConsumeToken(); - return null; - } - - throw new Exception("Unrecognized token at index" + index); - } - - private string ParseString() - { - ConsumeToken(); // " - - s.Length = 0; - - int runIndex = -1; - - while (index < json.Length) - { - var c = 
json[index++]; - - if (c == '"') - { - if (runIndex != -1) - { - if (s.Length == 0) - return json.Substring(runIndex, index - runIndex - 1); - - s.Append(json, runIndex, index - runIndex - 1); - } - return s.ToString(); - } - - if (c != '\\') - { - if (runIndex == -1) - runIndex = index - 1; - - continue; - } - - if (index == json.Length) break; - - if (runIndex != -1) - { - s.Append(json, runIndex, index - runIndex - 1); - runIndex = -1; - } - - switch (json[index++]) - { - case '"': - s.Append('"'); - break; - - case '\\': - s.Append('\\'); - break; - - case '/': - s.Append('/'); - break; - - case 'b': - s.Append('\b'); - break; - - case 'f': - s.Append('\f'); - break; - - case 'n': - s.Append('\n'); - break; - - case 'r': - s.Append('\r'); - break; - - case 't': - s.Append('\t'); - break; - - case 'u': - { - int remainingLength = json.Length - index; - if (remainingLength < 4) break; - - // parse the 32 bit hex into an integer codepoint - uint codePoint = ParseUnicode(json[index], json[index + 1], json[index + 2], json[index + 3]); - s.Append((char)codePoint); - - // skip 4 chars - index += 4; - } - break; - } - } - - throw new Exception("Unexpectedly reached end of string"); - } - - private uint ParseSingleChar(char c1, uint multipliyer) - { - uint p1 = 0; - if (c1 >= '0' && c1 <= '9') - p1 = (uint)(c1 - '0') * multipliyer; - else if (c1 >= 'A' && c1 <= 'F') - p1 = (uint)((c1 - 'A') + 10) * multipliyer; - else if (c1 >= 'a' && c1 <= 'f') - p1 = (uint)((c1 - 'a') + 10) * multipliyer; - return p1; - } - - private uint ParseUnicode(char c1, char c2, char c3, char c4) - { - uint p1 = ParseSingleChar(c1, 0x1000); - uint p2 = ParseSingleChar(c2, 0x100); - uint p3 = ParseSingleChar(c3, 0x10); - uint p4 = ParseSingleChar(c4, 1); - - return p1 + p2 + p3 + p4; - } - - private long CreateLong(string s) - { - long num = 0; - bool neg = false; - foreach (char cc in s) - { - if (cc == '-') - neg = true; - else if (cc == '+') - neg = false; - else - { - num *= 10; - num += 
(int)(cc - '0'); - } - } - - return neg ? -num : num; - } - - private object ParseNumber() - { - ConsumeToken(); - - // Need to start back one place because the first digit is also a token and would have been consumed - var startIndex = index - 1; - bool dec = false; - do - { - if (index == json.Length) - break; - var c = json[index]; - - if ((c >= '0' && c <= '9') || c == '.' || c == '-' || c == '+' || c == 'e' || c == 'E') - { - if (c == '.' || c == 'e' || c == 'E') - dec = true; - if (++index == json.Length) - break;//throw new Exception("Unexpected end of string whilst parsing number"); - continue; - } - break; - } while (true); - - if (dec) - { - string s = json.Substring(startIndex, index - startIndex); - return double.Parse(s, NumberFormatInfo.InvariantInfo); - } - long num; - return JSON.CreateLong(out num, json, startIndex, index - startIndex); - } - - private Token LookAhead() - { - if (lookAheadToken != Token.None) return lookAheadToken; - - return lookAheadToken = NextTokenCore(); - } - - private void ConsumeToken() - { - lookAheadToken = Token.None; - } - - private Token NextToken() - { - var result = lookAheadToken != Token.None ? 
lookAheadToken : NextTokenCore(); - - lookAheadToken = Token.None; - - return result; - } - - private Token NextTokenCore() - { - char c; - - // Skip past whitespace - do - { - c = json[index]; - - if (c > ' ') break; - if (c != ' ' && c != '\t' && c != '\n' && c != '\r') break; - - } while (++index < json.Length); - - if (index == json.Length) - { - throw new Exception("Reached end of string unexpectedly"); - } - - c = json[index]; - - index++; - - switch (c) - { - case '{': - return Token.Curly_Open; - - case '}': - return Token.Curly_Close; - - case '[': - return Token.Squared_Open; - - case ']': - return Token.Squared_Close; - - case ',': - return Token.Comma; - - case '"': - return Token.String; - - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - case '+': - case '.': - return Token.Number; - - case ':': - return Token.Colon; - - case 'f': - if (json.Length - index >= 4 && - json[index + 0] == 'a' && - json[index + 1] == 'l' && - json[index + 2] == 's' && - json[index + 3] == 'e') - { - index += 4; - return Token.False; - } - break; - - case 't': - if (json.Length - index >= 3 && - json[index + 0] == 'r' && - json[index + 1] == 'u' && - json[index + 2] == 'e') - { - index += 3; - return Token.True; - } - break; - - case 'n': - if (json.Length - index >= 3 && - json[index + 0] == 'u' && - json[index + 1] == 'l' && - json[index + 2] == 'l') - { - index += 3; - return Token.Null; - } - break; - } - throw new Exception("Could not find token at index " + --index); - } - } -} +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.Text; + +namespace fastJSON +{ + /// + /// This class encodes and decodes JSON strings. + /// Spec. 
details, see http://www.json.org/ + /// + internal sealed class JsonParser + { + enum Token + { + None = -1, // Used to denote no Lookahead available + Curly_Open, + Curly_Close, + Squared_Open, + Squared_Close, + Colon, + Comma, + String, + Number, + True, + False, + Null + } + + readonly string json; + readonly StringBuilder s = new StringBuilder(); + Token lookAheadToken = Token.None; + int index; + + internal JsonParser(string json) + { + this.json = json; + } + + public object Decode() + { + return ParseValue(); + } + + private Dictionary ParseObject() + { + Dictionary table = new Dictionary(); + + ConsumeToken(); // { + + while (true) + { + switch (LookAhead()) + { + + case Token.Comma: + ConsumeToken(); + break; + + case Token.Curly_Close: + ConsumeToken(); + return table; + + default: + { + // name + string name = ParseString(); + + // : + if (NextToken() != Token.Colon) + { + throw new Exception("Expected colon at index " + index); + } + + // value + object value = ParseValue(); + + table[name] = value; + } + break; + } + } + } + + private List ParseArray() + { + List array = new List(); + ConsumeToken(); // [ + + while (true) + { + switch (LookAhead()) + { + case Token.Comma: + ConsumeToken(); + break; + + case Token.Squared_Close: + ConsumeToken(); + return array; + + default: + array.Add(ParseValue()); + break; + } + } + } + + private object ParseValue() + { + switch (LookAhead()) + { + case Token.Number: + return ParseNumber(); + + case Token.String: + return ParseString(); + + case Token.Curly_Open: + return ParseObject(); + + case Token.Squared_Open: + return ParseArray(); + + case Token.True: + ConsumeToken(); + return true; + + case Token.False: + ConsumeToken(); + return false; + + case Token.Null: + ConsumeToken(); + return null; + } + + throw new Exception("Unrecognized token at index" + index); + } + + private string ParseString() + { + ConsumeToken(); // " + + s.Length = 0; + + int runIndex = -1; + + while (index < json.Length) + { + var c = 
json[index++]; + + if (c == '"') + { + if (runIndex != -1) + { + if (s.Length == 0) + return json.Substring(runIndex, index - runIndex - 1); + + s.Append(json, runIndex, index - runIndex - 1); + } + return s.ToString(); + } + + if (c != '\\') + { + if (runIndex == -1) + runIndex = index - 1; + + continue; + } + + if (index == json.Length) break; + + if (runIndex != -1) + { + s.Append(json, runIndex, index - runIndex - 1); + runIndex = -1; + } + + switch (json[index++]) + { + case '"': + s.Append('"'); + break; + + case '\\': + s.Append('\\'); + break; + + case '/': + s.Append('/'); + break; + + case 'b': + s.Append('\b'); + break; + + case 'f': + s.Append('\f'); + break; + + case 'n': + s.Append('\n'); + break; + + case 'r': + s.Append('\r'); + break; + + case 't': + s.Append('\t'); + break; + + case 'u': + { + int remainingLength = json.Length - index; + if (remainingLength < 4) break; + + // parse the 32 bit hex into an integer codepoint + uint codePoint = ParseUnicode(json[index], json[index + 1], json[index + 2], json[index + 3]); + s.Append((char)codePoint); + + // skip 4 chars + index += 4; + } + break; + } + } + + throw new Exception("Unexpectedly reached end of string"); + } + + private uint ParseSingleChar(char c1, uint multipliyer) + { + uint p1 = 0; + if (c1 >= '0' && c1 <= '9') + p1 = (uint)(c1 - '0') * multipliyer; + else if (c1 >= 'A' && c1 <= 'F') + p1 = (uint)((c1 - 'A') + 10) * multipliyer; + else if (c1 >= 'a' && c1 <= 'f') + p1 = (uint)((c1 - 'a') + 10) * multipliyer; + return p1; + } + + private uint ParseUnicode(char c1, char c2, char c3, char c4) + { + uint p1 = ParseSingleChar(c1, 0x1000); + uint p2 = ParseSingleChar(c2, 0x100); + uint p3 = ParseSingleChar(c3, 0x10); + uint p4 = ParseSingleChar(c4, 1); + + return p1 + p2 + p3 + p4; + } + + private long CreateLong(string s) + { + long num = 0; + bool neg = false; + foreach (char cc in s) + { + if (cc == '-') + neg = true; + else if (cc == '+') + neg = false; + else + { + num *= 10; + num += 
(int)(cc - '0'); + } + } + + return neg ? -num : num; + } + + private object ParseNumber() + { + ConsumeToken(); + + // Need to start back one place because the first digit is also a token and would have been consumed + var startIndex = index - 1; + bool dec = false; + do + { + if (index == json.Length) + break; + var c = json[index]; + + if ((c >= '0' && c <= '9') || c == '.' || c == '-' || c == '+' || c == 'e' || c == 'E') + { + if (c == '.' || c == 'e' || c == 'E') + dec = true; + if (++index == json.Length) + break;//throw new Exception("Unexpected end of string whilst parsing number"); + continue; + } + break; + } while (true); + + if (dec) + { + string s = json.Substring(startIndex, index - startIndex); + return double.Parse(s, NumberFormatInfo.InvariantInfo); + } + long num; + return JSON.CreateLong(out num, json, startIndex, index - startIndex); + } + + private Token LookAhead() + { + if (lookAheadToken != Token.None) return lookAheadToken; + + return lookAheadToken = NextTokenCore(); + } + + private void ConsumeToken() + { + lookAheadToken = Token.None; + } + + private Token NextToken() + { + var result = lookAheadToken != Token.None ? 
lookAheadToken : NextTokenCore(); + + lookAheadToken = Token.None; + + return result; + } + + private Token NextTokenCore() + { + char c; + + // Skip past whitespace + do + { + c = json[index]; + + if (c > ' ') break; + if (c != ' ' && c != '\t' && c != '\n' && c != '\r') break; + + } while (++index < json.Length); + + if (index == json.Length) + { + throw new Exception("Reached end of string unexpectedly"); + } + + c = json[index]; + + index++; + + switch (c) + { + case '{': + return Token.Curly_Open; + + case '}': + return Token.Curly_Close; + + case '[': + return Token.Squared_Open; + + case ']': + return Token.Squared_Close; + + case ',': + return Token.Comma; + + case '"': + return Token.String; + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + case '+': + case '.': + return Token.Number; + + case ':': + return Token.Colon; + + case 'f': + if (json.Length - index >= 4 && + json[index + 0] == 'a' && + json[index + 1] == 'l' && + json[index + 2] == 's' && + json[index + 3] == 'e') + { + index += 4; + return Token.False; + } + break; + + case 't': + if (json.Length - index >= 3 && + json[index + 0] == 'r' && + json[index + 1] == 'u' && + json[index + 2] == 'e') + { + index += 3; + return Token.True; + } + break; + + case 'n': + if (json.Length - index >= 3 && + json[index + 0] == 'u' && + json[index + 1] == 'l' && + json[index + 2] == 'l') + { + index += 3; + return Token.Null; + } + break; + } + throw new Exception("Could not find token at index " + --index); + } + } +} diff --git a/RaptorDB.Common/fastJSON/JsonSerializer.cs b/RaptorDB.Common/fastJSON/JsonSerializer.cs index ec39967..8e835ff 100644 --- a/RaptorDB.Common/fastJSON/JsonSerializer.cs +++ b/RaptorDB.Common/fastJSON/JsonSerializer.cs @@ -1,617 +1,616 @@ -using System; -using System.Collections; -using System.Collections.Generic; -#if !SILVERLIGHT -using System.Data; -#endif -using System.Globalization; -using 
System.IO; -using System.Text; -using System.Collections.Specialized; - -namespace fastJSON -{ - internal sealed class JSONSerializer - { - private StringBuilder _output = new StringBuilder(); - //private StringBuilder _before = new StringBuilder(); - private int _before; - private int _MAX_DEPTH = 20; - int _current_depth = 0; - private Dictionary _globalTypes = new Dictionary(); - private Dictionary _cirobj = new Dictionary(); - private JSONParameters _params; - private bool _useEscapedUnicode = false; - - internal JSONSerializer(JSONParameters param) - { - _params = param; - _useEscapedUnicode = _params.UseEscapedUnicode; - _MAX_DEPTH = _params.SerializerMaxDepth; - } - - internal string ConvertToJSON(object obj) - { - WriteValue(obj); - - string str = ""; - if (_params.UsingGlobalTypes && _globalTypes != null && _globalTypes.Count > 0) - { - var sb = new StringBuilder(); - sb.Append("\"$types\":{"); - var pendingSeparator = false; - foreach (var kv in _globalTypes) - { - if (pendingSeparator) sb.Append(','); - pendingSeparator = true; - sb.Append('\"'); - sb.Append(kv.Key); - sb.Append("\":\""); - sb.Append(kv.Value); - sb.Append('\"'); - } - sb.Append("},"); - _output.Insert(_before, sb.ToString()); - } - return _output.ToString(); - } - - private void WriteValue(object obj) - { - if (obj == null || obj is DBNull) - _output.Append("null"); - - else if (obj is string || obj is char) - WriteString(obj.ToString()); - - else if (obj is Guid) - WriteGuid((Guid)obj); - - else if (obj is bool) - _output.Append(((bool)obj) ? 
"true" : "false"); // conform to standard - - else if ( - obj is int || obj is long || obj is double || - obj is decimal || obj is float || - obj is byte || obj is short || - obj is sbyte || obj is ushort || - obj is uint || obj is ulong - ) - _output.Append(((IConvertible)obj).ToString(NumberFormatInfo.InvariantInfo)); - - else if (obj is DateTime) - WriteDateTime((DateTime)obj); - - else if (_params.KVStyleStringDictionary == false && obj is IDictionary && - obj.GetType().IsGenericType && obj.GetType().GetGenericArguments()[0] == typeof(string)) - - WriteStringDictionary((IDictionary)obj); -#if net4 - else if (_params.KVStyleStringDictionary == false && obj is System.Dynamic.ExpandoObject) - WriteStringDictionary((IDictionary)obj); -#endif - else if (obj is IDictionary) - WriteDictionary((IDictionary)obj); -#if !SILVERLIGHT - else if (obj is DataSet) - WriteDataset((DataSet)obj); - - else if (obj is DataTable) - this.WriteDataTable((DataTable)obj); -#endif - else if (obj is byte[]) - WriteBytes((byte[])obj); - - else if (obj is StringDictionary) - WriteSD((StringDictionary)obj); - - else if (obj is NameValueCollection) - WriteNV((NameValueCollection)obj); - - else if (obj is IEnumerable) - WriteArray((IEnumerable)obj); - - else if (obj is Enum) - WriteEnum((Enum)obj); - - else if (Reflection.Instance.IsTypeRegistered(obj.GetType())) - WriteCustom(obj); - - else - WriteObject(obj); - } - - private void WriteNV(NameValueCollection nameValueCollection) - { - _output.Append('{'); - - bool pendingSeparator = false; - - foreach (string key in nameValueCollection) - { - if (_params.SerializeNullValues == false && (nameValueCollection[key] == null)) - { - } - else - { - if (pendingSeparator) _output.Append(','); - if (_params.SerializeToLowerCaseNames) - WritePair(key.ToLower(), nameValueCollection[key]); - else - WritePair(key, nameValueCollection[key]); - pendingSeparator = true; - } - } - _output.Append('}'); - } - - private void WriteSD(StringDictionary 
stringDictionary) - { - _output.Append('{'); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in stringDictionary) - { - if (_params.SerializeNullValues == false && (entry.Value == null)) - { - } - else - { - if (pendingSeparator) _output.Append(','); - - string k = (string)entry.Key; - if (_params.SerializeToLowerCaseNames) - WritePair(k.ToLower(), entry.Value); - else - WritePair(k, entry.Value); - pendingSeparator = true; - } - } - _output.Append('}'); - } - - private void WriteCustom(object obj) - { - Serialize s; - Reflection.Instance._customSerializer.TryGetValue(obj.GetType(), out s); - WriteStringFast(s(obj)); - } - - private void WriteEnum(Enum e) - { - // TODO : optimize enum write - if (_params.UseValuesOfEnums) - WriteValue(Convert.ToInt32(e)); - else - WriteStringFast(e.ToString()); - } - - private void WriteGuid(Guid g) - { - if (_params.UseFastGuid == false) - WriteStringFast(g.ToString()); - else - WriteBytes(g.ToByteArray()); - } - - private void WriteBytes(byte[] bytes) - { -#if !SILVERLIGHT - WriteStringFast(Convert.ToBase64String(bytes, 0, bytes.Length, Base64FormattingOptions.None)); -#else - WriteStringFast(Convert.ToBase64String(bytes, 0, bytes.Length)); -#endif - } - - private void WriteDateTime(DateTime dateTime) - { - // datetime format standard : yyyy-MM-dd HH:mm:ss - DateTime dt = dateTime; - if (_params.UseUTCDateTime) - dt = dateTime.ToUniversalTime(); - - _output.Append('\"'); - _output.Append(dt.Year.ToString("0000", NumberFormatInfo.InvariantInfo)); - _output.Append('-'); - _output.Append(dt.Month.ToString("00", NumberFormatInfo.InvariantInfo)); - _output.Append('-'); - _output.Append(dt.Day.ToString("00", NumberFormatInfo.InvariantInfo)); - _output.Append('T'); // strict ISO date compliance - _output.Append(dt.Hour.ToString("00", NumberFormatInfo.InvariantInfo)); - _output.Append(':'); - _output.Append(dt.Minute.ToString("00", NumberFormatInfo.InvariantInfo)); - _output.Append(':'); - 
_output.Append(dt.Second.ToString("00", NumberFormatInfo.InvariantInfo)); - if (_params.DateTimeMilliseconds) - { - _output.Append('.'); - _output.Append(dt.Millisecond.ToString("000", NumberFormatInfo.InvariantInfo)); - } - if (_params.UseUTCDateTime) - _output.Append('Z'); - - _output.Append('\"'); - } - -#if !SILVERLIGHT - private DatasetSchema GetSchema(DataTable ds) - { - if (ds == null) return null; - - DatasetSchema m = new DatasetSchema(); - m.Info = new List(); - m.Name = ds.TableName; - - foreach (DataColumn c in ds.Columns) - { - m.Info.Add(ds.TableName); - m.Info.Add(c.ColumnName); - m.Info.Add(c.DataType.ToString()); - } - // FEATURE : serialize relations and constraints here - - return m; - } - - private DatasetSchema GetSchema(DataSet ds) - { - if (ds == null) return null; - - DatasetSchema m = new DatasetSchema(); - m.Info = new List(); - m.Name = ds.DataSetName; - - foreach (DataTable t in ds.Tables) - { - foreach (DataColumn c in t.Columns) - { - m.Info.Add(t.TableName); - m.Info.Add(c.ColumnName); - m.Info.Add(c.DataType.ToString()); - } - } - // FEATURE : serialize relations and constraints here - - return m; - } - - private string GetXmlSchema(DataTable dt) - { - using (var writer = new StringWriter()) - { - dt.WriteXmlSchema(writer); - return dt.ToString(); - } - } - - private void WriteDataset(DataSet ds) - { - _output.Append('{'); - if (_params.UseExtensions) - { - WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)GetSchema(ds) : ds.GetXmlSchema()); - _output.Append(','); - } - bool tablesep = false; - foreach (DataTable table in ds.Tables) - { - if (tablesep) _output.Append(','); - tablesep = true; - WriteDataTableData(table); - } - // end dataset - _output.Append('}'); - } - - private void WriteDataTableData(DataTable table) - { - _output.Append('\"'); - _output.Append(table.TableName); - _output.Append("\":["); - DataColumnCollection cols = table.Columns; - bool rowseparator = false; - foreach (DataRow row in table.Rows) - { - if (rowseparator) _output.Append(','); - rowseparator = true; - _output.Append('['); - - bool pendingSeperator = false; - foreach (DataColumn column in cols) - { - if (pendingSeperator) _output.Append(','); - WriteValue(row[column]); - pendingSeperator = true; - } - _output.Append(']'); - } - - _output.Append(']'); - } - - void WriteDataTable(DataTable dt) - { - this._output.Append('{'); - if (_params.UseExtensions) - { - this.WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)this.GetSchema(dt) : this.GetXmlSchema(dt)); - this._output.Append(','); - } - - WriteDataTableData(dt); - - // end datatable - this._output.Append('}'); - } -#endif - - bool _TypesWritten = false; - private void WriteObject(object obj) - { - int i = 0; - if (_cirobj.TryGetValue(obj, out i) == false) - _cirobj.Add(obj, _cirobj.Count + 1); - else - { - if (_current_depth > 0 && _params.InlineCircularReferences == false) - { - //_circular = true; - _output.Append("{\"$i\":"); - _output.Append(i.ToString()); - _output.Append("}"); - return; - } - } - if (_params.UsingGlobalTypes == false) - _output.Append('{'); - else - { - if (_TypesWritten == false) - { - _output.Append('{'); - _before = _output.Length; - //_output = new StringBuilder(); - } - else - _output.Append('{'); - } - _TypesWritten = true; - _current_depth++; - if (_current_depth > _MAX_DEPTH) - throw new Exception("Serializer encountered maximum depth of " + _MAX_DEPTH); - - - Dictionary map = new Dictionary(); - Type t = obj.GetType(); - bool append = false; - if (_params.UseExtensions) - { - if (_params.UsingGlobalTypes == false) - WritePairFast("$type", Reflection.Instance.GetTypeAssemblyName(t)); - else - { - int dt = 0; - string ct = Reflection.Instance.GetTypeAssemblyName(t); - if (_globalTypes.TryGetValue(ct, out dt) == false) - { - dt = _globalTypes.Count + 1; - _globalTypes.Add(ct, dt); - } - WritePairFast("$type", dt.ToString()); - } - append = true; - } - - Getters[] g = Reflection.Instance.GetGetters(t, _params.ShowReadOnlyProperties, _params.IgnoreAttributes); - int c = g.Length; - for (int ii = 0; ii < c; ii++) - { - var p = g[ii]; - object o = p.Getter(obj); - if (_params.SerializeNullValues == false && (o == null || o is DBNull)) - { - //append = false; - } - else - { - if (append) - _output.Append(','); - if (_params.SerializeToLowerCaseNames) - WritePair(p.lcName, o); - else - WritePair(p.Name, o); - if (o != null && _params.UseExtensions) - { - Type tt = o.GetType(); - if (tt == 
typeof(System.Object)) - map.Add(p.Name, tt.ToString()); - } - append = true; - } - } - if (map.Count > 0 && _params.UseExtensions) - { - _output.Append(",\"$map\":"); - WriteStringDictionary(map); - } - _output.Append('}'); - _current_depth--; - } - - private void WritePairFast(string name, string value) - { - WriteStringFast(name); - - _output.Append(':'); - - WriteStringFast(value); - } - - private void WritePair(string name, object value) - { - WriteStringFast(name); - - _output.Append(':'); - - WriteValue(value); - } - - private void WriteArray(IEnumerable array) - { - _output.Append('['); - - bool pendingSeperator = false; - - foreach (object obj in array) - { - if (pendingSeperator) _output.Append(','); - - WriteValue(obj); - - pendingSeperator = true; - } - _output.Append(']'); - } - - private void WriteStringDictionary(IDictionary dic) - { - _output.Append('{'); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in dic) - { - if (_params.SerializeNullValues == false && (entry.Value == null)) - { - } - else - { - if (pendingSeparator) _output.Append(','); - - string k = (string)entry.Key; - if (_params.SerializeToLowerCaseNames) - WritePair(k.ToLower(), entry.Value); - else - WritePair(k, entry.Value); - pendingSeparator = true; - } - } - _output.Append('}'); - } - - private void WriteStringDictionary(IDictionary dic) - { - _output.Append('{'); - bool pendingSeparator = false; - foreach (KeyValuePair entry in dic) - { - if (_params.SerializeNullValues == false && (entry.Value == null)) - { - } - else - { - if (pendingSeparator) _output.Append(','); - string k = entry.Key; - - if (_params.SerializeToLowerCaseNames) - WritePair(k.ToLower(), entry.Value); - else - WritePair(k, entry.Value); - pendingSeparator = true; - } - } - _output.Append('}'); - } - - private void WriteDictionary(IDictionary dic) - { - _output.Append('['); - - bool pendingSeparator = false; - - foreach (DictionaryEntry entry in dic) - { - if (pendingSeparator) 
_output.Append(','); - _output.Append('{'); - WritePair("k", entry.Key); - _output.Append(","); - WritePair("v", entry.Value); - _output.Append('}'); - - pendingSeparator = true; - } - _output.Append(']'); - } - - private void WriteStringFast(string s) - { - _output.Append('\"'); - _output.Append(s); - _output.Append('\"'); - } - - private void WriteString(string s) - { - _output.Append('\"'); - - int runIndex = -1; - int l = s.Length; - for (var index = 0; index < l; ++index) - { - var c = s[index]; - - if (_useEscapedUnicode) - { - if (c >= ' ' && c < 128 && c != '\"' && c != '\\') - { - if (runIndex == -1) - runIndex = index; - - continue; - } - } - else - { - if (c != '\t' && c != '\n' && c != '\r' && c != '\"' && c != '\\')// && c != ':' && c!=',') - { - if (runIndex == -1) - runIndex = index; - - continue; - } - } - - if (runIndex != -1) - { - _output.Append(s, runIndex, index - runIndex); - runIndex = -1; - } - - switch (c) - { - case '\t': _output.Append("\\t"); break; - case '\r': _output.Append("\\r"); break; - case '\n': _output.Append("\\n"); break; - case '"': - case '\\': _output.Append('\\'); _output.Append(c); break; - default: - if (_useEscapedUnicode) - { - _output.Append("\\u"); - _output.Append(((int)c).ToString("X4", NumberFormatInfo.InvariantInfo)); - } - else - _output.Append(c); - - break; - } - } - - if (runIndex != -1) - _output.Append(s, runIndex, s.Length - runIndex); - - _output.Append('\"'); - } - } -} +using System; +using System.Collections; +using System.Collections.Generic; +#if !SILVERLIGHT +using System.Data; +#endif +using System.Globalization; +using System.IO; +using System.Text; +using System.Collections.Specialized; + +namespace fastJSON +{ + internal sealed class JSONSerializer + { + private StringBuilder _output = new StringBuilder(); + //private StringBuilder _before = new StringBuilder(); + private int _before; + private int _MAX_DEPTH = 20; + int _current_depth = 0; + private Dictionary _globalTypes = new Dictionary(); 
+ private Dictionary _cirobj = new Dictionary(); + private JSONParameters _params; + private bool _useEscapedUnicode = false; + + internal JSONSerializer(JSONParameters param) + { + _params = param; + _useEscapedUnicode = _params.UseEscapedUnicode; + _MAX_DEPTH = _params.SerializerMaxDepth; + } + + internal string ConvertToJSON(object obj) + { + WriteValue(obj); + + if (_params.UsingGlobalTypes && _globalTypes != null && _globalTypes.Count > 0) + { + var sb = new StringBuilder(); + sb.Append("\"$types\":{"); + var pendingSeparator = false; + foreach (var kv in _globalTypes) + { + if (pendingSeparator) sb.Append(','); + pendingSeparator = true; + sb.Append('\"'); + sb.Append(kv.Key); + sb.Append("\":\""); + sb.Append(kv.Value); + sb.Append('\"'); + } + sb.Append("},"); + _output.Insert(_before, sb.ToString()); + } + return _output.ToString(); + } + + private void WriteValue(object obj) + { + if (obj == null || obj is DBNull) + _output.Append("null"); + + else if (obj is string || obj is char) + WriteString(obj.ToString()); + + else if (obj is Guid) + WriteGuid((Guid)obj); + + else if (obj is bool) + _output.Append(((bool)obj) ? 
"true" : "false"); // conform to standard + + else if ( + obj is int || obj is long || obj is double || + obj is decimal || obj is float || + obj is byte || obj is short || + obj is sbyte || obj is ushort || + obj is uint || obj is ulong + ) + _output.Append(((IConvertible)obj).ToString(NumberFormatInfo.InvariantInfo)); + + else if (obj is DateTime) + WriteDateTime((DateTime)obj); + + else if (_params.KVStyleStringDictionary == false && obj is IDictionary && + obj.GetType().IsGenericType && obj.GetType().GetGenericArguments()[0] == typeof(string)) + + WriteStringDictionary((IDictionary)obj); +#if net4 + else if (_params.KVStyleStringDictionary == false && obj is System.Dynamic.ExpandoObject) + WriteStringDictionary((IDictionary)obj); +#endif + else if (obj is IDictionary) + WriteDictionary((IDictionary)obj); +#if !SILVERLIGHT + else if (obj is DataSet) + WriteDataset((DataSet)obj); + + else if (obj is DataTable) + this.WriteDataTable((DataTable)obj); +#endif + else if (obj is byte[]) + WriteBytes((byte[])obj); + + else if (obj is StringDictionary) + WriteSD((StringDictionary)obj); + + else if (obj is NameValueCollection) + WriteNV((NameValueCollection)obj); + + else if (obj is IEnumerable) + WriteArray((IEnumerable)obj); + + else if (obj is Enum) + WriteEnum((Enum)obj); + + else if (Reflection.Instance.IsTypeRegistered(obj.GetType())) + WriteCustom(obj); + + else + WriteObject(obj); + } + + private void WriteNV(NameValueCollection nameValueCollection) + { + _output.Append('{'); + + bool pendingSeparator = false; + + foreach (string key in nameValueCollection) + { + if (_params.SerializeNullValues == false && (nameValueCollection[key] == null)) + { + } + else + { + if (pendingSeparator) _output.Append(','); + if (_params.SerializeToLowerCaseNames) + WritePair(key.ToLower(), nameValueCollection[key]); + else + WritePair(key, nameValueCollection[key]); + pendingSeparator = true; + } + } + _output.Append('}'); + } + + private void WriteSD(StringDictionary 
stringDictionary) + { + _output.Append('{'); + + bool pendingSeparator = false; + + foreach (DictionaryEntry entry in stringDictionary) + { + if (_params.SerializeNullValues == false && (entry.Value == null)) + { + } + else + { + if (pendingSeparator) _output.Append(','); + + string k = (string)entry.Key; + if (_params.SerializeToLowerCaseNames) + WritePair(k.ToLower(), entry.Value); + else + WritePair(k, entry.Value); + pendingSeparator = true; + } + } + _output.Append('}'); + } + + private void WriteCustom(object obj) + { + Serialize s; + Reflection.Instance._customSerializer.TryGetValue(obj.GetType(), out s); + WriteStringFast(s(obj)); + } + + private void WriteEnum(Enum e) + { + // TODO : optimize enum write + if (_params.UseValuesOfEnums) + WriteValue(Convert.ToInt32(e)); + else + WriteStringFast(e.ToString()); + } + + private void WriteGuid(Guid g) + { + if (_params.UseFastGuid == false) + WriteStringFast(g.ToString()); + else + WriteBytes(g.ToByteArray()); + } + + private void WriteBytes(byte[] bytes) + { +#if !SILVERLIGHT + WriteStringFast(Convert.ToBase64String(bytes, 0, bytes.Length, Base64FormattingOptions.None)); +#else + WriteStringFast(Convert.ToBase64String(bytes, 0, bytes.Length)); +#endif + } + + private void WriteDateTime(DateTime dateTime) + { + // datetime format standard : yyyy-MM-dd HH:mm:ss + DateTime dt = dateTime; + if (_params.UseUTCDateTime) + dt = dateTime.ToUniversalTime(); + + _output.Append('\"'); + _output.Append(dt.Year.ToString("0000", NumberFormatInfo.InvariantInfo)); + _output.Append('-'); + _output.Append(dt.Month.ToString("00", NumberFormatInfo.InvariantInfo)); + _output.Append('-'); + _output.Append(dt.Day.ToString("00", NumberFormatInfo.InvariantInfo)); + _output.Append('T'); // strict ISO date compliance + _output.Append(dt.Hour.ToString("00", NumberFormatInfo.InvariantInfo)); + _output.Append(':'); + _output.Append(dt.Minute.ToString("00", NumberFormatInfo.InvariantInfo)); + _output.Append(':'); + 
_output.Append(dt.Second.ToString("00", NumberFormatInfo.InvariantInfo)); + if (_params.DateTimeMilliseconds) + { + _output.Append('.'); + _output.Append(dt.Millisecond.ToString("000", NumberFormatInfo.InvariantInfo)); + } + if (_params.UseUTCDateTime) + _output.Append('Z'); + + _output.Append('\"'); + } + +#if !SILVERLIGHT + private DatasetSchema GetSchema(DataTable ds) + { + if (ds == null) return null; + + DatasetSchema m = new DatasetSchema(); + m.Info = new List(); + m.Name = ds.TableName; + + foreach (DataColumn c in ds.Columns) + { + m.Info.Add(ds.TableName); + m.Info.Add(c.ColumnName); + m.Info.Add(c.DataType.ToString()); + } + // FEATURE : serialize relations and constraints here + + return m; + } + + private DatasetSchema GetSchema(DataSet ds) + { + if (ds == null) return null; + + DatasetSchema m = new DatasetSchema(); + m.Info = new List(); + m.Name = ds.DataSetName; + + foreach (DataTable t in ds.Tables) + { + foreach (DataColumn c in t.Columns) + { + m.Info.Add(t.TableName); + m.Info.Add(c.ColumnName); + m.Info.Add(c.DataType.ToString()); + } + } + // FEATURE : serialize relations and constraints here + + return m; + } + + private string GetXmlSchema(DataTable dt) + { + using (var writer = new StringWriter()) + { + dt.WriteXmlSchema(writer); + return dt.ToString(); + } + } + + private void WriteDataset(DataSet ds) + { + _output.Append('{'); + if (_params.UseExtensions) + { + WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)GetSchema(ds) : ds.GetXmlSchema()); + _output.Append(','); + } + bool tablesep = false; + foreach (DataTable table in ds.Tables) + { + if (tablesep) _output.Append(','); + tablesep = true; + WriteDataTableData(table); + } + // end dataset + _output.Append('}'); + } + + private void WriteDataTableData(DataTable table) + { + _output.Append('\"'); + _output.Append(table.TableName); + _output.Append("\":["); + DataColumnCollection cols = table.Columns; + bool rowseparator = false; + foreach (DataRow row in table.Rows) + { + if (rowseparator) _output.Append(','); + rowseparator = true; + _output.Append('['); + + bool pendingSeperator = false; + foreach (DataColumn column in cols) + { + if (pendingSeperator) _output.Append(','); + WriteValue(row[column]); + pendingSeperator = true; + } + _output.Append(']'); + } + + _output.Append(']'); + } + + void WriteDataTable(DataTable dt) + { + this._output.Append('{'); + if (_params.UseExtensions) + { + this.WritePair("$schema", _params.UseOptimizedDatasetSchema ? 
(object)this.GetSchema(dt) : this.GetXmlSchema(dt)); + this._output.Append(','); + } + + WriteDataTableData(dt); + + // end datatable + this._output.Append('}'); + } +#endif + + bool _TypesWritten = false; + private void WriteObject(object obj) + { + int i = 0; + if (_cirobj.TryGetValue(obj, out i) == false) + _cirobj.Add(obj, _cirobj.Count + 1); + else + { + if (_current_depth > 0 && _params.InlineCircularReferences == false) + { + //_circular = true; + _output.Append("{\"$i\":"); + _output.Append(i.ToString()); + _output.Append("}"); + return; + } + } + if (_params.UsingGlobalTypes == false) + _output.Append('{'); + else + { + if (_TypesWritten == false) + { + _output.Append('{'); + _before = _output.Length; + //_output = new StringBuilder(); + } + else + _output.Append('{'); + } + _TypesWritten = true; + _current_depth++; + if (_current_depth > _MAX_DEPTH) + throw new Exception("Serializer encountered maximum depth of " + _MAX_DEPTH); + + + Dictionary map = new Dictionary(); + Type t = obj.GetType(); + bool append = false; + if (_params.UseExtensions) + { + if (_params.UsingGlobalTypes == false) + WritePairFast("$type", Reflection.Instance.GetTypeAssemblyName(t)); + else + { + int dt = 0; + string ct = Reflection.Instance.GetTypeAssemblyName(t); + if (_globalTypes.TryGetValue(ct, out dt) == false) + { + dt = _globalTypes.Count + 1; + _globalTypes.Add(ct, dt); + } + WritePairFast("$type", dt.ToString()); + } + append = true; + } + + Getters[] g = Reflection.Instance.GetGetters(t, _params.ShowReadOnlyProperties, _params.IgnoreAttributes); + int c = g.Length; + for (int ii = 0; ii < c; ii++) + { + var p = g[ii]; + object o = p.Getter(obj); + if (_params.SerializeNullValues == false && (o == null || o is DBNull)) + { + //append = false; + } + else + { + if (append) + _output.Append(','); + if (_params.SerializeToLowerCaseNames) + WritePair(p.lcName, o); + else + WritePair(p.Name, o); + if (o != null && _params.UseExtensions) + { + Type tt = o.GetType(); + if (tt == 
typeof(System.Object)) + map.Add(p.Name, tt.ToString()); + } + append = true; + } + } + if (map.Count > 0 && _params.UseExtensions) + { + _output.Append(",\"$map\":"); + WriteStringDictionary(map); + } + _output.Append('}'); + _current_depth--; + } + + private void WritePairFast(string name, string value) + { + WriteStringFast(name); + + _output.Append(':'); + + WriteStringFast(value); + } + + private void WritePair(string name, object value) + { + WriteStringFast(name); + + _output.Append(':'); + + WriteValue(value); + } + + private void WriteArray(IEnumerable array) + { + _output.Append('['); + + bool pendingSeperator = false; + + foreach (object obj in array) + { + if (pendingSeperator) _output.Append(','); + + WriteValue(obj); + + pendingSeperator = true; + } + _output.Append(']'); + } + + private void WriteStringDictionary(IDictionary dic) + { + _output.Append('{'); + + bool pendingSeparator = false; + + foreach (DictionaryEntry entry in dic) + { + if (_params.SerializeNullValues == false && (entry.Value == null)) + { + } + else + { + if (pendingSeparator) _output.Append(','); + + string k = (string)entry.Key; + if (_params.SerializeToLowerCaseNames) + WritePair(k.ToLower(), entry.Value); + else + WritePair(k, entry.Value); + pendingSeparator = true; + } + } + _output.Append('}'); + } + + private void WriteStringDictionary(IDictionary dic) + { + _output.Append('{'); + bool pendingSeparator = false; + foreach (KeyValuePair entry in dic) + { + if (_params.SerializeNullValues == false && (entry.Value == null)) + { + } + else + { + if (pendingSeparator) _output.Append(','); + string k = entry.Key; + + if (_params.SerializeToLowerCaseNames) + WritePair(k.ToLower(), entry.Value); + else + WritePair(k, entry.Value); + pendingSeparator = true; + } + } + _output.Append('}'); + } + + private void WriteDictionary(IDictionary dic) + { + _output.Append('['); + + bool pendingSeparator = false; + + foreach (DictionaryEntry entry in dic) + { + if (pendingSeparator) 
_output.Append(','); + _output.Append('{'); + WritePair("k", entry.Key); + _output.Append(","); + WritePair("v", entry.Value); + _output.Append('}'); + + pendingSeparator = true; + } + _output.Append(']'); + } + + private void WriteStringFast(string s) + { + _output.Append('\"'); + _output.Append(s); + _output.Append('\"'); + } + + private void WriteString(string s) + { + _output.Append('\"'); + + int runIndex = -1; + int l = s.Length; + for (var index = 0; index < l; ++index) + { + var c = s[index]; + + if (_useEscapedUnicode) + { + if (c >= ' ' && c < 128 && c != '\"' && c != '\\') + { + if (runIndex == -1) + runIndex = index; + + continue; + } + } + else + { + if (c != '\t' && c != '\n' && c != '\r' && c != '\"' && c != '\\')// && c != ':' && c!=',') + { + if (runIndex == -1) + runIndex = index; + + continue; + } + } + + if (runIndex != -1) + { + _output.Append(s, runIndex, index - runIndex); + runIndex = -1; + } + + switch (c) + { + case '\t': _output.Append("\\t"); break; + case '\r': _output.Append("\\r"); break; + case '\n': _output.Append("\\n"); break; + case '"': + case '\\': _output.Append('\\'); _output.Append(c); break; + default: + if (_useEscapedUnicode) + { + _output.Append("\\u"); + _output.Append(((int)c).ToString("X4", NumberFormatInfo.InvariantInfo)); + } + else + _output.Append(c); + + break; + } + } + + if (runIndex != -1) + _output.Append(s, runIndex, s.Length - runIndex); + + _output.Append('\"'); + } + } +} diff --git a/RaptorDB.Common/fastJSON/Reflection.cs b/RaptorDB.Common/fastJSON/Reflection.cs index 239b715..5feb437 100644 --- a/RaptorDB.Common/fastJSON/Reflection.cs +++ b/RaptorDB.Common/fastJSON/Reflection.cs @@ -1,587 +1,587 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Reflection.Emit; -using System.Reflection; -using System.Collections; -using System.Linq; -using RaptorDB.Common; -using System.Collections.Specialized; -using System.Data; - -namespace fastJSON -{ - public struct Getters - { 
- public string Name; - public string lcName; - public Reflection.GenericGetter Getter; - } - - public enum myPropInfoType - { - Int, - Long, - String, - Bool, - DateTime, - Enum, - Guid, - - Array, - ByteArray, - Dictionary, - StringKeyDictionary, - NameValue, - StringDictionary, -#if !SILVERLIGHT - Hashtable, - DataSet, - DataTable, -#endif - Custom, - Unknown, - } - - public struct myPropInfo - { - public Type pt; - public Type bt; - public Type changeType; - public Reflection.GenericSetter setter; - public Reflection.GenericGetter getter; - public Type[] GenericTypes; - public string Name; - public myPropInfoType Type; - public bool CanWrite; - - public bool IsClass; - public bool IsValueType; - public bool IsGenericType; - public bool IsStruct; - } - - public sealed class Reflection - { - // Sinlgeton pattern 4 from : http://csharpindepth.com/articles/general/singleton.aspx - private static readonly Reflection instance = new Reflection(); - // Explicit static constructor to tell C# compiler - // not to mark type as beforefieldinit - static Reflection() - { - } - private Reflection() - { - } - public static Reflection Instance { get { return instance; } } - - public delegate object GenericSetter(object target, object value); - public delegate object GenericGetter(object obj); - private delegate object CreateObject(); - - private SafeDictionary _tyname = new SafeDictionary(); - private SafeDictionary _typecache = new SafeDictionary(); - private SafeDictionary _constrcache = new SafeDictionary(); - private SafeDictionary _getterscache = new SafeDictionary(); - private SafeDictionary> _propertycache = new SafeDictionary>(); - private SafeDictionary _genericTypes = new SafeDictionary(); - private SafeDictionary _genericTypeDef = new SafeDictionary(); - - #region bjson specific - internal UnicodeEncoding unicode = new UnicodeEncoding(); - internal UTF8Encoding utf8 = new UTF8Encoding(); - #endregion - - #region json custom types - // JSON custom - internal 
SafeDictionary _customSerializer = new SafeDictionary(); - internal SafeDictionary _customDeserializer = new SafeDictionary(); - internal object CreateCustom(string v, Type type) - { - Deserialize d; - _customDeserializer.TryGetValue(type, out d); - return d(v); - } - - internal void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer) - { - if (type != null && serializer != null && deserializer != null) - { - _customSerializer.Add(type, serializer); - _customDeserializer.Add(type, deserializer); - // reset property cache - Reflection.Instance.ResetPropertyCache(); - } - } - - internal bool IsTypeRegistered(Type t) - { - if (_customSerializer.Count == 0) - return false; - Serialize s; - return _customSerializer.TryGetValue(t, out s); - } - #endregion - - public Type GetGenericTypeDefinition(Type t) - { - Type tt = null; - if (_genericTypeDef.TryGetValue(t, out tt)) - return tt; - else - { - tt = t.GetGenericTypeDefinition(); - _genericTypeDef.Add(t, tt); - return tt; - } - } - - public Type[] GetGenericArguments(Type t) - { - Type[] tt = null; - if (_genericTypes.TryGetValue(t, out tt)) - return tt; - else - { - tt = t.GetGenericArguments(); - _genericTypes.Add(t, tt); - return tt; - } - } - - public Dictionary Getproperties(Type type, string typename, bool customType) - { - Dictionary sd = null; - if (_propertycache.TryGetValue(typename, out sd)) - { - return sd; - } - else - { - sd = new Dictionary(); - PropertyInfo[] pr = type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static); - foreach (PropertyInfo p in pr) - { - if (p.GetIndexParameters().Length > 0) - {// Property is an indexer - continue; - } - myPropInfo d = CreateMyProp(p.PropertyType, p.Name, customType); - - d.setter = Reflection.CreateSetMethod(type, p); - if (d.setter != null) - d.CanWrite = true; - d.getter = Reflection.CreateGetMethod(type, p); - sd.Add(p.Name.ToLower(), d); - } - FieldInfo[] fi = type.GetFields(BindingFlags.Public | 
BindingFlags.Instance | BindingFlags.Static); - foreach (FieldInfo f in fi) - { - myPropInfo d = CreateMyProp(f.FieldType, f.Name, customType); - if (f.IsLiteral == false) - { - d.setter = Reflection.CreateSetField(type, f); - if (d.setter != null) - d.CanWrite = true; - d.getter = Reflection.CreateGetField(type, f); - sd.Add(f.Name.ToLower(), d); - } - } - - _propertycache.Add(typename, sd); - return sd; - } - } - - private myPropInfo CreateMyProp(Type t, string name, bool customType) - { - myPropInfo d = new myPropInfo(); - myPropInfoType d_type = myPropInfoType.Unknown; - - if (t == typeof(int) || t == typeof(int?)) d_type = myPropInfoType.Int; - else if (t == typeof(long) || t == typeof(long?)) d_type = myPropInfoType.Long; - else if (t == typeof(string)) d_type = myPropInfoType.String; - else if (t == typeof(bool) || t == typeof(bool?)) d_type = myPropInfoType.Bool; - else if (t == typeof(DateTime) || t == typeof(DateTime?)) d_type = myPropInfoType.DateTime; - else if (t.IsEnum) d_type = myPropInfoType.Enum; - else if (t == typeof(Guid) || t == typeof(Guid?)) d_type = myPropInfoType.Guid; - else if (t == typeof(StringDictionary)) d_type = myPropInfoType.StringDictionary; - else if (t == typeof(NameValueCollection)) d_type = myPropInfoType.NameValue; - else if (t.IsArray) - { - d.bt = t.GetElementType(); - if (t == typeof(byte[])) - d_type = myPropInfoType.ByteArray; - else - d_type = myPropInfoType.Array; - } - else if (t.Name.Contains("Dictionary")) - { - d.GenericTypes = Reflection.Instance.GetGenericArguments(t);// t.GetGenericArguments(); - if (d.GenericTypes.Length > 0 && d.GenericTypes[0] == typeof(string)) - d_type = myPropInfoType.StringKeyDictionary; - else - d_type = myPropInfoType.Dictionary; - } -#if !SILVERLIGHT - else if (t == typeof(Hashtable)) d_type = myPropInfoType.Hashtable; - else if (t == typeof(DataSet)) d_type = myPropInfoType.DataSet; - else if (t == typeof(DataTable)) d_type = myPropInfoType.DataTable; -#endif - else if (customType) - 
d_type = myPropInfoType.Custom; - - if (t.IsValueType && !t.IsPrimitive && !t.IsEnum && t != typeof(decimal)) - d.IsStruct = true; - - d.IsClass = t.IsClass; - d.IsValueType = t.IsValueType; - if (t.IsGenericType) - { - d.IsGenericType = true; - d.bt = t.GetGenericArguments()[0]; - } - - d.pt = t; - d.Name = name; - d.changeType = GetChangeType(t); - d.Type = d_type; - - return d; - } - - private Type GetChangeType(Type conversionType) - { - if (conversionType.IsGenericType && conversionType.GetGenericTypeDefinition().Equals(typeof(Nullable<>))) - return Reflection.Instance.GetGenericArguments(conversionType)[0];// conversionType.GetGenericArguments()[0]; - - return conversionType; - } - - #region [ PROPERTY GET SET ] - public string GetTypeAssemblyName(Type t) - { - string val = ""; - if (_tyname.TryGetValue(t, out val)) - return val; - else - { - string s = t.AssemblyQualifiedName; - _tyname.Add(t, s); - return s; - } - } - - internal Type GetTypeFromCache(string typename) - { - Type val = null; - if (_typecache.TryGetValue(typename, out val)) - return val; - else - { - Type t = Type.GetType(typename); - if (t == null) // RaptorDB : loading runtime assemblies - { - t = Type.GetType(typename, (name) => { - return AppDomain.CurrentDomain.GetAssemblies().Where(z => z.FullName == name.FullName).FirstOrDefault(); - }, null, true); - } - _typecache.Add(typename, t); - return t; - } - } - - internal object FastCreateInstance(Type objtype) - { - try - { - CreateObject c = null; - if (_constrcache.TryGetValue(objtype, out c)) - { - return c(); - } - else - { - if (objtype.IsClass) - { - DynamicMethod dynMethod = new DynamicMethod("_", objtype, null); - ILGenerator ilGen = dynMethod.GetILGenerator(); - ilGen.Emit(OpCodes.Newobj, objtype.GetConstructor(Type.EmptyTypes)); - ilGen.Emit(OpCodes.Ret); - c = (CreateObject)dynMethod.CreateDelegate(typeof(CreateObject)); - _constrcache.Add(objtype, c); - } - else // structs - { - DynamicMethod dynMethod = new DynamicMethod("_", 
typeof(object), null); - ILGenerator ilGen = dynMethod.GetILGenerator(); - var lv = ilGen.DeclareLocal(objtype); - ilGen.Emit(OpCodes.Ldloca_S, lv); - ilGen.Emit(OpCodes.Initobj, objtype); - ilGen.Emit(OpCodes.Ldloc_0); - ilGen.Emit(OpCodes.Box, objtype); - ilGen.Emit(OpCodes.Ret); - c = (CreateObject)dynMethod.CreateDelegate(typeof(CreateObject)); - _constrcache.Add(objtype, c); - } - return c(); - } - } - catch (Exception exc) - { - throw new Exception(string.Format("Failed to fast create instance for type '{0}' from assembly '{1}'", - objtype.FullName, objtype.AssemblyQualifiedName), exc); - } - } - - internal static GenericSetter CreateSetField(Type type, FieldInfo fieldInfo) - { - Type[] arguments = new Type[2]; - arguments[0] = arguments[1] = typeof(object); - - DynamicMethod dynamicSet = new DynamicMethod("_", typeof(object), arguments, type); - - ILGenerator il = dynamicSet.GetILGenerator(); - - if (!type.IsClass) // structs - { - var lv = il.DeclareLocal(type); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Unbox_Any, type); - il.Emit(OpCodes.Stloc_0); - il.Emit(OpCodes.Ldloca_S, lv); - il.Emit(OpCodes.Ldarg_1); - if (fieldInfo.FieldType.IsClass) - il.Emit(OpCodes.Castclass, fieldInfo.FieldType); - else - il.Emit(OpCodes.Unbox_Any, fieldInfo.FieldType); - il.Emit(OpCodes.Stfld, fieldInfo); - il.Emit(OpCodes.Ldloc_0); - il.Emit(OpCodes.Box, type); - il.Emit(OpCodes.Ret); - } - else - { - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Ldarg_1); - if (fieldInfo.FieldType.IsValueType) - il.Emit(OpCodes.Unbox_Any, fieldInfo.FieldType); - il.Emit(OpCodes.Stfld, fieldInfo); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Ret); - } - return (GenericSetter)dynamicSet.CreateDelegate(typeof(GenericSetter)); - } - - internal static GenericSetter CreateSetMethod(Type type, PropertyInfo propertyInfo) - { - MethodInfo setMethod = propertyInfo.GetSetMethod(); - if (setMethod == null) - return null; - - Type[] arguments = new Type[2]; - arguments[0] = arguments[1] = 
typeof(object); - - DynamicMethod setter = new DynamicMethod("_", typeof(object), arguments); - ILGenerator il = setter.GetILGenerator(); - - if (!type.IsClass) // structs - { - var lv = il.DeclareLocal(type); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Unbox_Any, type); - il.Emit(OpCodes.Stloc_0); - il.Emit(OpCodes.Ldloca_S, lv); - il.Emit(OpCodes.Ldarg_1); - if (propertyInfo.PropertyType.IsClass) - il.Emit(OpCodes.Castclass, propertyInfo.PropertyType); - else - il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType); - il.EmitCall(OpCodes.Call, setMethod, null); - il.Emit(OpCodes.Ldloc_0); - il.Emit(OpCodes.Box, type); - } - else - { - if (!setMethod.IsStatic) - { - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Castclass, propertyInfo.DeclaringType); - il.Emit(OpCodes.Ldarg_1); - if (propertyInfo.PropertyType.IsClass) - il.Emit(OpCodes.Castclass, propertyInfo.PropertyType); - else - il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType); - il.EmitCall(OpCodes.Callvirt, setMethod, null); - il.Emit(OpCodes.Ldarg_0); - } - else - { - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Ldarg_1); - if (propertyInfo.PropertyType.IsClass) - il.Emit(OpCodes.Castclass, propertyInfo.PropertyType); - else - il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType); - il.Emit(OpCodes.Call, setMethod); - } - } - - il.Emit(OpCodes.Ret); - - return (GenericSetter)setter.CreateDelegate(typeof(GenericSetter)); - } - - internal static GenericGetter CreateGetField(Type type, FieldInfo fieldInfo) - { - DynamicMethod dynamicGet = new DynamicMethod("_", typeof(object), new Type[] { typeof(object) }, type); - - ILGenerator il = dynamicGet.GetILGenerator(); - - if (!type.IsClass) // structs - { - var lv = il.DeclareLocal(type); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Unbox_Any, type); - il.Emit(OpCodes.Stloc_0); - il.Emit(OpCodes.Ldloca_S, lv); - il.Emit(OpCodes.Ldfld, fieldInfo); - if (fieldInfo.FieldType.IsValueType) - il.Emit(OpCodes.Box, fieldInfo.FieldType); - } - else - { - 
il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Ldfld, fieldInfo); - if (fieldInfo.FieldType.IsValueType) - il.Emit(OpCodes.Box, fieldInfo.FieldType); - } - - il.Emit(OpCodes.Ret); - - return (GenericGetter)dynamicGet.CreateDelegate(typeof(GenericGetter)); - } - - internal static GenericGetter CreateGetMethod(Type type, PropertyInfo propertyInfo) - { - MethodInfo getMethod = propertyInfo.GetGetMethod(); - if (getMethod == null) - return null; - - DynamicMethod getter = new DynamicMethod("_", typeof(object), new Type[] { typeof(object) }, type); - - ILGenerator il = getter.GetILGenerator(); - - if (!type.IsClass) // structs - { - var lv = il.DeclareLocal(type); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Unbox_Any, type); - il.Emit(OpCodes.Stloc_0); - il.Emit(OpCodes.Ldloca_S, lv); - il.EmitCall(OpCodes.Call, getMethod, null); - if (propertyInfo.PropertyType.IsValueType) - il.Emit(OpCodes.Box, propertyInfo.PropertyType); - } - else - { - if (!getMethod.IsStatic) - { - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Castclass, propertyInfo.DeclaringType); - il.EmitCall(OpCodes.Callvirt, getMethod, null); - } - else - il.Emit(OpCodes.Call, getMethod); - - if (propertyInfo.PropertyType.IsValueType) - il.Emit(OpCodes.Box, propertyInfo.PropertyType); - } - - il.Emit(OpCodes.Ret); - - return (GenericGetter)getter.CreateDelegate(typeof(GenericGetter)); - } - - public Getters[] GetGetters(Type type, bool ShowReadOnlyProperties, List IgnoreAttributes)// JSONParameters param) - { - Getters[] val = null; - if (_getterscache.TryGetValue(type, out val)) - return val; - - PropertyInfo[] props = type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static); - List getters = new List(); - foreach (PropertyInfo p in props) - { - if (p.GetIndexParameters().Length > 0) - {// Property is an indexer - continue; - } - if (!p.CanWrite && ShowReadOnlyProperties == false) continue; - if (IgnoreAttributes != null) - { - bool found = false; - foreach (var ignoreAttr in 
IgnoreAttributes) - { - if (p.IsDefined(ignoreAttr, false)) - { - found = true; - break; - } - } - if (found) - continue; - } - GenericGetter g = CreateGetMethod(type, p); - if (g != null) - getters.Add(new Getters { Getter = g, Name = p.Name, lcName = p.Name.ToLower() }); - } - - FieldInfo[] fi = type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.Static); - foreach (var f in fi) - { - if (IgnoreAttributes != null) - { - bool found = false; - foreach (var ignoreAttr in IgnoreAttributes) - { - if (f.IsDefined(ignoreAttr, false)) - { - found = true; - break; - } - } - if (found) - continue; - } - if (f.IsLiteral == false) - { - GenericGetter g = CreateGetField(type, f); - if (g != null) - getters.Add(new Getters { Getter = g, Name = f.Name, lcName = f.Name.ToLower() }); - } - } - val = getters.ToArray(); - _getterscache.Add(type, val); - return val; - } - - #endregion - - internal void ResetPropertyCache() - { - _propertycache = new SafeDictionary>(); - } - - internal void ClearReflectionCache() - { - _tyname = new SafeDictionary(); - _typecache = new SafeDictionary(); - _constrcache = new SafeDictionary(); - _getterscache = new SafeDictionary(); - _propertycache = new SafeDictionary>(); - _genericTypes = new SafeDictionary(); - _genericTypeDef = new SafeDictionary(); - } - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.Reflection.Emit; +using System.Reflection; +using System.Collections; +using System.Linq; +using RaptorDB.Common; +using System.Collections.Specialized; +using System.Data; + +namespace fastJSON +{ + public struct Getters + { + public string Name; + public string lcName; + public Reflection.GenericGetter Getter; + } + + public enum myPropInfoType + { + Int, + Long, + String, + Bool, + DateTime, + Enum, + Guid, + + Array, + ByteArray, + Dictionary, + StringKeyDictionary, + NameValue, + StringDictionary, +#if !SILVERLIGHT + Hashtable, + DataSet, + DataTable, +#endif + Custom, + Unknown, + 
}

/// <summary>
/// Cached per-member metadata used by the JSON serializer/deserializer
/// to avoid repeated reflection lookups.
/// </summary>
public struct myPropInfo
{
    public Type pt;                         // the member's declared type
    public Type bt;                         // element type (arrays) or first generic argument
    public Type changeType;                 // target for Convert.ChangeType (Nullable<T> unwrapped)
    public Reflection.GenericSetter setter; // compiled setter delegate (null when read-only)
    public Reflection.GenericGetter getter; // compiled getter delegate
    public Type[] GenericTypes;             // generic arguments for dictionary-like types
    public string Name;
    public myPropInfoType Type;             // coarse classification used by the deserializer
    public bool CanWrite;

    public bool IsClass;
    public bool IsValueType;
    public bool IsGenericType;
    public bool IsStruct;
}

/// <summary>
/// Central reflection helper: caches type-name lookups, compiles IL-based
/// get/set delegates for fields and properties, and builds fast
/// parameterless constructors. Exposed as a singleton.
/// </summary>
public sealed class Reflection
{
    // Singleton pattern 4 from : http://csharpindepth.com/articles/general/singleton.aspx
    private static readonly Reflection instance = new Reflection();
    // Explicit static constructor to tell C# compiler
    // not to mark type as beforefieldinit
    static Reflection()
    {
    }
    private Reflection()
    {
    }
    public static Reflection Instance { get { return instance; } }

    public delegate object GenericSetter(object target, object value);
    public delegate object GenericGetter(object obj);
    private delegate object CreateObject();

    // NOTE(review): the generic type arguments below were reconstructed; the
    // extracted source had them stripped. Verify against the original file.
    private SafeDictionary<Type, string> _tyname = new SafeDictionary<Type, string>();
    private SafeDictionary<string, Type> _typecache = new SafeDictionary<string, Type>();
    private SafeDictionary<Type, CreateObject> _constrcache = new SafeDictionary<Type, CreateObject>();
    private SafeDictionary<Type, Getters[]> _getterscache = new SafeDictionary<Type, Getters[]>();
    private SafeDictionary<string, Dictionary<string, myPropInfo>> _propertycache = new SafeDictionary<string, Dictionary<string, myPropInfo>>();
    private SafeDictionary<Type, Type[]> _genericTypes = new SafeDictionary<Type, Type[]>();
    private SafeDictionary<Type, Type> _genericTypeDef = new SafeDictionary<Type, Type>();

    #region bjson specific
    internal UnicodeEncoding unicode = new UnicodeEncoding();
    internal UTF8Encoding utf8 = new UTF8Encoding();
    #endregion

    #region json custom types
    // JSON custom
    internal SafeDictionary<Type, Serialize> _customSerializer = new SafeDictionary<Type, Serialize>();
    internal SafeDictionary<Type, Deserialize> _customDeserializer = new SafeDictionary<Type, Deserialize>();

    /// <summary>
    /// Deserializes <paramref name="v"/> with the custom deserializer
    /// registered for <paramref name="type"/>.
    /// </summary>
    internal object CreateCustom(string v, Type type)
    {
        Deserialize d;
        // NOTE(review): assumes a deserializer was registered for 'type';
        // an unregistered type throws NullReferenceException here - confirm
        // callers guard with IsTypeRegistered.
        _customDeserializer.TryGetValue(type, out d);
        return d(v);
    }

    /// <summary>
    /// Registers a custom (de)serializer pair for a type. No-op when any
    /// argument is null. Clears the property cache so classifications that
    /// depend on custom registration are recomputed.
    /// </summary>
    internal void RegisterCustomType(Type type, Serialize serializer, Deserialize deserializer)
    {
        if (type != null && serializer != null && deserializer != null)
        {
            _customSerializer.Add(type, serializer);
            _customDeserializer.Add(type, deserializer);
            // reset property cache
            Reflection.Instance.ResetPropertyCache();
        }
    }

    internal bool IsTypeRegistered(Type t)
    {
        if (_customSerializer.Count == 0)
            return false;
        Serialize s;
        return _customSerializer.TryGetValue(t, out s);
    }
    #endregion

    /// <summary>Cached wrapper over <see cref="Type.GetGenericTypeDefinition"/>.</summary>
    public Type GetGenericTypeDefinition(Type t)
    {
        Type tt = null;
        if (_genericTypeDef.TryGetValue(t, out tt))
            return tt;
        else
        {
            tt = t.GetGenericTypeDefinition();
            _genericTypeDef.Add(t, tt);
            return tt;
        }
    }

    /// <summary>Cached wrapper over <see cref="Type.GetGenericArguments"/>.</summary>
    public Type[] GetGenericArguments(Type t)
    {
        Type[] tt = null;
        if (_genericTypes.TryGetValue(t, out tt))
            return tt;
        else
        {
            tt = t.GetGenericArguments();
            _genericTypes.Add(t, tt);
            return tt;
        }
    }

    /// <summary>
    /// Builds (and caches by <paramref name="typename"/>) the lower-cased
    /// member-name -> myPropInfo map for all public instance/static
    /// properties and fields of <paramref name="type"/>.
    /// </summary>
    public Dictionary<string, myPropInfo> Getproperties(Type type, string typename, bool customType)
    {
        Dictionary<string, myPropInfo> sd = null;
        if (_propertycache.TryGetValue(typename, out sd))
        {
            return sd;
        }
        else
        {
            sd = new Dictionary<string, myPropInfo>();
            PropertyInfo[] pr = type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static);
            foreach (PropertyInfo p in pr)
            {
                if (p.GetIndexParameters().Length > 0)
                {// Property is an indexer
                    continue;
                }
                myPropInfo d = CreateMyProp(p.PropertyType, p.Name, customType);

                d.setter = Reflection.CreateSetMethod(type, p);
                if (d.setter != null)
                    d.CanWrite = true;
                d.getter = Reflection.CreateGetMethod(type, p);
                sd.Add(p.Name.ToLower(), d);
            }
            FieldInfo[] fi = type.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static);
            foreach (FieldInfo f in fi)
            {
                myPropInfo d = CreateMyProp(f.FieldType, f.Name, customType);
                if (f.IsLiteral == false) // const fields cannot be written
                {
                    d.setter = Reflection.CreateSetField(type, f);
                    if (d.setter != null)
                        d.CanWrite = true;
                    d.getter = Reflection.CreateGetField(type, f);
                    sd.Add(f.Name.ToLower(), d);
                }
            }

            _propertycache.Add(typename, sd);
            return sd;
        }
    }

    /// <summary>
    /// Classifies a member type into a myPropInfoType and records the
    /// auxiliary type info (element type, generic args, change type).
    /// </summary>
    private myPropInfo CreateMyProp(Type t, string name, bool customType)
    {
        myPropInfo d = new myPropInfo();
        myPropInfoType d_type = myPropInfoType.Unknown;

        if (t == typeof(int) || t == typeof(int?)) d_type = myPropInfoType.Int;
        else if (t == typeof(long) || t == typeof(long?)) d_type = myPropInfoType.Long;
        else if (t == typeof(string)) d_type = myPropInfoType.String;
        else if (t == typeof(bool) || t == typeof(bool?)) d_type = myPropInfoType.Bool;
        else if (t == typeof(DateTime) || t == typeof(DateTime?)) d_type = myPropInfoType.DateTime;
        else if (t.IsEnum) d_type = myPropInfoType.Enum;
        else if (t == typeof(Guid) || t == typeof(Guid?)) d_type = myPropInfoType.Guid;
        else if (t == typeof(StringDictionary)) d_type = myPropInfoType.StringDictionary;
        else if (t == typeof(NameValueCollection)) d_type = myPropInfoType.NameValue;
        else if (t.IsArray)
        {
            d.bt = t.GetElementType();
            if (t == typeof(byte[]))
                d_type = myPropInfoType.ByteArray;
            else
                d_type = myPropInfoType.Array;
        }
        else if (t.Name.Contains("Dictionary")) // name-based check covers Dictionary<,> and SortedDictionary<,>
        {
            d.GenericTypes = Reflection.Instance.GetGenericArguments(t);
            if (d.GenericTypes.Length > 0 && d.GenericTypes[0] == typeof(string))
                d_type = myPropInfoType.StringKeyDictionary;
            else
                d_type = myPropInfoType.Dictionary;
        }
#if !SILVERLIGHT
        else if (t == typeof(Hashtable)) d_type = myPropInfoType.Hashtable;
        else if (t == typeof(DataSet)) d_type = myPropInfoType.DataSet;
        else if (t == typeof(DataTable)) d_type = myPropInfoType.DataTable;
#endif
        else if (customType)
            d_type = myPropInfoType.Custom;

        if (t.IsValueType && !t.IsPrimitive && !t.IsEnum && t != typeof(decimal))
            d.IsStruct = true;

        d.IsClass = t.IsClass;
        d.IsValueType = t.IsValueType;
        if (t.IsGenericType)
        {
            d.IsGenericType = true;
            d.bt = t.GetGenericArguments()[0];
        }

        d.pt = t;
        d.Name = name;
        d.changeType = GetChangeType(t);
        d.Type = d_type;

        return d;
    }

    // Unwraps Nullable<T> to T for Convert.ChangeType; otherwise returns the type itself.
    private Type GetChangeType(Type conversionType)
    {
        if (conversionType.IsGenericType && conversionType.GetGenericTypeDefinition().Equals(typeof(Nullable<>)))
            return Reflection.Instance.GetGenericArguments(conversionType)[0];

        return conversionType;
    }

    #region [ PROPERTY GET SET ]
    /// <summary>Cached assembly-qualified name for a type.</summary>
    public string GetTypeAssemblyName(Type t)
    {
        string val = "";
        if (_tyname.TryGetValue(t, out val))
            return val;
        else
        {
            string s = t.AssemblyQualifiedName;
            _tyname.Add(t, s);
            return s;
        }
    }

    /// <summary>
    /// Cached Type.GetType by name; falls back to scanning already-loaded
    /// assemblies so types from runtime-loaded assemblies resolve.
    /// </summary>
    internal Type GetTypeFromCache(string typename)
    {
        Type val = null;
        if (_typecache.TryGetValue(typename, out val))
            return val;
        else
        {
            Type t = Type.GetType(typename);
            if (t == null) // RaptorDB : loading runtime assemblies
            {
                t = Type.GetType(typename, (name) => {
                    return AppDomain.CurrentDomain.GetAssemblies().Where(z => z.FullName == name.FullName).FirstOrDefault();
                }, null, true);
            }
            _typecache.Add(typename, t);
            return t;
        }
    }

    /// <summary>
    /// Creates an instance of <paramref name="objtype"/> through a cached
    /// IL-emitted factory (parameterless ctor for classes, initobj+box for structs).
    /// </summary>
    internal object FastCreateInstance(Type objtype)
    {
        try
        {
            CreateObject c = null;
            if (_constrcache.TryGetValue(objtype, out c))
            {
                return c();
            }
            else
            {
                if (objtype.IsClass)
                {
                    DynamicMethod dynMethod = new DynamicMethod("_", objtype, null);
                    ILGenerator ilGen = dynMethod.GetILGenerator();
                    ilGen.Emit(OpCodes.Newobj, objtype.GetConstructor(Type.EmptyTypes));
                    ilGen.Emit(OpCodes.Ret);
                    c = (CreateObject)dynMethod.CreateDelegate(typeof(CreateObject));
                    _constrcache.Add(objtype, c);
                }
                else // structs
                {
                    DynamicMethod dynMethod = new DynamicMethod("_", typeof(object), null);
                    ILGenerator ilGen = dynMethod.GetILGenerator();
                    var lv = ilGen.DeclareLocal(objtype);
                    ilGen.Emit(OpCodes.Ldloca_S, lv);
                    ilGen.Emit(OpCodes.Initobj, objtype);
                    ilGen.Emit(OpCodes.Ldloc_0);
                    ilGen.Emit(OpCodes.Box, objtype);
                    ilGen.Emit(OpCodes.Ret);
                    c = (CreateObject)dynMethod.CreateDelegate(typeof(CreateObject));
                    _constrcache.Add(objtype, c);
                }
                return c();
            }
        }
        catch (Exception exc)
        {
            throw new Exception(string.Format("Failed to fast create instance for type '{0}' from assembly '{1}'",
                objtype.FullName, objtype.AssemblyQualifiedName), exc);
        }
    }

    /// <summary>
    /// Emits a (object target, object value) -> object setter for a field.
    /// For structs the boxed target is copied, mutated, re-boxed and returned.
    /// </summary>
    internal static GenericSetter CreateSetField(Type type, FieldInfo fieldInfo)
    {
        Type[] arguments = new Type[2];
        arguments[0] = arguments[1] = typeof(object);

        DynamicMethod dynamicSet = new DynamicMethod("_", typeof(object), arguments, type);

        ILGenerator il = dynamicSet.GetILGenerator();

        if (!type.IsClass) // structs
        {
            var lv = il.DeclareLocal(type);
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Unbox_Any, type);
            il.Emit(OpCodes.Stloc_0);
            il.Emit(OpCodes.Ldloca_S, lv);
            il.Emit(OpCodes.Ldarg_1);
            if (fieldInfo.FieldType.IsClass)
                il.Emit(OpCodes.Castclass, fieldInfo.FieldType);
            else
                il.Emit(OpCodes.Unbox_Any, fieldInfo.FieldType);
            il.Emit(OpCodes.Stfld, fieldInfo);
            il.Emit(OpCodes.Ldloc_0);
            il.Emit(OpCodes.Box, type);
            il.Emit(OpCodes.Ret);
        }
        else
        {
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Ldarg_1);
            if (fieldInfo.FieldType.IsValueType)
                il.Emit(OpCodes.Unbox_Any, fieldInfo.FieldType);
            il.Emit(OpCodes.Stfld, fieldInfo);
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Ret);
        }
        return (GenericSetter)dynamicSet.CreateDelegate(typeof(GenericSetter));
    }

    /// <summary>
    /// Emits a (object target, object value) -> object setter for a property,
    /// or returns null when the property has no public set accessor.
    /// </summary>
    internal static GenericSetter CreateSetMethod(Type type, PropertyInfo propertyInfo)
    {
        MethodInfo setMethod = propertyInfo.GetSetMethod();
        if (setMethod == null)
            return null;

        Type[] arguments = new Type[2];
        arguments[0] = arguments[1] = typeof(object);

        DynamicMethod setter = new DynamicMethod("_", typeof(object), arguments);
        ILGenerator il = setter.GetILGenerator();

        if (!type.IsClass) // structs
        {
            var lv = il.DeclareLocal(type);
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Unbox_Any, type);
            il.Emit(OpCodes.Stloc_0);
            il.Emit(OpCodes.Ldloca_S, lv);
            il.Emit(OpCodes.Ldarg_1);
            if (propertyInfo.PropertyType.IsClass)
                il.Emit(OpCodes.Castclass, propertyInfo.PropertyType);
            else
                il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType);
            il.EmitCall(OpCodes.Call, setMethod, null);
            il.Emit(OpCodes.Ldloc_0);
            il.Emit(OpCodes.Box, type);
        }
        else
        {
            if (!setMethod.IsStatic)
            {
                il.Emit(OpCodes.Ldarg_0);
                il.Emit(OpCodes.Castclass, propertyInfo.DeclaringType);
                il.Emit(OpCodes.Ldarg_1);
                if (propertyInfo.PropertyType.IsClass)
                    il.Emit(OpCodes.Castclass, propertyInfo.PropertyType);
                else
                    il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType);
                il.EmitCall(OpCodes.Callvirt, setMethod, null);
                il.Emit(OpCodes.Ldarg_0);
            }
            else
            {
                il.Emit(OpCodes.Ldarg_0);
                il.Emit(OpCodes.Ldarg_1);
                if (propertyInfo.PropertyType.IsClass)
                    il.Emit(OpCodes.Castclass, propertyInfo.PropertyType);
                else
                    il.Emit(OpCodes.Unbox_Any, propertyInfo.PropertyType);
                il.Emit(OpCodes.Call, setMethod);
            }
        }

        il.Emit(OpCodes.Ret);

        return (GenericSetter)setter.CreateDelegate(typeof(GenericSetter));
    }

    /// <summary>Emits a (object target) -> object getter for a field.</summary>
    internal static GenericGetter CreateGetField(Type type, FieldInfo fieldInfo)
    {
        DynamicMethod dynamicGet = new DynamicMethod("_", typeof(object), new Type[] { typeof(object) }, type);

        ILGenerator il = dynamicGet.GetILGenerator();

        if (!type.IsClass) // structs
        {
            var lv = il.DeclareLocal(type);
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Unbox_Any, type);
            il.Emit(OpCodes.Stloc_0);
            il.Emit(OpCodes.Ldloca_S, lv);
            il.Emit(OpCodes.Ldfld, fieldInfo);
            if (fieldInfo.FieldType.IsValueType)
                il.Emit(OpCodes.Box, fieldInfo.FieldType);
        }
        else
        {
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Ldfld, fieldInfo);
            if (fieldInfo.FieldType.IsValueType)
                il.Emit(OpCodes.Box, fieldInfo.FieldType);
        }

        il.Emit(OpCodes.Ret);

        return (GenericGetter)dynamicGet.CreateDelegate(typeof(GenericGetter));
    }

    /// <summary>
    /// Emits a (object target) -> object getter for a property, or returns
    /// null when the property has no public get accessor.
    /// </summary>
    internal static GenericGetter CreateGetMethod(Type type, PropertyInfo propertyInfo)
    {
        MethodInfo getMethod = propertyInfo.GetGetMethod();
        if (getMethod == null)
            return null;

        DynamicMethod getter = new DynamicMethod("_", typeof(object), new Type[] { typeof(object) }, type);

        ILGenerator il = getter.GetILGenerator();

        if (!type.IsClass) // structs
        {
            var lv = il.DeclareLocal(type);
            il.Emit(OpCodes.Ldarg_0);
            il.Emit(OpCodes.Unbox_Any, type);
            il.Emit(OpCodes.Stloc_0);
            il.Emit(OpCodes.Ldloca_S, lv);
            il.EmitCall(OpCodes.Call, getMethod, null);
            if (propertyInfo.PropertyType.IsValueType)
                il.Emit(OpCodes.Box, propertyInfo.PropertyType);
        }
        else
        {
            if (!getMethod.IsStatic)
            {
                il.Emit(OpCodes.Ldarg_0);
                il.Emit(OpCodes.Castclass, propertyInfo.DeclaringType);
                il.EmitCall(OpCodes.Callvirt, getMethod, null);
            }
            else
                il.Emit(OpCodes.Call, getMethod);

            if (propertyInfo.PropertyType.IsValueType)
                il.Emit(OpCodes.Box, propertyInfo.PropertyType);
        }

        il.Emit(OpCodes.Ret);

        return (GenericGetter)getter.CreateDelegate(typeof(GenericGetter));
    }

    /// <summary>
    /// Builds (and caches per type) the list of readable members for
    /// serialization, honoring read-only filtering and ignore attributes.
    /// </summary>
    public Getters[] GetGetters(Type type, bool ShowReadOnlyProperties, List<Type> IgnoreAttributes)
    {
        Getters[] val = null;
        if (_getterscache.TryGetValue(type, out val))
            return val;

        PropertyInfo[] props = type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static);
        List<Getters> getters = new List<Getters>();
        foreach (PropertyInfo p in props)
        {
            if (p.GetIndexParameters().Length > 0)
            {// Property is an indexer
                continue;
            }
            if (!p.CanWrite && ShowReadOnlyProperties == false) continue;
            if (IgnoreAttributes != null)
            {
                bool found = false;
                foreach (var ignoreAttr in IgnoreAttributes)
                {
                    if (p.IsDefined(ignoreAttr, false))
                    {
                        found = true;
                        break;
                    }
                }
                if (found)
                    continue;
            }
            GenericGetter g = CreateGetMethod(type, p);
            if (g != null)
                getters.Add(new Getters { Getter = g, Name = p.Name, lcName = p.Name.ToLower() });
        }

        FieldInfo[] fi = type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.Static);
        foreach (var f in fi)
        {
            if (IgnoreAttributes != null)
            {
                bool found = false;
                foreach (var ignoreAttr in IgnoreAttributes)
                {
                    if (f.IsDefined(ignoreAttr, false))
                    {
                        found = true;
                        break;
                    }
                }
                if (found)
                    continue;
            }
            if (f.IsLiteral == false) // skip const fields
            {
                GenericGetter g = CreateGetField(type, f);
                if (g != null)
                    getters.Add(new Getters { Getter = g, Name = f.Name, lcName = f.Name.ToLower() });
            }
        }
        val = getters.ToArray();
        _getterscache.Add(type, val);
        return val;
    }

    #endregion

    internal void ResetPropertyCache()
    {
        _propertycache = new SafeDictionary<string, Dictionary<string, myPropInfo>>();
    }

    internal void ClearReflectionCache()
    {
        _tyname = new SafeDictionary<Type, string>();
        _typecache = new SafeDictionary<string, Type>();
        _constrcache = new SafeDictionary<Type, CreateObject>();
        _getterscache = new SafeDictionary<Type, Getters[]>();
        _propertycache = new SafeDictionary<string, Dictionary<string, myPropInfo>>();
        _genericTypes = new SafeDictionary<Type, Type[]>();
        _genericTypeDef = new SafeDictionary<Type, Type>();
    }
}
IDictionary); - return true; - } - - public override bool TryGetMember(GetMemberBinder binder, out object result) - { - if (_dictionary.TryGetValue(binder.Name, out result) == false) - if (_dictionary.TryGetValue(binder.Name.ToLower(), out result) == false) - return false;// throw new Exception("property not found " + binder.Name); - - if (result is IDictionary) - { - result = new DynamicJson(result as IDictionary); - } - else if (result is List) - { - List list = new List(); - foreach (object item in (List)result) - { - if (item is IDictionary) - list.Add(new DynamicJson(item as IDictionary)); - else - list.Add(item); - } - result = list; - } - - return _dictionary.ContainsKey(binder.Name); - } - } -} +#if net4 +using System; +using System.Collections.Generic; +using System.Dynamic; +using System.Linq; + +namespace fastJSON +{ + internal class DynamicJson : DynamicObject + { + private IDictionary _dictionary { get; set; } + private List _list { get; set; } + + public DynamicJson(string json) + { + var parse = fastJSON.JSON.Parse(json); + + if (parse is IDictionary) + _dictionary = (IDictionary)parse; + else + _list = (List)parse; + } + + private DynamicJson(object dictionary) + { + if (dictionary is IDictionary) + _dictionary = (IDictionary)dictionary; + } + + public override IEnumerable GetDynamicMemberNames() + { + return _dictionary.Keys.ToList(); + } + + public override bool TryGetIndex(GetIndexBinder binder, Object[] indexes, out Object result) + { + var index = indexes[0]; + if (index is int) + { + result = _list[(int) index]; + } + else + { + result = _dictionary[(string) index]; + } + if (result is IDictionary) + result = new DynamicJson(result as IDictionary); + return true; + } + + public override bool TryGetMember(GetMemberBinder binder, out object result) + { + if (_dictionary.TryGetValue(binder.Name, out result) == false) + if (_dictionary.TryGetValue(binder.Name.ToLower(), out result) == false) + return false;// throw new Exception("property not 
found " + binder.Name); + + if (result is IDictionary) + { + result = new DynamicJson(result as IDictionary); + } + else if (result is List) + { + List list = new List(); + foreach (object item in (List)result) + { + if (item is IDictionary) + list.Add(new DynamicJson(item as IDictionary)); + else + list.Add(item); + } + result = list; + } + + return _dictionary.ContainsKey(binder.Name); + } + } +} #endif \ No newline at end of file diff --git a/RaptorDB/AssemblyInfo.cs b/RaptorDB/AssemblyInfo.cs index 6d3305e..5da686d 100644 --- a/RaptorDB/AssemblyInfo.cs +++ b/RaptorDB/AssemblyInfo.cs @@ -1,8 +1,8 @@ -using System.Reflection; -using System.Runtime.CompilerServices; - -[assembly: AssemblyTitle("RaptorDB Document Store")] -[assembly: AssemblyDescription("NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication)")] -[assembly: AssemblyProduct("RaptorDB Document Store")] - - +using System.Reflection; +using System.Runtime.CompilerServices; + +[assembly: AssemblyTitle("RaptorDB Document Store")] +[assembly: AssemblyDescription("NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication)")] +[assembly: AssemblyProduct("RaptorDB Document Store")] + + diff --git a/RaptorDB/DataTypes/DataTypes.cs b/RaptorDB/DataTypes/DataTypes.cs index cc5c213..e4b1448 100644 --- a/RaptorDB/DataTypes/DataTypes.cs +++ b/RaptorDB/DataTypes/DataTypes.cs @@ -1,268 +1,267 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using RaptorDB.Common; - -namespace RaptorDB -{ - /// - /// Used to track ViewDelete usage for view rebuilds 
/// <summary>
/// Used to track ViewDelete usage for view rebuilds
/// </summary>
internal class View_delete
{
    public Guid ID = Guid.NewGuid();
    public string Viewname;
    public string Filter;
}

/// <summary>
/// Records a row insert against a view, for replay during rebuilds.
/// </summary>
internal class View_insert
{
    public Guid ID = Guid.NewGuid();
    public string Viewname;
    public object RowObject;
}

/// <summary>
/// Byte (de)serialization contract for fixed-size index key types.
/// NOTE(review): the generic parameter was reconstructed - the extracted
/// source had the type arguments stripped; verify against the original file.
/// </summary>
internal interface IGetBytes<T>
{
    byte[] GetBytes(T obj);
    T GetObject(byte[] buffer, int offset, int count);
}

/// <summary>
/// Maps a key type <typeparamref name="T"/> to its byte serializer and its
/// on-disk key size.
/// </summary>
internal class RDBDataType<T>
{
    /// <summary>
    /// Returns the shared singleton handler for T, or null when T is not a
    /// supported key type.
    /// </summary>
    public static IGetBytes<T> ByteHandler()
    {
        Type type = typeof(T);

        if (type == typeof(int)) return (IGetBytes<T>)int_handler.Instance;
        else if (type == typeof(uint)) return (IGetBytes<T>)uint_handler.Instance;
        else if (type == typeof(long)) return (IGetBytes<T>)long_handler.Instance;
        else if (type == typeof(Guid)) return (IGetBytes<T>)guid_handler.Instance;
        else if (type == typeof(string)) return (IGetBytes<T>)string_handler.Instance;
        else if (type == typeof(DateTime)) return (IGetBytes<T>)datetime_handler.Instance;
        else if (type == typeof(decimal)) return (IGetBytes<T>)decimal_handler.Instance;
        else if (type == typeof(short)) return (IGetBytes<T>)short_handler.Instance;
        else if (type == typeof(float)) return (IGetBytes<T>)float_handler.Instance;
        else if (type == typeof(byte)) return (IGetBytes<T>)byte_handler.Instance;
        else if (type == typeof(double)) return (IGetBytes<T>)double_handler.Instance;

        return null;
    }

    /// <summary>
    /// Serialized key size in bytes for T; strings use the configurable
    /// <paramref name="keysize"/>, unknown types default to 4.
    /// </summary>
    public static byte GetByteSize(byte keysize)
    {
        byte size = 4;
        Type t = typeof(T);

        if (t == typeof(int)) size = 4;
        if (t == typeof(uint)) size = 4;
        if (t == typeof(long)) size = 8;
        if (t == typeof(Guid)) size = 16;
        if (t == typeof(DateTime)) size = 8;
        if (t == typeof(decimal)) size = 16;
        if (t == typeof(float)) size = 4;
        if (t == typeof(short)) size = 2;
        if (t == typeof(string)) size = keysize;
        if (t == typeof(byte)) size = 1;
        if (t == typeof(double)) size = 8;

        return size;
    }

    /// <summary>Empty key value: "" for strings, default(T) otherwise.</summary>
    internal static object GetEmpty()
    {
        Type t = typeof(T);

        if (t == typeof(string))
            return "";

        return default(T);
    }
}

#region [ handlers ]
// Each handler is a stateless singleton; Instance fields are readonly since
// they are only ever read.

internal class double_handler : IGetBytes<double>
{
    public static readonly double_handler Instance = new double_handler();

    public byte[] GetBytes(double obj)
    {
        return BitConverter.GetBytes(obj);
    }

    public double GetObject(byte[] buffer, int offset, int count)
    {
        return BitConverter.ToDouble(buffer, offset);
    }
}

internal class byte_handler : IGetBytes<byte>
{
    public static readonly byte_handler Instance = new byte_handler();

    public byte[] GetBytes(byte obj)
    {
        return new byte[1] { obj };
    }

    public byte GetObject(byte[] buffer, int offset, int count)
    {
        return buffer[offset];
    }
}

internal class float_handler : IGetBytes<float>
{
    public static readonly float_handler Instance = new float_handler();

    public byte[] GetBytes(float obj)
    {
        return BitConverter.GetBytes(obj);
    }

    public float GetObject(byte[] buffer, int offset, int count)
    {
        return BitConverter.ToSingle(buffer, offset);
    }
}

internal class decimal_handler : IGetBytes<decimal>
{
    public static readonly decimal_handler Instance = new decimal_handler();

    // Serializes the four 32-bit parts from decimal.GetBits into 16 bytes.
    public byte[] GetBytes(decimal obj)
    {
        byte[] b = new byte[16];
        var bb = decimal.GetBits(obj);
        int index = 0;
        foreach (var d in bb)
        {
            byte[] db = Helper.GetBytes(d, false);
            Buffer.BlockCopy(db, 0, b, index, 4);
            index += 4;
        }

        return b;
    }

    public decimal GetObject(byte[] buffer, int offset, int count)
    {
        int[] i = new int[4];
        i[0] = Helper.ToInt32(buffer, offset);
        offset += 4;
        i[1] = Helper.ToInt32(buffer, offset);
        offset += 4;
        i[2] = Helper.ToInt32(buffer, offset);
        offset += 4;
        i[3] = Helper.ToInt32(buffer, offset);
        offset += 4;

        return new decimal(i);
    }
}

internal class short_handler : IGetBytes<short>
{
    public static readonly short_handler Instance = new short_handler();

    public byte[] GetBytes(short obj)
    {
        return Helper.GetBytes(obj, false);
    }

    public short GetObject(byte[] buffer, int offset, int count)
    {
        return Helper.ToInt16(buffer, offset);
    }
}

internal class string_handler : IGetBytes<string>
{
    public static readonly string_handler Instance = new string_handler();

    public byte[] GetBytes(string obj)
    {
        return Helper.GetBytes(obj);
    }

    public string GetObject(byte[] buffer, int offset, int count)
    {
        return Helper.GetString(buffer, offset, count);
    }
}

internal class int_handler : IGetBytes<int>
{
    public static readonly int_handler Instance = new int_handler();

    public byte[] GetBytes(int obj)
    {
        return Helper.GetBytes(obj, false);
    }

    public int GetObject(byte[] buffer, int offset, int count)
    {
        return Helper.ToInt32(buffer, offset);
    }
}

internal class uint_handler : IGetBytes<uint>
{
    public static readonly uint_handler Instance = new uint_handler();

    public byte[] GetBytes(uint obj)
    {
        return Helper.GetBytes(obj, false);
    }

    public uint GetObject(byte[] buffer, int offset, int count)
    {
        return (uint)Helper.ToInt32(buffer, offset);
    }
}

internal class long_handler : IGetBytes<long>
{
    public static readonly long_handler Instance = new long_handler();

    public byte[] GetBytes(long obj)
    {
        return Helper.GetBytes(obj, false);
    }

    public long GetObject(byte[] buffer, int offset, int count)
    {
        return Helper.ToInt64(buffer, offset);
    }
}

internal class guid_handler : IGetBytes<Guid>
{
    public static readonly guid_handler Instance = new guid_handler();

    public byte[] GetBytes(Guid obj)
    {
        return obj.ToByteArray();
    }

    public Guid GetObject(byte[] buffer, int offset, int count)
    {
        byte[] b = new byte[16];
        Buffer.BlockCopy(buffer, offset, b, 0, 16);
        return new Guid(b);
    }
}

internal class datetime_handler : IGetBytes<DateTime>
{
    public static readonly datetime_handler Instance = new datetime_handler();

    // Serializes the tick count only; Kind information is not preserved.
    public byte[] GetBytes(DateTime obj)
    {
        return Helper.GetBytes(obj.Ticks, false);
    }

    public DateTime GetObject(byte[] buffer, int offset, int count)
    {
        long ticks = Helper.ToInt64(buffer, offset);

        return new DateTime(ticks);
    }
}
#endregion
BitmapOffsetSwitchOverCount = 10; - /// - /// True = Save to other views in process , False = background save to other views - /// - public static bool BackgroundSaveToOtherViews = true; - /// - /// Default maximum string key size for indexes - /// - public static byte DefaultStringKeySize = 60; - /// - /// Free bitmap index memory on save - /// - public static bool FreeBitmapMemoryOnSave = false; - /// - /// Number of items in each index page (default = 10000) [Expert only, do not change] - /// - public static ushort PageItemCount = 10000; - /// - /// KeyStore save to disk timer - /// - public static int SaveIndexToDiskTimerSeconds = 1800; - /// - /// Flush the StorageFile stream immediately - /// - public static bool FlushStorageFileImmediately = false; - /// - /// Save doc as binary json - /// - public static bool SaveAsBinaryJSON = true; - /// - /// Remove completed tasks timer - /// - public static int TaskCleanupTimerSeconds = 3; - /// - /// Save to other views timer seconds if enabled - /// - public static int BackgroundSaveViewTimer = 1; - /// - /// How many items to process in a background view save event - /// - public static int BackgroundViewSaveBatchSize = 1000000; - ///// - ///// Check the restore folder for new backup files to restore - ///// - //public static int RestoreTimerSeconds = 10; // TODO : implement this - /// - /// Timer for full text indexing of original documents (default = 15 sec) - /// - public static int FullTextTimerSeconds = 15; - /// - /// How many documents to full text index in a batch - /// - public static int BackgroundFullTextIndexBatchSize = 10000; - /// - /// Free memory checking timer (default = 1800 sec ~ 30 min) - /// - public static int FreeMemoryTimerSeconds = 1800; - /// - /// Memory usage limit for internal caching (default = 100 Mb) [using GC.GetTotalMemory()] - /// - public static long MemoryLimit = 100; - /// - /// Backup cron schedule (default = "0 * * * *" [every hour]) - /// - public static string 
BackupCronSchedule = "0 * * * *"; - /// - /// Require primary view to be defined for save, false = key/value store (default = true) - /// - public static bool RequirePrimaryView = true; - /// - /// Maximum documents in each package for replication - /// - public static int PackageSizeItemCountLimit = 10000; - /// - /// Process inbox timer (default = 60 sec) - /// - public static int ProcessInboxTimerSeconds = 10; - /// - /// Split the data storage files in MegaBytes (default 0 = off) [500 = 500mb] - /// - You can set and unset this value anytime and it will operate from that point on. - /// - If you unset (0) the value previous split files will remain and all the data will go to the last file. - /// - public static ushort SplitStorageFilesMegaBytes = 0; - /// - /// Compress the documents in the storage file if it is over this size (default = 100 Kilobytes) - /// - You will be trading CPU for disk IO - /// - public static ushort CompressDocumentOverKiloBytes = 100; - /// - /// Disk block size for high frequency KV storage file (default = 2048) - /// * Do not use anything under 512 with large string keys - /// - public static ushort HighFrequencyKVDiskBlockSize = 2048; - /// - /// String key MGIndex that stores keys in an external file for smaller index files - /// - public static bool EnableOptimizedStringIndex = true; - } -} +using System; +using System.Collections.Generic; +using System.Text; + +namespace RaptorDB +{ + public class Global + { + /// + /// Store bitmap as int offsets then switch over to bitarray + /// + public static int BitmapOffsetSwitchOverCount = 10; + /// + /// True = Save to other views in process , False = background save to other views + /// + public static bool BackgroundSaveToOtherViews = true; + /// + /// Default maximum string key size for indexes + /// + public static byte DefaultStringKeySize = 60; + /// + /// Free bitmap index memory on save + /// + public static bool FreeBitmapMemoryOnSave = false; + /// + /// Number of items in each 
index page (default = 10000) [Expert only, do not change] + /// + public static ushort PageItemCount = 10000; + /// + /// Number of items in first index page to split. + /// Low values can speed up parallel set preformance early. + /// (default = 5000) + /// + public static ushort EarlyPageSplitSize = 5000; + /// + /// Number of pages considered as early + /// + public static int EarlyPageCount = 1; + /// + /// KeyStore save to disk timer + /// + public static int SaveIndexToDiskTimerSeconds = 1800; + /// + /// Flush the StorageFile stream immediately + /// + public static bool FlushStorageFileImmediately = false; + /// + /// Save doc as binary json + /// + public static bool SaveAsBinaryJSON = true; + /// + /// Remove completed tasks timer + /// + public static int TaskCleanupTimerSeconds = 3; + /// + /// Save to other views timer seconds if enabled + /// + public static int BackgroundSaveViewTimer = 1; + /// + /// How many items to process in a background view save event + /// + public static int BackgroundViewSaveBatchSize = 1000000; + ///// + ///// Check the restore folder for new backup files to restore + ///// + //public static int RestoreTimerSeconds = 10; // TODO : implement this + /// + /// Timer for full text indexing of original documents (default = 15 sec) + /// + public static int FullTextTimerSeconds = 15; + /// + /// How many documents to full text index in a batch + /// + public static int BackgroundFullTextIndexBatchSize = 10000; + /// + /// Free memory checking timer (default = 1800 sec ~ 30 min) + /// + public static int FreeMemoryTimerSeconds = 1800; + /// + /// Memory usage limit for internal caching (default = 100 Mb) [using GC.GetTotalMemory()] + /// + public static long MemoryLimit = 100; + /// + /// Backup cron schedule (default = "0 * * * *" [every hour]) + /// + public static string BackupCronSchedule = "0 * * * *"; + /// + /// Require primary view to be defined for save, false = key/value store (default = true) + /// + public static bool 
RequirePrimaryView = true; + /// + /// Maximum documents in each package for replication + /// + public static int PackageSizeItemCountLimit = 10000; + /// + /// Process inbox timer (default = 60 sec) + /// + public static int ProcessInboxTimerSeconds = 10; + /// + /// Split the data storage files in MegaBytes (default 0 = off) [500 = 500mb] + /// - You can set and unset this value anytime and it will operate from that point on. + /// - If you unset (0) the value previous split files will remain and all the data will go to the last file. + /// + public static ushort SplitStorageFilesMegaBytes = 0; + /// + /// Compress the documents in the storage file if it is over this size (default = 100 Kilobytes) + /// - You will be trading CPU for disk IO + /// + public static ushort CompressDocumentOverKiloBytes = 100; + /// + /// Disk block size for high frequency KV storage file (default = 2048) + /// * Do not use anything under 512 with large string keys + /// + public static ushort HighFrequencyKVDiskBlockSize = 2048; + /// + /// String key MGIndex that stores keys in an external file for smaller index files + /// + public static bool EnableOptimizedStringIndex = true; + } +} diff --git a/RaptorDB/Helper/WAHBitarray2.cs b/RaptorDB/Helper/WAHBitarray2.cs deleted file mode 100644 index 2dcffc8..0000000 --- a/RaptorDB/Helper/WAHBitarray2.cs +++ /dev/null @@ -1,712 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Collections; - -namespace RaptorDB -{ - internal class WAHBitArray - { - public enum TYPE - { - WAH = 1, - Bitarray = 0, - Indexes = 2 - } - - public WAHBitArray() - { - _state = TYPE.Indexes; - } - - public WAHBitArray(TYPE type, uint[] ints) - { - _state = type; - switch (type) - { - case TYPE.WAH: - _compressed = ints; - Uncompress(); - _state = TYPE.Bitarray; - _compressed = null; - break; - case TYPE.Bitarray: - _uncompressed = ints; - break; - case TYPE.Indexes: - _offsets = new Dictionary(); - foreach (var i in ints) 
- _offsets.Add(i, true); - break; - } - } - - private uint[] _compressed; - private uint[] _uncompressed; - private Dictionary _offsets = new Dictionary(); - private uint _curMax = 0; - private TYPE _state; - public bool isDirty = false; - - public WAHBitArray Copy() - { - lock (_lock) - { - uint[] i = GetBitArray(); - return new WAHBitArray(TYPE.Bitarray, i); - } - } - - public bool Get(int index) - { - lock (_lock) - { - if (_state == TYPE.Indexes) - { - bool b = false; - var f = _offsets.TryGetValue((uint)index, out b); - if (f) - return b; - else - return false; - } - CheckBitArray(); - - Resize(index); - - return internalGet(index); - } - } - - private object _lock = new object(); - public void Set(int index, bool val) - { - lock (_lock) - { - if (_state == TYPE.Indexes) - { - isDirty = true; - - if (val == true) - { - // bool b = false; - // if (_offsets.TryGetValue((uint)index, out b) == false) - // _offsets.Add((uint)index, true); - _offsets[(uint)index] = true; - // set max - if (index > _curMax) - _curMax = (uint)index; - } - else - { - _offsets.Remove((uint)index); - } - - ChangeTypeIfNeeded(); - return; - } - CheckBitArray(); - - Resize(index); - - internalSet(index, val); - } - } - - public int Length - { - set - { - if (_state == TYPE.Indexes) - { - // ignore - return; - } - CheckBitArray(); - int c = value >> 5; - c++; - if (c > _uncompressed.Length) - { - uint[] ar = new uint[c]; - _uncompressed.CopyTo(ar, 0); - _uncompressed = ar; - } - } - get - { - if (_state == TYPE.Indexes) - { - if (_offsets.Count == 0) return 0; - uint[] k = GetOffsets(); - - uint l = k[k.Length - 1]; - return (int)l; - } - CheckBitArray(); - return _uncompressed.Length << 5; - } - } - - #region [ B I T O P E R T A I O N S ] - public WAHBitArray And(WAHBitArray op) - { - lock (_lock) - { - uint[] left; - uint[] right; - prelogic(op, out left, out right); - - for (int i = 0; i < left.Length; i++) - left[i] &= right[i]; - - return new WAHBitArray(TYPE.Bitarray, left); - } - } - 
- public WAHBitArray AndNot(WAHBitArray op) - { - lock (_lock) - { - uint[] left; - uint[] right; - prelogic(op, out left, out right); - - for (int i = 0; i < left.Length; i++) - left[i] &= ~right[i]; - - return new WAHBitArray(TYPE.Bitarray, left); - } - } - - public WAHBitArray Or(WAHBitArray op) - { - lock (_lock) - { - uint[] left; - uint[] right; - prelogic(op, out left, out right); - - for (int i = 0; i < left.Length; i++) - left[i] |= right[i]; - - return new WAHBitArray(TYPE.Bitarray, left); - } - } - - public WAHBitArray Not(int size) - { - lock (_lock) - { - this.CheckBitArray(); - - uint[] left = this.GetBitArray(); - int c = left.Length; - int ms = size >> 5; - if (size - (ms << 5) > 0) - ms++; // include remainder - if (ms > c) - { - var a = new uint[ms]; - Array.Copy(left, 0, a, 0, c); - left = a; - c = ms; - } - - for (int i = 0; i < c; i++) - left[i] = ~left[i]; - - return new WAHBitArray(TYPE.Bitarray, left); - } - } - - public WAHBitArray Xor(WAHBitArray op) - { - lock (_lock) - { - uint[] left; - uint[] right; - prelogic(op, out left, out right); - - for (int i = 0; i < left.Length; i++) - left[i] ^= right[i]; - - return new WAHBitArray(TYPE.Bitarray, left); - } - } - #endregion - - private static int BitCount(uint n) - { // 32-bit recursive reduction using SWAR - n -= ((n >> 1) & 0x55555555); - n = (((n >> 2) & 0x33333333) + (n & 0x33333333)); - n = (((n >> 4) + n) & 0x0f0f0f0f); - return (int)((n * 0x01010101) >> 24); - } - - public long CountOnes() - { - if (_state == TYPE.Indexes) - { - return _offsets.Count; - } - - long c = 0; - CheckBitArray(); - - foreach (uint i in _uncompressed) - c += BitCount(i); - - return c; - } - - public long CountZeros() - { - if (_state == TYPE.Indexes) - { - long ones = _offsets.Count; - uint[] k = GetOffsets(); - long l = k[k.Length - 1]; - return l - ones; - } - - CheckBitArray(); - int count = _uncompressed.Length << 5; - long cc = CountOnes(); - - return count - cc; - } - - public void FreeMemory() - { - if 
(_state == TYPE.Bitarray) - { - if (_uncompressed != null) - { - Compress(_uncompressed); - _uncompressed = null; - _state = TYPE.WAH; - } - } - } - - public uint[] GetCompressed(out TYPE type) - { - type = TYPE.WAH; - - ChangeTypeIfNeeded(); - if (_state == TYPE.Indexes) - { - //data = UnpackOffsets(); - type = TYPE.Indexes; - return GetOffsets(); - } - else if (_uncompressed == null) - return new uint[] { 0 }; - uint[] data = _uncompressed; - Compress(data); - uint[] d = new uint[_compressed.Length]; - _compressed.CopyTo(d, 0); - return d; - } - - public IEnumerable GetBitIndexes() - { - if (_state == TYPE.Indexes) - { - foreach (int i in GetOffsets()) - yield return i; - } - else - { - CheckBitArray(); - int count = _uncompressed.Length; - - for (int i = 0; i < count; i++) - { - if (_uncompressed[i] > 0) - { - for (int j = 0; j < 32; j++) - { - bool b = internalGet((i << 5) + j); - if (b == true)// ones) - yield return (i << 5) + j; - } - } - } - } - } - - #region [ P R I V A T E ] - private uint[] GetOffsets() - { - uint[] k; - lock (_lock) - { - k = new uint[_offsets.Count]; - _offsets.Keys.CopyTo(k, 0); - } - Array.Sort(k); - return k; - } - - private void prelogic(WAHBitArray op, out uint[] left, out uint[] right) - { - this.CheckBitArray(); - - left = this.GetBitArray(); - right = op.GetBitArray(); - int ic = left.Length; - int uc = right.Length; - if (ic > uc) - { - uint[] ar = new uint[ic]; - right.CopyTo(ar, 0); - right = ar; - } - else if (ic < uc) - { - uint[] ar = new uint[uc]; - left.CopyTo(ar, 0); - left = ar; - } - } - - internal uint[] GetBitArray() - { - lock (_lock) - { - if (_state == TYPE.Indexes) - return UnpackOffsets(); - - this.CheckBitArray(); - uint[] ui = new uint[_uncompressed.Length]; - _uncompressed.CopyTo(ui, 0); - - return ui; - } - } - - private uint[] UnpackOffsets() - { - // return bitmap uints - uint max = 0; - if (_offsets.Count == 0) return new uint[0]; - uint[] k = GetOffsets(); - max = k[k.Length - 1]; - - uint[] ints = new 
uint[(max >> 5) + 1]; - - foreach (int index in k) - { - int pointer = ((int)index) >> 5; - uint mask = (uint)1 << (31 - // high order bit set - ((int)index % 32)); - - ints[pointer] |= mask; - } - - return ints; - } - - private void ChangeTypeIfNeeded() - { - if (_state != TYPE.Indexes) - return; - - uint T = (_curMax >> 5) + 1; - int c = _offsets.Count; - if (c > T && c > Global.BitmapOffsetSwitchOverCount) - { - // change type to WAH - _state = TYPE.Bitarray; - _uncompressed = new uint[0]; - // create bitmap - foreach (var i in _offsets.Keys) - Set((int)i, true); - // clear list - _offsets = new Dictionary(); - } - } - - private void Resize(int index) - { - if (_state == TYPE.Indexes) - return; - int c = index >> 5; - c++; - if(_uncompressed == null) - { - _uncompressed = new uint[c]; - return; - } - if (c > _uncompressed.Length) - { - uint[] ar = new uint[c]; - _uncompressed.CopyTo(ar, 0); - _uncompressed = ar; - } - } - - private void ResizeAsNeeded(List list, int index) - { - int count = index >> 5; - - while (list.Count < count) - list.Add(0); - } - - private void internalSet(int index, bool val) - { - isDirty = true; - int pointer = index >> 5; - uint mask = (uint)1 << (31 - // high order bit set - (index % 32)); - - if (val) - _uncompressed[pointer] |= mask; - else - _uncompressed[pointer] &= ~mask; - } - - private bool internalGet(int index) - { - int pointer = index >> 5; - uint mask = (uint)1 << (31 - // high order bit get - (index % 32)); - - if (pointer < _uncompressed.Length) - return (_uncompressed[pointer] & mask) != 0; - else - return false; - } - - private void CheckBitArray() - { - if (_state == TYPE.Bitarray) - return; - - if (_state == TYPE.WAH) - { - _uncompressed = new uint[0]; - Uncompress(); - _state = TYPE.Bitarray; - _compressed = null; - return; - } - } - - #region compress / uncompress - private uint Take31Bits(uint[] data, int index) - { - ulong l1 = 0; - ulong l2 = 0; - ulong l = 0; - ulong ret = 0; - int off = (index % 32); - int 
pointer = index >> 5; - - l1 = data[pointer]; - pointer++; - if (pointer < data.Length) - l2 = data[pointer]; - - l = (l1 << 32) + l2; - ret = (l >> (33 - off)) & 0x7fffffff; - - return (uint)ret; - } - - private void Compress(uint[] data) - { - List compressed = new List(); - uint zeros = 0; - uint ones = 0; - int count = data.Length << 5; - for (int i = 0; i < count; ) - { - uint num = Take31Bits(data, i); - i += 31; - if (num == 0) // all zero - { - zeros += 31; - FlushOnes(compressed, ref ones); - } - else if (num == 0x7fffffff) // all ones - { - ones += 31; - FlushZeros(compressed, ref zeros); - } - else // literal - { - FlushOnes(compressed, ref ones); - FlushZeros(compressed, ref zeros); - compressed.Add(num); - } - } - FlushOnes(compressed, ref ones); - FlushZeros(compressed, ref zeros); - _compressed = compressed.ToArray(); - } - - private void FlushOnes(List compressed, ref uint ones) - { - if (ones > 0) - { - uint n = 0xc0000000 + ones; - ones = 0; - compressed.Add(n); - } - } - - private void FlushZeros(List compressed, ref uint zeros) - { - if (zeros > 0) - { - uint n = 0x80000000 + zeros; - zeros = 0; - compressed.Add(n); - } - } - - private void Write31Bits(List list, int index, uint val) - { - this.ResizeAsNeeded(list, index + 32); - - int off = (index % 32); - int pointer = index >> 5; - - if (pointer >= list.Count - 1) - list.Add(0); - - ulong l = ((ulong)list[pointer] << 32) + list[pointer + 1]; - l |= (ulong)val << (33 - off); - - list[pointer] = (uint)(l >> 32); - list[pointer + 1] = (uint)(l & 0xffffffff); - } - - private void WriteOnes(List list, int index, uint count) - { - this.ResizeAsNeeded(list, index); - - int off = index % 32; - int pointer = index >> 5; - int ccount = (int)count; - int indx = index; - int x = 32 - off; - - if (pointer >= list.Count) - list.Add(0); - - if (ccount > x || x == 32) //current pointer - { - list[pointer] |= (uint)((0xffffffff >> off)); - ccount -= x; - indx += x; - } - else - { - list[pointer] |= 
(uint)((0xffffffff << ccount) >> off); - ccount = 0; - } - - bool checklast = true; - while (ccount >= 32)//full ints - { - if (checklast && list[list.Count - 1] == 0) - { - list.RemoveAt(list.Count - 1); - checklast = false; - } - - list.Add(0xffffffff); - ccount -= 32; - indx += 32; - } - int p = indx >> 5; - off = indx % 32; - if (ccount > 0) - { - uint i = 0xffffffff << (32 - ccount); - if (p > (list.Count - 1)) //remaining - list.Add(i); - else - list[p] |= (uint)(i >> off); - } - } - - private void Uncompress() - { - int index = 0; - List list = new List(); - if (_compressed == null) - return; - - foreach (uint ci in _compressed) - { - if ((ci & 0x80000000) == 0) // literal - { - Write31Bits(list, index, ci); - index += 31; - } - else - { - uint count = ci & 0x3fffffff; - if ((ci & 0x40000000) > 0) // ones count - WriteOnes(list, index, count); - - index += (int)count; - } - } - ResizeAsNeeded(list, index); - _uncompressed = list.ToArray(); - } - #endregion - - #endregion - - internal static WAHBitArray Fill(int count) - { - if (count > 0) - { - int c = count >> 5; - int r = count % 32; - if (r > 0) - c++; - uint[] ints = new uint[c]; - for (int i = 0; i < c - 1; i++) - ints[i] = 0xffffffff; - ints[c - 1] = 0xffffffff << (32 - r); - return new WAHBitArray(TYPE.Bitarray, ints); - } - return new WAHBitArray(); - } - - internal int GetFirst() - { - if (_state == TYPE.Indexes) - { - return (int)GetOffsets()[0]; - } - else - { - CheckBitArray(); - int count = _uncompressed.Length; - - for (int i = 0; i < count; i++) - { - if (_uncompressed[i] > 0) - { - for (int j = 0; j < 32; j++) - { - bool b = internalGet((i << 5) + j); - if (b == true)// ones) - return (i << 5) + j; - } - } - } - } - return 0; - } - } -} diff --git a/RaptorDB.Common/IRaptorDB.cs b/RaptorDB/IRaptorDB.cs similarity index 93% rename from RaptorDB.Common/IRaptorDB.cs rename to RaptorDB/IRaptorDB.cs index 7a77871..fd902b9 100644 --- a/RaptorDB.Common/IRaptorDB.cs +++ b/RaptorDB/IRaptorDB.cs @@ 
-1,350 +1,350 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Linq.Expressions; - -namespace RaptorDB.Common -{ - public delegate List ServerSideFunc(IRaptorDB rap, string filter); - - public class HistoryInfo - { - public int Version; - public DateTime ChangeDate; - } - - /// - /// High frequency mode Key/Value store with recycled storage file. - /// Use for rapid saves of the same key. - /// Views are not effected by saves in this storage. - /// NOTE : You do not have history of changes in this storage. - /// - public interface IKeyStoreHF - { - object GetObjectHF(string key); - bool SetObjectHF(string key, object obj); - bool DeleteKeyHF(string key); - int CountHF(); - bool ContainsHF(string key); - string[] GetKeysHF(); - void CompactStorageHF(); - - //IEnumerable EnumerateObjects(); - //string[] SearchKeys(string contains); // FIX : implement - } - - public interface IRaptorDB - { - /// - /// Save Bytes (files) to RptorDB storage - /// - /// - /// - /// - bool SaveBytes(Guid fileID, byte[] bytes); - - /// - /// Save a Document to RaptorDB - /// - /// - /// - /// - /// - bool Save(Guid docID, T document); - - - /// - /// Query all data in a view - /// - /// - /// - Result Query(string viewname); - - /// - /// Query all data in a view with paging - /// - /// - /// - /// - /// - Result Query(string viewname, int start, int count); - - /// - /// Query a View with a string filter - /// - /// - /// - /// - Result Query(string viewname, string filter); - - /// - /// Query a View with a string filter with paging - /// - /// - /// - /// - /// - /// - Result Query(string viewname, string filter, int start, int count); - - /// - /// Query a view with filter, paging and sorting - /// - /// - /// - /// - /// - /// - Result Query(string viewname, string filter, int start, int count, string orderby); - - /// - /// Count all data associated with View name - /// - /// - /// - int Count(string viewname); - - /// - /// 
Count all data associated with View name and string filter - /// - /// - /// - /// - int Count(string viewname, string filter); - - /// - /// Fetch a Document - /// - /// - /// - object Fetch(Guid docID); - - /// - /// Fetch a file bytes - /// - /// - /// - byte[] FetchBytes(Guid fileID); - - ///// - ///// Shutdown RaptorDB and flush all data to disk - ///// - //void Shutdown(); - - /// - /// Backup the document storage file incrementally to "Backup" folder - /// - /// True = done - bool Backup(); - - /// - /// Start background restore of backups in the "Restore" folder - /// - void Restore(); - - /// - /// Delete a Document - /// - /// - /// - bool Delete(Guid docid); - - /// - /// Delete a File - /// - /// - /// - bool DeleteBytes(Guid fileid); - - /// - /// Add users - /// - /// - /// - /// - /// - bool AddUser(string username, string oldpassword, string newpassword); - - /// - /// Do server side data aggregate queries, so you don't transfer large data rows to clients for processing - /// - /// - /// - object[] ServerSide(ServerSideFunc func, string filter); - - /// - /// Do server side data aggregate queries, so you don't transfer large data rows to clients for processing - /// - /// - /// - object[] ServerSide(ServerSideFunc func, Expression> filter); - - /// - /// Full text search the entire original document - /// - /// - /// - int[] FullTextSearch(string filter); - - - // new query model - /// - /// Query a view with linq filter - /// - /// Use the Row Schema type for your view - /// - /// - Result Query(Expression> filter); - - /// - /// Query a view with paging - /// - /// Use the Row Schema type for your view - /// - /// - /// - /// - Result Query(Expression> filter, int start, int count); - - /// - /// Query a view with linq filter, paging and sorting - /// - /// Use the Row Schema type for your view - /// - /// - /// - /// - /// - Result Query(Expression> filter, int start, int count, string orderby); - - /// - /// Query a view with a string filter - 
/// - /// Use the Row Schema type for your view - /// - /// - Result Query(string filter); - - /// - /// Query a view with string filter and paging - /// - /// Use the Row Schema type for your view - /// - /// - /// - /// - Result Query(string filter, int start, int count); - - /// - /// Query a view with string filter, paging and sorting - /// - /// Use the Row Schema type for your view - /// - /// - /// - /// - /// - Result Query(string filter, int start, int count, string orderby); - - /// - /// Count rows with a linq filter - /// - /// - /// - /// - int Count(Expression> filter); - - /// - /// Fetch the change history for a document - /// - /// - /// - int[] FetchHistory(Guid docid); - - /// - /// Fetch the change history for a document with dates - /// - /// - /// - HistoryInfo[] FetchHistoryInfo(Guid docid); - - /// - /// Fetch a change history for a file - /// - /// - /// - int[] FetchBytesHistory(Guid fileid); - - /// - /// Fetch a change history for a file with dates - /// - /// - /// - HistoryInfo[] FetchBytesHistoryInfo(Guid docid); - - /// - /// Fetch the specific document version - /// - /// - /// - object FetchVersion(int versionNumber); - - /// - /// Fetch the specific file version - /// - /// - /// - byte[] FetchBytesVersion(int versionNumber); - - /// - /// Delete rows from a view - /// - /// - /// - /// Number of rows deleted - int ViewDelete(Expression> filter); - - /// - /// Delete rows from a view - /// - /// - /// - /// Number of rows deleted - int ViewDelete(string viewname, string filter); - - /// - /// Insert directly into a view - /// - /// - /// - /// - /// - bool ViewInsert(Guid id, TRowSchema row); - - /// - /// Insert directly into a view - /// - /// - /// - /// - /// - bool ViewInsert(string viewname, Guid id, object row); - - /// - /// Get the number of documents in the storage file regardless of versions - /// - /// - long DocumentCount(); - - /// - /// High frequency mode Key/Value store with recycled storage file. 
- /// Use for rapid saves of the same key. - /// Views are not effected by saves in this storage. - /// NOTE : You do not have history of changes in this storage. - /// - IKeyStoreHF GetKVHF(); - - void Shutdown(); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Linq.Expressions; + +namespace RaptorDB.Common +{ + public delegate List ServerSideFunc(IRaptorDB rap, string filter); + + public class HistoryInfo + { + public int Version; + public DateTime ChangeDate; + } + + /// + /// High frequency mode Key/Value store with recycled storage file. + /// Use for rapid saves of the same key. + /// Views are not effected by saves in this storage. + /// NOTE : You do not have history of changes in this storage. + /// + public interface IKeyStoreHF + { + object GetObjectHF(string key); + bool SetObjectHF(string key, object obj); + bool DeleteKeyHF(string key); + int CountHF(); + bool ContainsHF(string key); + string[] GetKeysHF(); + void CompactStorageHF(); + + //IEnumerable EnumerateObjects(); + //string[] SearchKeys(string contains); // FIX : implement + } + + public interface IRaptorDB + { + /// + /// Save Bytes (files) to RptorDB storage + /// + /// + /// + /// + bool SaveBytes(Guid fileID, byte[] bytes); + + /// + /// Save a Document to RaptorDB + /// + /// + /// + /// + /// + bool Save(Guid docID, T document); + + + /// + /// Query all data in a view + /// + /// + /// + IResult Query(string viewname); + + /// + /// Query all data in a view with paging + /// + /// + /// + /// + /// + IResult Query(string viewname, int start, int count); + + /// + /// Query a View with a string filter + /// + /// + /// + /// + IResult Query(string viewname, string filter); + + /// + /// Query a View with a string filter with paging + /// + /// + /// + /// + /// + /// + IResult Query(string viewname, string filter, int start, int count); + + /// + /// Query a view with filter, paging and sorting + /// + /// + /// + /// + /// + 
/// + IResult Query(string viewname, string filter, int start, int count, string orderby); + + /// + /// Count all data associated with View name + /// + /// + /// + int Count(string viewname); + + /// + /// Count all data associated with View name and string filter + /// + /// + /// + /// + int Count(string viewname, string filter); + + /// + /// Fetch a Document + /// + /// + /// + object Fetch(Guid docID); + + /// + /// Fetch a file bytes + /// + /// + /// + byte[] FetchBytes(Guid fileID); + + ///// + ///// Shutdown RaptorDB and flush all data to disk + ///// + //void Shutdown(); + + /// + /// Backup the document storage file incrementally to "Backup" folder + /// + /// True = done + bool Backup(); + + /// + /// Start background restore of backups in the "Restore" folder + /// + void Restore(); + + /// + /// Delete a Document + /// + /// + /// + bool Delete(Guid docid); + + /// + /// Delete a File + /// + /// + /// + bool DeleteBytes(Guid fileid); + + /// + /// Add users + /// + /// + /// + /// + /// + bool AddUser(string username, string oldpassword, string newpassword); + + /// + /// Do server side data aggregate queries, so you don't transfer large data rows to clients for processing + /// + /// + /// + object[] ServerSide(ServerSideFunc func, string filter); + + /// + /// Do server side data aggregate queries, so you don't transfer large data rows to clients for processing + /// + /// + /// + object[] ServerSide(ServerSideFunc func, Expression> filter); + + /// + /// Full text search the entire original document + /// + /// + /// + int[] FullTextSearch(string filter); + + + // new query model + /// + /// Query a view with linq filter + /// + /// Use the Row Schema type for your view + /// + /// + Result Query(Expression> filter); + + /// + /// Query a view with paging + /// + /// Use the Row Schema type for your view + /// + /// + /// + /// + Result Query(Expression> filter, int start, int count); + + /// + /// Query a view with linq filter, paging and 
sorting + /// + /// Use the Row Schema type for your view + /// + /// + /// + /// + /// + Result Query(Expression> filter, int start, int count, string orderby); + + /// + /// Query a view with a string filter + /// + /// Use the Row Schema type for your view + /// + /// + Result Query(string filter); + + /// + /// Query a view with string filter and paging + /// + /// Use the Row Schema type for your view + /// + /// + /// + /// + Result Query(string filter, int start, int count); + + /// + /// Query a view with string filter, paging and sorting + /// + /// Use the Row Schema type for your view + /// + /// + /// + /// + /// + Result Query(string filter, int start, int count, string orderby); + + /// + /// Count rows with a linq filter + /// + /// + /// + /// + int Count(Expression> filter); + + /// + /// Fetch the change history for a document + /// + /// + /// + IEnumerable FetchHistory(Guid docid); + + /// + /// Fetch the change history for a document with dates + /// + /// + /// + HistoryInfo[] FetchHistoryInfo(Guid docid); + + /// + /// Fetch a change history for a file + /// + /// + /// + IEnumerable FetchBytesHistory(Guid fileid); + + /// + /// Fetch a change history for a file with dates + /// + /// + /// + HistoryInfo[] FetchBytesHistoryInfo(Guid docid); + + /// + /// Fetch the specific document version + /// + /// + /// + object FetchVersion(int versionNumber); + + /// + /// Fetch the specific file version + /// + /// + /// + byte[] FetchBytesVersion(int versionNumber); + + /// + /// Delete rows from a view + /// + /// + /// + /// Number of rows deleted + int ViewDelete(Expression> filter); + + /// + /// Delete rows from a view + /// + /// + /// + /// Number of rows deleted + int ViewDelete(string viewname, string filter); + + /// + /// Insert directly into a view + /// + /// + /// + /// + /// + bool ViewInsert(Guid id, TRowSchema row); + + /// + /// Insert directly into a view + /// + /// + /// + /// + /// + bool ViewInsert(string viewname, Guid id, 
object row); + + /// + /// Get the number of documents in the storage file regardless of versions + /// + /// + long DocumentCount(); + + /// + /// High frequency mode Key/Value store with recycled storage file. + /// Use for rapid saves of the same key. + /// Views are not effected by saves in this storage. + /// NOTE : You do not have history of changes in this storage. + /// + IKeyStoreHF GetKVHF(); + + void Shutdown(); + } +} diff --git a/RaptorDB/Indexes/BitmapIndex.cs b/RaptorDB/Indexes/BitmapIndex.cs index 9e4504a..5d8587e 100644 --- a/RaptorDB/Indexes/BitmapIndex.cs +++ b/RaptorDB/Indexes/BitmapIndex.cs @@ -1,428 +1,429 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.IO; -using RaptorDB.Common; -using System.Threading; -using System.Collections; - -namespace RaptorDB -{ - internal class BitmapIndex - { - public BitmapIndex(string path, string filename) - { - _FileName = Path.GetFileNameWithoutExtension(filename); - _Path = path; - if (_Path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) - _Path += Path.DirectorySeparatorChar.ToString(); - - Initialize(); - } - - class L : IDisposable - { - BitmapIndex _sc; - public L(BitmapIndex sc) - { - _sc = sc; - _sc.CheckInternalOP(); - } - void IDisposable.Dispose() - { - _sc.Done(); - } - } - - private string _recExt = ".mgbmr"; - private string _bmpExt = ".mgbmp"; - private string _FileName = ""; - private string _Path = ""; - private FileStream _bitmapFileWriteOrg; - private BufferedStream _bitmapFileWrite; - private FileStream _bitmapFileRead; - private FileStream _recordFileRead; - private FileStream _recordFileWriteOrg; - private BufferedStream _recordFileWrite; - private long _lastBitmapOffset = 0; - private int _lastRecordNumber = 0; - private SafeDictionary _cache = new SafeDictionary(); - private SafeDictionary _offsetCache = new SafeDictionary(); - private ILog log = LogManager.GetLogger(typeof(BitmapIndex)); - private bool _optimizing = false; - private 
bool _shutdownDone = false; - //private Queue _que = new Queue(); - private int _workingCount = 0; - - #region [ P U B L I C ] - public void Shutdown() - { - using (new L(this)) - { - log.Debug("Shutdown BitmapIndex"); - - InternalShutdown(); - } - } - - public int GetFreeRecordNumber() - { - using (new L(this)) - { - int i = _lastRecordNumber++; - - _cache.Add(i, new WAHBitArray()); - return i; - } - } - - public void Commit(bool freeMemory) - { - using (new L(this)) - { - int[] keys = _cache.Keys(); - Array.Sort(keys); - - foreach (int k in keys) - { - var bmp = _cache[k]; - if (bmp.isDirty) - { - SaveBitmap(k, bmp); - bmp.FreeMemory(); - bmp.isDirty = false; - } - } - Flush(); - if (freeMemory) - { - _cache = new SafeDictionary(); - } - } - } - - public void SetDuplicate(int bitmaprecno, int record) - { - using (new L(this)) - { - WAHBitArray ba = null; - - ba = internalGetBitmap(bitmaprecno); //GetBitmap(bitmaprecno); - - ba.Set(record, true); - } - } - - public WAHBitArray GetBitmap(int recno) - { - using (new L(this)) - { - return internalGetBitmap(recno); - } - } - - private object _oplock = new object(); - public void Optimize() - { - lock (_oplock) - lock (_readlock) - lock (_writelock) - { - _optimizing = true; - while (_workingCount > 0) Thread.SpinWait(1); - Flush(); - - if (File.Exists(_Path + _FileName + "$" + _bmpExt)) - File.Delete(_Path + _FileName + "$" + _bmpExt); - - if (File.Exists(_Path + _FileName + "$" + _recExt)) - File.Delete(_Path + _FileName + "$" + _recExt); - - FileStream _newrec = new FileStream(_Path + _FileName + "$" + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - FileStream _newbmp = new FileStream(_Path + _FileName + "$" + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - - long newoffset = 0; - int c = (int)(_recordFileRead.Length / 8); - for (int i = 0; i < c; i++) - { - long offset = ReadRecordOffset(i); - - byte[] b = ReadBMPData(offset); - if (b == null) - { - 
_optimizing = false; - throw new Exception("bitmap index file is corrupted"); - } - - _newrec.Write(Helper.GetBytes(newoffset, false), 0, 8); - newoffset += b.Length; - _newbmp.Write(b, 0, b.Length); - - } - _newbmp.Flush(); - _newbmp.Close(); - _newrec.Flush(); - _newrec.Close(); - - InternalShutdown(); - - File.Delete(_Path + _FileName + _bmpExt); - File.Delete(_Path + _FileName + _recExt); - File.Move(_Path + _FileName + "$" + _bmpExt, _Path + _FileName + _bmpExt); - File.Move(_Path + _FileName + "$" + _recExt, _Path + _FileName + _recExt); - - Initialize(); - _optimizing = false; - } - } - #endregion - - - #region [ P R I V A T E ] - private byte[] ReadBMPData(long offset) - { - _bitmapFileRead.Seek(offset, SeekOrigin.Begin); - - byte[] b = new byte[8]; - - _bitmapFileRead.Read(b, 0, 8); - if (b[0] == (byte)'B' && b[1] == (byte)'M' && b[7] == 0) - { - int c = Helper.ToInt32(b, 2) * 4 + 8; - byte[] data = new byte[c]; - _bitmapFileRead.Seek(offset, SeekOrigin.Begin); - _bitmapFileRead.Read(data, 0, c); - return data; - } - return null; - } - - private long ReadRecordOffset(int recnum) - { - byte[] b = new byte[8]; - long off = ((long)recnum) * 8; - _recordFileRead.Seek(off, SeekOrigin.Begin); - _recordFileRead.Read(b, 0, 8); - return Helper.ToInt64(b, 0); - } - - private void Initialize() - { - _recordFileRead = new FileStream(_Path + _FileName + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - _recordFileWriteOrg = new FileStream(_Path + _FileName + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - _recordFileWrite = new BufferedStream(_recordFileWriteOrg); - - _bitmapFileRead = new FileStream(_Path + _FileName + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - _bitmapFileWriteOrg = new FileStream(_Path + _FileName + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); - _bitmapFileWrite = new BufferedStream(_bitmapFileWriteOrg); - - 
_bitmapFileWrite.Seek(0L, SeekOrigin.End); - _lastBitmapOffset = _bitmapFileWrite.Length; - _lastRecordNumber = (int)(_recordFileRead.Length / 8); - _shutdownDone = false; - } - - private void InternalShutdown() - { - bool d1 = false; - bool d2 = false; - - if (_shutdownDone == false) - { - Flush(); - if (_recordFileWrite.Length == 0) d1 = true; - if (_bitmapFileWrite.Length == 0) d2 = true; - _recordFileRead.Close(); - _bitmapFileRead.Close(); - _bitmapFileWriteOrg.Close(); - _recordFileWriteOrg.Close(); - _recordFileWrite.Close(); - _bitmapFileWrite.Close(); - if (d1) - File.Delete(_Path + _FileName + _recExt); - if (d2) - File.Delete(_Path + _FileName + _bmpExt); - _recordFileWrite = null; - _recordFileRead = null; - _bitmapFileRead = null; - _bitmapFileWrite = null; - _recordFileRead = null; - _recordFileWrite = null; - _shutdownDone = true; - } - } - - private void Flush() - { - if (_shutdownDone) - return; - if (_recordFileWrite != null) - _recordFileWrite.Flush(); - if (_bitmapFileWrite != null) - _bitmapFileWrite.Flush(); - if (_recordFileRead != null) - _recordFileRead.Flush(); - if (_bitmapFileRead != null) - _bitmapFileRead.Flush(); - if (_bitmapFileWriteOrg != null) - _bitmapFileWriteOrg.Flush(); - if (_recordFileWriteOrg != null) - _recordFileWriteOrg.Flush(); - } - - private object _readlock = new object(); - private WAHBitArray internalGetBitmap(int recno) - { - lock (_readlock) - { - WAHBitArray ba = new WAHBitArray(); - if (recno == -1) - return ba; - - if (_cache.TryGetValue(recno, out ba)) - { - return ba; - } - else - { - long offset = 0; - if (_offsetCache.TryGetValue(recno, out offset) == false) - { - offset = ReadRecordOffset(recno); - _offsetCache.Add(recno, offset); - } - ba = LoadBitmap(offset); - - _cache.Add(recno, ba); - - return ba; - } - } - } - - private object _writelock = new object(); - private void SaveBitmap(int recno, WAHBitArray bmp) - { - lock (_writelock) - { - long offset = SaveBitmapToFile(bmp); - long v; - if 
(_offsetCache.TryGetValue(recno, out v)) - _offsetCache[recno] = offset; - else - _offsetCache.Add(recno, offset); - - long pointer = ((long)recno) * 8; - _recordFileWrite.Seek(pointer, SeekOrigin.Begin); - byte[] b = new byte[8]; - b = Helper.GetBytes(offset, false); - _recordFileWrite.Write(b, 0, 8); - } - } - - //----------------------------------------------------------------- - // BITMAP FILE FORMAT - // 0 'B','M' - // 2 uint count = 4 bytes - // 6 Bitmap type : - // 0 = int record list - // 1 = uint bitmap - // 2 = rec# indexes - // 7 '0' - // 8 uint data - //----------------------------------------------------------------- - private long SaveBitmapToFile(WAHBitArray bmp) - { - long off = _lastBitmapOffset; - WAHBitArray.TYPE t; - uint[] bits = bmp.GetCompressed(out t); - - byte[] b = new byte[bits.Length * 4 + 8]; - // write header data - b[0] = ((byte)'B'); - b[1] = ((byte)'M'); - Buffer.BlockCopy(Helper.GetBytes(bits.Length, false), 0, b, 2, 4); - - b[6] = (byte)t; - b[7] = (byte)(0); - - for (int i = 0; i < bits.Length; i++) - { - byte[] u = Helper.GetBytes((int)bits[i], false); - Buffer.BlockCopy(u, 0, b, i * 4 + 8, 4); - } - _bitmapFileWrite.Write(b, 0, b.Length); - _lastBitmapOffset += b.Length; - return off; - } - - private WAHBitArray LoadBitmap(long offset) - { - WAHBitArray bc = new WAHBitArray(); - if (offset == -1) - return bc; - - List ar = new List(); - WAHBitArray.TYPE type = WAHBitArray.TYPE.WAH; - FileStream bmp = _bitmapFileRead; - { - bmp.Seek(offset, SeekOrigin.Begin); - - byte[] b = new byte[8]; - - bmp.Read(b, 0, 8); - if (b[0] == (byte)'B' && b[1] == (byte)'M' && b[7] == 0) - { - type = (WAHBitArray.TYPE)Enum.ToObject(typeof(WAHBitArray.TYPE), b[6]); - int c = Helper.ToInt32(b, 2); - byte[] buf = new byte[c * 4]; - bmp.Read(buf, 0, c * 4); - for (int i = 0; i < c; i++) - { - ar.Add((uint)Helper.ToInt32(buf, i * 4)); - } - } - } - bc = new WAHBitArray(type, ar.ToArray()); - - return bc; - } - -//#pragma warning disable 642 - private 
void CheckInternalOP() - { - if (_optimizing) - lock (_oplock) { } // yes! this is good - //lock (_que) - // _que.Enqueue(1); - Interlocked.Increment(ref _workingCount); - } -//#pragma warning restore 642 - - private void Done() - { - //lock (_que) - // if (_que.Count > 0) - // _que.Dequeue(); - Interlocked.Decrement(ref _workingCount); - } - #endregion - - internal void FreeMemory() - { - try - { - List free = new List(); - foreach (var b in _cache) - { - if (b.Value.isDirty == false) - free.Add(b.Key); - } - log.Debug("releasing bmp count = " + free.Count + " out of " + _cache.Count); - foreach (int i in free) - _cache.Remove(i); - } - catch { } - } - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.IO; +using RaptorDB.Common; +using System.Threading; +using System.Collections; + +namespace RaptorDB +{ + public class BitmapIndex + { + public BitmapIndex(string path, string filename) + { + _FileName = Path.GetFileNameWithoutExtension(filename); + _Path = path; + if (_Path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) + _Path += Path.DirectorySeparatorChar.ToString(); + + Initialize(); + } + + class L : IDisposable + { + BitmapIndex _sc; + public L(BitmapIndex sc) + { + _sc = sc; + _sc.CheckInternalOP(); + } + void IDisposable.Dispose() + { + _sc.Done(); + } + } + private readonly string _recExt = ".mgbmr"; + private readonly string _bmpExt = ".mgbmp"; + private string _FileName = ""; + private string _Path = ""; + private FileStream _bitmapFileWriteOrg; + private BufferedStream _bitmapFileWrite; + private FileStream _bitmapFileRead; + private FileStream _recordFileRead; + private FileStream _recordFileWriteOrg; + private BufferedStream _recordFileWrite; + private long _lastBitmapOffset = 0; + private int _lastRecordNumber = 0; + private SafeDictionary _cache = new SafeDictionary(); + private SafeDictionary _offsetCache = new SafeDictionary(); + private readonly ILog log = 
LogManager.GetLogger(typeof(BitmapIndex)); + private bool _optimizing = false; + private bool _shutdownDone = false; + private int _workingCount = 0; + + #region [ P U B L I C ] + public void Shutdown() + { + using (new L(this)) + { + log.Debug("Shutdown BitmapIndex"); + + InternalShutdown(); + } + } + + public int GetFreeRecordNumber() + { + using (new L(this)) + { + int i = _lastRecordNumber++; + + _cache.Add(i, new WahBitArray()); + return i; + } + } + + public void Commit(bool freeMemory) + { + using (new L(this)) + { + int[] keys = _cache.Keys(); + Array.Sort(keys); + + foreach (int k in keys) + { + WahBitArray bmp; + + if (_cache.TryGetValue(k, out bmp)) + { + if (bmp.isDirty) + { + SaveBitmap(k, bmp); + bmp.CompressBitmap(); + bmp.isDirty = false; + } + } + else + { + + } + } + Flush(); + if (freeMemory) + { + _cache = new SafeDictionary(); + } + } + } + + public void SetDuplicate(int bitmaprecno, int record) + { + using (new L(this)) + { + WahBitArray ba = null; + + ba = internalGetBitmap(bitmaprecno); //GetBitmap(bitmaprecno); + + ba.Set(record, true); + } + } + + public WahBitArray GetBitmap(int recno) + { + WahBitArray ba; + if(this._cache.TryGetValue(recno, out ba)) return ba; + using (new L(this)) + { + return internalGetBitmap(recno); + } + } + + private object _oplock = new object(); + public void Optimize() + { + lock (_oplock) + lock (_readlock) + lock (_writelock) + { + _optimizing = true; + while (_workingCount > 0) Thread.SpinWait(1); + Flush(); + + if (File.Exists(_Path + _FileName + "$" + _bmpExt)) + File.Delete(_Path + _FileName + "$" + _bmpExt); + + if (File.Exists(_Path + _FileName + "$" + _recExt)) + File.Delete(_Path + _FileName + "$" + _recExt); + + FileStream _newrec = new FileStream(_Path + _FileName + "$" + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + FileStream _newbmp = new FileStream(_Path + _FileName + "$" + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + + long 
newoffset = 0; + int c = (int)(_recordFileRead.Length / 8); + for (int i = 0; i < c; i++) + { + long offset = ReadRecordOffset(i); + + byte[] b = ReadBMPData(offset); + if (b == null) + { + _optimizing = false; + throw new Exception("bitmap index file is corrupted"); + } + + _newrec.Write(Helper.GetBytes(newoffset, false), 0, 8); + newoffset += b.Length; + _newbmp.Write(b, 0, b.Length); + + } + _newbmp.Flush(); + _newbmp.Close(); + _newrec.Flush(); + _newrec.Close(); + + InternalShutdown(); + + File.Delete(_Path + _FileName + _bmpExt); + File.Delete(_Path + _FileName + _recExt); + File.Move(_Path + _FileName + "$" + _bmpExt, _Path + _FileName + _bmpExt); + File.Move(_Path + _FileName + "$" + _recExt, _Path + _FileName + _recExt); + + Initialize(); + _optimizing = false; + } + } + #endregion + + + #region [ P R I V A T E ] + private byte[] ReadBMPData(long offset) + { + _bitmapFileRead.Seek(offset, SeekOrigin.Begin); + + byte[] b = new byte[8]; + + _bitmapFileRead.Read(b, 0, 8); + if (b[0] == (byte)'B' && b[1] == (byte)'M' && b[7] == 0) + { + int c = Helper.ToInt32(b, 2) * 4 + 8; + byte[] data = new byte[c]; + _bitmapFileRead.Seek(offset, SeekOrigin.Begin); + _bitmapFileRead.Read(data, 0, c); + return data; + } + return null; + } + + private long ReadRecordOffset(int recnum) + { + byte[] b = new byte[8]; + long off = ((long)recnum) * 8; + _recordFileRead.Seek(off, SeekOrigin.Begin); + _recordFileRead.Read(b, 0, 8); + return Helper.ToInt64(b, 0); + } + + private void Initialize() + { + _recordFileRead = new FileStream(_Path + _FileName + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + _recordFileWriteOrg = new FileStream(_Path + _FileName + _recExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + _recordFileWrite = new BufferedStream(_recordFileWriteOrg); + + _bitmapFileRead = new FileStream(_Path + _FileName + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + _bitmapFileWriteOrg = new 
FileStream(_Path + _FileName + _bmpExt, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite); + _bitmapFileWrite = new BufferedStream(_bitmapFileWriteOrg); + + _bitmapFileWrite.Seek(0L, SeekOrigin.End); + _lastBitmapOffset = _bitmapFileWrite.Length; + _lastRecordNumber = (int)(_recordFileRead.Length / 8); + _shutdownDone = false; + } + + private void InternalShutdown() + { + bool d1 = false; + bool d2 = false; + + if (_shutdownDone == false) + { + Flush(); + if (_recordFileWrite.Length == 0) d1 = true; + if (_bitmapFileWrite.Length == 0) d2 = true; + _recordFileRead.Close(); + _bitmapFileRead.Close(); + _bitmapFileWriteOrg.Close(); + _recordFileWriteOrg.Close(); + _recordFileWrite.Close(); + _bitmapFileWrite.Close(); + if (d1) + File.Delete(_Path + _FileName + _recExt); + if (d2) + File.Delete(_Path + _FileName + _bmpExt); + _recordFileWrite = null; + _recordFileRead = null; + _bitmapFileRead = null; + _bitmapFileWrite = null; + _recordFileRead = null; + _recordFileWrite = null; + _shutdownDone = true; + } + } + + private void Flush() + { + if (_shutdownDone) + return; + if (_recordFileWrite != null) + _recordFileWrite.Flush(); + if (_bitmapFileWrite != null) + _bitmapFileWrite.Flush(); + if (_recordFileRead != null) + _recordFileRead.Flush(); + if (_bitmapFileRead != null) + _bitmapFileRead.Flush(); + if (_bitmapFileWriteOrg != null) + _bitmapFileWriteOrg.Flush(); + if (_recordFileWriteOrg != null) + _recordFileWriteOrg.Flush(); + } + + private object _readlock = new object(); + private WahBitArray internalGetBitmap(int recno) + { + lock (_readlock) + { + if (recno == -1) + return new WahBitArray(); + + WahBitArray ba; + if (_cache.TryGetValue(recno, out ba)) + { + return ba; + } + else + { + long offset = 0; + if (_offsetCache.TryGetValue(recno, out offset) == false) + { + offset = ReadRecordOffset(recno); + _offsetCache.Add(recno, offset); + } + ba = LoadBitmap(offset); + + _cache.Add(recno, ba); + + return ba; + } + } + } + + private object 
_writelock = new object(); + private void SaveBitmap(int recno, WahBitArray bmp) + { + lock (_writelock) + { + long offset = SaveBitmapToFile(bmp); + long v; + if (_offsetCache.TryGetValue(recno, out v)) + _offsetCache[recno] = offset; + else + _offsetCache.Add(recno, offset); + + long pointer = ((long)recno) * 8; + _recordFileWrite.Seek(pointer, SeekOrigin.Begin); + byte[] b = new byte[8]; + b = Helper.GetBytes(offset, false); + _recordFileWrite.Write(b, 0, 8); + } + } + + //----------------------------------------------------------------- + // BITMAP FILE FORMAT + // 0 'B','M' + // 2 uint count = 4 bytes + // 6 Bitmap type : + // 0 = int record list + // 1 = uint bitmap + // 2 = rec# indexes + // 7 '0' + // 8 uint data + //----------------------------------------------------------------- + private long SaveBitmapToFile(WahBitArray bmp) + { + long off = _lastBitmapOffset; + WahBitArrayState t; + uint[] bits = bmp.GetCompressed(out t); + + byte[] b = new byte[bits.Length * 4 + 8]; + // write header data + b[0] = ((byte)'B'); + b[1] = ((byte)'M'); + Buffer.BlockCopy(Helper.GetBytes(bits.Length, false), 0, b, 2, 4); + + b[6] = (byte)t; + b[7] = (byte)(0); + + for (int i = 0; i < bits.Length; i++) + { + byte[] u = Helper.GetBytes((int)bits[i], false); + Buffer.BlockCopy(u, 0, b, i * 4 + 8, 4); + } + _bitmapFileWrite.Write(b, 0, b.Length); + _lastBitmapOffset += b.Length; + return off; + } + + private WahBitArray LoadBitmap(long offset) + { + WahBitArray bc = new WahBitArray(); + if (offset == -1) + return bc; + + List ar = new List(); + WahBitArrayState type = WahBitArrayState.Wah; + FileStream bmp = _bitmapFileRead; + { + bmp.Seek(offset, SeekOrigin.Begin); + + byte[] b = new byte[8]; + + bmp.Read(b, 0, 8); + if (b[0] == (byte)'B' && b[1] == (byte)'M' && b[7] == 0) + { + type = (WahBitArrayState)Enum.ToObject(typeof(WahBitArrayState), b[6]); + int c = Helper.ToInt32(b, 2); + byte[] buf = new byte[c * 4]; + bmp.Read(buf, 0, c * 4); + for (int i = 0; i < c; i++) + { + 
ar.Add((uint)Helper.ToInt32(buf, i * 4)); + } + } + } + bc = new WahBitArray(type, ar.ToArray()); + + return bc; + } + + private void CheckInternalOP() + { + if (_optimizing) + lock (_oplock) { } // yes! this is good + Interlocked.Increment(ref _workingCount); + } + + private void Done() + { + Interlocked.Decrement(ref _workingCount); + } + #endregion + + internal void FreeMemory() + { + try + { + List free = new List(); + foreach (var b in _cache) + { + if (b.Value.isDirty == false) + free.Add(b.Key); + } + log.Debug("releasing bmp count = " + free.Count + " out of " + _cache.Count); + foreach (int i in free) + _cache.Remove(i); + } + catch { } + } + } +} diff --git a/RaptorDB/Indexes/Cache.cs b/RaptorDB/Indexes/Cache.cs index 9279b95..4ddb004 100644 --- a/RaptorDB/Indexes/Cache.cs +++ b/RaptorDB/Indexes/Cache.cs @@ -1,38 +1,38 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Xml.Serialization; - -namespace RaptorDB -{ - public enum OPERATION - { - AND, - OR, - ANDNOT - } - - public class Document - { - public Document() - { - DocNumber = -1; - } - public Document(string filename, string text) - { - FileName = filename; - Text = text; - DocNumber = -1; - } - public int DocNumber { get; set; } - [XmlIgnore] - public string Text { get; set; } - public string FileName { get; set; } - - public override string ToString() - { - return FileName; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Xml.Serialization; + +namespace RaptorDB +{ + public enum OPERATION + { + AND, + OR, + ANDNOT + } + + public class Document + { + public Document() + { + DocNumber = -1; + } + public Document(string filename, string text) + { + FileName = filename; + Text = text; + DocNumber = -1; + } + public int DocNumber { get; set; } + [XmlIgnore] + public string Text { get; set; } + public string FileName { get; set; } + + public override string ToString() + { + return 
FileName; + } + } +} diff --git a/RaptorDB/Indexes/EnumIndex.cs b/RaptorDB/Indexes/EnumIndex.cs new file mode 100644 index 0000000..cc9876b --- /dev/null +++ b/RaptorDB/Indexes/EnumIndex.cs @@ -0,0 +1,56 @@ +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Indexes +{ + internal class EnumIntIndex : MGIndex, IEqualsQueryIndex where T : struct, IConvertible + { + public EnumIntIndex(string path, string filename) + : base(path, filename + ".mgidx", 4, Global.PageItemCount, true) + { + } + + public void Set(object key, int recnum) + { + if (key == null) return; + base.Set((int)key, recnum); + } + void IIndex.FreeMemory() + { + base.FreeMemory(); + base.SaveIndex(); + } + + public override void Dispose() + { + base.SaveIndex(); + base.Dispose(); + } + + T[] IIndex.GetKeys() + { + throw new NotImplementedException("enum is not sortable"); + } + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + public void Set(T key, int recnum) + { + base.Set((int)(object)key, recnum); + } + + public WahBitArray QueryEquals(T key) + => QueryEquals((int)(object)key); + + public WahBitArray QueryNotEquals(T key) + => QueryNotEquals((int)(object)key); + + public bool GetFirst(T key, out int idx) + { + return base.GetFirst((int)(object)key, out idx); + } + } +} diff --git a/RaptorDB/Indexes/HashIndex.cs b/RaptorDB/Indexes/HashIndex.cs new file mode 100644 index 0000000..33b2c19 --- /dev/null +++ b/RaptorDB/Indexes/HashIndex.cs @@ -0,0 +1,138 @@ +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace RaptorDB.Indexes +{ + public unsafe class HashIndex : IEqualsQueryIndex, IDisposable + { + private readonly string filePath; + private MemoryMappedFile file; + private MemoryMappedViewAccessor accessor; + private 
readonly int entrySize; + private long size; + private PageMultiValueHashTable hashtable; + private readonly ReaderWriterLockSlim rwlock = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion); + + public bool AllowsDuplicates => true; + + public HashIndex(string path, string filename, + long size = 4096, + IPageSerializer keySerializer = null + ) + { + this.filePath = Path.Combine(path, filename); + this.size = size; + this.entrySize = PageHashTableHelper.GetEntrySize(keySerializer, null); + Load(keySerializer); + } + + public void FreeMemory() + { + } + protected int[] EqualsQuery(TKey key) + { + return hashtable[key].ToArray(); + } + + public void SaveIndex() + { + // TODO: flush memory-mapped file? + } + + public void Set(object key, int recnum) + { + Set((TKey)key, recnum); + } + + public void Dispose(bool rwlockDispose = true) + { + *(int*)(hashtable.StartPointer - 4) = hashtable.Count; + accessor.SafeMemoryMappedViewHandle.ReleasePointer(); + hashtable.Dispose(); + accessor.Dispose(); + file.Dispose(); + if (rwlockDispose) rwlock.Dispose(); + } + + public void Set(TKey key, int recnum) + { + try + { + rwlock.EnterWriteLock(); + hashtable.Set(key, recnum, false); + if (hashtable.Count * 3 > (size * 2)) + { + ResizeFile(size * 4); + } + } + finally + { + rwlock.ExitWriteLock(); + } + } + + public void ResizeFile(long size) + { + throw new NotSupportedException("you can't insert more than is capacity to HashIndex"); + //var keySerializer = hashtable.KeySerializer; + //Dispose(false); + //this.size = size; + //Load(keySerializer); + } + + protected void Load(IPageSerializer keySerializer) + { + file = MemoryMappedFile.CreateFromFile(filePath, FileMode.OpenOrCreate, null, size * entrySize + 4); + accessor = file.CreateViewAccessor(); + byte* pointer = null; + accessor.SafeMemoryMappedViewHandle.AcquirePointer(ref pointer); + var count = *(int*)pointer; + hashtable = new PageMultiValueHashTable(size, keySerializer, null, pointer + 4, 256, count < 0 ? 
0 : count); + if (count < 0) hashtable.Recount(); + *(int*)pointer = -1; + } + + public TKey[] GetKeys() + { + try + { + rwlock.EnterReadLock(); + return hashtable.Keys.AsArray(); + } + finally + { + rwlock.ExitReadLock(); + } + } + + void IDisposable.Dispose() + { + Dispose(); + } + + public WahBitArray QueryEquals(TKey key) + { + return WahBitArray.FromIndexes(EqualsQuery(key)); + } + + public WahBitArray QueryNotEquals(TKey key) + { + return QueryEquals(key).Not(); + } + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + public bool GetFirst(TKey key, out int idx) + { + return hashtable.TryGetValue(key, out idx); + } + } +} diff --git a/RaptorDB/Indexes/Hoot.cs b/RaptorDB/Indexes/Hoot.cs index a509966..b0479ec 100644 --- a/RaptorDB/Indexes/Hoot.cs +++ b/RaptorDB/Indexes/Hoot.cs @@ -1,480 +1,480 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Collections; -using System.IO; -using System.Threading; -using System.Text.RegularExpressions; -using RaptorDB.Common; - -namespace RaptorDB -{ - internal class Hoot - { - public Hoot(string IndexPath, string FileName, bool DocMode) - { - _Path = IndexPath; - _FileName = FileName; - _docMode = DocMode; - if (_Path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) _Path += Path.DirectorySeparatorChar; - Directory.CreateDirectory(IndexPath); - - _log.Debug("Starting hOOt...."); - _log.Debug("Storage Folder = " + _Path); - - if (DocMode) - { - _docs = new KeyStoreString(_Path + "files.docs", false); - // read deleted - _deleted = new BoolIndex(_Path, "_deleted" , ".hoot"); - _lastDocNum = (int)_docs.Count(); - } - _bitmaps = new BitmapIndex(_Path, _FileName + "_hoot.bmp"); - // read words - LoadWords(); - } - - private SafeDictionary _words = new SafeDictionary(); - private BitmapIndex _bitmaps; - private BoolIndex _deleted; - private ILog _log = LogManager.GetLogger(typeof(Hoot)); - private int _lastDocNum = 0; - private string _FileName = "words"; - private 
string _Path = ""; - private KeyStoreString _docs; - private bool _docMode = false; - - public int WordCount - { - get { return _words.Count; } - } - - public int DocumentCount - { - get { return _lastDocNum - (int)_deleted.GetBits().CountOnes(); } - } - - public void Save() - { - lock (_lock) - InternalSave(); - } - - public void Index(int recordnumber, string text) - { - AddtoIndex(recordnumber, text); - } - - public WAHBitArray Query(string filter, int maxsize) - { - return ExecutionPlan(filter, maxsize); - } - - public int Index(Document doc, bool deleteold) - { - _log.Debug("indexing doc : " + doc.FileName); - DateTime dt = FastDateTime.Now; - - if (deleteold && doc.DocNumber > -1) - _deleted.Set(true, doc.DocNumber); - - if (deleteold == true || doc.DocNumber == -1) - doc.DocNumber = _lastDocNum++; - - // save doc to disk - string dstr = fastJSON.JSON.ToJSON(doc, new fastJSON.JSONParameters { UseExtensions = false }); - _docs.Set(doc.FileName.ToLower(), Encoding.Unicode.GetBytes(dstr)); - - _log.Debug("writing doc to disk (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - - dt = FastDateTime.Now; - // index doc - AddtoIndex(doc.DocNumber, doc.Text); - _log.Debug("indexing time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - - return _lastDocNum; - } - - public IEnumerable FindRows(string filter) - { - WAHBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); - // enumerate records - return bits.GetBitIndexes(); - } - - public IEnumerable FindDocuments(string filter) - { - WAHBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); - // enumerate documents - foreach (int i in bits.GetBitIndexes()) - { - if (i > _lastDocNum - 1) - break; - string b = _docs.ReadData(i); - Document d = fastJSON.JSON.ToObject(b); - - yield return d; - } - } - - public IEnumerable FindDocumentFileNames(string filter) - { - WAHBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); - // enumerate documents - foreach (int i in 
bits.GetBitIndexes()) - { - if (i > _lastDocNum - 1) - break; - string b = _docs.ReadData(i); - var d = (Dictionary)fastJSON.JSON.Parse(b); - - yield return d["FileName"].ToString(); - } - } - - public void RemoveDocument(int number) - { - // add number to deleted bitmap - _deleted.Set(true, number); - } - - public bool RemoveDocument(string filename) - { - // remove doc based on filename - byte[] b; - if (_docs.Get(filename.ToLower(), out b)) - { - Document d = fastJSON.JSON.ToObject(Encoding.Unicode.GetString(b)); - RemoveDocument(d.DocNumber); - return true; - } - return false; - } - - public bool IsIndexed(string filename) - { - byte[] b; - return _docs.Get(filename.ToLower(), out b); - } - - public void OptimizeIndex() - { - _bitmaps.Commit(false); - _bitmaps.Optimize(); - } - - #region [ P R I V A T E M E T H O D S ] - - private WAHBitArray ExecutionPlan(string filter, int maxsize) - { - //_log.Debug("query : " + filter); - DateTime dt = FastDateTime.Now; - // query indexes - string[] words = filter.Split(' '); - bool defaulttoand = true; - if (filter.IndexOfAny(new char[] { '+', '-' }, 0) > 0) - defaulttoand = false; - - WAHBitArray bits = null; - - foreach (string s in words) - { - int c; - string word = s; - if (s == "") continue; - - OPERATION op = OPERATION.OR; - if (defaulttoand) - op = OPERATION.AND; - - if (s.StartsWith("+")) - { - op = OPERATION.AND; - word = s.Replace("+", ""); - } - - if (s.StartsWith("-")) - { - op = OPERATION.ANDNOT; - word = s.Replace("-", ""); - } - - if (s.Contains("*") || s.Contains("?")) - { - WAHBitArray wildbits = null; - // do wildcard search - Regex reg = new Regex("^" + s.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); - foreach (string key in _words.Keys()) - { - if (reg.IsMatch(key)) - { - _words.TryGetValue(key, out c); - WAHBitArray ba = _bitmaps.GetBitmap(c); - - wildbits = DoBitOperation(wildbits, ba, OPERATION.OR, maxsize); - } - } - if (bits == null) - bits = wildbits; - else - { - if (op == 
OPERATION.AND) - bits = bits.And(wildbits); - else - bits = bits.Or(wildbits); - } - } - else if (_words.TryGetValue(word.ToLowerInvariant(), out c)) - { - // bits logic - WAHBitArray ba = _bitmaps.GetBitmap(c); - bits = DoBitOperation(bits, ba, op, maxsize); - } - } - if (bits == null) - return new WAHBitArray(); - - // remove deleted docs - WAHBitArray ret; - if (_docMode) - ret = bits.AndNot(_deleted.GetBits()); - else - ret = bits; - //_log.Debug("query time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - return ret; - } - - private static WAHBitArray DoBitOperation(WAHBitArray bits, WAHBitArray c, OPERATION op, int maxsize) - { - if (bits != null) - { - switch (op) - { - case OPERATION.AND: - bits = bits.And(c); - break; - case OPERATION.OR: - bits = bits.Or(c); - break; - case OPERATION.ANDNOT: - bits = bits.And(c.Not(maxsize)); - break; - } - } - else - bits = c; - return bits; - } - - private object _lock = new object(); - private void InternalSave() - { - _log.Debug("saving index..."); - DateTime dt = FastDateTime.Now; - // save deleted - if (_deleted != null) - _deleted.SaveIndex(); - - // save docs - if (_docMode) - _docs.SaveIndex(); - _bitmaps.Commit(false); - - MemoryStream ms = new MemoryStream(); - BinaryWriter bw = new BinaryWriter(ms, Encoding.UTF8); - - // save words and bitmaps - using (FileStream words = new FileStream(_Path + _FileName + ".words", FileMode.Create)) - { - foreach (string key in _words.Keys()) - { - bw.Write(key); - bw.Write(_words[key]); - } - byte[] b = ms.ToArray(); - words.Write(b, 0, b.Length); - words.Flush(); - words.Close(); - } - _log.Debug("save time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - } - - private void LoadWords() - { - if (File.Exists(_Path + _FileName + ".words") == false) - return; - // load words - byte[] b = File.ReadAllBytes(_Path + _FileName + ".words"); - if (b.Length == 0) - return; - MemoryStream ms = new MemoryStream(b); - BinaryReader br = new BinaryReader(ms, 
Encoding.UTF8); - string s = br.ReadString(); - while (s != "") - { - int off = br.ReadInt32(); - _words.Add(s, off); - try - { - s = br.ReadString(); - } - catch { s = ""; } - } - _log.Debug("Word Count = " + _words.Count); - } - - private void AddtoIndex(int recnum, string text) - { - if (text == "" || text == null) - return; - text = text.ToLowerInvariant(); // lowercase index - string[] keys; - if (_docMode) - { - //_log.Debug("text size = " + text.Length); - Dictionary wordfreq = GenerateWordFreq(text); - //_log.Debug("word count = " + wordfreq.Count); - var kk = wordfreq.Keys; - keys = new string[kk.Count]; - kk.CopyTo(keys, 0); - } - else - { - keys = text.Split(' '); - } - - foreach (string key in keys) - { - if (key == "") - continue; - - int bmp; - if (_words.TryGetValue(key, out bmp)) - { - _bitmaps.GetBitmap(bmp).Set(recnum, true); - } - else - { - bmp = _bitmaps.GetFreeRecordNumber(); - _bitmaps.SetDuplicate(bmp, recnum); - _words.Add(key, bmp); - } - } - } - - private Dictionary GenerateWordFreq(string text) - { - Dictionary dic = new Dictionary(500); - - char[] chars = text.ToCharArray(); - int index = 0; - int run = -1; - int count = chars.Length; - while (index < count) - { - char c = chars[index++]; - if (!(char.IsLetterOrDigit(c) || c == '.' 
|| c == '-' || c == '$' || c == '#')) // rdb specific - { - if (run != -1) - { - ParseString(dic, chars, index, run); - run = -1; - } - } - else - if (run == -1) - run = index - 1; - } - - if (run != -1) - { - ParseString(dic, chars, index, run); - run = -1; - } - - return dic; - } - - private void ParseString(Dictionary dic, char[] chars, int end, int start) - { - // check if upper lower case mix -> extract words - int uppers = 0; - bool found = false; - for (int i = start; i < end; i++) - { - if (char.IsUpper(chars[i])) - uppers++; - } - // not all uppercase - if (uppers != end - start - 1) - { - int lastUpper = start; - - string word = ""; - for (int i = start + 1; i < end; i++) - { - char c = chars[i]; - if (char.IsUpper(c)) - { - found = true; - word = new string(chars, lastUpper, i - lastUpper).ToLowerInvariant().Trim(); - AddDictionary(dic, word); - lastUpper = i; - } - } - if (lastUpper > start) - { - string last = new string(chars, lastUpper, end - lastUpper).ToLowerInvariant().Trim(); - if (word != last) - AddDictionary(dic, last); - } - } - if (found == false) - { - string s = new string(chars, start, end - start).ToLowerInvariant().Trim(); - AddDictionary(dic, s); - } - } - - private void AddDictionary(Dictionary dic, string word) - { - int l = word.Length; - if (l > Global.DefaultStringKeySize) - return; - if (l < 2) - return; - if (char.IsLetterOrDigit(word[l - 1]) == false) // rdb specific - word = new string(word.ToCharArray(), 0, l - 1); - if (word.Length < 2) - return; - int cc = 0; - if (dic.TryGetValue(word, out cc)) - dic[word] = ++cc; - else - dic.Add(word, 1); - } - #endregion - - public void Shutdown() - { - lock (_lock) - { - InternalSave(); - - if (_docMode) - { - _docs.Shutdown(); - _deleted.Shutdown(); - } - } - } - - public void FreeMemory() - { - if (_bitmaps != null) - _bitmaps.FreeMemory(); - if (_docs != null) - _docs.FreeMemory(); - } - } +using System; +using System.Collections.Generic; +using System.Text; +using 
System.Collections; +using System.IO; +using System.Threading; +using System.Text.RegularExpressions; +using RaptorDB.Common; + +namespace RaptorDB +{ + internal class Hoot + { + public Hoot(string IndexPath, string FileName, bool DocMode) + { + _Path = IndexPath; + _FileName = FileName; + _docMode = DocMode; + if (_Path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) _Path += Path.DirectorySeparatorChar; + Directory.CreateDirectory(IndexPath); + + _log.Debug("Starting hOOt...."); + _log.Debug("Storage Folder = " + _Path); + + if (DocMode) + { + _docs = new KeyStoreString(_Path + "files.docs", false); + // read deleted + _deleted = new BoolIndex(_Path, "_deleted" , ".hoot"); + _lastDocNum = (int)_docs.Count(); + } + _bitmaps = new BitmapIndex(_Path, _FileName + "_hoot.bmp"); + // read words + LoadWords(); + } + + private SafeDictionary _words = new SafeDictionary(); + private BitmapIndex _bitmaps; + private BoolIndex _deleted; + private ILog _log = LogManager.GetLogger(typeof(Hoot)); + private int _lastDocNum = 0; + private string _FileName = "words"; + private string _Path = ""; + private KeyStoreString _docs; + private bool _docMode = false; + + public int WordCount + { + get { return _words.Count; } + } + + public int DocumentCount + { + get { return _lastDocNum - (int)_deleted.GetBits().CountOnes(); } + } + + public void Save() + { + lock (_lock) + InternalSave(); + } + + public void Index(int recordnumber, string text) + { + AddtoIndex(recordnumber, text); + } + + public WahBitArray Query(string filter) + { + return ExecutionPlan(filter, _docs.RecordCount()); + } + + public int Index(Document doc, bool deleteold) + { + _log.Debug("indexing doc : " + doc.FileName); + DateTime dt = FastDateTime.Now; + + if (deleteold && doc.DocNumber > -1) + _deleted.Set(true, doc.DocNumber); + + if (deleteold == true || doc.DocNumber == -1) + doc.DocNumber = _lastDocNum++; + + // save doc to disk + string dstr = fastJSON.JSON.ToJSON(doc, new fastJSON.JSONParameters { 
UseExtensions = false }); + _docs.Set(doc.FileName.ToLower(), Encoding.Unicode.GetBytes(dstr)); + + _log.Debug("writing doc to disk (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + + dt = FastDateTime.Now; + // index doc + AddtoIndex(doc.DocNumber, doc.Text); + _log.Debug("indexing time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + + return _lastDocNum; + } + + public IEnumerable FindRows(string filter) + { + WahBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); + // enumerate records + return bits.GetBitIndexes(); + } + + public IEnumerable FindDocuments(string filter) + { + WahBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); + // enumerate documents + foreach (int i in bits.GetBitIndexes()) + { + if (i > _lastDocNum - 1) + break; + string b = _docs.ReadData(i); + Document d = fastJSON.JSON.ToObject(b); + + yield return d; + } + } + + public IEnumerable FindDocumentFileNames(string filter) + { + WahBitArray bits = ExecutionPlan(filter, _docs.RecordCount()); + // enumerate documents + foreach (int i in bits.GetBitIndexes()) + { + if (i > _lastDocNum - 1) + break; + string b = _docs.ReadData(i); + var d = (Dictionary)fastJSON.JSON.Parse(b); + + yield return d["FileName"].ToString(); + } + } + + public void RemoveDocument(int number) + { + // add number to deleted bitmap + _deleted.Set(true, number); + } + + public bool RemoveDocument(string filename) + { + // remove doc based on filename + byte[] b; + if (_docs.Get(filename.ToLower(), out b)) + { + Document d = fastJSON.JSON.ToObject(Encoding.Unicode.GetString(b)); + RemoveDocument(d.DocNumber); + return true; + } + return false; + } + + public bool IsIndexed(string filename) + { + byte[] b; + return _docs.Get(filename.ToLower(), out b); + } + + public void OptimizeIndex() + { + _bitmaps.Commit(false); + _bitmaps.Optimize(); + } + + #region [ P R I V A T E M E T H O D S ] + + private WahBitArray ExecutionPlan(string filter, int maxsize) + { + //_log.Debug("query : " 
+ filter); + DateTime dt = FastDateTime.Now; + // query indexes + string[] words = filter.Split(' '); + bool defaulttoand = true; + if (filter.IndexOfAny(new char[] { '+', '-' }, 0) >= 0) + defaulttoand = false; + + WahBitArray bits = null; + + foreach (string s in words) + { + int c; + string word = s; + if (s.Length == 0) continue; + + OPERATION op = OPERATION.OR; + if (defaulttoand) + op = OPERATION.AND; + + if (s.StartsWith("+")) + { + op = OPERATION.AND; + word = s.Replace("+", ""); + } + + if (s.StartsWith("-")) + { + op = OPERATION.ANDNOT; + word = s.Replace("-", ""); + } + + if (s.Contains("*") || s.Contains("?")) + { + WahBitArray wildbits = null; + // do wildcard search + Regex reg = new Regex("^" + s.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); + foreach (string key in _words.Keys()) + { + if (reg.IsMatch(key)) + { + _words.TryGetValue(key, out c); + WahBitArray ba = _bitmaps.GetBitmap(c); + + wildbits = DoBitOperation(wildbits, ba, OPERATION.OR, maxsize); + } + } + if (bits == null) + bits = wildbits; + else + { + if (op == OPERATION.AND) + bits = bits.And(wildbits); + else + bits = bits.Or(wildbits); + } + } + else if (_words.TryGetValue(word.ToLowerInvariant(), out c)) + { + // bits logic + WahBitArray ba = _bitmaps.GetBitmap(c); + bits = DoBitOperation(bits, ba, op, maxsize); + } + } + //if (bits == null) + // return new WahBitArray(); + + // remove deleted docs + WahBitArray ret; + if (_docMode) + ret = bits.AndNot(_deleted.GetBits()); + else + ret = bits; + //_log.Debug("query time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + return ret; + } + + private static WahBitArray DoBitOperation(WahBitArray bits, WahBitArray c, OPERATION op, int maxsize) + { + if (bits != null) + { + switch (op) + { + case OPERATION.AND: + bits = bits.And(c); + break; + case OPERATION.OR: + bits = bits.Or(c); + break; + case OPERATION.ANDNOT: + bits = bits.And(c.Not()); + break; + } + } + else + bits = c; + return bits; + } + + private 
object _lock = new object(); + private void InternalSave() + { + _log.Debug("saving index..."); + DateTime dt = FastDateTime.Now; + // save deleted + if (_deleted != null) + _deleted.SaveIndex(); + + // save docs + if (_docMode) + _docs.SaveIndex(); + _bitmaps.Commit(false); + + MemoryStream ms = new MemoryStream(); + BinaryWriter bw = new BinaryWriter(ms, Encoding.UTF8); + + // save words and bitmaps + using (FileStream words = new FileStream(_Path + _FileName + ".words", FileMode.Create)) + { + foreach (string key in _words.Keys()) + { + bw.Write(key); + bw.Write(_words[key]); + } + byte[] b = ms.ToArray(); + words.Write(b, 0, b.Length); + words.Flush(); + words.Close(); + } + _log.Debug("save time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + } + + private void LoadWords() + { + if (File.Exists(_Path + _FileName + ".words") == false) + return; + // load words + byte[] b = File.ReadAllBytes(_Path + _FileName + ".words"); + if (b.Length == 0) + return; + MemoryStream ms = new MemoryStream(b); + BinaryReader br = new BinaryReader(ms, Encoding.UTF8); + string s = br.ReadString(); + while (s.Length > 0) + { + int off = br.ReadInt32(); + _words.Add(s, off); + try + { + s = br.ReadString(); + } + catch { s = string.Empty; } + } + _log.Debug("Word Count = " + _words.Count); + } + + private void AddtoIndex(int recnum, string text) + { + if (string.IsNullOrEmpty(text)) + return; + text = text.ToLowerInvariant(); // lowercase index + string[] keys; + if (_docMode) + { + //_log.Debug("text size = " + text.Length); + Dictionary wordfreq = GenerateWordFreq(text); + //_log.Debug("word count = " + wordfreq.Count); + var kk = wordfreq.Keys; + keys = new string[kk.Count]; + kk.CopyTo(keys, 0); + } + else + { + keys = text.Split(' '); + } + + foreach (string key in keys) + { + if (string.IsNullOrEmpty(key)) + continue; + + int bmp; + if (_words.TryGetValue(key, out bmp)) + { + _bitmaps.GetBitmap(bmp).Set(recnum, true); + } + else + { + bmp = 
_bitmaps.GetFreeRecordNumber(); + _bitmaps.SetDuplicate(bmp, recnum); + _words.Add(key, bmp); + } + } + } + + private Dictionary GenerateWordFreq(string text) + { + Dictionary dic = new Dictionary(500); + + char[] chars = text.ToCharArray(); + int index = 0; + int run = -1; + int count = chars.Length; + while (index < count) + { + char c = chars[index++]; + if (!(char.IsLetterOrDigit(c) || c == '.' || c == '-' || c == '$' || c == '#')) // rdb specific + { + if (run != -1) + { + ParseString(dic, chars, index, run); + run = -1; + } + } + else + if (run == -1) + run = index - 1; + } + + if (run != -1) + { + ParseString(dic, chars, index, run); + run = -1; + } + + return dic; + } + + private void ParseString(Dictionary dic, char[] chars, int end, int start) + { + // check if upper lower case mix -> extract words + int uppers = 0; + bool found = false; + for (int i = start; i < end; i++) + { + if (char.IsUpper(chars[i])) + uppers++; + } + // not all uppercase + if (uppers != end - start - 1) + { + int lastUpper = start; + + string word = ""; + for (int i = start + 1; i < end; i++) + { + char c = chars[i]; + if (char.IsUpper(c)) + { + found = true; + word = new string(chars, lastUpper, i - lastUpper).ToLowerInvariant().Trim(); + AddDictionary(dic, word); + lastUpper = i; + } + } + if (lastUpper > start) + { + string last = new string(chars, lastUpper, end - lastUpper).ToLowerInvariant().Trim(); + if (word != last) + AddDictionary(dic, last); + } + } + if (found == false) + { + string s = new string(chars, start, end - start).ToLowerInvariant().Trim(); + AddDictionary(dic, s); + } + } + + private void AddDictionary(Dictionary dic, string word) + { + int l = word.Length; + if (l > Global.DefaultStringKeySize) + return; + if (l < 2) + return; + if (char.IsLetterOrDigit(word[l - 1]) == false) // rdb specific + word = new string(word.ToCharArray(), 0, l - 1); + if (word.Length < 2) + return; + int cc = 0; + if (dic.TryGetValue(word, out cc)) + dic[word] = ++cc; + else + 
dic.Add(word, 1); + } + #endregion + + public virtual void Dispose() + { + lock (_lock) + { + InternalSave(); + + if (_docMode) + { + _docs.Shutdown(); + _deleted.Dispose(); + } + } + } + + public void FreeMemory() + { + if (_bitmaps != null) + _bitmaps.FreeMemory(); + if (_docs != null) + _docs.FreeMemory(); + } + } } \ No newline at end of file diff --git a/RaptorDB/Indexes/IIndex.cs b/RaptorDB/Indexes/IIndex.cs index 4a5cfac..606c8f6 100644 --- a/RaptorDB/Indexes/IIndex.cs +++ b/RaptorDB/Indexes/IIndex.cs @@ -1,29 +1,73 @@ -using System; -using System.Collections.Generic; -using System.Text; - -namespace RaptorDB -{ - internal enum RDBExpression - { - Equal, - Greater, - GreaterEqual, - Less, - LessEqual, - NotEqual, - Between, - Contains - } - - internal interface IIndex - { - void Set(object key, int recnum); - //WAHBitArray Query(object fromkey, object tokey, int maxsize); - WAHBitArray Query(RDBExpression ex, object from , int maxsize); - void FreeMemory(); - void Shutdown(); - void SaveIndex(); - object[] GetKeys(); - } -} +using RaptorDB.Common; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Text; + +namespace RaptorDB +{ + public enum RDBExpression + { + Equal, + Greater, + GreaterEqual, + Less, + LessEqual, + NotEqual, + Between, + Contains + } + + public interface IIndexAcceptable + { + TResult Accept(IIndex item); + } + + public interface IIndex: IDisposable + { + void FreeMemory(); + void SaveIndex(); + TResult Accept(IIndexAcceptable acc); + void Set(object key, int recnum); + bool AllowsDuplicates { get; } + } + + public interface IIndex: IIndex + { + void Set(T key, int recnum); + T[] GetKeys(); + } + + public interface IUpdatableIndex: IIndex + { + bool Remove(T key); + bool Remove(T key, int recnum); + void ReplaceFirst(T key, int recnum); + void Replace(T key, int oldNum, int newNum); + } + + public interface IEqualsQueryIndex: IIndex + { + WahBitArray QueryEquals(T key); + WahBitArray QueryNotEquals(T 
key); + bool GetFirst(T key, out int idx); + } + + public interface IComparisonIndex: IEqualsQueryIndex + { + WahBitArray QueryGreater(T key); + WahBitArray QueryGreaterEquals(T key); + WahBitArray QueryLess(T key); + WahBitArray QueryLessEquals(T key); + } + + public interface IBetweenComparisonIndex: IEqualsQueryIndex + { + WahBitArray QueryBetween(T from, T to); + } + + public interface IContainsIndex: IIndex + { + WahBitArray QueryContains(T value); + } +} diff --git a/RaptorDB/Indexes/IIndexRoot.cs b/RaptorDB/Indexes/IIndexRoot.cs new file mode 100644 index 0000000..caef02c --- /dev/null +++ b/RaptorDB/Indexes/IIndexRoot.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace RaptorDB.Indexes +{ + interface IIndexRoot + { + int GetPageIndex(TKey key); + IEnumerable GetLowerPagesIndexes(int index); + IEnumerable GetUpperPagesIndexes(int index); + int CreateTable(TKey firstKey); + } +} diff --git a/RaptorDB/Indexes/IndexFile.cs b/RaptorDB/Indexes/IndexFile.cs index 0f30cb0..2d213e4 100644 --- a/RaptorDB/Indexes/IndexFile.cs +++ b/RaptorDB/Indexes/IndexFile.cs @@ -1,467 +1,467 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.IO; -using System.Collections; -using RaptorDB.Common; -using System.Threading; - -namespace RaptorDB -{ - internal class IndexFile - { - FileStream _file = null; - private byte[] _FileHeader = new byte[] { - (byte)'M', (byte)'G', (byte)'I', - 0, // 3 = [keysize] max 255 - 0,0, // 4 = [node size] max 65536 - 0,0,0,0, // 6 = [root page num] - 0, // 10 = Index file type : 0=mgindex 1=mgindex+strings (key = firstallocblock) - 0,0,0,0 // 11 = last record number indexed - }; - - private byte[] _BlockHeader = new byte[] { - (byte)'P',(byte)'A',(byte)'G',(byte)'E', - 0, // 4 = [Flag] = 0=page 1=page list - 0,0, // 5 = [item count] - 0,0,0,0, // 7 = reserved - 0,0,0,0 // 11 = [right page number] / [next page number] 
- }; - - internal byte _maxKeySize; - internal ushort _PageNodeCount = 5000; - private int _LastPageNumber = 1; // 0 = page list - private int _PageLength; - private int _rowSize; - private bool _allowDups = true; - ILog log = LogManager.GetLogger(typeof(IndexFile)); - private BitmapIndex _bitmap; - IGetBytes _T = null; - private object _fileLock = new object(); - - private KeyStoreHF _strings; - private bool _externalStrings = false; - - public IndexFile(string filename, byte maxKeySize, ushort pageNodeCount) - { - _T = RDBDataType.ByteHandler(); - if (typeof(T) == typeof(string) && Global.EnableOptimizedStringIndex) - { - _externalStrings = true; - _maxKeySize = 4;// blocknum:int - } - else - _maxKeySize = maxKeySize; - - _PageNodeCount = pageNodeCount; - _rowSize = (_maxKeySize + 1 + 4 + 4); - - string path = Path.GetDirectoryName(filename); - Directory.CreateDirectory(path); - if (File.Exists(filename)) - { - // if file exists open and read header - _file = File.Open(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); - ReadFileHeader(); - if (_externalStrings == false)// if the file says different - { - _rowSize = (_maxKeySize + 1 + 4 + 4); - } - // compute last page number from file length - _PageLength = (_BlockHeader.Length + _rowSize * (_PageNodeCount)); - _LastPageNumber = (int)((_file.Length - _FileHeader.Length) / _PageLength); - } - else - { - // else create new file - _file = File.Open(filename, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite); - - _PageLength = (_BlockHeader.Length + _rowSize * (_PageNodeCount)); - - CreateFileHeader(0); - - _LastPageNumber = (int)((_file.Length - _FileHeader.Length) / _PageLength); - } - if (_externalStrings) - { - _strings = new KeyStoreHF(path, Path.GetFileNameWithoutExtension(filename) + ".strings"); - } - if (_LastPageNumber == 0) - _LastPageNumber = 1; - // bitmap duplicates - if (_allowDups) - _bitmap = new BitmapIndex(Path.GetDirectoryName(filename), 
Path.GetFileNameWithoutExtension(filename)); - } - - #region [ C o m m o n ] - public void SetBitmapDuplicate(int bitmaprec, int rec) - { - _bitmap.SetDuplicate(bitmaprec, rec); - } - - public int GetBitmapDuplaicateFreeRecordNumber() - { - return _bitmap.GetFreeRecordNumber(); - } - - public IEnumerable GetDuplicatesRecordNumbers(int recno) - { - return GetDuplicateBitmap(recno).GetBitIndexes(); - } - - public WAHBitArray GetDuplicateBitmap(int recno) - { - return _bitmap.GetBitmap(recno); - } - - private byte[] CreateBlockHeader(byte type, ushort itemcount, int rightpagenumber) - { - byte[] block = new byte[_BlockHeader.Length]; - Array.Copy(_BlockHeader, block, block.Length); - block[4] = type; - byte[] b = Helper.GetBytes(itemcount, false); - Buffer.BlockCopy(b, 0, block, 5, 2); - b = Helper.GetBytes(rightpagenumber, false); - Buffer.BlockCopy(b, 0, block, 11, 4); - return block; - } - - private void CreateFileHeader(int rowsindexed) - { - lock (_fileLock) - { - // max key size - byte[] b = Helper.GetBytes(_maxKeySize, false); - Buffer.BlockCopy(b, 0, _FileHeader, 3, 1); - // page node count - b = Helper.GetBytes(_PageNodeCount, false); - Buffer.BlockCopy(b, 0, _FileHeader, 4, 2); - b = Helper.GetBytes(rowsindexed, false); - Buffer.BlockCopy(b, 0, _FileHeader, 11, 4); - - if (_externalStrings) - _FileHeader[10] = 1; - - _file.Seek(0L, SeekOrigin.Begin); - _file.Write(_FileHeader, 0, _FileHeader.Length); - if (rowsindexed == 0) - { - byte[] pagezero = new byte[_PageLength]; - byte[] block = CreateBlockHeader(1, 0, -1); - Buffer.BlockCopy(block, 0, pagezero, 0, block.Length); - _file.Write(pagezero, 0, _PageLength); - } - _file.Flush(); - } - } - - private bool ReadFileHeader() - { - _file.Seek(0L, SeekOrigin.Begin); - byte[] b = new byte[_FileHeader.Length]; - _file.Read(b, 0, _FileHeader.Length); - - if (b[0] == _FileHeader[0] && b[1] == _FileHeader[1] && b[2] == _FileHeader[2]) // header - { - byte maxks = b[3]; - ushort nodes = (ushort)Helper.ToInt16(b, 4); - 
int root = Helper.ToInt32(b, 6); - _maxKeySize = maxks; - _PageNodeCount = nodes; - _FileHeader = b; - if (b[10] == 0) - _externalStrings = false; - } - - return false; - } - - public int GetNewPageNumber() - { - return Interlocked.Increment(ref _LastPageNumber); //_LastPageNumber++; - } - - private void SeekPage(int pnum) - { - long offset = _FileHeader.Length; - offset += (long)pnum * _PageLength; - if (offset > _file.Length) - CreateBlankPages(pnum); - - _file.Seek(offset, SeekOrigin.Begin); - } - - private void CreateBlankPages(int pnum) - { - // create space - byte[] b = new byte[_PageLength]; - _file.Seek(0L, SeekOrigin.Current); - for (int i = pnum; i < _LastPageNumber; i++) - _file.Write(b, 0, b.Length); - - _file.Flush(); - } - - public void FreeMemory() - { - if (_allowDups) - _bitmap.FreeMemory(); - } - - public void Shutdown() - { - log.Debug("Shutdown IndexFile"); - if (_externalStrings) - _strings.Shutdown(); - - if (_file != null) - { - _file.Flush(); - _file.Close(); - } - _file = null; - if (_allowDups) - { - _bitmap.Commit(Global.FreeBitmapMemoryOnSave); - _bitmap.Shutdown(); - } - } - - #endregion - - #region [ P a g e s ] - - public void GetPageList(List PageListDiskPages, SafeSortedList PageList, out int lastIndexedRow) - { - lastIndexedRow = Helper.ToInt32(_FileHeader, 11); - // load page list - PageListDiskPages.Add(0); // first page list - int nextpage = LoadPageListData(0, PageList); - while (nextpage != -1) - { - nextpage = LoadPageListData(nextpage, PageList); - - if (nextpage != -1) - PageListDiskPages.Add(nextpage); - } - } - - private int LoadPageListData(int page, SafeSortedList PageList) - { - lock (_fileLock) - { - // load page list data - int nextpage = -1; - SeekPage(page); - byte[] b = new byte[_PageLength]; - _file.Read(b, 0, _PageLength); - - if (b[0] == _BlockHeader[0] && b[1] == _BlockHeader[1] && b[2] == _BlockHeader[2] && b[3] == _BlockHeader[3]) - { - short count = Helper.ToInt16(b, 5); - if (count > _PageNodeCount) - 
throw new Exception("Count > node size"); - nextpage = Helper.ToInt32(b, 11); - int index = _BlockHeader.Length; - - for (int i = 0; i < count; i++) - { - int idx = index + _rowSize * i; - byte ks = b[idx]; - T key = _T.GetObject(b, idx + 1, ks); - int pagenum = Helper.ToInt32(b, idx + 1 + _maxKeySize); - // add counts - int unique = Helper.ToInt32(b, idx + 1 + _maxKeySize + 4); - // FEATURE : add dup count - PageList.Add(key, new PageInfo(pagenum, unique, 0)); - } - } - else - throw new Exception("Page List header is invalid"); - - return nextpage; - } - } - - internal void SavePage(Page node) - { - lock (_fileLock) - { - int pnum = node.DiskPageNumber; - if (pnum > _LastPageNumber) - throw new Exception("should not be here: page out of bounds"); - - SeekPage(pnum); - byte[] page = new byte[_PageLength]; - byte[] blockheader = CreateBlockHeader(0, (ushort)node.tree.Count, node.RightPageNumber); - Buffer.BlockCopy(blockheader, 0, page, 0, blockheader.Length); - - int index = blockheader.Length; - int i = 0; - byte[] b = null; - T[] keys = node.tree.Keys(); - Array.Sort(keys); // sort keys on save for read performance - int blocknum = 0; - if (_externalStrings) - { - // free old blocks - if (node.allocblocks != null) - _strings.FreeBlocks(node.allocblocks); - blocknum = _strings.SaveData(node.DiskPageNumber.ToString(), fastBinaryJSON.BJSON.ToBJSON(keys)); - } - // node children - foreach (var kp in keys) - { - var val = node.tree[kp]; - int idx = index + _rowSize * i; - // key bytes - byte[] kk; - byte size; - if (_externalStrings == false) - { - kk = _T.GetBytes(kp); - size = (byte)kk.Length; - if (size > _maxKeySize) - size = _maxKeySize; - } - else - { - kk = new byte[4]; - Buffer.BlockCopy(Helper.GetBytes(blocknum, false), 0, kk, 0, 4); - size = 4; - } - // key size = 1 byte - page[idx] = size; - Buffer.BlockCopy(kk, 0, page, idx + 1, page[idx]); - // offset = 4 bytes - b = Helper.GetBytes(val.RecordNumber, false); - Buffer.BlockCopy(b, 0, page, idx + 1 + 
_maxKeySize, b.Length); - // duplicatepage = 4 bytes - b = Helper.GetBytes(val.DuplicateBitmapNumber, false); - Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize + 4, b.Length); - i++; - } - _file.Write(page, 0, page.Length); - } - } - - public Page LoadPageFromPageNumber(int number) - { - lock (_fileLock) - { - SeekPage(number); - byte[] b = new byte[_PageLength]; - _file.Read(b, 0, _PageLength); - - if (b[0] == _BlockHeader[0] && b[1] == _BlockHeader[1] && b[2] == _BlockHeader[2] && b[3] == _BlockHeader[3]) - { - // create node here - Page page = new Page(); - - short count = Helper.ToInt16(b, 5); - if (count > _PageNodeCount) - throw new Exception("Count > node size"); - page.DiskPageNumber = number; - page.RightPageNumber = Helper.ToInt32(b, 11); - int index = _BlockHeader.Length; - object[] keys = null; - - for (int i = 0; i < count; i++) - { - int idx = index + _rowSize * i; - byte ks = b[idx]; - T key; - if (_externalStrings == false) - key = _T.GetObject(b, idx + 1, ks); - else - { - if (keys == null) - { - int blknum = Helper.ToInt32(b, idx + 1, false); - byte[] bb = _strings.GetData(blknum, page.allocblocks); - keys = (object[])fastBinaryJSON.BJSON.ToObject(bb); - } - key = (T)keys[i]; - } - int offset = Helper.ToInt32(b, idx + 1 + _maxKeySize); - int duppage = Helper.ToInt32(b, idx + 1 + _maxKeySize + 4); - page.tree.Add(key, new KeyInfo(offset, duppage)); - } - return page; - } - else - throw new Exception("Page read error header invalid, number = " + number); - } - } - #endregion - - internal void SavePageList(SafeSortedList _pages, List diskpages) - { - lock (_fileLock) - { - // save page list - int c = (_pages.Count / Global.PageItemCount) + 1; - // allocate pages needed - while (c > diskpages.Count) - diskpages.Add(GetNewPageNumber()); - - byte[] page = new byte[_PageLength]; - - for (int i = 0; i < (diskpages.Count - 1); i++) - { - byte[] block = CreateBlockHeader(1, Global.PageItemCount, diskpages[i + 1]); - Buffer.BlockCopy(block, 0, page, 0, 
block.Length); - - for (int j = 0; j < Global.PageItemCount; j++) - CreatePageListData(_pages, i * Global.PageItemCount, block.Length, j, page); - - SeekPage(diskpages[i]); - _file.Write(page, 0, page.Length); - } - - c = _pages.Count % Global.PageItemCount; - byte[] lastblock = CreateBlockHeader(1, (ushort)c, -1); - Buffer.BlockCopy(lastblock, 0, page, 0, lastblock.Length); - int lastoffset = (_pages.Count / Global.PageItemCount) * Global.PageItemCount; - - for (int j = 0; j < c; j++) - CreatePageListData(_pages, lastoffset, lastblock.Length, j, page); - - SeekPage(diskpages[diskpages.Count - 1]); - _file.Write(page, 0, page.Length); - } - } - - private void CreatePageListData(SafeSortedList _pages, int offset, int index, int counter, byte[] page) - { - int idx = index + _rowSize * counter; - // key bytes - byte[] kk = _T.GetBytes(_pages.GetKey(counter + offset)); - byte size = (byte)kk.Length; - if (size > _maxKeySize) - size = _maxKeySize; - // key size = 1 byte - page[idx] = size; - Buffer.BlockCopy(kk, 0, page, idx + 1, page[idx]); - // offset = 4 bytes - byte[] b = Helper.GetBytes(_pages.GetValue(offset + counter).PageNumber, false); - Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize, b.Length); - // add counts - b = Helper.GetBytes(_pages.GetValue(offset + counter).UniqueCount, false); - Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize + 4, b.Length); - // FEATURE : add dup counts - } - - internal void SaveLastRecordNumber(int recnum) - { - // save the last record number indexed to the header - CreateFileHeader(recnum); - } - - internal void BitmapFlush() - { - if (_allowDups) - _bitmap.Commit(Global.FreeBitmapMemoryOnSave); - } - } +using System; +using System.Collections.Generic; +using System.Text; +using System.IO; +using System.Collections; +using RaptorDB.Common; +using System.Threading; + +namespace RaptorDB +{ + internal class IndexFile + { + FileStream _file = null; + private byte[] _FileHeader = new byte[] { + (byte)'M', (byte)'G', (byte)'I', + 
0, // 3 = [keysize] max 255 + 0,0, // 4 = [node size] max 65536 + 0,0,0,0, // 6 = [root page num] + 0, // 10 = Index file type : 0=mgindex 1=mgindex+strings (key = firstallocblock) + 0,0,0,0 // 11 = last record number indexed + }; + + private static readonly byte[] _BlockHeader = new byte[] { + (byte)'P',(byte)'A',(byte)'G',(byte)'E', + 0, // 4 = [Flag] = 0=page 1=page list + 0,0, // 5 = [item count] + 0,0,0,0, // 7 = reserved + 0,0,0,0 // 11 = [right page number] / [next page number] + }; + + internal byte _maxKeySize; + internal ushort _PageNodeCount = 5000; + private int _LastPageNumber = 1; // 0 = page list + private int _PageLength; + private int _rowSize; + private bool _allowDups = true; + ILog log = LogManager.GetLogger(typeof(IndexFile)); + private BitmapIndex _bitmap; + IGetBytes _T = null; + private object _fileLock = new object(); + + private KeyStoreHF _strings; + private bool _externalStrings = false; + + public IndexFile(string filename, byte maxKeySize, ushort pageNodeCount) + { + _T = RDBDataType.ByteHandler(); + if (typeof(T) == typeof(string) && Global.EnableOptimizedStringIndex) + { + _externalStrings = true; + _maxKeySize = 4;// blocknum:int + } + else + _maxKeySize = maxKeySize; + + _PageNodeCount = pageNodeCount; + _rowSize = (_maxKeySize + 1 + 4 + 4); + + string path = Path.GetDirectoryName(filename); + Directory.CreateDirectory(path); + if (File.Exists(filename)) + { + // if file exists open and read header + _file = File.Open(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); + ReadFileHeader(); + if (!_externalStrings)// if the file says different + { + _rowSize = (_maxKeySize + 1 + 4 + 4); + } + // compute last page number from file length + _PageLength = (_BlockHeader.Length + _rowSize * (_PageNodeCount)); + _LastPageNumber = (int)((_file.Length - _FileHeader.Length) / _PageLength); + } + else + { + // else create new file + _file = File.Open(filename, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite); + + 
_PageLength = (_BlockHeader.Length + _rowSize * (_PageNodeCount)); + + CreateFileHeader(0); + + _LastPageNumber = (int)((_file.Length - _FileHeader.Length) / _PageLength); + } + if (_externalStrings) + { + _strings = new KeyStoreHF(path, Path.GetFileNameWithoutExtension(filename) + ".strings"); + } + if (_LastPageNumber == 0) + _LastPageNumber = 1; + // bitmap duplicates + if (_allowDups) + _bitmap = new BitmapIndex(Path.GetDirectoryName(filename), Path.GetFileNameWithoutExtension(filename)); + } + + #region [ C o m m o n ] + public void SetBitmapDuplicate(int bitmaprec, int rec) + { + _bitmap.SetDuplicate(bitmaprec, rec); + } + + public int GetBitmapDuplaicateFreeRecordNumber() + { + return _bitmap.GetFreeRecordNumber(); + } + + public IEnumerable GetDuplicatesRecordNumbers(int recno) + { + return GetDuplicateBitmap(recno).GetBitIndexes(); + } + + public WahBitArray GetDuplicateBitmap(int recno) + { + return _bitmap.GetBitmap(recno); + } + + private byte[] CreateBlockHeader(byte type, ushort itemcount, int rightpagenumber) + { + byte[] block = new byte[_BlockHeader.Length]; + Array.Copy(_BlockHeader, block, block.Length); + block[4] = type; + byte[] b = Helper.GetBytes(itemcount, false); + Buffer.BlockCopy(b, 0, block, 5, 2); + b = Helper.GetBytes(rightpagenumber, false); + Buffer.BlockCopy(b, 0, block, 11, 4); + return block; + } + + private void CreateFileHeader(int rowsindexed) + { + lock (_fileLock) + { + // max key size + byte[] b = Helper.GetBytes(_maxKeySize, false); + Buffer.BlockCopy(b, 0, _FileHeader, 3, 1); + // page node count + b = Helper.GetBytes(_PageNodeCount, false); + Buffer.BlockCopy(b, 0, _FileHeader, 4, 2); + b = Helper.GetBytes(rowsindexed, false); + Buffer.BlockCopy(b, 0, _FileHeader, 11, 4); + + if (_externalStrings) + _FileHeader[10] = 1; + + _file.Seek(0L, SeekOrigin.Begin); + _file.Write(_FileHeader, 0, _FileHeader.Length); + if (rowsindexed == 0) + { + byte[] pagezero = new byte[_PageLength]; + byte[] block = CreateBlockHeader(1, 0, 
-1); + Buffer.BlockCopy(block, 0, pagezero, 0, block.Length); + _file.Write(pagezero, 0, _PageLength); + } + _file.Flush(); + } + } + + private bool ReadFileHeader() + { + _file.Seek(0L, SeekOrigin.Begin); + byte[] b = new byte[_FileHeader.Length]; + _file.Read(b, 0, _FileHeader.Length); + + if (b[0] == _FileHeader[0] && b[1] == _FileHeader[1] && b[2] == _FileHeader[2]) // header + { + byte maxks = b[3]; + ushort nodes = (ushort)Helper.ToInt16(b, 4); + int root = Helper.ToInt32(b, 6); + _maxKeySize = maxks; + _PageNodeCount = nodes; + _FileHeader = b; + if (b[10] == 0) + _externalStrings = false; + } + + return false; + } + + public int GetNewPageNumber() + { + return Interlocked.Increment(ref _LastPageNumber); + } + + private void SeekPage(int pnum) + { + long offset = _FileHeader.Length; + offset += (long)pnum * _PageLength; + if (offset > _file.Length) + CreateBlankPages(pnum); + + _file.Seek(offset, SeekOrigin.Begin); + } + + private void CreateBlankPages(int pnum) + { + // create space + byte[] b = new byte[_PageLength]; + _file.Seek(0L, SeekOrigin.Current); + for (int i = pnum; i < _LastPageNumber; i++) + _file.Write(b, 0, b.Length); + + _file.Flush(); + } + + public void FreeMemory() + { + if (_allowDups) + _bitmap.FreeMemory(); + } + + public void Shutdown() + { + log.Debug("Shutdown IndexFile"); + if (_externalStrings) + _strings.Shutdown(); + + if (_file != null) + { + _file.Flush(); + _file.Close(); + } + _file = null; + if (_allowDups) + { + _bitmap.Commit(Global.FreeBitmapMemoryOnSave); + _bitmap.Shutdown(); + } + } + + #endregion + + #region [ P a g e s ] + + public void GetPageList(List PageListDiskPages, IDictionary PageList, out int lastIndexedRow) + { + lastIndexedRow = Helper.ToInt32(_FileHeader, 11); + // load page list + PageListDiskPages.Add(0); // first page list + int nextpage = LoadPageListData(0, PageList); + while (nextpage != -1) + { + nextpage = LoadPageListData(nextpage, PageList); + + if (nextpage != -1) + 
PageListDiskPages.Add(nextpage); + } + } + + private int LoadPageListData(int page, IDictionary PageList) + { + lock (_fileLock) + { + // load page list data + int nextpage = -1; + SeekPage(page); + byte[] b = new byte[_PageLength]; + _file.Read(b, 0, _PageLength); + + if (b[0] == _BlockHeader[0] && b[1] == _BlockHeader[1] && b[2] == _BlockHeader[2] && b[3] == _BlockHeader[3]) + { + short count = Helper.ToInt16(b, 5); + if (count > _PageNodeCount) + throw new Exception("Count > node size"); + nextpage = Helper.ToInt32(b, 11); + int index = _BlockHeader.Length; + + for (int i = 0; i < count; i++) + { + int idx = index + _rowSize * i; + byte ks = b[idx]; + T key = _T.GetObject(b, idx + 1, ks); + int pagenum = Helper.ToInt32(b, idx + 1 + _maxKeySize); + // add counts + int unique = Helper.ToInt32(b, idx + 1 + _maxKeySize + 4); + // FEATURE : add dup count + PageList.Add(key, new PageInfo(pagenum, unique, 0)); + } + } + else + throw new Exception("Page List header is invalid"); + + return nextpage; + } + } + + internal void SavePage(Page node) + { + lock (_fileLock) + { + int pnum = node.DiskPageNumber; + if (pnum > _LastPageNumber) + throw new Exception("should not be here: page out of bounds"); + + SeekPage(pnum); + byte[] page = new byte[_PageLength]; + byte[] blockheader = CreateBlockHeader(0, (ushort)node.tree.Count, node.RightPageNumber); + Buffer.BlockCopy(blockheader, 0, page, 0, blockheader.Length); + + int index = blockheader.Length; + int i = 0; + byte[] b = null; + T[] keys = node.tree.Keys(); + Array.Sort(keys); // sort keys on save for read performance + int blocknum = 0; + if (_externalStrings) + { + // free old blocks + if (node.allocblocks != null) + _strings.FreeBlocks(node.allocblocks); + blocknum = _strings.SaveData(node.DiskPageNumber.ToString(), fastBinaryJSON.BJSON.ToBJSON(keys)); + } + // node children + foreach (var kp in keys) + { + var val = node.tree[kp]; + int idx = index + _rowSize * i; + // key bytes + byte[] kk; + byte size; + if 
(_externalStrings == false) + { + kk = _T.GetBytes(kp); + size = (byte)kk.Length; + if (size > _maxKeySize) + size = _maxKeySize; + } + else + { + kk = new byte[4]; + Buffer.BlockCopy(Helper.GetBytes(blocknum, false), 0, kk, 0, 4); + size = 4; + } + // key size = 1 byte + page[idx] = size; + Buffer.BlockCopy(kk, 0, page, idx + 1, page[idx]); + // offset = 4 bytes + b = Helper.GetBytes(val.RecordNumber, false); + Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize, b.Length); + // duplicatepage = 4 bytes + b = Helper.GetBytes(val.DuplicateBitmapNumber, false); + Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize + 4, b.Length); + i++; + } + _file.Write(page, 0, page.Length); + } + } + + public Page LoadPageFromPageNumber(int number) + { + lock (_fileLock) + { + SeekPage(number); + byte[] b = new byte[_PageLength]; + _file.Read(b, 0, _PageLength); + + if (b[0] == _BlockHeader[0] && b[1] == _BlockHeader[1] && b[2] == _BlockHeader[2] && b[3] == _BlockHeader[3]) + { + // create node here + Page page = new Page(); + + short count = Helper.ToInt16(b, 5); + if (count > _PageNodeCount) + throw new Exception("Count > node size"); + page.DiskPageNumber = number; + page.RightPageNumber = Helper.ToInt32(b, 11); + int index = _BlockHeader.Length; + object[] keys = null; + + for (int i = 0; i < count; i++) + { + int idx = index + _rowSize * i; + byte ks = b[idx]; + T key; + if (_externalStrings == false) + key = _T.GetObject(b, idx + 1, ks); + else + { + if (keys == null) + { + int blknum = Helper.ToInt32(b, idx + 1, false); + byte[] bb = _strings.GetData(blknum, page.allocblocks); + keys = (object[])fastBinaryJSON.BJSON.ToObject(bb); + } + key = (T)keys[i]; + } + int offset = Helper.ToInt32(b, idx + 1 + _maxKeySize); + int duppage = Helper.ToInt32(b, idx + 1 + _maxKeySize + 4); + page.tree.Add(key, new KeyInfo(offset, duppage)); + } + return page; + } + else + throw new Exception("Page read error header invalid, number = " + number); + } + } + #endregion + + internal void 
SavePageList(SortedList _pages, List diskpages) + { + lock (_fileLock) + { + // save page list + int c = (_pages.Count / Global.PageItemCount) + 1; + // allocate pages needed + while (c > diskpages.Count) + diskpages.Add(GetNewPageNumber()); + + byte[] page = new byte[_PageLength]; + + for (int i = 0; i < (diskpages.Count - 1); i++) + { + byte[] block = CreateBlockHeader(1, Global.PageItemCount, diskpages[i + 1]); + Buffer.BlockCopy(block, 0, page, 0, block.Length); + + for (int j = 0; j < Global.PageItemCount; j++) + CreatePageListData(_pages, i * Global.PageItemCount, block.Length, j, page); + + SeekPage(diskpages[i]); + _file.Write(page, 0, page.Length); + } + + c = _pages.Count % Global.PageItemCount; + byte[] lastblock = CreateBlockHeader(1, (ushort)c, -1); + Buffer.BlockCopy(lastblock, 0, page, 0, lastblock.Length); + int lastoffset = (_pages.Count / Global.PageItemCount) * Global.PageItemCount; + + for (int j = 0; j < c; j++) + CreatePageListData(_pages, lastoffset, lastblock.Length, j, page); + + SeekPage(diskpages[diskpages.Count - 1]); + _file.Write(page, 0, page.Length); + } + } + + private void CreatePageListData(SortedList _pages, int offset, int index, int counter, byte[] page) + { + int idx = index + _rowSize * counter; + // key bytes + byte[] kk = _T.GetBytes(_pages.Keys[counter + offset]); + byte size = (byte)kk.Length; + if (size > _maxKeySize) + size = _maxKeySize; + // key size = 1 byte + page[idx] = size; + Buffer.BlockCopy(kk, 0, page, idx + 1, page[idx]); + // offset = 4 bytes + byte[] b = Helper.GetBytes(_pages.Values[offset + counter].PageNumber, false); + Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize, b.Length); + // add counts + b = Helper.GetBytes(_pages.Values[offset + counter].UniqueCount, false); + Buffer.BlockCopy(b, 0, page, idx + 1 + _maxKeySize + 4, b.Length); + // FEATURE : add dup counts + } + + internal void SaveLastRecordNumber(int recnum) + { + // save the last record number indexed to the header + 
CreateFileHeader(recnum); + } + + internal void BitmapFlush() + { + if (_allowDups) + _bitmap.Commit(Global.FreeBitmapMemoryOnSave); + } + } } \ No newline at end of file diff --git a/RaptorDB/Indexes/IndexRootFileManager.cs b/RaptorDB/Indexes/IndexRootFileManager.cs new file mode 100644 index 0000000..80fae58 --- /dev/null +++ b/RaptorDB/Indexes/IndexRootFileManager.cs @@ -0,0 +1,159 @@ +using GenericPointerHelpers; +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace RaptorDB.Indexes +{ + class IndexRootFileManager + where TKey : IComparable + { + public readonly string FileName; + public readonly IPageSerializer Serializer; + private SortedList pages; + private object locker = new object(); + + public IndexRootFileManager(string fileName, IPageSerializer serializer) + { + this.FileName = fileName; + Serializer = serializer; + } + + public unsafe IIndexRoot Load() + { + if (pages == null) + lock (locker) if (pages == null) + { + if (File.Exists(FileName)) + { + var file = File.ReadAllBytes(FileName); + var entrySize = 4 + (Serializer == null ? GenericPointerHelper.SizeOf() : Serializer.Size); + var count = file.Length / entrySize; + var sl = new SortedList(count); + fixed (byte* filePointer = file) + { + var ptr = filePointer; + for (int i = 0; i < count; i++) + { + var value = *(int*)ptr; + var key = Serializer == null ? GenericPointerHelper.Read(ptr + 4) : Serializer.Read(ptr + 4); + + sl.Add(key, value); + ptr += entrySize; + } + } + } + else + { + pages = new SortedList(); + if (Serializer != null) + { + var eb = new byte[Serializer.Size]; + fixed (byte* ebp = eb) + { + pages.Add(Serializer.Read(ebp), 0); + } + } + else pages.Add(default(TKey), 0); + } + } + return new SortedListIndexRoot(pages); + } + public unsafe void Save() + { + lock (locker) + { + var entrySize = 4 + (Serializer == null ? 
GenericPointerHelper.SizeOf() : Serializer.Size); + var count = pages.Count; + var file = new byte[entrySize * count]; + fixed (byte* filePointer = file) + { + var ptr = filePointer; + foreach (var item in pages) + { + *(int*)ptr = item.Value; + if (Serializer == null) GenericPointerHelper.Write(ptr + 4, item.Key); + else Serializer.Save(ptr + 4, item.Key); + ptr += entrySize; + } + } + File.WriteAllBytes(FileName, file); + } + } + + class SortedListIndexRoot : IIndexRoot + { + private SortedList pages; + private Func compFunc; + + public SortedListIndexRoot(SortedList pages) + { + this.pages = pages; + if (typeof(TKey) == typeof(string)) + { + compFunc = (Func)(Delegate)(Func)CultureInfo.CurrentCulture.CompareInfo.Compare; + } + } + + public int GetPageIndex(TKey key) + { + if (pages.Count <= 1) + return 0; + var keys = pages.Keys; + // binary search + int first = 0; + int last = pages.Count - 1; + int mid = 0; + while (first < last) + { + // int divide and ceil + mid = ((first + last - 1) >> 1) + 1; + var k = keys[mid]; + int compare = compFunc == null ? 
k.CompareTo(key) : compFunc(k, key); + if (compare < 0) + { + first = mid; + } + if (compare == 0) + { + return mid; + } + if (compare > 0) + { + last = mid - 1; + } + } + + return first; + } + + public IEnumerable GetLowerPagesIndexes(int index) + { + for (int i = index - 1; i >= 0; i--) + { + yield return pages.Values[i]; + } + } + + public IEnumerable GetUpperPagesIndexes(int index) + { + for (int i = index + 1; i < pages.Count; i++) + { + yield return pages.Values[i]; + } + } + + public int CreateTable(TKey firstKey) + { + var i = pages.Count; + pages.Add(firstKey, i); + return i; + } + } + } +} diff --git a/RaptorDB/Indexes/Indexes.cs b/RaptorDB/Indexes/Indexes.cs index 05cba2e..4c4a9de 100644 --- a/RaptorDB/Indexes/Indexes.cs +++ b/RaptorDB/Indexes/Indexes.cs @@ -1,384 +1,331 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.IO; -using RaptorDB.Common; - -namespace RaptorDB -{ - #region [ TypeIndexes ] - internal class TypeIndexes : MGIndex, IIndex where T : IComparable - { - public TypeIndexes(string path, string filename, byte keysize) - : base(path, filename + ".mgidx", keysize, Global.PageItemCount, true) - { - - } - - public void Set(object key, int recnum) - { - if (key == null) return; // FEATURE : index null values ?? 
- - base.Set((T)key, recnum); - } - - public WAHBitArray Query(RDBExpression ex, object from, int maxsize) - { - T f = default(T); - if (typeof(T).Equals(from.GetType()) == false) - f = Converter(from); - else - f = (T)from; - - return base.Query(ex, f, maxsize); - } - - private T Converter(object from) - { - if (typeof(T) == typeof(Guid)) - { - object o = new Guid(from.ToString()); - return (T)o; - } - else - return (T)Convert.ChangeType(from, typeof(T)); - } - - void IIndex.FreeMemory() - { - base.FreeMemory(); - base.SaveIndex(); - } - - void IIndex.Shutdown() - { - base.SaveIndex(); - base.Shutdown(); - } - - object[] IIndex.GetKeys() - { - return base.GetKeys(); - } - //public WAHBitArray Query(object fromkey, object tokey, int maxsize) - //{ - // T f = default(T); - // if (typeof(T).Equals(fromkey.GetType()) == false) - // f = (T)Convert.ChangeType(fromkey, typeof(T)); - // else - // f = (T)fromkey; - - // T t = default(T); - // if (typeof(T).Equals(tokey.GetType()) == false) - // t = (T)Convert.ChangeType(tokey, typeof(T)); - // else - // t = (T)tokey; - - // return base.Query(f, t, maxsize); - //} - } - #endregion - - #region [ BoolIndex ] - internal class BoolIndex : IIndex - { - public BoolIndex(string path, string filename, string extension) - { - // create file - _filename = filename + extension; - //if (_filename.Contains(".") == false) _filename += ".deleted"; - _path = path; - if (_path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) - _path += Path.DirectorySeparatorChar.ToString(); - - if (File.Exists(_path + _filename)) - ReadFile(); - } - - private WAHBitArray _bits = new WAHBitArray(); - private string _filename; - private string _path; - private object _lock = new object(); - //private bool _inMemory = false; - - public WAHBitArray GetBits() - { - return _bits.Copy(); - } - - public void Set(object key, int recnum) - { - lock (_lock) - if (key != null) - _bits.Set(recnum, (bool)key); - } - - public WAHBitArray Query(RDBExpression ex, 
object from, int maxsize) - { - lock (_lock) - { - bool b = (bool)from; - if (b) - return _bits; - else - return _bits.Not(maxsize); - } - } - - public void FreeMemory() - { - lock (_lock) - { - // free memory - //_bits.FreeMemory(); - // save to disk - //SaveIndex(); - } - } - - public void Shutdown() - { - // shutdown - //if (_inMemory == false) - WriteFile(); - } - - public void SaveIndex() - { - //if (_inMemory == false) - WriteFile(); - } - - public void InPlaceOR(WAHBitArray left) - { - lock (_lock) - _bits = _bits.Or(left); - } - - private void WriteFile() - { - lock (_lock) - { - WAHBitArray.TYPE t; - uint[] ints = _bits.GetCompressed(out t); - MemoryStream ms = new MemoryStream(); - BinaryWriter bw = new BinaryWriter(ms); - bw.Write((byte)t);// write new format with the data type byte - foreach (var i in ints) - { - bw.Write(i); - } - File.WriteAllBytes(_path + _filename, ms.ToArray()); - } - } - - private void ReadFile() - { - byte[] b = File.ReadAllBytes(_path + _filename); - WAHBitArray.TYPE t = WAHBitArray.TYPE.WAH; - int j = 0; - if (b.Length % 4 > 0) // new format with the data type byte - { - t = (WAHBitArray.TYPE)Enum.ToObject(typeof(WAHBitArray.TYPE), b[0]); - j = 1; - } - List ints = new List(); - for (int i = 0; i < b.Length / 4; i++) - { - ints.Add((uint)Helper.ToInt32(b, (i * 4) + j)); - } - _bits = new WAHBitArray(t, ints.ToArray()); - } - - public WAHBitArray Query(object fromkey, object tokey, int maxsize) - { - return Query(RDBExpression.Greater, fromkey, maxsize); - } - - internal void FixSize(int size) - { - _bits.Length = size; - } - - public object[] GetKeys() - { - return new object[] { true, false }; - } - } - #endregion - - #region [ FullTextIndex ] - internal class FullTextIndex : Hoot, IIndex - { - public FullTextIndex(string IndexPath, string FileName, bool docmode, bool sortable) - : base(IndexPath, FileName, docmode) - { - if (sortable) - { - _idx = new TypeIndexes(IndexPath, FileName, Global.DefaultStringKeySize); - _sortable 
= true; - } - } - private bool _sortable = false; - private IIndex _idx; - - public void Set(object key, int recnum) - { - base.Index(recnum, (string)key); - if (_sortable) - _idx.Set(key, recnum); - } - - public WAHBitArray Query(RDBExpression ex, object from, int maxsize) - { - return base.Query("" + from, maxsize); - } - - public void SaveIndex() - { - base.Save(); - if (_sortable) - _idx.SaveIndex(); - } - - public WAHBitArray Query(object fromkey, object tokey, int maxsize) - { - return base.Query("" + fromkey, maxsize); - } - - public object[] GetKeys() - { - if (_sortable) - return _idx.GetKeys(); // support get keys - else - return new object[] { }; - } - void IIndex.FreeMemory() - { - base.FreeMemory(); - - this.SaveIndex(); - } - - void IIndex.Shutdown() - { - this.SaveIndex(); - base.Shutdown(); - if (_sortable) - _idx.Shutdown(); - } - - } - #endregion - - #region [ EnumIndex ] - internal class EnumIndex : MGIndex, IIndex //where T : IComparable - { - public EnumIndex(string path, string filename) - : base(path, filename + ".mgidx", 30, Global.PageItemCount, true) - { - - } - - public void Set(object key, int recnum) - { - if (key == null) return; // FEATURE : index null values ?? 
- - base.Set(key.ToString(), recnum); - } - - public WAHBitArray Query(RDBExpression ex, object from, int maxsize) - { - T f = default(T); - if (typeof(T).Equals(from.GetType()) == false) - f = Converter(from); - else - f = (T)from; - - return base.Query(ex, f.ToString(), maxsize); - } - - private T Converter(object from) - { - if (typeof(T) == typeof(Guid)) - { - object o = new Guid(from.ToString()); - return (T)o; - } - else - return (T)Convert.ChangeType(from, typeof(T)); - } - - void IIndex.FreeMemory() - { - base.FreeMemory(); - base.SaveIndex(); - } - - void IIndex.Shutdown() - { - base.SaveIndex(); - base.Shutdown(); - } - - public WAHBitArray Query(object fromkey, object tokey, int maxsize) - { - T f = default(T); - if (typeof(T).Equals(fromkey.GetType()) == false) - f = (T)Convert.ChangeType(fromkey, typeof(T)); - else - f = (T)fromkey; - - T t = default(T); - if (typeof(T).Equals(tokey.GetType()) == false) - t = (T)Convert.ChangeType(tokey, typeof(T)); - else - t = (T)tokey; - - return base.Query(f.ToString(), t.ToString(), maxsize); - } - - object[] IIndex.GetKeys() - { - return base.GetKeys(); - } - } - #endregion - - #region [ NoIndex ] - internal class NoIndex : IIndex - { - public void Set(object key, int recnum) - { - // ignore set - } - - public WAHBitArray Query(RDBExpression ex, object from, int maxsize) - { - // always return everything - return WAHBitArray.Fill(maxsize); - } - - public void FreeMemory() - { - - } - - public void Shutdown() - { - - } - - public void SaveIndex() - { - - } - - public object[] GetKeys() - { - return new object[] { }; - } - } - #endregion -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.IO; +using RaptorDB.Common; +using System.Runtime.InteropServices; + +namespace RaptorDB +{ + #region [ TypeIndexes ] + public class TypeIndexes : MGIndex, IComparisonIndex where T : IComparable + { + public TypeIndexes(string path, string filename, byte keysize) + : 
base(path, filename + ".mgidx", keysize, Global.PageItemCount, true) + { + + } + public TypeIndexes(string path, string filename, byte keysize, bool allowDups) + : base(path, filename + ".mgidx", keysize, Global.PageItemCount, allowDups) + { + + } + + public void Set(object key, int recnum) + { + if (key == null) return; // FEATURE : index null values ?? + + base.Set((T)key, recnum); + } + + void IIndex.FreeMemory() + { + base.FreeMemory(); + base.SaveIndex(); + } + + public TResult Accept(IIndexAcceptable acc) + { + return acc.Accept(this); + } + } + #endregion + + #region [ BoolIndex ] + public class BoolIndex : IEqualsQueryIndex + { + public BoolIndex(string path, string filename, string extension) + { + // create file + _filename = filename + extension; + _path = path; + if (_path.EndsWith(Path.DirectorySeparatorChar.ToString()) == false) + _path += Path.DirectorySeparatorChar.ToString(); + + if (File.Exists(_path + _filename)) + ReadFile(); + } + + private WahBitArray _bits = new WahBitArray(); + private string _filename; + private string _path; + private object _lock = new object(); + + public bool AllowsDuplicates => true; + + //private bool _inMemory = false; + + public WahBitArray GetBits() + { + return _bits.Copy(); + } + + public void FreeMemory() + { + lock (_lock) + { + // free memory + //_bits.FreeMemory(); + // save to disk + //SaveIndex(); + } + } + + public void Dispose() + { + // shutdown + //if (_inMemory == false) + WriteFile(); + } + + public void SaveIndex() + { + //if (_inMemory == false) + WriteFile(); + } + + public void InPlaceOR(WahBitArray left) + { + lock (_lock) + _bits = _bits.Or(left); + } + + private void WriteFile() + { + lock (_lock) + { + WahBitArrayState t; + uint[] ints = _bits.GetCompressed(out t); + MemoryStream ms = new MemoryStream(); + BinaryWriter bw = new BinaryWriter(ms); + bw.Write((byte)t);// write new format with the data type byte + foreach (var i in ints) + { + bw.Write(i); + } + File.WriteAllBytes(_path + 
_filename, ms.ToArray()); + } + } + + private void ReadFile() + { + byte[] b = File.ReadAllBytes(_path + _filename); + WahBitArrayState t = WahBitArrayState.Wah; + int j = 0; + if (b.Length % 4 > 0) // new format with the data type byte + { + t = (WahBitArrayState)Enum.ToObject(typeof(WahBitArrayState), b[0]); + j = 1; + } + List ints = new List(); + for (int i = 0; i < b.Length / 4; i++) + { + ints.Add((uint)Helper.ToInt32(b, (i * 4) + j)); + } + _bits = new WahBitArray(t, ints.ToArray()); + } + + public WahBitArray QueryEquals(bool key) + { + if (key) + return _bits; + else return _bits.Not(); + } + + public WahBitArray QueryNotEquals(bool key) + { + return QueryEquals(!key); + } + + public void Set(bool key, int recnum) + { + lock (_lock) + _bits.Set(recnum, key); + } + + public bool[] GetKeys() + => new[] { true, false }; + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + void IIndex.Set(object key, int recnum) + => Set((bool)key, recnum); + + public bool GetFirst(bool key, out int idx) + { + throw new NotImplementedException(); + } + } + #endregion + + + internal class ObjectToStringIndex : MGIndex, IComparisonIndex + { + public ObjectToStringIndex(string path, string filename, byte maxLength) + : base(path, filename + ".mgidx", maxLength, Global.PageItemCount, true) + { + } + + void IIndex.FreeMemory() + { + base.FreeMemory(); + base.SaveIndex(); + } + + public WahBitArray QueryGreater(T key) + => QueryGreater(key.ToString()); + + public WahBitArray QueryGreaterEquals(T key) + => QueryGreaterEquals(key.ToString()); + + public WahBitArray QueryLess(T key) + => QueryLess(key.ToString()); + + public WahBitArray QueryLessEquals(T key) + => QueryLessEquals(key.ToString()); + + public WahBitArray QueryEquals(T key) + => QueryEquals(key.ToString()); + + public WahBitArray QueryNotEquals(T key) + => QueryNotEquals(key.ToString()); + + public void Set(T key, int recnum) + { + if (key != null) + { + base.Set(key.ToString(), recnum); + } + } + + 
T[] IIndex.GetKeys() + { + throw new NotSupportedException("ObjectToStringIndex can't rebuild keys from stored strings"); + } + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + public void Set(object key, int recnum) + => Set((T)key, recnum); + + public bool GetFirst(T key, out int idx) + { + return base.GetFirst(key.ToString(), out idx); + } + } + + #region [ FullTextIndex ] + internal class FullTextIndex : Hoot, IContainsIndex + { + public FullTextIndex(string IndexPath, string FileName, bool docmode, bool sortable) + : base(IndexPath, FileName, docmode) + { + if (sortable) + { + _idx = new TypeIndexes(IndexPath, FileName, Global.DefaultStringKeySize); + _sortable = true; + } + } + private bool _sortable = false; + private IIndex _idx; + + public bool AllowsDuplicates => true; + + public void Set(string key, int recnum) + { + base.Index(recnum, key); + if (_sortable) + _idx.Set(key, recnum); + } + + public void SaveIndex() + { + base.Save(); + if (_sortable) + _idx.SaveIndex(); + } + + public string[] GetKeys() + { + if (_sortable) + return _idx.GetKeys(); // support get keys + else + return new string[] { }; + } + void IIndex.FreeMemory() + { + base.FreeMemory(); + + this.SaveIndex(); + } + + public override void Dispose() + { + this.SaveIndex(); + base.Dispose(); + if (_sortable) _idx.Dispose(); + } + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + public WahBitArray QueryContains(string value) + { + return base.Query(value); + } + + public void Set(object key, int recnum) + => Set((string)key, recnum); + } + #endregion + + #region [ NoIndex ] + internal class NoIndex : IIndex + { + public void Set(object key, int recnum) + { + // ignore set + } + + public void FreeMemory() + { + + } + + public void Dispose() + { + + } + + public void SaveIndex() + { + + } + + public object[] GetKeys() + { + return new object[] { }; + } + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + public static 
readonly NoIndex Instance = new NoIndex(); + + public bool AllowsDuplicates => true; + } + #endregion +} diff --git a/RaptorDB/Indexes/MGIndex.cs b/RaptorDB/Indexes/MGIndex.cs index dc2f476..002b6f2 100644 --- a/RaptorDB/Indexes/MGIndex.cs +++ b/RaptorDB/Indexes/MGIndex.cs @@ -1,512 +1,555 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading; -using System.IO; -using RaptorDB.Common; - -namespace RaptorDB -{ - #region [ internal classes ] - - internal struct PageInfo // FEATURE : change back to class for count access for query caching - { - public PageInfo(int pagenum, int uniquecount, int duplicatecount) - { - PageNumber = pagenum; - UniqueCount = uniquecount; - } - public int PageNumber; - public int UniqueCount; - } - - internal struct KeyInfo - { - public KeyInfo(int recnum) - { - RecordNumber = recnum; - DuplicateBitmapNumber = -1; - } - public KeyInfo(int recnum, int bitmaprec) - { - RecordNumber = recnum; - DuplicateBitmapNumber = bitmaprec; - } - public int RecordNumber; - public int DuplicateBitmapNumber; - } - - internal class Page - { - public Page() // kludge so the compiler doesn't complain - { - DiskPageNumber = -1; - RightPageNumber = -1; - tree = new SafeDictionary(Global.PageItemCount); - isDirty = false; - FirstKey = default(T); - } - public int DiskPageNumber; - public int RightPageNumber; - public T FirstKey; - public bool isDirty; - public SafeDictionary tree; - public List allocblocks = null; - } - - #endregion - - internal class MGIndex where T : IComparable - { - ILog _log = LogManager.GetLogger(typeof(MGIndex)); - private SafeSortedList _pageList = new SafeSortedList(); - private SafeDictionary> _cache = new SafeDictionary>(); - private List _pageListDiskPages = new List(); - private IndexFile _index; - private bool _AllowDuplicates = true; - private int _LastIndexedRecordNumber = 0; - private int _maxPageItems = 0; - - public MGIndex(string path, string filename, byte keysize, 
ushort maxcount, bool allowdups) - { - _AllowDuplicates = allowdups; - _index = new IndexFile(path + Path.DirectorySeparatorChar + filename, keysize, maxcount); - _maxPageItems = maxcount; - // load page list - _index.GetPageList(_pageListDiskPages, _pageList, out _LastIndexedRecordNumber); - if (_pageList.Count == 0) - { - Page page = new Page(); - page.FirstKey = (T)RDBDataType.GetEmpty(); - page.DiskPageNumber = _index.GetNewPageNumber(); - page.isDirty = true; - _pageList.Add(page.FirstKey, new PageInfo(page.DiskPageNumber, 0, 0)); - _cache.Add(page.DiskPageNumber, page); - } - } - - public int GetLastIndexedRecordNumber() - { - return _LastIndexedRecordNumber; - } - - public WAHBitArray Query(T from, T to, int maxsize) - { - // TODO : add BETWEEN code here - T temp = default(T); - if (from.CompareTo(to) > 0) // check values order - { - temp = from; - from = to; - to = temp; - } - // find first page and do > than - bool found = false; - int startpos = FindPageOrLowerPosition(from, ref found); - - // find last page and do < than - int endpos = FindPageOrLowerPosition(to, ref found); - - // do all pages in between - - return new WAHBitArray(); - } - - public WAHBitArray Query(RDBExpression exp, T from, int maxsize) - { - T key = from; - if (exp == RDBExpression.Equal || exp == RDBExpression.NotEqual) - return doEqualOp(exp, key, maxsize); - - // TODO : optimize complement search if page count less for the complement pages - //bool found = false; - //int last = _pageList.Count - 1; - //int pos = FindPageOrLowerPosition(key, ref found); - - if (exp == RDBExpression.Less || exp == RDBExpression.LessEqual) - { - //long c = (pos+1) * _maxPageItems * 70 / 100; // 70% full pages - //long inv = maxsize - c; - //if (c < inv) - return doLessOp(exp, key); - //else - //{ - - //} - } - else if (exp == RDBExpression.Greater || exp == RDBExpression.GreaterEqual) - { - return doMoreOp(exp, key); - } - - return new WAHBitArray(); // blank results - } - - private object _setlock = 
new object(); - public void Set(T key, int val) - { - lock (_setlock) - { - PageInfo pi; - Page page = LoadPage(key, out pi); - - KeyInfo ki; - if (page.tree.TryGetValue(key, out ki)) - { - // item exists - if (_AllowDuplicates) - { - SaveDuplicate(key, ref ki); - // set current record in the bitmap also - _index.SetBitmapDuplicate(ki.DuplicateBitmapNumber, val); - } - ki.RecordNumber = val; - page.tree[key] = ki; // structs need resetting - } - else - { - // new item - ki = new KeyInfo(val); - if (_AllowDuplicates) - SaveDuplicate(key, ref ki); - pi.UniqueCount++; - page.tree.Add(key, ki); - } - - if (page.tree.Count > Global.PageItemCount) - SplitPage(page); - - _LastIndexedRecordNumber = val; - page.isDirty = true; - } - } - - public bool Get(T key, out int val) - { - val = -1; - PageInfo pi; - Page page = LoadPage(key, out pi); - KeyInfo ki; - bool ret = page.tree.TryGetValue(key, out ki); - if (ret) - val = ki.RecordNumber; - return ret; - } - - public void SaveIndex() - { - _log.Debug("Total split time (s) = " + _totalsplits); - _log.Debug("Total pages = " + _pageList.Count); - int[] keys = _cache.Keys(); - Array.Sort(keys); - // save index to disk - foreach (var i in keys) - { - var p = _cache[i]; - if (p.isDirty) - { - _index.SavePage(p); - p.isDirty = false; - } - } - _index.SavePageList(_pageList, _pageListDiskPages); - _index.BitmapFlush(); - } - - public void Shutdown() - { - SaveIndex(); - // save page list - _index.SavePageList(_pageList, _pageListDiskPages); - // shutdown - _index.Shutdown(); - } - - public void FreeMemory() - { - _index.FreeMemory(); - try - { - List free = new List(); - foreach (var c in _cache) - { - if (c.Value.isDirty == false) - free.Add(c.Key); - } - _log.Debug("releasing page count = " + free.Count + " out of " + _cache.Count); - foreach (var i in free) - _cache.Remove(i); - } - catch { } - } - - - public IEnumerable GetDuplicates(T key) - { - PageInfo pi; - Page page = LoadPage(key, out pi); - KeyInfo ki; - bool ret = 
page.tree.TryGetValue(key, out ki); - if (ret) - // get duplicates - if (ki.DuplicateBitmapNumber != -1) - return _index.GetDuplicatesRecordNumbers(ki.DuplicateBitmapNumber); - - return new List(); - } - - public void SaveLastRecordNumber(int recnum) - { - _index.SaveLastRecordNumber(recnum); - } - - public bool RemoveKey(T key) - { - PageInfo pi; - Page page = LoadPage(key, out pi); - bool b = page.tree.Remove(key); - // FIX : reset the first key for page ?? - if (b) - { - pi.UniqueCount--; - // FEATURE : decrease dup count - } - page.isDirty = true; - return b; - } - - #region [ P R I V A T E ] - private WAHBitArray doMoreOp(RDBExpression exp, T key) - { - bool found = false; - int pos = FindPageOrLowerPosition(key, ref found); - WAHBitArray result = new WAHBitArray(); - if (pos < _pageList.Count) - { - // all the pages after - for (int i = pos + 1; i < _pageList.Count; i++) - doPageOperation(ref result, i); - } - // key page - Page page = LoadPage(_pageList.GetValue(pos).PageNumber); - T[] keys = page.tree.Keys(); - Array.Sort(keys); - - // find better start position rather than 0 - pos = Array.IndexOf(keys, key); - if (pos == -1) pos = 0; - - for (int i = pos; i < keys.Length; i++) - { - T k = keys[i]; - int bn = page.tree[k].DuplicateBitmapNumber; - - if (k.CompareTo(key) > 0) - result = result.Or(_index.GetDuplicateBitmap(bn)); - - if (exp == RDBExpression.GreaterEqual && k.CompareTo(key) == 0) - result = result.Or(_index.GetDuplicateBitmap(bn)); - } - return result; - } - - private WAHBitArray doLessOp(RDBExpression exp, T key) - { - bool found = false; - int pos = FindPageOrLowerPosition(key, ref found); - WAHBitArray result = new WAHBitArray(); - if (pos > 0) - { - // all the pages before - for (int i = 0; i < pos - 1; i++) - doPageOperation(ref result, i); - } - // key page - Page page = LoadPage(_pageList.GetValue(pos).PageNumber); - T[] keys = page.tree.Keys(); - Array.Sort(keys); - for (int i = 0; i < keys.Length; i++) - { - T k = keys[i]; - if 
(k.CompareTo(key) > 0) - break; - int bn = page.tree[k].DuplicateBitmapNumber; - - if (k.CompareTo(key) < 0) - result = result.Or(_index.GetDuplicateBitmap(bn)); - - if (exp == RDBExpression.LessEqual && k.CompareTo(key) == 0) - result = result.Or(_index.GetDuplicateBitmap(bn)); - } - return result; - } - - private WAHBitArray doEqualOp(RDBExpression exp, T key, int maxsize) - { - PageInfo pi; - Page page = LoadPage(key, out pi); - KeyInfo k; - if (page.tree.TryGetValue(key, out k)) - { - int bn = k.DuplicateBitmapNumber; - - if (exp == RDBExpression.Equal) - return _index.GetDuplicateBitmap(bn); - else - return _index.GetDuplicateBitmap(bn).Not(maxsize); - } - else - return new WAHBitArray(); - } - - private void doPageOperation(ref WAHBitArray res, int pageidx) - { - Page page = LoadPage(_pageList.GetValue(pageidx).PageNumber); - T[] keys = page.tree.Keys(); // avoid sync issues - foreach (var k in keys) - { - int bn = page.tree[k].DuplicateBitmapNumber; - - res = res.Or(_index.GetDuplicateBitmap(bn)); - } - } - - private double _totalsplits = 0; - private void SplitPage(Page page) - { - // split the page - DateTime dt = FastDateTime.Now; - - Page newpage = new Page(); - newpage.DiskPageNumber = _index.GetNewPageNumber(); - newpage.RightPageNumber = page.RightPageNumber; - newpage.isDirty = true; - page.RightPageNumber = newpage.DiskPageNumber; - // get and sort keys - T[] keys = page.tree.Keys(); - Array.Sort(keys); - // copy data to new - for (int i = keys.Length / 2; i < keys.Length; i++) - { - newpage.tree.Add(keys[i], page.tree[keys[i]]); - // remove from old page - page.tree.Remove(keys[i]); - } - // set the first key - newpage.FirstKey = keys[keys.Length / 2]; - // set the first key refs - _pageList.Remove(page.FirstKey); - _pageList.Remove(keys[0]); - // dup counts - _pageList.Add(keys[0], new PageInfo(page.DiskPageNumber, page.tree.Count, 0)); - page.FirstKey = keys[0]; - // FEATURE : dup counts - _pageList.Add(newpage.FirstKey, new 
PageInfo(newpage.DiskPageNumber, newpage.tree.Count, 0)); - _cache.Add(newpage.DiskPageNumber, newpage); - - _totalsplits += FastDateTime.Now.Subtract(dt).TotalSeconds; - } - - private Page LoadPage(T key, out PageInfo pageinfo) - { - int pagenum = -1; - // find page in list of pages - - bool found = false; - int pos = 0; - if (key != null) - pos = FindPageOrLowerPosition(key, ref found); - pageinfo = _pageList.GetValue(pos); - pagenum = pageinfo.PageNumber; - - Page page; - if (_cache.TryGetValue(pagenum, out page) == false) - { - //load page from disk - page = _index.LoadPageFromPageNumber(pagenum); - _cache.Add(pagenum, page); - } - return page; - } - - private Page LoadPage(int pagenum) - { - Page page; - if (_cache.TryGetValue(pagenum, out page) == false) - { - //load page from disk - page = _index.LoadPageFromPageNumber(pagenum); - _cache.Add(pagenum, page); - } - return page; - } - - private void SaveDuplicate(T key, ref KeyInfo ki) - { - if (ki.DuplicateBitmapNumber == -1) - ki.DuplicateBitmapNumber = _index.GetBitmapDuplaicateFreeRecordNumber(); - - _index.SetBitmapDuplicate(ki.DuplicateBitmapNumber, ki.RecordNumber); - } - - private int FindPageOrLowerPosition(T key, ref bool found) - { - if (_pageList.Count == 0) - return 0; - // binary search - int lastlower = 0; - int first = 0; - int last = _pageList.Count - 1; - int mid = 0; - while (first <= last) - { - mid = (first + last) >> 1; - T k = _pageList.GetKey(mid); - int compare = k.CompareTo(key); - if (compare < 0) - { - lastlower = mid; - first = mid + 1; - } - if (compare == 0) - { - found = true; - return mid; - } - if (compare > 0) - { - last = mid - 1; - } - } - - return lastlower; - } - #endregion - - internal object[] GetKeys() - { - List keys = new List(); - for (int i = 0; i < _pageList.Count; i++) - { - Page page = LoadPage(_pageList.GetValue(i).PageNumber); - foreach (var k in page.tree.Keys()) - keys.Add(k); - } - return keys.ToArray(); - } - - internal int Count() - { - int count = 0; - 
for (int i = 0; i < _pageList.Count; i++) - { - Page page = LoadPage(_pageList.GetValue(i).PageNumber); - //foreach (var k in page.tree.Keys()) - // count++; - count += page.tree.Count; - } - return count; - } - } -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading; +using System.IO; +using RaptorDB.Common; +using System.Collections.Concurrent; + +namespace RaptorDB +{ + #region [ internal classes ] + + internal struct PageInfo // FEATURE : change back to class for count access for query caching + { + public PageInfo(int pagenum, int uniquecount, int duplicatecount) + { + PageNumber = pagenum; + UniqueCount = uniquecount; + } + public int PageNumber; + public int UniqueCount; + } + + internal struct KeyInfo + { + public KeyInfo(int recnum) + { + RecordNumber = recnum; + DuplicateBitmapNumber = -1; + } + public KeyInfo(int recnum, int bitmaprec) + { + RecordNumber = recnum; + DuplicateBitmapNumber = bitmaprec; + } + public int RecordNumber; + public int DuplicateBitmapNumber; + } + + internal class Page + { + public Page() // kludge so the compiler doesn't complain + { + DiskPageNumber = -1; + RightPageNumber = -1; + tree = new SafeDictionary(Global.PageItemCount); + isDirty = false; + FirstKey = default(T); + rwlock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion); + } + public int DiskPageNumber; + public int RightPageNumber; + public T FirstKey; + public bool isDirty; + public SafeDictionary tree; + public List allocblocks; + public ReaderWriterLockSlim rwlock; + } + + #endregion + + public class MGIndex : IDisposable + where T : IComparable + { + ILog _log = LogManager.GetLogger(typeof(MGIndex)); + private SortedList _pageList = new SortedList(); + private ConcurrentDictionary> _cache = new ConcurrentDictionary>(); + //private SafeDictionary _usage = new SafeDictionary(); + private List _pageListDiskPages = new List(); + private IndexFile _index; + 
public bool AllowsDuplicates => _AllowDuplicates; + private bool _AllowDuplicates = true; + private int _LastIndexedRecordNumber = 0; + private int _maxPageItems = 0; + Func _compFunc = null; + + /// + /// lock read when reading anything and lock write if writing to pagelist and creating new pages + /// + private ReaderWriterLockSlim _listLock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion); + + public MGIndex(string path, string filename, byte keysize, ushort maxcount, bool allowdups) + { + keysize = RDBDataType.GetByteSize(keysize); + _AllowDuplicates = allowdups; + _index = new IndexFile(path + Path.DirectorySeparatorChar + filename, keysize, maxcount); + _maxPageItems = maxcount; + // load page list + _index.GetPageList(_pageListDiskPages, _pageList, out _LastIndexedRecordNumber); + if (_pageList.Count == 0) + { + Page page = new Page(); + page.FirstKey = (T)RDBDataType.GetEmpty(); + page.DiskPageNumber = _index.GetNewPageNumber(); + page.isDirty = true; + _pageList.Add(page.FirstKey, new PageInfo(page.DiskPageNumber, 0, 0)); + _cache.TryAdd(page.DiskPageNumber, page); + } + if (typeof(T) == typeof(string)) + { + _compFunc = (Func)(Delegate)(Func)CultureInfo.CurrentCulture.CompareInfo.Compare; + } + } + + public int GetLastIndexedRecordNumber() + { + return _LastIndexedRecordNumber; + } + + private object _setlock = new object(); + public void Set(T key, int val) + { + Page page = null; + using (_listLock.Reading()) + { + PageInfo pi; + page = LoadPage(key, out pi); + + using (page.rwlock.Writing()) + { + KeyInfo ki; + if (page.tree.TryGetValue(key, out ki)) + { + // item exists + if (_AllowDuplicates) + { + SaveDuplicate(key, ref ki); + // set current record in the bitmap also + _index.SetBitmapDuplicate(ki.DuplicateBitmapNumber, val); + } + ki.RecordNumber = val; + page.tree[key] = ki; // structs need resetting + } + else + { + // new item + ki = new KeyInfo(val); + if (_AllowDuplicates) + SaveDuplicate(key, ref ki); + pi.UniqueCount++; + 
page.tree.Add(key, ki); + } + + _LastIndexedRecordNumber = val; + page.isDirty = true; + } + } + var c = page.tree.Count; + if (c > Global.PageItemCount || (c > Global.EarlyPageSplitSize && _pageList.Count <= Global.EarlyPageCount)) + SplitPage(page.DiskPageNumber); + } + + public bool GetFirst(T key, out int val) + { + using (_listLock.Reading()) + { + val = -1; + PageInfo pi; + Page page = LoadPage(key, out pi); + KeyInfo ki; + bool ret = page.tree.TryGetValue(key, out ki); + if (ret) + val = ki.RecordNumber; + return ret; + } + } + + public void SaveIndex() + { + using (_listLock.Reading()) + { + _log.Debug("Total split time (s) = " + _totalsplits); + _log.Debug("Total pages = " + _pageList.Count); + var keys = _cache.Keys.ToArray(); + Array.Sort(keys); + // save index to disk + foreach (var i in keys) + { + var p = _cache[i]; + if (p.isDirty) + { + _index.SavePage(p); + p.isDirty = false; + } + } + _index.SavePageList(_pageList, _pageListDiskPages); + _index.BitmapFlush(); + } + } + + public virtual void Dispose() + { + using (_listLock.Writing()) + { + SaveIndex(); + // save page list + _index.SavePageList(_pageList, _pageListDiskPages); + // shutdown + _index.Shutdown(); + } + } + + public void FreeMemory() + { + _index.FreeMemory(); + try + { + List free = new List(); + foreach (var c in _cache) + { + if (c.Value.isDirty == false) + free.Add(c.Key); + } + _log.Debug("releasing page count = " + free.Count + " out of " + _cache.Count); + Page p; + foreach (var i in free) + { + _cache.TryRemove(i, out p); + p.rwlock.Dispose(); + } + } + catch { } + } + + + public IEnumerable GetDuplicates(T key) + { + using (_listLock.Reading()) + { + PageInfo pi; + Page page = LoadPage(key, out pi); + KeyInfo ki; + bool ret = page.tree.TryGetValue(key, out ki); + if (ret) + // get duplicates + if (ki.DuplicateBitmapNumber != -1) + return _index.GetDuplicatesRecordNumbers(ki.DuplicateBitmapNumber); + } + return new List(); + } + + public void SaveLastRecordNumber(int recnum) + 
{ + _index.SaveLastRecordNumber(recnum); + } + + public bool RemoveKey(T key) + { + using (_listLock.Reading()) + { + PageInfo pi; + Page page = LoadPage(key, out pi); + bool b = page.tree.Remove(key); + using (page.rwlock.Writing()) + { + // FIX : reset the first key for page ?? + if (b) + { + Interlocked.Decrement(ref pi.UniqueCount); + // FEATURE : decrease dup count + } + page.isDirty = true; + } + return b; + } + } + + #region [ P R I V A T E ] + protected WahBitArray doMoreOp(T key, bool eq) + { + using (_listLock.Reading()) + { + bool found = false; + int pos = FindPageOrLowerPosition(key, ref found); + WahBitArray result = new WahBitArray(); + if (pos < _pageList.Count) + { + // all the pages after + for (int i = pos + 1; i < _pageList.Count; i++) + doPageOperation(ref result, i); + } + // key page + Page page = LoadPage(_pageList.Values[pos].PageNumber); + using (page.rwlock.Reading()) + { + T[] keys = page.tree.Keys(); + Array.Sort(keys); + + // find better start position rather than 0 + pos = Array.IndexOf(keys, key); + if (pos == -1) pos = 0; + + for (int i = pos; i < keys.Length; i++) + { + T k = keys[i]; + var comp = k.CompareTo(key); + int bn = page.tree[k].DuplicateBitmapNumber; + + if (comp > 0) + result = result.Or(_index.GetDuplicateBitmap(bn)); + + if (eq && comp == 0) + result = result.Or(_index.GetDuplicateBitmap(bn)); + } + } + return result; + } + } + + protected WahBitArray doLessOp(T key, bool eq) + { + using (_listLock.Reading()) + { + bool found = false; + int pos = FindPageOrLowerPosition(key, ref found); + WahBitArray result = new WahBitArray(); + if (pos > 0) + { + // all the pages before + for (int i = 0; i < pos - 1; i++) + doPageOperation(ref result, i); + } + // key page + Page page = LoadPage(_pageList.Values[pos].PageNumber); + using (page.rwlock.Reading()) + { + T[] keys = page.tree.Keys(); + Array.Sort(keys); + for (int i = 0; i < keys.Length; i++) + { + T k = keys[i]; + var comp = k.CompareTo(key); + if (comp > 0) + break; + 
int bn = page.tree[k].DuplicateBitmapNumber; + + if (comp < 0) + result = result.Or(_index.GetDuplicateBitmap(bn)); + + if (eq && comp == 0) + result = result.Or(_index.GetDuplicateBitmap(bn)); + } + } + return result; + } + } + + public WahBitArray QueryGreater(T key) + => doMoreOp(key, false); + + public WahBitArray QueryGreaterEquals(T key) + => doMoreOp(key, true); + + public WahBitArray QueryLess(T key) + => doMoreOp(key, false); + + public WahBitArray QueryLessEquals(T key) + => doMoreOp(key, true); + + public WahBitArray QueryEquals(T key) + { + using (_listLock.Reading()) + { + PageInfo pi; + Page page = LoadPage(key, out pi); + KeyInfo k; + if (page.tree.TryGetValue(key, out k)) + { + int bn = k.DuplicateBitmapNumber; + + return _index.GetDuplicateBitmap(bn); + } + else + return new WahBitArray(); + } + } + + public WahBitArray QueryNotEquals(T key) + { + return QueryEquals(key).Not(); + } + + private void doPageOperation(ref WahBitArray res, int pageidx) + { + Page page = LoadPage(_pageList.Values[pageidx].PageNumber); + using (page.rwlock.Reading()) + { + T[] keys = page.tree.Keys(); // avoid sync issues + foreach (var k in keys) + { + int bn = page.tree[k].DuplicateBitmapNumber; + + res = res.Or(_index.GetDuplicateBitmap(bn)); + } + } + } + + private double _totalsplits = 0; + private void SplitPage(int num) + { + Page page = null; + if (_pageList.Count == 1 && _listLock.WaitingWriteCount > 0) + // some other thread is waiting to change the first page + return; + using (_listLock.Writing()) + { + page = LoadPage(num); + if (page.tree.Count < Global.PageItemCount && (page.tree.Count < Global.EarlyPageSplitSize || _pageList.Count > Global.EarlyPageCount)) return; + + using (page.rwlock.Writing()) + { + if (page.tree.Count < Global.PageItemCount && (page.tree.Count < Global.EarlyPageSplitSize || _pageList.Count > Global.EarlyPageCount)) return; + + // split the page + DateTime dt = FastDateTime.Now; + + Page newpage = new Page(); + newpage.DiskPageNumber = 
_index.GetNewPageNumber(); + newpage.RightPageNumber = page.RightPageNumber; + newpage.isDirty = true; + page.RightPageNumber = newpage.DiskPageNumber; + // get and sort keys + T[] keys = page.tree.Keys(); + Array.Sort(keys); + // copy data to new + for (int i = keys.Length / 2; i < keys.Length; i++) + { + newpage.tree.Add(keys[i], page.tree[keys[i]]); + // remove from old page + page.tree.Remove(keys[i]); + } + // set the first key + newpage.FirstKey = keys[keys.Length / 2]; + // set the first key refs + _pageList.Remove(page.FirstKey); + _pageList.Remove(keys[0]); + // dup counts + _pageList.Add(keys[0], new PageInfo(page.DiskPageNumber, page.tree.Count, 0)); + page.FirstKey = keys[0]; + + // FEATURE : dup counts + _pageList.Add(newpage.FirstKey, new PageInfo(newpage.DiskPageNumber, newpage.tree.Count, 0)); + _cache.TryAdd(newpage.DiskPageNumber, newpage); + + _totalsplits += FastDateTime.Now.Subtract(dt).TotalMilliseconds; + } + + } + } + + private Page LoadPage(T key, out PageInfo pageinfo) + { + if (!_listLock.IsReadLockHeld) throw new InvalidOperationException("readlock not held"); + + int pagenum = -1; + // find page in list of pages + + bool found = false; + int pos = 0; + if (key != null) + pos = FindPageOrLowerPosition(key, ref found); + pageinfo = _pageList.Values[pos]; + pagenum = pageinfo.PageNumber; + + Page page; + if (_cache.TryGetValue(pagenum, out page) == false) + { + //load page from disk + page = _index.LoadPageFromPageNumber(pagenum); + _cache.TryAdd(pagenum, page); + } + return page; + } + + private Page LoadPage(int pagenum) + { + if (!(_listLock.IsReadLockHeld || _listLock.IsUpgradeableReadLockHeld || _listLock.IsWriteLockHeld)) + throw new InvalidOperationException("readlock not held"); + + // page usage data + //_usage.Add(pagenum, new CacheTimeOut(pagenum, FastDateTime.Now.Ticks)); + return _cache.GetOrAdd(pagenum, _index.LoadPageFromPageNumber); + } + + private void SaveDuplicate(T key, ref KeyInfo ki) + { + if (ki.DuplicateBitmapNumber 
== -1) + ki.DuplicateBitmapNumber = _index.GetBitmapDuplaicateFreeRecordNumber(); + + _index.SetBitmapDuplicate(ki.DuplicateBitmapNumber, ki.RecordNumber); + } + + private int FindPageOrLowerPosition(T key, ref bool found) + { + if (_pageList.Count <= 1) + return 0; + // binary search + int first = 0; + int last = _pageList.Count - 1; + int mid = 0; + while (first < last) + { + // int divide and ceil + mid = ((first + last - 1) >> 1) + 1; + T k = _pageList.Keys[mid]; + int compare = _compFunc == null ? k.CompareTo(key) : _compFunc(k, key); + if (compare < 0) + { + first = mid; + } + if (compare == 0) + { + found = true; + return mid; + } + if (compare > 0) + { + last = mid - 1; + } + } + + return first; + } + #endregion + + public T[] GetKeys() + { + using (_listLock.Reading()) + { + var keys = new List(); + for (int i = 0; i < _pageList.Count; i++) + { + Page page = LoadPage(_pageList.Values[i].PageNumber); + foreach (var k in page.tree.Keys()) + keys.Add(k); + } + return keys.ToArray(); + } + } + + internal int Count() + { + int count = 0; + for (int i = 0; i < _pageList.Count; i++) + { + Page page = LoadPage(_pageList.Values[i].PageNumber); + foreach (var k in page.tree.Keys()) + count++; + } + return count; + } + } +} diff --git a/RaptorDB/Indexes/MMIndex.cs b/RaptorDB/Indexes/MMIndex.cs new file mode 100644 index 0000000..71ae9f3 --- /dev/null +++ b/RaptorDB/Indexes/MMIndex.cs @@ -0,0 +1,445 @@ +using GenericPointerHelpers; +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace RaptorDB.Indexes +{ + public class MMIndex : IBetweenComparisonIndex, IUpdatableIndex + where TKey : IComparable + { + ILog log = LogManager.GetLogger(typeof(MMIndex)); + private MmfTableIndexFileManager fileManager; + private IndexRootFileManager rootManager; + private ReaderWriterLockSlim rwlock = new 
ReaderWriterLockSlim(); + private IIndexRoot root; + public readonly int PageSize; + + public bool AllowsDuplicates => true; + + public MMIndex(string path, string filename, int pageSize, IPageSerializer keySerializer) + { + var filePath = Path.Combine(path, filename); + this.PageSize = pageSize; + fileManager = new MmfTableIndexFileManager(filePath + "-idxdata-", pageSize, keySerializer); + rootManager = new IndexRootFileManager(filePath + ".idxroot", keySerializer); + root = rootManager.Load(); + } + + public void Set(TKey key, int recnum) + { + int split = 0; + try + { + rwlock.EnterReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + if (table.Capacity == table.Count) + { + split = 2; + } + else + { + try + { + table.rwlock.EnterWriteLock(); + table.Set(key, recnum); + if (table.Count * 4 > PageSize * 3) + split = 1; + } + finally + { + fileManager.MarkDirty(tableIndex, table); + table.rwlock.ExitWriteLock(); + } + } + } + finally + { + rwlock.ExitReadLock(); + } + if (split > 0) + { + Split(key); + if (split > 1) Set(key, recnum); + } + } + + protected void Split(TKey key) + { + try + { + rwlock.EnterUpgradeableReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + if (table.Count * 4 <= PageSize * 3) + return; + + try + { + table.rwlock.EnterWriteLock(); + rwlock.EnterWriteLock(); + if (table.Count * 4 <= PageSize * 3) + return; + + SplitCore(tableIndex, table); + } + finally + { + rwlock.ExitWriteLock(); + table.rwlock.ExitWriteLock(); + } + } + finally + { + rwlock.ExitUpgradeableReadLock(); + } + } + + protected void SplitCore(int tableIndex, PageMultiValueHashTable table) + { + var buffer = new KeyValuePair[table.Count]; + var count = table.CopyTo(buffer, 0); + var pivot = SelectPivot(buffer, count); + var table2Index = root.CreateTable(pivot); + table.Clear(); + + // TODO: release root lock here? 
+ + var table2 = fileManager.GetPage(table2Index); + for (int i = 0; i < count; i++) + { + if (buffer[i].Key.CompareTo(pivot) < 0) + table.Add(buffer[i]); + else table2.Add(buffer[i]); + } + fileManager.MarkDirty(tableIndex, table); + fileManager.MarkDirty(table2Index, table2); + log.Debug($"splitted node { tableIndex } in ratio { (float)table.Count / table2.Count }"); + } + + protected TKey SelectPivot(KeyValuePair[] buffer, int count) + { + //TKey gt; + //if (buffer[0].Key.CompareTo(buffer[300].Key) > 0) gt = buffer[0].Key; + //else gt = buffer[300].Key; + + //if (gt.CompareTo(buffer[600].Key) > 0) return buffer[600].Key; + //else return gt; + Array.Sort(buffer, 0, count, KeyValuePairComparer.DefaultInstance); + return buffer[count / 2].Key; + } + + public TKey[] GetKeys() + { + throw new NotImplementedException(); + } + + public void Set(object key, int recnum) + => Set((TKey)key, recnum); + + public void FreeMemory() + { + } + + public void SaveIndex() + { + rootManager.Save(); + } + + public void Dispose() + { + // wait for pending actions + rwlock.EnterWriteLock(); + // save root + rootManager.Save(); + // dispose + rootManager = null; + fileManager.Dispose(); + fileManager = null; + rwlock.Dispose(); + } + + public WahBitArray QueryBetween(TKey from, TKey to) + { + // TODO: implement between + throw new NotImplementedException(); + } + + public WahBitArray QueryGreater(TKey key) + => CompareCore(key, false, 1, 0, -1); + + public WahBitArray QueryGreaterEquals(TKey key) + => CompareCore(key, true, 1, 0, -1); + + public WahBitArray QueryLess(TKey key) + => CompareCore(key, false, -1, 0, -1); + + + public WahBitArray QueryLessEquals(TKey key) + => CompareCore(key, true, -1, 0, -1); + + private WahBitArray CompareCore(TKey key, bool eq, int goal, int skip, int take) + { + if (take == 0) return new WahBitArray(); + try + { + rwlock.EnterReadLock(); + var result = new WahBitArray(); + var topTableIndex = root.GetPageIndex(key); + var array = 
GetPageContent(topTableIndex); + for (int i = 0; i < array.Length; i++) + { + int cmp = array[i].Value.CompareTo(key); + if (cmp == goal || (eq && cmp == 0)) + { + if (skip <= 0) + { + result.Set(array[i].Value, true); + take--; + } + else skip--; + if (take == 0) return result; + } + } + SetAllTables(result, root.GetLowerPagesIndexes(topTableIndex), skip, take); + return result; + } + finally + { + rwlock.ExitReadLock(); + } + } + private KeyValuePair[] GetPageContent(int pageIndex) + { + var table = fileManager.GetPage(pageIndex); + try + { + table.rwlock.EnterReadLock(); + var array = new KeyValuePair[table.Count]; + table.CopyTo(array, 0); + return array; + } + finally + { + table.rwlock.ExitReadLock(); + } + } + + private void SetAllTables(WahBitArray result, IEnumerable tables, int skip, int take) + { + var buffer = new int[PageSize]; + foreach (var tableIndex in tables) + { + var table = fileManager.GetPage(tableIndex); + int count; + try + { + table.rwlock.EnterReadLock(); + table.CopyTo(null, buffer); + count = table.Count; + } + finally + { + table.rwlock.ExitReadLock(); + } + for (int i = 0; i < count; i++) + { + if (skip <= 0) + { + take--; + result.Set(buffer[i], true); + } + else skip--; + if (take == 0) return; + } + } + } + + public WahBitArray QueryEquals(TKey key) + { + try + { + rwlock.EnterReadLock(); + + var table = fileManager.GetPage(root.GetPageIndex(key)); + try + { + table.rwlock.EnterReadLock(); + return WahBitArray.FromIndexes(table[key].ToArray()); + } + finally + { + table.rwlock.ExitReadLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + + public WahBitArray QueryNotEquals(TKey key) + => QueryEquals(key).Not(); + + public TResult Accept(IIndexAcceptable acc) + => acc.Accept(this); + + public bool GetFirst(TKey key, out int idx) + { + try + { + rwlock.EnterReadLock(); + + var table = fileManager.GetPage(root.GetPageIndex(key)); + try + { + table.rwlock.EnterReadLock(); + return table.TryGetValue(key, out idx); + } + 
finally + { + table.rwlock.ExitReadLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + + public bool Remove(TKey key) + { + try + { + rwlock.EnterReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + try + { + table.rwlock.EnterWriteLock(); + return table.RemoveAll(key) > 0; + } + finally + { + fileManager.MarkDirty(tableIndex, table); + table.rwlock.ExitWriteLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + + public bool Remove(TKey key, int recnum) + { + try + { + rwlock.EnterReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + try + { + table.rwlock.EnterWriteLock(); + return table.RemoveFirst(key, recnum); + } + finally + { + fileManager.MarkDirty(tableIndex, table); + table.rwlock.ExitWriteLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + + public void ReplaceFirst(TKey key, int recnum) + { + try + { + rwlock.EnterReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + try + { + table.rwlock.EnterWriteLock(); + table.RemoveFirst(key); + table.Set(key, recnum); + } + finally + { + fileManager.MarkDirty(tableIndex, table); + table.rwlock.ExitWriteLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + + public void Replace(TKey key, int oldNum, int newNum) + { + try + { + rwlock.EnterReadLock(); + + var tableIndex = root.GetPageIndex(key); + var table = fileManager.GetPage(tableIndex); + try + { + table.rwlock.EnterWriteLock(); + table.RemoveFirst(key, oldNum); + table.Set(key, newNum); + } + finally + { + fileManager.MarkDirty(tableIndex, table); + table.rwlock.ExitWriteLock(); + } + } + finally + { + rwlock.ExitReadLock(); + } + } + } + + public class KeyValuePairComparer : IComparer> + { + internal IComparer keyComparer; + + public KeyValuePairComparer(IComparer keyComparer = null) + { + if (keyComparer == null) + { + this.keyComparer = Comparer.Default; + 
} + else + { + this.keyComparer = keyComparer; + } + } + + public int Compare(KeyValuePair x, KeyValuePair y) + { + return keyComparer.Compare(x.Key, y.Key); + } + + public static readonly KeyValuePairComparer DefaultInstance = new KeyValuePairComparer(); + } +} diff --git a/RaptorDB/Indexes/MmfTableIndexFileManager.cs b/RaptorDB/Indexes/MmfTableIndexFileManager.cs new file mode 100644 index 0000000..fa322d8 --- /dev/null +++ b/RaptorDB/Indexes/MmfTableIndexFileManager.cs @@ -0,0 +1,150 @@ +using RaptorDB.Common; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO.MemoryMappedFiles; +using System.Linq; +using System.Text; + +namespace RaptorDB.Indexes +{ + public class MmfTableIndexFileManager: IDisposable + { + readonly int PageSize; + readonly int HashtableCapacity; + readonly IPageSerializer KeySerializer; + readonly IPageSerializer ValueSerializer; + readonly ConcurrentDictionary>> TableCache = new ConcurrentDictionary>>(); + public readonly string FilePrefix; + MmFileInfo[] Files; + object initLocker = new object(); + object cacheLocker = new object(); + + public MmfTableIndexFileManager(string filePrefix, int hashtableCapacity, IPageSerializer keySerializer = null, IPageSerializer valueSerializer = null) + { + this.FilePrefix = filePrefix; + this.KeySerializer = keySerializer; + this.ValueSerializer = valueSerializer; + this.Files = new MmFileInfo[0]; + this.PageSize = hashtableCapacity * PageHashTableHelper.GetEntrySize(keySerializer, valueSerializer) + 4; + this.HashtableCapacity = hashtableCapacity; + } + + public PageMultiValueHashTable GetPage(int index) + { + WeakReference> t; + PageMultiValueHashTable table; + if (TableCache.TryGetValue(index, out t)) + { + if (t.TryGetTarget(out table)) + { + return table; + } + } + + lock(cacheLocker) + { + if (TableCache.TryRemove(index, out t) && t.TryGetTarget(out table)) + { + TableCache.TryAdd(index, t); + } + else + { + table = 
LoadHashtable(index); + if (!TableCache.TryAdd(index, new WeakReference>(table))) + throw new Exception("fuck!"); + } + return table; + } + } + + public unsafe void MarkDirty(int index, PageMultiValueHashTable page) + { + *(((int*)page.StartPointer) - 1) = page.Count; + } + + private PageMultiValueHashTable LoadHashtable(int index) + { + var fi = Helper.Log2(index + 1) - 1; + if (fi >= Files.Length) + InitFiles(fi); + var file = Files[fi]; + return CreateTable(file, index); + } + + private unsafe PageMultiValueHashTable CreateTable(MmFileInfo file, int index) + { + var pointer = file.StartPointer + (PageSize * (index - file.FirstPageIndex)); + return CreateTable(pointer); + } + + private unsafe PageMultiValueHashTable CreateTable(byte* pointer) + { + return new PageMultiValueHashTable(HashtableCapacity, KeySerializer, ValueSerializer, + pointer + 4, 256, *(int*)pointer); + } + + private void InitFiles(int fileIndex) + { + lock(initLocker) + { + if (Files.Length > fileIndex) return; + var l = Files.Length; + Array.Resize(ref Files, fileIndex + 1); + for (int i = l; i <= fileIndex; i++) + { + InitFile(fileIndex); + } + } + } + + private void InitFile(int fileIndex) + { + var size = fileIndex == 0 ? 
1 : Files[fileIndex - 1].Count * 2; + var file = MmFileInfo.OpenOrCreate(FilePrefix + fileIndex + ".phti", size * PageSize, size - 1, size); + Files[fileIndex] = file; + } + + public void Dispose() + { + foreach (var file in Files) + { + file.Dispose(); + } + } + + public unsafe class MmFileInfo: IDisposable + { + public readonly int FirstPageIndex; + public readonly int Count; + public readonly long Size; + public readonly MemoryMappedFile File; + public readonly MemoryMappedViewAccessor Accessor; + public readonly byte* StartPointer; + + public MmFileInfo(int firstPageIndex, int count, long size, MemoryMappedFile file) + { + this.FirstPageIndex = firstPageIndex; + this.Count = count; + this.Size = size; + this.File = file; + this.Accessor = file.CreateViewAccessor(); + Accessor.SafeMemoryMappedViewHandle.AcquirePointer(ref StartPointer); + } + + public static MmFileInfo OpenOrCreate(string filePath, long size, int firstPageIndex, int pageCount) + { + var mmf = MemoryMappedFile.CreateFromFile(filePath, System.IO.FileMode.OpenOrCreate, null, size); + return new MmFileInfo(firstPageIndex, pageCount, size, mmf); + } + + public void Dispose() + { + Accessor.SafeMemoryMappedViewHandle.ReleasePointer(); + Accessor.Dispose(); + File.Dispose(); + } + } + } +} diff --git a/RaptorDB.Common/Interfaces.cs b/RaptorDB/Interfaces.cs similarity index 74% rename from RaptorDB.Common/Interfaces.cs rename to RaptorDB/Interfaces.cs index e51acc3..f41f819 100644 --- a/RaptorDB.Common/Interfaces.cs +++ b/RaptorDB/Interfaces.cs @@ -1,148 +1,148 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Linq.Expressions; - -namespace RaptorDB -{ - public static class RDBExtensions - { - ///// - ///// For RaptorDB optimized range queries - ///// - ///// - ///// - ///// - ///// - ///// - //public static bool Between(this T obj, T from, T to) - //{ - // return true; - //} - - ///// - ///// For RaptorDB full text search queries - ///// - ///// 
- ///// - ///// - //public static bool Contains(this string obj, string what) - //{ - // return true; - //} - } - - /// - /// Used for normal string columns - /// - [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)] - public class CaseInsensitiveAttribute : Attribute - { - } - - /// - /// Used for the indexer -> hOOt full text indexing - /// - [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)] - public class FullTextAttribute : Attribute - { - } - - /// - /// Used for declaring view extensions DLL's - /// - [AttributeUsage(AttributeTargets.Class)] - public class RegisterViewAttribute : Attribute - { - } - - /// - /// Index file max string length size in UTF8 (Default = 60) - /// - [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)] - public class StringIndexLength : Attribute - { - public StringIndexLength() - { - Length = 60; // default - } - public StringIndexLength(byte length) - { - Length = length; - } - public byte Length; - } - - public interface IQueryInterface - { - /// - /// Log messages - /// - /// - void Log(string message); - - /// - /// Count all data associated with the Documnet Type or the View Type with a string filter - /// - /// - /// - int Count(string viewname); - - /// - /// Count all data associated with View name and string filter - /// - /// - /// - /// - int Count(string ViewName, string Filter); - - /// - /// Fetch a document by it's Guid - /// - /// - /// - object Fetch(Guid guid); - - // new query model - Result Query(Expression> Filter); - Result Query(Expression> Filter, int start, int count); - Result Query(string Filter); - Result Query(string Filter, int start, int count); - int Count(Expression> Filter); - } - - public interface IMapAPI : IQueryInterface - { - /// - /// Emit values, the ordering must match the view schema - /// - /// - /// - void Emit(Guid docid, params object[] data); - - /// - /// Emits the object matching the view schema, you must make sure the object 
property names match the row schema - /// - /// - /// - /// - void EmitObject(Guid docid, T doc); - - /// - /// Roll back the transaction if the primary view is in transaction mode - /// - void RollBack(); - - /// - /// Get the next row number for this view - /// - /// - int NextRowNumber(); - } - - public interface IClientHandler - { - bool GenerateClientData(IQueryInterface api, string username, List DocsToSend); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Linq.Expressions; + +namespace RaptorDB +{ + //public static class RDBExtensions + //{ + // ///// + // ///// For RaptorDB optimized range queries + // ///// + // ///// + // ///// + // ///// + // ///// + // ///// + // //public static bool Between(this T obj, T from, T to) + // //{ + // // return true; + // //} + + // ///// + // ///// For RaptorDB full text search queries + // ///// + // ///// + // ///// + // ///// + // //public static bool Contains(this string obj, string what) + // //{ + // // return true; + // //} + //} + + /// + /// Used for normal string columns + /// + [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)] + public class CaseInsensitiveAttribute : Attribute + { + } + + /// + /// Used for the indexer -> hOOt full text indexing + /// + [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)] + public class FullTextAttribute : Attribute + { + } + + /// + /// Used for declaring view extensions DLL's + /// + [AttributeUsage(AttributeTargets.Class)] + public class RegisterViewAttribute : Attribute + { + } + + /// + /// Index file max string length size in UTF8 (Default = 60) + /// + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)] + public class StringIndexLengthAttribute : Attribute + { + public StringIndexLengthAttribute() + { + Length = 60; // default + } + public StringIndexLengthAttribute(byte length) + { + Length = length; + } + public byte Length; + } + + public interface 
IQueryInterface + { + /// + /// Log messages + /// + /// + void Log(string message); + + /// + /// Count all data associated with the Documnet Type or the View Type with a string filter + /// + /// + /// + int Count(string viewname); + + /// + /// Count all data associated with View name and string filter + /// + /// + /// + /// + int Count(string ViewName, string Filter); + + /// + /// Fetch a document by it's Guid + /// + /// + /// + object Fetch(Guid guid); + + // new query model + Result Query(Expression> Filter); + Result Query(Expression> Filter, int start, int count); + Result Query(string Filter); + Result Query(string Filter, int start, int count); + int Count(Expression> Filter); + } + + public interface IMapAPI : IQueryInterface + { + /// + /// Emit values, the ordering must match the view schema + /// + /// + /// + void Emit(Guid docid, params object[] data); + + /// + /// Emits the object matching the view schema, you must make sure the object property names match the row schema + /// + /// + /// + /// + void EmitObject(Guid docid, T doc); + + /// + /// Roll back the transaction if the primary view is in transaction mode + /// + void RollBack(); + + /// + /// Get the next row number for this view + /// + /// + int NextRowNumber(); + } + + public interface IClientHandler + { + bool GenerateClientData(IQueryInterface api, string username, List DocsToSend); + } +} diff --git a/RaptorDB/Properties/Resources.Designer.cs b/RaptorDB/Properties/Resources.Designer.cs index 32cf670..0229b09 100644 --- a/RaptorDB/Properties/Resources.Designer.cs +++ b/RaptorDB/Properties/Resources.Designer.cs @@ -1,81 +1,81 @@ -//------------------------------------------------------------------------------ -// -// This code was generated by a tool. -// Runtime Version:4.0.30319.18213 -// -// Changes to this file may cause incorrect behavior and will be lost if -// the code is regenerated. 
-// -//------------------------------------------------------------------------------ - -namespace RaptorDB.Properties { - using System; - - - /// - /// A strongly-typed resource class, for looking up localized strings, etc. - /// - // This class was auto-generated by the StronglyTypedResourceBuilder - // class via a tool like ResGen or Visual Studio. - // To add or remove a member, edit your .ResX file then rerun ResGen - // with the /str option, or rebuild your VS project. - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] - [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] - [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] - internal class Resources { - - private static global::System.Resources.ResourceManager resourceMan; - - private static global::System.Globalization.CultureInfo resourceCulture; - - [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] - internal Resources() { - } - - /// - /// Returns the cached ResourceManager instance used by this class. - /// - [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] - internal static global::System.Resources.ResourceManager ResourceManager { - get { - if (object.ReferenceEquals(resourceMan, null)) { - global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("RaptorDB.Properties.Resources", typeof(Resources).Assembly); - resourceMan = temp; - } - return resourceMan; - } - } - - /// - /// Overrides the current thread's CurrentUICulture property for all - /// resource lookups using this strongly typed resource class. 
- /// - [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] - internal static global::System.Globalization.CultureInfo Culture { - get { - return resourceCulture; - } - set { - resourceCulture = value; - } - } - - /// - /// Looks up a localized string similar to The following error occurred and the json document is below, you can skip this - ///document if you wish by incrementing the %c% file : - /// - ///%ex% - ///------------------------------------------------------------------------------ - ///The json document is : - /// - ///%js% - /// - ///. - /// - internal static string msg { - get { - return ResourceManager.GetString("msg", resourceCulture); - } - } - } -} +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// Runtime Version:4.0.30319.42000 +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace RaptorDB.Properties { + using System; + + + /// + /// A strongly-typed resource class, for looking up localized strings, etc. + /// + // This class was auto-generated by the StronglyTypedResourceBuilder + // class via a tool like ResGen or Visual Studio. + // To add or remove a member, edit your .ResX file then rerun ResGen + // with the /str option, or rebuild your VS project. 
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + internal class Resources { + + private static global::System.Resources.ResourceManager resourceMan; + + private static global::System.Globalization.CultureInfo resourceCulture; + + [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] + internal Resources() { + } + + /// + /// Returns the cached ResourceManager instance used by this class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Resources.ResourceManager ResourceManager { + get { + if (object.ReferenceEquals(resourceMan, null)) { + global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("RaptorDB.Properties.Resources", typeof(Resources).Assembly); + resourceMan = temp; + } + return resourceMan; + } + } + + /// + /// Overrides the current thread's CurrentUICulture property for all + /// resource lookups using this strongly typed resource class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Globalization.CultureInfo Culture { + get { + return resourceCulture; + } + set { + resourceCulture = value; + } + } + + /// + /// Looks up a localized string similar to The following error occurred and the json document is below, you can skip this + ///document if you wish by incrementing the %c% file : + /// + ///%ex% + ///------------------------------------------------------------------------------ + ///The json document is : + /// + ///%js% + /// + ///. 
+ /// + internal static string msg { + get { + return ResourceManager.GetString("msg", resourceCulture); + } + } + } +} diff --git a/RaptorDB/Properties/Resources.resx b/RaptorDB/Properties/Resources.resx index 2c0711f..378ca35 100644 --- a/RaptorDB/Properties/Resources.resx +++ b/RaptorDB/Properties/Resources.resx @@ -1,124 +1,124 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/microsoft-resx - - - 2.0 - - - System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - - ..\replication\msg.txt;System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089;utf-8 - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + + ..\replication\msg.txt;System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089;utf-8 + \ No newline at end of file diff --git a/RaptorDB/RaptorDB.cs b/RaptorDB/RaptorDB.cs index c5f0c5f..278a5fa 100644 --- a/RaptorDB/RaptorDB.cs +++ b/RaptorDB/RaptorDB.cs @@ -1,1458 +1,1468 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.IO; -using System.Threading; -using System.Collections; -using RaptorDB.Views; -using System.Linq.Expressions; -using System.Threading.Tasks; -using System.Reflection; -using RaptorDB.Common; -using System.IO.Compression; -using System.CodeDom.Compiler; -using System.Text.RegularExpressions; -using System.ComponentModel; - -namespace 
RaptorDB -{ - public class RaptorDB : IRaptorDB - { - private RaptorDB(string FolderPath) - { - // speed settings - fastJSON.JSON.Parameters.ParametricConstructorOverride = true; - fastBinaryJSON.BJSON.Parameters.ParametricConstructorOverride = true; - fastJSON.JSON.Parameters.UseEscapedUnicode = false; - - if (_S == "/") - FolderPath = FolderPath.Replace("\\", "/"); - // create folders - Directory.CreateDirectory(FolderPath); - string foldername = Path.GetFullPath(FolderPath); - if (foldername.EndsWith(_S) == false) - foldername += _S; - _Path = foldername; - - // if configs !exists create template config files - CreateTemplateConfigFiles(); - - Initialize(); - } - - private void CreateTemplateConfigFiles() - { - if (File.Exists(_Path + "RaptorDB.config") == false) - File.WriteAllText(_Path + "-RaptorDB.config", fastJSON.JSON.ToNiceJSON(new Global(), new fastJSON.JSONParameters { UseExtensions = false })); - - if (File.Exists(_Path + "RaptorDB-Branch.config") == false) - File.WriteAllText(_Path + "-RaptorDB-Branch.config", fastJSON.JSON.ToNiceJSON(new Replication.ClientConfiguration(), new fastJSON.JSONParameters { UseExtensions = false })); - - if (File.Exists(_Path + "RaptorDB-Replication.config") == false) - { - Replication.ServerConfiguration s = new Replication.ServerConfiguration(); - s.What.Add(new Replication.WhatItem { Name = "default", PackageItemLimit = 10000, Version = 1, B2HQtypes = new List { "*" }, HQ2Btypes = new List { "*" } }); - s.What.Add(new Replication.WhatItem { Name = "b2", PackageItemLimit = 10000, Version = 1, B2HQtypes = new List { "*" }, HQ2Btypes = new List { "config.*" } }); - s.Where.Add(new Replication.WhereItem { BranchName = "b1", Password = "123", When = "*/5 * * * *", What = "default" }); - s.Where.Add(new Replication.WhereItem { BranchName = "b2", Password = "321", When = "*/20 * * * *", What = "b2" }); - File.WriteAllText(_Path + "-RaptorDB-Replication.config", fastJSON.JSON.ToNiceJSON(s, new fastJSON.JSONParameters { 
UseExtensions = false })); - } - } - - public static RaptorDB Open(string FolderPath) - { - return new RaptorDB(FolderPath); - } - - private string _S = Path.DirectorySeparatorChar.ToString(); - private ILog _log = LogManager.GetLogger(typeof(RaptorDB)); - private Views.ViewManager _viewManager; - private KeyStore _objStore; - private KeyStore _fileStore; - private KeyStoreHF _objHF; - private string _Path = ""; - private int _LastRecordNumberProcessed = -1; // used by background saver - private int _LastFulltextIndexed = -1; // used by the fulltext indexer - private int _LastBackupRecordNumber = -1; - private int _CurrentRecordNumber = -1; - private System.Timers.Timer _saveTimer; - private System.Timers.Timer _fulltextTimer; - private System.Timers.Timer _freeMemTimer; - private System.Timers.Timer _processinboxTimer; - private bool _shuttingdown = false; - private bool _pauseindexer = false; - private MethodInfo otherviews = null; - private MethodInfo save = null; - private MethodInfo saverep = null; - private SafeDictionary _savecache = new SafeDictionary(); - private SafeDictionary _saverepcache = new SafeDictionary(); - private FullTextIndex _fulltextindex; - private CronDaemon _cron; - private Replication.ReplicationServer _repserver; - private Replication.ReplicationClient _repclient; - //private bool _disposed = false; - //private bool _clientReplicationEnabled; - - //public bool SyncNow(string server, int port, string username, string password) - //{ - - // return false; - //} - - #region [ p u b l i c i n t e r f a c e ] - /// - /// Save files to RaptorDB - /// - /// - /// - public bool SaveBytes(Guid docID, byte[] bytes) - { - // save files in storage - _fileStore.SetBytes(docID, bytes); - return true; - } - /// - /// Delete a document (note data is not lost just flagged as deleted) - /// - /// - /// - public bool Delete(Guid docid) - { - bool b = _objStore.Delete(docid); - _viewManager.Delete(docid); - return b; - } - - /// - /// Delete a file (note 
data is not lost just flagged as deleted) - /// - /// - /// - public bool DeleteBytes(Guid bytesid) - { - return _fileStore.Delete(bytesid); - } - - /// - /// Save a document - /// - /// - /// - /// - /// - public bool Save(Guid docid, T data) - { - string viewname = _viewManager.GetPrimaryViewForType(data.GetType()); - if (viewname == "" && Global.RequirePrimaryView == true) - { - _log.Debug("Primary View not defined for object : " + data.GetType()); - return false; - } - _pauseindexer = true; - if (viewname != "" && _viewManager.isTransaction(viewname)) - { - _log.Debug("TRANSACTION started for docid : " + docid); - // add code here - _viewManager.StartTransaction(); - - bool b = SaveInPrimaryViewTransaction(viewname, docid, data); - if (b == true) - { - b = SaveToConsistentViewsTransaction(docid, data); - if (b == true) - { - b = SaveInOtherViewsTransaction(docid, data); - if (b == true) - { - _viewManager.Commit(Thread.CurrentThread.ManagedThreadId); - int recnum = _objStore.SetObject(docid, data); - _CurrentRecordNumber = recnum; - _pauseindexer = false; - return true; - } - } - } - _viewManager.Rollback(Thread.CurrentThread.ManagedThreadId); - _pauseindexer = false; - return false; - } - else - { - int recnum = _objStore.SetObject(docid, data); - _CurrentRecordNumber = recnum; - - if (viewname != "") - { - SaveInPrimaryView(viewname, docid, data); - - SaveToConsistentViews(docid, data); - - if (Global.BackgroundSaveToOtherViews == false) - { - SaveInOtherViews(docid, data); - _LastRecordNumberProcessed = recnum; - } - } - _pauseindexer = false; - return true; - } - } - - /// - /// Query any view -> get all rows - /// - /// - /// - /// - public Result Query(string viewname) - { - return _viewManager.Query(viewname, 0, -1); - } - - /// - /// Query a view using a string filter - /// - /// - /// - /// - public Result Query(string viewname, string filter) - { - if (filter == "") - return _viewManager.Query(viewname, 0, -1); - - return _viewManager.Query(viewname, 
filter, 0, -1); - } - - /// - /// Fetch a document by it's ID - /// - /// - /// - public object Fetch(Guid docID) - { - object b = null; - _objStore.GetObject(docID, out b); - return b; - } - - /// - /// Fetch file data by it's ID - /// - /// - /// - public byte[] FetchBytes(Guid fileID) - { - byte[] b = null; - if (_fileStore.GetBytes(fileID, out b)) - return b; - else - return null; - } - - /// - /// Register a view - /// - /// - /// - public void RegisterView(View view) - { - _viewManager.RegisterView(view); - } - - /// - /// Shutdown the database engine and flush memory to disk - /// - public void Shutdown() - { - if (_shuttingdown == true) - return; - - _shuttingdown = true; - - _processinboxTimer.Enabled = false; - _saveTimer.Enabled = false; - _freeMemTimer.Enabled = false; - _fulltextTimer.Enabled = false; - - if (_repserver != null) - _repserver.Shutdown(); - - if (_repclient != null) - _repclient.Shutdown(); - - // TODO : write global or something else? - //if (File.Exists(_Path + "RaptorDB.config") == false) - File.WriteAllText(_Path + "RaptorDB.config", fastJSON.JSON.ToNiceJSON(new Global(), new fastJSON.JSONParameters { UseExtensions = false })); - if (_cron != null) - _cron.Stop(); - _fulltextindex.Shutdown(); - - _log.Debug("Shutting down"); - _saveTimer.Stop(); - _fulltextTimer.Stop(); - _viewManager.ShutDown(); - _objStore.Shutdown(); - _fileStore.Shutdown(); - _objHF.Shutdown(); - - // save records - _log.Debug("last full text record = " + _LastFulltextIndexed); - File.WriteAllBytes(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec", Helper.GetBytes(_LastFulltextIndexed, false)); - _log.Debug("last record = " + _LastRecordNumberProcessed); - File.WriteAllBytes(_Path + "Data" + _S + "_lastrecord.rec", Helper.GetBytes(_LastRecordNumberProcessed, false)); - _log.Debug("last backup record = " + _LastBackupRecordNumber); - File.WriteAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec", Helper.GetBytes(_LastBackupRecordNumber, false)); - - 
_log.Debug("Shutting down log."); - _log.Debug("RaptorDB done."); - LogManager.Shutdown(); - } - - - #region [ BACKUP/RESTORE and REPLICATION ] - private object _backuplock = new object(); - /// - /// Backup the document storage file incrementally to "Backup" folder - /// - /// True = done - public bool Backup() - { - if (_LastBackupRecordNumber >= _CurrentRecordNumber) - return false; - lock (_backuplock) - { - _log.Debug("Backup Started..."); - string tempp = _Path + "Temp" + _S + DateTime.Now.ToString("yyyy-MM-dd-HH-mm"); - Directory.CreateDirectory(tempp); - StorageFile backup = new StorageFile(tempp + _S + "backup.mgdat", SF_FORMAT.BSON, true); - _log.Debug("Copying data to backup"); - if (_LastBackupRecordNumber == -1) - _LastBackupRecordNumber = 0; - int rec = _objStore.CopyTo(backup, _LastBackupRecordNumber); - backup.Shutdown(); - - _log.Debug("Last backup rec# = " + rec); - - // compress the file - using (FileStream read = File.OpenRead(tempp + _S + "backup.mgdat")) - using (FileStream outp = File.Create(tempp + _S + "backup.mgdat.gz")) - CompressForBackup(read, outp); - - _log.Debug("Backup compressed and done"); - File.Move(tempp + _S + "backup.mgdat.gz", _Path + "Backup" + _S + DateTime.Now.ToString("yyyy-MM-dd-HH-mm") + ".mgdat.gz"); - - _log.Debug("last backup record = " + _LastBackupRecordNumber); - File.WriteAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec", Helper.GetBytes(_LastBackupRecordNumber, false)); - // cleanup temp folder - Directory.Delete(tempp, true); - _log.Debug("Backup done."); - _LastBackupRecordNumber = rec; - return true; - } - } - - private DateTime _lastFailedTime = DateTime.Now; - private object _replock = new object(); - private void ProcessReplicationInbox(string inboxfolder) - { - lock (_replock) - { - if (Directory.Exists(inboxfolder) == false) - return; - - string[] files = Directory.GetFiles(inboxfolder, "*.counter"); - - // check if ".counter" file exists - if (files.Length > 0) - { - // FEATURE: if lastfailedtime 
< 15 -> wait 15 min and retry (avoid extra cpu burning) - // recovery mode - string fn = files[0]; - int start = -1; - if (int.TryParse(File.ReadAllText(fn).Trim(), out start)) - { - if (DoRepProcessing(fn.Replace(".counter", ".mgdat"), start) == false) - return; - } - else - { - _log.Error("Unable to parse counter value in : " + fn); - return; - } - } - - files = Directory.GetFiles(inboxfolder, "*.gz"); - - Array.Sort(files); - foreach (var filename in files) - { - - string tmp = filename.Replace(".gz", "");// FEATURE : to temp folder ?? - if (File.Exists(tmp)) - File.Delete(tmp); - using (FileStream read = File.OpenRead(filename)) - using (FileStream outp = File.Create(tmp)) - DecompressForRestore(read, outp); - _log.Debug("Uncompress done : " + Path.GetFileName(tmp)); - if (DoRepProcessing(tmp, 0) == false) - return; - if (_shuttingdown) - return; - } - } - } - - private bool DoRepProcessing(string filename, int start) - { - string fn = Path.GetFileNameWithoutExtension(filename); - string path = Path.GetDirectoryName(filename); - StorageFile sf = StorageFile.ReadForward(filename); - int counter = 0; - if (start > 0) - _log.Debug("skipping replication items : " + start); - foreach (var i in sf.ReadOnlyEnumerate()) - { - if (start > 0) // skip already done - { - start--; - counter++; - } - else - { - if (i.meta.isDeleted) - DeleteReplicate(i.meta.key); - else - { - try - { - object obj = CreateObject(i.data); - var m = GetSaveReplicate(obj.GetType()); - m.Invoke(this, new object[] { i.meta.key, obj }); - } - catch (Exception ex) - { - _log.Error(ex); - sf.Shutdown(); - string err = Properties.Resources.msg.Replace("%js%", fastJSON.JSON.Beautify(Helper.GetString(i.data, 0, (short)i.data.Length))) - .Replace("%ex%", "" + ex) - .Replace("%c%", path + _S + fn + ".counter"); - - File.WriteAllText(path + _S + fn + ".error.txt", err); - _lastFailedTime = DateTime.Now; - return false; - } - } - counter++; - File.WriteAllText(path + _S + fn + ".counter", "" + counter); - 
if (_shuttingdown) - { - _log.Debug("shutting down before replicate data completed..."); - sf.Shutdown(); - return false; - } - } - } - sf.Shutdown(); - _log.Debug("File replicate complete : " + Path.GetFileName(filename)); - foreach (var f in Directory.GetFiles(path, fn + ".*")) - File.Delete(f); - return true; - } - - private void DeleteReplicate(Guid docid) - { - bool b = _objStore.DeleteReplicated(docid); - _viewManager.Delete(docid); - } - - private object _restoreLock = new object(); - /// - /// Start background restore of backups in the "Restore" folder - /// - public void Restore() - { - lock (_restoreLock) - { - try - { - string[] files = Directory.GetFiles(_Path + "Restore", "*.counter"); - // check if ".counter" file exists - if (files.Length > 0) - { - // resume mode - string fn = files[0]; - int start = -1; - if (int.TryParse(File.ReadAllText(fn).Trim(), out start)) - { - if (DoRestoreProcessinng(fn.Replace(".counter", ".mgdat"), start) == false) - return; - } - else - { - _log.Error("Unable to parse counter value in : " + fn); - return; - } - } - // do restore - files = Directory.GetFiles(_Path + "Restore", "*.gz"); - Array.Sort(files); - _log.Debug("Restoring file count = " + files.Length); - - foreach (string file in files) - { - string tmp = file.Replace(".gz", "");// FEATURE : to temp folder ?? 
- if (File.Exists(tmp)) - File.Delete(tmp); - using (FileStream read = File.OpenRead(file)) - using (FileStream outp = File.Create(tmp)) - DecompressForRestore(read, outp); - _log.Debug("Uncompress done : " + Path.GetFileName(tmp)); - - if (DoRestoreProcessinng(tmp, 0)) - File.Move(file, _Path + "Restore" + _S + "Done" + _S + Path.GetFileName(file)); - } - } - catch (Exception ex) - { - _log.Error(ex); - } - } - } - - private bool DoRestoreProcessinng(string filename, int start) - { - string fn = Path.GetFileNameWithoutExtension(filename); - string path = Path.GetDirectoryName(filename); - int counter = 0; - StorageFile sf = StorageFile.ReadForward(filename); - foreach (var i in sf.ReadOnlyEnumerate()) - { - if (start > 0) - { - start--; - counter++; - } - else - { - if (i.meta.isDeleted) - Delete(i.meta.key); - else - { - object obj = CreateObject(i.data); - var m = GetSave(obj.GetType()); - m.Invoke(this, new object[] { i.meta.key, obj }); - } - counter++; - File.WriteAllText(path + _S + fn + ".counter", "" + counter); - if (_shuttingdown) - { - _log.Debug("shutting down before restore completed..."); - sf.Shutdown(); - return false; - } - } - } - sf.Shutdown(); - _log.Debug("File restore complete : " + Path.GetFileName(filename)); - foreach (var f in Directory.GetFiles(path, fn + ".*")) - File.Delete(f); - - return true; - } - - private bool SaveReplicationObject(Guid docid, T data) - { - string viewname = _viewManager.GetPrimaryViewForType(data.GetType()); - if (viewname == "") - { - _log.Debug("Primary View not defined for object : " + data.GetType()); - return false; - } - _pauseindexer = true; - int recnum = _objStore.SetReplicationObject(docid, data); - _CurrentRecordNumber = recnum; - - SaveInPrimaryView(viewname, docid, data); - - SaveToConsistentViews(docid, data); - - if (Global.BackgroundSaveToOtherViews == false) - { - SaveInOtherViews(docid, data); - _LastRecordNumberProcessed = recnum; - } - _pauseindexer = false; - return true; - } - #endregion - - 
/// - /// Add a user (only supported in server mode) - /// - /// - /// - /// - /// - public bool AddUser(string username, string oldpassword, string newpassword) - { - return false; - } - - /// - /// Execute a server side string filter query - /// - /// - /// - /// - public object[] ServerSide(ServerSideFunc func, string filter) - { - return func(this, filter).ToArray(); - } - - /// - /// Execute a server side LINQ query - /// - /// - /// - /// - /// - public object[] ServerSide(ServerSideFunc func, Expression> filter) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - return func(this, ls.sb.ToString()).ToArray(); - } - - /// - /// Full text search the entire original document - /// - /// - /// - public int[] FullTextSearch(string filter) - { - var wbmp = _fulltextindex.Query(filter, _objStore.RecordCount()); - List a = new List(); - a.AddRange(wbmp.GetBitIndexes()); - - return a.ToArray(); - } - - /// - /// Query a view - /// - /// - /// - /// - public Result Query(Expression> filter) - { - return _viewManager.Query(filter, 0, -1); - } - - /// - /// Query a view with paging - /// - /// - /// - /// - /// - /// - public Result Query(Expression> filter, int start, int count) - { - return _viewManager.Query(filter, start, count, ""); - } - - /// - /// Query a view with paging and order by - /// - /// - /// - /// - /// - /// - /// - public Result Query(Expression> filter, int start, int count, string orderby) - { - return _viewManager.Query(filter, start, count, orderby); - } - - /// - /// Query a view - /// - /// - /// - /// - public Result Query(string filter) - { - return _viewManager.Query(filter, 0, -1); - } - - /// - /// Query a view with paging - /// - /// - /// - /// - /// - /// - public Result Query(string filter, int start, int count) - { - return _viewManager.Query(filter, start, count); - } - - /// - /// Count with filter - /// - /// - /// - /// - public int Count(Expression> filter) - { - return _viewManager.Count(filter); - } - - /// - /// - 
/// - /// - /// - /// - /// - public Result Query(string viewname, int start, int count) - { - return _viewManager.Query(viewname, start, count); - } - - /// - /// - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, string filter, int start, int count) - { - return _viewManager.Query(viewname, filter, start, count); - } - - /// - /// Count all data associated with View name - /// - /// - /// - public int Count(string viewname) - { - return _viewManager.Count(viewname, ""); - } - - /// - /// Count all data associated with View name and string filter - /// - /// - /// - /// - public int Count(string viewname, string filter) - { - return _viewManager.Count(viewname, filter); - } - - /// - /// Fetch the change history for a document - /// - /// - /// - public int[] FetchHistory(Guid docid) - { - return _objStore.GetHistory(docid); - } - - /// - /// Fetch a change history for a file - /// - /// - /// - public int[] FetchBytesHistory(Guid fileid) - { - return _fileStore.GetHistory(fileid); - } - - /// - /// Fetch the specific document version - /// - /// - /// - public object FetchVersion(int versionNumber) - { - StorageItem meta = null; - return _objStore.GetObject(versionNumber, out meta); - } - - /// - /// Fetch the specific file version - /// - /// - /// - public byte[] FetchBytesVersion(int versionNumber) - { - StorageItem meta = null; - return _fileStore.GetBytes(versionNumber, out meta); - } - #endregion - - #region [ P R I V A T E M E T H O D S ] - - internal string GetViewName(Type type) - { - return _viewManager.GetViewName(type); - } - - private bool SaveToView(Guid docid, T data, List list) - { - if (list != null) - foreach (string name in list) - { - bool ret = _viewManager.InsertTransaction(name, docid, data); - if (ret == false) - return false; - } - return true; - } - - private bool SaveInOtherViewsTransaction(Guid docid, T data) - { - List list = _viewManager.GetOtherViewsList(data.GetType()); - return SaveToView(docid, data, 
list); - } - - private bool SaveToConsistentViewsTransaction(Guid docid, T data) - { - List list = _viewManager.GetConsistentViews(data.GetType()); - return SaveToView(docid, data, list); - } - - private bool SaveInPrimaryViewTransaction(string viewname, Guid docid, T data) - { - return _viewManager.InsertTransaction(viewname, docid, data); - } - - private static void PumpDataForBackup(Stream input, Stream output) - { - byte[] bytes = new byte[4096 * 2]; - int n; - while ((n = input.Read(bytes, 0, bytes.Length)) != 0) - output.Write(bytes, 0, n); - } - - private static void CompressForBackup(Stream source, Stream destination) - { - using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) - PumpDataForBackup(source, gz); - } - - private static void DecompressForRestore(Stream source, Stream destination) - { - using (GZipStream gz = new GZipStream(source, CompressionMode.Decompress)) - PumpDataForBackup(gz, destination); - } - - private void SaveToConsistentViews(Guid docid, T data) - { - List list = _viewManager.GetConsistentViews(data.GetType()); - if (list != null) - foreach (string name in list) - { - _log.Debug("Saving to consistent view : " + name); - _viewManager.Insert(name, docid, data); - } - } - - private object CreateObject(byte[] b) - { - if (b[0] < 32) - return fastBinaryJSON.BJSON.ToObject(b); - else - return fastJSON.JSON.ToObject(Encoding.ASCII.GetString(b)); - } - - private void SaveInOtherViews(Guid docid, T data) - { - List list = _viewManager.GetOtherViewsList(data.GetType()); - if (list != null) - foreach (string name in list) - _viewManager.Insert(name, docid, data); - } - - private void SaveInPrimaryView(string viewname, Guid docid, T data) - { - _viewManager.Insert(viewname, docid, data); - } - - private void Initialize() - { - //AppDomain.CurrentDomain.ProcessExit += new EventHandler(CurrentDomain_ProcessExit); - - // TODO : read/write global or another object? 
- // read raptordb.config here (running parameters) - if (File.Exists(_Path + "RaptorDB.config")) - fastJSON.JSON.FillObject(new Global(), File.ReadAllText(_Path + "RaptorDB.config")); - - Directory.CreateDirectory(_Path + "Data"); - Directory.CreateDirectory(_Path + "Data" + _S + "Fulltext"); - Directory.CreateDirectory(_Path + "Views"); - Directory.CreateDirectory(_Path + "Logs"); - Directory.CreateDirectory(_Path + "Temp"); - Directory.CreateDirectory(_Path + "Backup"); - Directory.CreateDirectory(_Path + "Restore"); - Directory.CreateDirectory(_Path + "Restore" + _S + "Done"); - // load logger - LogManager.Configure(_Path + "Logs" + _S + "log.txt", 500, false); - - _log.Debug("\r\n\r\nRaptorDB starting..."); - _log.Debug("RaptorDB data folder = " + _Path); - - // check doc & file storage file version and upgrade if needed here - int v = StorageFile.GetStorageFileHeaderVersion(_Path + "Data" + _S + "data"); - if (v < StorageFile._CurrentVersion) - UpgradeStorageFile(_Path + "Data" + _S + "data", v); - - v = StorageFile.GetStorageFileHeaderVersion(_Path + "Data" + _S + "files"); - if (v < StorageFile._CurrentVersion) - UpgradeStorageFile(_Path + "Data" + _S + "files", v); - - _objStore = new KeyStore(_Path + "Data" + _S + "data", true); - _fileStore = new KeyStore(_Path + "Data" + _S + "files", true); - - _viewManager = new Views.ViewManager(_Path + "Views", _objStore); - - // load _LastFulltextIndexed - if (File.Exists(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec")) - { - byte[] b = File.ReadAllBytes(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec"); - _LastFulltextIndexed = Helper.ToInt32(b, 0, false); - } - // load _LastRecordNumberProcessed - if (File.Exists(_Path + "Data" + _S + "_lastrecord.rec")) - { - byte[] b = File.ReadAllBytes(_Path + "Data" + _S + "_lastrecord.rec"); - _LastRecordNumberProcessed = Helper.ToInt32(b, 0, false); - } - // load _LastBackupRecordNumber - if (File.Exists(_Path + "Backup" + _S + "LastBackupRecord.rec")) - { - 
byte[] b = File.ReadAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec"); - _LastBackupRecordNumber = Helper.ToInt32(b, 0, false); - } - _CurrentRecordNumber = _objStore.RecordCount(); - - otherviews = this.GetType().GetMethod("SaveInOtherViews", BindingFlags.Instance | BindingFlags.NonPublic); - save = this.GetType().GetMethod("Save", BindingFlags.Instance | BindingFlags.Public); - saverep = this.GetType().GetMethod("SaveReplicationObject", BindingFlags.Instance | BindingFlags.NonPublic); - - _fulltextindex = new FullTextIndex(_Path + "Data" + _S + "Fulltext", "fulltext", true, false); - - // start backround save to views - _saveTimer = new System.Timers.Timer(Global.BackgroundSaveViewTimer * 1000); - _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); - _saveTimer.Enabled = true; - _saveTimer.AutoReset = true; - _saveTimer.Start(); - - // start full text timer - _fulltextTimer = new System.Timers.Timer(Global.FullTextTimerSeconds * 1000); - _fulltextTimer.Elapsed += new System.Timers.ElapsedEventHandler(_fulltextTimer_Elapsed); - _fulltextTimer.Enabled = true; - _fulltextTimer.AutoReset = true; - _fulltextTimer.Start(); - - // start free memory timer - _freeMemTimer = new System.Timers.Timer(Global.FreeMemoryTimerSeconds * 1000); - _freeMemTimer.Elapsed += new System.Timers.ElapsedEventHandler(_freeMemTimer_Elapsed); - _freeMemTimer.Enabled = true; - _freeMemTimer.AutoReset = true; - _freeMemTimer.Start(); - - // start inbox procesor timer - _processinboxTimer = new System.Timers.Timer(Global.ProcessInboxTimerSeconds * 1000); - _processinboxTimer.Elapsed += new System.Timers.ElapsedEventHandler(_processinboxTimer_Elapsed); - _processinboxTimer.Enabled = true; - _processinboxTimer.AutoReset = true; - _processinboxTimer.Start(); - - // start cron daemon - _cron = new CronDaemon(); - _cron.AddJob(Global.BackupCronSchedule, () => this.Backup()); - - // compile & register view files - CompileAndRegisterScriptViews(_Path + "Views"); - - 
- if (File.Exists(_Path + "RaptorDB-Replication.config")) - { - // if replication.config exists -> start replication server - _repserver = new Replication.ReplicationServer(_Path, File.ReadAllText(_Path + "RaptorDB-Replication.config"), _objStore); - } - else if (File.Exists(_Path + "RaptorDB-Branch.config")) - { - // if branch.config exists -> start replication client - _repclient = new Replication.ReplicationClient(_Path, File.ReadAllText(_Path + "RaptorDB-Branch.config"), _objStore); - } - - _objHF = new KeyStoreHF(_Path + "DataHF"); - } - - object _inboxlock = new object(); - void _processinboxTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - lock (_inboxlock) - { - string d = _Path + "Replication" + _S + "Inbox"; - if (Directory.Exists(d) == false) - return; - - // start inbox processing timer - ProcessReplicationInbox(d); - - foreach (var f in Directory.GetDirectories(d)) - ProcessReplicationInbox(f); - } - } - - private void CompileAndRegisterScriptViews(string viewfolder) - { - // compile & register views - string[] files = Directory.GetFiles(viewfolder, "*.view"); - MethodInfo register = this.GetType().GetMethod("RegisterView", BindingFlags.Instance | BindingFlags.Public); - foreach (var fn in files) - { - Assembly a = CompileScript(fn); - if (a != null) - { - foreach (var t in a.GetTypes()) - { - foreach (var att in t.GetCustomAttributes(typeof(RegisterViewAttribute), false)) - { - try - { - object o = Activator.CreateInstance(t); - // handle types when view also - Type[] args = t.GetGenericArguments(); - if (args.Length == 0) - args = t.BaseType.GetGenericArguments(); - Type tt = args[0]; - var m = register.MakeGenericMethod(new Type[] { tt }); - m.Invoke(this, new object[] { o }); - } - catch (Exception ex) - { - _log.Error(ex); - } - } - } - } - } - } - - private Assembly CompileScript(string file) - { - try - { - _log.Debug("Compiling script view : " + file); - CodeDomProvider compiler = CodeDomProvider.CreateProvider("CSharp"); - - 
CompilerParameters compilerparams = new CompilerParameters(); - compilerparams.GenerateInMemory = false; - compilerparams.GenerateExecutable = false; - compilerparams.OutputAssembly = file.Replace(".view", ".dll"); - compilerparams.CompilerOptions = "/optimize"; - - Regex regex = new Regex( - @"\/\/\s*ref\s*\:\s*(?.*)", - System.Text.RegularExpressions.RegexOptions.IgnoreCase); - - compilerparams.ReferencedAssemblies.Add(typeof(View<>).Assembly.Location); - compilerparams.ReferencedAssemblies.Add(typeof(object).Assembly.Location); - compilerparams.ReferencedAssemblies.Add(typeof(ICustomTypeDescriptor).Assembly.Location); - - foreach (Match m in regex.Matches(File.ReadAllText(file))) - { - string str = m.Groups["refs"].Value.Trim(); -#pragma warning disable 618 - Assembly a = Assembly.LoadWithPartialName(Path.GetFileNameWithoutExtension(str));//load from GAC if possible -#pragma warning restore 618 - if (a != null) - compilerparams.ReferencedAssemblies.Add(a.Location); - else - { - string assm = Path.GetDirectoryName(this.GetType().Assembly.Location) + _S + str; - a = Assembly.LoadFrom(assm); - if (a != null) - compilerparams.ReferencedAssemblies.Add(a.Location); - else - _log.Error("unable to find referenced file for view compiling : " + str); - } - } - - CompilerResults results = compiler.CompileAssemblyFromFile(compilerparams, file); - - if (results.Errors.HasErrors == true) - { - _log.Error("Error compiling view definition : " + file); - foreach (var e in results.Errors) - _log.Error(e.ToString()); - return null; - } - - return results.CompiledAssembly; - } - catch (Exception ex) - { - _log.Error("Error compiling view definition : " + file); - _log.Error(ex); - return null; - } - } - - void _freeMemTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - long l = GC.GetTotalMemory(true) / (1024 * 1024); - _log.Debug("GC.GetTotalMemory() = " + l.ToString("#,0")); - if (l > Global.MemoryLimit) - { - _log.Debug("Freeing memory on " + 
Global.MemoryLimit.ToString("#,0") + " limit ..."); - _viewManager.FreeMemory(); - _fulltextindex.FreeMemory(); - _objStore.FreeMemory(); - _fileStore.FreeMemory(); - _objHF.FreeMemory(); - GC.Collect(2); - } - } - - private void UpgradeStorageFile(string filename, int ver) - { - _log.Debug("Upgrading storage file version from " + ver + " to " + StorageFile._CurrentVersion + " on file : " + filename); - throw new Exception("not implemented yet - contact the author if you need this functionality"); - // FEATURE : upgrade from v0 to v1 - - // FEATURE : upgrade from v1 to v2 - // read from one file and write to the other - } - - //private void CurrentDomain_ProcessExit(object sender, EventArgs e) - //{ - - // _log.Debug("appdomain closing"); - // Shutdown(); - //} - - private object _slock = new object(); - private void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - if (_shuttingdown) - return; - - if (Global.BackgroundSaveToOtherViews == false) - return; - - if (_CurrentRecordNumber == 0) - return; - - if (_CurrentRecordNumber == _LastRecordNumberProcessed) - return; - - lock (_slock) - { - int batch = Global.BackgroundViewSaveBatchSize; - while (batch > 0) - { - if (_shuttingdown) - return; - while (_pauseindexer) Thread.Sleep(0); - if (_CurrentRecordNumber == _LastRecordNumberProcessed) - return; - _LastRecordNumberProcessed++; - StorageItem meta = null; - object obj = _objStore.GetObject(_LastRecordNumberProcessed, out meta); - if (meta != null && meta.isDeleted) - _viewManager.Delete(meta.key); - else - { - if (obj == null) - { - _log.Debug("byte[] is null"); - _log.Debug("curr rec = " + _CurrentRecordNumber); - _log.Debug("last rec = " + _LastRecordNumberProcessed); - continue; - } - - var m = otherviews.MakeGenericMethod(new Type[] { obj.GetType() }); - m.Invoke(this, new object[] { meta.key, obj }); - } - - batch--; - } - } - } - - private object _flock = new object(); - void _fulltextTimer_Elapsed(object sender, 
System.Timers.ElapsedEventArgs e) - { - if (_shuttingdown) - return; - - if (_CurrentRecordNumber == 0) - return; - - if (_CurrentRecordNumber == _LastFulltextIndexed) - return; - - lock (_flock) - { - int batch = Global.BackgroundFullTextIndexBatchSize; - while (batch > 0) - { - if (_shuttingdown) - return; - //_log.Debug("batch full text indexing..."); - while (_pauseindexer) Thread.Sleep(0); - if (_CurrentRecordNumber == _LastFulltextIndexed) - return; - _LastFulltextIndexed++; - StorageItem meta = null; - object obj = _objStore.GetObject(_LastFulltextIndexed, out meta); - if (meta != null && meta.isDeleted == false) - { - if (obj != null) - { - // normal string and normal guid - string json = fastJSON.JSON.ToJSON(obj, new fastJSON.JSONParameters { UseEscapedUnicode = false, UseFastGuid = false }); - _fulltextindex.Set(json, _LastFulltextIndexed); - } - } - batch--; - } - - return; - } - } - - private MethodInfo GetSave(Type type) - { - MethodInfo m = null; - if (_savecache.TryGetValue(type, out m)) - return m; - - m = save.MakeGenericMethod(new Type[] { type }); - _savecache.Add(type, m); - return m; - } - - private MethodInfo GetSaveReplicate(Type type) - { - MethodInfo m = null; - if (_saverepcache.TryGetValue(type, out m)) - return m; - - m = saverep.MakeGenericMethod(new Type[] { type }); - _saverepcache.Add(type, m); - return m; - } - #endregion - - internal object GetAssemblyForView(string viewname, out string typename) - { - return _viewManager.GetAssemblyForView(viewname, out typename); - } - - /// - /// Get the current registered views - /// - /// - public List GetViews() - { - return _viewManager.GetViews(); - } - - /// - /// Get the schema for a view - /// - /// - /// - public ViewRowDefinition GetSchema(string view) - { - return _viewManager.GetSchema(view); - } - - /// - /// Query a view with paging and ordering - /// - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, string filter, int start, int count, string orderby) - { 
- return _viewManager.Query(viewname, filter, start, count, orderby); - } - - /// - /// Query a view with paging and ordering - /// - /// - /// - /// - /// - /// - /// - public Result Query(string filter, int start, int count, string orderby) - { - return _viewManager.Query(filter, start, count, orderby); - } - - /// - /// Get the history information for a document - /// - /// - /// - public HistoryInfo[] FetchHistoryInfo(Guid docid) - { - List h = new List(); - - foreach (int i in FetchHistory(docid)) - { - HistoryInfo hi = new HistoryInfo(); - hi.Version = i; - var o = _objStore.GetMeta(i); - hi.ChangeDate = o.date; - if (o.isDeleted == false) - h.Add(hi); - } - return h.ToArray(); - } - - /// - /// Get the history information for a file - /// - /// - /// - public HistoryInfo[] FetchBytesHistoryInfo(Guid docid) - { - List h = new List(); - - foreach (int i in FetchBytesHistory(docid)) - { - HistoryInfo hi = new HistoryInfo(); - hi.Version = i; - var o = _fileStore.GetMeta(i); - hi.ChangeDate = o.date; - if (o.isDeleted == false) - h.Add(hi); - } - return h.ToArray(); - } - - /// - /// Direct delete from a view - /// - /// - /// - /// - public int ViewDelete(Expression> filter) - { - // do the delete - int c = _viewManager.ViewDelete(filter); - if (c > 0) - { - // save this filter to docs - View_delete vd = new View_delete(); - LINQString lq = new LINQString(); - lq.Visit(filter); - vd.Filter = lq.sb.ToString(); - vd.Viewname = _viewManager.GetViewName(typeof(TRowSchema)); - _objStore.SetObject(vd.ID, vd); - } - return c; - } - - /// - /// Direct delete from a view - /// - /// - /// - /// - public int ViewDelete(string viewname, string filter) - { - // do the delete - int c = _viewManager.ViewDelete(viewname, filter); - if (c > 0) - { - // save this filter to docs - View_delete vd = new View_delete(); - vd.Filter = filter; - vd.Viewname = viewname; - _objStore.SetObject(vd.ID, vd); - } - return c; - } - - /// - /// Direct insert into a view - /// - /// - /// - /// 
- /// - public bool ViewInsert(Guid id, TRowSchema row) - { - string vn = _viewManager.GetViewName(typeof(TRowSchema)); - if (vn != "") - { - if (_viewManager.ViewInsert(id, row)) - { - View_insert vi = new View_insert(); - vi.Viewname = vn; - vi.RowObject = row; - _objStore.SetObject(vi.ID, vi); - return true; - } - } - return false; - } - - /// - /// Direct insert into a view - /// - /// - /// - /// - /// - public bool ViewInsert(string viewname, Guid id, object row) - { - if (_viewManager.ViewInsert(viewname, id, row)) - { - View_insert vi = new View_insert(); - vi.Viewname = viewname; - vi.RowObject = row; - _objStore.SetObject(vi.ID, vi); - return true; - } - return false; - } - - /// - /// Total number of documents in the storage file including duplicates - /// - /// - public long DocumentCount() - { - return _objStore.Count(); - } - - public IKeyStoreHF GetKVHF() - { - return _objHF; - } - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.IO; +using System.Threading; +using System.Collections; +using RaptorDB.Views; +using System.Linq.Expressions; +using System.Threading.Tasks; +using System.Reflection; +using RaptorDB.Common; +using System.IO.Compression; +using System.CodeDom.Compiler; +using System.Text.RegularExpressions; +using System.ComponentModel; + +namespace RaptorDB +{ + public class RaptorDB : IRaptorDB + { + private RaptorDB(string FolderPath) + { + // speed settings + fastJSON.JSON.Parameters.ParametricConstructorOverride = true; + fastBinaryJSON.BJSON.Parameters.ParametricConstructorOverride = true; + fastJSON.JSON.Parameters.UseEscapedUnicode = false; + + if (_S == "/") + FolderPath = FolderPath.Replace("\\", "/"); + // create folders + Directory.CreateDirectory(FolderPath); + string foldername = Path.GetFullPath(FolderPath); + if (foldername.EndsWith(_S) == false) + foldername += _S; + _Path = foldername; + + // if configs !exists create template config files + CreateTemplateConfigFiles(); + + 
Initialize(); + } + + private void CreateTemplateConfigFiles() + { + if (File.Exists(_Path + "RaptorDB.config") == false) + File.WriteAllText(_Path + "-RaptorDB.config", fastJSON.JSON.ToNiceJSON(new Global(), new fastJSON.JSONParameters { UseExtensions = false })); + + if (File.Exists(_Path + "RaptorDB-Branch.config") == false) + File.WriteAllText(_Path + "-RaptorDB-Branch.config", fastJSON.JSON.ToNiceJSON(new Replication.ClientConfiguration(), new fastJSON.JSONParameters { UseExtensions = false })); + + if (File.Exists(_Path + "RaptorDB-Replication.config") == false) + { + Replication.ServerConfiguration s = new Replication.ServerConfiguration(); + s.What.Add(new Replication.WhatItem { Name = "default", PackageItemLimit = 10000, Version = 1, B2HQtypes = new List { "*" }, HQ2Btypes = new List { "*" } }); + s.What.Add(new Replication.WhatItem { Name = "b2", PackageItemLimit = 10000, Version = 1, B2HQtypes = new List { "*" }, HQ2Btypes = new List { "config.*" } }); + s.Where.Add(new Replication.WhereItem { BranchName = "b1", Password = "123", When = "*/5 * * * *", What = "default" }); + s.Where.Add(new Replication.WhereItem { BranchName = "b2", Password = "321", When = "*/20 * * * *", What = "b2" }); + File.WriteAllText(_Path + "-RaptorDB-Replication.config", fastJSON.JSON.ToNiceJSON(s, new fastJSON.JSONParameters { UseExtensions = false })); + } + } + + public static RaptorDB Open(string FolderPath) + { + return new RaptorDB(FolderPath); + } + + private string _S = Path.DirectorySeparatorChar.ToString(); + private ILog _log = LogManager.GetLogger(typeof(RaptorDB)); + private Views.ViewManager _viewManager; + private KeyStore _objStore; + private KeyStore _fileStore; + private KeyStoreHF _objHF; + private string _Path = string.Empty; + private int _LastRecordNumberProcessed = -1; // used by background saver + private int _LastFulltextIndexed = -1; // used by the fulltext indexer + private int _LastBackupRecordNumber = -1; + private int _CurrentRecordNumber = -1; + 
private System.Timers.Timer _saveTimer; + private System.Timers.Timer _fulltextTimer; + private System.Timers.Timer _freeMemTimer; + private System.Timers.Timer _processinboxTimer; + private bool _shuttingdown = false; + private bool _pauseindexer = false; + private MethodInfo otherviews = null; + private MethodInfo save = null; + private MethodInfo saverep = null; + private SafeDictionary _savecache = new SafeDictionary(); + private SafeDictionary _saverepcache = new SafeDictionary(); + private FullTextIndex _fulltextindex; + private CronDaemon _cron; + private Replication.ReplicationServer _repserver; + private Replication.ReplicationClient _repclient; + //private bool _disposed = false; + //private bool _clientReplicationEnabled; + + //public bool SyncNow(string server, int port, string username, string password) + //{ + + // return false; + //} + + #region [ p u b l i c i n t e r f a c e ] + /// + /// Save files to RaptorDB + /// + /// + /// + public bool SaveBytes(Guid docID, byte[] bytes) + { + // save files in storage + _fileStore.SetBytes(docID, bytes); + return true; + } + /// + /// Delete a document (note data is not lost just flagged as deleted) + /// + /// + /// + public bool Delete(Guid docid) + { + bool b = _objStore.Delete(docid); + _viewManager.Delete(docid); + return b; + } + + /// + /// Delete a file (note data is not lost just flagged as deleted) + /// + /// + /// + public bool DeleteBytes(Guid bytesid) + { + return _fileStore.Delete(bytesid); + } + + /// + /// Save a document + /// + /// + /// + /// + /// + public bool Save(Guid docid, T data) + { + string viewname = _viewManager.GetPrimaryViewForType(data.GetType()); + if (viewname == "" && Global.RequirePrimaryView == true) + { + _log.Debug("Primary View not defined for object : " + data.GetType()); + return false; + } + _pauseindexer = true; + if (viewname != "" && _viewManager.isTransaction(viewname)) + { + _log.Debug("TRANSACTION started for docid : " + docid); + // add code here + 
_viewManager.StartTransaction(); + + bool b = SaveInPrimaryViewTransaction(viewname, docid, data); + if (b == true) + { + b = SaveToConsistentViewsTransaction(docid, data); + if (b == true) + { + b = SaveInOtherViewsTransaction(docid, data); + if (b == true) + { + _viewManager.Commit(Thread.CurrentThread.ManagedThreadId); + int recnum = _objStore.SetObject(docid, data); + _CurrentRecordNumber = recnum; + _pauseindexer = false; + return true; + } + } + } + _viewManager.Rollback(Thread.CurrentThread.ManagedThreadId); + _pauseindexer = false; + return false; + } + else + { + int recnum = _objStore.SetObject(docid, data); + _CurrentRecordNumber = recnum; + + if (viewname != "") + { + SaveInPrimaryView(viewname, docid, data); + + SaveToConsistentViews(docid, data); + + if (Global.BackgroundSaveToOtherViews == false) + { + SaveInOtherViews(docid, data); + _LastRecordNumberProcessed = recnum; + } + } + _pauseindexer = false; + return true; + } + } + + /// + /// Query any view -> get all rows + /// + /// + /// + /// + public IResult Query(string viewname) + { + return _viewManager.Query(viewname, 0, -1); + } + + /// + /// Query a view using a string filter + /// + /// + /// + /// + public IResult Query(string viewname, string filter) + { + if (string.IsNullOrEmpty(filter)) + return _viewManager.Query(viewname, 0, -1); + + return _viewManager.Query(viewname, filter, 0, -1); + } + + /// + /// Fetch a document by it's ID + /// + /// + /// + public object Fetch(Guid docID) + { + object b = null; + _objStore.GetObject(docID, out b); + return b; + } + + /// + /// Fetch file data by it's ID + /// + /// + /// + public byte[] FetchBytes(Guid fileID) + { + byte[] b = null; + if (_fileStore.GetBytes(fileID, out b)) + return b; + else + return null; + } + + /// + /// Register a view + /// + /// + /// + public void RegisterView(View view) + { + _viewManager.RegisterView(view); + } + + /// + /// Shutdown the database engine and flush memory to disk + /// + public void Shutdown() + { + 
if (_shuttingdown == true) + return; + + _shuttingdown = true; + + _processinboxTimer.Enabled = false; + _saveTimer.Enabled = false; + _freeMemTimer.Enabled = false; + _fulltextTimer.Enabled = false; + + if (_repserver != null) + _repserver.Shutdown(); + + if (_repclient != null) + _repclient.Shutdown(); + + // TODO : write global or something else? + //if (File.Exists(_Path + "RaptorDB.config") == false) + File.WriteAllText(_Path + "RaptorDB.config", fastJSON.JSON.ToNiceJSON(new Global(), new fastJSON.JSONParameters { UseExtensions = false })); + if (_cron != null) + _cron.Stop(); + _fulltextindex.Dispose(); + + _log.Debug("Shutting down"); + _saveTimer.Stop(); + _fulltextTimer.Stop(); + _viewManager.ShutDown(); + _objStore.Shutdown(); + _fileStore.Shutdown(); + _objHF.Shutdown(); + + // save records + _log.Debug("last full text record = " + _LastFulltextIndexed); + File.WriteAllBytes(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec", Helper.GetBytes(_LastFulltextIndexed, false)); + _log.Debug("last record = " + _LastRecordNumberProcessed); + File.WriteAllBytes(_Path + "Data" + _S + "_lastrecord.rec", Helper.GetBytes(_LastRecordNumberProcessed, false)); + _log.Debug("last backup record = " + _LastBackupRecordNumber); + File.WriteAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec", Helper.GetBytes(_LastBackupRecordNumber, false)); + + _log.Debug("Shutting down log."); + _log.Debug("RaptorDB done."); + LogManager.Shutdown(); + } + + + #region [ BACKUP/RESTORE and REPLICATION ] + private object _backuplock = new object(); + /// + /// Backup the document storage file incrementally to "Backup" folder + /// + /// True = done + public bool Backup() + { + if (_LastBackupRecordNumber >= _CurrentRecordNumber) + return false; + lock (_backuplock) + { + _log.Debug("Backup Started..."); + string tempp = _Path + "Temp" + _S + DateTime.Now.ToString("yyyy-MM-dd-HH-mm"); + Directory.CreateDirectory(tempp); + StorageFile backup = new StorageFile(tempp + _S + 
"backup.mgdat", SF_FORMAT.BSON, true); + _log.Debug("Copying data to backup"); + if (_LastBackupRecordNumber == -1) + _LastBackupRecordNumber = 0; + int rec = _objStore.CopyTo(backup, _LastBackupRecordNumber); + backup.Shutdown(); + + _log.Debug("Last backup rec# = " + rec); + + // compress the file + using (FileStream read = File.OpenRead(tempp + _S + "backup.mgdat")) + using (FileStream outp = File.Create(tempp + _S + "backup.mgdat.gz")) + CompressForBackup(read, outp); + + _log.Debug("Backup compressed and done"); + File.Move(tempp + _S + "backup.mgdat.gz", _Path + "Backup" + _S + DateTime.Now.ToString("yyyy-MM-dd-HH-mm") + ".mgdat.gz"); + + _log.Debug("last backup record = " + _LastBackupRecordNumber); + File.WriteAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec", Helper.GetBytes(_LastBackupRecordNumber, false)); + // cleanup temp folder + Directory.Delete(tempp, true); + _log.Debug("Backup done."); + _LastBackupRecordNumber = rec; + return true; + } + } + + private DateTime _lastFailedTime = DateTime.Now; + private object _replock = new object(); + private void ProcessReplicationInbox(string inboxfolder) + { + lock (_replock) + { + if (Directory.Exists(inboxfolder) == false) + return; + + string[] files = Directory.GetFiles(inboxfolder, "*.counter"); + + // check if ".counter" file exists + if (files.Length > 0) + { + // FEATURE: if lastfailedtime < 15 -> wait 15 min and retry (avoid extra cpu burning) + // recovery mode + string fn = files[0]; + int start = -1; + if (int.TryParse(File.ReadAllText(fn).Trim(), out start)) + { + if (DoRepProcessing(fn.Replace(".counter", ".mgdat"), start) == false) + return; + } + else + { + _log.Error("Unable to parse counter value in : " + fn); + return; + } + } + + files = Directory.GetFiles(inboxfolder, "*.gz"); + + Array.Sort(files); + foreach (var filename in files) + { + + string tmp = filename.Replace(".gz", "");// FEATURE : to temp folder ?? 
+ if (File.Exists(tmp)) + File.Delete(tmp); + using (FileStream read = File.OpenRead(filename)) + using (FileStream outp = File.Create(tmp)) + DecompressForRestore(read, outp); + _log.Debug("Uncompress done : " + Path.GetFileName(tmp)); + if (DoRepProcessing(tmp, 0) == false) + return; + if (_shuttingdown) + return; + } + } + } + + private bool DoRepProcessing(string filename, int start) + { + string fn = Path.GetFileNameWithoutExtension(filename); + string path = Path.GetDirectoryName(filename); + StorageFile sf = StorageFile.ReadForward(filename); + int counter = 0; + if (start > 0) + _log.Debug("skipping replication items : " + start); + foreach (var i in sf.ReadOnlyEnumerate()) + { + if (start > 0) // skip already done + { + start--; + counter++; + } + else + { + if (i.meta.isDeleted) + DeleteReplicate(i.meta.key); + else + { + try + { + object obj = CreateObject(i.data); + var m = GetSaveReplicate(obj.GetType()); + m.Invoke(this, new object[] { i.meta.key, obj }); + } + catch (Exception ex) + { + _log.Error(ex); + sf.Shutdown(); + string err = Properties.Resources.msg.Replace("%js%", fastJSON.JSON.Beautify(Helper.GetString(i.data, 0, (short)i.data.Length))) + .Replace("%ex%", "" + ex) + .Replace("%c%", path + _S + fn + ".counter"); + + File.WriteAllText(path + _S + fn + ".error.txt", err); + _lastFailedTime = DateTime.Now; + return false; + } + } + counter++; + File.WriteAllText(path + _S + fn + ".counter", "" + counter); + if (_shuttingdown) + { + _log.Debug("shutting down before replicate data completed..."); + sf.Shutdown(); + return false; + } + } + } + sf.Shutdown(); + _log.Debug("File replicate complete : " + Path.GetFileName(filename)); + foreach (var f in Directory.GetFiles(path, fn + ".*")) + File.Delete(f); + return true; + } + + private void DeleteReplicate(Guid docid) + { + _objStore.DeleteReplicated(docid); + _viewManager.Delete(docid); + } + + private object _restoreLock = new object(); + /// + /// Start background restore of backups in the 
"Restore" folder + /// + public void Restore() + { + lock (_restoreLock) + { + try + { + string[] files = Directory.GetFiles(_Path + "Restore", "*.counter"); + // check if ".counter" file exists + if (files.Length > 0) + { + // resume mode + string fn = files[0]; + int start = -1; + if (int.TryParse(File.ReadAllText(fn).Trim(), out start)) + { + if (DoRestoreProcessinng(fn.Replace(".counter", ".mgdat"), start) == false) + return; + } + else + { + _log.Error("Unable to parse counter value in : " + fn); + return; + } + } + // do restore + files = Directory.GetFiles(_Path + "Restore", "*.gz"); + Array.Sort(files); + _log.Debug("Restoring file count = " + files.Length); + + foreach (string file in files) + { + string tmp = file.Replace(".gz", "");// FEATURE : to temp folder ?? + if (File.Exists(tmp)) + File.Delete(tmp); + using (FileStream read = File.OpenRead(file)) + using (FileStream outp = File.Create(tmp)) + DecompressForRestore(read, outp); + _log.Debug("Uncompress done : " + Path.GetFileName(tmp)); + + if (DoRestoreProcessinng(tmp, 0)) + File.Move(file, _Path + "Restore" + _S + "Done" + _S + Path.GetFileName(file)); + } + } + catch (Exception ex) + { + _log.Error(ex); + } + } + } + + private bool DoRestoreProcessinng(string filename, int start) + { + string fn = Path.GetFileNameWithoutExtension(filename); + string path = Path.GetDirectoryName(filename); + int counter = 0; + StorageFile sf = StorageFile.ReadForward(filename); + foreach (var i in sf.ReadOnlyEnumerate()) + { + if (start > 0) + { + start--; + counter++; + } + else + { + if (i.meta.isDeleted) + Delete(i.meta.key); + else + { + object obj = CreateObject(i.data); + var m = GetSave(obj.GetType()); + m.Invoke(this, new object[] { i.meta.key, obj }); + } + counter++; + File.WriteAllText(path + _S + fn + ".counter", "" + counter); + if (_shuttingdown) + { + _log.Debug("shutting down before restore completed..."); + sf.Shutdown(); + return false; + } + } + } + sf.Shutdown(); + _log.Debug("File restore 
complete : " + Path.GetFileName(filename)); + foreach (var f in Directory.GetFiles(path, fn + ".*")) + File.Delete(f); + + return true; + } + + private bool SaveReplicationObject(Guid docid, T data) + { + string viewname = _viewManager.GetPrimaryViewForType(data.GetType()); + if (viewname == "") + { + _log.Debug("Primary View not defined for object : " + data.GetType()); + return false; + } + _pauseindexer = true; + int recnum = _objStore.SetReplicationObject(docid, data); + _CurrentRecordNumber = recnum; + + SaveInPrimaryView(viewname, docid, data); + + SaveToConsistentViews(docid, data); + + if (Global.BackgroundSaveToOtherViews == false) + { + SaveInOtherViews(docid, data); + _LastRecordNumberProcessed = recnum; + } + _pauseindexer = false; + return true; + } + #endregion + + /// + /// Add a user (only supported in server mode) + /// + /// + /// + /// + /// + public bool AddUser(string username, string oldpassword, string newpassword) + { + return false; + } + + /// + /// Execute a server side string filter query + /// + /// + /// + /// + public object[] ServerSide(ServerSideFunc func, string filter) + { + return func(this, filter).ToArray(); + } + + /// + /// Execute a server side LINQ query + /// + /// + /// + /// + /// + public object[] ServerSide(ServerSideFunc func, Expression> filter) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + return func(this, ls.sb.ToString()).ToArray(); + } + + /// + /// Full text search the entire original document + /// + /// + /// + public int[] FullTextSearch(string filter) + { + var wbmp = _fulltextindex.QueryContains(filter); + List a = new List(); + a.AddRange(wbmp.GetBitIndexes()); + + return a.ToArray(); + } + + /// + /// Query a view + /// + /// + /// + /// + public Result Query(Expression> filter) + { + return _viewManager.Query(filter, 0, -1); + } + + /// + /// Query a view with paging + /// + /// + /// + /// + /// + /// + public Result Query(Expression> filter, int start, int count) + { + return 
_viewManager.Query(filter, start, count, ""); + } + + /// + /// Query a view with paging and order by + /// + /// + /// + /// + /// + /// + /// + public Result Query(Expression> filter, int start, int count, string orderby) + { + return _viewManager.Query(filter, start, count, orderby); + } + + /// + /// Query a view + /// + /// + /// + /// + public Result Query(string filter) + { + return _viewManager.Query(filter, 0, -1); + } + + /// + /// Query a view with paging + /// + /// + /// + /// + /// + /// + public Result Query(string filter, int start, int count) + { + return _viewManager.Query(filter, start, count); + } + + /// + /// Count with filter + /// + /// + /// + /// + public int Count(Expression> filter) + { + return _viewManager.Count(filter); + } + + /// + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, int start, int count) + { + return _viewManager.Query(viewname, start, count); + } + + /// + /// + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, string filter, int start, int count) + { + return _viewManager.Query(viewname, filter, start, count); + } + + /// + /// Count all data associated with View name + /// + /// + /// + public int Count(string viewname) + { + return _viewManager.Count(viewname, ""); + } + + /// + /// Count all data associated with View name and string filter + /// + /// + /// + /// + public int Count(string viewname, string filter) + { + return _viewManager.Count(viewname, filter); + } + + /// + /// Fetch the change history for a document + /// + /// + /// + public IEnumerable FetchHistory(Guid docid) + { + return _objStore.GetDuplicates(docid); + } + + /// + /// Fetch a change history for a file + /// + /// + /// + public IEnumerable FetchBytesHistory(Guid fileid) + { + return _fileStore.GetDuplicates(fileid); + } + + /// + /// Fetch the specific document version + /// + /// + /// + public object FetchVersion(int versionNumber) + { + StorageItem meta = null; + return 
_objStore.GetObject(versionNumber, out meta); + } + + /// + /// Fetch the specific file version + /// + /// + /// + public byte[] FetchBytesVersion(int versionNumber) + { + StorageItem meta = null; + return _fileStore.GetBytes(versionNumber, out meta); + } + #endregion + + #region [ P R I V A T E M E T H O D S ] + + internal string GetViewName(Type type) + { + return _viewManager.GetViewName(type); + } + + private bool SaveToView(Guid docid, T data, List list) + { + if (list != null) + foreach (string name in list) + { + bool ret = _viewManager.InsertTransaction(name, docid, data); + if (ret == false) + return false; + } + return true; + } + + private bool SaveInOtherViewsTransaction(Guid docid, T data) + { + List list = _viewManager.GetOtherViewsList(data.GetType()); + return SaveToView(docid, data, list); + } + + private bool SaveToConsistentViewsTransaction(Guid docid, T data) + { + List list = _viewManager.GetConsistentViews(data.GetType()); + return SaveToView(docid, data, list); + } + + private bool SaveInPrimaryViewTransaction(string viewname, Guid docid, T data) + { + return _viewManager.InsertTransaction(viewname, docid, data); + } + + private static void PumpDataForBackup(Stream input, Stream output) + { + byte[] bytes = new byte[4096 * 2]; + int n; + while ((n = input.Read(bytes, 0, bytes.Length)) != 0) + output.Write(bytes, 0, n); + } + + private static void CompressForBackup(Stream source, Stream destination) + { + using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) + PumpDataForBackup(source, gz); + } + + private static void DecompressForRestore(Stream source, Stream destination) + { + using (GZipStream gz = new GZipStream(source, CompressionMode.Decompress)) + PumpDataForBackup(gz, destination); + } + + private void SaveToConsistentViews(Guid docid, T data) + { + List list = _viewManager.GetConsistentViews(data.GetType()); + if (list != null) + foreach (string name in list) + { + _log.Debug("Saving to consistent view : " + 
name); + _viewManager.Insert(name, docid, data); + } + } + + private object CreateObject(byte[] b) + { + if (b[0] < 32) + return fastBinaryJSON.BJSON.ToObject(b); + else + return fastJSON.JSON.ToObject(Encoding.ASCII.GetString(b)); + } + + private void SaveInOtherViews(Guid docid, T data) + { + List list = _viewManager.GetOtherViewsList(data.GetType()); + if (list != null) + foreach (string name in list) + _viewManager.Insert(name, docid, data); + } + + private void SaveInPrimaryView(string viewname, Guid docid, T data) + { + _viewManager.Insert(viewname, docid, data); + } + + private void Initialize() + { + //AppDomain.CurrentDomain.ProcessExit += new EventHandler(CurrentDomain_ProcessExit); + + // TODO : read/write global or another object? + // read raptordb.config here (running parameters) + if (File.Exists(_Path + "RaptorDB.config")) + fastJSON.JSON.FillObject(new Global(), File.ReadAllText(_Path + "RaptorDB.config")); + + Directory.CreateDirectory(_Path + "Data"); + Directory.CreateDirectory(_Path + "Data" + _S + "Fulltext"); + Directory.CreateDirectory(_Path + "Views"); + Directory.CreateDirectory(_Path + "Logs"); + Directory.CreateDirectory(_Path + "Temp"); + Directory.CreateDirectory(_Path + "Backup"); + Directory.CreateDirectory(_Path + "Restore"); + Directory.CreateDirectory(_Path + "Restore" + _S + "Done"); + // load logger + LogManager.Configure(_Path + "Logs" + _S + "log.txt", 500, false); + + _log.Debug("\r\n\r\nRaptorDB starting..."); + _log.Debug("RaptorDB data folder = " + _Path); + + // check doc & file storage file version and upgrade if needed here + int v = StorageFile.GetStorageFileHeaderVersion(_Path + "Data" + _S + "data"); + if (v < StorageFile._CurrentVersion) + UpgradeStorageFile(_Path + "Data" + _S + "data", v); + + v = StorageFile.GetStorageFileHeaderVersion(_Path + "Data" + _S + "files"); + if (v < StorageFile._CurrentVersion) + UpgradeStorageFile(_Path + "Data" + _S + "files", v); + + _objStore = new KeyStore(_Path + "Data" + _S + 
"data", true); + _fileStore = new KeyStore(_Path + "Data" + _S + "files", true); + + _viewManager = new Views.ViewManager(_Path + "Views", _objStore); + + // load _LastFulltextIndexed + if (File.Exists(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec")) + { + byte[] b = File.ReadAllBytes(_Path + "Data" + _S + "Fulltext" + _S + "_fulltext.rec"); + _LastFulltextIndexed = Helper.ToInt32(b, 0, false); + } + // load _LastRecordNumberProcessed + if (File.Exists(_Path + "Data" + _S + "_lastrecord.rec")) + { + byte[] b = File.ReadAllBytes(_Path + "Data" + _S + "_lastrecord.rec"); + _LastRecordNumberProcessed = Helper.ToInt32(b, 0, false); + } + // load _LastBackupRecordNumber + if (File.Exists(_Path + "Backup" + _S + "LastBackupRecord.rec")) + { + byte[] b = File.ReadAllBytes(_Path + "Backup" + _S + "LastBackupRecord.rec"); + _LastBackupRecordNumber = Helper.ToInt32(b, 0, false); + } + _CurrentRecordNumber = _objStore.RecordCount(); + + otherviews = this.GetType().GetMethod("SaveInOtherViews", BindingFlags.Instance | BindingFlags.NonPublic); + save = this.GetType().GetMethod("Save", BindingFlags.Instance | BindingFlags.Public); + saverep = this.GetType().GetMethod("SaveReplicationObject", BindingFlags.Instance | BindingFlags.NonPublic); + + _fulltextindex = new FullTextIndex(_Path + "Data" + _S + "Fulltext", "fulltext", true, false); + + // start backround save to views + _saveTimer = new System.Timers.Timer(Global.BackgroundSaveViewTimer * 1000); + _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); + _saveTimer.Enabled = true; + _saveTimer.AutoReset = true; + _saveTimer.Start(); + + // start full text timer + _fulltextTimer = new System.Timers.Timer(Global.FullTextTimerSeconds * 1000); + _fulltextTimer.Elapsed += new System.Timers.ElapsedEventHandler(_fulltextTimer_Elapsed); + _fulltextTimer.Enabled = true; + _fulltextTimer.AutoReset = true; + _fulltextTimer.Start(); + + // start free memory timer + _freeMemTimer = new 
System.Timers.Timer(Global.FreeMemoryTimerSeconds * 1000); + _freeMemTimer.Elapsed += new System.Timers.ElapsedEventHandler(_freeMemTimer_Elapsed); + _freeMemTimer.Enabled = true; + _freeMemTimer.AutoReset = true; + _freeMemTimer.Start(); + + // start inbox procesor timer + _processinboxTimer = new System.Timers.Timer(Global.ProcessInboxTimerSeconds * 1000); + _processinboxTimer.Elapsed += new System.Timers.ElapsedEventHandler(_processinboxTimer_Elapsed); + _processinboxTimer.Enabled = true; + _processinboxTimer.AutoReset = true; + _processinboxTimer.Start(); + + // start cron daemon + _cron = new CronDaemon(); + _cron.AddJob(Global.BackupCronSchedule, () => this.Backup()); + + // compile & register view files + CompileAndRegisterScriptViews(_Path + "Views"); + + + if (File.Exists(_Path + "RaptorDB-Replication.config")) + { + // if replication.config exists -> start replication server + _repserver = new Replication.ReplicationServer(_Path, File.ReadAllText(_Path + "RaptorDB-Replication.config"), _objStore); + } + else if (File.Exists(_Path + "RaptorDB-Branch.config")) + { + // if branch.config exists -> start replication client + _repclient = new Replication.ReplicationClient(_Path, File.ReadAllText(_Path + "RaptorDB-Branch.config"), _objStore); + } + + _objHF = new KeyStoreHF(_Path + "DataHF"); + } + + object _inboxlock = new object(); + void _processinboxTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + lock (_inboxlock) + { + string d = _Path + "Replication" + _S + "Inbox"; + if (Directory.Exists(d) == false) + return; + + // start inbox processing timer + ProcessReplicationInbox(d); + + foreach (var f in Directory.GetDirectories(d)) + ProcessReplicationInbox(f); + } + } + + private void CompileAndRegisterScriptViews(string viewfolder) + { + // compile & register views + string[] files = Directory.GetFiles(viewfolder, "*.view"); + MethodInfo register = this.GetType().GetMethod("RegisterView", BindingFlags.Instance | BindingFlags.Public); + 
foreach (var fn in files) + { + Assembly a = CompileScript(fn); + if (a != null) + { + foreach (var t in a.GetTypes()) + { + foreach (var att in t.GetCustomAttributes(typeof(RegisterViewAttribute), false)) + { + try + { + object o = Activator.CreateInstance(t); + // handle types when view also + Type[] args = t.GetGenericArguments(); + if (args.Length == 0) + args = t.BaseType.GetGenericArguments(); + Type tt = args[0]; + var m = register.MakeGenericMethod(new Type[] { tt }); + m.Invoke(this, new object[] { o }); + } + catch (Exception ex) + { + _log.Error(ex); + } + } + } + } + } + } + + private Assembly CompileScript(string file) + { + try + { + _log.Debug("Compiling script view : " + file); + CodeDomProvider compiler = CodeDomProvider.CreateProvider("CSharp"); + + CompilerParameters compilerparams = new CompilerParameters(); + compilerparams.GenerateInMemory = false; + compilerparams.GenerateExecutable = false; + compilerparams.OutputAssembly = file.Replace(".view", ".dll"); + compilerparams.CompilerOptions = "/optimize"; + + Regex regex = new Regex( + @"\/\/\s*ref\s*\:\s*(?.*)", + System.Text.RegularExpressions.RegexOptions.IgnoreCase); + + compilerparams.ReferencedAssemblies.Add(typeof(View<>).Assembly.Location); + compilerparams.ReferencedAssemblies.Add(typeof(object).Assembly.Location); + compilerparams.ReferencedAssemblies.Add(typeof(ICustomTypeDescriptor).Assembly.Location); + + foreach (Match m in regex.Matches(File.ReadAllText(file))) + { + string str = m.Groups["refs"].Value.Trim(); +#pragma warning disable 618 + Assembly a = Assembly.LoadWithPartialName(Path.GetFileNameWithoutExtension(str));//load from GAC if possible +#pragma warning restore 618 + if (a != null) + compilerparams.ReferencedAssemblies.Add(a.Location); + else + { + string assm = Path.GetDirectoryName(this.GetType().Assembly.Location) + _S + str; + a = Assembly.LoadFrom(assm); + if (a != null) + compilerparams.ReferencedAssemblies.Add(a.Location); + else + _log.Error("unable to find 
referenced file for view compiling : " + str); + } + } + + CompilerResults results = compiler.CompileAssemblyFromFile(compilerparams, file); + + if (results.Errors.HasErrors == true) + { + _log.Error("Error compiling view definition : " + file); + foreach (var e in results.Errors) + _log.Error(e.ToString()); + return null; + } + + return results.CompiledAssembly; + } + catch (Exception ex) + { + _log.Error("Error compiling view definition : " + file); + _log.Error(ex); + return null; + } + } + + void _freeMemTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + long l = GC.GetTotalMemory(true) / (1024 * 1024); + _log.Debug("GC.GetTotalMemory() = " + l.ToString("#,0")); + if (l > Global.MemoryLimit) + { + _log.Debug("Freeing memory on " + Global.MemoryLimit.ToString("#,0") + " limit ..."); + _viewManager.FreeMemory(); + _fulltextindex.FreeMemory(); + _objStore.FreeMemory(); + _fileStore.FreeMemory(); + _objHF.FreeMemory(); + GC.Collect(2); + } + } + + private void UpgradeStorageFile(string filename, int ver) + { + _log.Debug("Upgrading storage file version from " + ver + " to " + StorageFile._CurrentVersion + " on file : " + filename); + throw new Exception("not implemented yet - contact the author if you need this functionality"); + // FEATURE : upgrade from v0 to v1 + + // FEATURE : upgrade from v1 to v2 + // read from one file and write to the other + } + + //private void CurrentDomain_ProcessExit(object sender, EventArgs e) + //{ + + // _log.Debug("appdomain closing"); + // Shutdown(); + //} + + private object _slock = new object(); + private int _saveprocessing = 0; + private void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + if (_shuttingdown) + return; + + if (Global.BackgroundSaveToOtherViews == false) + return; + + if (_CurrentRecordNumber == 0) + return; + + if (_CurrentRecordNumber == _LastRecordNumberProcessed) + return; + + if (!Monitor.TryEnter(_slock, (int)_saveTimer.Interval)) return; + try + { + int batch = 
Global.BackgroundViewSaveBatchSize; + while (batch > 0) + { + if (_shuttingdown) + return; + int i = 0; + while (_pauseindexer && i++ < 5) Thread.Sleep(0); + if(_pauseindexer) + return; + if (_CurrentRecordNumber == _LastRecordNumberProcessed) + return; + _LastRecordNumberProcessed++; + StorageItem meta = null; + object obj = _objStore.GetObject(_LastRecordNumberProcessed, out meta); + if (meta != null && meta.isDeleted) + _viewManager.Delete(meta.key); + else + { + if (obj == null) + { + _log.Debug("byte[] is null"); + _log.Debug("curr rec = " + _CurrentRecordNumber); + _log.Debug("last rec = " + _LastRecordNumberProcessed); + continue; + } + + var m = otherviews.MakeGenericMethod(new Type[] { obj.GetType() }); + m.Invoke(this, new object[] { meta.key, obj }); + } + + batch--; + } + } + finally{ + Monitor.Exit(_slock); + } + } + + private object _flock = new object(); + void _fulltextTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + if (_shuttingdown) + return; + + if (_CurrentRecordNumber == 0) + return; + + if (_CurrentRecordNumber == _LastFulltextIndexed) + return; + + lock (_flock) + { + int batch = Global.BackgroundFullTextIndexBatchSize; + while (batch > 0) + { + if (_shuttingdown) + return; + //_log.Debug("batch full text indexing..."); + int i = 0; + while (_pauseindexer && i++ < 20) Thread.Sleep(0); + if(_pauseindexer) return; + if (_CurrentRecordNumber == _LastFulltextIndexed) + return; + _LastFulltextIndexed++; + StorageItem meta = null; + object obj = _objStore.GetObject(_LastFulltextIndexed, out meta); + if (meta != null && meta.isDeleted == false) + { + if (obj != null) + { + // normal string and normal guid + string json = fastJSON.JSON.ToJSON(obj, new fastJSON.JSONParameters { UseEscapedUnicode = false, UseFastGuid = false }); + _fulltextindex.Set(json, _LastFulltextIndexed); + } + } + batch--; + } + + return; + } + } + + private MethodInfo GetSave(Type type) + { + MethodInfo m = null; + if (_savecache.TryGetValue(type, out m)) + 
return m; + + m = save.MakeGenericMethod(new Type[] { type }); + _savecache.Add(type, m); + return m; + } + + private MethodInfo GetSaveReplicate(Type type) + { + MethodInfo m = null; + if (_saverepcache.TryGetValue(type, out m)) + return m; + + m = saverep.MakeGenericMethod(new Type[] { type }); + _saverepcache.Add(type, m); + return m; + } + #endregion + + internal object GetAssemblyForView(string viewname, out string typename) + { + return _viewManager.GetAssemblyForView(viewname, out typename); + } + + /// + /// Get the current registered views + /// + /// + public List GetViews() + { + return _viewManager.GetViews(); + } + + /// + /// Get the schema for a view + /// + /// + /// + public ViewRowDefinition GetSchema(string view) + { + return _viewManager.GetSchema(view); + } + + /// + /// Query a view with paging and ordering + /// + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, string filter, int start, int count, string orderby) + { + return _viewManager.Query(viewname, filter, start, count, orderby); + } + + /// + /// Query a view with paging and ordering + /// + /// + /// + /// + /// + /// + /// + public Result Query(string filter, int start, int count, string orderby) + { + return _viewManager.Query(filter, start, count, orderby); + } + + /// + /// Get the history information for a document + /// + /// + /// + public HistoryInfo[] FetchHistoryInfo(Guid docid) + { + List h = new List(); + + foreach (int i in FetchHistory(docid)) + { + HistoryInfo hi = new HistoryInfo(); + hi.Version = i; + var o = _objStore.GetMeta(i); + hi.ChangeDate = o.date; + if (o.isDeleted == false) + h.Add(hi); + } + return h.ToArray(); + } + + /// + /// Get the history information for a file + /// + /// + /// + public HistoryInfo[] FetchBytesHistoryInfo(Guid docid) + { + List h = new List(); + + foreach (int i in FetchBytesHistory(docid)) + { + HistoryInfo hi = new HistoryInfo(); + hi.Version = i; + var o = _fileStore.GetMeta(i); + hi.ChangeDate = o.date; 
+ if (o.isDeleted == false) + h.Add(hi); + } + return h.ToArray(); + } + + /// + /// Direct delete from a view + /// + /// + /// + /// + public int ViewDelete(Expression> filter) + { + // do the delete + int c = _viewManager.ViewDelete(filter); + if (c > 0) + { + // save this filter to docs + View_delete vd = new View_delete(); + LINQString lq = new LINQString(); + lq.Visit(filter); + vd.Filter = lq.sb.ToString(); + vd.Viewname = _viewManager.GetViewName(typeof(TRowSchema)); + _objStore.SetObject(vd.ID, vd); + } + return c; + } + + /// + /// Direct delete from a view + /// + /// + /// + /// + public int ViewDelete(string viewname, string filter) + { + // do the delete + int c = _viewManager.ViewDelete(viewname, filter); + if (c > 0) + { + // save this filter to docs + View_delete vd = new View_delete(); + vd.Filter = filter; + vd.Viewname = viewname; + _objStore.SetObject(vd.ID, vd); + } + return c; + } + + /// + /// Direct insert into a view + /// + /// + /// + /// + /// + public bool ViewInsert(Guid id, TRowSchema row) + { + string vn = _viewManager.GetViewName(typeof(TRowSchema)); + if (vn != "") + { + if (_viewManager.ViewInsert(id, row)) + { + View_insert vi = new View_insert(); + vi.Viewname = vn; + vi.RowObject = row; + _objStore.SetObject(vi.ID, vi); + return true; + } + } + return false; + } + + /// + /// Direct insert into a view + /// + /// + /// + /// + /// + public bool ViewInsert(string viewname, Guid id, object row) + { + if (_viewManager.ViewInsert(viewname, id, row)) + { + View_insert vi = new View_insert(); + vi.Viewname = viewname; + vi.RowObject = row; + _objStore.SetObject(vi.ID, vi); + return true; + } + return false; + } + + /// + /// Total number of documents in the storage file including duplicates + /// + /// + public long DocumentCount() + { + return _objStore.Count(); + } + + public IKeyStoreHF GetKVHF() + { + return _objHF; + } + } +} diff --git a/RaptorDB/RaptorDB.csproj b/RaptorDB/RaptorDB.csproj index 79d6559..eeed56b 100644 --- 
a/RaptorDB/RaptorDB.csproj +++ b/RaptorDB/RaptorDB.csproj @@ -1,163 +1,206 @@ - - - - Local - 9.0.30729 - 2.0 - {45F6BE30-989A-4749-B6A0-69099C8661F4} - Debug - AnyCPU - - - RaptorDB - False - Library - RaptorDB - OnBuildSuccess - - - - - 0.0 - v4.0 - false - publish\ - false - Foreground - 7 - Days - false - true - 0 - 1.0.0.%2a - false - true - true - - - - bin\Debug\ - true - 285212672 - false - - - true - 4096 - false - true - false - false - false - 4 - Full - prompt - false - - - bin\Release\ - false - 285212672 - false - - - - - false - 4096 - false - - - true - false - false - false - 4 - none - prompt - false - - - Auto - AnyCPU - - - TRACE - - - Project - - - ..\raptordb.snk - - - - - - - - - BuildVersion.cs - - - Code - - - - - - - - - - - - - - - - True - True - Resources.resx - - - - - - - - - - - - Code - - - - - - - - - - - - - - {32331D51-5BE0-41E2-AF1A-9B086C5AE809} - RaptorDB.Common - - - - - ResXFileCodeGenerator - Resources.Designer.cs - - - - - - md "$(SolutionDir)nuget\net40" -copy "$(TargetPath)" "$(SolutionDir)nuget\net40\$(TargetFileName)" - + + + + Local + 9.0.30729 + 2.0 + {45F6BE30-989A-4749-B6A0-69099C8661F4} + Debug + AnyCPU + + + RaptorDB + False + Library + RaptorDB + OnBuildSuccess + + + + + 0.0 + v4.5 + false + publish\ + false + Foreground + 7 + Days + false + true + 0 + 1.0.0.%2a + false + true + false + + obj\$(Configuration)\ + + + bin\Debug\ + True + 285212672 + False + + + true + 4096 + False + false + False + false + False + 4 + Full + prompt + false + + + bin\Release\ + true + 285212672 + false + + + false + 4096 + false + true + false + false + false + 4 + none + prompt + false + + + Auto + AnyCPU + + + TRACE + + + Project + obj\ + + + ..\raptordb.snk + + + true + bin\x64\Debug\ + 285212672 + true + 4096 + Full + x64 + prompt + MinimumRecommendedRules.ruleset + false + + + bin\x64\Release\ + TRACE + 285212672 + true + true + 4096 + x64 + prompt + MinimumRecommendedRules.ruleset + false + + + + False + 
..\GenericPointerHelpers\GenericPointerHelpers.dll + + + + + + + + BuildVersion.cs + + + Code + + + + + + + + + + + + + + + + + + + + + + + True + True + Resources.resx + + + + + + + + + + + + + Code + + + + + + + + + + + + + + + + RaptorDB_Doc.nuspec + + + + + + {32331D51-5BE0-41E2-AF1A-9B086C5AE809} + RaptorDB.Common + + + + + ResXFileCodeGenerator + Resources.Designer.cs + Designer + + + + + + md "$(SolutionDir)nuget\net40" +copy "$(TargetPath)" "$(SolutionDir)nuget\net40\$(TargetFileName)" + \ No newline at end of file diff --git a/RaptorDB.Common/RaptorDBClient.cs b/RaptorDB/RaptorDBClient.cs similarity index 84% rename from RaptorDB.Common/RaptorDBClient.cs rename to RaptorDB/RaptorDBClient.cs index e4d2a0b..ff9a08f 100644 --- a/RaptorDB.Common/RaptorDBClient.cs +++ b/RaptorDB/RaptorDBClient.cs @@ -1,764 +1,750 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using RaptorDB.Common; -using System.Linq.Expressions; -using System.Reflection; -using System.IO; - -namespace RaptorDB -{ - public class KVHF : IKeyStoreHF - { - public KVHF(NetworkClient client, string username, string password) - { - _client = client; - _username = username; - _password = password; - } - - NetworkClient _client; - private string _username; - private string _password; - - - public object GetObjectHF(string key) - { - Packet p = CreatePacket(); - p.Command = "getobjecthf"; - p.Data = key; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - return ret.Data; - else - return null; - } - - public bool SetObjectHF(string key, object obj) - { - Packet p = CreatePacket(); - p.Command = "setobjecthf"; - p.Data = new object[] { key, obj }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return ret.OK; - } - - public bool DeleteKeyHF(string key) - { - Packet p = CreatePacket(); - p.Command = "deletekeyhf"; - p.Data = key; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return (bool)ret.Data; - } - - public int CountHF() 
- { - Packet p = CreatePacket(); - p.Command = "counthf"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return (int)ret.Data; - } - - public bool ContainsHF(string key) - { - Packet p = CreatePacket(); - p.Command = "containshf"; - p.Data = key; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return (bool)ret.Data; - } - - public string[] GetKeysHF() - { - Packet p = CreatePacket(); - p.Command = "getkeyshf"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return ((object[])ret.Data).Cast().ToArray(); - } - - public void CompactStorageHF() - { - Packet p = CreatePacket(); - p.Command = "compactstoragehf"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - - return; - } - - private Packet CreatePacket() - { - Packet p = new Packet(); - p.Username = _username; - p.PasswordHash = Helper.MurMur.Hash(Encoding.UTF8.GetBytes(_username + "|" + _password)).ToString(); - - return p; - } - } - - public class RaptorDBClient : IRaptorDB - { - public RaptorDBClient(string server, int port, string username, string password) - { - _username = username; - _password = password; - _client = new NetworkClient(server, port); - // speed settings - fastJSON.JSON.Parameters.ParametricConstructorOverride = true; - fastBinaryJSON.BJSON.Parameters.ParametricConstructorOverride = true; - _kv = new KVHF(_client, _username, _password); - } - - private KVHF _kv; - private NetworkClient _client; - private string _username; - private string _password; - private SafeDictionary _assembly = new SafeDictionary(); - - /// - /// Save a document to RaptorDB - /// - /// - /// - /// - /// - public bool Save(Guid docID, T document) - { - Packet p = CreatePacket(); - p.Command = "save"; - p.Docid = docID; - p.Data = document; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Save a file to RaptorDB - /// - /// - /// - /// - public bool SaveBytes(Guid fileID, byte[] bytes) - { - Packet p = CreatePacket(); - p.Command = "savebytes"; - p.Docid = 
fileID; - p.Data = bytes; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Query any view -> get all rows - /// - /// - /// - /// - public Result Query(string viewname) - { - return Query(viewname, 0, -1); - } - - /// - /// Query a view using a string filter - /// - /// - /// - /// - public Result Query(string viewname, string filter) - { - return Query(viewname, filter, 0, -1); - } - - /// - /// Fetch a document by it's ID - /// - /// - /// - public object Fetch(Guid docID) - { - Packet p = CreatePacket(); - p.Command = "fetch"; - p.Docid = docID; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - return ret.Data; - else - return null; - } - - /// - /// Fetch file data by it's ID - /// - /// - /// - public byte[] FetchBytes(Guid fileID) - { - Packet p = CreatePacket(); - p.Command = "fetchbytes"; - p.Docid = fileID; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - return (byte[])ret.Data; - else - return null; - } - - /// - /// Shutdown and cleanup - /// - public void Shutdown() - { - _client.Close(); - } - - /// - /// Backup the data file in incremental mode to the RaptorDB folder - /// - /// - public bool Backup() - { - Packet p = CreatePacket(); - p.Command = "backup"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Restore backup files stored in RaptorDB folder - /// - public void Restore() - { - Packet p = CreatePacket(); - p.Command = "restore"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - } - - /// - /// Delete a document (the actual data is not deleted just marked so) - /// - /// - /// - public bool Delete(Guid docid) - { - Packet p = CreatePacket(); - p.Command = "delete"; - p.Docid = docid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Delete a file (the actual data is not deleted just marked so) - /// - /// - /// - public bool DeleteBytes(Guid fileid) - { - Packet p = CreatePacket(); - 
p.Command = "deletebytes"; - p.Docid = fileid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Add a user for server mode login - /// - /// - /// - /// - /// - public bool AddUser(string username, string oldpassword, string newpassword) - { - Packet p = CreatePacket(); - p.Command = "adduser"; - p.Data = new object[] { username, oldpassword, newpassword }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.OK; - } - - /// - /// Execute server side queries - /// - /// - /// - /// - public object[] ServerSide(ServerSideFunc func, string filter) - { - Packet p = CreatePacket(); - p.Command = "serverside"; - p.Data = new object[] { func.Method.ReflectedType.AssemblyQualifiedName, func.Method.Name, filter }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (object[])ret.Data; - } - - /// - /// Execute server side queries - /// - /// - /// - /// - /// - public object[] ServerSide(ServerSideFunc func, Expression> filter) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - - Packet p = CreatePacket(); - p.Command = "serverside"; - p.Data = new object[] { func.Method.ReflectedType.AssemblyQualifiedName, func.Method.Name, ls.sb.ToString() }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (object[])ret.Data; - } - - /// - /// Full text search the complete original document - /// - /// - /// - public int[] FullTextSearch(string filter) - { - Packet p = CreatePacket(); - p.Command = "fulltext"; - p.Data = new object[] { filter }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int[])ret.Data; - } - - private Packet CreatePacket() - { - Packet p = new Packet(); - p.Username = _username; - p.PasswordHash = Helper.MurMur.Hash(Encoding.UTF8.GetBytes(_username + "|" + _password)).ToString(); - - return p; - } - - /// - /// Query all data in a view with paging - /// - /// - /// - /// - /// - public Result Query(string viewname, int start, int count) - { - return Query(viewname, "", 
start, count); - } - - /// - /// Query a View with a string filter with paging - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, string filter, int start, int count, string orderby) - { - bool b = false; - // check if return type exists and copy assembly if needed - if (_assembly.TryGetValue(viewname, out b) == false) - { - Packet pp = CreatePacket(); - pp.Command = "checkassembly"; - pp.Viewname = viewname; - ReturnPacket r = (ReturnPacket)_client.Send(pp); - string type = r.Error; - Type t = Type.GetType(type); - if (t == null) - { - if (r.Data != null) - { - var a = Assembly.Load((byte[])r.Data); - _assembly.Add(viewname, true); - } - } - else - _assembly.Add(viewname, true); - } - Packet p = CreatePacket(); - p.Command = "querystr"; - p.Viewname = viewname; - p.Data = filter; - p.Start = start; - p.Count = count; - p.OrderBy = orderby; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (Result)ret.Data; - } - - /// - /// Query a View with a LINQ filter with paging - /// - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, Expression> filter, int start, int count, string orderby) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - Packet p = CreatePacket(); - p.Command = "querystr"; - p.Viewname = viewname; - p.Start = start; - p.Count = count; - p.Data = ls.sb.ToString(); - p.OrderBy = orderby; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (Result)ret.Data; - } - - /// - /// Count rows - /// - /// - /// - public int Count(string viewname) - { - return Count(viewname, ""); - } - - /// - /// Count rows with a string filter - /// - /// - /// - /// - public int Count(string viewname, string filter) - { - Packet p = CreatePacket(); - p.Command = "countstr"; - p.Viewname = viewname; - p.Data = filter; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int)ret.Data; - } - - /// - /// Query with LINQ filter - /// - /// - /// - /// - public Result Query(Expression> filter) - { - 
return Query(filter, 0, -1, ""); - } - - /// - /// Query with LINQ filter and paging - /// - /// - /// - /// - /// - /// - public Result Query(Expression> filter, int start, int count, string orderby) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - Packet p = CreatePacket(); - p.Command = "querytype"; - p.Start = start; - p.Count = count; - p.OrderBy = orderby; - p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, ls.sb.ToString() }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - Result res = (Result)ret.Data; - return GenericResult(res); - } - - private static Result GenericResult(Result res) - { - // FEATURE : dirty hack here to cleanup - Result result = new Result(); - result.Count = res.Count; - result.EX = res.EX; - result.OK = res.OK; - result.TotalCount = res.TotalCount; - result.Rows = res.Rows.Cast().ToList(); - return result; - } - - /// - /// Query with string filter - /// - /// - /// - /// - public Result Query(string filter) - { - return Query(filter, 0, -1, ""); - } - - /// - /// Query with string filter and paging - /// - /// - /// - /// - /// - /// - public Result Query(string filter, int start, int count, string orderby) - { - Packet p = CreatePacket(); - p.Command = "querytype"; - p.Start = start; - p.Count = count; - p.OrderBy = orderby; - p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, filter }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - Result res = (Result)ret.Data; - return GenericResult(res); - } - - /// - /// Count with LINQ filter - /// - /// - /// - /// - public int Count(Expression> filter) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - Packet p = CreatePacket(); - p.Command = "gcount"; - p.Viewname = typeof(TRowSchema).AssemblyQualifiedName; - p.Data = ls.sb.ToString(); - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int)ret.Data; - } - - /// - /// Fetch the document change history - /// - /// - /// - public int[] FetchHistory(Guid docid) - { - 
Packet p = CreatePacket(); - p.Command = "dochistory"; - p.Docid = docid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int[])ret.Data; - } - - /// - /// Fetch the file change history - /// - /// - /// - public int[] FetchBytesHistory(Guid fileid) - { - Packet p = CreatePacket(); - p.Command = "filehistory"; - p.Docid = fileid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int[])ret.Data; - } - - /// - /// Fetch a specific document version - /// - /// - /// - public object FetchVersion(int versionNumber) - { - Packet p = CreatePacket(); - p.Command = "fetchversion"; - p.Data = versionNumber; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return ret.Data; - } - - /// - /// Fetch a specific file version - /// - /// - /// - public byte[] FetchBytesVersion(int versionNumber) - { - Packet p = CreatePacket(); - p.Command = "fetchfileversion"; - p.Data = versionNumber; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (byte[])ret.Data; - } - - /// - /// Query a View with a string filter with paging - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, string filter, int start, int count) - { - return this.Query(viewname, filter, start, count, ""); - } - - /// - /// Query a view with paging - /// - /// - /// - /// - /// - /// - /// - public Result Query(string viewname, Expression> filter, int start, int count) - { - return this.Query(viewname, filter, start, count, ""); - } - - /// - /// Query a view with paging - /// - /// - /// - /// - /// - /// - public Result Query(Expression> filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - /// - /// Query a view with paging - /// - /// - /// - /// - /// - /// - public Result Query(string filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - /// - /// Fetch a change history for a document with dates - /// - /// - /// - public HistoryInfo[] FetchHistoryInfo(Guid docid) - { - Packet p = CreatePacket(); 
- p.Command = "fetchhistoryinfo"; - p.Docid = docid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (HistoryInfo[])ret.Data; - } - - /// - /// Fetch a change history for a file with dates - /// - /// - /// - public HistoryInfo[] FetchBytesHistoryInfo(Guid docid) - { - Packet p = CreatePacket(); - p.Command = "fetchbytehistoryinfo"; - p.Docid = docid; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (HistoryInfo[])ret.Data; - } - - /// - /// Delete directly from a view using a filter - /// - /// - /// - /// - public int ViewDelete(Expression> filter) - { - LINQString ls = new LINQString(); - ls.Visit(filter); - Packet p = CreatePacket(); - p.Command = "viewdelete-t"; - p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, ls.sb.ToString() }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int)ret.Data; - } - - /// - /// Delete directly from a view using a filter - /// - /// - /// - /// - public int ViewDelete(string viewname, string filter) - { - Packet p = CreatePacket(); - p.Command = "viewdelete"; - p.Data = new object[] { viewname, filter }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (int)ret.Data; - } - - /// - /// Insert directly into a view - /// - /// - /// - /// - /// - public bool ViewInsert(Guid id, TRowSchema row) - { - Packet p = CreatePacket(); - p.Command = "viewinsert-t"; - p.Docid = id; - p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, row }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (bool)ret.Data; - } - - /// - /// Insert directly into a view - /// - /// - /// - /// - /// - public bool ViewInsert(string viewname, Guid id, object row) - { - Packet p = CreatePacket(); - p.Command = "viewinsert"; - p.Docid = id; - p.Data = new object[] { viewname, row }; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (bool)ret.Data; - } - - /// - /// Get the number of documents in the storage file regardless of versions - /// - /// - public long 
DocumentCount() - { - Packet p = CreatePacket(); - p.Command = "doccount"; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - return (long)ret.Data; - } - - public IKeyStoreHF GetKVHF() - { - return _kv; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using RaptorDB.Common; +using System.Linq.Expressions; +using System.Reflection; +using System.IO; + +namespace RaptorDB +{ + public class KVHF : IKeyStoreHF + { + public KVHF(NetworkClient client, string username, string password) + { + _client = client; + _username = username; + _password = password; + } + + NetworkClient _client; + private string _username; + private string _password; + + + public object GetObjectHF(string key) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.GetObjectHF; + p.Data = key; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + return ret.Data; + else + return null; + } + + public bool SetObjectHF(string key, object obj) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.SetObjectHF; + p.Data = new object[] { key, obj }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return ret.OK; + } + + public bool DeleteKeyHF(string key) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.DeleteKeyHF; + p.Data = key; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return (bool)ret.Data; + } + + public int CountHF() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.CountHF; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return (int)ret.Data; + } + + public bool ContainsHF(string key) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.ContainsHF; + p.Data = key; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return (bool)ret.Data; + } + + public string[] GetKeysHF() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.GetKeysHF; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return ((object[])ret.Data).Cast().ToArray(); + } + + 
public void CompactStorageHF() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.CompactStorageHF; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + + return; + } + + private Packet CreatePacket() + { + Packet p = new Packet(); + p.Username = _username; + p.PasswordHash = Helper.MurMur.Hash(Encoding.UTF8.GetBytes(_username + "|" + _password)).ToString(); + + return p; + } + } + + public class RaptorDBClient : IRaptorDB + { + public RaptorDBClient(string server, int port, string username, string password) + { + _username = username; + _password = password; + _client = new NetworkClient(server, port); + // speed settings + fastJSON.JSON.Parameters.ParametricConstructorOverride = true; + fastBinaryJSON.BJSON.Parameters.ParametricConstructorOverride = true; + _kv = new KVHF(_client, _username, _password); + } + + private KVHF _kv; + private NetworkClient _client; + private string _username; + private string _password; + private SafeDictionary _assembly = new SafeDictionary(); + + /// + /// Save a document to RaptorDB + /// + /// + /// + /// + /// + public bool Save(Guid docID, T document) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.Save; + p.Docid = docID; + p.Data = document; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Save a file to RaptorDB + /// + /// + /// + /// + public bool SaveBytes(Guid fileID, byte[] bytes) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.SaveBytes; + p.Docid = fileID; + p.Data = bytes; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Query any view -> get all rows + /// + /// + /// + /// + public IResult Query(string viewname) + { + return Query(viewname, 0, -1); + } + + /// + /// Query a view using a string filter + /// + /// + /// + /// + public IResult Query(string viewname, string filter) + { + return Query(viewname, filter, 0, -1); + } + + /// + /// Fetch a document by it's ID + /// + /// + /// + public object 
Fetch(Guid docID) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.Fetch; + p.Docid = docID; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + return ret.Data; + else + return null; + } + + /// + /// Fetch file data by it's ID + /// + /// + /// + public byte[] FetchBytes(Guid fileID) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FetchBytes; + p.Docid = fileID; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + return (byte[])ret.Data; + else + return null; + } + + /// + /// Shutdown and cleanup + /// + public void Shutdown() + { + _client.Close(); + } + + /// + /// Backup the data file in incremental mode to the RaptorDB folder + /// + /// + public bool Backup() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.Backup; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Restore backup files stored in RaptorDB folder + /// + public void Restore() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.Restore; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + } + + /// + /// Delete a document (the actual data is not deleted just marked so) + /// + /// + /// + public bool Delete(Guid docid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.Delete; + p.Docid = docid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Delete a file (the actual data is not deleted just marked so) + /// + /// + /// + public bool DeleteBytes(Guid fileid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.DeleteBytes; + p.Docid = fileid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Add a user for server mode login + /// + /// + /// + /// + /// + public bool AddUser(string username, string oldpassword, string newpassword) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.AddUser; + p.Data = new object[] { username, oldpassword, newpassword }; + ReturnPacket ret = 
(ReturnPacket)_client.Send(p); + return ret.OK; + } + + /// + /// Execute server side queries + /// + /// + /// + /// + public object[] ServerSide(ServerSideFunc func, string filter) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.ServerSide; + p.Data = new object[] { func.Method.ReflectedType.AssemblyQualifiedName, func.Method.Name, func.Target, filter }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (object[])ret.Data; + } + + /// + /// Execute server side queries + /// + /// + /// + /// + /// + public object[] ServerSide(ServerSideFunc func, Expression> filter) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + + Packet p = CreatePacket(); + p.Command = PacketCommand.ServerSide; + p.Data = new object[] { func.Method.ReflectedType.AssemblyQualifiedName, func.Method.Name, func.Target, ls.sb.ToString() }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (object[])ret.Data; + } + + /// + /// Full text search the complete original document + /// + /// + /// + public int[] FullTextSearch(string filter) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FullText; + p.Data = new object[] { filter }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int[])ret.Data; + } + + private Packet CreatePacket() + { + Packet p = new Packet(); + p.Username = _username; + p.PasswordHash = Helper.MurMur.Hash(Encoding.UTF8.GetBytes(_username + "|" + _password)).ToString(); + + return p; + } + + /// + /// Query all data in a view with paging + /// + /// + /// + /// + /// + public IResult Query(string viewname, int start, int count) + { + return Query(viewname, null, start, count); + } + + /// + /// Query a View with a string filter with paging + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, string filter, int start, int count, string orderby) + { + bool b = false; + // check if return type exists and copy assembly if needed + if (_assembly.TryGetValue(viewname, out b) == false) + { + 
Packet pp = CreatePacket(); + pp.Command = PacketCommand.CheckAssembly; + pp.Viewname = viewname; + ReturnPacket r = (ReturnPacket)_client.Send(pp); + string type = r.Error; + Type t = Type.GetType(type); + if (t == null) + { + if (r.Data != null) + { + var a = Assembly.Load((byte[])r.Data); + _assembly.Add(viewname, true); + } + } + else + _assembly.Add(viewname, true); + } + Packet p = CreatePacket(); + p.Command = PacketCommand.QueryStr; + p.Viewname = viewname; + p.Data = filter; + p.Start = start; + p.Count = count; + p.OrderBy = orderby; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (IResult)ret.Data; + } + + /// + /// Query a View with a LINQ filter with paging + /// + /// + /// + /// + /// + /// + /// + public Result Query(string viewname, Expression> filter, int start, int count, string orderby) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + Packet p = CreatePacket(); + p.Command = PacketCommand.QueryStr; + p.Viewname = viewname; + p.Start = start; + p.Count = count; + p.Data = ls.sb.ToString(); + p.OrderBy = orderby; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (Result)ret.Data; + } + + /// + /// Count rows + /// + /// + /// + public int Count(string viewname) + { + return Count(viewname, null); + } + + /// + /// Count rows with a string filter + /// + /// + /// + /// + public int Count(string viewname, string filter) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.CountStr; + p.Viewname = viewname; + p.Data = filter; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int)ret.Data; + } + + /// + /// Query with LINQ filter + /// + /// + /// + /// + public Result Query(Expression> filter) + { + return Query(filter, 0, -1, null); + } + + /// + /// Query with LINQ filter and paging + /// + /// + /// + /// + /// + /// + public Result Query(Expression> filter, int start, int count, string orderby) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + Packet p = CreatePacket(); + 
p.Command = PacketCommand.QueryType; + p.Start = start; + p.Count = count; + p.OrderBy = orderby; + p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, ls.sb.ToString() }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (Result)ret.Data; + } + + /// + /// Query with string filter + /// + /// + /// + /// + public Result Query(string filter) + { + return Query(filter, 0, -1, null); + } + + /// + /// Query with string filter and paging + /// + /// + /// + /// + /// + /// + public Result Query(string filter, int start, int count, string orderby) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.QueryType; + p.Start = start; + p.Count = count; + p.OrderBy = orderby; + p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, filter }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (Result)ret.Data; + } + + /// + /// Count with LINQ filter + /// + /// + /// + /// + public int Count(Expression> filter) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + Packet p = CreatePacket(); + p.Command = PacketCommand.GCount; + p.Viewname = typeof(TRowSchema).AssemblyQualifiedName; + p.Data = ls.sb.ToString(); + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int)ret.Data; + } + + /// + /// Fetch the document change history + /// + /// + /// + public IEnumerable FetchHistory(Guid docid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.DocHistory; + p.Docid = docid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int[])ret.Data; + } + + /// + /// Fetch the file change history + /// + /// + /// + public IEnumerable FetchBytesHistory(Guid fileid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FileHistory; + p.Docid = fileid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int[])ret.Data; + } + + /// + /// Fetch a specific document version + /// + /// + /// + public object FetchVersion(int versionNumber) + { + Packet p = CreatePacket(); + p.Command = 
PacketCommand.FetchVersion; + p.Data = versionNumber; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return ret.Data; + } + + /// + /// Fetch a specific file version + /// + /// + /// + public byte[] FetchBytesVersion(int versionNumber) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FetchFileVersion; + p.Data = versionNumber; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (byte[])ret.Data; + } + + /// + /// Query a View with a string filter with paging + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, string filter, int start, int count) + { + return this.Query(viewname, filter, start, count, null); + } + + /// + /// Query a view with paging + /// + /// + /// + /// + /// + /// + /// + public IResult Query(string viewname, Expression> filter, int start, int count) + { + return this.Query(viewname, filter, start, count, null); + } + + /// + /// Query a view with paging + /// + /// + /// + /// + /// + /// + public Result Query(Expression> filter, int start, int count) + { + return Query(filter, start, count, null); + } + + /// + /// Query a view with paging + /// + /// + /// + /// + /// + /// + public Result Query(string filter, int start, int count) + { + return Query(filter, start, count, null); + } + + /// + /// Fetch a change history for a document with dates + /// + /// + /// + public HistoryInfo[] FetchHistoryInfo(Guid docid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FetchHistoryInfo; + p.Docid = docid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (HistoryInfo[])ret.Data; + } + + /// + /// Fetch a change history for a file with dates + /// + /// + /// + public HistoryInfo[] FetchBytesHistoryInfo(Guid docid) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.FetchByteHistoryInfo; + p.Docid = docid; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (HistoryInfo[])ret.Data; + } + + /// + /// Delete directly from a view using a filter + /// + /// + 
/// + /// + public int ViewDelete(Expression> filter) + { + LINQString ls = new LINQString(); + ls.Visit(filter); + Packet p = CreatePacket(); + p.Command = PacketCommand.ViewDelete_t; + p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, ls.sb.ToString() }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int)ret.Data; + } + + /// + /// Delete directly from a view using a filter + /// + /// + /// + /// + public int ViewDelete(string viewname, string filter) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.ViewDelete; + p.Data = new object[] { viewname, filter }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (int)ret.Data; + } + + /// + /// Insert directly into a view + /// + /// + /// + /// + /// + public bool ViewInsert(Guid id, TRowSchema row) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.ViewInsert_t; + p.Docid = id; + p.Data = new object[] { typeof(TRowSchema).AssemblyQualifiedName, row }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (bool)ret.Data; + } + + /// + /// Insert directly into a view + /// + /// + /// + /// + /// + public bool ViewInsert(string viewname, Guid id, object row) + { + Packet p = CreatePacket(); + p.Command = PacketCommand.ViewInsert; + p.Docid = id; + p.Data = new object[] { viewname, row }; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (bool)ret.Data; + } + + /// + /// Get the number of documents in the storage file regardless of versions + /// + /// + public long DocumentCount() + { + Packet p = CreatePacket(); + p.Command = PacketCommand.DocCount; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + return (long)ret.Data; + } + + public IKeyStoreHF GetKVHF() + { + return _kv; + } + } +} diff --git a/RaptorDB/RaptorDBServer.cs b/RaptorDB/RaptorDBServer.cs index 914f6fb..a1708da 100644 --- a/RaptorDB/RaptorDBServer.cs +++ b/RaptorDB/RaptorDBServer.cs @@ -1,379 +1,492 @@ -using System; -using System.Collections.Generic; -using 
System.Linq; -using System.Text; -using RaptorDB.Common; -using System.Reflection; -using System.IO; -using System.Threading.Tasks; -using System.Threading; - -namespace RaptorDB -{ - public class RaptorDBServer - { - public RaptorDBServer(int port, string DataPath) - { - _path = Directory.GetCurrentDirectory(); - AppDomain.CurrentDomain.AssemblyResolve += new ResolveEventHandler(CurrentDomain_AssemblyResolve); - _server = new NetworkServer(); - - if (_S == "/")// unix system - _datapath = DataPath.Replace("\\", "/"); - else - _datapath = DataPath; - - if (_datapath.EndsWith(_S) == false) - _datapath += _S; - - _raptor = RaptorDB.Open(DataPath); - register = _raptor.GetType().GetMethod("RegisterView", BindingFlags.Instance | BindingFlags.Public); - save = _raptor.GetType().GetMethod("Save", BindingFlags.Instance | BindingFlags.Public); - Initialize(); - _server.Start(port, processpayload); - } - - private string _S = Path.DirectorySeparatorChar.ToString(); - private Dictionary _users = new Dictionary(); - private string _path = ""; - private string _datapath = ""; - private ILog log = LogManager.GetLogger(typeof(RaptorDBServer)); - private NetworkServer _server; - private RaptorDB _raptor; - private MethodInfo register = null; - private MethodInfo save = null; - private SafeDictionary _savecache = new SafeDictionary(); - private SafeDictionary _ssidecache = new SafeDictionary(); - - private Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args) - { - if (File.Exists(args.Name)) - return Assembly.LoadFrom(args.Name); - string[] ss = args.Name.Split(','); - string fname = ss[0] + ".dll"; - if (File.Exists(fname)) - return Assembly.LoadFrom(fname); - fname = "Extensions" + _S + fname; - if (File.Exists(fname)) - return Assembly.LoadFrom(fname); - else return null; - } - - private MethodInfo GetSave(Type type) - { - MethodInfo m = null; - if (_savecache.TryGetValue(type, out m)) - return m; - - m = save.MakeGenericMethod(new Type[] { type }); - 
_savecache.Add(type, m); - return m; - } - - public void Shutdown() - { - WriteUsers(); - _server.Stop(); - _raptor.Shutdown(); - } - - private void WriteUsers() - { - // write users to user.config file - StringBuilder sb = new StringBuilder(); - sb.AppendLine("# FORMAT : username , pasword hash"); - sb.AppendLine("# To disable a user comment the line with the '#'"); - foreach (var kv in _users) - { - sb.AppendLine(kv.Key + " , " + kv.Value); - } - - File.WriteAllText(_datapath + "RaptorDB-Users.config", sb.ToString()); - } - - private object processpayload(object data) - { - Packet p = (Packet)data; - - if (Authenticate(p) == false) - return new ReturnPacket(false, "Authentication failed"); - - ReturnPacket ret = new ReturnPacket(true); - try - { - object[] param = null; - - switch (p.Command) - { - case "save": - var m = GetSave(p.Data.GetType()); - m.Invoke(_raptor, new object[] { p.Docid, p.Data }); - break; - case "savebytes": - ret.OK = _raptor.SaveBytes(p.Docid, (byte[])p.Data); - break; - case "querytype": - param = (object[])p.Data; - Type t = Type.GetType((string)param[0]); - string viewname = _raptor.GetViewName(t); - ret.OK = true; - ret.Data = _raptor.Query(viewname, (string)param[1], p.Start, p.Count, p.OrderBy); - break; - case "querystr": - ret.OK = true; - ret.Data = _raptor.Query(p.Viewname, (string)p.Data, p.Start, p.Count, p.OrderBy); - break; - case "fetch": - ret.OK = true; - ret.Data = _raptor.Fetch(p.Docid); - break; - case "fetchbytes": - ret.OK = true; - ret.Data = _raptor.FetchBytes(p.Docid); - break; - case "backup": - ret.OK = _raptor.Backup(); - break; - case "delete": - ret.OK = _raptor.Delete(p.Docid); - break; - case "deletebytes": - ret.OK = _raptor.DeleteBytes(p.Docid); - break; - case "restore": - ret.OK = true; - Task.Factory.StartNew(() => _raptor.Restore()); - break; - case "adduser": - param = (object[])p.Data; - ret.OK = AddUser((string)param[0], (string)param[1], (string)param[2]); - break; - case "serverside": - param = 
(object[])p.Data; - ret.OK = true; - ret.Data = _raptor.ServerSide(GetServerSideFuncCache(param[0].ToString(), param[1].ToString()), param[2].ToString()); - break; - case "fulltext": - param = (object[])p.Data; - ret.OK = true; - ret.Data = _raptor.FullTextSearch("" + param[0]); - break; - case "counttype": - // count type - param = (object[])p.Data; - Type t2 = Type.GetType((string)param[0]); - string viewname2 = _raptor.GetViewName(t2); - ret.OK = true; - ret.Data = _raptor.Count(viewname2, (string)param[1]); - break; - case "countstr": - // count str - ret.OK = true; - ret.Data = _raptor.Count(p.Viewname, (string)p.Data); - break; - case "gcount": - Type t3 = Type.GetType(p.Viewname); - string viewname3 = _raptor.GetViewName(t3); - ret.OK = true; - ret.Data = _raptor.Count(viewname3, (string)p.Data); - break; - case "dochistory": - ret.OK = true; - ret.Data = _raptor.FetchHistory(p.Docid); - break; - case "filehistory": - ret.OK = true; - ret.Data = _raptor.FetchBytesHistory(p.Docid); - break; - case "fetchversion": - ret.OK = true; - ret.Data = _raptor.FetchVersion((int)p.Data); - break; - case "fetchfileversion": - ret.OK = true; - ret.Data = _raptor.FetchBytesVersion((int)p.Data); - break; - case "checkassembly": - ret.OK = true; - string typ = ""; - ret.Data = _raptor.GetAssemblyForView(p.Viewname, out typ); - ret.Error = typ; - break; - case "fetchhistoryinfo": - ret.OK = true; - ret.Data = _raptor.FetchHistoryInfo(p.Docid); - break; - case "fetchbytehistoryinfo": - ret.OK = true; - ret.Data = _raptor.FetchBytesHistoryInfo(p.Docid); - break; - case "viewdelete": - ret.OK = true; - param = (object[])p.Data; - ret.Data = _raptor.ViewDelete((string)param[0], (string)param[1]); - break; - case "viewdelete-t": - ret.OK = true; - param = (object[])p.Data; - Type t4 = Type.GetType((string)param[0]); - string viewname4 = _raptor.GetViewName(t4); - ret.Data = _raptor.ViewDelete(viewname4, (string)param[1]); - break; - case "viewinsert": - ret.OK = true; - param = 
(object[])p.Data; - ret.Data = _raptor.ViewInsert((string)param[0], p.Docid, param[1]); - break; - case "viewinsert-t": - ret.OK = true; - param = (object[])p.Data; - Type t5 = Type.GetType((string)param[0]); - string viewname5 = _raptor.GetViewName(t5); - ret.Data = _raptor.ViewInsert(viewname5, p.Docid, param[1]); - break; - case "doccount": - ret.OK = true; - ret.Data = _raptor.DocumentCount(); - break; - case "getobjecthf": - ret.OK = true; - ret.Data = _raptor.GetKVHF().GetObjectHF((string)p.Data); - break; - case "setobjecthf": - ret.OK = true; - param = (object[])p.Data; - _raptor.GetKVHF().SetObjectHF((string)param[0], param[1]); - break; - case "deletekeyhf": - ret.OK = true; - ret.Data = _raptor.GetKVHF().DeleteKeyHF((string)p.Data); - break; - case "counthf": - ret.OK = true; - ret.Data = _raptor.GetKVHF().CountHF(); - break; - case "containshf": - ret.OK = true; - ret.Data = _raptor.GetKVHF().ContainsHF((string)p.Data); - break; - case "getkeyshf": - ret.OK = true; - ret.Data = _raptor.GetKVHF().GetKeysHF(); - break; - case "compactstoragehf": - ret.OK = true; - _raptor.GetKVHF().CompactStorageHF(); - break; - } - } - catch (Exception ex) - { - ret.OK = false; - log.Error(ex); - } - return ret; - } - - private ServerSideFunc GetServerSideFuncCache(string type, string method) - { - ServerSideFunc func; - log.Debug("Calling Server side Function : " + method + " on type " + type); - if (_ssidecache.TryGetValue(type + method, out func) == false) - { - Type tt = Type.GetType(type); - - func = (ServerSideFunc)Delegate.CreateDelegate(typeof(ServerSideFunc), tt, method); - _ssidecache.Add(type + method, func); - } - return func; - } - - private uint GenHash(string user, string pwd) - { - return Helper.MurMur.Hash(Encoding.UTF8.GetBytes(user.ToLower() + "|" + pwd)); - } - - private bool AddUser(string user, string oldpwd, string newpwd) - { - uint hash = 0; - if (_users.TryGetValue(user.ToLower(), out hash) == false) - { - _users.Add(user.ToLower(), 
GenHash(user, newpwd)); - return true; - } - if (hash == GenHash(user, oldpwd)) - { - _users[user.ToLower()] = GenHash(user, newpwd); - return true; - } - return false; - } - - private bool Authenticate(Packet p) - { - uint pwd; - if (_users.TryGetValue(p.Username.ToLower(), out pwd)) - { - uint hash = uint.Parse(p.PasswordHash); - if (hash == pwd) return true; - } - log.Debug("Authentication failed for '" + p.Username + "' hash = " + p.PasswordHash); - return false; - } - - private void Initialize() - { - // load users here - if (File.Exists(_datapath + "RaptorDB-Users.config")) - { - foreach (string line in File.ReadAllLines(_datapath + "RaptorDB-Users.config")) - { - if (line.Contains("#") == false) - { - string[] s = line.Split(','); - _users.Add(s[0].Trim().ToLower(), uint.Parse(s[1].Trim())); - } - } - } - // add default admin user if not exists - if (_users.ContainsKey("admin") == false) - _users.Add("admin", GenHash("admin", "admin")); - - // exe folder - // |-Extensions - Directory.CreateDirectory(_path + _S + "Extensions"); - - // open extensions folder - string path = _path + _S + "Extensions"; - - foreach (var f in Directory.GetFiles(path, "*.dll")) - { - // - load all dll files - // - register views - log.Debug("loading dll for views : " + f); - Assembly a = Assembly.Load(f); - foreach (var t in a.GetTypes()) - { - foreach (var att in t.GetCustomAttributes(typeof(RegisterViewAttribute), false)) - { - try - { - object o = Activator.CreateInstance(t); - // handle types when view also - Type[] args = t.GetGenericArguments(); - if (args.Length == 0) - args = t.BaseType.GetGenericArguments(); - Type tt = args[0]; - var m = register.MakeGenericMethod(new Type[] { tt }); - m.Invoke(_raptor, new object[] { o }); - } - catch (Exception ex) - { - log.Error(ex); - } - } - } - } - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using RaptorDB.Common; +using System.Reflection; +using System.IO; +using 
System.Threading.Tasks; +using System.Threading; +using System.Linq.Expressions; + +namespace RaptorDB +{ + public delegate void Handler(Packet data, ReturnPacket ret); + + public class RaptorDBServer + { + public RaptorDBServer(int port, string DataPath) + { + _path = Directory.GetCurrentDirectory(); + AppDomain.CurrentDomain.AssemblyResolve += new ResolveEventHandler(CurrentDomain_AssemblyResolve); + _server = new NetworkServer(); + + if (_S == "/")// unix system + _datapath = DataPath.Replace("\\", "/"); + else + _datapath = DataPath; + + if (_datapath.EndsWith(_S) == false) + _datapath += _S; + + _raptor = RaptorDB.Open(DataPath); + register = _raptor.GetType().GetMethod("RegisterView", BindingFlags.Instance | BindingFlags.Public); + save = _raptor.GetType().GetMethod("Save", BindingFlags.Instance | BindingFlags.Public); + Initialize(); + _server.Start(port, processpayload); + } + + private string _S = Path.DirectorySeparatorChar.ToString(); + private Dictionary _users = new Dictionary(); + private string _path = ""; + private string _datapath = ""; + private ILog _log = LogManager.GetLogger(typeof(RaptorDBServer)); + private NetworkServer _server; + private RaptorDB _raptor; + private MethodInfo register = null; + private MethodInfo save = null; + private SafeDictionary _savecache = new SafeDictionary(); + private SafeDictionary _ssidecache = new SafeDictionary(); + private Dictionary _handlers = new Dictionary(); + private const string _RaptorDB_users_config = "RaptorDB-Users.config"; + + private Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args) + { + if (File.Exists(args.Name)) + return Assembly.LoadFrom(args.Name); + string[] ss = args.Name.Split(','); + string fname = ss[0] + ".dll"; + if (File.Exists(fname)) + return Assembly.LoadFrom(fname); + fname = "Extensions" + _S + fname; + if (File.Exists(fname)) + return Assembly.LoadFrom(fname); + else return null; + } + + private MethodInfo GetSave(Type type) + { + MethodInfo m = 
null; + if (_savecache.TryGetValue(type, out m)) + return m; + + m = save.MakeGenericMethod(new Type[] { type }); + _savecache.Add(type, m); + return m; + } + + public void Shutdown() + { + WriteUsers(); + _server.Stop(); + _raptor.Shutdown(); + } + + private void WriteUsers() + { + // write users to user.config file + StringBuilder sb = new StringBuilder(); + sb.AppendLine("# FORMAT : username , pasword hash"); + sb.AppendLine("# To disable a user comment the line with the '#'"); + foreach (var kv in _users) + { + sb.AppendLine(kv.Key + " , " + kv.Value); + } + + File.WriteAllText(_datapath + _RaptorDB_users_config, sb.ToString()); + } + + private object processpayload(object data) + { + Packet p = (Packet)data; + + if (Authenticate(p) == false) + return new ReturnPacket(false, "Authentication failed"); + + ReturnPacket ret = new ReturnPacket(true); + try + { + Handler d = null; + ret.OK = true; + if (_handlers.TryGetValue(p.Command, out d)) + d(p, ret); + else + _log.Error("Command handler not found : " + p.Command); + } + catch (Exception ex) + { + ret.OK = false; + ret.Error = ex.GetType().Name + ": " + ex.Message; + _log.Error(ex); + } + return ret; + } + + private void InitializeCommandsDictionary() + { + _handlers.Add(PacketCommand.Save, + (p, ret) => + { + var m = GetSave(p.Data.GetType()); + m.Invoke(_raptor, new object[] { p.Docid, p.Data }); + }); + + _handlers.Add(PacketCommand.SaveBytes, + (p, ret) => + { + ret.OK = _raptor.SaveBytes(p.Docid, (byte[])p.Data); + }); + + _handlers.Add(PacketCommand.QueryType, + (p, ret) => + { + var param = (object[])p.Data; + Type t = Type.GetType((string)param[0]); + string viewname = _raptor.GetViewName(t); + ret.Data = _raptor.Query(viewname, (string)param[1], p.Start, p.Count, p.OrderBy); + }); + + _handlers.Add(PacketCommand.QueryStr, + (p, ret) => + { + ret.Data = _raptor.Query(p.Viewname, (string)p.Data, p.Start, p.Count, p.OrderBy); + }); + + _handlers.Add(PacketCommand.Fetch, + (p, ret) => + { + ret.Data = 
_raptor.Fetch(p.Docid); + }); + + _handlers.Add(PacketCommand.FetchBytes, + (p, ret) => + { + ret.OK = true; + ret.Data = _raptor.FetchBytes(p.Docid); + }); + + _handlers.Add(PacketCommand.Backup, + (p, ret) => + { + ret.OK = _raptor.Backup(); + }); + + _handlers.Add(PacketCommand.Delete, + (p, ret) => + { + ret.OK = _raptor.Delete(p.Docid); + }); + + _handlers.Add(PacketCommand.DeleteBytes, + (p, ret) => + { + ret.OK = _raptor.DeleteBytes(p.Docid); + }); + + _handlers.Add(PacketCommand.Restore, + (p, ret) => + { + Task.Factory.StartNew(() => _raptor.Restore()); + }); + + _handlers.Add(PacketCommand.AddUser, + (p, ret) => + { + var param = (object[])p.Data; + ret.OK = AddUser((string)param[0], (string)param[1], (string)param[2]); + }); + + _handlers.Add(PacketCommand.ServerSide, + (p, ret) => + { + var param = (object[])p.Data; + ret.Data = _raptor.ServerSide(GetServerSideFuncCache(param[0].ToString(), param[1].ToString()).GetFunc(param[2]), (string)param[3]); + }); + + _handlers.Add(PacketCommand.FullText, + (p, ret) => + { + var param = (object[])p.Data; + ret.Data = _raptor.FullTextSearch((string)param[0]); + }); + + _handlers.Add(PacketCommand.CountType, + (p, ret) => + { + // count type + var param = (object[])p.Data; + Type t = Type.GetType((string)param[0]); + string viewname = _raptor.GetViewName(t); + ret.Data = _raptor.Count(viewname, (string)param[1]); + }); + + _handlers.Add(PacketCommand.CountStr, + (p, ret) => + { + // count str + ret.Data = _raptor.Count(p.Viewname, (string)p.Data); + }); + + _handlers.Add(PacketCommand.GCount, + (p, ret) => + { + Type t = Type.GetType(p.Viewname); + string viewname = _raptor.GetViewName(t); + ret.Data = _raptor.Count(viewname, (string)p.Data); + }); + + _handlers.Add(PacketCommand.DocHistory, + (p, ret) => + { + ret.Data = _raptor.FetchHistory(p.Docid); + }); + + _handlers.Add(PacketCommand.FileHistory, + (p, ret) => + { + ret.Data = _raptor.FetchBytesHistory(p.Docid); + }); + + 
_handlers.Add(PacketCommand.FetchVersion, + (p, ret) => + { + ret.Data = _raptor.FetchVersion((int)p.Data); + }); + + _handlers.Add(PacketCommand.FetchFileVersion, + (p, ret) => + { + ret.Data = _raptor.FetchBytesVersion((int)p.Data); + }); + + _handlers.Add(PacketCommand.CheckAssembly, + (p, ret) => + { + string typ = ""; + ret.Data = _raptor.GetAssemblyForView(p.Viewname, out typ); + ret.Error = typ; + }); + _handlers.Add(PacketCommand.FetchHistoryInfo, + (p, ret) => + { + ret.Data = _raptor.FetchHistoryInfo(p.Docid); + }); + + _handlers.Add(PacketCommand.FetchByteHistoryInfo, + (p, ret) => + { + ret.Data = _raptor.FetchBytesHistoryInfo(p.Docid); + }); + + _handlers.Add(PacketCommand.ViewDelete, + (p, ret) => + { + var param = (object[])p.Data; + ret.Data = _raptor.ViewDelete((string)param[0], (string)param[1]); + }); + + _handlers.Add(PacketCommand.ViewDelete_t, + (p, ret) => + { + var param = (object[])p.Data; + Type t = Type.GetType((string)param[0]); + string viewname = _raptor.GetViewName(t); + ret.Data = _raptor.ViewDelete(viewname, (string)param[1]); + }); + + _handlers.Add(PacketCommand.ViewInsert, + (p, ret) => + { + var param = (object[])p.Data; + ret.Data = _raptor.ViewInsert((string)param[0], p.Docid, param[1]); + }); + + _handlers.Add(PacketCommand.ViewInsert_t, + (p, ret) => + { + var param = (object[])p.Data; + Type t = Type.GetType((string)param[0]); + string viewname = _raptor.GetViewName(t); + ret.Data = _raptor.ViewInsert(viewname, p.Docid, param[1]); + }); + + _handlers.Add(PacketCommand.DocCount, + (p, ret) => + { + ret.Data = _raptor.DocumentCount(); + }); + + _handlers.Add(PacketCommand.GetObjectHF, + (p, ret) => + { + ret.Data = _raptor.GetKVHF().GetObjectHF((string)p.Data); + }); + + _handlers.Add(PacketCommand.SetObjectHF, + (p, ret) => + { + var param = (object[])p.Data; + _raptor.GetKVHF().SetObjectHF((string)param[0], param[1]); + }); + + _handlers.Add(PacketCommand.DeleteKeyHF, + (p, ret) => + { + ret.Data = 
_raptor.GetKVHF().DeleteKeyHF((string)p.Data); + }); + + _handlers.Add(PacketCommand.CountHF, + (p, ret) => + { + ret.Data = _raptor.GetKVHF().CountHF(); + }); + + _handlers.Add(PacketCommand.ContainsHF, + (p, ret) => + { + ret.Data = _raptor.GetKVHF().ContainsHF((string)p.Data); + }); + + _handlers.Add(PacketCommand.GetKeysHF, + (p, ret) => + { + ret.Data = _raptor.GetKVHF().GetKeysHF(); + }); + + _handlers.Add(PacketCommand.CompactStorageHF, + (p, ret) => + { + _raptor.GetKVHF().CompactStorageHF(); + }); + } + + public delegate List ServerSideFuncAnonymous(object target, IRaptorDB rap, string filter); + public class ServerSideFuncInfo + { + public ServerSideFunc StaticFunc { get; set; } + public ServerSideFuncAnonymous InstanceFunc { get; set; } + + public ServerSideFunc GetFunc(object target = null) + { + return StaticFunc ?? ((rap, filter) => InstanceFunc(target, rap, filter)); + } + } + private ServerSideFuncInfo GetServerSideFuncCache(string type, string method) + { + ServerSideFuncInfo func; + _log.Debug("Calling Server side Function : " + method + " on type " + type); + if (_ssidecache.TryGetValue(type + method, out func) == false) + { + func = new ServerSideFuncInfo(); + Type tt = Type.GetType(type); + var methodInfo = Type.GetType(type).GetMethod(method, BindingFlags.Instance | BindingFlags.Static | BindingFlags.NonPublic | BindingFlags.Public); + if (methodInfo == null) throw new ArgumentException("specified method not found on type"); + if (!methodInfo.IsStatic) + { + var targetParEx = Expression.Parameter(typeof(object), "target"); + var rapParEx = Expression.Parameter(typeof(IRaptorDB), "rap"); + var filterParEx = Expression.Parameter(typeof(string), "filter"); + var callEx = Expression.Call(Expression.Convert(targetParEx, methodInfo.DeclaringType), methodInfo, rapParEx, filterParEx); + func.InstanceFunc = Expression.Lambda(callEx, targetParEx, rapParEx, filterParEx).Compile(); + } + else + { + var coreF = func.StaticFunc = 
(ServerSideFunc)Delegate.CreateDelegate(typeof(ServerSideFunc), methodInfo); + func.InstanceFunc = (t, r, f) => coreF(r, f); + } + _ssidecache.Add(type + method, func); + } + return func; + } + + private uint GenHash(string user, string pwd) + { + return Helper.MurMur.Hash(Encoding.UTF8.GetBytes(user.ToLower() + "|" + pwd)); + } + + private bool AddUser(string user, string oldpwd, string newpwd) + { + uint hash = 0; + if (_users.TryGetValue(user.ToLower(), out hash) == false) + { + _users.Add(user.ToLower(), GenHash(user, newpwd)); + return true; + } + if (hash == GenHash(user, oldpwd)) + { + _users[user.ToLower()] = GenHash(user, newpwd); + return true; + } + return false; + } + + private bool Authenticate(Packet p) + { + uint pwd; + if (_users.TryGetValue(p.Username.ToLower(), out pwd)) + { + uint hash = uint.Parse(p.PasswordHash); + if (hash == pwd) return true; + } + _log.Debug("Authentication failed for '" + p.Username + "' hash = " + p.PasswordHash); + return false; + } + + private void Initialize() + { + // load users here + if (File.Exists(_datapath + _RaptorDB_users_config)) + { + foreach (string line in File.ReadAllLines(_datapath + _RaptorDB_users_config)) + { + if (line.Contains("#") == false) + { + string[] s = line.Split(','); + _users.Add(s[0].Trim().ToLower(), uint.Parse(s[1].Trim())); + } + } + } + // add default admin user if not exists + if (_users.ContainsKey("admin") == false) + _users.Add("admin", GenHash("admin", "admin")); + + // exe folder + // |-Extensions + Directory.CreateDirectory(_path + _S + "Extensions"); + + // open extensions folder + string path = _path + _S + "Extensions"; + + foreach (var f in Directory.GetFiles(path, "*.dll")) + { + // - load all dll files + // - register views + _log.Debug("loading dll for views : " + f); + Assembly a = Assembly.Load(f); + foreach (var t in a.GetTypes()) + { + foreach (var att in t.GetCustomAttributes(typeof(RegisterViewAttribute), false)) + { + try + { + object o = 
Activator.CreateInstance(t); + // handle types when view also + Type[] args = t.GetGenericArguments(); + if (args.Length == 0) + args = t.BaseType.GetGenericArguments(); + Type tt = args[0]; + var m = register.MakeGenericMethod(new Type[] { tt }); + m.Invoke(_raptor, new object[] { o }); + } + catch (Exception ex) + { + _log.Error(ex); + } + } + } + } + + InitializeCommandsDictionary(); + } + } +} diff --git a/RaptorDB/Replication/Configuration.cs b/RaptorDB/Replication/Configuration.cs index 72364b6..25108e3 100644 --- a/RaptorDB/Replication/Configuration.cs +++ b/RaptorDB/Replication/Configuration.cs @@ -1,62 +1,62 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace RaptorDB.Replication -{ - public class WhereItem - { - public string BranchName; - public string Password; - public string What; - public string When; - } - - public class WhatItem - { - public WhatItem() - { - HQ2Btypes = new List(); - B2HQtypes = new List(); - } - public string Name; - public int Version = 1; - public bool PropogateHQDeletes = true; - public int PackageItemLimit = 10000; - public List HQ2Btypes; - public List B2HQtypes; - } - - public class ServerConfiguration - { - public ServerConfiguration() - { - Where = new List(); - What = new List(); - ReplicationPort = 9999; - } - public int ReplicationPort; - //public string EmbeddedClientHandler; - public List Where; - public List What; - } - - public enum REPMODE - { - Branch, - Server - } - - public class ClientConfiguration - { - public ClientConfiguration() - { - ServerReplicationPort = 9999; - } - public string ServerAddress = ""; - public int ServerReplicationPort; - public string Password = ""; - public string BranchName = ""; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Replication +{ + public class WhereItem + { + public string BranchName; + public string Password; + public string What; + public string 
When; + } + + public class WhatItem + { + public WhatItem() + { + HQ2Btypes = new List(); + B2HQtypes = new List(); + } + public string Name; + public int Version = 1; + public bool PropogateHQDeletes = true; + public int PackageItemLimit = 10000; + public List HQ2Btypes; + public List B2HQtypes; + } + + public class ServerConfiguration + { + public ServerConfiguration() + { + Where = new List(); + What = new List(); + ReplicationPort = 9999; + } + public int ReplicationPort; + //public string EmbeddedClientHandler; + public List Where; + public List What; + } + + public enum REPMODE + { + Branch, + Server + } + + public class ClientConfiguration + { + public ClientConfiguration() + { + ServerReplicationPort = 9999; + } + public string ServerAddress = ""; + public int ServerReplicationPort; + public string Password = ""; + public string BranchName = ""; + } +} diff --git a/RaptorDB/Replication/Packets.cs b/RaptorDB/Replication/Packets.cs index 975586d..5133388 100644 --- a/RaptorDB/Replication/Packets.cs +++ b/RaptorDB/Replication/Packets.cs @@ -1,19 +1,19 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace RaptorDB.Replication -{ - public class ReplicationPacket - { - //public int number; - public string passwordhash; - public string branchname;// source name - public uint datahash; - public string filename; - public object data; - public string command; - public int lastrecord; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Replication +{ + public class ReplicationPacket + { + //public int number; + public string passwordhash; + public string branchname;// source name + public uint datahash; + public string filename; + public object data; + public string command; + public int lastrecord; + } +} diff --git a/RaptorDB/Replication/Readme.txt b/RaptorDB/Replication/Readme.txt index 974cc4b..c7817da 100644 --- a/RaptorDB/Replication/Readme.txt 
+++ b/RaptorDB/Replication/Readme.txt @@ -1,40 +1,40 @@ -DATA Folder - | - |- Replication > (branch mode) - | |-: branch.dat - | | - . |- Inbox > - | |-: 0000000n.mgdat.gz, - | - |- Outbox > - - -- if inbox contains : "0000000n.counter" then error occurred and the text is in "0000000n.error.txt" -- you can skip the offending document if you increment the "counter" file (when you can't overcome the exception) -- files will be downloaded to the inbox folder in branch mode -- "branch.dat" in the "Replication" folder stores counter information for replication - - - -DATA Folder - | - |- Replication > (HQ mode) - | |-: BranchName1.last - | | - | |- Inbox > - | | | - . | |- BranchName1 > - | | |-: 0000000n.mgdat.gz - | | | - | - |- Outbox > - | | - | |- BranchName1 > - | |- BranchName2 > - - -- if inbox contains : "0000000n.counter" then error occurred and the text is in "0000000n.error.txt" -- you can skip the offending document if you increment the "counter" file (when you can't overcome the exception) -- files will be downloaded to the inbox folder in branch mode - - +DATA Folder + | + |- Replication > (branch mode) + | |-: branch.dat + | | + . |- Inbox > + | |-: 0000000n.mgdat.gz, + | + |- Outbox > + + +- if inbox contains : "0000000n.counter" then error occurred and the text is in "0000000n.error.txt" +- you can skip the offending document if you increment the "counter" file (when you can't overcome the exception) +- files will be downloaded to the inbox folder in branch mode +- "branch.dat" in the "Replication" folder stores counter information for replication + + + +DATA Folder + | + |- Replication > (HQ mode) + | |-: BranchName1.last + | | + | |- Inbox > + | | | + . 
| |- BranchName1 > + | | |-: 0000000n.mgdat.gz + | | | + | + |- Outbox > + | | + | |- BranchName1 > + | |- BranchName2 > + + +- if inbox contains : "0000000n.counter" then error occurred and the text is in "0000000n.error.txt" +- you can skip the offending document if you increment the "counter" file (when you can't overcome the exception) +- files will be downloaded to the inbox folder in branch mode + + diff --git a/RaptorDB/Replication/ReplicationClient.cs b/RaptorDB/Replication/ReplicationClient.cs index 0748318..87e70b4 100644 --- a/RaptorDB/Replication/ReplicationClient.cs +++ b/RaptorDB/Replication/ReplicationClient.cs @@ -1,285 +1,285 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.IO; -using System.IO.Compression; -using RaptorDB.Common; -using System.Text.RegularExpressions; - -namespace RaptorDB.Replication -{ - public class ClientRepConfig - { - public bool isConfigured; - public string whencron = "* * * * *"; - public WhatItem what; - public int lastHQCounter; - public int lastCounter; - public int outPackageNumber; - public int inPackageNumber; - public int lastPackageIndex; - } - - public class ClientWhatWhenConfig - { - public WhatItem what; - public string whencron; - } - - /// - /// Replication package processing is done in RaptorDB.cs - /// - internal class ReplicationClient - { - public ReplicationClient(string dataFolder, string config, IDocStorage docs) - { - _log.Debug("starting replication client : " + dataFolder); - _docs = docs; - _path = dataFolder; - // read client config file - _config = fastJSON.JSON.ToObject(config); - Initialize(); - } - - private void Initialize() - { - Directory.CreateDirectory(_path + "Replication"); - Directory.CreateDirectory(_path + "Replication" + _S + "Inbox"); - Directory.CreateDirectory(_path + "Replication" + _S + "Outbox"); - _InboxPath = _path + "Replication" + _S + "Inbox" + _S; - _OutboxPath = _path + "Replication" + _S + "Outbox" + _S; - // 
setup cron job - _cron = new CronDaemon(); - - _clientConfig = new ClientRepConfig(); - // read what config - if (File.Exists(_path + "Replication" + _S + "branch.dat")) - _clientConfig = fastBinaryJSON.BJSON.ToObject(File.ReadAllBytes(_path + "Replication" + _S + "branch.dat")); - // starting jobs - _cron.AddJob(_clientConfig.whencron, Replicate); - } - - - private ILog _log = LogManager.GetLogger(typeof(ReplicationClient)); - IDocStorage _docs; - private CronDaemon _cron; - private string _S = Path.DirectorySeparatorChar.ToString(); - private NetworkClient _client; - private Replication.ClientConfiguration _config; - private ClientRepConfig _clientConfig; - private string _path; - private string _OutboxPath; - private string _InboxPath; - private int INTERNALLIMIT = Global.PackageSizeItemCountLimit; - - public void Shutdown() - { - if (_cron != null) - _cron.Stop(); - - SaveConfig(); - } - - private void SaveConfig() - { - if (_clientConfig == null) - return; - if (_clientConfig.isConfigured == false) - return; - // write config to disk - byte[] b = fastBinaryJSON.BJSON.ToBJSON(_clientConfig); - File.WriteAllBytes(_path + "Replication" + _S + "branch.dat", b); - } - - private object _lock = new object(); - private void Replicate() - { - lock (_lock) - { - try - { - if (ConnectToHQ()) - { - SendPackageToHQ(); - GetPackageFormHQ(); - } - } - catch (Exception ex) - { - _log.Error(ex); - } - finally - { - if (_client != null) - { - _client.Close(); - _client = null; - } - } - } - } - - private void GetPackageFormHQ() - { - ReplicationPacket p = createpacket(); - p.command = "getpackageforbranch"; - p.lastrecord = _clientConfig.lastHQCounter; - ReturnPacket ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - { - if (ret.Data != null) - { - ReplicationPacket pack = (ReplicationPacket)ret.Data; - - if (pack.datahash == Helper.MurMur.Hash((byte[])pack.data)) - { - _log.Debug("package recieved from server : " + pack.filename); - _log.Debug("package size : " + (pack.data 
as byte[]).Length.ToString("#,0")); - File.WriteAllBytes(_InboxPath + pack.filename, (byte[])pack.data); - p = createpacket(); - p.command = "hqpackageok"; - p.filename = pack.filename; - p.lastrecord = pack.lastrecord; - _clientConfig.lastHQCounter = pack.lastrecord; - SaveConfig(); - ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - return; - } - } - } - } - - private void SendPackageToHQ() - { - string fn = CreatePackageForSend(); - if (fn != "") - { - ReplicationPacket p = createpacket(); - p.command = "packageforhq"; - p.data = File.ReadAllBytes(fn); - p.datahash = Helper.MurMur.Hash((byte[])p.data); - ReturnPacket ret = (ReturnPacket)_client.Send(p); - string path = Path.GetDirectoryName(fn); - string fnn = Path.GetFileNameWithoutExtension(fn); - foreach (var f in Directory.GetFiles(path, fnn + ".*")) - File.Delete(f); - } - } - - private ReplicationPacket createpacket() - { - ReplicationPacket p = new ReplicationPacket(); - p.branchname = _config.BranchName; - p.passwordhash = Helper.MurMur.Hash(Helper.GetBytes(_config.BranchName + "|" + _config.Password)).ToString(); - return p; - } - - private bool ConnectToHQ() - { - - if (_client == null) - { - _client = new NetworkClient(_config.ServerAddress, _config.ServerReplicationPort); - } - // authenticate and get branch config - ReplicationPacket p = createpacket(); - p.command = "getbranchconfig"; - - ReturnPacket ret = (ReturnPacket)_client.Send(p); - if (ret.OK) - { - ClientWhatWhenConfig c = (ClientWhatWhenConfig)ret.Data; - - _clientConfig.what = c.what; - - _clientConfig.isConfigured = true; - - if (_clientConfig.whencron != c.whencron) - { - _cron.Stop(); - _clientConfig.whencron = c.whencron; - _cron = new CronDaemon(); - _cron.AddJob(_clientConfig.whencron, Replicate); - } - - SaveConfig(); - } - return ret.OK; - } - - private string CreatePackageForSend() - { - int maxc = INTERNALLIMIT; - if (_clientConfig.what.PackageItemLimit > 0) - maxc = _clientConfig.what.PackageItemLimit; - string outFolder = 
_OutboxPath; - int packageNumber = _clientConfig.outPackageNumber; - int i = _clientConfig.lastCounter; - string filename = outFolder + packageNumber.ToString("0000000000") + ".mgdat"; - int total = _docs.RecordCount(); - if (i < total) - { - StorageFile package = new StorageFile(filename, SF_FORMAT.JSON, true); - while (maxc > 0 && i < total) - { - var meta = _docs.GetMeta(i); - if (meta == null) - break; - if (meta.isReplicated == false && MatchType(meta.typename)) - { - object obj = _docs.GetObject(i, out meta); - package.WriteObject(meta.key, obj); - maxc--; - } - - i++; - } - package.Shutdown(); - packageNumber++; - // compress the file - using (FileStream read = File.OpenRead(filename)) - using (FileStream outp = File.Create(filename + ".gz")) - CompressForBackup(read, outp); - - // delete uncompressed file - File.Delete(filename); - - _clientConfig.lastCounter = i; - _clientConfig.outPackageNumber = packageNumber; - SaveConfig(); - return filename + ".gz"; - } - return ""; - } - - private bool MatchType(string typename) - { - // match type filter - foreach (var i in _clientConfig.what.B2HQtypes) - { - // do wildcard search - Regex reg = new Regex("^" + i.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); - if (reg.IsMatch(typename)) - return true; - } - - return false; - } - - private static void CompressForBackup(Stream source, Stream destination) - { - using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) - PumpDataForBackup(source, gz); - } - - private static void PumpDataForBackup(Stream input, Stream output) - { - byte[] bytes = new byte[4096 * 2]; - int n; - while ((n = input.Read(bytes, 0, bytes.Length)) != 0) - output.Write(bytes, 0, n); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.IO; +using System.IO.Compression; +using RaptorDB.Common; +using System.Text.RegularExpressions; + +namespace RaptorDB.Replication +{ + public class ClientRepConfig + 
{ + public bool isConfigured; + public string whencron = "* * * * *"; + public WhatItem what; + public int lastHQCounter; + public int lastCounter; + public int outPackageNumber; + public int inPackageNumber; + public int lastPackageIndex; + } + + public class ClientWhatWhenConfig + { + public WhatItem what; + public string whencron; + } + + /// + /// Replication package processing is done in RaptorDB.cs + /// + internal class ReplicationClient + { + public ReplicationClient(string dataFolder, string config, IDocStorage docs) + { + _log.Debug("starting replication client : " + dataFolder); + _docs = docs; + _path = dataFolder; + // read client config file + _config = fastJSON.JSON.ToObject(config); + Initialize(); + } + + private void Initialize() + { + Directory.CreateDirectory(_path + "Replication"); + Directory.CreateDirectory(_path + "Replication" + _S + "Inbox"); + Directory.CreateDirectory(_path + "Replication" + _S + "Outbox"); + _InboxPath = _path + "Replication" + _S + "Inbox" + _S; + _OutboxPath = _path + "Replication" + _S + "Outbox" + _S; + // setup cron job + _cron = new CronDaemon(); + + _clientConfig = new ClientRepConfig(); + // read what config + if (File.Exists(_path + "Replication" + _S + "branch.dat")) + _clientConfig = fastBinaryJSON.BJSON.ToObject(File.ReadAllBytes(_path + "Replication" + _S + "branch.dat")); + // starting jobs + _cron.AddJob(_clientConfig.whencron, Replicate); + } + + + private ILog _log = LogManager.GetLogger(typeof(ReplicationClient)); + IDocStorage _docs; + private CronDaemon _cron; + private string _S = Path.DirectorySeparatorChar.ToString(); + private NetworkClient _client; + private Replication.ClientConfiguration _config; + private ClientRepConfig _clientConfig; + private string _path; + private string _OutboxPath; + private string _InboxPath; + private int INTERNALLIMIT = Global.PackageSizeItemCountLimit; + + public void Shutdown() + { + if (_cron != null) + _cron.Stop(); + + SaveConfig(); + } + + private void 
SaveConfig() + { + if (_clientConfig == null) + return; + if (_clientConfig.isConfigured == false) + return; + // write config to disk + byte[] b = fastBinaryJSON.BJSON.ToBJSON(_clientConfig); + File.WriteAllBytes(_path + "Replication" + _S + "branch.dat", b); + } + + private object _lock = new object(); + private void Replicate() + { + lock (_lock) + { + try + { + if (ConnectToHQ()) + { + SendPackageToHQ(); + GetPackageFormHQ(); + } + } + catch (Exception ex) + { + _log.Error(ex); + } + finally + { + if (_client != null) + { + _client.Close(); + _client = null; + } + } + } + } + + private void GetPackageFormHQ() + { + ReplicationPacket p = createpacket(); + p.command = "getpackageforbranch"; + p.lastrecord = _clientConfig.lastHQCounter; + ReturnPacket ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + { + if (ret.Data != null) + { + ReplicationPacket pack = (ReplicationPacket)ret.Data; + + if (pack.datahash == Helper.MurMur.Hash((byte[])pack.data)) + { + _log.Debug("package recieved from server : " + pack.filename); + _log.Debug("package size : " + (pack.data as byte[]).Length.ToString("#,0")); + File.WriteAllBytes(_InboxPath + pack.filename, (byte[])pack.data); + p = createpacket(); + p.command = "hqpackageok"; + p.filename = pack.filename; + p.lastrecord = pack.lastrecord; + _clientConfig.lastHQCounter = pack.lastrecord; + SaveConfig(); + ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + return; + } + } + } + } + + private void SendPackageToHQ() + { + string fn = CreatePackageForSend(); + if (!string.IsNullOrEmpty(fn)) + { + ReplicationPacket p = createpacket(); + p.command = "packageforhq"; + p.data = File.ReadAllBytes(fn); + p.datahash = Helper.MurMur.Hash((byte[])p.data); + ReturnPacket ret = (ReturnPacket)_client.Send(p); + string path = Path.GetDirectoryName(fn); + string fnn = Path.GetFileNameWithoutExtension(fn); + foreach (var f in Directory.GetFiles(path, fnn + ".*")) + File.Delete(f); + } + } + + private ReplicationPacket createpacket() + { + 
ReplicationPacket p = new ReplicationPacket(); + p.branchname = _config.BranchName; + p.passwordhash = Helper.MurMur.Hash(Helper.GetBytes(_config.BranchName + "|" + _config.Password)).ToString(); + return p; + } + + private bool ConnectToHQ() + { + + if (_client == null) + { + _client = new NetworkClient(_config.ServerAddress, _config.ServerReplicationPort); + } + // authenticate and get branch config + ReplicationPacket p = createpacket(); + p.command = "getbranchconfig"; + + ReturnPacket ret = (ReturnPacket)_client.Send(p); + if (ret.OK) + { + ClientWhatWhenConfig c = (ClientWhatWhenConfig)ret.Data; + + _clientConfig.what = c.what; + + _clientConfig.isConfigured = true; + + if (_clientConfig.whencron != c.whencron) + { + _cron.Stop(); + _clientConfig.whencron = c.whencron; + _cron = new CronDaemon(); + _cron.AddJob(_clientConfig.whencron, Replicate); + } + + SaveConfig(); + } + return ret.OK; + } + + private string CreatePackageForSend() + { + int maxc = INTERNALLIMIT; + if (_clientConfig.what.PackageItemLimit > 0) + maxc = _clientConfig.what.PackageItemLimit; + string outFolder = _OutboxPath; + int packageNumber = _clientConfig.outPackageNumber; + int i = _clientConfig.lastCounter; + string filename = outFolder + packageNumber.ToString("0000000000") + ".mgdat"; + int total = _docs.RecordCount(); + if (i < total) + { + StorageFile package = new StorageFile(filename, SF_FORMAT.JSON, true); + while (maxc > 0 && i < total) + { + var meta = _docs.GetMeta(i); + if (meta == null) + break; + if (meta.isReplicated == false && MatchType(meta.typename)) + { + object obj = _docs.GetObject(i, out meta); + package.WriteObject(meta.key, obj); + maxc--; + } + + i++; + } + package.Shutdown(); + packageNumber++; + // compress the file + using (FileStream read = File.OpenRead(filename)) + using (FileStream outp = File.Create(filename + ".gz")) + CompressForBackup(read, outp); + + // delete uncompressed file + File.Delete(filename); + + _clientConfig.lastCounter = i; + 
_clientConfig.outPackageNumber = packageNumber; + SaveConfig(); + return filename + ".gz"; + } + return null; + } + + private bool MatchType(string typename) + { + // match type filter + foreach (var i in _clientConfig.what.B2HQtypes) + { + // do wildcard search + Regex reg = new Regex("^" + i.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); + if (reg.IsMatch(typename)) + return true; + } + + return false; + } + + private static void CompressForBackup(Stream source, Stream destination) + { + using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) + PumpDataForBackup(source, gz); + } + + private static void PumpDataForBackup(Stream input, Stream output) + { + byte[] bytes = new byte[4096 * 2]; + int n; + while ((n = input.Read(bytes, 0, bytes.Length)) != 0) + output.Write(bytes, 0, n); + } + } +} diff --git a/RaptorDB/Replication/ReplicationServer.cs b/RaptorDB/Replication/ReplicationServer.cs index 0c13d5f..8e1dc3f 100644 --- a/RaptorDB/Replication/ReplicationServer.cs +++ b/RaptorDB/Replication/ReplicationServer.cs @@ -1,261 +1,261 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using RaptorDB.Common; -using System.IO; -using System.IO.Compression; -using System.Text.RegularExpressions; - -namespace RaptorDB.Replication -{ - internal class ReplicationServer - { - public ReplicationServer(string datapath, string config, IDocStorage docs) - { - _docs = docs; - _Path = datapath; - Initialize(config); - } - IDocStorage _docs; - private string _S = Path.DirectorySeparatorChar.ToString(); - private string _Path; - private ILog _log = LogManager.GetLogger(typeof(ReplicationServer)); - private ServerConfiguration _config; - private NetworkServer _server; - private string _InboxPath; - private string _OutboxPath; - private int INTERNALLIMIT = Global.PackageSizeItemCountLimit; - private SafeDictionary _branchLastDocs = new SafeDictionary(); - - - private void Initialize(string config) - { - 
_log.Debug("Starting replication server..."); - Directory.CreateDirectory(_Path + "Replication"); - Directory.CreateDirectory(_Path + "Replication" + _S + "Inbox"); - Directory.CreateDirectory(_Path + "Replication" + _S + "Outbox"); - _InboxPath = _Path + "Replication" + _S + "Inbox"; - _OutboxPath = _Path + "Replication" + _S + "Outbox"; - - _config = fastJSON.JSON.ToObject(config); - if (_config == null) - { - _log.Error("unable to read the configuration for replication, check the config file"); - return; - } - - // read branch lastdoc counts here - foreach (var b in _config.Where) - { - int i = -1; - if (File.Exists(_Path + "Replication" + _S + b.BranchName + ".last")) - i = Helper.ToInt32(File.ReadAllBytes(_Path + "Replication" + _S + b.BranchName + ".last"), 0); - Directory.CreateDirectory(_Path + "Replication" + _S + "Inbox" + _S + b.BranchName); - Directory.CreateDirectory(_Path + "Replication" + _S + "Outbox" + _S + b.BranchName); - _branchLastDocs.Add(b.BranchName.ToLower(), i); - } - - _server = new NetworkServer(); - _server.Start(_config.ReplicationPort, processpayload); - } - - public void Shutdown() - { - WriteBranchCounters(); - // shutdown every thing - _server.Stop(); - } - - private void WriteBranchCounters() - { - // write branch counts etc. 
to disk - foreach (var b in _branchLastDocs) - { - File.WriteAllBytes(_Path + "Replication" + _S + b.Key + ".last", Helper.GetBytes(b.Value, false)); - _log.Debug("last counter for branch : " + b.Key + " = " + b.Value); - } - } - - private object processpayload(object data) - { - ReplicationPacket p = (ReplicationPacket)data; - - if (Authenticate(p) == false) - return new ReturnPacket(false, "Authentication failed"); - - ReturnPacket ret = new ReturnPacket(true); - try - { - switch (p.command) - { - case "getbranchconfig": - ret.OK = true; - ret.Data = GetBranchConfig(p.branchname); - break; - case "getpackageforbranch": - ret.OK = true; - ReplicationPacket pack = GetPackageForBranch(p); - ret.Data = pack; - break; - case "packageforhq": - ret.OK = PackageForHQ(p); - break; - case "hqpackageok": - ret.OK = true; - File.Delete(_OutboxPath + _S + p.branchname + _S + p.filename); - // set last rec on hq - _branchLastDocs.Add(p.branchname.ToLower(), p.lastrecord); - WriteBranchCounters(); - break; - } - } - catch (Exception ex) - { - ret.OK = false; - _log.Error(ex); - } - return ret; - } - - - private ClientWhatWhenConfig GetBranchConfig(string branchname) - { - WhatItem ret = _config.What.Find((WhatItem w) => { return w.Name.ToLower() == branchname.ToLower(); }); - - if (ret == null) - ret = _config.What.Find((WhatItem w) => { return w.Name.ToLower() == "default"; }); - - ClientWhatWhenConfig c = new ClientWhatWhenConfig(); - c.what = ret; - var where = _config.Where.Find(w => { return w.BranchName.ToLower() == branchname.ToLower(); }); - if (where != null) - c.whencron = where.When; - else - c.whencron = "* * * * *"; - - return c; - } - - private bool PackageForHQ(ReplicationPacket p) - { - uint hash = Helper.MurMur.Hash((byte[])p.data); - if (hash != p.datahash) - return false; - // save file to \replication\inbox\branchname - Directory.CreateDirectory(_InboxPath + _S + p.branchname); - string fn = _InboxPath + _S + p.branchname + _S + p.filename; - 
_log.Debug("package recieved from : " + p.branchname); - _log.Debug("package name : " + p.filename); - _log.Debug("package size : " + (p.data as byte[]).Length.ToString("#,0")); - File.WriteAllBytes(fn, (byte[])p.data); - return true; - } - - private ReplicationPacket GetPackageForBranch(ReplicationPacket packet) - { - int last = _branchLastDocs[packet.branchname.ToLower()]; - // skip retry for the same package - if (packet.lastrecord >= _branchLastDocs[packet.branchname.ToLower()]) - { - string fn = CreatePackageForSend(packet, out last); - ReplicationPacket p = new ReplicationPacket(); - p.filename = Path.GetFileName(fn); - p.data = File.ReadAllBytes(fn); - p.datahash = Helper.MurMur.Hash((byte[])p.data); - p.lastrecord = last; - return p; - } - else - return null; - } - - private bool Authenticate(ReplicationPacket p) - { - uint pwd = uint.Parse(p.passwordhash); - bool auth = false; - foreach (var w in _config.Where) - { - uint hash = Helper.MurMur.Hash(Helper.GetBytes(w.BranchName + "|" + w.Password)); - if (hash == pwd) auth = true; - } - if (auth == false) - _log.Debug("Authentication failed for '" + p.branchname + "' hash = " + p.passwordhash); - return auth; - } - - private string CreatePackageForSend(ReplicationPacket packet, out int last) - { - int maxc = INTERNALLIMIT; - WhatItem what = GetBranchConfig(packet.branchname).what; - if (what.PackageItemLimit > 0) - maxc = what.PackageItemLimit; - string outFolder = _OutboxPath; - int packageNumber = packet.lastrecord; - int i = packet.lastrecord; - string filename = outFolder + _S + packet.branchname + _S + packageNumber.ToString("0000000000") + ".mgdat"; - - if (i < _docs.RecordCount()) - { - StorageFile package = new StorageFile(filename, SF_FORMAT.JSON, true); - while (maxc > 0) - { - var meta = _docs.GetMeta(i); - if (meta == null) - break; - if (meta.isReplicated == false && MatchType(meta.typename, what)) - { - if (meta.isDeleted == false || what.PropogateHQDeletes) - { - object obj = 
_docs.GetObject(i, out meta); - package.WriteObject(meta.key, obj); - maxc--; - } - } - - i++; - } - package.Shutdown(); - packageNumber++; - // compress the file - using (FileStream read = File.OpenRead(filename)) - using (FileStream outp = File.Create(filename + ".gz")) - CompressForBackup(read, outp); - - // delete uncompressed file - File.Delete(filename); - } - - last = i; - return filename + ".gz"; - } - - private bool MatchType(string typename, WhatItem what) - { - // match type filter - foreach (var i in what.HQ2Btypes) - { - // do wildcard search - Regex reg = new Regex("^" + i.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); - if (reg.IsMatch(typename)) - return true; - } - - return false; - } - - private static void CompressForBackup(Stream source, Stream destination) - { - using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) - PumpDataForBackup(source, gz); - } - - private static void PumpDataForBackup(Stream input, Stream output) - { - byte[] bytes = new byte[4096 * 2]; - int n; - while ((n = input.Read(bytes, 0, bytes.Length)) != 0) - output.Write(bytes, 0, n); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using RaptorDB.Common; +using System.IO; +using System.IO.Compression; +using System.Text.RegularExpressions; + +namespace RaptorDB.Replication +{ + internal class ReplicationServer + { + public ReplicationServer(string datapath, string config, IDocStorage docs) + { + _docs = docs; + _Path = datapath; + Initialize(config); + } + IDocStorage _docs; + private string _S = Path.DirectorySeparatorChar.ToString(); + private string _Path; + private ILog _log = LogManager.GetLogger(typeof(ReplicationServer)); + private ServerConfiguration _config; + private NetworkServer _server; + private string _InboxPath; + private string _OutboxPath; + private int INTERNALLIMIT = Global.PackageSizeItemCountLimit; + private SafeDictionary _branchLastDocs = new 
SafeDictionary(); + + + private void Initialize(string config) + { + _log.Debug("Starting replication server..."); + Directory.CreateDirectory(_Path + "Replication"); + Directory.CreateDirectory(_Path + "Replication" + _S + "Inbox"); + Directory.CreateDirectory(_Path + "Replication" + _S + "Outbox"); + _InboxPath = _Path + "Replication" + _S + "Inbox"; + _OutboxPath = _Path + "Replication" + _S + "Outbox"; + + _config = fastJSON.JSON.ToObject(config); + if (_config == null) + { + _log.Error("unable to read the configuration for replication, check the config file"); + return; + } + + // read branch lastdoc counts here + foreach (var b in _config.Where) + { + int i = -1; + if (File.Exists(_Path + "Replication" + _S + b.BranchName + ".last")) + i = Helper.ToInt32(File.ReadAllBytes(_Path + "Replication" + _S + b.BranchName + ".last"), 0); + Directory.CreateDirectory(_Path + "Replication" + _S + "Inbox" + _S + b.BranchName); + Directory.CreateDirectory(_Path + "Replication" + _S + "Outbox" + _S + b.BranchName); + _branchLastDocs.Add(b.BranchName.ToLower(), i); + } + + _server = new NetworkServer(); + _server.Start(_config.ReplicationPort, processpayload); + } + + public void Shutdown() + { + WriteBranchCounters(); + // shutdown every thing + _server.Stop(); + } + + private void WriteBranchCounters() + { + // write branch counts etc. 
to disk + foreach (var b in _branchLastDocs) + { + File.WriteAllBytes(_Path + "Replication" + _S + b.Key + ".last", Helper.GetBytes(b.Value, false)); + _log.Debug("last counter for branch : " + b.Key + " = " + b.Value); + } + } + + private object processpayload(object data) + { + ReplicationPacket p = (ReplicationPacket)data; + + if (Authenticate(p) == false) + return new ReturnPacket(false, "Authentication failed"); + + ReturnPacket ret = new ReturnPacket(true); + try + { + switch (p.command) + { + case "getbranchconfig": + ret.OK = true; + ret.Data = GetBranchConfig(p.branchname); + break; + case "getpackageforbranch": + ret.OK = true; + ReplicationPacket pack = GetPackageForBranch(p); + ret.Data = pack; + break; + case "packageforhq": + ret.OK = PackageForHQ(p); + break; + case "hqpackageok": + ret.OK = true; + File.Delete(_OutboxPath + _S + p.branchname + _S + p.filename); + // set last rec on hq + _branchLastDocs.Add(p.branchname.ToLower(), p.lastrecord); + WriteBranchCounters(); + break; + } + } + catch (Exception ex) + { + ret.OK = false; + _log.Error(ex); + } + return ret; + } + + + private ClientWhatWhenConfig GetBranchConfig(string branchname) + { + WhatItem ret = _config.What.Find((WhatItem w) => { return w.Name.ToLower() == branchname.ToLower(); }); + + if (ret == null) + ret = _config.What.Find((WhatItem w) => { return w.Name.ToLower() == "default"; }); + + ClientWhatWhenConfig c = new ClientWhatWhenConfig(); + c.what = ret; + var where = _config.Where.Find(w => { return w.BranchName.ToLower() == branchname.ToLower(); }); + if (where != null) + c.whencron = where.When; + else + c.whencron = "* * * * *"; + + return c; + } + + private bool PackageForHQ(ReplicationPacket p) + { + uint hash = Helper.MurMur.Hash((byte[])p.data); + if (hash != p.datahash) + return false; + // save file to \replication\inbox\branchname + Directory.CreateDirectory(_InboxPath + _S + p.branchname); + string fn = _InboxPath + _S + p.branchname + _S + p.filename; + 
_log.Debug("package recieved from : " + p.branchname); + _log.Debug("package name : " + p.filename); + _log.Debug("package size : " + (p.data as byte[]).Length.ToString("#,0")); + File.WriteAllBytes(fn, (byte[])p.data); + return true; + } + + private ReplicationPacket GetPackageForBranch(ReplicationPacket packet) + { + int last = _branchLastDocs[packet.branchname.ToLower()]; + // skip retry for the same package + if (packet.lastrecord >= _branchLastDocs[packet.branchname.ToLower()]) + { + string fn = CreatePackageForSend(packet, out last); + ReplicationPacket p = new ReplicationPacket(); + p.filename = Path.GetFileName(fn); + p.data = File.ReadAllBytes(fn); + p.datahash = Helper.MurMur.Hash((byte[])p.data); + p.lastrecord = last; + return p; + } + else + return null; + } + + private bool Authenticate(ReplicationPacket p) + { + uint pwd = uint.Parse(p.passwordhash); + bool auth = false; + foreach (var w in _config.Where) + { + uint hash = Helper.MurMur.Hash(Helper.GetBytes(w.BranchName + "|" + w.Password)); + if (hash == pwd) auth = true; + } + if (auth == false) + _log.Debug("Authentication failed for '" + p.branchname + "' hash = " + p.passwordhash); + return auth; + } + + private string CreatePackageForSend(ReplicationPacket packet, out int last) + { + int maxc = INTERNALLIMIT; + WhatItem what = GetBranchConfig(packet.branchname).what; + if (what.PackageItemLimit > 0) + maxc = what.PackageItemLimit; + string outFolder = _OutboxPath; + int packageNumber = packet.lastrecord; + int i = packet.lastrecord; + string filename = outFolder + _S + packet.branchname + _S + packageNumber.ToString("0000000000") + ".mgdat"; + + if (i < _docs.RecordCount()) + { + StorageFile package = new StorageFile(filename, SF_FORMAT.JSON, true); + while (maxc > 0) + { + var meta = _docs.GetMeta(i); + if (meta == null) + break; + if (meta.isReplicated == false && MatchType(meta.typename, what)) + { + if (meta.isDeleted == false || what.PropogateHQDeletes) + { + object obj = 
_docs.GetObject(i, out meta); + package.WriteObject(meta.key, obj); + maxc--; + } + } + + i++; + } + package.Shutdown(); + packageNumber++; + // compress the file + using (FileStream read = File.OpenRead(filename)) + using (FileStream outp = File.Create(filename + ".gz")) + CompressForBackup(read, outp); + + // delete uncompressed file + File.Delete(filename); + } + + last = i; + return filename + ".gz"; + } + + private bool MatchType(string typename, WhatItem what) + { + // match type filter + foreach (var i in what.HQ2Btypes) + { + // do wildcard search + Regex reg = new Regex("^" + i.Replace("*", ".*").Replace("?", "."), RegexOptions.IgnoreCase); + if (reg.IsMatch(typename)) + return true; + } + + return false; + } + + private static void CompressForBackup(Stream source, Stream destination) + { + using (GZipStream gz = new GZipStream(destination, CompressionMode.Compress)) + PumpDataForBackup(source, gz); + } + + private static void PumpDataForBackup(Stream input, Stream output) + { + byte[] bytes = new byte[4096 * 2]; + int n; + while ((n = input.Read(bytes, 0, bytes.Length)) != 0) + output.Write(bytes, 0, n); + } + } +} diff --git a/RaptorDB/Replication/msg.txt b/RaptorDB/Replication/msg.txt index adfc930..61658d1 100644 --- a/RaptorDB/Replication/msg.txt +++ b/RaptorDB/Replication/msg.txt @@ -1,9 +1,9 @@ -The following error occurred and the json document is below, you can skip this -document if you wish by incrementing the %c% file : - -%ex% ------------------------------------------------------------------------------- -The json document is : - -%js% - +The following error occurred and the json document is below, you can skip this +document if you wish by incrementing the %c% file : + +%ex% +------------------------------------------------------------------------------ +The json document is : + +%js% + diff --git a/RaptorDB/Storage/KeyStore.cs b/RaptorDB/Storage/KeyStore.cs index 95bafbd..c139901 100644 --- a/RaptorDB/Storage/KeyStore.cs +++ 
b/RaptorDB/Storage/KeyStore.cs @@ -1,612 +1,595 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.IO; -using RaptorDB.Common; - -namespace RaptorDB -{ - #region [ KeyStoreString ] - internal class KeyStoreString : IDisposable - { - public KeyStoreString(string filename, bool caseSensitve) - { - _db = KeyStore.Open(filename, true); - _caseSensitive = caseSensitve; - } - bool _caseSensitive = false; - - KeyStore _db; - - - public void Set(string key, string val) - { - Set(key, Encoding.Unicode.GetBytes(val)); - } - - public void Set(string key, byte[] val) - { - string str = (_caseSensitive ? key : key.ToLower()); - byte[] bkey = Encoding.Unicode.GetBytes(str); - int hc = (int)Helper.MurMur.Hash(bkey); - MemoryStream ms = new MemoryStream(); - ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); - ms.Write(bkey, 0, bkey.Length); - ms.Write(val, 0, val.Length); - - _db.SetBytes(hc, ms.ToArray()); - } - - public bool Get(string key, out string val) - { - val = null; - byte[] bval; - bool b = Get(key, out bval); - if (b) - { - val = Encoding.Unicode.GetString(bval); - } - return b; - } - - public bool Get(string key, out byte[] val) - { - string str = (_caseSensitive ? 
key : key.ToLower()); - val = null; - byte[] bkey = Encoding.Unicode.GetBytes(str); - int hc = (int)Helper.MurMur.Hash(bkey); - - if (_db.GetBytes(hc, out val)) - { - // unpack data - byte[] g = null; - if (UnpackData(val, out val, out g)) - { - if (Helper.CompareMemCmp(bkey, g) != 0) - { - // if data not equal check duplicates (hash conflict) - List ints = new List(_db.GetDuplicates(hc)); - ints.Reverse(); - foreach (int i in ints) - { - byte[] bb = _db.FetchRecordBytes(i); - if (UnpackData(bb, out val, out g)) - { - if (Helper.CompareMemCmp(bkey, g) == 0) - return true; - } - } - return false; - } - return true; - } - } - return false; - } - - public int Count() - { - return (int)_db.Count(); - } - - public int RecordCount() - { - return (int)_db.RecordCount(); - } - - public void SaveIndex() - { - _db.SaveIndex(); - } - - public void Shutdown() - { - _db.Shutdown(); - } - - public void Dispose() - { - _db.Shutdown(); - } - - private bool UnpackData(byte[] buffer, out byte[] val, out byte[] key) - { - int len = Helper.ToInt32(buffer, 0, false); - key = new byte[len]; - Buffer.BlockCopy(buffer, 4, key, 0, len); - val = new byte[buffer.Length - 4 - len]; - Buffer.BlockCopy(buffer, 4 + len, val, 0, buffer.Length - 4 - len); - - return true; - } - - public string ReadData(int recnumber) - { - byte[] val; - byte[] key; - byte[] b = _db.FetchRecordBytes(recnumber); - if (UnpackData(b, out val, out key)) - { - return Encoding.Unicode.GetString(val); - } - return ""; - } - - internal void FreeMemory() - { - _db.FreeMemory(); - } - } - #endregion - - #region [ KeyStoreGuid removed ] - //internal class KeyStoreGuid : IDisposable //, IDocStorage - //{ - // public KeyStoreGuid(string filename) - // { - // _db = KeyStore.Open(filename, true); - // } - - // KeyStore _db; - - // public void Set(Guid key, string val) - // { - // Set(key, Encoding.Unicode.GetBytes(val)); - // } - - // public int Set(Guid key, byte[] val) - // { - // byte[] bkey = key.ToByteArray(); - // int hc = 
(int)Helper.MurMur.Hash(bkey); - // MemoryStream ms = new MemoryStream(); - // ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); - // ms.Write(bkey, 0, bkey.Length); - // ms.Write(val, 0, val.Length); - - // return _db.SetBytes(hc, ms.ToArray()); - // } - - // public bool Get(Guid key, out string val) - // { - // val = null; - // byte[] bval; - // bool b = Get(key, out bval); - // if (b) - // { - // val = Encoding.Unicode.GetString(bval); - // } - // return b; - // } - - // public bool Get(Guid key, out byte[] val) - // { - // val = null; - // byte[] bkey = key.ToByteArray(); - // int hc = (int)Helper.MurMur.Hash(bkey); - - // if (_db.Get(hc, out val)) - // { - // // unpack data - // byte[] g = null; - // if (UnpackData(val, out val, out g)) - // { - // if (Helper.CompareMemCmp(bkey, g) != 0) - // { - // // if data not equal check duplicates (hash conflict) - // List ints = new List(_db.GetDuplicates(hc)); - // ints.Reverse(); - // foreach (int i in ints) - // { - // byte[] bb = _db.FetchRecordBytes(i); - // if (UnpackData(bb, out val, out g)) - // { - // if (Helper.CompareMemCmp(bkey, g) == 0) - // return true; - // } - // } - // return false; - // } - // return true; - // } - // } - // return false; - // } - - // public void SaveIndex() - // { - // _db.SaveIndex(); - // } - - // public void Shutdown() - // { - // _db.Shutdown(); - // } - - // public void Dispose() - // { - // _db.Shutdown(); - // } - - // public byte[] FetchRecordBytes(int record) - // { - // return _db.FetchRecordBytes(record); - // } - - // public int Count() - // { - // return (int)_db.Count(); - // } - - // public int RecordCount() - // { - // return (int)_db.RecordCount(); - // } - - // private bool UnpackData(byte[] buffer, out byte[] val, out byte[] key) - // { - // int len = Helper.ToInt32(buffer, 0, false); - // key = new byte[len]; - // Buffer.BlockCopy(buffer, 4, key, 0, len); - // val = new byte[buffer.Length - 4 - len]; - // Buffer.BlockCopy(buffer, 4 + len, val, 0, buffer.Length 
- 4 - len); - - // return true; - // } - - // internal byte[] Get(int recnumber, out Guid docid) - // { - // bool isdeleted = false; - // return Get(recnumber, out docid, out isdeleted); - // } - - // public bool RemoveKey(Guid key) - // { - // byte[] bkey = key.ToByteArray(); - // int hc = (int)Helper.MurMur.Hash(bkey); - // MemoryStream ms = new MemoryStream(); - // ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); - // ms.Write(bkey, 0, bkey.Length); - // return _db.Delete(hc, ms.ToArray()); - // } - - // public byte[] Get(int recnumber, out Guid docid, out bool isdeleted) - // { - // docid = Guid.Empty; - // byte[] buffer = _db.FetchRecordBytes(recnumber, out isdeleted); - // if (buffer == null) return null; - // if (buffer.Length == 0) return null; - // byte[] key; - // byte[] val; - // // unpack data - // UnpackData(buffer, out val, out key); - // docid = new Guid(key); - // return val; - // } - - // internal int CopyTo(StorageFile backup, int start) - // { - // return _db.CopyTo(backup, start); - // } - //} - #endregion - - internal class KeyStore : IDisposable, IDocStorage where T : IComparable - { - public KeyStore(string Filename, byte MaxKeySize, bool AllowDuplicateKeys) - { - Initialize(Filename, MaxKeySize, AllowDuplicateKeys); - } - - public KeyStore(string Filename, bool AllowDuplicateKeys) - { - Initialize(Filename, Global.DefaultStringKeySize, AllowDuplicateKeys); - } - - private ILog log = LogManager.GetLogger(typeof(KeyStore)); - - private string _Path = ""; - private string _FileName = ""; - private byte _MaxKeySize; - private StorageFile _archive; - private MGIndex _index; - private string _datExtension = ".mgdat"; - private string _idxExtension = ".mgidx"; - IGetBytes _T = null; - private System.Timers.Timer _savetimer; - private BoolIndex _deleted; - - - public static KeyStore Open(string Filename, bool AllowDuplicateKeys) - { - return new KeyStore(Filename, AllowDuplicateKeys); - } - - public static KeyStore Open(string Filename, byte 
MaxKeySize, bool AllowDuplicateKeys) - { - return new KeyStore(Filename, MaxKeySize, AllowDuplicateKeys); - } - - object _savelock = new object(); - public void SaveIndex() - { - if (_index == null) - return; - lock (_savelock) - { - log.Debug("saving to disk"); - _index.SaveIndex(); - _deleted.SaveIndex(); - log.Debug("index saved"); - } - } - - public IEnumerable GetDuplicates(T key) - { - // get duplicates from index - return _index.GetDuplicates(key); - } - - public byte[] FetchRecordBytes(int record) - { - return _archive.ReadBytes(record); - } - - public long Count() - { - int c = _archive.Count(); - return c - _deleted.GetBits().CountOnes() * 2; - } - - public bool Get(T key, out string val) - { - byte[] b = null; - val = ""; - bool ret = GetBytes(key, out b); - if (ret) - { - if (b != null) - val = Encoding.Unicode.GetString(b); - else - val = ""; - } - return ret; - } - - public bool GetObject(T key, out object val) - { - int off; - val = null; - if (_index.Get(key, out off)) - { - val = _archive.ReadObject(off); - return true; - } - return false; - } - - public bool GetBytes(T key, out byte[] val) - { - int off; - val = null; - // search index - if (_index.Get(key, out off)) - { - val = _archive.ReadBytes(off); - return true; - } - return false; - } - - public int SetString(T key, string data) - { - return SetBytes(key, Encoding.Unicode.GetBytes(data)); - } - - public int SetObject(T key, object doc) - { - int recno = -1; - // save to storage - recno = (int) _archive.WriteObject(key, doc); - // save to index - _index.Set(key, recno); - - return recno; - } - - public int SetBytes(T key, byte[] data) - { - int recno = -1; - // save to storage - recno = (int)_archive.WriteData(key, data); - // save to index - _index.Set(key, recno); - - return recno; - } - - private object _shutdownlock = new object(); - public void Shutdown() - { - lock (_shutdownlock) - { - if (_index != null) - log.Debug("Shutting down"); - else - return; - _savetimer.Enabled = false; - 
SaveIndex(); - SaveLastRecord(); - - if (_deleted != null) - _deleted.Shutdown(); - if (_index != null) - _index.Shutdown(); - if (_archive != null) - _archive.Shutdown(); - _index = null; - _archive = null; - _deleted = null; - //log.Debug("Shutting down log"); - //LogManager.Shutdown(); - } - } - - public void Dispose() - { - Shutdown(); - } - - #region [ P R I V A T E M E T H O D S ] - private void SaveLastRecord() - { - // save the last record number in the index file - _index.SaveLastRecordNumber(_archive.Count()); - } - - private void Initialize(string filename, byte maxkeysize, bool AllowDuplicateKeys) - { - _MaxKeySize = RDBDataType.GetByteSize(maxkeysize); - _T = RDBDataType.ByteHandler(); - - _Path = Path.GetDirectoryName(filename); - Directory.CreateDirectory(_Path); - - _FileName = Path.GetFileNameWithoutExtension(filename); - string db = _Path + Path.DirectorySeparatorChar + _FileName + _datExtension; - string idx = _Path + Path.DirectorySeparatorChar + _FileName + _idxExtension; - - //LogManager.Configure(_Path + Path.DirectorySeparatorChar + _FileName + ".txt", 500, false); - - _index = new MGIndex(_Path, _FileName + _idxExtension, _MaxKeySize, Global.PageItemCount, AllowDuplicateKeys); - - if (Global.SaveAsBinaryJSON) - _archive = new StorageFile(db, SF_FORMAT.BSON, false); - else - _archive = new StorageFile(db, SF_FORMAT.JSON, false); - - _deleted = new BoolIndex(_Path, _FileName , "_deleted.idx"); - - log.Debug("Current Count = " + RecordCount().ToString("#,0")); - - CheckIndexState(); - - log.Debug("Starting save timer"); - _savetimer = new System.Timers.Timer(); - _savetimer.Elapsed += new System.Timers.ElapsedEventHandler(_savetimer_Elapsed); - _savetimer.Interval = Global.SaveIndexToDiskTimerSeconds * 1000; - _savetimer.AutoReset = true; - _savetimer.Start(); - - } - - private void CheckIndexState() - { - log.Debug("Checking Index state..."); - int last = _index.GetLastIndexedRecordNumber(); - int count = _archive.Count(); - if (last < count) 
- { - log.Debug("Rebuilding index..."); - log.Debug(" last index count = " + last); - log.Debug(" data items count = " + count); - // check last index record and archive record - // rebuild index if needed - for (int i = last; i < count; i++) - { - bool deleted = false; - T key = _archive.GetKey(i, out deleted); - if (deleted == false) - _index.Set(key, i); - else - _index.RemoveKey(key); - - if (i % 100000 == 0) - log.Debug("100,000 items re-indexed"); - } - log.Debug("Rebuild index done."); - } - } - - void _savetimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - SaveIndex(); - } - - #endregion - - public int RecordCount() - { - return _archive.Count(); - } - - public int[] GetHistory(T key) - { - List a = new List(); - foreach (int i in GetDuplicates(key)) - { - a.Add(i); - } - return a.ToArray(); - } - - internal byte[] FetchRecordBytes(int record, out bool isdeleted) - { - StorageItem meta; - byte[] b = _archive.ReadBytes(record, out meta); - isdeleted = meta.isDeleted; - return b; - } - - internal bool Delete(T id) - { - // write a delete record - int rec = (int)_archive.Delete(id); - _deleted.Set(true, rec); - return _index.RemoveKey(id); - } - - internal bool DeleteReplicated(T id) - { - // write a delete record for replicated object - int rec = (int)_archive.DeleteReplicated(id); - _deleted.Set(true, rec); - return _index.RemoveKey(id); - } - - internal int CopyTo(StorageFile storagefile, long startrecord) - { - return _archive.CopyTo(storagefile, startrecord); - } - - public byte[] GetBytes(int rowid, out StorageItem meta) - { - return _archive.ReadBytes(rowid, out meta); - } - - internal void FreeMemory() - { - _index.FreeMemory(); - } - - public object GetObject(int rowid, out StorageItem meta) - { - return _archive.ReadObject(rowid, out meta); - } - - public StorageItem GetMeta(int rowid) - { - return _archive.ReadMeta(rowid); - } - - internal int SetReplicationObject(T key, object doc) - { - int recno = -1; - // save to storage - 
recno = (int) _archive.WriteReplicationObject(key, doc); - // save to index - _index.Set(key, recno); - - return recno; - } - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.IO; +using RaptorDB.Common; +using RaptorDB.Views; + +namespace RaptorDB +{ + #region [ KeyStoreString ] + public class KeyStoreString : IDisposable + { + public KeyStoreString(string filename, bool caseSensitve) + { + _db = new KeyStore(filename, new MMIndexColumnDefinition()); + _caseSensitive = caseSensitve; + } + bool _caseSensitive = false; + + KeyStore _db; + + + public void Set(string key, string val) + { + Set(key, Encoding.Unicode.GetBytes(val)); + } + + public void Set(string key, byte[] val) + { + string str = (_caseSensitive ? key : key.ToLower()); + byte[] bkey = Encoding.Unicode.GetBytes(str); + int hc = (int)Helper.MurMur.Hash(bkey); + MemoryStream ms = new MemoryStream(); + ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); + ms.Write(bkey, 0, bkey.Length); + ms.Write(val, 0, val.Length); + + _db.SetBytes(hc, ms.ToArray()); + } + + public bool Get(string key, out string val) + { + val = null; + byte[] bval; + bool b = Get(key, out bval); + if (b) + { + val = Encoding.Unicode.GetString(bval); + } + return b; + } + + public bool Get(string key, out byte[] val) + { + string str = (_caseSensitive ? 
key : key.ToLower()); + val = null; + byte[] bkey = Encoding.Unicode.GetBytes(str); + int hc = (int)Helper.MurMur.Hash(bkey); + + if (_db.GetBytes(hc, out val)) + { + // unpack data + byte[] g = null; + UnpackData(val, out val, out g); + + if (!Helper.Cmp(bkey, g)) + { + // if data not equal check duplicates (hash conflict) + List ints = new List(_db.GetDuplicates(hc)); + ints.Reverse(); + foreach (int i in ints) + { + byte[] bb = _db.FetchRecordBytes(i); + UnpackData(bb, out val, out g); + if (Helper.Cmp(bkey, g)) + return true; + + } + return false; + } + return true; + + } + return false; + } + + public int Count() + { + return (int)_db.Count(); + } + + public int RecordCount() + { + return (int)_db.RecordCount(); + } + + public void SaveIndex() + { + _db.SaveIndex(); + } + + public void Shutdown() + { + _db.Shutdown(); + } + + public void Dispose() + { + _db.Shutdown(); + } + + private void UnpackData(byte[] buffer, out byte[] val, out byte[] key) + { + int len = Helper.ToInt32(buffer, 0, false); + key = new byte[len]; + Buffer.BlockCopy(buffer, 4, key, 0, len); + val = new byte[buffer.Length - 4 - len]; + Buffer.BlockCopy(buffer, 4 + len, val, 0, buffer.Length - 4 - len); + } + + public string ReadData(int recnumber) + { + byte[] val; + byte[] key; + byte[] b = _db.FetchRecordBytes(recnumber); + UnpackData(b, out val, out key); + return Encoding.Unicode.GetString(val); + } + + internal void FreeMemory() + { + _db.FreeMemory(); + } + } + #endregion + + #region [ KeyStoreGuid removed ] + //internal class KeyStoreGuid : IDisposable //, IDocStorage + //{ + // public KeyStoreGuid(string filename) + // { + // _db = KeyStore.Open(filename, true); + // } + + // KeyStore _db; + + // public void Set(Guid key, string val) + // { + // Set(key, Encoding.Unicode.GetBytes(val)); + // } + + // public int Set(Guid key, byte[] val) + // { + // byte[] bkey = key.ToByteArray(); + // int hc = (int)Helper.MurMur.Hash(bkey); + // MemoryStream ms = new MemoryStream(); + // 
ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); + // ms.Write(bkey, 0, bkey.Length); + // ms.Write(val, 0, val.Length); + + // return _db.SetBytes(hc, ms.ToArray()); + // } + + // public bool Get(Guid key, out string val) + // { + // val = null; + // byte[] bval; + // bool b = Get(key, out bval); + // if (b) + // { + // val = Encoding.Unicode.GetString(bval); + // } + // return b; + // } + + // public bool Get(Guid key, out byte[] val) + // { + // val = null; + // byte[] bkey = key.ToByteArray(); + // int hc = (int)Helper.MurMur.Hash(bkey); + + // if (_db.Get(hc, out val)) + // { + // // unpack data + // byte[] g = null; + // if (UnpackData(val, out val, out g)) + // { + // if (Helper.CompareMemCmp(bkey, g) != 0) + // { + // // if data not equal check duplicates (hash conflict) + // List ints = new List(_db.GetDuplicates(hc)); + // ints.Reverse(); + // foreach (int i in ints) + // { + // byte[] bb = _db.FetchRecordBytes(i); + // if (UnpackData(bb, out val, out g)) + // { + // if (Helper.CompareMemCmp(bkey, g) == 0) + // return true; + // } + // } + // return false; + // } + // return true; + // } + // } + // return false; + // } + + // public void SaveIndex() + // { + // _db.SaveIndex(); + // } + + // public void Shutdown() + // { + // _db.Shutdown(); + // } + + // public void Dispose() + // { + // _db.Shutdown(); + // } + + // public byte[] FetchRecordBytes(int record) + // { + // return _db.FetchRecordBytes(record); + // } + + // public int Count() + // { + // return (int)_db.Count(); + // } + + // public int RecordCount() + // { + // return (int)_db.RecordCount(); + // } + + // private bool UnpackData(byte[] buffer, out byte[] val, out byte[] key) + // { + // int len = Helper.ToInt32(buffer, 0, false); + // key = new byte[len]; + // Buffer.BlockCopy(buffer, 4, key, 0, len); + // val = new byte[buffer.Length - 4 - len]; + // Buffer.BlockCopy(buffer, 4 + len, val, 0, buffer.Length - 4 - len); + + // return true; + // } + + // internal byte[] Get(int 
recnumber, out Guid docid) + // { + // bool isdeleted = false; + // return Get(recnumber, out docid, out isdeleted); + // } + + // public bool RemoveKey(Guid key) + // { + // byte[] bkey = key.ToByteArray(); + // int hc = (int)Helper.MurMur.Hash(bkey); + // MemoryStream ms = new MemoryStream(); + // ms.Write(Helper.GetBytes(bkey.Length, false), 0, 4); + // ms.Write(bkey, 0, bkey.Length); + // return _db.Delete(hc, ms.ToArray()); + // } + + // public byte[] Get(int recnumber, out Guid docid, out bool isdeleted) + // { + // docid = Guid.Empty; + // byte[] buffer = _db.FetchRecordBytes(recnumber, out isdeleted); + // if (buffer == null) return null; + // if (buffer.Length == 0) return null; + // byte[] key; + // byte[] val; + // // unpack data + // UnpackData(buffer, out val, out key); + // docid = new Guid(key); + // return val; + // } + + // internal int CopyTo(StorageFile backup, int start) + // { + // return _db.CopyTo(backup, start); + // } + //} + #endregion + + internal class KeyStore : IDisposable, IDocStorage where T : IComparable + { + public KeyStore(string filename, bool AllowDuplicateKeys) + { + Initialize(filename, Global.DefaultStringKeySize, ViewIndexDefinitionHelpers.GetDefaultForType(AllowDuplicateKeys)); + } + + public KeyStore(string fileName, IViewColumnIndexDefinition indexDefinition) + { + Initialize(fileName, Global.DefaultStringKeySize, indexDefinition); + } + + private ILog log = LogManager.GetLogger(typeof(KeyStore)); + + private string _Path = ""; + private string _FileName = ""; + private byte _MaxKeySize; + private StorageFile _archive; + private IEqualsQueryIndex _index; + private string _datExtension = ".mgdat"; + private string _idxExtension = ".mgidx"; + private string lockFileExtension = ".kslock"; + private FileStream lockFile; + private System.Timers.Timer _savetimer; + private BoolIndex _deleted; + + object _savelock = new object(); + public void SaveIndex() + { + if (_index == null) + return; + lock (_savelock) + { + 
log.Debug("saving to disk"); + _index.SaveIndex(); + _deleted?.SaveIndex(); + log.Debug("index saved"); + } + } + + public IEnumerable GetDuplicates(T key) + { + // get duplicates from index + return _index.QueryEquals(key).GetBitIndexes(); + } + + public byte[] FetchRecordBytes(int record) + { + return _archive.ReadBytes(record); + } + + public long Count() + { + int c = _archive.Count(); + if(_deleted != null) c -= (int)_deleted.GetBits().CountOnes(); + return c; + } + + public bool Get(T key, out string val) + { + byte[] b; + val = null; + bool ret = GetBytes(key, out b); + if (ret && b != null) + { + val = Encoding.Unicode.GetString(b); + } + return ret; + } + + public bool GetObject(T key, out object val) + { + int off; + if (IndexGetFirst(key, out off)) + { + val = _archive.ReadObject(off); + return true; + } + val = null; + return false; + } + + public bool GetBytes(T key, out byte[] val) + { + int off; + // search index + if (IndexGetFirst(key, out off)) + { + val = _archive.ReadBytes(off); + return true; + } + val = null; + return false; + } + + private bool IndexGetFirst(T key, out int offset) + { + if (_deleted == null) + return _index.GetFirst(key, out offset); + else + { + offset = _index.QueryEquals(key).AndNot(_deleted.GetBits()).GetFirstIndex(); + return offset >= 0; + } + } + + public int SetString(T key, string data) + { + return SetBytes(key, Encoding.Unicode.GetBytes(data)); + } + + public int SetObject(T key, object doc) + { + int recno = (int)_archive.WriteObject(key, doc); + // save to index + _index.Set(key, recno); + + return recno; + } + + public int SetBytes(T key, byte[] data) + { + int recno = (int)_archive.WriteData(key, data); + // save to index + _index.Set(key, recno); + + return recno; + } + + private object _shutdownlock = new object(); + public void Shutdown() + { + lock (_shutdownlock) + { + if (_index != null) + log.Debug("Shutting down"); + else + return; + _savetimer.Enabled = false; + SaveIndex(); + lockFile.Dispose(); + 
File.Delete(lockFile.Name); + + if (_deleted != null) + _deleted.Dispose(); + if (_index != null) + _index.Dispose(); + if (_archive != null) + _archive.Shutdown(); + _index = null; + _archive = null; + _deleted = null; + //log.Debug("Shutting down log"); + //LogManager.Shutdown(); + } + } + + public void Dispose() + { + Shutdown(); + } + + #region [ P R I V A T E M E T H O D S ] + + private void Initialize(string fileName, byte maxkeysize, IViewColumnIndexDefinition indexDefinition) + { + _MaxKeySize = RDBDataType.GetByteSize(maxkeysize); + + _Path = Path.GetDirectoryName(fileName); + Directory.CreateDirectory(_Path); + + _FileName = Path.GetFileNameWithoutExtension(fileName); + var wasLocked = File.Exists(fileName + lockFileExtension); + if (wasLocked) File.Delete(fileName + lockFileExtension); + lockFile = File.Open(fileName + lockFileExtension, FileMode.OpenOrCreate, FileAccess.Read, FileShare.None); + + //LogManager.Configure(_Path + Path.DirectorySeparatorChar + _FileName + ".txt", 500, false); + + _index = indexDefinition.CreateIndex(_Path, _FileName) as IEqualsQueryIndex; + if (_index == null) throw new NotSupportedException("specified index does not support equals queries"); + + string db = _Path + Path.DirectorySeparatorChar + _FileName + _datExtension; + _archive = new StorageFile(db, Global.SaveAsBinaryJSON ? 
SF_FORMAT.BSON : SF_FORMAT.JSON, false); + + if (!(_index is IUpdatableIndex)) + _deleted = new BoolIndex(_Path, _FileName, "_deleted.idx"); + + log.Debug("Current Count = " + RecordCount().ToString("#,0")); + + if (wasLocked) RebuildIndex(); + + log.Debug("Starting save timer"); + _savetimer = new System.Timers.Timer(); + _savetimer.Elapsed += new System.Timers.ElapsedEventHandler(_savetimer_Elapsed); + _savetimer.Interval = Global.SaveIndexToDiskTimerSeconds * 1000; + _savetimer.AutoReset = true; + _savetimer.Start(); + + } + + private void RebuildIndex() + { + log.Debug("Rebuilding index..."); + // check last index record and archive record + // rebuild index if needed + for (int i = 0; i < _archive.Count(); i++) + { + bool deleted = false; + T key = _archive.GetKey(i, out deleted); + if (!deleted) + _index.Set(key, i); + else + IndexRm(key); + + if (i % 100000 == 0) + log.Debug("100,000 items re-indexed"); + } + log.Debug("Rebuild index done."); + } + + private void IndexRm(T key, int recnum) + { + if (_deleted == null) + ((IUpdatableIndex)_index).Remove(key); + else _deleted.Set(true, recnum); + } + + private bool IndexRm(T key) + { + if (_deleted == null) + return ((IUpdatableIndex)_index).Remove(key); + else + { + var q = _index.QueryEquals(key); + _deleted.InPlaceOR(q); + return q.Length > 0; + } + } + + void _savetimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + SaveIndex(); + } + + #endregion + + public int RecordCount() + { + return _archive.Count(); + } + + internal byte[] FetchRecordBytes(int record, out bool isdeleted) + { + StorageItem meta; + byte[] b = _archive.ReadBytes(record, out meta); + isdeleted = meta.isDeleted; + return b; + } + + internal bool Delete(T id) + { + // write a delete record + _archive.Delete(id); + return IndexRm(id); + } + + internal bool DeleteReplicated(T id) + { + // write a delete record for replicated object + _archive.DeleteReplicated(id); + return IndexRm(id); + } + + internal int CopyTo(StorageFile 
storagefile, long startrecord) + { + return _archive.CopyTo(storagefile, startrecord); + } + + public byte[] GetBytes(int rowid, out StorageItem meta) + { + return _archive.ReadBytes(rowid, out meta); + } + + internal void FreeMemory() + { + _index.FreeMemory(); + } + + public object GetObject(int rowid, out StorageItem meta) + { + return _archive.ReadObject(rowid, out meta); + } + + public StorageItem GetMeta(int rowid) + { + return _archive.ReadMeta(rowid); + } + + internal int SetReplicationObject(T key, object doc) + { + int recno = (int)_archive.WriteReplicationObject(key, doc); + // save to index + _index.Set(key, recno); + + return recno; + } + } +} diff --git a/RaptorDB/Storage/KeyStoreHF.cs b/RaptorDB/Storage/KeyStoreHF.cs index a208b26..106e946 100644 --- a/RaptorDB/Storage/KeyStoreHF.cs +++ b/RaptorDB/Storage/KeyStoreHF.cs @@ -1,560 +1,560 @@ -using RaptorDB.Common; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; - -namespace RaptorDB -{ - internal class AllocationBlock - { - public string key; - public byte keylen; - public int datalength; - public bool isCompressed; - public bool isBinaryJSON; - public bool deleteKey; - public List Blocks = new List(); - public int blocknumber; - } - - internal class KeyStoreHF : IKeyStoreHF - { - MGIndex _keys; - StorageFileHF _datastore; - object _lock = new object(); - ushort _BlockSize = 2048; - private const int _KILOBYTE = 1024; - ILog _log = LogManager.GetLogger(typeof(KeyStoreHF)); - - byte[] _blockheader = new byte[]{ - 0,0,0,0, // 0 block # (used for validate block reads and rebuild) - 0,0,0,0, // 4 next block # - 0, // 8 flags bits 0:iscompressed 1:isbinary 2:deletekey - 0,0,0,0, // 9 data length (compute alloc blocks needed) - 0, // 13 key length - 0, // 14 key type 0=guid 1=string - }; - private string _Path = ""; - private string _S = Path.DirectorySeparatorChar.ToString(); - private bool _isDirty = false; - private string _dirtyFilename = 
"temp.$"; - - // high frequency key value store - - public KeyStoreHF(string folder) - { - _Path = folder; - Directory.CreateDirectory(_Path); - if (_Path.EndsWith(_S) == false) _Path += _S; - - if (File.Exists(_Path + _dirtyFilename)) - { - _log.Error("Last shutdown failed, rebuilding data files..."); - RebuildDataFiles(); - } - _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); - _keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); - - _BlockSize = _datastore.GetBlockSize(); - } - - public KeyStoreHF(string folder, string filename) // mgindex special storage for strings - { - _Path = folder; - Directory.CreateDirectory(_Path); - if (_Path.EndsWith(_S) == false) _Path += _S; - - _datastore = new StorageFileHF(_Path + filename, Global.HighFrequencyKVDiskBlockSize); - - _BlockSize = _datastore.GetBlockSize(); - } - - public int CountHF() - { - return _keys.Count(); - } - - public object GetObjectHF(string key) - { - lock (_lock) - { - int alloc; - if (_keys.Get(key, out alloc)) - { - AllocationBlock ab = FillAllocationBlock(alloc); - if (ab.deleteKey == false) - { - byte[] data = new byte[ab.datalength]; - long offset = 0; - int len = ab.datalength; - int dbsize = _BlockSize - _blockheader.Length - ab.keylen; - ab.Blocks.ForEach(x => - { - byte[] b = _datastore.ReadBlock(x); - int c = len; - if (c > dbsize) c = dbsize; - Buffer.BlockCopy(b, _blockheader.Length + ab.keylen, data, (int)offset, c); - offset += c; - len -= c; - }); - if (ab.isCompressed) - data = MiniLZO.Decompress(data); - - return fastBinaryJSON.BJSON.ToObject(data); - } - } - } - - return null; - } - - public bool SetObjectHF(string key, object obj) - { - byte[] k = Helper.GetBytes(key); - if (k.Length > 255) - { - _log.Error("Key length > 255 : " + key); - throw new Exception("Key must be less than 255 characters"); - //return false; - } - lock (_lock) - { - if (_isDirty == false) - WriteDirtyFile(); - - AllocationBlock ab = null; - int 
firstblock = 0; - if (_keys.Get(key, out firstblock))// key exists already - ab = FillAllocationBlock(firstblock); - - SaveNew(key, k, obj); - if (ab != null) - { - // free old blocks - ab.Blocks.Add(ab.blocknumber); - _datastore.FreeBlocks(ab.Blocks); - } - return true; - } - } - - public bool DeleteKeyHF(string key) - { - lock (_lock) - { - int alloc; - if (_keys.Get(key, out alloc)) - { - if (_isDirty == false) - WriteDirtyFile(); - - byte[] keybytes = Helper.GetBytes(key); - AllocationBlock ab = FillAllocationBlock(alloc); - - ab.keylen = (byte)keybytes.Length; - - _keys.RemoveKey(key);// remove key from index - - // write ab - ab.deleteKey = true; - ab.datalength = 0; - - byte[] header = CreateAllocHeader(ab, keybytes); - - _datastore.SeekBlock(ab.blocknumber); - _datastore.WriteBlockBytes(header, 0, header.Length); - - // free old data blocks - _datastore.FreeBlocks(ab.Blocks); - - return true; - } - } - return false; - } - - public void CompactStorageHF() - { - lock (_lock) - { - try - { - _log.Debug("Compacting storage file ..."); - if (Directory.Exists(_Path + "temp")) - Directory.Delete(_Path + "temp", true); - - KeyStoreHF newfile = new KeyStoreHF(_Path + "temp"); - string[] keys = _keys.GetKeys().Cast().ToArray(); - _log.Debug("Number of keys : " + keys.Length); - foreach (var k in keys) - { - newfile.SetObjectHF(k, GetObjectHF(k)); - } - newfile.Shutdown(); - _log.Debug("Compact done."); - // shutdown and move files and restart here - if (Directory.Exists(_Path + "old")) - Directory.Delete(_Path + "old", true); - Directory.CreateDirectory(_Path + "old"); - _datastore.Shutdown(); - _keys.Shutdown(); - _log.Debug("Moving files..."); - foreach (var f in Directory.GetFiles(_Path, "*.*")) - File.Move(f, _Path + "old" + _S + Path.GetFileName(f)); - - foreach (var f in Directory.GetFiles(_Path + "temp", "*.*")) - File.Move(f, _Path + Path.GetFileName(f)); - - Directory.Delete(_Path + "temp", true); - //Directory.Delete(_Path + "old", true); // FEATURE : 
delete or keep? - _log.Debug("Re-opening storage file"); - _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); - _keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); - - _BlockSize = _datastore.GetBlockSize(); - } - catch (Exception ex) - { - _log.Error(ex); - } - } - } - - public string[] GetKeysHF() - { - lock (_lock) - return _keys.GetKeys().Cast().ToArray(); // FEATURE : dirty !? - } - - public bool ContainsHF(string key) - { - lock (_lock) - { - int i = 0; - return _keys.Get(key, out i); - } - } - - internal void Shutdown() - { - _datastore.Shutdown(); - if (_keys != null) - _keys.Shutdown(); - - if (File.Exists(_Path + _dirtyFilename)) - File.Delete(_Path + _dirtyFilename); - } - - internal void FreeMemory() - { - _keys.FreeMemory(); - } - - #region [ private methods ] - private object _dfile = new object(); - private void WriteDirtyFile() - { - lock (_dfile) - { - _isDirty = true; - if (File.Exists(_Path + _dirtyFilename) == false) - File.WriteAllText(_Path + _dirtyFilename, "dirty"); - } - } - - private void SaveNew(string key, byte[] keybytes, object obj) - { - byte[] data; - AllocationBlock ab = new AllocationBlock(); - ab.key = key; - ab.keylen = (byte)keybytes.Length; - - data = fastBinaryJSON.BJSON.ToBJSON(obj); - ab.isBinaryJSON = true; - - if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) - { - ab.isCompressed = true; - data = MiniLZO.Compress(data); - } - ab.datalength = data.Length; - - int firstblock = internalSave(keybytes, data, ab); - - // save keys - _keys.Set(key, firstblock); - } - - private int internalSave(byte[] keybytes, byte[] data, AllocationBlock ab) - { - int firstblock = _datastore.GetFreeBlockNumber(); - int blocknum = firstblock; - byte[] header = CreateAllocHeader(ab, keybytes); - int dblocksize = _BlockSize - header.Length; - int offset = 0; - // compute data block count - int datablockcount = (data.Length / dblocksize) + 1; - // save data blocks - 
int counter = 0; - int len = data.Length; - while (datablockcount > 0) - { - datablockcount--; - int next = 0; - if (datablockcount > 0) - next = _datastore.GetFreeBlockNumber(); - - Buffer.BlockCopy(Helper.GetBytes(counter, false), 0, header, 0, 4); // set block number - Buffer.BlockCopy(Helper.GetBytes(next, false), 0, header, 4, 4); // set next pointer - - _datastore.SeekBlock(blocknum); - _datastore.WriteBlockBytes(header, 0, header.Length); - int c = len; - if (c > dblocksize) - c = dblocksize; - _datastore.WriteBlockBytes(data, offset, c); - - if (next > 0) - blocknum = next; - offset += c; - len -= c; - counter++; - } - return firstblock; - } - - private byte[] CreateAllocHeader(AllocationBlock ab, byte[] keybytes) - { - byte[] alloc = new byte[_blockheader.Length + keybytes.Length]; - - if (ab.isCompressed) - alloc[8] = 1; - if (ab.isBinaryJSON) - alloc[8] += 2; - if (ab.deleteKey) - alloc[8] += 4; - - Buffer.BlockCopy(Helper.GetBytes(ab.datalength, false), 0, alloc, 9, 4); - alloc[13] = ab.keylen; - alloc[14] = 1; // string keys for now - Buffer.BlockCopy(keybytes, 0, alloc, _blockheader.Length, ab.keylen); - - return alloc; - } - - private AllocationBlock FillAllocationBlock(int blocknumber) - { - AllocationBlock ab = new AllocationBlock(); - - ab.blocknumber = blocknumber; - ab.Blocks.Add(blocknumber); - - byte[] b = _datastore.ReadBlockBytes(blocknumber, _blockheader.Length + 255); - - int blocknumexpected = 0; - - int next = ParseBlockHeader(ab, b, blocknumexpected); - - blocknumexpected++; - - while (next > 0) - { - ab.Blocks.Add(next); - b = _datastore.ReadBlockBytes(next, _blockheader.Length + ab.keylen); - next = ParseBlockHeader(ab, b, blocknumexpected); - blocknumexpected++; - } - - return ab; - } - - private int ParseBlockHeader(AllocationBlock ab, byte[] b, int blocknumberexpected) - { - int bnum = Helper.ToInt32(b, 0); - if (bnum != blocknumberexpected) - { - _log.Error("Block numbers does not match, looking for : " + blocknumberexpected); - 
//throw new Exception("Block numbers does not match, looking for : " + blocknumberexpected); - return -1; - } - if (b[14] != 1) - { - _log.Error("Expecting string keys only, got : " + b[14]); - //throw new Exception("Expecting string keys only, got : " + b[11]); - return -1; - } - - int next = Helper.ToInt32(b, 4); - - if (ab.keylen == 0) - { - byte flags = b[8]; - - if ((flags & 0x01) > 0) - ab.isCompressed = true; - if ((flags & 0x02) > 0) - ab.isBinaryJSON = true; - if ((flags & 0x04) > 0) - ab.deleteKey = true; - - ab.datalength = Helper.ToInt32(b, 9); - byte keylen = b[13]; - ab.keylen = keylen; - ab.key = Helper.GetString(b, _blockheader.Length, keylen); - } - return next; - } - - private void RebuildDataFiles() - { - MGIndex keys = null; - try - { - // remove old free list - if (File.Exists(_Path + "data.bmp")) - File.Delete(_Path + "data.bmp"); - - _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); - _BlockSize = _datastore.GetBlockSize(); - if (File.Exists(_Path + "keys.idx")) - { - _log.Debug("removing old keys index"); - foreach (var f in Directory.GetFiles(_Path, "keys.*")) - File.Delete(f); - } - - keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); - - WAHBitArray visited = new WAHBitArray(); - - int c = _datastore.NumberofBlocks(); - - for (int i = 0; i < c; i++) // go through blocks - { - if (visited.Get(i)) - continue; - byte[] b = _datastore.ReadBlockBytes(i, _blockheader.Length + 255); - int bnum = Helper.ToInt32(b, 0); - if (bnum > 0) // check if a start block - { - visited.Set(i, true); - _datastore.FreeBlock(i); // mark as free - continue; - } - - AllocationBlock ab = new AllocationBlock(); - // start block found - int blocknumexpected = 0; - - int next = ParseBlockHeader(ab, b, blocknumexpected); - int last = 0; - bool freelast = false; - AllocationBlock old = null; - - if (keys.Get(ab.key, out last)) - { - old = this.FillAllocationBlock(last); - freelast = true; - } - 
blocknumexpected++; - bool failed = false; - if (ab.deleteKey == false) - { - while (next > 0) // read the blocks - { - ab.Blocks.Add(next); - b = _datastore.ReadBlockBytes(next, _blockheader.Length + ab.keylen); - next = ParseBlockHeader(ab, b, blocknumexpected); - if (next == -1) // non matching block - { - failed = true; - break; - } - blocknumexpected++; - } - } - else - { - failed = true; - keys.RemoveKey(ab.key); - } - // new data ok - if (failed == false) - { - keys.Set(ab.key, ab.blocknumber);// valid block found - if (freelast)// free the old blocks - _datastore.FreeBlocks(old.Blocks); - } - - visited.Set(i, true); - } - - // all ok delete temp.$ file - if (File.Exists(_Path + _dirtyFilename)) - File.Delete(_Path + _dirtyFilename); - } - catch (Exception ex) - { - _log.Error(ex); - } - finally - { - _log.Debug("Shutting down files and index"); - _datastore.Shutdown(); - keys.SaveIndex(); - keys.Shutdown(); - } - } - #endregion - - internal void FreeBlocks(List list) - { - lock (_lock) - _datastore.FreeBlocks(list); - } - - internal int SaveData(string key, byte[] data) - { - lock (_lock) - { - byte[] kb = Helper.GetBytes(key); - AllocationBlock ab = new AllocationBlock(); - ab.key = key; - ab.keylen = (byte)kb.Length; - ab.isCompressed = false; - ab.isBinaryJSON = true; - ab.datalength = data.Length; - - return internalSave(kb, data, ab); - } - } - - internal byte[] GetData(int blocknumber, List usedblocks) - { - lock (_lock) - { - AllocationBlock ab = FillAllocationBlock(blocknumber); - usedblocks = ab.Blocks; - byte[] data = new byte[ab.datalength]; - long offset = 0; - int len = ab.datalength; - int dbsize = _BlockSize - _blockheader.Length - ab.keylen; - ab.Blocks.ForEach(x => - { - byte[] b = _datastore.ReadBlock(x); - int c = len; - if (c > dbsize) c = dbsize; - Buffer.BlockCopy(b, _blockheader.Length + ab.keylen, data, (int)offset, c); - offset += c; - len -= c; - }); - if (ab.isCompressed) - data = MiniLZO.Decompress(data); - - return data; - } - } 
- } -} +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace RaptorDB +{ + internal class AllocationBlock + { + public string key; + public byte keylen; + public int datalength; + public bool isCompressed; + public bool isBinaryJSON; + public bool deleteKey; + public List Blocks = new List(); + public int blocknumber; + } + + public class KeyStoreHF : IKeyStoreHF + { + MGIndex _keys; + StorageFileHF _datastore; + object _lock = new object(); + ushort _BlockSize = 2048; + private const int _KILOBYTE = 1024; + ILog _log = LogManager.GetLogger(typeof(KeyStoreHF)); + + byte[] _blockheader = new byte[]{ + 0,0,0,0, // 0 block # (used for validate block reads and rebuild) + 0,0,0,0, // 4 next block # + 0, // 8 flags bits 0:iscompressed 1:isbinary 2:deletekey + 0,0,0,0, // 9 data length (compute alloc blocks needed) + 0, // 13 key length + 0, // 14 key type 0=guid 1=string + }; + private string _Path = ""; + private string _S = Path.DirectorySeparatorChar.ToString(); + private bool _isDirty = false; + private string _dirtyFilename = "temp.$"; + + // high frequency key value store + + public KeyStoreHF(string folder) + { + _Path = folder; + Directory.CreateDirectory(_Path); + if (_Path.EndsWith(_S) == false) _Path += _S; + + if (File.Exists(_Path + _dirtyFilename)) + { + _log.Error("Last shutdown failed, rebuilding data files..."); + RebuildDataFiles(); + } + _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); + _keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); + + _BlockSize = _datastore.GetBlockSize(); + } + + public KeyStoreHF(string folder, string filename) // mgindex special storage for strings + { + _Path = folder; + Directory.CreateDirectory(_Path); + if (_Path.EndsWith(_S) == false) _Path += _S; + + _datastore = new StorageFileHF(_Path + filename, Global.HighFrequencyKVDiskBlockSize); + + _BlockSize = 
_datastore.GetBlockSize(); + } + + public int CountHF() + { + return _keys.Count(); + } + + public object GetObjectHF(string key) + { + lock (_lock) + { + int alloc; + if (_keys.GetFirst(key, out alloc)) + { + AllocationBlock ab = FillAllocationBlock(alloc); + if (ab.deleteKey == false) + { + byte[] data = new byte[ab.datalength]; + long offset = 0; + int len = ab.datalength; + int dbsize = _BlockSize - _blockheader.Length - ab.keylen; + ab.Blocks.ForEach(x => + { + byte[] b = _datastore.ReadBlock(x); + int c = len; + if (c > dbsize) c = dbsize; + Buffer.BlockCopy(b, _blockheader.Length + ab.keylen, data, (int)offset, c); + offset += c; + len -= c; + }); + if (ab.isCompressed) + data = MiniLZO.Decompress(data); + + return fastBinaryJSON.BJSON.ToObject(data); + } + } + } + + return null; + } + + public bool SetObjectHF(string key, object obj) + { + byte[] k = Helper.GetBytes(key); + if (k.Length > 255) + { + _log.Error("Key length > 255 : " + key); + throw new Exception("Key must be less than 255 characters"); + //return false; + } + lock (_lock) + { + if (_isDirty == false) + WriteDirtyFile(); + + AllocationBlock ab = null; + int firstblock = 0; + if (_keys.GetFirst(key, out firstblock))// key exists already + ab = FillAllocationBlock(firstblock); + + SaveNew(key, k, obj); + if (ab != null) + { + // free old blocks + ab.Blocks.Add(ab.blocknumber); + _datastore.FreeBlocks(ab.Blocks); + } + return true; + } + } + + public bool DeleteKeyHF(string key) + { + lock (_lock) + { + int alloc; + if (_keys.GetFirst(key, out alloc)) + { + if (_isDirty == false) + WriteDirtyFile(); + + byte[] keybytes = Helper.GetBytes(key); + AllocationBlock ab = FillAllocationBlock(alloc); + + ab.keylen = (byte)keybytes.Length; + + _keys.RemoveKey(key);// remove key from index + + // write ab + ab.deleteKey = true; + ab.datalength = 0; + + byte[] header = CreateAllocHeader(ab, keybytes); + + _datastore.SeekBlock(ab.blocknumber); + _datastore.WriteBlockBytes(header, 0, header.Length); + + // 
free old data blocks + _datastore.FreeBlocks(ab.Blocks); + + return true; + } + } + return false; + } + + public void CompactStorageHF() + { + lock (_lock) + { + try + { + _log.Debug("Compacting storage file ..."); + if (Directory.Exists(_Path + "temp")) + Directory.Delete(_Path + "temp", true); + + KeyStoreHF newfile = new KeyStoreHF(_Path + "temp"); + string[] keys = _keys.GetKeys().Cast().ToArray(); + _log.Debug("Number of keys : " + keys.Length); + foreach (var k in keys) + { + newfile.SetObjectHF(k, GetObjectHF(k)); + } + newfile.Shutdown(); + _log.Debug("Compact done."); + // shutdown and move files and restart here + if (Directory.Exists(_Path + "old")) + Directory.Delete(_Path + "old", true); + Directory.CreateDirectory(_Path + "old"); + _datastore.Shutdown(); + _keys.Dispose(); + _log.Debug("Moving files..."); + foreach (var f in Directory.GetFiles(_Path, "*.*")) + File.Move(f, _Path + "old" + _S + Path.GetFileName(f)); + + foreach (var f in Directory.GetFiles(_Path + "temp", "*.*")) + File.Move(f, _Path + Path.GetFileName(f)); + + Directory.Delete(_Path + "temp", true); + //Directory.Delete(_Path + "old", true); // FEATURE : delete or keep? + _log.Debug("Re-opening storage file"); + _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); + _keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); + + _BlockSize = _datastore.GetBlockSize(); + } + catch (Exception ex) + { + _log.Error(ex); + } + } + } + + public string[] GetKeysHF() + { + lock (_lock) + return _keys.GetKeys().Cast().ToArray(); // FEATURE : dirty !? 
+ } + + public bool ContainsHF(string key) + { + lock (_lock) + { + int i = 0; + return _keys.GetFirst(key, out i); + } + } + + internal void Shutdown() + { + _datastore.Shutdown(); + if (_keys != null) + _keys.Dispose(); + + if (File.Exists(_Path + _dirtyFilename)) + File.Delete(_Path + _dirtyFilename); + } + + internal void FreeMemory() + { + _keys.FreeMemory(); + } + + #region [ private methods ] + private object _dfile = new object(); + private void WriteDirtyFile() + { + lock (_dfile) + { + _isDirty = true; + if (File.Exists(_Path + _dirtyFilename) == false) + File.WriteAllText(_Path + _dirtyFilename, "dirty"); + } + } + + private void SaveNew(string key, byte[] keybytes, object obj) + { + byte[] data; + AllocationBlock ab = new AllocationBlock(); + ab.key = key; + ab.keylen = (byte)keybytes.Length; + + data = fastBinaryJSON.BJSON.ToBJSON(obj); + ab.isBinaryJSON = true; + + if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) + { + ab.isCompressed = true; + data = MiniLZO.Compress(data); + } + ab.datalength = data.Length; + + int firstblock = internalSave(keybytes, data, ab); + + // save keys + _keys.Set(key, firstblock); + } + + private int internalSave(byte[] keybytes, byte[] data, AllocationBlock ab) + { + int firstblock = _datastore.GetFreeBlockNumber(); + int blocknum = firstblock; + byte[] header = CreateAllocHeader(ab, keybytes); + int dblocksize = _BlockSize - header.Length; + int offset = 0; + // compute data block count + int datablockcount = (data.Length / dblocksize) + 1; + // save data blocks + int counter = 0; + int len = data.Length; + while (datablockcount > 0) + { + datablockcount--; + int next = 0; + if (datablockcount > 0) + next = _datastore.GetFreeBlockNumber(); + + Buffer.BlockCopy(Helper.GetBytes(counter, false), 0, header, 0, 4); // set block number + Buffer.BlockCopy(Helper.GetBytes(next, false), 0, header, 4, 4); // set next pointer + + _datastore.SeekBlock(blocknum); + _datastore.WriteBlockBytes(header, 0, 
header.Length); + int c = len; + if (c > dblocksize) + c = dblocksize; + _datastore.WriteBlockBytes(data, offset, c); + + if (next > 0) + blocknum = next; + offset += c; + len -= c; + counter++; + } + return firstblock; + } + + private byte[] CreateAllocHeader(AllocationBlock ab, byte[] keybytes) + { + byte[] alloc = new byte[_blockheader.Length + keybytes.Length]; + + if (ab.isCompressed) + alloc[8] = 1; + if (ab.isBinaryJSON) + alloc[8] += 2; + if (ab.deleteKey) + alloc[8] += 4; + + Buffer.BlockCopy(Helper.GetBytes(ab.datalength, false), 0, alloc, 9, 4); + alloc[13] = ab.keylen; + alloc[14] = 1; // string keys for now + Buffer.BlockCopy(keybytes, 0, alloc, _blockheader.Length, ab.keylen); + + return alloc; + } + + private AllocationBlock FillAllocationBlock(int blocknumber) + { + AllocationBlock ab = new AllocationBlock(); + + ab.blocknumber = blocknumber; + ab.Blocks.Add(blocknumber); + + byte[] b = _datastore.ReadBlockBytes(blocknumber, _blockheader.Length + 255); + + int blocknumexpected = 0; + + int next = ParseBlockHeader(ab, b, blocknumexpected); + + blocknumexpected++; + + while (next > 0) + { + ab.Blocks.Add(next); + b = _datastore.ReadBlockBytes(next, _blockheader.Length + ab.keylen); + next = ParseBlockHeader(ab, b, blocknumexpected); + blocknumexpected++; + } + + return ab; + } + + private int ParseBlockHeader(AllocationBlock ab, byte[] b, int blocknumberexpected) + { + int bnum = Helper.ToInt32(b, 0); + if (bnum != blocknumberexpected) + { + _log.Error("Block numbers does not match, looking for : " + blocknumberexpected); + //throw new Exception("Block numbers does not match, looking for : " + blocknumberexpected); + return -1; + } + if (b[14] != 1) + { + _log.Error("Expecting string keys only, got : " + b[14]); + //throw new Exception("Expecting string keys only, got : " + b[11]); + return -1; + } + + int next = Helper.ToInt32(b, 4); + + if (ab.keylen == 0) + { + byte flags = b[8]; + + if ((flags & 0x01) > 0) + ab.isCompressed = true; + if ((flags & 
0x02) > 0) + ab.isBinaryJSON = true; + if ((flags & 0x04) > 0) + ab.deleteKey = true; + + ab.datalength = Helper.ToInt32(b, 9); + byte keylen = b[13]; + ab.keylen = keylen; + ab.key = Helper.GetString(b, _blockheader.Length, keylen); + } + return next; + } + + private void RebuildDataFiles() + { + MGIndex keys = null; + try + { + // remove old free list + if (File.Exists(_Path + "data.bmp")) + File.Delete(_Path + "data.bmp"); + + _datastore = new StorageFileHF(_Path + "data.mghf", Global.HighFrequencyKVDiskBlockSize); + _BlockSize = _datastore.GetBlockSize(); + if (File.Exists(_Path + "keys.idx")) + { + _log.Debug("removing old keys index"); + foreach (var f in Directory.GetFiles(_Path, "keys.*")) + File.Delete(f); + } + + keys = new MGIndex(_Path, "keys.idx", 255, Global.PageItemCount, false); + + WahBitArray visited = new WahBitArray(); + + int c = _datastore.NumberofBlocks(); + + for (int i = 0; i < c; i++) // go through blocks + { + if (visited.Get(i)) + continue; + byte[] b = _datastore.ReadBlockBytes(i, _blockheader.Length + 255); + int bnum = Helper.ToInt32(b, 0); + if (bnum > 0) // check if a start block + { + visited.Set(i, true); + _datastore.FreeBlock(i); // mark as free + continue; + } + + AllocationBlock ab = new AllocationBlock(); + // start block found + int blocknumexpected = 0; + + int next = ParseBlockHeader(ab, b, blocknumexpected); + int last = 0; + bool freelast = false; + AllocationBlock old = null; + + if (keys.GetFirst(ab.key, out last)) + { + old = this.FillAllocationBlock(last); + freelast = true; + } + blocknumexpected++; + bool failed = false; + if (ab.deleteKey == false) + { + while (next > 0) // read the blocks + { + ab.Blocks.Add(next); + b = _datastore.ReadBlockBytes(next, _blockheader.Length + ab.keylen); + next = ParseBlockHeader(ab, b, blocknumexpected); + if (next == -1) // non matching block + { + failed = true; + break; + } + blocknumexpected++; + } + } + else + { + failed = true; + keys.RemoveKey(ab.key); + } + // new data ok 
+ if (failed == false) + { + keys.Set(ab.key, ab.blocknumber);// valid block found + if (freelast)// free the old blocks + _datastore.FreeBlocks(old.Blocks); + } + + visited.Set(i, true); + } + + // all ok delete temp.$ file + if (File.Exists(_Path + _dirtyFilename)) + File.Delete(_Path + _dirtyFilename); + } + catch (Exception ex) + { + _log.Error(ex); + } + finally + { + _log.Debug("Shutting down files and index"); + _datastore.Shutdown(); + keys.SaveIndex(); + keys.Dispose(); + } + } + #endregion + + internal void FreeBlocks(List list) + { + lock (_lock) + _datastore.FreeBlocks(list); + } + + internal int SaveData(string key, byte[] data) + { + lock (_lock) + { + byte[] kb = Helper.GetBytes(key); + AllocationBlock ab = new AllocationBlock(); + ab.key = key; + ab.keylen = (byte)kb.Length; + ab.isCompressed = false; + ab.isBinaryJSON = true; + ab.datalength = data.Length; + + return internalSave(kb, data, ab); + } + } + + internal byte[] GetData(int blocknumber, List usedblocks) + { + lock (_lock) + { + AllocationBlock ab = FillAllocationBlock(blocknumber); + usedblocks = ab.Blocks; + byte[] data = new byte[ab.datalength]; + long offset = 0; + int len = ab.datalength; + int dbsize = _BlockSize - _blockheader.Length - ab.keylen; + ab.Blocks.ForEach(x => + { + byte[] b = _datastore.ReadBlock(x); + int c = len; + if (c > dbsize) c = dbsize; + Buffer.BlockCopy(b, _blockheader.Length + ab.keylen, data, (int)offset, c); + offset += c; + len -= c; + }); + if (ab.isCompressed) + data = MiniLZO.Decompress(data); + + return data; + } + } + } +} diff --git a/RaptorDB/Storage/StorageFile.cs b/RaptorDB/Storage/StorageFile.cs index bf0eb1b..c2d06a9 100644 --- a/RaptorDB/Storage/StorageFile.cs +++ b/RaptorDB/Storage/StorageFile.cs @@ -1,728 +1,731 @@ -using System; -using System.Diagnostics; -using System.Collections; -using System.Runtime.InteropServices; -using System.IO; -using System.Text; -using System.Collections.Generic; -using RaptorDB.Common; - -namespace RaptorDB -{ - 
internal class StorageData - { - public StorageItem meta; - public byte[] data; - } - - public class StorageItem - { - public T key; - public string typename; - public DateTime date = FastDateTime.Now; - public bool isDeleted; - public bool isReplicated; - public int dataLength; - public byte isCompressed; // 0 = no, 1 = MiniLZO - } - - public interface IDocStorage - { - int RecordCount(); - - byte[] GetBytes(int rowid, out StorageItem meta); - object GetObject(int rowid, out StorageItem meta); - StorageItem GetMeta(int rowid); - - bool GetObject(T key, out object doc); - } - - public enum SF_FORMAT - { - BSON, - JSON - } - - internal struct SplitFile - { - public long start; - public long uptolength; - public FileStream file; - } - - internal class StorageFile - { - FileStream _datawrite; - FileStream _recfilewrite; - FileStream _recfileread = null; - FileStream _dataread = null; - - private string _filename = ""; - private string _recfilename = ""; - private long _lastRecordNum = 0; - private long _lastWriteOffset = _fileheader.Length; - private object _readlock = new object(); - private bool _dirty = false; - IGetBytes _T = null; - ILog _log = LogManager.GetLogger(typeof(StorageFile)); - private SF_FORMAT _saveFormat = SF_FORMAT.BSON; - - // **** change this if storage format changed **** - internal static int _CurrentVersion = 2; - - //private ushort _splitMegaBytes = 0; // 0 = off - //private bool _enableSplits = false; - private List _files = new List(); - private List _uptoindexes = new List(); - // no splits in view mode - private bool _viewmode = false; - private SplitFile _lastsplitfile; - - public static byte[] _fileheader = { (byte)'M', (byte)'G', (byte)'D', (byte)'B', - 0, // 4 -- storage file version number, - 0 // 5 -- not used - }; - private static string _splitfileExtension = "00000"; - private const int _KILOBYTE = 1024; - // record format : - // 1 type (0 = raw no meta data, 1 = bson meta, 2 = json meta) - // 4 byte meta/data length, - // n byte 
meta serialized data if exists - // m byte data (if meta exists then m is in meta.dataLength) - - /// - /// View data storage mode (no splits, bson save) - /// - /// - public StorageFile(string filename) - { - _viewmode = true; - _saveFormat = SF_FORMAT.BSON; - // add version number - _fileheader[5] = (byte)_CurrentVersion; - Initialize(filename, false); - } - /// - /// - /// - /// - /// - /// = true -> don't create mgrec files (used for backup and replication mode) - public StorageFile(string filename, SF_FORMAT format, bool StorageOnlyMode) - { - _saveFormat = format; - if (StorageOnlyMode) _viewmode = true; // no file splits - // add version number - _fileheader[5] = (byte)_CurrentVersion; - Initialize(filename, StorageOnlyMode); - } - - private StorageFile(string filename, bool StorageOnlyMode) - { - Initialize(filename, StorageOnlyMode); - } - - private void Initialize(string filename, bool StorageOnlyMode) - { - _T = RDBDataType.ByteHandler(); - _filename = filename; - - // search for mgdat00000 extensions -> split files load - if (File.Exists(filename + _splitfileExtension)) - { - LoadSplitFiles(filename); - } - - if (File.Exists(filename) == false) - _datawrite = new FileStream(filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); - else - _datawrite = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); - - _dataread = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - - if (_datawrite.Length == 0) - { - // new file - _datawrite.Write(_fileheader, 0, _fileheader.Length); - _datawrite.Flush(); - _lastWriteOffset = _fileheader.Length; - } - else - { - long i = _datawrite.Seek(0L, SeekOrigin.End); - if (_files.Count == 0) - _lastWriteOffset = i; - else - _lastWriteOffset += i; // add to the splits - } - - if (StorageOnlyMode == false) - { - // load rec pointers - _recfilename = filename.Substring(0, filename.LastIndexOf('.')) + ".mgrec"; - if (File.Exists(_recfilename) == false) 
- _recfilewrite = new FileStream(_recfilename, FileMode.CreateNew, FileAccess.Write, FileShare.ReadWrite); - else - _recfilewrite = new FileStream(_recfilename, FileMode.Open, FileAccess.Write, FileShare.ReadWrite); - - _recfileread = new FileStream(_recfilename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - - _lastRecordNum = (int)(_recfilewrite.Length / 8); - _recfilewrite.Seek(0L, SeekOrigin.End); - } - } - - private void LoadSplitFiles(string filename) - { - _log.Debug("Loading split files..."); - _lastWriteOffset = 0; - for (int i = 0; ; i++) - { - string _filename = filename + i.ToString(_splitfileExtension); - if (File.Exists(_filename) == false) - break; - FileStream file = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - SplitFile sf = new SplitFile(); - sf.start = _lastWriteOffset; - _lastWriteOffset += file.Length; - sf.file = file; - sf.uptolength = _lastWriteOffset; - _files.Add(sf); - _uptoindexes.Add(sf.uptolength); - } - _lastsplitfile = _files[_files.Count - 1]; - _log.Debug("Number of split files = " + _files.Count); - } - - public static int GetStorageFileHeaderVersion(string filename) - { - string fn = filename + _splitfileExtension; // if split files -> load the header from the first file -> mgdat00000 - if (File.Exists(fn) == false) - fn = filename; // else use the mgdat file - - if (File.Exists(fn)) - { - var fs = new FileStream(fn, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); - fs.Seek(0L, SeekOrigin.Begin); - byte[] b = new byte[_fileheader.Length]; - fs.Read(b, 0, _fileheader.Length); - fs.Close(); - return b[5]; - } - return _CurrentVersion; - } - - public int Count() - { - return (int)_lastRecordNum;// (int)(_recfilewrite.Length >> 3); - } - - public long WriteRawData(byte[] b) - { - return internalWriteData(null, b, true); - } - - public long Delete(T key) - { - StorageItem meta = new StorageItem(); - meta.key = key; - meta.isDeleted = true; - - return internalWriteData(meta, 
null, false); - } - - public long DeleteReplicated(T key) - { - StorageItem meta = new StorageItem(); - meta.key = key; - meta.isReplicated = true; - meta.isDeleted = true; - - return internalWriteData(meta, null, false); - } - - public long WriteObject(T key, object obj) - { - StorageItem meta = new StorageItem(); - meta.key = key; - meta.typename = fastJSON.Reflection.Instance.GetTypeAssemblyName(obj.GetType()); - byte[] data; - if (_saveFormat == SF_FORMAT.BSON) - data = fastBinaryJSON.BJSON.ToBJSON(obj); - else - data = Helper.GetBytes(fastJSON.JSON.ToJSON(obj)); - if(data.Length > (int)Global.CompressDocumentOverKiloBytes*_KILOBYTE) - { - meta.isCompressed = 1; - data = MiniLZO.Compress(data); //MiniLZO - } - return internalWriteData(meta, data, false); - } - - public long WriteReplicationObject(T key, object obj) - { - StorageItem meta = new StorageItem(); - meta.key = key; - meta.isReplicated = true; - meta.typename = fastJSON.Reflection.Instance.GetTypeAssemblyName(obj.GetType()); - byte[] data; - if (_saveFormat == SF_FORMAT.BSON) - data = fastBinaryJSON.BJSON.ToBJSON(obj); - else - data = Helper.GetBytes(fastJSON.JSON.ToJSON(obj)); - if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) - { - meta.isCompressed = 1; - data = MiniLZO.Compress(data); - } - return internalWriteData(meta, data, false); - } - - public long WriteData(T key, byte[] data) - { - StorageItem meta = new StorageItem(); - meta.key = key; - - if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) - { - meta.isCompressed = 1; - data = MiniLZO.Compress(data); - } - - return internalWriteData(meta, data, false); - } - - public byte[] ReadBytes(long recnum) - { - StorageItem meta; - return ReadBytes(recnum, out meta); - } - - public object ReadObject(long recnum) - { - StorageItem meta = null; - return ReadObject(recnum, out meta); - } - - public object ReadObject(long recnum, out StorageItem meta) - { - byte[] b = ReadBytes(recnum, out meta); - - if (b 
== null) - return null; - if (b[0] < 32) - return fastBinaryJSON.BJSON.ToObject(b); - else - return fastJSON.JSON.ToObject(Encoding.ASCII.GetString(b)); - } - - /// - /// used for views only - /// - /// - /// - public byte[] ViewReadRawBytes(long recnum) - { - // views can't be split - if (recnum >= _lastRecordNum) - return null; - - lock (_readlock) - { - long offset = ComputeOffset(recnum); - _dataread.Seek(offset, System.IO.SeekOrigin.Begin); - byte[] hdr = new byte[5]; - // read header - _dataread.Read(hdr, 0, 5); // meta length - int len = Helper.ToInt32(hdr, 1); - - int type = hdr[0]; - if (type == 0) - { - byte[] data = new byte[len]; - _dataread.Read(data, 0, len); - return data; - } - return null; - } - } - - public void Shutdown() - { - if (_files.Count > 0) - _files.ForEach(s => FlushClose(s.file)); - - FlushClose(_dataread); - FlushClose(_recfileread); - FlushClose(_recfilewrite); - FlushClose(_datawrite); - - _dataread = null; - _recfileread = null; - _recfilewrite = null; - _datawrite = null; - } - - public static StorageFile ReadForward(string filename) - { - StorageFile sf = new StorageFile(filename, true); - - return sf; - } - - public StorageItem ReadMeta(long rowid) - { - if (rowid >= _lastRecordNum) - return null; - lock (_readlock) - { - int metalen = 0; - long off = ComputeOffset(rowid); - FileStream fs = GetReadFileStreamWithSeek(off); - StorageItem meta = ReadMetaData(fs, out metalen); - return meta; - } - } - - #region [ private / internal ] - - private long internalWriteData(StorageItem meta, byte[] data, bool raw) - { - lock (_readlock) - { - _dirty = true; - // seek end of file - long offset = _lastWriteOffset; - if (_viewmode == false && Global.SplitStorageFilesMegaBytes > 0) - { - // current file size > _splitMegaBytes --> new file - if (offset > (long)Global.SplitStorageFilesMegaBytes * 1024 * 1024) - CreateNewStorageFile(); - } - - if (raw == false) - { - if (data != null) - meta.dataLength = data.Length; - byte[] metabytes = 
fastBinaryJSON.BJSON.ToBJSON(meta, new fastBinaryJSON.BJSONParameters { UseExtensions = false }); - - // write header info - _datawrite.Write(new byte[] { 1 }, 0, 1); // TODO : add json here, write bson for now - _datawrite.Write(Helper.GetBytes(metabytes.Length, false), 0, 4); - _datawrite.Write(metabytes, 0, metabytes.Length); - // update pointer - _lastWriteOffset += metabytes.Length + 5; - } - else - { - // write header info - _datawrite.Write(new byte[] { 0 }, 0, 1); // write raw - _datawrite.Write(Helper.GetBytes(data.Length, false), 0, 4); - // update pointer - _lastWriteOffset += 5; - } - - if (data != null) - { - // write data block - _datawrite.Write(data, 0, data.Length); - _lastWriteOffset += data.Length; - } - // return starting offset -> recno - long recno = _lastRecordNum++; - if (_recfilewrite != null) - _recfilewrite.Write(Helper.GetBytes(offset, false), 0, 8); - if (Global.FlushStorageFileImmediately) - { - _datawrite.Flush(); - if (_recfilewrite != null) - _recfilewrite.Flush(); - } - return recno; - } - } - - private void CreateNewStorageFile() - { - _log.Debug("Split limit reached = " + _datawrite.Length); - int i = _files.Count; - // close files - FlushClose(_datawrite); - FlushClose(_dataread); - long start = 0; - if (i > 0) - start = _lastsplitfile.uptolength; // last file offset - // rename mgdat to mgdat0000n - File.Move(_filename, _filename + i.ToString(_splitfileExtension)); - FileStream file = new FileStream(_filename + i.ToString(_splitfileExtension), FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - SplitFile sf = new SplitFile(); - sf.start = start; - sf.uptolength = _lastWriteOffset; - sf.file = file; - _files.Add(sf); - - _uptoindexes.Add(sf.uptolength); - - _lastsplitfile = sf; - // new mgdat file - _datawrite = new FileStream(_filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); - _dataread = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - _log.Debug("New storage file 
created, count = " + _files.Count); - } - - internal byte[] ReadBytes(long recnum, out StorageItem meta) - { - meta = null; - if (recnum >= _lastRecordNum) - return null; - lock (_readlock) - { - long off = ComputeOffset(recnum); - FileStream fs = GetReadFileStreamWithSeek(off); - byte[] data = internalReadBytes(fs, out meta); - - if (meta.isCompressed > 0) - data = MiniLZO.Decompress(data); - - return data; - } - } - - private long ComputeOffset(long recnum) - { - if (_dirty) - { - _datawrite.Flush(); - _recfilewrite.Flush(); - } - long off = recnum << 3;// *8L; - byte[] b = new byte[8]; - - _recfileread.Seek(off, SeekOrigin.Begin); - _recfileread.Read(b, 0, 8); - off = Helper.ToInt64(b, 0); - if (off == 0)// kludge - off = 6; - return off; - } - - private byte[] internalReadBytes(FileStream fs, out StorageItem meta) - { - int metalen = 0; - meta = ReadMetaData(fs, out metalen); - if (meta != null) - { - if (meta.isDeleted == false) - { - byte[] data = new byte[meta.dataLength]; - fs.Read(data, 0, meta.dataLength); - return data; - } - } - else - { - byte[] data = new byte[metalen]; - fs.Read(data, 0, metalen); - return data; - } - return null; - } - - private StorageItem ReadMetaData(FileStream fs, out int metasize) - { - byte[] hdr = new byte[5]; - // read header - fs.Read(hdr, 0, 5); // meta length - int len = Helper.ToInt32(hdr, 1); - int type = hdr[0]; - if (type > 0) - { - metasize = len + 5; - hdr = new byte[len]; - fs.Read(hdr, 0, len); - StorageItem meta; - if (type == 1) - meta = fastBinaryJSON.BJSON.ToObject>(hdr); - else - { - string str = Helper.GetString(hdr, 0, (short)hdr.Length); - meta = fastJSON.JSON.ToObject>(str); - } - return meta; - } - else - { - metasize = len; - return null; - } - } - - private void FlushClose(FileStream st) - { - if (st != null) - { - st.Flush(true); - st.Close(); - } - } - - internal T GetKey(long recnum, out bool deleted) - { - lock (_readlock) - { - deleted = false; - long off = ComputeOffset(recnum); - FileStream fs = 
GetReadFileStreamWithSeek(off); - - int metalen = 0; - StorageItem meta = ReadMetaData(fs, out metalen); - deleted = meta.isDeleted; - return meta.key; - } - } - - internal int CopyTo(StorageFile storageFile, long startrecord) - { - FileStream fs; - bool inthefiles = false; - // copy data here - lock (_readlock) - { - long off = ComputeOffset(startrecord); - fs = GetReadFileStreamWithSeek(off); - if (fs != _dataread) - inthefiles = true; - Pump(fs, storageFile._datawrite); - } - - // pump the remainder of the files also - if (inthefiles && _files.Count > 0) - { - long off = ComputeOffset(startrecord); - int i = binarysearch(off); - i++; // next file stream - for (int j = i; j < _files.Count; j++) - { - lock (_readlock) - { - fs = _files[j].file; - fs.Seek(0L, SeekOrigin.Begin); - Pump(fs, storageFile._datawrite); - } - } - - // pump the current mgdat - lock(_readlock) - { - _dataread.Seek(0L, SeekOrigin.Begin); - Pump(_dataread, storageFile._datawrite); - } - } - - return (int)_lastRecordNum; - } - - private static void Pump(Stream input, Stream output) - { - byte[] bytes = new byte[4096 * 2]; - int n; - while ((n = input.Read(bytes, 0, bytes.Length)) != 0) - output.Write(bytes, 0, n); - } - - internal IEnumerable> ReadOnlyEnumerate() - { - // MGREC files may not exist - - //// the total number of records - //long count = _recfileread.Length >> 3; - - //for (long i = 0; i < count; i++) - //{ - // StorageItem meta; - // byte[] data = ReadBytes(i, out meta); - // StorageData sd = new StorageData(); - // sd.meta = meta; - // if (meta.dataLength > 0) - // sd.data = data; - - // yield return sd; - //} - - long offset = _fileheader.Length;// start; // skip header - long size = _dataread.Length; - while (offset < size) - { - StorageData sd = new StorageData(); - lock (_readlock) - { - _dataread.Seek(offset, SeekOrigin.Begin); - int metalen = 0; - StorageItem meta = ReadMetaData(_dataread, out metalen); - offset += metalen; - - sd.meta = meta; - if (meta.dataLength > 0) - 
{ - byte[] data = new byte[meta.dataLength]; - _dataread.Read(data, 0, meta.dataLength); - sd.data = data; - } - offset += meta.dataLength; - } - yield return sd; - } - } - - private FileStream GetReadFileStreamWithSeek(long offset) - { - long fileoffset = offset; - // search split _files for offset and compute fileoffset in the file - if (_files.Count > 0) // we have splits - { - if (offset < _lastsplitfile.uptolength) // offset is in the list - { - int i = binarysearch(offset); - var f = _files[i]; - fileoffset -= f.start; // offset in the file - f.file.Seek(fileoffset, SeekOrigin.Begin); - return f.file; - } - else - fileoffset -= _lastsplitfile.uptolength; // offset in the mgdat file - } - - // seek to position in file - _dataread.Seek(fileoffset, SeekOrigin.Begin); - return _dataread; - } - - private int binarysearch(long offset) - { - //// binary search - int low = 0; - int high = _files.Count - 1; - int midpoint = 0; - int lastlower = 0; - - while (low <= high) - { - midpoint = low + (high - low) / 2; - long k = _uptoindexes[midpoint]; - // check to see if value is equal to item in array - if (offset == k) - return midpoint + 1; - else if (offset < k) - { - high = midpoint - 1; - lastlower = midpoint; - } - else - low = midpoint + 1; - } - - return lastlower; - } - #endregion - } -} +using System; +using System.Diagnostics; +using System.Collections; +using System.Runtime.InteropServices; +using System.IO; +using System.Text; +using System.Collections.Generic; +using RaptorDB.Common; +using System.Collections.Concurrent; + +namespace RaptorDB +{ + internal class StorageData + { + public StorageItem meta; + public byte[] data; + } + + public class StorageItem + { + public T key; + public string typename; + public DateTime date = FastDateTime.Now; + public bool isDeleted; + public bool isReplicated; + public int dataLength; + public byte isCompressed; // 0 = no, 1 = MiniLZO + } + + public interface IDocStorage + { + int RecordCount(); + + byte[] GetBytes(int 
rowid, out StorageItem meta); + object GetObject(int rowid, out StorageItem meta); + StorageItem GetMeta(int rowid); + + bool GetObject(T key, out object doc); + } + + public enum SF_FORMAT + { + BSON, + JSON + } + + internal struct SplitFile + { + public long start; + public long uptolength; + public FileStream file; + } + + internal class StorageFile + { + FileStream _datawrite; + FileStream _recfilewrite; + FileStream _recfileread = null; + FileStream _dataread = null; + + private string _filename = ""; + private string _recfilename = ""; + private long _lastRecordNum = 0; + private long _lastWriteOffset = _fileheader.Length; + private object _readlock = new object(); + private bool _dirty = false; + IGetBytes _T = null; + ILog _log = LogManager.GetLogger(typeof(StorageFile)); + private SF_FORMAT _saveFormat = SF_FORMAT.BSON; + + // **** change this if storage format changed **** + internal static int _CurrentVersion = 2; + + //private ushort _splitMegaBytes = 0; // 0 = off + //private bool _enableSplits = false; + private List _files = new List(); + private List _uptoindexes = new List(); + // no splits in view mode + private bool _viewmode = false; + private SplitFile _lastsplitfile; + + public static byte[] _fileheader = { (byte)'M', (byte)'G', (byte)'D', (byte)'B', + 0, // 4 -- storage file version number, + 0 // 5 -- not used + }; + private static string _splitfileExtension = "00000"; + private const int _KILOBYTE = 1024; + // record format : + // 1 type (0 = raw no meta data, 1 = bson meta, 2 = json meta) + // 4 byte meta/data length, + // n byte meta serialized data if exists + // m byte data (if meta exists then m is in meta.dataLength) + + /// + /// View data storage mode (no splits, bson save) + /// + /// + public StorageFile(string filename) + { + _viewmode = true; + _saveFormat = SF_FORMAT.BSON; + // add version number + _fileheader[5] = (byte)_CurrentVersion; + Initialize(filename, false); + } + /// + /// + /// + /// + /// + /// = true -> don't 
create mgrec files (used for backup and replication mode) + public StorageFile(string filename, SF_FORMAT format, bool StorageOnlyMode) + { + _saveFormat = format; + if (StorageOnlyMode) _viewmode = true; // no file splits + // add version number + _fileheader[5] = (byte)_CurrentVersion; + Initialize(filename, StorageOnlyMode); + } + + private StorageFile(string filename, bool StorageOnlyMode) + { + Initialize(filename, StorageOnlyMode); + } + + private void Initialize(string filename, bool StorageOnlyMode) + { + _T = RDBDataType.ByteHandler(); + _filename = filename; + + // search for mgdat00000 extensions -> split files load + if (File.Exists(filename + _splitfileExtension)) + { + LoadSplitFiles(filename); + } + + if (File.Exists(filename) == false) + _datawrite = new FileStream(filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); + else + _datawrite = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); + + _dataread = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + + if (_datawrite.Length == 0) + { + // new file + _datawrite.Write(_fileheader, 0, _fileheader.Length); + _datawrite.Flush(); + _lastWriteOffset = _fileheader.Length; + } + else + { + long i = _datawrite.Seek(0L, SeekOrigin.End); + if (_files.Count == 0) + _lastWriteOffset = i; + else + _lastWriteOffset += i; // add to the splits + } + + if (StorageOnlyMode == false) + { + // load rec pointers + _recfilename = filename.Substring(0, filename.LastIndexOf('.')) + ".mgrec"; + if (File.Exists(_recfilename) == false) + _recfilewrite = new FileStream(_recfilename, FileMode.CreateNew, FileAccess.Write, FileShare.ReadWrite); + else + _recfilewrite = new FileStream(_recfilename, FileMode.Open, FileAccess.Write, FileShare.ReadWrite); + + _recfileread = new FileStream(_recfilename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + + _lastRecordNum = (int)(_recfilewrite.Length / 8); + _recfilewrite.Seek(0L, 
SeekOrigin.End); + } + } + + private void LoadSplitFiles(string filename) + { + _log.Debug("Loading split files..."); + _lastWriteOffset = 0; + for (int i = 0; ; i++) + { + string _filename = filename + i.ToString(_splitfileExtension); + if (File.Exists(_filename) == false) + break; + FileStream file = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + SplitFile sf = new SplitFile(); + sf.start = _lastWriteOffset; + _lastWriteOffset += file.Length; + sf.file = file; + sf.uptolength = _lastWriteOffset; + _files.Add(sf); + _uptoindexes.Add(sf.uptolength); + } + _lastsplitfile = _files[_files.Count - 1]; + _log.Debug("Number of split files = " + _files.Count); + } + + public static int GetStorageFileHeaderVersion(string filename) + { + string fn = filename + _splitfileExtension; // if split files -> load the header from the first file -> mgdat00000 + if (File.Exists(fn) == false) + fn = filename; // else use the mgdat file + + if (File.Exists(fn)) + { + var fs = new FileStream(fn, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); + fs.Seek(0L, SeekOrigin.Begin); + byte[] b = new byte[_fileheader.Length]; + fs.Read(b, 0, _fileheader.Length); + fs.Close(); + return b[5]; + } + return _CurrentVersion; + } + + public int Count() + { + return (int)_lastRecordNum; + } + + public long WriteRawData(byte[] b) + { + return internalWriteData(null, b, true); + } + + public long Delete(T key) + { + StorageItem meta = new StorageItem(); + meta.key = key; + meta.isDeleted = true; + + return internalWriteData(meta, null, false); + } + + public long DeleteReplicated(T key) + { + StorageItem meta = new StorageItem(); + meta.key = key; + meta.isReplicated = true; + meta.isDeleted = true; + + return internalWriteData(meta, null, false); + } + + public long WriteObject(T key, object obj) + { + StorageItem meta = new StorageItem(); + meta.key = key; + meta.typename = fastJSON.Reflection.Instance.GetTypeAssemblyName(obj.GetType()); + byte[] data; + if 
(_saveFormat == SF_FORMAT.BSON) + data = fastBinaryJSON.BJSON.ToBJSON(obj); + else + data = Helper.GetBytes(fastJSON.JSON.ToJSON(obj)); + if(data.Length > (int)Global.CompressDocumentOverKiloBytes*_KILOBYTE) + { + meta.isCompressed = 1; + data = MiniLZO.Compress(data); //MiniLZO + } + return internalWriteData(meta, data, false); + } + + public long WriteReplicationObject(T key, object obj) + { + StorageItem meta = new StorageItem(); + meta.key = key; + meta.isReplicated = true; + meta.typename = fastJSON.Reflection.Instance.GetTypeAssemblyName(obj.GetType()); + byte[] data; + if (_saveFormat == SF_FORMAT.BSON) + data = fastBinaryJSON.BJSON.ToBJSON(obj); + else + data = Helper.GetBytes(fastJSON.JSON.ToJSON(obj)); + if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) + { + meta.isCompressed = 1; + data = MiniLZO.Compress(data); + } + return internalWriteData(meta, data, false); + } + + public long WriteData(T key, byte[] data) + { + StorageItem meta = new StorageItem(); + meta.key = key; + + if (data.Length > (int)Global.CompressDocumentOverKiloBytes * _KILOBYTE) + { + meta.isCompressed = 1; + data = MiniLZO.Compress(data); + } + + return internalWriteData(meta, data, false); + } + + public byte[] ReadBytes(long recnum) + { + StorageItem meta; + return ReadBytes(recnum, out meta); + } + + public object ReadObject(long recnum) + { + StorageItem meta = null; + return ReadObject(recnum, out meta); + } + + public object ReadObject(long recnum, out StorageItem meta) + { + byte[] b = ReadBytes(recnum, out meta); + + if (b == null) + return null; + if (b[0] < 32) + return fastBinaryJSON.BJSON.ToObject(b); + else + return fastJSON.JSON.ToObject(Encoding.ASCII.GetString(b)); + } + + /// + /// used for views only + /// + /// + /// + public byte[] ViewReadRawBytes(long recnum) + { + // views can't be split + if (recnum >= _lastRecordNum) + return null; + + lock (_readlock) + { + long offset = ComputeOffset(recnum); + _dataread.Seek(offset, 
System.IO.SeekOrigin.Begin); + byte[] hdr = new byte[5]; + // read header + _dataread.Read(hdr, 0, 5); // meta length + int len = Helper.ToInt32(hdr, 1); + + int type = hdr[0]; + if (type == 0) + { + byte[] data = new byte[len]; + _dataread.Read(data, 0, len); + return data; + } + return null; + } + } + + public void Shutdown() + { + if (_files.Count > 0) + _files.ForEach(s => FlushClose(s.file)); + + FlushClose(_dataread); + FlushClose(_recfileread); + FlushClose(_recfilewrite); + FlushClose(_datawrite); + + _dataread = null; + _recfileread = null; + _recfilewrite = null; + _datawrite = null; + } + + public static StorageFile ReadForward(string filename) + { + StorageFile sf = new StorageFile(filename, true); + + return sf; + } + + public StorageItem ReadMeta(long rowid) + { + if (rowid >= _lastRecordNum) + return null; + lock (_readlock) + { + int metalen = 0; + long off = ComputeOffset(rowid); + FileStream fs = GetReadFileStreamWithSeek(off); + StorageItem meta = ReadMetaData(fs, out metalen); + return meta; + } + } + + #region [ private / internal ] + + private long internalWriteData(StorageItem meta, byte[] data, bool raw) + { + byte[] metabytes = null; + if(!raw){ + if (data != null) + meta.dataLength = data.Length; + metabytes = fastBinaryJSON.BJSON.ToBJSON(meta, new fastBinaryJSON.BJSONParameters { UseExtensions = false }); + } + lock (_readlock) + { + _dirty = true; + // seek end of file + long offset = _lastWriteOffset; + if (_viewmode == false && Global.SplitStorageFilesMegaBytes > 0) + { + // current file size > _splitMegaBytes --> new file + if (offset > (long)Global.SplitStorageFilesMegaBytes * 1024 * 1024) + CreateNewStorageFile(); + } + + if (raw == false) + { + // write header info + _datawrite.Write(new byte[] { 1 }, 0, 1); // TODO : add json here, write bson for now + _datawrite.Write(Helper.GetBytes(metabytes.Length, false), 0, 4); + _datawrite.Write(metabytes, 0, metabytes.Length); + // update pointer + _lastWriteOffset += metabytes.Length + 5; 
+ } + else + { + // write header info + _datawrite.Write(new byte[] { 0 }, 0, 1); // write raw + _datawrite.Write(Helper.GetBytes(data.Length, false), 0, 4); + // update pointer + _lastWriteOffset += 5; + } + + if (data != null) + { + // write data block + _datawrite.Write(data, 0, data.Length); + _lastWriteOffset += data.Length; + } + // return starting offset -> recno + long recno = _lastRecordNum++; + if (_recfilewrite != null) + _recfilewrite.Write(Helper.GetBytes(offset, false), 0, 8); + if (Global.FlushStorageFileImmediately) + { + _datawrite.Flush(); + if (_recfilewrite != null) + _recfilewrite.Flush(); + } + return recno; + } + } + + private void CreateNewStorageFile() + { + _log.Debug("Split limit reached = " + _datawrite.Length); + int i = _files.Count; + // close files + FlushClose(_datawrite); + FlushClose(_dataread); + long start = 0; + if (i > 0) + start = _lastsplitfile.uptolength; // last file offset + // rename mgdat to mgdat0000n + File.Move(_filename, _filename + i.ToString(_splitfileExtension)); + FileStream file = new FileStream(_filename + i.ToString(_splitfileExtension), FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + SplitFile sf = new SplitFile(); + sf.start = start; + sf.uptolength = _lastWriteOffset; + sf.file = file; + _files.Add(sf); + + _uptoindexes.Add(sf.uptolength); + + _lastsplitfile = sf; + // new mgdat file + _datawrite = new FileStream(_filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); + _dataread = new FileStream(_filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + _log.Debug("New storage file created, count = " + _files.Count); + } + + internal byte[] ReadBytes(long recnum, out StorageItem meta) + { + meta = null; + if (recnum >= _lastRecordNum) + return null; + lock (_readlock) + { + long off = ComputeOffset(recnum); + FileStream fs = GetReadFileStreamWithSeek(off); + byte[] data = internalReadBytes(fs, out meta); + + if (meta.isCompressed > 0) + data = MiniLZO.Decompress(data); + 
+ return data; + } + } + + private long ComputeOffset(long recnum) + { + if (_dirty) + { + _datawrite.Flush(); + _recfilewrite.Flush(); + } + long off = recnum << 3;// *8L; + byte[] b = new byte[8]; + + _recfileread.Seek(off, SeekOrigin.Begin); + _recfileread.Read(b, 0, 8); + off = Helper.ToInt64(b, 0); + if (off == 0)// kludge + off = 6; + return off; + } + + private byte[] internalReadBytes(FileStream fs, out StorageItem meta) + { + int metalen = 0; + meta = ReadMetaData(fs, out metalen); + if (meta != null) + { + if (meta.isDeleted == false) + { + byte[] data = new byte[meta.dataLength]; + fs.Read(data, 0, meta.dataLength); + return data; + } + } + else + { + byte[] data = new byte[metalen]; + fs.Read(data, 0, metalen); + return data; + } + return null; + } + + private StorageItem ReadMetaData(FileStream fs, out int metasize) + { + byte[] hdr = new byte[5]; + // read header + fs.Read(hdr, 0, 5); // meta length + int len = Helper.ToInt32(hdr, 1); + int type = hdr[0]; + if (type > 0) + { + metasize = len + 5; + hdr = new byte[len]; + fs.Read(hdr, 0, len); + StorageItem meta; + if (type == 1) + meta = fastBinaryJSON.BJSON.ToObject>(hdr); + else + { + string str = Helper.GetString(hdr, 0, (short)hdr.Length); + meta = fastJSON.JSON.ToObject>(str); + } + return meta; + } + else + { + metasize = len; + return null; + } + } + + private void FlushClose(FileStream st) + { + if (st != null) + { + st.Flush(true); + st.Close(); + } + } + + internal T GetKey(long recnum, out bool deleted) + { + lock (_readlock) + { + deleted = false; + long off = ComputeOffset(recnum); + FileStream fs = GetReadFileStreamWithSeek(off); + + int metalen = 0; + StorageItem meta = ReadMetaData(fs, out metalen); + deleted = meta.isDeleted; + return meta.key; + } + } + + internal int CopyTo(StorageFile storageFile, long startrecord) + { + FileStream fs; + bool inthefiles = false; + // copy data here + lock (_readlock) + { + long off = ComputeOffset(startrecord); + fs = 
GetReadFileStreamWithSeek(off); + if (fs != _dataread) + inthefiles = true; + Pump(fs, storageFile._datawrite); + } + + // pump the remainder of the files also + if (inthefiles && _files.Count > 0) + { + long off = ComputeOffset(startrecord); + int i = binarysearch(off); + i++; // next file stream + for (int j = i; j < _files.Count; j++) + { + lock (_readlock) + { + fs = _files[j].file; + fs.Seek(0L, SeekOrigin.Begin); + Pump(fs, storageFile._datawrite); + } + } + + // pump the current mgdat + lock(_readlock) + { + _dataread.Seek(0L, SeekOrigin.Begin); + Pump(_dataread, storageFile._datawrite); + } + } + + return (int)_lastRecordNum; + } + + private static void Pump(Stream input, Stream output) + { + byte[] bytes = new byte[4096 * 2]; + int n; + while ((n = input.Read(bytes, 0, bytes.Length)) != 0) + output.Write(bytes, 0, n); + } + + internal IEnumerable> ReadOnlyEnumerate() + { + // MGREC files may not exist + + //// the total number of records + //long count = _recfileread.Length >> 3; + + //for (long i = 0; i < count; i++) + //{ + // StorageItem meta; + // byte[] data = ReadBytes(i, out meta); + // StorageData sd = new StorageData(); + // sd.meta = meta; + // if (meta.dataLength > 0) + // sd.data = data; + + // yield return sd; + //} + + long offset = _fileheader.Length;// start; // skip header + long size = _dataread.Length; + while (offset < size) + { + StorageData sd = new StorageData(); + lock (_readlock) + { + _dataread.Seek(offset, SeekOrigin.Begin); + int metalen = 0; + StorageItem meta = ReadMetaData(_dataread, out metalen); + offset += metalen; + + sd.meta = meta; + if (meta.dataLength > 0) + { + byte[] data = new byte[meta.dataLength]; + _dataread.Read(data, 0, meta.dataLength); + sd.data = data; + } + offset += meta.dataLength; + } + yield return sd; + } + } + + private FileStream GetReadFileStreamWithSeek(long offset) + { + long fileoffset = offset; + // search split _files for offset and compute fileoffset in the file + if (_files.Count > 0) // we 
have splits + { + if (offset < _lastsplitfile.uptolength) // offset is in the list + { + int i = binarysearch(offset); + var f = _files[i]; + fileoffset -= f.start; // offset in the file + f.file.Seek(fileoffset, SeekOrigin.Begin); + return f.file; + } + else + fileoffset -= _lastsplitfile.uptolength; // offset in the mgdat file + } + + // seek to position in file + _dataread.Seek(fileoffset, SeekOrigin.Begin); + return _dataread; + } + + private int binarysearch(long offset) + { + //// binary search + int low = 0; + int high = _files.Count - 1; + int midpoint = 0; + int lastlower = 0; + + while (low <= high) + { + midpoint = low + (high - low) / 2; + long k = _uptoindexes[midpoint]; + // check to see if value is equal to item in array + if (offset == k) + return midpoint + 1; + else if (offset < k) + { + high = midpoint - 1; + lastlower = midpoint; + } + else + low = midpoint + 1; + } + + return lastlower; + } + #endregion + } +} diff --git a/RaptorDB/Storage/StorageFileHF.cs b/RaptorDB/Storage/StorageFileHF.cs index ff4cb9c..3b57fe1 100644 --- a/RaptorDB/Storage/StorageFileHF.cs +++ b/RaptorDB/Storage/StorageFileHF.cs @@ -1,210 +1,210 @@ -using System; -using System.Diagnostics; -using System.Collections; -using System.Runtime.InteropServices; -using System.IO; -using System.Text; -using System.Collections.Generic; -using RaptorDB.Common; -using System.Threading; - -namespace RaptorDB -{ - // high frequency storage file with overwrite old values - internal class StorageFileHF - { - FileStream _datawrite; - WAHBitArray _freeList; - - private string _filename = ""; - private object _readlock = new object(); - ILog _log = LogManager.GetLogger(typeof(StorageFileHF)); - - // **** change this if storage format changed **** - internal static int _CurrentVersion = 1; - int _lastBlockNumber = 0; - private ushort _BLOCKSIZE = 4096; - private string _Path = ""; - private string _S = Path.DirectorySeparatorChar.ToString(); - - public static byte[] _fileheader = { (byte)'M', 
(byte)'G', (byte)'H', (byte)'F', - 0, // 4 -- storage file version number, - 0,2, // 5,6 -- block size ushort low, hi - 1 // 7 -- key type 0 = guid, 1 = string - }; - - public StorageFileHF(string filename, ushort blocksize) - { - _Path = Path.GetDirectoryName(filename); - if (_Path.EndsWith(_S) == false) _Path += _S; - _filename = Path.GetFileNameWithoutExtension(filename); - - Initialize(filename, blocksize); - } - - public void Shutdown() - { - FlushClose(_datawrite); - // write free list - WriteFreeListBMPFile(_Path + _filename + ".free"); - - _datawrite = null; - } - - public ushort GetBlockSize() - { - return _BLOCKSIZE; - } - - internal void FreeBlocks(List list) - { - list.ForEach(x => _freeList.Set(x, true)); - } - - internal byte[] ReadBlock(int blocknumber) - { - SeekBlock(blocknumber); - byte[] data = new byte[_BLOCKSIZE]; - _datawrite.Read(data, 0, _BLOCKSIZE); - - return data; - } - - internal byte[] ReadBlockBytes(int blocknumber, int bytes) - { - SeekBlock(blocknumber); - byte[] data = new byte[bytes]; - _datawrite.Read(data, 0, bytes); - - return data; - } - - internal int GetFreeBlockNumber() - { - // get the first free block or append to the end - if (_freeList.CountOnes() > 0) - { - int i = _freeList.GetFirst(); - _freeList.Set(i, false); - return i; - } - else - return Interlocked.Increment(ref _lastBlockNumber);//++; - } - - internal void SeekBlock(int blocknumber) - { - long offset = (long)_fileheader.Length + blocknumber * _BLOCKSIZE; - _datawrite.Seek(offset, SeekOrigin.Begin);// wiil seek past the end of file on fs.Write will zero the difference - } - - internal void WriteBlockBytes(byte[] data, int start, int len) - { - _datawrite.Write(data, start, len); - } - - #region [ private / internal ] - - private void WriteFreeListBMPFile(string filename) - { - WAHBitArray.TYPE t; - uint[] ints = _freeList.GetCompressed(out t); - MemoryStream ms = new MemoryStream(); - BinaryWriter bw = new BinaryWriter(ms); - bw.Write((byte)t);// write new 
format with the data type byte - foreach (var i in ints) - { - bw.Write(i); - } - File.WriteAllBytes(filename, ms.ToArray()); - } - - private void ReadFreeListBMPFile(string filename) - { - byte[] b = File.ReadAllBytes(filename); - WAHBitArray.TYPE t = WAHBitArray.TYPE.WAH; - int j = 0; - if (b.Length % 4 > 0) // new format with the data type byte - { - t = (WAHBitArray.TYPE)Enum.ToObject(typeof(WAHBitArray.TYPE), b[0]); - j = 1; - } - List ints = new List(); - for (int i = 0; i < b.Length / 4; i++) - { - ints.Add((uint)Helper.ToInt32(b, (i * 4) + j)); - } - _freeList = new WAHBitArray(t, ints.ToArray()); - } - - private void Initialize(string filename, ushort blocksize) - { - if (File.Exists(filename) == false) - _datawrite = new FileStream(filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); - else - _datawrite = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); - - if (_datawrite.Length == 0) - { - CreateFileHeader(blocksize); - // new file - _datawrite.Write(_fileheader, 0, _fileheader.Length); - _datawrite.Flush(); - } - else - { - ReadFileHeader(); - _lastBlockNumber = (int)((_datawrite.Length - _fileheader.Length) / _BLOCKSIZE); - _lastBlockNumber++; - } - _freeList = new WAHBitArray(); - if (File.Exists(_Path + _filename + ".free")) - { - ReadFreeListBMPFile(_Path + _filename + ".free"); - // delete file so if failure no big deal on restart - File.Delete(_Path + _filename + ".free"); - } - } - - private void ReadFileHeader() - { - // set _blockize - _datawrite.Seek(0L, SeekOrigin.Begin); - byte[] hdr = new byte[_fileheader.Length]; - _datawrite.Read(hdr, 0, _fileheader.Length); - - _BLOCKSIZE = 0; - _BLOCKSIZE = (ushort)((int)hdr[5] + ((int)hdr[6]) << 8); - } - - private void CreateFileHeader(int blocksize) - { - // add version number - _fileheader[4] = (byte)_CurrentVersion; - // block size - _fileheader[5] = (byte)(blocksize & 0xff); - _fileheader[6] = (byte)(blocksize >> 8); - _BLOCKSIZE = 
(ushort)blocksize; - } - - private void FlushClose(FileStream st) - { - if (st != null) - { - st.Flush(true); - st.Close(); - } - } - #endregion - - internal int NumberofBlocks() - { - return (int)((_datawrite.Length / (int)_BLOCKSIZE) + 1); - } - - internal void FreeBlock(int i) - { - _freeList.Set(i, true); - } - } -} +using System; +using System.Diagnostics; +using System.Collections; +using System.Runtime.InteropServices; +using System.IO; +using System.Text; +using System.Collections.Generic; +using RaptorDB.Common; +using System.Threading; + +namespace RaptorDB +{ + // high frequency storage file with overwrite old values + internal class StorageFileHF + { + FileStream _datawrite; + WahBitArray _freeList; + + private string _filename = ""; + private object _readlock = new object(); + ILog _log = LogManager.GetLogger(typeof(StorageFileHF)); + + // **** change this if storage format changed **** + internal static int _CurrentVersion = 1; + int _lastBlockNumber = 0; + private ushort _BLOCKSIZE = 4096; + private string _Path = ""; + private string _S = Path.DirectorySeparatorChar.ToString(); + + public static byte[] _fileheader = { (byte)'M', (byte)'G', (byte)'H', (byte)'F', + 0, // 4 -- storage file version number, + 0,2, // 5,6 -- block size ushort low, hi + 1 // 7 -- key type 0 = guid, 1 = string + }; + + public StorageFileHF(string filename, ushort blocksize) + { + _Path = Path.GetDirectoryName(filename); + if (_Path.EndsWith(_S) == false) _Path += _S; + _filename = Path.GetFileNameWithoutExtension(filename); + + Initialize(filename, blocksize); + } + + public void Shutdown() + { + FlushClose(_datawrite); + // write free list + WriteFreeListBMPFile(_Path + _filename + ".free"); + + _datawrite = null; + } + + public ushort GetBlockSize() + { + return _BLOCKSIZE; + } + + internal void FreeBlocks(List list) + { + list.ForEach(x => _freeList.Set(x, true)); + } + + internal byte[] ReadBlock(int blocknumber) + { + SeekBlock(blocknumber); + byte[] data = new 
byte[_BLOCKSIZE]; + _datawrite.Read(data, 0, _BLOCKSIZE); + + return data; + } + + internal byte[] ReadBlockBytes(int blocknumber, int bytes) + { + SeekBlock(blocknumber); + byte[] data = new byte[bytes]; + _datawrite.Read(data, 0, bytes); + + return data; + } + + internal int GetFreeBlockNumber() + { + // get the first free block or append to the end + if (_freeList.CountOnes() > 0) + { + int i = _freeList.GetFirstIndex(); + _freeList.Set(i, false); + return i; + } + else + return Interlocked.Increment(ref _lastBlockNumber);//++; + } + + internal void SeekBlock(int blocknumber) + { + long offset = (long)_fileheader.Length + blocknumber * _BLOCKSIZE; + _datawrite.Seek(offset, SeekOrigin.Begin);// wiil seek past the end of file on fs.Write will zero the difference + } + + internal void WriteBlockBytes(byte[] data, int start, int len) + { + _datawrite.Write(data, start, len); + } + + #region [ private / internal ] + + private void WriteFreeListBMPFile(string filename) + { + WahBitArrayState t; + uint[] ints = _freeList.GetCompressed(out t); + MemoryStream ms = new MemoryStream(); + BinaryWriter bw = new BinaryWriter(ms); + bw.Write((byte)t);// write new format with the data type byte + foreach (var i in ints) + { + bw.Write(i); + } + File.WriteAllBytes(filename, ms.ToArray()); + } + + private void ReadFreeListBMPFile(string filename) + { + byte[] b = File.ReadAllBytes(filename); + WahBitArrayState t = WahBitArrayState.Wah; + int j = 0; + if (b.Length % 4 > 0) // new format with the data type byte + { + t = (WahBitArrayState)Enum.ToObject(typeof(WahBitArrayState), b[0]); + j = 1; + } + List ints = new List(); + for (int i = 0; i < b.Length / 4; i++) + { + ints.Add((uint)Helper.ToInt32(b, (i * 4) + j)); + } + _freeList = new WahBitArray(t, ints.ToArray()); + } + + private void Initialize(string filename, ushort blocksize) + { + if (File.Exists(filename) == false) + _datawrite = new FileStream(filename, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite); + 
else + _datawrite = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); + + if (_datawrite.Length == 0) + { + CreateFileHeader(blocksize); + // new file + _datawrite.Write(_fileheader, 0, _fileheader.Length); + _datawrite.Flush(); + } + else + { + ReadFileHeader(); + _lastBlockNumber = (int)((_datawrite.Length - _fileheader.Length) / _BLOCKSIZE); + _lastBlockNumber++; + } + _freeList = new WahBitArray(); + if (File.Exists(_Path + _filename + ".free")) + { + ReadFreeListBMPFile(_Path + _filename + ".free"); + // delete file so if failure no big deal on restart + File.Delete(_Path + _filename + ".free"); + } + } + + private void ReadFileHeader() + { + // set _blockize + _datawrite.Seek(0L, SeekOrigin.Begin); + byte[] hdr = new byte[_fileheader.Length]; + _datawrite.Read(hdr, 0, _fileheader.Length); + + _BLOCKSIZE = 0; + _BLOCKSIZE = (ushort)((int)hdr[5] + ((int)hdr[6]) << 8); + } + + private void CreateFileHeader(int blocksize) + { + // add version number + _fileheader[4] = (byte)_CurrentVersion; + // block size + _fileheader[5] = (byte)(blocksize & 0xff); + _fileheader[6] = (byte)(blocksize >> 8); + _BLOCKSIZE = (ushort)blocksize; + } + + private void FlushClose(FileStream st) + { + if (st != null) + { + st.Flush(true); + st.Close(); + } + } + #endregion + + internal int NumberofBlocks() + { + return (int)((_datawrite.Length / (int)_BLOCKSIZE) + 1); + } + + internal void FreeBlock(int i) + { + _freeList.Set(i, true); + } + } +} diff --git a/RaptorDB/View.cs b/RaptorDB/View.cs new file mode 100644 index 0000000..8aa7c9e --- /dev/null +++ b/RaptorDB/View.cs @@ -0,0 +1,215 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Xml.Serialization; +using RaptorDB.Views; +using System.Reflection; + +namespace RaptorDB +{ + public abstract class ViewBase + { + public delegate void MapFunctionDelgate(IMapAPI api, Guid docid, V doc); + /// + /// Increment this when you change view definitions 
so the engine can rebuild the contents + /// + public int Version { get; set; } + + /// + /// Name of the view will be used for foldernames and filename and generated code + /// + public string Name { get; set; } + + /// + /// A text for describing this views purpose for other developers + /// + public string Description { get; set; } + + /// + /// Column definitions for the view storage + /// + public Type Schema { get; set; } + + /// + /// Is this the primary list and will be populated synchronously + /// + public bool isPrimaryList { get; set; } + + /// + /// Is this view active and will recieve data + /// + public bool isActive { get; set; } + + /// + /// Delete items on DocID before inserting new rows (default = true) + /// + public bool DeleteBeforeInsert { get; set; } + + /// + /// Index in the background : better performance but reads might not have all the data + /// + public bool BackgroundIndexing { get; set; } + + /// + /// Save documents to this view in the save process, like primary views + /// + public bool ConsistentSaveToThisView { get; set; } + + /// + /// Apply to a Primary View and all the mappings of all views will be done in a transaction. + /// You can use Rollback for failures. 
+ /// + public bool TransactionMode { get; set; } + + /// + /// When defining your own schema and you don't want dependancies to RaptorDB to propogate through your code + /// define your full text columns here + /// + [Obsolete("You should use IndexDefinitions and ViewIndexDefinitionHelper extension methods")] + public List FullTextColumns = new List(); + + /// + /// When defining your own schems and you don't want dependancies to RaptorDB to propogate through your code + /// define your case insensitive columns here + /// + [Obsolete("You should use IndexDefinitions and ViewIndexDefinitionHelper extension methods")] + public List CaseInsensitiveColumns = new List(); + + + [Obsolete("You should use IndexDefinitions and ViewIndexDefinitionHelper extension methods")] + public Dictionary StringIndexLength = new Dictionary(); + + /// + /// Columns that you don't want to index + /// + [Obsolete("You should use IndexDefinitions and ViewIndexDefinitionHelper extension methods")] + public List NoIndexingColumns = new List(); + + public Dictionary IndexDefinitions { get; set; } + + + public void AutoInitIndexDefinitions() + { + foreach (var p in Schema.GetProperties()) + { + if (!IndexDefinitions.ContainsKey(p.Name)) + { + Type t = p.PropertyType; + IndexDefinitions[p.Name] = AutoInitMember(p, t); + } + } + + foreach (var f in Schema.GetFields()) + { + if (!IndexDefinitions.ContainsKey(f.Name)) + { + Type t = f.FieldType; + IndexDefinitions[f.Name] = AutoInitMember(f, t); + } + } + } + +#pragma warning disable CS0618 // Type or member is obsolete + public IViewColumnIndexDefinition AutoInitMember(MemberInfo p, Type t) + { + if (NoIndexingColumns.Contains(p.Name) || NoIndexingColumns.Contains(p.Name.ToLower())) + { + return new NoIndexColumnDefinition(); + } + else + { + if (FullTextColumns.Contains(p.Name) || FullTextColumns.Contains(p.Name.ToLower()) || Attribute.IsDefined(p, typeof(FullTextAttribute), true)) + return new FullTextIndexColumnDefinition(); + + var cs = 
p.GetCustomAttributes(typeof(CaseInsensitiveAttribute), true).Length > 0 || + CaseInsensitiveColumns.Contains(p.Name) || CaseInsensitiveColumns.Contains(p.Name.ToLower()); + + byte length = Global.DefaultStringKeySize; + var a = p.GetCustomAttributes(typeof(StringIndexLengthAttribute), false); + if (a.Length > 0) + { + length = (a[0] as StringIndexLengthAttribute).Length; + } + if (StringIndexLength.ContainsKey(p.Name) || StringIndexLength.ContainsKey(p.Name.ToLower())) + { + if (!StringIndexLength.TryGetValue(p.Name, out length)) + StringIndexLength.TryGetValue(p.Name.ToLower(), out length); + } + + if (t == typeof(string)) + { + // TODO: case sensitive index + return new StringIndexColumnDefinition(length); + } + else if (t == typeof(bool)) + { + return new BoolIndexColumnDefinition(); + } + return new MGIndexColumnDefinition(t, length); + } + } +#pragma warning restore CS0618 // Type or member is obsolete + + public abstract Type GetDocType(); + } + + + public class View : ViewBase + { + public View() + { + isActive = true; + DeleteBeforeInsert = true; + BackgroundIndexing = true; + IndexDefinitions = new Dictionary() + { + {"docid", new MMIndexColumnDefinition() } + }; + } + + /// + /// Inline delegate for the mapper function used for quick applications + /// + [XmlIgnore] + public MapFunctionDelgate Mapper { get; set; } + + public void Verify() + { + if (string.IsNullOrEmpty(this.Name)) + throw new Exception("Name must be given"); + if (Schema == null) + throw new Exception("Schema must be defined"); + if (Schema.IsSubclassOf(typeof(RDBSchema)) == false) + { + var pi = Schema.GetProperty("docid"); + if (pi == null || pi.PropertyType != typeof(Guid)) + { + var fi = Schema.GetField("docid"); + if (fi == null || fi.FieldType != typeof(Guid)) + throw new Exception("The schema must be derived from RaptorDB.RDBSchema or must contain a 'docid' Guid field or property"); + } + } + if (Mapper == null) + throw new Exception("A map function must be defined"); + + if 
(TransactionMode == true && isPrimaryList == false) + throw new Exception("Transaction mode can only be enabled on Primary Views"); + + // FEATURE : add more verifications + } + + public override Type GetDocType() + { + return typeof(T); + } + } + + public abstract class View : View + { + public View() + { + this.Schema = typeof(TSchema); + } + } +} diff --git a/RaptorDB/Views/Dynamic.cs b/RaptorDB/Views/Dynamic.cs index 17a4cf2..baf5eaf 100644 --- a/RaptorDB/Views/Dynamic.cs +++ b/RaptorDB/Views/Dynamic.cs @@ -1,2197 +1,2197 @@ -//Copyright (C) Microsoft Corporation. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text; -using System.Linq; -using System.Linq.Expressions; -using System.Reflection; -using System.Reflection.Emit; -using System.Threading; - -namespace System.Linq.Dynamic -{ - // FEATURE : cleanup unused code here - - #region [ Classes ] - internal abstract class DynamicClass - { - public override string ToString() - { - PropertyInfo[] props = this.GetType().GetProperties(BindingFlags.Instance | BindingFlags.Public); - StringBuilder sb = new StringBuilder(); - sb.Append("{"); - for (int i = 0; i < props.Length; i++) - { - if (i > 0) sb.Append(", "); - sb.Append(props[i].Name); - sb.Append("="); - sb.Append(props[i].GetValue(this, null)); - } - sb.Append("}"); - return sb.ToString(); - } - } - - internal class DynamicProperty - { - string name; - Type type; - - public DynamicProperty(string name, Type type) - { - if (name == null) throw new ArgumentNullException("name"); - if (type == null) throw new ArgumentNullException("type"); - this.name = name; - this.type = type; - } - - public string Name - { - get { return name; } - } - - public Type Type - { - get { return type; } - } - } - - internal static class DynamicExpression - { - public static Expression Parse(Type resultType, string expression, params object[] values) - { - ExpressionParser parser = new ExpressionParser(null, expression, values); - return 
parser.Parse(resultType); - } - - public static LambdaExpression ParseLambda(Type itType, Type resultType, string expression, params object[] values) - { - return ParseLambda(new ParameterExpression[] { Expression.Parameter(itType, "") }, resultType, expression, values); - } - - public static LambdaExpression ParseLambda(ParameterExpression[] parameters, Type resultType, string expression, params object[] values) - { - ExpressionParser parser = new ExpressionParser(parameters, expression, values); - return Expression.Lambda(parser.Parse(resultType), parameters); - } - - //public static Expression> ParseLambda(string expression, params object[] values) - //{ - // return (Expression>)ParseLambda(typeof(T), typeof(S), expression, values); - //} - - public static Type CreateClass(params DynamicProperty[] properties) - { - return ClassFactory.Instance.GetDynamicClass(properties); - } - - public static Type CreateClass(IEnumerable properties) - { - return ClassFactory.Instance.GetDynamicClass(properties); - } - } - - //internal class DynamicOrdering - //{ - // public Expression Selector; - // public bool Ascending; - //} - - internal class Signature : IEquatable - { - public DynamicProperty[] properties; - public int hashCode; - - public Signature(IEnumerable properties) - { - this.properties = properties.ToArray(); - hashCode = 0; - foreach (DynamicProperty p in properties) - { - hashCode ^= p.Name.GetHashCode() ^ p.Type.GetHashCode(); - } - } - - public override int GetHashCode() - { - return hashCode; - } - - public override bool Equals(object obj) - { - return obj is Signature ? 
Equals((Signature)obj) : false; - } - - public bool Equals(Signature other) - { - if (properties.Length != other.properties.Length) return false; - for (int i = 0; i < properties.Length; i++) - { - if (properties[i].Name != other.properties[i].Name || - properties[i].Type != other.properties[i].Type) return false; - } - return true; - } - } - - internal class ClassFactory - { - public static readonly ClassFactory Instance = new ClassFactory(); - - static ClassFactory() { } // Trigger lazy initialization of static fields - - ModuleBuilder module; - Dictionary classes; - int classCount; - ReaderWriterLock rwLock; - - private ClassFactory() - { - AssemblyName name = new AssemblyName("DynamicClasses"); - AssemblyBuilder assembly = AppDomain.CurrentDomain.DefineDynamicAssembly(name, AssemblyBuilderAccess.Run); -#if ENABLE_LINQ_PARTIAL_TRUST - new ReflectionPermission(PermissionState.Unrestricted).Assert(); -#endif - try - { - module = assembly.DefineDynamicModule("Module"); - } - finally - { -#if ENABLE_LINQ_PARTIAL_TRUST - PermissionSet.RevertAssert(); -#endif - } - classes = new Dictionary(); - rwLock = new ReaderWriterLock(); - } - - public Type GetDynamicClass(IEnumerable properties) - { - rwLock.AcquireReaderLock(Timeout.Infinite); - try - { - Signature signature = new Signature(properties); - Type type; - if (!classes.TryGetValue(signature, out type)) - { - type = CreateDynamicClass(signature.properties); - classes.Add(signature, type); - } - return type; - } - finally - { - rwLock.ReleaseReaderLock(); - } - } - - Type CreateDynamicClass(DynamicProperty[] properties) - { - LockCookie cookie = rwLock.UpgradeToWriterLock(Timeout.Infinite); - try - { - string typeName = "DynamicClass" + (classCount + 1); -#if ENABLE_LINQ_PARTIAL_TRUST - new ReflectionPermission(PermissionState.Unrestricted).Assert(); -#endif - try - { - TypeBuilder tb = this.module.DefineType(typeName, TypeAttributes.Class | - TypeAttributes.Public, typeof(DynamicClass)); - FieldInfo[] fields = 
GenerateProperties(tb, properties); - GenerateEquals(tb, fields); - GenerateGetHashCode(tb, fields); - Type result = tb.CreateType(); - classCount++; - return result; - } - finally - { -#if ENABLE_LINQ_PARTIAL_TRUST - PermissionSet.RevertAssert(); -#endif - } - } - finally - { - rwLock.DowngradeFromWriterLock(ref cookie); - } - } - - FieldInfo[] GenerateProperties(TypeBuilder tb, DynamicProperty[] properties) - { - FieldInfo[] fields = new FieldBuilder[properties.Length]; - for (int i = 0; i < properties.Length; i++) - { - DynamicProperty dp = properties[i]; - FieldBuilder fb = tb.DefineField("_" + dp.Name, dp.Type, FieldAttributes.Private); - PropertyBuilder pb = tb.DefineProperty(dp.Name, PropertyAttributes.HasDefault, dp.Type, null); - MethodBuilder mbGet = tb.DefineMethod("get_" + dp.Name, - MethodAttributes.Public | MethodAttributes.SpecialName | MethodAttributes.HideBySig, - dp.Type, Type.EmptyTypes); - ILGenerator genGet = mbGet.GetILGenerator(); - genGet.Emit(OpCodes.Ldarg_0); - genGet.Emit(OpCodes.Ldfld, fb); - genGet.Emit(OpCodes.Ret); - MethodBuilder mbSet = tb.DefineMethod("set_" + dp.Name, - MethodAttributes.Public | MethodAttributes.SpecialName | MethodAttributes.HideBySig, - null, new Type[] { dp.Type }); - ILGenerator genSet = mbSet.GetILGenerator(); - genSet.Emit(OpCodes.Ldarg_0); - genSet.Emit(OpCodes.Ldarg_1); - genSet.Emit(OpCodes.Stfld, fb); - genSet.Emit(OpCodes.Ret); - pb.SetGetMethod(mbGet); - pb.SetSetMethod(mbSet); - fields[i] = fb; - } - return fields; - } - - void GenerateEquals(TypeBuilder tb, FieldInfo[] fields) - { - MethodBuilder mb = tb.DefineMethod("Equals", - MethodAttributes.Public | MethodAttributes.ReuseSlot | - MethodAttributes.Virtual | MethodAttributes.HideBySig, - typeof(bool), new Type[] { typeof(object) }); - ILGenerator gen = mb.GetILGenerator(); - LocalBuilder other = gen.DeclareLocal(tb); - Label next = gen.DefineLabel(); - gen.Emit(OpCodes.Ldarg_1); - gen.Emit(OpCodes.Isinst, tb); - gen.Emit(OpCodes.Stloc, other); - 
gen.Emit(OpCodes.Ldloc, other); - gen.Emit(OpCodes.Brtrue_S, next); - gen.Emit(OpCodes.Ldc_I4_0); - gen.Emit(OpCodes.Ret); - gen.MarkLabel(next); - foreach (FieldInfo field in fields) - { - Type ft = field.FieldType; - Type ct = typeof(EqualityComparer<>).MakeGenericType(ft); - next = gen.DefineLabel(); - gen.EmitCall(OpCodes.Call, ct.GetMethod("get_Default"), null); - gen.Emit(OpCodes.Ldarg_0); - gen.Emit(OpCodes.Ldfld, field); - gen.Emit(OpCodes.Ldloc, other); - gen.Emit(OpCodes.Ldfld, field); - gen.EmitCall(OpCodes.Callvirt, ct.GetMethod("Equals", new Type[] { ft, ft }), null); - gen.Emit(OpCodes.Brtrue_S, next); - gen.Emit(OpCodes.Ldc_I4_0); - gen.Emit(OpCodes.Ret); - gen.MarkLabel(next); - } - gen.Emit(OpCodes.Ldc_I4_1); - gen.Emit(OpCodes.Ret); - } - - void GenerateGetHashCode(TypeBuilder tb, FieldInfo[] fields) - { - MethodBuilder mb = tb.DefineMethod("GetHashCode", - MethodAttributes.Public | MethodAttributes.ReuseSlot | - MethodAttributes.Virtual | MethodAttributes.HideBySig, - typeof(int), Type.EmptyTypes); - ILGenerator gen = mb.GetILGenerator(); - gen.Emit(OpCodes.Ldc_I4_0); - foreach (FieldInfo field in fields) - { - Type ft = field.FieldType; - Type ct = typeof(EqualityComparer<>).MakeGenericType(ft); - gen.EmitCall(OpCodes.Call, ct.GetMethod("get_Default"), null); - gen.Emit(OpCodes.Ldarg_0); - gen.Emit(OpCodes.Ldfld, field); - gen.EmitCall(OpCodes.Callvirt, ct.GetMethod("GetHashCode", new Type[] { ft }), null); - gen.Emit(OpCodes.Xor); - } - gen.Emit(OpCodes.Ret); - } - } - - internal sealed class ParseException : Exception - { - int position; - - public ParseException(string message, int position) - : base(message) - { - this.position = position; - } - - public int Position - { - get { return position; } - } - - public override string ToString() - { - return string.Format(Res.ParseExceptionFormat, Message, position); - } - } - #endregion - - internal class ExpressionParser - { - #region [ internal ] - struct Token - { - public TokenId id; - public 
string text; - public int pos; - } - - enum TokenId - { - Unknown, - End, - Identifier, - StringLiteral, - IntegerLiteral, - RealLiteral, - Exclamation, - Percent, - Amphersand, - OpenParen, - CloseParen, - Asterisk, - Plus, - Comma, - Minus, - Dot, - Slash, - Colon, - LessThan, - Equal, - GreaterThan, - Question, - OpenBracket, - CloseBracket, - Bar, - ExclamationEqual, - DoubleAmphersand, - LessThanEqual, - LessGreater, - DoubleEqual, - GreaterThanEqual, - DoubleBar - } - - interface ILogicalSignatures - { - void F(bool x, bool y); - void F(bool? x, bool? y); - } - - interface IArithmeticSignatures - { - void F(int x, int y); - void F(uint x, uint y); - void F(long x, long y); - void F(ulong x, ulong y); - void F(float x, float y); - void F(double x, double y); - void F(decimal x, decimal y); - void F(int? x, int? y); - void F(uint? x, uint? y); - void F(long? x, long? y); - void F(ulong? x, ulong? y); - void F(float? x, float? y); - void F(double? x, double? y); - void F(decimal? x, decimal? y); - } - - interface IRelationalSignatures : IArithmeticSignatures - { - void F(string x, string y); - void F(char x, char y); - void F(DateTime x, DateTime y); - void F(TimeSpan x, TimeSpan y); - void F(char? x, char? y); - void F(DateTime? x, DateTime? y); - void F(TimeSpan? x, TimeSpan? y); - } - - interface IEqualitySignatures : IRelationalSignatures - { - void F(bool x, bool y); - void F(bool? x, bool? y); - } - - interface IAddSignatures : IArithmeticSignatures - { - void F(DateTime x, TimeSpan y); - void F(TimeSpan x, TimeSpan y); - void F(DateTime? x, TimeSpan? y); - void F(TimeSpan? x, TimeSpan? y); - } - - interface ISubtractSignatures : IAddSignatures - { - void F(DateTime x, DateTime y); - void F(DateTime? x, DateTime? y); - } - - interface INegationSignatures - { - void F(int x); - void F(long x); - void F(float x); - void F(double x); - void F(decimal x); - void F(int? x); - void F(long? x); - void F(float? x); - void F(double? x); - void F(decimal? 
x); - } - - interface INotSignatures - { - void F(bool x); - void F(bool? x); - } - - interface IEnumerableSignatures - { - void Where(bool predicate); - void Any(); - void Any(bool predicate); - void All(bool predicate); - void Count(); - void Count(bool predicate); - void Min(object selector); - void Max(object selector); - void Sum(int selector); - void Sum(int? selector); - void Sum(long selector); - void Sum(long? selector); - void Sum(float selector); - void Sum(float? selector); - void Sum(double selector); - void Sum(double? selector); - void Sum(decimal selector); - void Sum(decimal? selector); - void Average(int selector); - void Average(int? selector); - void Average(long selector); - void Average(long? selector); - void Average(float selector); - void Average(float? selector); - void Average(double selector); - void Average(double? selector); - void Average(decimal selector); - void Average(decimal? selector); - } - - static readonly Type[] predefinedTypes = { - typeof(Object), - typeof(Boolean), - typeof(Char), - typeof(String), - typeof(SByte), - typeof(Byte), - typeof(Int16), - typeof(UInt16), - typeof(Int32), - typeof(UInt32), - typeof(Int64), - typeof(UInt64), - typeof(Single), - typeof(Double), - typeof(Decimal), - typeof(DateTime), - typeof(TimeSpan), - typeof(Guid), - typeof(Math), - typeof(Convert) - }; - #endregion - - #region [ gunk ] - - - //// *, /, %, mod operators - //Expression ParseMultiplicative() - //{ - // Expression left = ParseUnary(); - // //while (token.id == TokenId.Asterisk || token.id == TokenId.Slash || - // // token.id == TokenId.Percent || TokenIdentifierIs("mod")) - // //{ - // // Token op = token; - // // NextToken(); - // // Expression right = ParseUnary(); - // // CheckAndPromoteOperands(typeof(IArithmeticSignatures), op.text, ref left, ref right, op.pos); - // // switch (op.id) - // // { - // // case TokenId.Asterisk: - // // left = Expression.Multiply(left, right); - // // break; - // // case TokenId.Slash: - // // 
left = Expression.Divide(left, right); - // // break; - // // case TokenId.Percent: - // // case TokenId.Identifier: - // // left = Expression.Modulo(left, right); - // // break; - // // } - // //} - // return left; - //} - - //Expression GenerateAdd(Expression left, Expression right) - //{ - // if (left.Type == typeof(string) && right.Type == typeof(string)) - // { - // return GenerateStaticMethodCall("Concat", left, right); - // } - // return Expression.Add(left, right); - //} - - //Expression GenerateSubtract(Expression left, Expression right) - //{ - // return Expression.Subtract(left, right); - //} - - //Expression GenerateStringConcat(Expression left, Expression right) - //{ - // return Expression.Call( - // null, - // typeof(string).GetMethod("Concat", new[] { typeof(object), typeof(object) }), - // new[] { left, right }); - //} - -#pragma warning disable 0219 - //public IEnumerable ParseOrdering() - //{ - // List orderings = new List(); - // while (true) - // { - // Expression expr = ParseExpression(); - // bool ascending = true; - // if (TokenIdentifierIs("asc") || TokenIdentifierIs("ascending")) - // { - // NextToken(); - // } - // else if (TokenIdentifierIs("desc") || TokenIdentifierIs("descending")) - // { - // NextToken(); - // ascending = false; - // } - // orderings.Add(new DynamicOrdering { Selector = expr, Ascending = ascending }); - // if (token.id != TokenId.Comma) break; - // NextToken(); - // } - // ValidateToken(TokenId.End, Res.SyntaxError); - // return orderings; - //} -#pragma warning restore 0219 - - //Expression ParseIt() - //{ - // if (it == null) - // throw ParseError(Res.NoItInScope); - // NextToken(); - // return it; - //} - - //Expression ParseIif() - //{ - // int errorPos = token.pos; - // NextToken(); - // Expression[] args = ParseArgumentList(); - // if (args.Length != 3) - // throw ParseError(errorPos, Res.IifRequiresThreeArgs); - // return GenerateConditional(args[0], args[1], args[2], errorPos); - //} - - //Expression 
GenerateConditional(Expression test, Expression expr1, Expression expr2, int errorPos) - //{ - // if (test.Type != typeof(bool)) - // throw ParseError(errorPos, Res.FirstExprMustBeBool); - // if (expr1.Type != expr2.Type) - // { - // Expression expr1as2 = expr2 != nullLiteral ? PromoteExpression(expr1, expr2.Type, true) : null; - // Expression expr2as1 = expr1 != nullLiteral ? PromoteExpression(expr2, expr1.Type, true) : null; - // if (expr1as2 != null && expr2as1 == null) - // { - // expr1 = expr1as2; - // } - // else if (expr2as1 != null && expr1as2 == null) - // { - // expr2 = expr2as1; - // } - // else - // { - // string type1 = expr1 != nullLiteral ? expr1.Type.Name : "null"; - // string type2 = expr2 != nullLiteral ? expr2.Type.Name : "null"; - // if (expr1as2 != null && expr2as1 != null) - // throw ParseError(errorPos, Res.BothTypesConvertToOther, type1, type2); - // throw ParseError(errorPos, Res.NeitherTypeConvertsToOther, type1, type2); - // } - // } - // return Expression.Condition(test, expr1, expr2); - //} - - //Expression ParseNew() - //{ - // NextToken(); - // ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); - // NextToken(); - // List properties = new List(); - // List expressions = new List(); - // while (true) - // { - // int exprPos = token.pos; - // Expression expr = ParseExpression(); - // string propName; - // if (TokenIdentifierIs("as")) - // { - // NextToken(); - // propName = GetIdentifier(); - // NextToken(); - // } - // else - // { - // MemberExpression me = expr as MemberExpression; - // if (me == null) throw ParseError(exprPos, Res.MissingAsClause); - // propName = me.Member.Name; - // } - // expressions.Add(expr); - // properties.Add(new DynamicProperty(propName, expr.Type)); - // if (token.id != TokenId.Comma) break; - // NextToken(); - // } - // ValidateToken(TokenId.CloseParen, Res.CloseParenOrCommaExpected); - // NextToken(); - // Type type = DynamicExpression.CreateClass(properties); - // MemberBinding[] bindings = new 
MemberBinding[properties.Count]; - // for (int i = 0; i < bindings.Length; i++) - // bindings[i] = Expression.Bind(type.GetProperty(properties[i].Name), expressions[i]); - // return Expression.MemberInit(Expression.New(type), bindings); - //} - - //Expression ParseLambdaInvocation(LambdaExpression lambda) - //{ - // int errorPos = token.pos; - // NextToken(); - // Expression[] args = ParseArgumentList(); - // MethodBase method; - // if (FindMethod(lambda.Type, "Invoke", false, args, out method) != 1) - // throw ParseError(errorPos, Res.ArgsIncompatibleWithLambda); - // return Expression.Invoke(lambda, args); - //} - - - //Expression GenerateConversion(Expression expr, Type type, int errorPos) - //{ - // Type exprType = expr.Type; - // if (exprType == type) return expr; - // if (exprType.IsValueType && type.IsValueType) - // { - // if ((IsNullableType(exprType) || IsNullableType(type)) && - // GetNonNullableType(exprType) == GetNonNullableType(type)) - // return Expression.Convert(expr, type); - // if ((IsNumericType(exprType) || IsEnumType(exprType)) && - // (IsNumericType(type)) || IsEnumType(type)) - // return Expression.ConvertChecked(expr, type); - // } - // if (exprType.IsAssignableFrom(type) || type.IsAssignableFrom(exprType) || - // exprType.IsInterface || type.IsInterface) - // return Expression.Convert(expr, type); - // throw ParseError(errorPos, Res.CannotConvertValue, - // GetTypeName(exprType), GetTypeName(type)); - //} - - - //static Type FindGenericType(Type generic, Type type) - //{ - // while (type != null && type != typeof(object)) - // { - // if (type.IsGenericType && type.GetGenericTypeDefinition() == generic) return type; - // if (generic.IsInterface) - // { - // foreach (Type intfType in type.GetInterfaces()) - // { - // Type found = FindGenericType(generic, intfType); - // if (found != null) return found; - // } - // } - // type = type.BaseType; - // } - // return null; - //} - - //Expression ParseAggregate(Expression instance, Type 
elementType, string methodName, int errorPos) - //{ - // ParameterExpression outerIt = it; - // ParameterExpression innerIt = Expression.Parameter(elementType, ""); - // it = innerIt; - // Expression[] args = ParseArgumentList(); - // it = outerIt; - // MethodBase signature; - // if (FindMethod(typeof(IEnumerableSignatures), methodName, false, args, out signature) != 1) - // throw ParseError(errorPos, Res.NoApplicableAggregate, methodName); - // Type[] typeArgs; - // if (signature.Name == "Min" || signature.Name == "Max") - // { - // typeArgs = new Type[] { elementType, args[0].Type }; - // } - // else - // { - // typeArgs = new Type[] { elementType }; - // } - // if (args.Length == 0) - // { - // args = new Expression[] { instance }; - // } - // else - // { - // args = new Expression[] { instance, Expression.Lambda(args[0], innerIt) }; - // } - // return Expression.Call(typeof(Enumerable), signature.Name, typeArgs, args); - //} - - //Expression[] ParseArgumentList() - //{ - // ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); - // NextToken(); - // Expression[] args = token.id != TokenId.CloseParen ? 
ParseArguments() : new Expression[0]; - // ValidateToken(TokenId.CloseParen, Res.CloseParenOrCommaExpected); - // NextToken(); - // return args; - //} - - //Expression[] ParseArguments() - //{ - // List argList = new List(); - // while (true) - // { - // argList.Add(ParseExpression()); - // if (token.id != TokenId.Comma) break; - // NextToken(); - // } - // return argList.ToArray(); - //} - - //Expression ParseElementAccess(Expression expr) - //{ - // int errorPos = token.pos; - // ValidateToken(TokenId.OpenBracket, Res.OpenParenExpected); - // NextToken(); - // Expression[] args = ParseArguments(); - // ValidateToken(TokenId.CloseBracket, Res.CloseBracketOrCommaExpected); - // NextToken(); - // if (expr.Type.IsArray) - // { - // if (expr.Type.GetArrayRank() != 1 || args.Length != 1) - // throw ParseError(errorPos, Res.CannotIndexMultiDimArray); - // Expression index = PromoteExpression(args[0], typeof(int), true); - // if (index == null) - // throw ParseError(errorPos, Res.InvalidIndex); - // return Expression.ArrayIndex(expr, index); - // } - // else - // { - // MethodBase mb; - // switch (FindIndexer(expr.Type, args, out mb)) - // { - // case 0: - // throw ParseError(errorPos, Res.NoApplicableIndexer, - // GetTypeName(expr.Type)); - // case 1: - // return Expression.Call(expr, (MethodInfo)mb, args); - // default: - // throw ParseError(errorPos, Res.AmbiguousIndexerInvocation, - // GetTypeName(expr.Type)); - // } - // } - //} - - //static bool IsPredefinedType(Type type) - //{ - // foreach (Type t in predefinedTypes) if (t == type) return true; - // return false; - //} - - //static bool IsNumericType(Type type) - //{ - // return GetNumericTypeKind(type) != 0; - //} - - //static bool IsEnumType(Type type) - //{ - // return GetNonNullableType(type).IsEnum; - //} - - //static readonly string keywordIt = "it"; - //static readonly string keywordIif = "iif"; - //static readonly string keywordNew = "new"; - - #endregion - - static readonly Expression trueLiteral = 
Expression.Constant(true); - static readonly Expression falseLiteral = Expression.Constant(false); - static readonly Expression nullLiteral = Expression.Constant(null); - - static Dictionary keywords; - - Dictionary symbols; - IDictionary externals; - Dictionary literals; - ParameterExpression it; - string text; - int textPos; - int textLen; - char ch; - Token token; - - public ExpressionParser(ParameterExpression[] parameters, string expression, object[] values) - { - if (expression == null) throw new ArgumentNullException("expression"); - if (keywords == null) keywords = CreateKeywords(); - symbols = new Dictionary(StringComparer.OrdinalIgnoreCase); - literals = new Dictionary(); - if (parameters != null) ProcessParameters(parameters); - if (values != null) ProcessValues(values); - text = expression; - textLen = text.Length; - SetTextPos(0); - NextToken(); - } - - void ProcessParameters(ParameterExpression[] parameters) - { - foreach (ParameterExpression pe in parameters) - if (!String.IsNullOrEmpty(pe.Name)) - AddSymbol(pe.Name, pe); - if (parameters.Length == 1 && String.IsNullOrEmpty(parameters[0].Name)) - it = parameters[0]; - } - - void ProcessValues(object[] values) - { - for (int i = 0; i < values.Length; i++) - { - object value = values[i]; - if (i == values.Length - 1 && value is IDictionary) - { - externals = (IDictionary)value; - } - else - { - AddSymbol("@" + i.ToString(System.Globalization.CultureInfo.InvariantCulture), value); - } - } - } - - void AddSymbol(string name, object value) - { - if (symbols.ContainsKey(name)) - throw ParseError(Res.DuplicateIdentifier, name); - symbols.Add(name, value); - } - - public Expression Parse(Type resultType) - { - int exprPos = token.pos; - Expression expr = ParseExpression(); - if (resultType != null) - if ((expr = PromoteExpression(expr, resultType, true)) == null) - throw ParseError(exprPos, Res.ExpressionTypeMismatch, GetTypeName(resultType)); - ValidateToken(TokenId.End, Res.SyntaxError); - return expr; - } 
- - - // ?: operator - Expression ParseExpression() - { - int errorPos = token.pos; - Expression expr = ParseLogicalOr(); - //if (token.id == TokenId.Question) - //{ - // NextToken(); - // Expression expr1 = ParseExpression(); - // ValidateToken(TokenId.Colon, Res.ColonExpected); - // NextToken(); - // Expression expr2 = ParseExpression(); - // expr = GenerateConditional(expr, expr1, expr2, errorPos); - //} - return expr; - } - - // ||, or operator - Expression ParseLogicalOr() - { - Expression left = ParseLogicalAnd(); - while (token.id == TokenId.DoubleBar || TokenIdentifierIs("or")) - { - Token op = token; - NextToken(); - Expression right = ParseLogicalAnd(); - CheckAndPromoteOperands(typeof(ILogicalSignatures), op.text, ref left, ref right, op.pos); - left = Expression.OrElse(left, right); - } - return left; - } - - // &&, and operator - Expression ParseLogicalAnd() - { - Expression left = ParseComparison(); - while (token.id == TokenId.DoubleAmphersand || TokenIdentifierIs("and")) - { - Token op = token; - NextToken(); - Expression right = ParseComparison(); - CheckAndPromoteOperands(typeof(ILogicalSignatures), op.text, ref left, ref right, op.pos); - left = Expression.AndAlso(left, right); - } - return left; - } - - // =, ==, !=, <>, >, >=, <, <= operators - Expression ParseComparison() - { - Expression left = ParseAdditive(); - while (token.id == TokenId.Equal || token.id == TokenId.DoubleEqual || - token.id == TokenId.ExclamationEqual || token.id == TokenId.LessGreater || - token.id == TokenId.GreaterThan || token.id == TokenId.GreaterThanEqual || - token.id == TokenId.LessThan || token.id == TokenId.LessThanEqual) - { - Token op = token; - NextToken(); - Expression right = ParseAdditive(); - bool isEquality = op.id == TokenId.Equal || op.id == TokenId.DoubleEqual || - op.id == TokenId.ExclamationEqual || op.id == TokenId.LessGreater; - if (isEquality && !left.Type.IsValueType && !right.Type.IsValueType) - { - if (left.Type != right.Type) - { - if 
(left.Type.IsAssignableFrom(right.Type)) - { - right = Expression.Convert(right, left.Type); - } - else if (right.Type.IsAssignableFrom(left.Type)) - { - left = Expression.Convert(left, right.Type); - } - else - { - throw IncompatibleOperandsError(op.text, left, right, op.pos); - } - } - } - //else if (IsEnumType(left.Type) || IsEnumType(right.Type)) - //{ - // if (left.Type != right.Type) - // { - // Expression e; - // if ((e = PromoteExpression(right, left.Type, true)) != null) - // { - // right = e; - // } - // else if ((e = PromoteExpression(left, right.Type, true)) != null) - // { - // left = e; - // } - // else - // { - // throw IncompatibleOperandsError(op.text, left, right, op.pos); - // } - // } - //} - else - { - if (left.Type == typeof(Guid)) - right = Expression.Constant(Guid.Parse(right.ToString().Replace("\"", "").Replace("'",""))); - else if (left.Type == typeof(DateTime)) - right = Expression.Constant(DateTime.Parse(right.ToString().Replace("\"", "").Replace("'", ""))); - else - CheckAndPromoteOperands(isEquality ? 
typeof(IEqualitySignatures) : typeof(IRelationalSignatures), - op.text, ref left, ref right, op.pos); - } - switch (op.id) - { - case TokenId.Equal: - case TokenId.DoubleEqual: - left = GenerateEqual(left, right); - break; - case TokenId.ExclamationEqual: - case TokenId.LessGreater: - left = GenerateNotEqual(left, right); - break; - case TokenId.GreaterThan: - left = GenerateGreaterThan(left, right); - break; - case TokenId.GreaterThanEqual: - left = GenerateGreaterThanEqual(left, right); - break; - case TokenId.LessThan: - left = GenerateLessThan(left, right); - break; - case TokenId.LessThanEqual: - left = GenerateLessThanEqual(left, right); - break; - } - } - return left; - } - - // +, -, & operators - Expression ParseAdditive() - { - Expression left = ParseUnary();// ParseMultiplicative(); - //while (token.id == TokenId.Plus || token.id == TokenId.Minus || - // token.id == TokenId.Amphersand) - //{ - // Token op = token; - // NextToken(); - // Expression right = ParseMultiplicative(); - // switch (op.id) - // { - // case TokenId.Plus: - // if (left.Type == typeof(string) || right.Type == typeof(string)) - // goto case TokenId.Amphersand; - // CheckAndPromoteOperands(typeof(IAddSignatures), op.text, ref left, ref right, op.pos); - // left = GenerateAdd(left, right); - // break; - // case TokenId.Minus: - // CheckAndPromoteOperands(typeof(ISubtractSignatures), op.text, ref left, ref right, op.pos); - // left = GenerateSubtract(left, right); - // break; - // case TokenId.Amphersand: - // left = GenerateStringConcat(left, right); - // break; - // } - //} - return left; - } - - // -, !, not unary operators - Expression ParseUnary() - { - if (token.id == TokenId.Minus || token.id == TokenId.Exclamation || - TokenIdentifierIs("not")) - { - Token op = token; - NextToken(); - if (op.id == TokenId.Minus && (token.id == TokenId.IntegerLiteral || - token.id == TokenId.RealLiteral)) - { - token.text = "-" + token.text; - token.pos = op.pos; - return ParsePrimary(); - } - 
Expression expr = ParseUnary(); - if (op.id == TokenId.Minus) - { - CheckAndPromoteOperand(typeof(INegationSignatures), op.text, ref expr, op.pos); - expr = Expression.Negate(expr); - } - else - { - CheckAndPromoteOperand(typeof(INotSignatures), op.text, ref expr, op.pos); - expr = Expression.Not(expr); - } - return expr; - } - return ParsePrimary(); - } - - Expression ParsePrimary() - { - Expression expr = ParsePrimaryStart(); - while (true) - { - if (token.id == TokenId.Dot) - { - NextToken(); - expr = ParseMemberAccess(null, expr); - } - //else if (token.id == TokenId.OpenBracket) - //{ - // expr = ParseElementAccess(expr); - //} - else - { - break; - } - } - return expr; - } - - Expression ParsePrimaryStart() - { - switch (token.id) - { - case TokenId.Identifier: - return ParseIdentifier(); - case TokenId.StringLiteral: - return ParseStringLiteral(); - case TokenId.IntegerLiteral: - return ParseIntegerLiteral(); - case TokenId.RealLiteral: - return ParseRealLiteral(); - case TokenId.OpenParen: - return ParseParenExpression(); - default: - throw ParseError(Res.ExpressionExpected); - } - } - - Expression ParseStringLiteral() - { - ValidateToken(TokenId.StringLiteral); - char quote = token.text[0]; - string s = token.text.Substring(1, token.text.Length - 2); - int start = 0; - while (true) - { - int i = s.IndexOf(quote, start); - if (i < 0) break; - s = s.Remove(i, 1); - start = i + 1; - } - if (quote == '\'') - { - if (s.Length != 1) - throw ParseError(Res.InvalidCharacterLiteral); - NextToken(); - return CreateLiteral(s[0], s); - } - NextToken(); - return CreateLiteral(s, s); - } - - Expression ParseIntegerLiteral() - { - ValidateToken(TokenId.IntegerLiteral); - string text = token.text; - if (text[0] != '-') - { - ulong value; - if (!UInt64.TryParse(text, out value)) - throw ParseError(Res.InvalidIntegerLiteral, text); - NextToken(); - if (value <= (ulong)Int32.MaxValue) return CreateLiteral((int)value, text); - if (value <= (ulong)UInt32.MaxValue) return 
CreateLiteral((uint)value, text); - if (value <= (ulong)Int64.MaxValue) return CreateLiteral((long)value, text); - return CreateLiteral(value, text); - } - else - { - long value; - if (!Int64.TryParse(text, out value)) - throw ParseError(Res.InvalidIntegerLiteral, text); - NextToken(); - if (value >= Int32.MinValue && value <= Int32.MaxValue) - return CreateLiteral((int)value, text); - return CreateLiteral(value, text); - } - } - - Expression ParseRealLiteral() - { - ValidateToken(TokenId.RealLiteral); - string text = token.text; - object value = null; - char last = text[text.Length - 1]; - if (last == 'F' || last == 'f') - { - float f; - if (Single.TryParse(text.Substring(0, text.Length - 1), out f)) value = f; - } - else - { - double d; - if (Double.TryParse(text, out d)) value = d; - } - if (value == null) throw ParseError(Res.InvalidRealLiteral, text); - NextToken(); - return CreateLiteral(value, text); - } - - Expression CreateLiteral(object value, string text) - { - ConstantExpression expr = Expression.Constant(value); - literals.Add(expr, text); - return expr; - } - - Expression ParseParenExpression() - { - ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); - NextToken(); - Expression e = ParseExpression(); - ValidateToken(TokenId.CloseParen, Res.CloseParenOrOperatorExpected); - NextToken(); - return e; - } - - Expression ParseIdentifier() - { - ValidateToken(TokenId.Identifier); - object value; - if (keywords.TryGetValue(token.text, out value)) - { - if (value is Type) return ParseTypeAccess((Type)value); - //if (value == (object)keywordIt) return ParseIt(); - //if (value == (object)keywordIif) return ParseIif(); - //if (value == (object)keywordNew) return ParseNew(); - NextToken(); - return (Expression)value; - } - if (symbols.TryGetValue(token.text, out value) || - externals != null && externals.TryGetValue(token.text, out value)) - { - Expression expr = value as Expression; - if (expr == null) - { - expr = Expression.Constant(value); - } - //else - 
//{ - // LambdaExpression lambda = expr as LambdaExpression; - // if (lambda != null) return ParseLambdaInvocation(lambda); - //} - NextToken(); - return expr; - } - if (it != null) return ParseMemberAccess(null, it); - throw ParseError(Res.UnknownIdentifier, token.text); - } - - Expression ParseTypeAccess(Type type) - { - int errorPos = token.pos; - NextToken(); - if (token.id == TokenId.Question) - { - if (!type.IsValueType || IsNullableType(type)) - throw ParseError(errorPos, Res.TypeHasNoNullableForm, GetTypeName(type)); - type = typeof(Nullable<>).MakeGenericType(type); - NextToken(); - } - //if (token.id == TokenId.OpenParen) - //{ - // Expression[] args = ParseArgumentList(); - // MethodBase method; - // switch (FindBestMethod(type.GetConstructors(), args, out method)) - // { - // case 0: - // if (args.Length == 1) - // return GenerateConversion(args[0], type, errorPos); - // throw ParseError(errorPos, Res.NoMatchingConstructor, GetTypeName(type)); - // case 1: - // return Expression.New((ConstructorInfo)method, args); - // default: - // throw ParseError(errorPos, Res.AmbiguousConstructorInvocation, GetTypeName(type)); - // } - //} - ValidateToken(TokenId.Dot, Res.DotOrOpenParenExpected); - NextToken(); - return ParseMemberAccess(type, null); - } - - Expression ParseMemberAccess(Type type, Expression instance) - { - if (instance != null) type = instance.Type; - int errorPos = token.pos; - string id = GetIdentifier(); - NextToken(); - //if (token.id == TokenId.OpenParen) - //{ - // //if (instance != null && type != typeof(string)) - // //{ - // // Type enumerableType = FindGenericType(typeof(IEnumerable<>), type); - // // if (enumerableType != null) - // // { - // // Type elementType = enumerableType.GetGenericArguments()[0]; - // // return ParseAggregate(instance, elementType, id, errorPos); - // // } - // //} - // Expression[] args = ParseArgumentList(); - // MethodBase mb; - // switch (FindMethod(type, id, instance == null, args, out mb)) - // { - // case 
0: - // throw ParseError(errorPos, Res.NoApplicableMethod, - // id, GetTypeName(type)); - // case 1: - // MethodInfo method = (MethodInfo)mb; - // if (!IsPredefinedType(method.DeclaringType)) - // throw ParseError(errorPos, Res.MethodsAreInaccessible, GetTypeName(method.DeclaringType)); - // if (method.ReturnType == typeof(void)) - // throw ParseError(errorPos, Res.MethodIsVoid, - // id, GetTypeName(method.DeclaringType)); - // return Expression.Call(instance, (MethodInfo)method, args); - // default: - // throw ParseError(errorPos, Res.AmbiguousMethodInvocation, - // id, GetTypeName(type)); - // } - //} - //else - { - MemberInfo member = FindPropertyOrField(type, id, instance == null); - if (member == null) - throw ParseError(errorPos, Res.UnknownPropertyOrField, - id, GetTypeName(type)); - return member is PropertyInfo ? - Expression.Property(instance, (PropertyInfo)member) : - Expression.Field(instance, (FieldInfo)member); - } - } - - static bool IsNullableType(Type type) - { - return type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>); - } - - static Type GetNonNullableType(Type type) - { - return IsNullableType(type) ? 
type.GetGenericArguments()[0] : type; - } - - static string GetTypeName(Type type) - { - Type baseType = GetNonNullableType(type); - string s = baseType.Name; - if (type != baseType) s += '?'; - return s; - } - - static bool IsSignedIntegralType(Type type) - { - return GetNumericTypeKind(type) == 2; - } - - static bool IsUnsignedIntegralType(Type type) - { - return GetNumericTypeKind(type) == 3; - } - - static int GetNumericTypeKind(Type type) - { - type = GetNonNullableType(type); - if (type.IsEnum) return 0; - switch (Type.GetTypeCode(type)) - { - case TypeCode.Char: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return 1; - case TypeCode.SByte: - case TypeCode.Int16: - case TypeCode.Int32: - case TypeCode.Int64: - return 2; - case TypeCode.Byte: - case TypeCode.UInt16: - case TypeCode.UInt32: - case TypeCode.UInt64: - return 3; - default: - return 0; - } - } - - void CheckAndPromoteOperand(Type signatures, string opName, ref Expression expr, int errorPos) - { - Expression[] args = new Expression[] { expr }; - MethodBase method; - if (FindMethod(signatures, "F", false, args, out method) != 1) - throw ParseError(errorPos, Res.IncompatibleOperand, - opName, GetTypeName(args[0].Type)); - expr = args[0]; - } - - void CheckAndPromoteOperands(Type signatures, string opName, ref Expression left, ref Expression right, int errorPos) - { - Expression[] args = new Expression[] { left, right }; - MethodBase method; - if (FindMethod(signatures, "F", false, args, out method) != 1) - throw IncompatibleOperandsError(opName, left, right, errorPos); - left = args[0]; - right = args[1]; - } - - Exception IncompatibleOperandsError(string opName, Expression left, Expression right, int pos) - { - return ParseError(pos, Res.IncompatibleOperands, - opName, GetTypeName(left.Type), GetTypeName(right.Type)); - } - - MemberInfo FindPropertyOrField(Type type, string memberName, bool staticAccess) - { - BindingFlags flags = BindingFlags.Public | 
BindingFlags.DeclaredOnly | - (staticAccess ? BindingFlags.Static : BindingFlags.Instance); - foreach (Type t in SelfAndBaseTypes(type)) - { - MemberInfo[] members = t.FindMembers(MemberTypes.Property | MemberTypes.Field, - flags, Type.FilterNameIgnoreCase, memberName); - if (members.Length != 0) return members[0]; - } - return null; - } - - int FindMethod(Type type, string methodName, bool staticAccess, Expression[] args, out MethodBase method) - { - BindingFlags flags = BindingFlags.Public | BindingFlags.DeclaredOnly | - (staticAccess ? BindingFlags.Static : BindingFlags.Instance); - foreach (Type t in SelfAndBaseTypes(type)) - { - MemberInfo[] members = t.FindMembers(MemberTypes.Method, - flags, Type.FilterNameIgnoreCase, methodName); - int count = FindBestMethod(members.Cast(), args, out method); - if (count != 0) return count; - } - method = null; - return 0; - } - - int FindIndexer(Type type, Expression[] args, out MethodBase method) - { - foreach (Type t in SelfAndBaseTypes(type)) - { - MemberInfo[] members = t.GetDefaultMembers(); - if (members.Length != 0) - { - IEnumerable methods = members. - OfType(). - Select(p => (MethodBase)p.GetGetMethod()). 
- Where(m => m != null); - int count = FindBestMethod(methods, args, out method); - if (count != 0) return count; - } - } - method = null; - return 0; - } - - static IEnumerable SelfAndBaseTypes(Type type) - { - if (type.IsInterface) - { - List types = new List(); - AddInterface(types, type); - return types; - } - return SelfAndBaseClasses(type); - } - - static IEnumerable SelfAndBaseClasses(Type type) - { - while (type != null) - { - yield return type; - type = type.BaseType; - } - } - - static void AddInterface(List types, Type type) - { - if (!types.Contains(type)) - { - types.Add(type); - foreach (Type t in type.GetInterfaces()) AddInterface(types, t); - } - } - - class MethodData - { - public MethodBase MethodBase; - public ParameterInfo[] Parameters; - public Expression[] Args; - } - - int FindBestMethod(IEnumerable methods, Expression[] args, out MethodBase method) - { - MethodData[] applicable = methods. - Select(m => new MethodData { MethodBase = m, Parameters = m.GetParameters() }). - Where(m => IsApplicable(m, args)). - ToArray(); - if (applicable.Length > 1) - { - applicable = applicable. - Where(m => applicable.All(n => m == n || IsBetterThan(args, m, n))). 
- ToArray(); - } - if (applicable.Length == 1) - { - MethodData md = applicable[0]; - for (int i = 0; i < args.Length; i++) args[i] = md.Args[i]; - method = md.MethodBase; - } - else - { - method = null; - } - return applicable.Length; - } - - bool IsApplicable(MethodData method, Expression[] args) - { - if (method.Parameters.Length != args.Length) return false; - Expression[] promotedArgs = new Expression[args.Length]; - for (int i = 0; i < args.Length; i++) - { - ParameterInfo pi = method.Parameters[i]; - if (pi.IsOut) return false; - Expression promoted = PromoteExpression(args[i], pi.ParameterType, false); - if (promoted == null) return false; - promotedArgs[i] = promoted; - } - method.Args = promotedArgs; - return true; - } - - Expression PromoteExpression(Expression expr, Type type, bool exact) - { - if (expr.Type == type) return expr; - if (expr is ConstantExpression) - { - ConstantExpression ce = (ConstantExpression)expr; - if (ce == nullLiteral) - { - if (!type.IsValueType || IsNullableType(type)) - return Expression.Constant(null, type); - } - else - { - string text; - if (literals.TryGetValue(ce, out text)) - { - Type target = GetNonNullableType(type); - Object value = null; - switch (Type.GetTypeCode(ce.Type)) - { - case TypeCode.Int32: - case TypeCode.UInt32: - case TypeCode.Int64: - case TypeCode.UInt64: - value = ParseNumber(text, target); - break; - case TypeCode.Double: - if (target == typeof(decimal)) value = ParseNumber(text, target); - break; - case TypeCode.String: - value = ParseEnum(text, target); - break; - } - if (value != null) - return Expression.Constant(value, type); - } - } - } - if (IsCompatibleWith(expr.Type, type)) - { - if (type.IsValueType || exact) return Expression.Convert(expr, type); - return expr; - } - return null; - } - - static object ParseNumber(string text, Type type) - { - switch (Type.GetTypeCode(GetNonNullableType(type))) - { - case TypeCode.SByte: - sbyte sb; - if (sbyte.TryParse(text, out sb)) return sb; - break; - 
case TypeCode.Byte: - byte b; - if (byte.TryParse(text, out b)) return b; - break; - case TypeCode.Int16: - short s; - if (short.TryParse(text, out s)) return s; - break; - case TypeCode.UInt16: - ushort us; - if (ushort.TryParse(text, out us)) return us; - break; - case TypeCode.Int32: - int i; - if (int.TryParse(text, out i)) return i; - break; - case TypeCode.UInt32: - uint ui; - if (uint.TryParse(text, out ui)) return ui; - break; - case TypeCode.Int64: - long l; - if (long.TryParse(text, out l)) return l; - break; - case TypeCode.UInt64: - ulong ul; - if (ulong.TryParse(text, out ul)) return ul; - break; - case TypeCode.Single: - float f; - if (float.TryParse(text, out f)) return f; - break; - case TypeCode.Double: - double d; - if (double.TryParse(text, out d)) return d; - break; - case TypeCode.Decimal: - decimal e; - if (decimal.TryParse(text, out e)) return e; - break; - } - return null; - } - - static object ParseEnum(string name, Type type) - { - if (type.IsEnum) - { - MemberInfo[] memberInfos = type.FindMembers(MemberTypes.Field, - BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static, - Type.FilterNameIgnoreCase, name); - if (memberInfos.Length != 0) return ((FieldInfo)memberInfos[0]).GetValue(null); - } - return null; - } - - static bool IsCompatibleWith(Type source, Type target) - { - if (source == target) return true; - if (!target.IsValueType) return target.IsAssignableFrom(source); - Type st = GetNonNullableType(source); - Type tt = GetNonNullableType(target); - if (st != source && tt == target) return false; - TypeCode sc = st.IsEnum ? TypeCode.Object : Type.GetTypeCode(st); - TypeCode tc = tt.IsEnum ? 
TypeCode.Object : Type.GetTypeCode(tt); - switch (sc) - { - case TypeCode.SByte: - switch (tc) - { - case TypeCode.SByte: - case TypeCode.Int16: - case TypeCode.Int32: - case TypeCode.Int64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.Byte: - switch (tc) - { - case TypeCode.Byte: - case TypeCode.Int16: - case TypeCode.UInt16: - case TypeCode.Int32: - case TypeCode.UInt32: - case TypeCode.Int64: - case TypeCode.UInt64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.Int16: - switch (tc) - { - case TypeCode.Int16: - case TypeCode.Int32: - case TypeCode.Int64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.UInt16: - switch (tc) - { - case TypeCode.UInt16: - case TypeCode.Int32: - case TypeCode.UInt32: - case TypeCode.Int64: - case TypeCode.UInt64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.Int32: - switch (tc) - { - case TypeCode.Int32: - case TypeCode.Int64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.UInt32: - switch (tc) - { - case TypeCode.UInt32: - case TypeCode.Int64: - case TypeCode.UInt64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.Int64: - switch (tc) - { - case TypeCode.Int64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.UInt64: - switch (tc) - { - case TypeCode.UInt64: - case TypeCode.Single: - case TypeCode.Double: - case TypeCode.Decimal: - return true; - } - break; - case TypeCode.Single: - switch (tc) - { - case TypeCode.Single: - case TypeCode.Double: - return true; - } - break; - default: - if (st == tt) return true; - break; - } - return false; - } - - 
static bool IsBetterThan(Expression[] args, MethodData m1, MethodData m2) - { - bool better = false; - for (int i = 0; i < args.Length; i++) - { - int c = CompareConversions(args[i].Type, - m1.Parameters[i].ParameterType, - m2.Parameters[i].ParameterType); - if (c < 0) return false; - if (c > 0) better = true; - } - return better; - } - - // Return 1 if s -> t1 is a better conversion than s -> t2 - // Return -1 if s -> t2 is a better conversion than s -> t1 - // Return 0 if neither conversion is better - static int CompareConversions(Type s, Type t1, Type t2) - { - if (t1 == t2) return 0; - if (s == t1) return 1; - if (s == t2) return -1; - bool t1t2 = IsCompatibleWith(t1, t2); - bool t2t1 = IsCompatibleWith(t2, t1); - if (t1t2 && !t2t1) return 1; - if (t2t1 && !t1t2) return -1; - if (IsSignedIntegralType(t1) && IsUnsignedIntegralType(t2)) return 1; - if (IsSignedIntegralType(t2) && IsUnsignedIntegralType(t1)) return -1; - return 0; - } - - Expression GenerateEqual(Expression left, Expression right) - { - return Expression.Equal(left, right); - } - - Expression GenerateNotEqual(Expression left, Expression right) - { - return Expression.NotEqual(left, right); - } - - Expression GenerateGreaterThan(Expression left, Expression right) - { - if (left.Type == typeof(string)) - { - return Expression.GreaterThan( - GenerateStaticMethodCall("Compare", left, right), - Expression.Constant(0) - ); - } - return Expression.GreaterThan(left, right); - } - - Expression GenerateGreaterThanEqual(Expression left, Expression right) - { - if (left.Type == typeof(string)) - { - return Expression.GreaterThanOrEqual( - GenerateStaticMethodCall("Compare", left, right), - Expression.Constant(0) - ); - } - return Expression.GreaterThanOrEqual(left, right); - } - - Expression GenerateLessThan(Expression left, Expression right) - { - if (left.Type == typeof(string)) - { - return Expression.LessThan( - GenerateStaticMethodCall("Compare", left, right), - Expression.Constant(0) - ); - } - return 
Expression.LessThan(left, right); - } - - Expression GenerateLessThanEqual(Expression left, Expression right) - { - if (left.Type == typeof(string)) - { - return Expression.LessThanOrEqual( - GenerateStaticMethodCall("Compare", left, right), - Expression.Constant(0) - ); - } - return Expression.LessThanOrEqual(left, right); - } - - MethodInfo GetStaticMethod(string methodName, Expression left, Expression right) - { - return left.Type.GetMethod(methodName, new[] { left.Type, right.Type }); - } - - Expression GenerateStaticMethodCall(string methodName, Expression left, Expression right) - { - return Expression.Call(null, GetStaticMethod(methodName, left, right), new[] { left, right }); - } - - void SetTextPos(int pos) - { - textPos = pos; - ch = textPos < textLen ? text[textPos] : '\0'; - } - - void NextChar() - { - if (textPos < textLen) textPos++; - ch = textPos < textLen ? text[textPos] : '\0'; - } - - void NextToken() - { - while (Char.IsWhiteSpace(ch)) NextChar(); - TokenId t; - int tokenPos = textPos; - switch (ch) - { - case '!': - NextChar(); - if (ch == '=') - { - NextChar(); - t = TokenId.ExclamationEqual; - } - else - { - t = TokenId.Exclamation; - } - break; - case '%': - NextChar(); - t = TokenId.Percent; - break; - case '&': - NextChar(); - if (ch == '&') - { - NextChar(); - t = TokenId.DoubleAmphersand; - } - else - { - t = TokenId.Amphersand; - } - break; - case '(': - NextChar(); - t = TokenId.OpenParen; - break; - case ')': - NextChar(); - t = TokenId.CloseParen; - break; - case '*': - NextChar(); - t = TokenId.Asterisk; - break; - case '+': - NextChar(); - t = TokenId.Plus; - break; - case ',': - NextChar(); - t = TokenId.Comma; - break; - case '-': - NextChar(); - t = TokenId.Minus; - break; - case '.': - NextChar(); - t = TokenId.Dot; - break; - case '/': - NextChar(); - t = TokenId.Slash; - break; - case ':': - NextChar(); - t = TokenId.Colon; - break; - case '<': - NextChar(); - if (ch == '=') - { - NextChar(); - t = TokenId.LessThanEqual; - } 
- else if (ch == '>') - { - NextChar(); - t = TokenId.LessGreater; - } - else - { - t = TokenId.LessThan; - } - break; - case '=': - NextChar(); - if (ch == '=') - { - NextChar(); - t = TokenId.DoubleEqual; - } - else - { - t = TokenId.Equal; - } - break; - case '>': - NextChar(); - if (ch == '=') - { - NextChar(); - t = TokenId.GreaterThanEqual; - } - else - { - t = TokenId.GreaterThan; - } - break; - case '?': - NextChar(); - t = TokenId.Question; - break; - case '[': - NextChar(); - t = TokenId.OpenBracket; - break; - case ']': - NextChar(); - t = TokenId.CloseBracket; - break; - case '|': - NextChar(); - if (ch == '|') - { - NextChar(); - t = TokenId.DoubleBar; - } - else - { - t = TokenId.Bar; - } - break; - case '"': - case '\'': - char quote = ch; - do - { - NextChar(); - while (textPos < textLen && ch != quote) NextChar(); - if (textPos == textLen) - throw ParseError(textPos, Res.UnterminatedStringLiteral); - NextChar(); - } while (ch == quote); - t = TokenId.StringLiteral; - break; - default: - if (Char.IsLetter(ch) || ch == '@' || ch == '_') - { - do - { - NextChar(); - } while (Char.IsLetterOrDigit(ch) || ch == '_'); - t = TokenId.Identifier; - break; - } - if (Char.IsDigit(ch)) - { - t = TokenId.IntegerLiteral; - do - { - NextChar(); - } while (Char.IsDigit(ch)); - if (ch == '.') - { - t = TokenId.RealLiteral; - NextChar(); - ValidateDigit(); - do - { - NextChar(); - } while (Char.IsDigit(ch)); - } - if (ch == 'E' || ch == 'e') - { - t = TokenId.RealLiteral; - NextChar(); - if (ch == '+' || ch == '-') NextChar(); - ValidateDigit(); - do - { - NextChar(); - } while (Char.IsDigit(ch)); - } - if (ch == 'F' || ch == 'f') NextChar(); - break; - } - if (textPos == textLen) - { - t = TokenId.End; - break; - } - throw ParseError(textPos, Res.InvalidCharacter, ch); - } - token.id = t; - token.text = text.Substring(tokenPos, textPos - tokenPos); - token.pos = tokenPos; - } - - bool TokenIdentifierIs(string id) - { - return token.id == TokenId.Identifier && 
String.Equals(id, token.text, StringComparison.OrdinalIgnoreCase); - } - - string GetIdentifier() - { - ValidateToken(TokenId.Identifier, Res.IdentifierExpected); - string id = token.text; - if (id.Length > 1 && id[0] == '@') id = id.Substring(1); - return id; - } - - void ValidateDigit() - { - if (!Char.IsDigit(ch)) throw ParseError(textPos, Res.DigitExpected); - } - - void ValidateToken(TokenId t, string errorMessage) - { - if (token.id != t) throw ParseError(errorMessage); - } - - void ValidateToken(TokenId t) - { - if (token.id != t) throw ParseError(Res.SyntaxError); - } - - Exception ParseError(string format, params object[] args) - { - return ParseError(token.pos, format, args); - } - - Exception ParseError(int pos, string format, params object[] args) - { - return new ParseException(string.Format(System.Globalization.CultureInfo.CurrentCulture, format, args), pos); - } - - static Dictionary CreateKeywords() - { - Dictionary d = new Dictionary(StringComparer.OrdinalIgnoreCase); - d.Add("true", trueLiteral); - d.Add("false", falseLiteral); - d.Add("null", nullLiteral); - //d.Add(keywordIt, keywordIt); - //d.Add(keywordIif, keywordIif); - //d.Add(keywordNew, keywordNew); - foreach (Type type in predefinedTypes) d.Add(type.Name, type); - return d; - } - } - - static class Res - { - public const string DuplicateIdentifier = "The identifier '{0}' was defined more than once"; - public const string ExpressionTypeMismatch = "Expression of type '{0}' expected"; - public const string ExpressionExpected = "Expression expected"; - public const string InvalidCharacterLiteral = "Character literal must contain exactly one character"; - public const string InvalidIntegerLiteral = "Invalid integer literal '{0}'"; - public const string InvalidRealLiteral = "Invalid real literal '{0}'"; - public const string UnknownIdentifier = "Unknown identifier '{0}'"; - public const string NoItInScope = "No 'it' is in scope"; - public const string IifRequiresThreeArgs = "The 'iif' function 
requires three arguments"; - public const string FirstExprMustBeBool = "The first expression must be of type 'Boolean'"; - public const string BothTypesConvertToOther = "Both of the types '{0}' and '{1}' convert to the other"; - public const string NeitherTypeConvertsToOther = "Neither of the types '{0}' and '{1}' converts to the other"; - public const string MissingAsClause = "Expression is missing an 'as' clause"; - public const string ArgsIncompatibleWithLambda = "Argument list incompatible with lambda expression"; - public const string TypeHasNoNullableForm = "Type '{0}' has no nullable form"; - public const string NoMatchingConstructor = "No matching constructor in type '{0}'"; - public const string AmbiguousConstructorInvocation = "Ambiguous invocation of '{0}' constructor"; - public const string CannotConvertValue = "A value of type '{0}' cannot be converted to type '{1}'"; - public const string NoApplicableMethod = "No applicable method '{0}' exists in type '{1}'"; - public const string MethodsAreInaccessible = "Methods on type '{0}' are not accessible"; - public const string MethodIsVoid = "Method '{0}' in type '{1}' does not return a value"; - public const string AmbiguousMethodInvocation = "Ambiguous invocation of method '{0}' in type '{1}'"; - public const string UnknownPropertyOrField = "No property or field '{0}' exists in type '{1}'"; - public const string NoApplicableAggregate = "No applicable aggregate method '{0}' exists"; - public const string CannotIndexMultiDimArray = "Indexing of multi-dimensional arrays is not supported"; - public const string InvalidIndex = "Array index must be an integer expression"; - public const string NoApplicableIndexer = "No applicable indexer exists in type '{0}'"; - public const string AmbiguousIndexerInvocation = "Ambiguous invocation of indexer in type '{0}'"; - public const string IncompatibleOperand = "Operator '{0}' incompatible with operand type '{1}'"; - public const string IncompatibleOperands = "Operator 
'{0}' incompatible with operand types '{1}' and '{2}'"; - public const string UnterminatedStringLiteral = "Unterminated string literal"; - public const string InvalidCharacter = "Syntax error '{0}'"; - public const string DigitExpected = "Digit expected"; - public const string SyntaxError = "Syntax error"; - public const string TokenExpected = "{0} expected"; - public const string ParseExceptionFormat = "{0} (at index {1})"; - public const string ColonExpected = "':' expected"; - public const string OpenParenExpected = "'(' expected"; - public const string CloseParenOrOperatorExpected = "')' or operator expected"; - public const string CloseParenOrCommaExpected = "')' or ',' expected"; - public const string DotOrOpenParenExpected = "'.' or '(' expected"; - public const string OpenBracketExpected = "'[' expected"; - public const string CloseBracketOrCommaExpected = "']' or ',' expected"; - public const string IdentifierExpected = "Identifier expected"; - } -} +//Copyright (C) Microsoft Corporation. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Reflection.Emit; +using System.Threading; + +namespace System.Linq.Dynamic +{ + // FEATURE : cleanup unused code here + + #region [ Classes ] + internal abstract class DynamicClass + { + public override string ToString() + { + PropertyInfo[] props = this.GetType().GetProperties(BindingFlags.Instance | BindingFlags.Public); + StringBuilder sb = new StringBuilder(); + sb.Append("{"); + for (int i = 0; i < props.Length; i++) + { + if (i > 0) sb.Append(", "); + sb.Append(props[i].Name); + sb.Append("="); + sb.Append(props[i].GetValue(this, null)); + } + sb.Append("}"); + return sb.ToString(); + } + } + + internal class DynamicProperty + { + string name; + Type type; + + public DynamicProperty(string name, Type type) + { + if (name == null) throw new ArgumentNullException("name"); + if (type == null) throw new ArgumentNullException("type"); + this.name = name; + this.type = type; + } + + public string Name + { + get { return name; } + } + + public Type Type + { + get { return type; } + } + } + + internal static class DynamicExpression + { + public static Expression Parse(Type resultType, string expression, params object[] values) + { + ExpressionParser parser = new ExpressionParser(null, expression, values); + return parser.Parse(resultType); + } + + public static Expression ParseLambda(Type itType, Type resultType, string expression, params object[] values) + { + return ParseLambda(new ParameterExpression[] { Expression.Parameter(itType) }, resultType, expression, values); + } + + public static Expression ParseLambda(ParameterExpression[] parameters, Type resultType, string expression, params object[] values) + { + ExpressionParser parser = new ExpressionParser(parameters, expression, values); + return Expression.Lambda(parser.Parse(resultType), parameters); + } + + //public static Expression> 
ParseLambda(string expression, params object[] values) + //{ + // return (Expression>)ParseLambda(typeof(T), typeof(S), expression, values); + //} + + public static Type CreateClass(params DynamicProperty[] properties) + { + return ClassFactory.Instance.GetDynamicClass(properties); + } + + public static Type CreateClass(IEnumerable properties) + { + return ClassFactory.Instance.GetDynamicClass(properties); + } + } + + //internal class DynamicOrdering + //{ + // public Expression Selector; + // public bool Ascending; + //} + + internal class Signature : IEquatable + { + public DynamicProperty[] properties; + public int hashCode; + + public Signature(IEnumerable properties) + { + this.properties = properties.ToArray(); + hashCode = 0; + foreach (DynamicProperty p in properties) + { + hashCode ^= p.Name.GetHashCode() ^ p.Type.GetHashCode(); + } + } + + public override int GetHashCode() + { + return hashCode; + } + + public override bool Equals(object obj) + { + return obj is Signature ? Equals((Signature)obj) : false; + } + + public bool Equals(Signature other) + { + if (properties.Length != other.properties.Length) return false; + for (int i = 0; i < properties.Length; i++) + { + if (properties[i].Name != other.properties[i].Name || + properties[i].Type != other.properties[i].Type) return false; + } + return true; + } + } + + internal class ClassFactory + { + public static readonly ClassFactory Instance = new ClassFactory(); + + static ClassFactory() { } // Trigger lazy initialization of static fields + + ModuleBuilder module; + Dictionary classes; + int classCount; + ReaderWriterLock rwLock; + + private ClassFactory() + { + AssemblyName name = new AssemblyName("DynamicClasses"); + AssemblyBuilder assembly = AppDomain.CurrentDomain.DefineDynamicAssembly(name, AssemblyBuilderAccess.Run); +#if ENABLE_LINQ_PARTIAL_TRUST + new ReflectionPermission(PermissionState.Unrestricted).Assert(); +#endif + try + { + module = assembly.DefineDynamicModule("Module"); + } + finally + 
{ +#if ENABLE_LINQ_PARTIAL_TRUST + PermissionSet.RevertAssert(); +#endif + } + classes = new Dictionary(); + rwLock = new ReaderWriterLock(); + } + + public Type GetDynamicClass(IEnumerable properties) + { + rwLock.AcquireReaderLock(Timeout.Infinite); + try + { + Signature signature = new Signature(properties); + Type type; + if (!classes.TryGetValue(signature, out type)) + { + type = CreateDynamicClass(signature.properties); + classes.Add(signature, type); + } + return type; + } + finally + { + rwLock.ReleaseReaderLock(); + } + } + + Type CreateDynamicClass(DynamicProperty[] properties) + { + LockCookie cookie = rwLock.UpgradeToWriterLock(Timeout.Infinite); + try + { + string typeName = "DynamicClass" + (classCount + 1); +#if ENABLE_LINQ_PARTIAL_TRUST + new ReflectionPermission(PermissionState.Unrestricted).Assert(); +#endif + try + { + TypeBuilder tb = this.module.DefineType(typeName, TypeAttributes.Class | + TypeAttributes.Public, typeof(DynamicClass)); + FieldInfo[] fields = GenerateProperties(tb, properties); + GenerateEquals(tb, fields); + GenerateGetHashCode(tb, fields); + Type result = tb.CreateType(); + classCount++; + return result; + } + finally + { +#if ENABLE_LINQ_PARTIAL_TRUST + PermissionSet.RevertAssert(); +#endif + } + } + finally + { + rwLock.DowngradeFromWriterLock(ref cookie); + } + } + + FieldInfo[] GenerateProperties(TypeBuilder tb, DynamicProperty[] properties) + { + FieldInfo[] fields = new FieldBuilder[properties.Length]; + for (int i = 0; i < properties.Length; i++) + { + DynamicProperty dp = properties[i]; + FieldBuilder fb = tb.DefineField("_" + dp.Name, dp.Type, FieldAttributes.Private); + PropertyBuilder pb = tb.DefineProperty(dp.Name, PropertyAttributes.HasDefault, dp.Type, null); + MethodBuilder mbGet = tb.DefineMethod("get_" + dp.Name, + MethodAttributes.Public | MethodAttributes.SpecialName | MethodAttributes.HideBySig, + dp.Type, Type.EmptyTypes); + ILGenerator genGet = mbGet.GetILGenerator(); + genGet.Emit(OpCodes.Ldarg_0); + 
genGet.Emit(OpCodes.Ldfld, fb); + genGet.Emit(OpCodes.Ret); + MethodBuilder mbSet = tb.DefineMethod("set_" + dp.Name, + MethodAttributes.Public | MethodAttributes.SpecialName | MethodAttributes.HideBySig, + null, new Type[] { dp.Type }); + ILGenerator genSet = mbSet.GetILGenerator(); + genSet.Emit(OpCodes.Ldarg_0); + genSet.Emit(OpCodes.Ldarg_1); + genSet.Emit(OpCodes.Stfld, fb); + genSet.Emit(OpCodes.Ret); + pb.SetGetMethod(mbGet); + pb.SetSetMethod(mbSet); + fields[i] = fb; + } + return fields; + } + + void GenerateEquals(TypeBuilder tb, FieldInfo[] fields) + { + MethodBuilder mb = tb.DefineMethod("Equals", + MethodAttributes.Public | MethodAttributes.ReuseSlot | + MethodAttributes.Virtual | MethodAttributes.HideBySig, + typeof(bool), new Type[] { typeof(object) }); + ILGenerator gen = mb.GetILGenerator(); + LocalBuilder other = gen.DeclareLocal(tb); + Label next = gen.DefineLabel(); + gen.Emit(OpCodes.Ldarg_1); + gen.Emit(OpCodes.Isinst, tb); + gen.Emit(OpCodes.Stloc, other); + gen.Emit(OpCodes.Ldloc, other); + gen.Emit(OpCodes.Brtrue_S, next); + gen.Emit(OpCodes.Ldc_I4_0); + gen.Emit(OpCodes.Ret); + gen.MarkLabel(next); + foreach (FieldInfo field in fields) + { + Type ft = field.FieldType; + Type ct = typeof(EqualityComparer<>).MakeGenericType(ft); + next = gen.DefineLabel(); + gen.EmitCall(OpCodes.Call, ct.GetMethod("get_Default"), null); + gen.Emit(OpCodes.Ldarg_0); + gen.Emit(OpCodes.Ldfld, field); + gen.Emit(OpCodes.Ldloc, other); + gen.Emit(OpCodes.Ldfld, field); + gen.EmitCall(OpCodes.Callvirt, ct.GetMethod("Equals", new Type[] { ft, ft }), null); + gen.Emit(OpCodes.Brtrue_S, next); + gen.Emit(OpCodes.Ldc_I4_0); + gen.Emit(OpCodes.Ret); + gen.MarkLabel(next); + } + gen.Emit(OpCodes.Ldc_I4_1); + gen.Emit(OpCodes.Ret); + } + + void GenerateGetHashCode(TypeBuilder tb, FieldInfo[] fields) + { + MethodBuilder mb = tb.DefineMethod("GetHashCode", + MethodAttributes.Public | MethodAttributes.ReuseSlot | + MethodAttributes.Virtual | MethodAttributes.HideBySig, + 
typeof(int), Type.EmptyTypes); + ILGenerator gen = mb.GetILGenerator(); + gen.Emit(OpCodes.Ldc_I4_0); + foreach (FieldInfo field in fields) + { + Type ft = field.FieldType; + Type ct = typeof(EqualityComparer<>).MakeGenericType(ft); + gen.EmitCall(OpCodes.Call, ct.GetMethod("get_Default"), null); + gen.Emit(OpCodes.Ldarg_0); + gen.Emit(OpCodes.Ldfld, field); + gen.EmitCall(OpCodes.Callvirt, ct.GetMethod("GetHashCode", new Type[] { ft }), null); + gen.Emit(OpCodes.Xor); + } + gen.Emit(OpCodes.Ret); + } + } + + internal sealed class ParseException : Exception + { + int position; + + public ParseException(string message, int position) + : base(message) + { + this.position = position; + } + + public int Position + { + get { return position; } + } + + public override string ToString() + { + return string.Format(Res.ParseExceptionFormat, Message, position); + } + } + #endregion + + internal class ExpressionParser + { + #region [ internal ] + struct Token + { + public TokenId id; + public string text; + public int pos; + } + + enum TokenId + { + Unknown, + End, + Identifier, + StringLiteral, + IntegerLiteral, + RealLiteral, + Exclamation, + Percent, + Amphersand, + OpenParen, + CloseParen, + Asterisk, + Plus, + Comma, + Minus, + Dot, + Slash, + Colon, + LessThan, + Equal, + GreaterThan, + Question, + OpenBracket, + CloseBracket, + Bar, + ExclamationEqual, + DoubleAmphersand, + LessThanEqual, + LessGreater, + DoubleEqual, + GreaterThanEqual, + DoubleBar + } + + interface ILogicalSignatures + { + void F(bool x, bool y); + void F(bool? x, bool? y); + } + + interface IArithmeticSignatures + { + void F(int x, int y); + void F(uint x, uint y); + void F(long x, long y); + void F(ulong x, ulong y); + void F(float x, float y); + void F(double x, double y); + void F(decimal x, decimal y); + void F(int? x, int? y); + void F(uint? x, uint? y); + void F(long? x, long? y); + void F(ulong? x, ulong? y); + void F(float? x, float? y); + void F(double? x, double? y); + void F(decimal? 
x, decimal? y); + } + + interface IRelationalSignatures : IArithmeticSignatures + { + void F(string x, string y); + void F(char x, char y); + void F(DateTime x, DateTime y); + void F(TimeSpan x, TimeSpan y); + void F(char? x, char? y); + void F(DateTime? x, DateTime? y); + void F(TimeSpan? x, TimeSpan? y); + } + + interface IEqualitySignatures : IRelationalSignatures + { + void F(bool x, bool y); + void F(bool? x, bool? y); + } + + interface IAddSignatures : IArithmeticSignatures + { + void F(DateTime x, TimeSpan y); + void F(TimeSpan x, TimeSpan y); + void F(DateTime? x, TimeSpan? y); + void F(TimeSpan? x, TimeSpan? y); + } + + interface ISubtractSignatures : IAddSignatures + { + void F(DateTime x, DateTime y); + void F(DateTime? x, DateTime? y); + } + + interface INegationSignatures + { + void F(int x); + void F(long x); + void F(float x); + void F(double x); + void F(decimal x); + void F(int? x); + void F(long? x); + void F(float? x); + void F(double? x); + void F(decimal? x); + } + + interface INotSignatures + { + void F(bool x); + void F(bool? x); + } + + interface IEnumerableSignatures + { + void Where(bool predicate); + void Any(); + void Any(bool predicate); + void All(bool predicate); + void Count(); + void Count(bool predicate); + void Min(object selector); + void Max(object selector); + void Sum(int selector); + void Sum(int? selector); + void Sum(long selector); + void Sum(long? selector); + void Sum(float selector); + void Sum(float? selector); + void Sum(double selector); + void Sum(double? selector); + void Sum(decimal selector); + void Sum(decimal? selector); + void Average(int selector); + void Average(int? selector); + void Average(long selector); + void Average(long? selector); + void Average(float selector); + void Average(float? selector); + void Average(double selector); + void Average(double? selector); + void Average(decimal selector); + void Average(decimal? 
selector); + } + + static readonly Type[] predefinedTypes = { + typeof(Object), + typeof(Boolean), + typeof(Char), + typeof(String), + typeof(SByte), + typeof(Byte), + typeof(Int16), + typeof(UInt16), + typeof(Int32), + typeof(UInt32), + typeof(Int64), + typeof(UInt64), + typeof(Single), + typeof(Double), + typeof(Decimal), + typeof(DateTime), + typeof(TimeSpan), + typeof(Guid), + typeof(Math), + typeof(Convert) + }; + #endregion + + #region [ gunk ] + + + //// *, /, %, mod operators + //Expression ParseMultiplicative() + //{ + // Expression left = ParseUnary(); + // //while (token.id == TokenId.Asterisk || token.id == TokenId.Slash || + // // token.id == TokenId.Percent || TokenIdentifierIs("mod")) + // //{ + // // Token op = token; + // // NextToken(); + // // Expression right = ParseUnary(); + // // CheckAndPromoteOperands(typeof(IArithmeticSignatures), op.text, ref left, ref right, op.pos); + // // switch (op.id) + // // { + // // case TokenId.Asterisk: + // // left = Expression.Multiply(left, right); + // // break; + // // case TokenId.Slash: + // // left = Expression.Divide(left, right); + // // break; + // // case TokenId.Percent: + // // case TokenId.Identifier: + // // left = Expression.Modulo(left, right); + // // break; + // // } + // //} + // return left; + //} + + //Expression GenerateAdd(Expression left, Expression right) + //{ + // if (left.Type == typeof(string) && right.Type == typeof(string)) + // { + // return GenerateStaticMethodCall("Concat", left, right); + // } + // return Expression.Add(left, right); + //} + + //Expression GenerateSubtract(Expression left, Expression right) + //{ + // return Expression.Subtract(left, right); + //} + + //Expression GenerateStringConcat(Expression left, Expression right) + //{ + // return Expression.Call( + // null, + // typeof(string).GetMethod("Concat", new[] { typeof(object), typeof(object) }), + // new[] { left, right }); + //} + +#pragma warning disable 0219 + //public IEnumerable ParseOrdering() + //{ + 
// List orderings = new List(); + // while (true) + // { + // Expression expr = ParseExpression(); + // bool ascending = true; + // if (TokenIdentifierIs("asc") || TokenIdentifierIs("ascending")) + // { + // NextToken(); + // } + // else if (TokenIdentifierIs("desc") || TokenIdentifierIs("descending")) + // { + // NextToken(); + // ascending = false; + // } + // orderings.Add(new DynamicOrdering { Selector = expr, Ascending = ascending }); + // if (token.id != TokenId.Comma) break; + // NextToken(); + // } + // ValidateToken(TokenId.End, Res.SyntaxError); + // return orderings; + //} +#pragma warning restore 0219 + + //Expression ParseIt() + //{ + // if (it == null) + // throw ParseError(Res.NoItInScope); + // NextToken(); + // return it; + //} + + //Expression ParseIif() + //{ + // int errorPos = token.pos; + // NextToken(); + // Expression[] args = ParseArgumentList(); + // if (args.Length != 3) + // throw ParseError(errorPos, Res.IifRequiresThreeArgs); + // return GenerateConditional(args[0], args[1], args[2], errorPos); + //} + + //Expression GenerateConditional(Expression test, Expression expr1, Expression expr2, int errorPos) + //{ + // if (test.Type != typeof(bool)) + // throw ParseError(errorPos, Res.FirstExprMustBeBool); + // if (expr1.Type != expr2.Type) + // { + // Expression expr1as2 = expr2 != nullLiteral ? PromoteExpression(expr1, expr2.Type, true) : null; + // Expression expr2as1 = expr1 != nullLiteral ? PromoteExpression(expr2, expr1.Type, true) : null; + // if (expr1as2 != null && expr2as1 == null) + // { + // expr1 = expr1as2; + // } + // else if (expr2as1 != null && expr1as2 == null) + // { + // expr2 = expr2as1; + // } + // else + // { + // string type1 = expr1 != nullLiteral ? expr1.Type.Name : "null"; + // string type2 = expr2 != nullLiteral ? 
expr2.Type.Name : "null"; + // if (expr1as2 != null && expr2as1 != null) + // throw ParseError(errorPos, Res.BothTypesConvertToOther, type1, type2); + // throw ParseError(errorPos, Res.NeitherTypeConvertsToOther, type1, type2); + // } + // } + // return Expression.Condition(test, expr1, expr2); + //} + + //Expression ParseNew() + //{ + // NextToken(); + // ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); + // NextToken(); + // List properties = new List(); + // List expressions = new List(); + // while (true) + // { + // int exprPos = token.pos; + // Expression expr = ParseExpression(); + // string propName; + // if (TokenIdentifierIs("as")) + // { + // NextToken(); + // propName = GetIdentifier(); + // NextToken(); + // } + // else + // { + // MemberExpression me = expr as MemberExpression; + // if (me == null) throw ParseError(exprPos, Res.MissingAsClause); + // propName = me.Member.Name; + // } + // expressions.Add(expr); + // properties.Add(new DynamicProperty(propName, expr.Type)); + // if (token.id != TokenId.Comma) break; + // NextToken(); + // } + // ValidateToken(TokenId.CloseParen, Res.CloseParenOrCommaExpected); + // NextToken(); + // Type type = DynamicExpression.CreateClass(properties); + // MemberBinding[] bindings = new MemberBinding[properties.Count]; + // for (int i = 0; i < bindings.Length; i++) + // bindings[i] = Expression.Bind(type.GetProperty(properties[i].Name), expressions[i]); + // return Expression.MemberInit(Expression.New(type), bindings); + //} + + //Expression ParseLambdaInvocation(LambdaExpression lambda) + //{ + // int errorPos = token.pos; + // NextToken(); + // Expression[] args = ParseArgumentList(); + // MethodBase method; + // if (FindMethod(lambda.Type, "Invoke", false, args, out method) != 1) + // throw ParseError(errorPos, Res.ArgsIncompatibleWithLambda); + // return Expression.Invoke(lambda, args); + //} + + + //Expression GenerateConversion(Expression expr, Type type, int errorPos) + //{ + // Type exprType = 
expr.Type; + // if (exprType == type) return expr; + // if (exprType.IsValueType && type.IsValueType) + // { + // if ((IsNullableType(exprType) || IsNullableType(type)) && + // GetNonNullableType(exprType) == GetNonNullableType(type)) + // return Expression.Convert(expr, type); + // if ((IsNumericType(exprType) || IsEnumType(exprType)) && + // (IsNumericType(type)) || IsEnumType(type)) + // return Expression.ConvertChecked(expr, type); + // } + // if (exprType.IsAssignableFrom(type) || type.IsAssignableFrom(exprType) || + // exprType.IsInterface || type.IsInterface) + // return Expression.Convert(expr, type); + // throw ParseError(errorPos, Res.CannotConvertValue, + // GetTypeName(exprType), GetTypeName(type)); + //} + + + //static Type FindGenericType(Type generic, Type type) + //{ + // while (type != null && type != typeof(object)) + // { + // if (type.IsGenericType && type.GetGenericTypeDefinition() == generic) return type; + // if (generic.IsInterface) + // { + // foreach (Type intfType in type.GetInterfaces()) + // { + // Type found = FindGenericType(generic, intfType); + // if (found != null) return found; + // } + // } + // type = type.BaseType; + // } + // return null; + //} + + //Expression ParseAggregate(Expression instance, Type elementType, string methodName, int errorPos) + //{ + // ParameterExpression outerIt = it; + // ParameterExpression innerIt = Expression.Parameter(elementType, ""); + // it = innerIt; + // Expression[] args = ParseArgumentList(); + // it = outerIt; + // MethodBase signature; + // if (FindMethod(typeof(IEnumerableSignatures), methodName, false, args, out signature) != 1) + // throw ParseError(errorPos, Res.NoApplicableAggregate, methodName); + // Type[] typeArgs; + // if (signature.Name == "Min" || signature.Name == "Max") + // { + // typeArgs = new Type[] { elementType, args[0].Type }; + // } + // else + // { + // typeArgs = new Type[] { elementType }; + // } + // if (args.Length == 0) + // { + // args = new Expression[] { 
instance }; + // } + // else + // { + // args = new Expression[] { instance, Expression.Lambda(args[0], innerIt) }; + // } + // return Expression.Call(typeof(Enumerable), signature.Name, typeArgs, args); + //} + + //Expression[] ParseArgumentList() + //{ + // ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); + // NextToken(); + // Expression[] args = token.id != TokenId.CloseParen ? ParseArguments() : new Expression[0]; + // ValidateToken(TokenId.CloseParen, Res.CloseParenOrCommaExpected); + // NextToken(); + // return args; + //} + + //Expression[] ParseArguments() + //{ + // List argList = new List(); + // while (true) + // { + // argList.Add(ParseExpression()); + // if (token.id != TokenId.Comma) break; + // NextToken(); + // } + // return argList.ToArray(); + //} + + //Expression ParseElementAccess(Expression expr) + //{ + // int errorPos = token.pos; + // ValidateToken(TokenId.OpenBracket, Res.OpenParenExpected); + // NextToken(); + // Expression[] args = ParseArguments(); + // ValidateToken(TokenId.CloseBracket, Res.CloseBracketOrCommaExpected); + // NextToken(); + // if (expr.Type.IsArray) + // { + // if (expr.Type.GetArrayRank() != 1 || args.Length != 1) + // throw ParseError(errorPos, Res.CannotIndexMultiDimArray); + // Expression index = PromoteExpression(args[0], typeof(int), true); + // if (index == null) + // throw ParseError(errorPos, Res.InvalidIndex); + // return Expression.ArrayIndex(expr, index); + // } + // else + // { + // MethodBase mb; + // switch (FindIndexer(expr.Type, args, out mb)) + // { + // case 0: + // throw ParseError(errorPos, Res.NoApplicableIndexer, + // GetTypeName(expr.Type)); + // case 1: + // return Expression.Call(expr, (MethodInfo)mb, args); + // default: + // throw ParseError(errorPos, Res.AmbiguousIndexerInvocation, + // GetTypeName(expr.Type)); + // } + // } + //} + + //static bool IsPredefinedType(Type type) + //{ + // foreach (Type t in predefinedTypes) if (t == type) return true; + // return false; + //} + + 
//static bool IsNumericType(Type type) + //{ + // return GetNumericTypeKind(type) != 0; + //} + + //static bool IsEnumType(Type type) + //{ + // return GetNonNullableType(type).IsEnum; + //} + + //static readonly string keywordIt = "it"; + //static readonly string keywordIif = "iif"; + //static readonly string keywordNew = "new"; + + #endregion + + static readonly Expression trueLiteral = Expression.Constant(true); + static readonly Expression falseLiteral = Expression.Constant(false); + static readonly Expression nullLiteral = Expression.Constant(null); + + static Dictionary keywords; + + Dictionary symbols; + IDictionary externals; + Dictionary literals; + ParameterExpression it; + string text; + int textPos; + int textLen; + char ch; + Token token; + + public ExpressionParser(ParameterExpression[] parameters, string expression, object[] values) + { + if (expression == null) throw new ArgumentNullException("expression"); + if (keywords == null) keywords = CreateKeywords(); + symbols = new Dictionary(StringComparer.OrdinalIgnoreCase); + literals = new Dictionary(); + if (parameters != null) ProcessParameters(parameters); + if (values != null) ProcessValues(values); + text = expression; + textLen = text.Length; + SetTextPos(0); + NextToken(); + } + + void ProcessParameters(ParameterExpression[] parameters) + { + foreach (ParameterExpression pe in parameters) + if (!String.IsNullOrEmpty(pe.Name)) + AddSymbol(pe.Name, pe); + if (parameters.Length == 1 && String.IsNullOrEmpty(parameters[0].Name)) + it = parameters[0]; + } + + void ProcessValues(object[] values) + { + for (int i = 0; i < values.Length; i++) + { + object value = values[i]; + if (i == values.Length - 1 && value is IDictionary) + { + externals = (IDictionary)value; + } + else + { + AddSymbol("@" + i.ToString(System.Globalization.CultureInfo.InvariantCulture), value); + } + } + } + + void AddSymbol(string name, object value) + { + if (symbols.ContainsKey(name)) + throw ParseError(Res.DuplicateIdentifier, 
name); + symbols.Add(name, value); + } + + public Expression Parse(Type resultType) + { + int exprPos = token.pos; + Expression expr = ParseExpression(); + if (resultType != null) + if ((expr = PromoteExpression(expr, resultType, true)) == null) + throw ParseError(exprPos, Res.ExpressionTypeMismatch, GetTypeName(resultType)); + ValidateToken(TokenId.End, Res.SyntaxError); + return expr; + } + + + // ?: operator + Expression ParseExpression() + { + int errorPos = token.pos; + Expression expr = ParseLogicalOr(); + //if (token.id == TokenId.Question) + //{ + // NextToken(); + // Expression expr1 = ParseExpression(); + // ValidateToken(TokenId.Colon, Res.ColonExpected); + // NextToken(); + // Expression expr2 = ParseExpression(); + // expr = GenerateConditional(expr, expr1, expr2, errorPos); + //} + return expr; + } + + // ||, or operator + Expression ParseLogicalOr() + { + Expression left = ParseLogicalAnd(); + while (token.id == TokenId.DoubleBar || TokenIdentifierIs("or")) + { + Token op = token; + NextToken(); + Expression right = ParseLogicalAnd(); + CheckAndPromoteOperands(typeof(ILogicalSignatures), op.text, ref left, ref right, op.pos); + left = Expression.OrElse(left, right); + } + return left; + } + + // &&, and operator + Expression ParseLogicalAnd() + { + Expression left = ParseComparison(); + while (token.id == TokenId.DoubleAmphersand || TokenIdentifierIs("and")) + { + Token op = token; + NextToken(); + Expression right = ParseComparison(); + CheckAndPromoteOperands(typeof(ILogicalSignatures), op.text, ref left, ref right, op.pos); + left = Expression.AndAlso(left, right); + } + return left; + } + + // =, ==, !=, <>, >, >=, <, <= operators + Expression ParseComparison() + { + Expression left = ParseAdditive(); + while (token.id == TokenId.Equal || token.id == TokenId.DoubleEqual || + token.id == TokenId.ExclamationEqual || token.id == TokenId.LessGreater || + token.id == TokenId.GreaterThan || token.id == TokenId.GreaterThanEqual || + token.id == 
TokenId.LessThan || token.id == TokenId.LessThanEqual) + { + Token op = token; + NextToken(); + Expression right = ParseAdditive(); + bool isEquality = op.id == TokenId.Equal || op.id == TokenId.DoubleEqual || + op.id == TokenId.ExclamationEqual || op.id == TokenId.LessGreater; + if (isEquality && !left.Type.IsValueType && !right.Type.IsValueType) + { + if (left.Type != right.Type) + { + if (left.Type.IsAssignableFrom(right.Type)) + { + right = Expression.Convert(right, left.Type); + } + else if (right.Type.IsAssignableFrom(left.Type)) + { + left = Expression.Convert(left, right.Type); + } + else + { + throw IncompatibleOperandsError(op.text, left, right, op.pos); + } + } + } + //else if (IsEnumType(left.Type) || IsEnumType(right.Type)) + //{ + // if (left.Type != right.Type) + // { + // Expression e; + // if ((e = PromoteExpression(right, left.Type, true)) != null) + // { + // right = e; + // } + // else if ((e = PromoteExpression(left, right.Type, true)) != null) + // { + // left = e; + // } + // else + // { + // throw IncompatibleOperandsError(op.text, left, right, op.pos); + // } + // } + //} + else + { + if (left.Type == typeof(Guid)) + right = Expression.Constant(Guid.Parse(right.ToString().Replace("\"", "").Replace("'",""))); + else if (left.Type == typeof(DateTime)) + right = Expression.Constant(DateTime.Parse(right.ToString().Replace("\"", "").Replace("'", ""))); + else + CheckAndPromoteOperands(isEquality ? 
typeof(IEqualitySignatures) : typeof(IRelationalSignatures), + op.text, ref left, ref right, op.pos); + } + switch (op.id) + { + case TokenId.Equal: + case TokenId.DoubleEqual: + left = GenerateEqual(left, right); + break; + case TokenId.ExclamationEqual: + case TokenId.LessGreater: + left = GenerateNotEqual(left, right); + break; + case TokenId.GreaterThan: + left = GenerateGreaterThan(left, right); + break; + case TokenId.GreaterThanEqual: + left = GenerateGreaterThanEqual(left, right); + break; + case TokenId.LessThan: + left = GenerateLessThan(left, right); + break; + case TokenId.LessThanEqual: + left = GenerateLessThanEqual(left, right); + break; + } + } + return left; + } + + // +, -, & operators + Expression ParseAdditive() + { + Expression left = ParseUnary();// ParseMultiplicative(); + //while (token.id == TokenId.Plus || token.id == TokenId.Minus || + // token.id == TokenId.Amphersand) + //{ + // Token op = token; + // NextToken(); + // Expression right = ParseMultiplicative(); + // switch (op.id) + // { + // case TokenId.Plus: + // if (left.Type == typeof(string) || right.Type == typeof(string)) + // goto case TokenId.Amphersand; + // CheckAndPromoteOperands(typeof(IAddSignatures), op.text, ref left, ref right, op.pos); + // left = GenerateAdd(left, right); + // break; + // case TokenId.Minus: + // CheckAndPromoteOperands(typeof(ISubtractSignatures), op.text, ref left, ref right, op.pos); + // left = GenerateSubtract(left, right); + // break; + // case TokenId.Amphersand: + // left = GenerateStringConcat(left, right); + // break; + // } + //} + return left; + } + + // -, !, not unary operators + Expression ParseUnary() + { + if (token.id == TokenId.Minus || token.id == TokenId.Exclamation || + TokenIdentifierIs("not")) + { + Token op = token; + NextToken(); + if (op.id == TokenId.Minus && (token.id == TokenId.IntegerLiteral || + token.id == TokenId.RealLiteral)) + { + token.text = "-" + token.text; + token.pos = op.pos; + return ParsePrimary(); + } + 
Expression expr = ParseUnary(); + if (op.id == TokenId.Minus) + { + CheckAndPromoteOperand(typeof(INegationSignatures), op.text, ref expr, op.pos); + expr = Expression.Negate(expr); + } + else + { + CheckAndPromoteOperand(typeof(INotSignatures), op.text, ref expr, op.pos); + expr = Expression.Not(expr); + } + return expr; + } + return ParsePrimary(); + } + + Expression ParsePrimary() + { + Expression expr = ParsePrimaryStart(); + while (true) + { + if (token.id == TokenId.Dot) + { + NextToken(); + expr = ParseMemberAccess(null, expr); + } + //else if (token.id == TokenId.OpenBracket) + //{ + // expr = ParseElementAccess(expr); + //} + else + { + break; + } + } + return expr; + } + + Expression ParsePrimaryStart() + { + switch (token.id) + { + case TokenId.Identifier: + return ParseIdentifier(); + case TokenId.StringLiteral: + return ParseStringLiteral(); + case TokenId.IntegerLiteral: + return ParseIntegerLiteral(); + case TokenId.RealLiteral: + return ParseRealLiteral(); + case TokenId.OpenParen: + return ParseParenExpression(); + default: + throw ParseError(Res.ExpressionExpected); + } + } + + Expression ParseStringLiteral() + { + ValidateToken(TokenId.StringLiteral); + char quote = token.text[0]; + string s = token.text.Substring(1, token.text.Length - 2); + int start = 0; + while (true) + { + int i = s.IndexOf(quote, start); + if (i < 0) break; + s = s.Remove(i, 1); + start = i + 1; + } + if (quote == '\'') + { + if (s.Length != 1) + throw ParseError(Res.InvalidCharacterLiteral); + NextToken(); + return CreateLiteral(s[0], s); + } + NextToken(); + return CreateLiteral(s, s); + } + + Expression ParseIntegerLiteral() + { + ValidateToken(TokenId.IntegerLiteral); + string text = token.text; + if (text[0] != '-') + { + ulong value; + if (!UInt64.TryParse(text, out value)) + throw ParseError(Res.InvalidIntegerLiteral, text); + NextToken(); + if (value <= (ulong)Int32.MaxValue) return CreateLiteral((int)value, text); + if (value <= (ulong)UInt32.MaxValue) return 
CreateLiteral((uint)value, text); + if (value <= (ulong)Int64.MaxValue) return CreateLiteral((long)value, text); + return CreateLiteral(value, text); + } + else + { + long value; + if (!Int64.TryParse(text, out value)) + throw ParseError(Res.InvalidIntegerLiteral, text); + NextToken(); + if (value >= Int32.MinValue && value <= Int32.MaxValue) + return CreateLiteral((int)value, text); + return CreateLiteral(value, text); + } + } + + Expression ParseRealLiteral() + { + ValidateToken(TokenId.RealLiteral); + string text = token.text; + object value = null; + char last = text[text.Length - 1]; + if (last == 'F' || last == 'f') + { + float f; + if (Single.TryParse(text.Substring(0, text.Length - 1), out f)) value = f; + } + else + { + double d; + if (Double.TryParse(text, out d)) value = d; + } + if (value == null) throw ParseError(Res.InvalidRealLiteral, text); + NextToken(); + return CreateLiteral(value, text); + } + + Expression CreateLiteral(object value, string text) + { + ConstantExpression expr = Expression.Constant(value); + literals.Add(expr, text); + return expr; + } + + Expression ParseParenExpression() + { + ValidateToken(TokenId.OpenParen, Res.OpenParenExpected); + NextToken(); + Expression e = ParseExpression(); + ValidateToken(TokenId.CloseParen, Res.CloseParenOrOperatorExpected); + NextToken(); + return e; + } + + Expression ParseIdentifier() + { + ValidateToken(TokenId.Identifier); + object value; + if (keywords.TryGetValue(token.text, out value)) + { + if (value is Type) return ParseTypeAccess((Type)value); + //if (value == (object)keywordIt) return ParseIt(); + //if (value == (object)keywordIif) return ParseIif(); + //if (value == (object)keywordNew) return ParseNew(); + NextToken(); + return (Expression)value; + } + if (symbols.TryGetValue(token.text, out value) || + externals != null && externals.TryGetValue(token.text, out value)) + { + Expression expr = value as Expression; + if (expr == null) + { + expr = Expression.Constant(value); + } + //else + 
//{ + // LambdaExpression lambda = expr as LambdaExpression; + // if (lambda != null) return ParseLambdaInvocation(lambda); + //} + NextToken(); + return expr; + } + if (it != null) return ParseMemberAccess(null, it); + throw ParseError(Res.UnknownIdentifier, token.text); + } + + Expression ParseTypeAccess(Type type) + { + int errorPos = token.pos; + NextToken(); + if (token.id == TokenId.Question) + { + if (!type.IsValueType || IsNullableType(type)) + throw ParseError(errorPos, Res.TypeHasNoNullableForm, GetTypeName(type)); + type = typeof(Nullable<>).MakeGenericType(type); + NextToken(); + } + //if (token.id == TokenId.OpenParen) + //{ + // Expression[] args = ParseArgumentList(); + // MethodBase method; + // switch (FindBestMethod(type.GetConstructors(), args, out method)) + // { + // case 0: + // if (args.Length == 1) + // return GenerateConversion(args[0], type, errorPos); + // throw ParseError(errorPos, Res.NoMatchingConstructor, GetTypeName(type)); + // case 1: + // return Expression.New((ConstructorInfo)method, args); + // default: + // throw ParseError(errorPos, Res.AmbiguousConstructorInvocation, GetTypeName(type)); + // } + //} + ValidateToken(TokenId.Dot, Res.DotOrOpenParenExpected); + NextToken(); + return ParseMemberAccess(type, null); + } + + Expression ParseMemberAccess(Type type, Expression instance) + { + if (instance != null) type = instance.Type; + int errorPos = token.pos; + string id = GetIdentifier(); + NextToken(); + //if (token.id == TokenId.OpenParen) + //{ + // //if (instance != null && type != typeof(string)) + // //{ + // // Type enumerableType = FindGenericType(typeof(IEnumerable<>), type); + // // if (enumerableType != null) + // // { + // // Type elementType = enumerableType.GetGenericArguments()[0]; + // // return ParseAggregate(instance, elementType, id, errorPos); + // // } + // //} + // Expression[] args = ParseArgumentList(); + // MethodBase mb; + // switch (FindMethod(type, id, instance == null, args, out mb)) + // { + // case 
0: + // throw ParseError(errorPos, Res.NoApplicableMethod, + // id, GetTypeName(type)); + // case 1: + // MethodInfo method = (MethodInfo)mb; + // if (!IsPredefinedType(method.DeclaringType)) + // throw ParseError(errorPos, Res.MethodsAreInaccessible, GetTypeName(method.DeclaringType)); + // if (method.ReturnType == typeof(void)) + // throw ParseError(errorPos, Res.MethodIsVoid, + // id, GetTypeName(method.DeclaringType)); + // return Expression.Call(instance, (MethodInfo)method, args); + // default: + // throw ParseError(errorPos, Res.AmbiguousMethodInvocation, + // id, GetTypeName(type)); + // } + //} + //else + { + MemberInfo member = FindPropertyOrField(type, id, instance == null); + if (member == null) + throw ParseError(errorPos, Res.UnknownPropertyOrField, + id, GetTypeName(type)); + return member is PropertyInfo ? + Expression.Property(instance, (PropertyInfo)member) : + Expression.Field(instance, (FieldInfo)member); + } + } + + static bool IsNullableType(Type type) + { + return type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>); + } + + static Type GetNonNullableType(Type type) + { + return IsNullableType(type) ? 
type.GetGenericArguments()[0] : type; + } + + static string GetTypeName(Type type) + { + Type baseType = GetNonNullableType(type); + string s = baseType.Name; + if (type != baseType) s += '?'; + return s; + } + + static bool IsSignedIntegralType(Type type) + { + return GetNumericTypeKind(type) == 2; + } + + static bool IsUnsignedIntegralType(Type type) + { + return GetNumericTypeKind(type) == 3; + } + + static int GetNumericTypeKind(Type type) + { + type = GetNonNullableType(type); + if (type.IsEnum) return 0; + switch (Type.GetTypeCode(type)) + { + case TypeCode.Char: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return 1; + case TypeCode.SByte: + case TypeCode.Int16: + case TypeCode.Int32: + case TypeCode.Int64: + return 2; + case TypeCode.Byte: + case TypeCode.UInt16: + case TypeCode.UInt32: + case TypeCode.UInt64: + return 3; + default: + return 0; + } + } + + void CheckAndPromoteOperand(Type signatures, string opName, ref Expression expr, int errorPos) + { + Expression[] args = new Expression[] { expr }; + MethodBase method; + if (FindMethod(signatures, "F", false, args, out method) != 1) + throw ParseError(errorPos, Res.IncompatibleOperand, + opName, GetTypeName(args[0].Type)); + expr = args[0]; + } + + void CheckAndPromoteOperands(Type signatures, string opName, ref Expression left, ref Expression right, int errorPos) + { + Expression[] args = new Expression[] { left, right }; + MethodBase method; + if (FindMethod(signatures, "F", false, args, out method) != 1) + throw IncompatibleOperandsError(opName, left, right, errorPos); + left = args[0]; + right = args[1]; + } + + Exception IncompatibleOperandsError(string opName, Expression left, Expression right, int pos) + { + return ParseError(pos, Res.IncompatibleOperands, + opName, GetTypeName(left.Type), GetTypeName(right.Type)); + } + + MemberInfo FindPropertyOrField(Type type, string memberName, bool staticAccess) + { + BindingFlags flags = BindingFlags.Public | 
BindingFlags.DeclaredOnly | + (staticAccess ? BindingFlags.Static : BindingFlags.Instance); + foreach (Type t in SelfAndBaseTypes(type)) + { + MemberInfo[] members = t.FindMembers(MemberTypes.Property | MemberTypes.Field, + flags, Type.FilterNameIgnoreCase, memberName); + if (members.Length != 0) return members[0]; + } + return null; + } + + int FindMethod(Type type, string methodName, bool staticAccess, Expression[] args, out MethodBase method) + { + BindingFlags flags = BindingFlags.Public | BindingFlags.DeclaredOnly | + (staticAccess ? BindingFlags.Static : BindingFlags.Instance); + foreach (Type t in SelfAndBaseTypes(type)) + { + MemberInfo[] members = t.FindMembers(MemberTypes.Method, + flags, Type.FilterNameIgnoreCase, methodName); + int count = FindBestMethod(members.Cast(), args, out method); + if (count != 0) return count; + } + method = null; + return 0; + } + + int FindIndexer(Type type, Expression[] args, out MethodBase method) + { + foreach (Type t in SelfAndBaseTypes(type)) + { + MemberInfo[] members = t.GetDefaultMembers(); + if (members.Length != 0) + { + IEnumerable methods = members. + OfType(). + Select(p => (MethodBase)p.GetGetMethod()). 
+ Where(m => m != null); + int count = FindBestMethod(methods, args, out method); + if (count != 0) return count; + } + } + method = null; + return 0; + } + + static IEnumerable SelfAndBaseTypes(Type type) + { + if (type.IsInterface) + { + List types = new List(); + AddInterface(types, type); + return types; + } + return SelfAndBaseClasses(type); + } + + static IEnumerable SelfAndBaseClasses(Type type) + { + while (type != null) + { + yield return type; + type = type.BaseType; + } + } + + static void AddInterface(List types, Type type) + { + if (!types.Contains(type)) + { + types.Add(type); + foreach (Type t in type.GetInterfaces()) AddInterface(types, t); + } + } + + class MethodData + { + public MethodBase MethodBase; + public ParameterInfo[] Parameters; + public Expression[] Args; + } + + int FindBestMethod(IEnumerable methods, Expression[] args, out MethodBase method) + { + MethodData[] applicable = methods. + Select(m => new MethodData { MethodBase = m, Parameters = m.GetParameters() }). + Where(m => IsApplicable(m, args)). + ToArray(); + if (applicable.Length > 1) + { + applicable = applicable. + Where(m => applicable.All(n => m == n || IsBetterThan(args, m, n))). 
+ ToArray(); + } + if (applicable.Length == 1) + { + MethodData md = applicable[0]; + for (int i = 0; i < args.Length; i++) args[i] = md.Args[i]; + method = md.MethodBase; + } + else + { + method = null; + } + return applicable.Length; + } + + bool IsApplicable(MethodData method, Expression[] args) + { + if (method.Parameters.Length != args.Length) return false; + Expression[] promotedArgs = new Expression[args.Length]; + for (int i = 0; i < args.Length; i++) + { + ParameterInfo pi = method.Parameters[i]; + if (pi.IsOut) return false; + Expression promoted = PromoteExpression(args[i], pi.ParameterType, false); + if (promoted == null) return false; + promotedArgs[i] = promoted; + } + method.Args = promotedArgs; + return true; + } + + Expression PromoteExpression(Expression expr, Type type, bool exact) + { + if (expr.Type == type) return expr; + if (expr is ConstantExpression) + { + ConstantExpression ce = (ConstantExpression)expr; + if (ce == nullLiteral) + { + if (!type.IsValueType || IsNullableType(type)) + return Expression.Constant(null, type); + } + else + { + string text; + if (literals.TryGetValue(ce, out text)) + { + Type target = GetNonNullableType(type); + Object value = null; + switch (Type.GetTypeCode(ce.Type)) + { + case TypeCode.Int32: + case TypeCode.UInt32: + case TypeCode.Int64: + case TypeCode.UInt64: + value = ParseNumber(text, target); + break; + case TypeCode.Double: + if (target == typeof(decimal)) value = ParseNumber(text, target); + break; + case TypeCode.String: + value = ParseEnum(text, target); + break; + } + if (value != null) + return Expression.Constant(value, type); + } + } + } + if (IsCompatibleWith(expr.Type, type)) + { + if (type.IsValueType || exact) return Expression.Convert(expr, type); + return expr; + } + return null; + } + + static object ParseNumber(string text, Type type) + { + switch (Type.GetTypeCode(GetNonNullableType(type))) + { + case TypeCode.SByte: + sbyte sb; + if (sbyte.TryParse(text, out sb)) return sb; + break; + 
case TypeCode.Byte: + byte b; + if (byte.TryParse(text, out b)) return b; + break; + case TypeCode.Int16: + short s; + if (short.TryParse(text, out s)) return s; + break; + case TypeCode.UInt16: + ushort us; + if (ushort.TryParse(text, out us)) return us; + break; + case TypeCode.Int32: + int i; + if (int.TryParse(text, out i)) return i; + break; + case TypeCode.UInt32: + uint ui; + if (uint.TryParse(text, out ui)) return ui; + break; + case TypeCode.Int64: + long l; + if (long.TryParse(text, out l)) return l; + break; + case TypeCode.UInt64: + ulong ul; + if (ulong.TryParse(text, out ul)) return ul; + break; + case TypeCode.Single: + float f; + if (float.TryParse(text, out f)) return f; + break; + case TypeCode.Double: + double d; + if (double.TryParse(text, out d)) return d; + break; + case TypeCode.Decimal: + decimal e; + if (decimal.TryParse(text, out e)) return e; + break; + } + return null; + } + + static object ParseEnum(string name, Type type) + { + if (type.IsEnum) + { + MemberInfo[] memberInfos = type.FindMembers(MemberTypes.Field, + BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static, + Type.FilterNameIgnoreCase, name); + if (memberInfos.Length != 0) return ((FieldInfo)memberInfos[0]).GetValue(null); + } + return null; + } + + static bool IsCompatibleWith(Type source, Type target) + { + if (source == target) return true; + if (!target.IsValueType) return target.IsAssignableFrom(source); + Type st = GetNonNullableType(source); + Type tt = GetNonNullableType(target); + if (st != source && tt == target) return false; + TypeCode sc = st.IsEnum ? TypeCode.Object : Type.GetTypeCode(st); + TypeCode tc = tt.IsEnum ? 
TypeCode.Object : Type.GetTypeCode(tt); + switch (sc) + { + case TypeCode.SByte: + switch (tc) + { + case TypeCode.SByte: + case TypeCode.Int16: + case TypeCode.Int32: + case TypeCode.Int64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.Byte: + switch (tc) + { + case TypeCode.Byte: + case TypeCode.Int16: + case TypeCode.UInt16: + case TypeCode.Int32: + case TypeCode.UInt32: + case TypeCode.Int64: + case TypeCode.UInt64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.Int16: + switch (tc) + { + case TypeCode.Int16: + case TypeCode.Int32: + case TypeCode.Int64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.UInt16: + switch (tc) + { + case TypeCode.UInt16: + case TypeCode.Int32: + case TypeCode.UInt32: + case TypeCode.Int64: + case TypeCode.UInt64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.Int32: + switch (tc) + { + case TypeCode.Int32: + case TypeCode.Int64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.UInt32: + switch (tc) + { + case TypeCode.UInt32: + case TypeCode.Int64: + case TypeCode.UInt64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.Int64: + switch (tc) + { + case TypeCode.Int64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.UInt64: + switch (tc) + { + case TypeCode.UInt64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + return true; + } + break; + case TypeCode.Single: + switch (tc) + { + case TypeCode.Single: + case TypeCode.Double: + return true; + } + break; + default: + if (st == tt) return true; + break; + } + return false; + } + + 
static bool IsBetterThan(Expression[] args, MethodData m1, MethodData m2) + { + bool better = false; + for (int i = 0; i < args.Length; i++) + { + int c = CompareConversions(args[i].Type, + m1.Parameters[i].ParameterType, + m2.Parameters[i].ParameterType); + if (c < 0) return false; + if (c > 0) better = true; + } + return better; + } + + // Return 1 if s -> t1 is a better conversion than s -> t2 + // Return -1 if s -> t2 is a better conversion than s -> t1 + // Return 0 if neither conversion is better + static int CompareConversions(Type s, Type t1, Type t2) + { + if (t1 == t2) return 0; + if (s == t1) return 1; + if (s == t2) return -1; + bool t1t2 = IsCompatibleWith(t1, t2); + bool t2t1 = IsCompatibleWith(t2, t1); + if (t1t2 && !t2t1) return 1; + if (t2t1 && !t1t2) return -1; + if (IsSignedIntegralType(t1) && IsUnsignedIntegralType(t2)) return 1; + if (IsSignedIntegralType(t2) && IsUnsignedIntegralType(t1)) return -1; + return 0; + } + + Expression GenerateEqual(Expression left, Expression right) + { + return Expression.Equal(left, right); + } + + Expression GenerateNotEqual(Expression left, Expression right) + { + return Expression.NotEqual(left, right); + } + + Expression GenerateGreaterThan(Expression left, Expression right) + { + if (left.Type == typeof(string)) + { + return Expression.GreaterThan( + GenerateStaticMethodCall("Compare", left, right), + Expression.Constant(0) + ); + } + return Expression.GreaterThan(left, right); + } + + Expression GenerateGreaterThanEqual(Expression left, Expression right) + { + if (left.Type == typeof(string)) + { + return Expression.GreaterThanOrEqual( + GenerateStaticMethodCall("Compare", left, right), + Expression.Constant(0) + ); + } + return Expression.GreaterThanOrEqual(left, right); + } + + Expression GenerateLessThan(Expression left, Expression right) + { + if (left.Type == typeof(string)) + { + return Expression.LessThan( + GenerateStaticMethodCall("Compare", left, right), + Expression.Constant(0) + ); + } + return 
Expression.LessThan(left, right); + } + + Expression GenerateLessThanEqual(Expression left, Expression right) + { + if (left.Type == typeof(string)) + { + return Expression.LessThanOrEqual( + GenerateStaticMethodCall("Compare", left, right), + Expression.Constant(0) + ); + } + return Expression.LessThanOrEqual(left, right); + } + + MethodInfo GetStaticMethod(string methodName, Expression left, Expression right) + { + return left.Type.GetMethod(methodName, new[] { left.Type, right.Type }); + } + + Expression GenerateStaticMethodCall(string methodName, Expression left, Expression right) + { + return Expression.Call(null, GetStaticMethod(methodName, left, right), new[] { left, right }); + } + + void SetTextPos(int pos) + { + textPos = pos; + ch = textPos < textLen ? text[textPos] : '\0'; + } + + void NextChar() + { + if (textPos < textLen) textPos++; + ch = textPos < textLen ? text[textPos] : '\0'; + } + + void NextToken() + { + while (Char.IsWhiteSpace(ch)) NextChar(); + TokenId t; + int tokenPos = textPos; + switch (ch) + { + case '!': + NextChar(); + if (ch == '=') + { + NextChar(); + t = TokenId.ExclamationEqual; + } + else + { + t = TokenId.Exclamation; + } + break; + case '%': + NextChar(); + t = TokenId.Percent; + break; + case '&': + NextChar(); + if (ch == '&') + { + NextChar(); + t = TokenId.DoubleAmphersand; + } + else + { + t = TokenId.Amphersand; + } + break; + case '(': + NextChar(); + t = TokenId.OpenParen; + break; + case ')': + NextChar(); + t = TokenId.CloseParen; + break; + case '*': + NextChar(); + t = TokenId.Asterisk; + break; + case '+': + NextChar(); + t = TokenId.Plus; + break; + case ',': + NextChar(); + t = TokenId.Comma; + break; + case '-': + NextChar(); + t = TokenId.Minus; + break; + case '.': + NextChar(); + t = TokenId.Dot; + break; + case '/': + NextChar(); + t = TokenId.Slash; + break; + case ':': + NextChar(); + t = TokenId.Colon; + break; + case '<': + NextChar(); + if (ch == '=') + { + NextChar(); + t = TokenId.LessThanEqual; + } 
+ else if (ch == '>') + { + NextChar(); + t = TokenId.LessGreater; + } + else + { + t = TokenId.LessThan; + } + break; + case '=': + NextChar(); + if (ch == '=') + { + NextChar(); + t = TokenId.DoubleEqual; + } + else + { + t = TokenId.Equal; + } + break; + case '>': + NextChar(); + if (ch == '=') + { + NextChar(); + t = TokenId.GreaterThanEqual; + } + else + { + t = TokenId.GreaterThan; + } + break; + case '?': + NextChar(); + t = TokenId.Question; + break; + case '[': + NextChar(); + t = TokenId.OpenBracket; + break; + case ']': + NextChar(); + t = TokenId.CloseBracket; + break; + case '|': + NextChar(); + if (ch == '|') + { + NextChar(); + t = TokenId.DoubleBar; + } + else + { + t = TokenId.Bar; + } + break; + case '"': + case '\'': + char quote = ch; + do + { + NextChar(); + while (textPos < textLen && ch != quote) NextChar(); + if (textPos == textLen) + throw ParseError(textPos, Res.UnterminatedStringLiteral); + NextChar(); + } while (ch == quote); + t = TokenId.StringLiteral; + break; + default: + if (Char.IsLetter(ch) || ch == '@' || ch == '_') + { + do + { + NextChar(); + } while (Char.IsLetterOrDigit(ch) || ch == '_'); + t = TokenId.Identifier; + break; + } + if (Char.IsDigit(ch)) + { + t = TokenId.IntegerLiteral; + do + { + NextChar(); + } while (Char.IsDigit(ch)); + if (ch == '.') + { + t = TokenId.RealLiteral; + NextChar(); + ValidateDigit(); + do + { + NextChar(); + } while (Char.IsDigit(ch)); + } + if (ch == 'E' || ch == 'e') + { + t = TokenId.RealLiteral; + NextChar(); + if (ch == '+' || ch == '-') NextChar(); + ValidateDigit(); + do + { + NextChar(); + } while (Char.IsDigit(ch)); + } + if (ch == 'F' || ch == 'f') NextChar(); + break; + } + if (textPos == textLen) + { + t = TokenId.End; + break; + } + throw ParseError(textPos, Res.InvalidCharacter, ch); + } + token.id = t; + token.text = text.Substring(tokenPos, textPos - tokenPos); + token.pos = tokenPos; + } + + bool TokenIdentifierIs(string id) + { + return token.id == TokenId.Identifier && 
String.Equals(id, token.text, StringComparison.OrdinalIgnoreCase); + } + + string GetIdentifier() + { + ValidateToken(TokenId.Identifier, Res.IdentifierExpected); + string id = token.text; + if (id.Length > 1 && id[0] == '@') id = id.Substring(1); + return id; + } + + void ValidateDigit() + { + if (!Char.IsDigit(ch)) throw ParseError(textPos, Res.DigitExpected); + } + + void ValidateToken(TokenId t, string errorMessage) + { + if (token.id != t) throw ParseError(errorMessage); + } + + void ValidateToken(TokenId t) + { + if (token.id != t) throw ParseError(Res.SyntaxError); + } + + Exception ParseError(string format, params object[] args) + { + return ParseError(token.pos, format, args); + } + + Exception ParseError(int pos, string format, params object[] args) + { + return new ParseException(string.Format(System.Globalization.CultureInfo.CurrentCulture, format, args), pos); + } + + static Dictionary CreateKeywords() + { + Dictionary d = new Dictionary(StringComparer.OrdinalIgnoreCase); + d.Add("true", trueLiteral); + d.Add("false", falseLiteral); + d.Add("null", nullLiteral); + //d.Add(keywordIt, keywordIt); + //d.Add(keywordIif, keywordIif); + //d.Add(keywordNew, keywordNew); + foreach (Type type in predefinedTypes) d.Add(type.Name, type); + return d; + } + } + + static class Res + { + public const string DuplicateIdentifier = "The identifier '{0}' was defined more than once"; + public const string ExpressionTypeMismatch = "Expression of type '{0}' expected"; + public const string ExpressionExpected = "Expression expected"; + public const string InvalidCharacterLiteral = "Character literal must contain exactly one character"; + public const string InvalidIntegerLiteral = "Invalid integer literal '{0}'"; + public const string InvalidRealLiteral = "Invalid real literal '{0}'"; + public const string UnknownIdentifier = "Unknown identifier '{0}'"; + public const string NoItInScope = "No 'it' is in scope"; + public const string IifRequiresThreeArgs = "The 'iif' function 
requires three arguments"; + public const string FirstExprMustBeBool = "The first expression must be of type 'Boolean'"; + public const string BothTypesConvertToOther = "Both of the types '{0}' and '{1}' convert to the other"; + public const string NeitherTypeConvertsToOther = "Neither of the types '{0}' and '{1}' converts to the other"; + public const string MissingAsClause = "Expression is missing an 'as' clause"; + public const string ArgsIncompatibleWithLambda = "Argument list incompatible with lambda expression"; + public const string TypeHasNoNullableForm = "Type '{0}' has no nullable form"; + public const string NoMatchingConstructor = "No matching constructor in type '{0}'"; + public const string AmbiguousConstructorInvocation = "Ambiguous invocation of '{0}' constructor"; + public const string CannotConvertValue = "A value of type '{0}' cannot be converted to type '{1}'"; + public const string NoApplicableMethod = "No applicable method '{0}' exists in type '{1}'"; + public const string MethodsAreInaccessible = "Methods on type '{0}' are not accessible"; + public const string MethodIsVoid = "Method '{0}' in type '{1}' does not return a value"; + public const string AmbiguousMethodInvocation = "Ambiguous invocation of method '{0}' in type '{1}'"; + public const string UnknownPropertyOrField = "No property or field '{0}' exists in type '{1}'"; + public const string NoApplicableAggregate = "No applicable aggregate method '{0}' exists"; + public const string CannotIndexMultiDimArray = "Indexing of multi-dimensional arrays is not supported"; + public const string InvalidIndex = "Array index must be an integer expression"; + public const string NoApplicableIndexer = "No applicable indexer exists in type '{0}'"; + public const string AmbiguousIndexerInvocation = "Ambiguous invocation of indexer in type '{0}'"; + public const string IncompatibleOperand = "Operator '{0}' incompatible with operand type '{1}'"; + public const string IncompatibleOperands = "Operator 
'{0}' incompatible with operand types '{1}' and '{2}'"; + public const string UnterminatedStringLiteral = "Unterminated string literal"; + public const string InvalidCharacter = "Syntax error '{0}'"; + public const string DigitExpected = "Digit expected"; + public const string SyntaxError = "Syntax error"; + public const string TokenExpected = "{0} expected"; + public const string ParseExceptionFormat = "{0} (at index {1})"; + public const string ColonExpected = "':' expected"; + public const string OpenParenExpected = "'(' expected"; + public const string CloseParenOrOperatorExpected = "')' or operator expected"; + public const string CloseParenOrCommaExpected = "')' or ',' expected"; + public const string DotOrOpenParenExpected = "'.' or '(' expected"; + public const string OpenBracketExpected = "'[' expected"; + public const string CloseBracketOrCommaExpected = "']' or ',' expected"; + public const string IdentifierExpected = "Identifier expected"; + } +} diff --git a/RaptorDB/Views/LINQQuery.cs b/RaptorDB/Views/LINQQuery.cs index 925e071..03c70f2 100644 --- a/RaptorDB/Views/LINQQuery.cs +++ b/RaptorDB/Views/LINQQuery.cs @@ -1,189 +1,190 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Linq.Expressions; -using System.Reflection; - -namespace RaptorDB.Views -{ - //FEATURE : handle Contains, StartsWith, Between predicates - - delegate WAHBitArray QueryFromTo(string colname, object from, object to); - delegate WAHBitArray QueryExpression(string colname, RDBExpression exp, object from); - - internal class QueryVisitor : ExpressionVisitor - { - public QueryVisitor(QueryExpression express) - { - qexpression = express; - } - public Stack _stack = new Stack(); - public Stack _bitmap = new Stack(); - //QueryFromTo qfromto; - QueryExpression qexpression; - - protected override Expression VisitBinary(BinaryExpression b) - { - var m = this.Visit(b.Left); - if (m == null) // VB.net sty;e linq for string compare - 
return b.Right; - ExpressionType t = b.NodeType; - - if (t == ExpressionType.Equal || t == ExpressionType.NotEqual || - t == ExpressionType.LessThan || t == ExpressionType.LessThanOrEqual || - t == ExpressionType.GreaterThan || t == ExpressionType.GreaterThanOrEqual) - _stack.Push(b.NodeType); - - - this.Visit(b.Right); - t = b.NodeType; - if (t == ExpressionType.Equal || t == ExpressionType.NotEqual || - t == ExpressionType.LessThanOrEqual || t == ExpressionType.LessThan || - t == ExpressionType.GreaterThanOrEqual || t == ExpressionType.GreaterThan - ) - { - // binary expression - object lv = _stack.Pop(); - ExpressionType lo = (ExpressionType)_stack.Pop(); - object ln = _stack.Pop(); - RDBExpression exp = RDBExpression.Equal; - // FEATURE : add contains , between, startswith - if (lo == ExpressionType.LessThan) exp = RDBExpression.Less; - else if (lo == ExpressionType.LessThanOrEqual) exp = RDBExpression.LessEqual; - else if (lo == ExpressionType.GreaterThan) exp = RDBExpression.Greater; - else if (lo == ExpressionType.GreaterThanOrEqual) exp = RDBExpression.GreaterEqual; - else if (lo == ExpressionType.NotEqual) exp = RDBExpression.NotEqual; - - _bitmap.Push(qexpression("" + ln, exp, lv)); - } - - if (t == ExpressionType.And || t == ExpressionType.AndAlso || - t == ExpressionType.Or || t == ExpressionType.OrElse) - { - if (_bitmap.Count > 1) - { - // do bitmap operations - WAHBitArray r = (WAHBitArray)_bitmap.Pop(); - WAHBitArray l = (WAHBitArray)_bitmap.Pop(); - - if (t == ExpressionType.And || t == ExpressionType.AndAlso) - _bitmap.Push(r.And(l)); - if (t == ExpressionType.Or || t == ExpressionType.OrElse) - _bitmap.Push(r.Or(l)); - } - else - { - // single bitmap operation - } - } - return b; - } - - protected override Expression VisitMethodCall(MethodCallExpression m) - { - string s = m.ToString(); - // VB.net : e.g. 
CompareString(x.NoCase, "Me 4", False) - if(s.StartsWith("CompareString")) - { - var left = m.Arguments[0]; - // Removes dot if any - var leftStr = left.ToString().Substring(left.ToString().IndexOf('.') + 1); - var right = m.Arguments[1].ToString().Replace("\"", String.Empty); - RDBExpression exp = RDBExpression.Equal; - _bitmap.Push(qexpression("" + leftStr, exp, right)); - return null; - } - string mc = s.Substring(s.IndexOf('.') + 1); - if (mc.Contains("Between")) - { - // TODO : add code for between parsing here - - string name = m.Arguments[0].ToString().Split('.')[1]; - object from = GetValueForMember(m.Arguments[1]); - object to = GetValueForMember(m.Arguments[2]); - //var bits = qfromto(name, from, to); - } - else - _stack.Push(mc); - - return m; - } - - private object GetValueForMember(object m) - { - object val = null; - var f = m as ConstantExpression; - if (f != null) - return f.Value; - - var mm = m as MemberExpression; - if (mm.NodeType == ExpressionType.MemberAccess) - { - Type tt = mm.Expression.Type; - val = tt.InvokeMember(mm.Member.Name, BindingFlags.GetField | - BindingFlags.GetProperty | - BindingFlags.Public | - BindingFlags.NonPublic | - BindingFlags.Instance | - BindingFlags.Static, null, (mm.Expression as ConstantExpression).Value, null); - } - return val; - } - - protected override Expression VisitMember(MemberExpression m) - { - var e = base.VisitMember(m); - var c = m.Expression as ConstantExpression; - if (c != null) - { - Type t = c.Value.GetType(); - var x = t.InvokeMember(m.Member.Name, BindingFlags.GetField | - BindingFlags.GetProperty | - BindingFlags.Public | - BindingFlags.NonPublic | - BindingFlags.Instance | - BindingFlags.Static, null, c.Value, null); - _stack.Push(x); - } - - if (m.Expression != null) - { - if (m.Expression.NodeType == ExpressionType.Parameter) // property - _stack.Push(m.Member.Name); - else if (m.Expression.NodeType == ExpressionType.MemberAccess) // obj.property - { - Type t = m.Expression.Type; - var val 
= t.InvokeMember(m.Member.Name, BindingFlags.GetField | - BindingFlags.GetProperty | - BindingFlags.Public | - BindingFlags.NonPublic | - BindingFlags.Instance | - BindingFlags.Static, null, _stack.Pop(), null); - _stack.Push(val); - } - } - return e; - } - - protected override Expression VisitConstant(ConstantExpression c) - { - IQueryable q = c.Value as IQueryable; - if (q != null) - { - _stack.Push(q.ElementType.Name); - } - else if (c.Value == null) - { - _stack.Push(null); - } - else - { - Type t = c.Value.GetType(); - if (t.IsValueType || t == typeof(string)) - _stack.Push(c.Value); - } - return c; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Linq.Expressions; +using System.Reflection; +using RaptorDB.Common; + +namespace RaptorDB.Views +{ + //FEATURE : handle Contains, StartsWith, Between predicates + + delegate WahBitArray QueryFromTo(string colname, object from, object to); + delegate WahBitArray QueryExpression(string colname, RDBExpression exp, object from); + + internal class QueryVisitor : ExpressionVisitor + { + public QueryVisitor(QueryExpression express) + { + qexpression = express; + } + public Stack _stack = new Stack(); + public Stack _bitmap = new Stack(); + //QueryFromTo qfromto; + QueryExpression qexpression; + + protected override Expression VisitBinary(BinaryExpression b) + { + var m = this.Visit(b.Left); + if (m == null) // VB.net sty;e linq for string compare + return b.Right; + ExpressionType t = b.NodeType; + + if (t == ExpressionType.Equal || t == ExpressionType.NotEqual || + t == ExpressionType.LessThan || t == ExpressionType.LessThanOrEqual || + t == ExpressionType.GreaterThan || t == ExpressionType.GreaterThanOrEqual) + _stack.Push(b.NodeType); + + + this.Visit(b.Right); + t = b.NodeType; + if (t == ExpressionType.Equal || t == ExpressionType.NotEqual || + t == ExpressionType.LessThanOrEqual || t == ExpressionType.LessThan || + t == 
ExpressionType.GreaterThanOrEqual || t == ExpressionType.GreaterThan + ) + { + // binary expression + object lv = _stack.Pop(); + ExpressionType lo = (ExpressionType)_stack.Pop(); + object ln = _stack.Pop(); + RDBExpression exp = RDBExpression.Equal; + // FEATURE : add contains , between, startswith + if (lo == ExpressionType.LessThan) exp = RDBExpression.Less; + else if (lo == ExpressionType.LessThanOrEqual) exp = RDBExpression.LessEqual; + else if (lo == ExpressionType.GreaterThan) exp = RDBExpression.Greater; + else if (lo == ExpressionType.GreaterThanOrEqual) exp = RDBExpression.GreaterEqual; + else if (lo == ExpressionType.NotEqual) exp = RDBExpression.NotEqual; + + _bitmap.Push(qexpression(ln.ToString(), exp, lv)); + } + + if (t == ExpressionType.And || t == ExpressionType.AndAlso || + t == ExpressionType.Or || t == ExpressionType.OrElse) + { + if (_bitmap.Count > 1) + { + // do bitmap operations + WahBitArray r = (WahBitArray)_bitmap.Pop(); + WahBitArray l = (WahBitArray)_bitmap.Pop(); + + if (t == ExpressionType.And || t == ExpressionType.AndAlso) + _bitmap.Push(r.And(l)); + if (t == ExpressionType.Or || t == ExpressionType.OrElse) + _bitmap.Push(r.Or(l)); + } + else + { + // single bitmap operation + } + } + return b; + } + + protected override Expression VisitMethodCall(MethodCallExpression m) + { + string s = m.ToString(); + // VB.net : e.g. 
CompareString(x.NoCase, "Me 4", False) + if(s.StartsWith("CompareString")) + { + var left = m.Arguments[0]; + // Removes dot if any + var leftStr = left.ToString().Substring(left.ToString().IndexOf('.') + 1); + var right = m.Arguments[1].ToString().Replace("\"", String.Empty); + RDBExpression exp = RDBExpression.Equal; + _bitmap.Push(qexpression(leftStr, exp, right)); + return null; + } + string mc = s.Substring(s.IndexOf('.') + 1); + if (mc.Contains("Between")) + { + // TODO : add code for between parsing here + + string name = m.Arguments[0].ToString().Split('.')[1]; + object from = GetValueForMember(m.Arguments[1]); + object to = GetValueForMember(m.Arguments[2]); + //var bits = qfromto(name, from, to); + } + else + _stack.Push(mc); + + return m; + } + + private object GetValueForMember(object m) + { + object val = null; + var f = m as ConstantExpression; + if (f != null) + return f.Value; + + var mm = m as MemberExpression; + if (mm.NodeType == ExpressionType.MemberAccess) + { + Type tt = mm.Expression.Type; + val = tt.InvokeMember(mm.Member.Name, BindingFlags.GetField | + BindingFlags.GetProperty | + BindingFlags.Public | + BindingFlags.NonPublic | + BindingFlags.Instance | + BindingFlags.Static, null, (mm.Expression as ConstantExpression).Value, null); + } + return val; + } + + protected override Expression VisitMember(MemberExpression m) + { + var e = base.VisitMember(m); + var c = m.Expression as ConstantExpression; + if (c != null) + { + Type t = c.Value.GetType(); + var x = t.InvokeMember(m.Member.Name, BindingFlags.GetField | + BindingFlags.GetProperty | + BindingFlags.Public | + BindingFlags.NonPublic | + BindingFlags.Instance | + BindingFlags.Static, null, c.Value, null); + _stack.Push(x); + } + + if (m.Expression != null) + { + if (m.Expression.NodeType == ExpressionType.Parameter) // property + _stack.Push(m.Member.Name); + else if (m.Expression.NodeType == ExpressionType.MemberAccess) // obj.property + { + Type t = m.Expression.Type; + var val = 
t.InvokeMember(m.Member.Name, BindingFlags.GetField | + BindingFlags.GetProperty | + BindingFlags.Public | + BindingFlags.NonPublic | + BindingFlags.Instance | + BindingFlags.Static, null, _stack.Pop(), null); + _stack.Push(val); + } + } + return e; + } + + protected override Expression VisitConstant(ConstantExpression c) + { + IQueryable q = c.Value as IQueryable; + if (q != null) + { + _stack.Push(q.ElementType.Name); + } + else if (c.Value == null) + { + _stack.Push(null); + } + else + { + Type t = c.Value.GetType(); + if (t.IsValueType || t == typeof(string)) + _stack.Push(c.Value); + } + return c; + } + } +} diff --git a/RaptorDB/Views/TaskQueue.cs b/RaptorDB/Views/TaskQueue.cs index ea2b313..3e1dbc4 100644 --- a/RaptorDB/Views/TaskQueue.cs +++ b/RaptorDB/Views/TaskQueue.cs @@ -1,67 +1,67 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Timers; -using System.Threading.Tasks; -using System.Collections.Concurrent; - -namespace RaptorDB.Views -{ - internal class TaskQueue - { - public TaskQueue() - { - _timer = new Timer(); - _timer.Interval = Global.TaskCleanupTimerSeconds * 1000; - _timer.Elapsed += new ElapsedEventHandler(_timer_Elapsed); - _timer.Enabled = true; - _log.Debug("TaskQueue starting"); - } - - private ILog _log = LogManager.GetLogger(typeof(TaskQueue)); - private object _lock = new object(); - private bool _shuttingdown = false; - private Timer _timer; - private ConcurrentQueue _que = new ConcurrentQueue(); - - void _timer_Elapsed(object sender, ElapsedEventArgs e) - { - lock (_lock) - { - while (_que.Count > 0) - { - //_log.Debug("in queue cleanup, count = " + _que.Count); - if (_shuttingdown) - break; - Task t = null; - if (_que.TryPeek(out t)) - { - if (t.IsCompleted || t.IsCanceled || t.IsFaulted) - _que.TryDequeue(out t); - else - break; - } - else - break; - } - } - } - - public void AddTask(Action action) - { - if (_shuttingdown == false) - 
_que.Enqueue(Task.Factory.StartNew(action)); - } - - public void Shutdown() - { - _log.Debug("TaskQueue shutdown"); - // wait for tasks to finish - _shuttingdown = true; - _timer.Enabled = false; - if (_que.Count > 0) - Task.WaitAll(_que.ToArray()); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Timers; +using System.Threading.Tasks; +using System.Collections.Concurrent; + +namespace RaptorDB.Views +{ + internal class TaskQueue + { + public TaskQueue() + { + _timer = new Timer(); + _timer.Interval = Global.TaskCleanupTimerSeconds * 1000; + _timer.Elapsed += new ElapsedEventHandler(_timer_Elapsed); + _timer.Enabled = true; + _log.Debug("TaskQueue starting"); + } + + private ILog _log = LogManager.GetLogger(typeof(TaskQueue)); + private object _lock = new object(); + private bool _shuttingdown = false; + private Timer _timer; + private ConcurrentQueue _que = new ConcurrentQueue(); + + void _timer_Elapsed(object sender, ElapsedEventArgs e) + { + lock (_lock) + { + while (_que.Count > 0) + { + //_log.Debug("in queue cleanup, count = " + _que.Count); + if (_shuttingdown) + break; + Task t = null; + if (_que.TryPeek(out t)) + { + if (t.IsCompleted || t.IsCanceled || t.IsFaulted) + _que.TryDequeue(out t); + else + break; + } + else + break; + } + } + } + + public void AddTask(Action action) + { + if (_shuttingdown == false) + _que.Enqueue(Task.Factory.StartNew(action)); + } + + public void Shutdown() + { + _log.Debug("TaskQueue shutdown"); + // wait for tasks to finish + _shuttingdown = true; + _timer.Enabled = false; + if (_que.Count > 0) + Task.WaitAll(_que.ToArray()); + } + } +} diff --git a/RaptorDB/Views/ViewHandler.cs b/RaptorDB/Views/ViewHandler.cs index 7f471c7..d8fb0be 100644 --- a/RaptorDB/Views/ViewHandler.cs +++ b/RaptorDB/Views/ViewHandler.cs @@ -1,1337 +1,936 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.IO; -using 
System.Collections; -using System.Linq.Expressions; -using System.Threading.Tasks; -using System.Reflection; -using System.Reflection.Emit; -using Microsoft.CSharp; -using System.CodeDom.Compiler; -using System.ComponentModel; -using RaptorDB.Common; -using System.Threading; -using fastJSON; - -namespace RaptorDB.Views -{ - public class ViewRowDefinition - { - public ViewRowDefinition() - { - Columns = new List>(); - } - public string Name { get; set; } - public List> Columns { get; set; } - - public void Add(string name, Type type) - { - Columns.Add(new KeyValuePair(name, type)); - } - } - - internal class tran_data - { - public Guid docid; - public Dictionary> rows; - } - - internal class ViewHandler - { - private ILog _log = LogManager.GetLogger(typeof(ViewHandler)); - - public ViewHandler(string path, ViewManager manager) - { - _Path = path; - _viewmanager = manager; - } - - private string _S = Path.DirectorySeparatorChar.ToString(); - private string _Path = ""; - private ViewManager _viewmanager; - internal ViewBase _view; - private Dictionary _indexes = new Dictionary(); - private StorageFile _viewData; - private BoolIndex _deletedRows; - private string _docid = "docid"; - private List _colnames = new List(); - private RowFill _rowfiller; - private ViewRowDefinition _schema; - private SafeDictionary _transactions = new SafeDictionary(); - private SafeDictionary _nocase = new SafeDictionary(); - private Dictionary _idxlen = new Dictionary(); - - private System.Timers.Timer _saveTimer; - Type basetype; // used for mapper - dynamic mapper; - bool _isDirty = false; - private string _dirtyFilename = "temp.$"; - private bool _stsaving = false; - private int _RaptorDBVersion = 3; // used for engine changes to views - private string _RaptorDBVersionFilename = "RaptorDB.version"; - - private object _stlock = new object(); - void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) - { - lock (_stlock) - { - _stsaving = true; - foreach (var i in 
_indexes) - i.Value.SaveIndex(); - - _deletedRows.SaveIndex(); - _stsaving = false; - } - } - - public Type GetFireOnType() - { - return basetype; - } - - internal void SetView(View view, IDocStorage docs) - { - bool rebuild = false; - _view = view; - // generate schemacolumns from schema - GenerateSchemaColumns(_view); - - if (_Path.EndsWith(_S) == false) _Path += _S; - _Path += view.Name + _S; - if (Directory.Exists(_Path) == false) - { - Directory.CreateDirectory(_Path); - rebuild = true; - } - else - { - // read version file and check with view - int version = 0; - if (File.Exists(_Path + _view.Name + ".version")) - { - int.TryParse(File.ReadAllText(_Path + _view.Name + ".version"), out version); - if (version != view.Version) - { - _log.Debug("Newer view version detected"); - rebuild = true; - } - } - } - - if (File.Exists(_Path + _dirtyFilename)) - { - _log.Debug("Last shutdown failed, rebuilding view : " + _view.Name); - rebuild = true; - } - - if (File.Exists(_Path + _RaptorDBVersionFilename)) - { - // check view engine version - string s = File.ReadAllText(_Path + _RaptorDBVersionFilename); - int version = 0; - int.TryParse(s, out version); - if (version != _RaptorDBVersion) - { - _log.Debug("RaptorDB view engine upgrade, rebuilding view : " + _view.Name); - rebuild = true; - } - } - else - { - _log.Debug("RaptorDB view engine upgrade, rebuilding view : " + _view.Name); - rebuild = true; - } - - if (rebuild) - { - _log.Debug("Deleting old view data folder = " + view.Name); - Directory.Delete(_Path, true); - Directory.CreateDirectory(_Path); - } - // load indexes here - CreateLoadIndexes(_schema); - - _deletedRows = new BoolIndex(_Path, _view.Name , ".deleted"); - - _viewData = new StorageFile(_Path + view.Name + ".mgdat"); - - CreateResultRowFiller(); - - mapper = view.Mapper; - // looking for the T in View - if (view.GetType().GetGenericArguments().Length == 1) // HACK : kludge change when possible - basetype = view.GetType().GetGenericArguments()[0]; - 
else - { - // or recurse until found - basetype = view.GetType().BaseType.GetGenericArguments()[0]; - } - - if (rebuild) - Task.Factory.StartNew(() => RebuildFromScratch(docs)); - - _saveTimer = new System.Timers.Timer(); - _saveTimer.AutoReset = true; - _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); - _saveTimer.Interval = Global.SaveIndexToDiskTimerSeconds * 1000; - _saveTimer.Start(); - } - - internal void FreeMemory() - { - _log.Debug("free memory : " + _view.Name); - foreach (var i in _indexes) - i.Value.FreeMemory(); - - _deletedRows.FreeMemory(); - InvalidateSortCache(); - } - - internal void Commit(int ID) - { - tran_data data = null; - // save data to indexes - if (_transactions.TryGetValue(ID, out data)) - { - // delete any items with docid in view - if (_view.DeleteBeforeInsert) - DeleteRowsWith(data.docid); - SaveAndIndex(data.rows); - } - // remove in memory data - _transactions.Remove(ID); - } - - internal void RollBack(int ID) - { - // remove in memory data - _transactions.Remove(ID); - } - - internal void Insert(Guid guid, T doc) - { - apimapper api = new apimapper(_viewmanager, this); - - if (basetype == doc.GetType()) - { - View view = _view as View; - if (view.Mapper != null) - view.Mapper(api, guid, doc); - } - else if (mapper != null) - mapper(api, guid, doc); - - // map objects to rows - foreach (var d in api.emitobj) - api.emit.Add(d.Key, ExtractRows(d.Value)); - - // delete any items with docid in view - if (_view.DeleteBeforeInsert) - DeleteRowsWith(guid); - - SaveAndIndex(api.emit); - } - - private void SaveAndIndex(Dictionary> rows) - { - foreach (var d in rows) - { - // insert new items into view - InsertRowsWithIndexUpdate(d.Key, d.Value); - } - InvalidateSortCache(); - } - - internal bool InsertTransaction(Guid docid, T doc) - { - apimapper api = new apimapper(_viewmanager, this); - if (basetype == doc.GetType()) - { - View view = (View)_view; - - try - { - if (view.Mapper != null) - view.Mapper(api, 
docid, doc); - } - catch (Exception ex) - { - _log.Error(ex); - return false; - } - } - else if (mapper != null) - mapper(api, docid, doc); - - if (api._RollBack == true) - return false; - - // map emitobj -> rows - foreach (var d in api.emitobj) - api.emit.Add(d.Key, ExtractRows(d.Value)); - - //Dictionary> rows = new Dictionary>(); - tran_data data = new tran_data(); - if (_transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out data)) - { - // TODO : exists -> merge data?? - } - else - { - data = new tran_data(); - data.docid = docid; - data.rows = api.emit; - _transactions.Add(Thread.CurrentThread.ManagedThreadId, data); - } - - return true; - } - - // FEATURE : add query caching here - SafeDictionary _lambdacache = new SafeDictionary(); - internal Result Query(string filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - internal Result Query(string filter, int start, int count, string orderby) - { - filter = filter.Trim(); - if (filter == "") - return Query(start, count, orderby); - - DateTime dt = FastDateTime.Now; - _log.Debug("query : " + _view.Name); - _log.Debug("query : " + filter); - _log.Debug("orderby : " + orderby); - - WAHBitArray ba = new WAHBitArray(); - var delbits = _deletedRows.GetBits(); - if (filter != "") - { - LambdaExpression le = null; - if (_lambdacache.TryGetValue(filter, out le) == false) - { - le = System.Linq.Dynamic.DynamicExpression.ParseLambda(_view.Schema, typeof(bool), filter, null); - _lambdacache.Add(filter, le); - } - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(le.Body); - - ba = ((WAHBitArray)qv._bitmap.Pop()).AndNot(delbits); - } - else - ba = WAHBitArray.Fill(_viewData.Count()).AndNot(delbits); - - var order = SortBy(orderby); - bool desc = false; - if (orderby.ToLower().Contains(" desc")) - desc = true; - _log.Debug("query bitmap done (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - dt = FastDateTime.Now; - // exec query return rows - return 
ReturnRows(ba, null, start, count, order, desc); - } - - internal Result Query(Expression> filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - // FEATURE : add query caching here - internal Result Query(Expression> filter, int start, int count, string orderby) - { - if (filter == null) - return Query(start, count); - - DateTime dt = FastDateTime.Now; - _log.Debug("query : " + _view.Name); - - WAHBitArray ba = new WAHBitArray(); - - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(filter); - var delbits = _deletedRows.GetBits(); - ba = ((WAHBitArray)qv._bitmap.Pop()).AndNot(delbits); - List trows = null; - if (_viewmanager.inTransaction()) - { - // query from transaction own data - tran_data data = null; - if (_transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out data)) - { - List rrows = new List(); - foreach (var kv in data.rows) - { - foreach (var r in kv.Value) - { - object o = FastCreateObject(_view.Schema); - rrows.Add((T)_rowfiller(o, r)); - } - } - trows = rrows.FindAll(filter.Compile()); - } - } - - var order = SortBy(orderby); - bool desc = false; - if (orderby.ToLower().Contains(" desc")) - desc = true; - _log.Debug("query bitmap done (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - dt = FastDateTime.Now; - // exec query return rows - return ReturnRows(ba, trows, start, count, order, desc); - } - - internal Result Query(int start, int count) - { - return Query(start, count, ""); - } - - internal Result Query(int start, int count, string orderby) - { - // no filter query -> just show all the data - DateTime dt = FastDateTime.Now; - _log.Debug("query : " + _view.Name); - int totalviewrows = _viewData.Count(); - List rows = new List(); - Result ret = new Result(); - int skip = start; - int cc = 0; - WAHBitArray del = _deletedRows.GetBits(); - ret.TotalCount = totalviewrows - (int)del.CountOnes(); - - var order = SortBy(orderby); - bool desc = false; - if 
(orderby.ToLower().Contains(" desc")) - desc = true; - if (order.Count == 0) - for (int i = 0; i < totalviewrows; i++) - order.Add(i); - - if (count == -1) - count = totalviewrows; - int len = order.Count; - if (desc == false) - { - for (int idx = 0; idx < len; idx++) - { - extractrowobject(count, rows, ref skip, ref cc, del, order, idx); - if (cc == count) break; - } - } - else - { - for (int idx = len - 1; idx >= 0; idx--) - { - extractrowobject(count, rows, ref skip, ref cc, del, order, idx); - if (cc == count) break; - } - } - - _log.Debug("query rows fetched (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - _log.Debug("query rows count : " + rows.Count.ToString("#,0")); - ret.OK = true; - ret.Count = rows.Count; - //ret.TotalCount = rows.Count; - ret.Rows = rows; - return ret; - } - - private void extractrowobject(int count, List rows, ref int skip, ref int cc, WAHBitArray del, List order, int idx) - { - int i = order[idx]; - if (del.Get(i) == false) - { - if (skip > 0) - skip--; - else - { - bool b = OutputRow(rows, i); - if (b && count > 0) - cc++; - } - } - } - - internal void Shutdown() - { - try - { - lock (_stlock) - _saveTimer.Enabled = false; - while (_stsaving) - Thread.Sleep(1); - - if (_rebuilding) - _log.Debug("Waiting for view rebuild to finish... 
: " + _view.Name); - - while (_rebuilding) - Thread.Sleep(50); - - _log.Debug("Shutting down Viewhandler"); - // shutdown indexes - foreach (var v in _indexes) - { - _log.Debug("Shutting down view index : " + v.Key); - v.Value.Shutdown(); - } - // save deletedbitmap - _deletedRows.Shutdown(); - - _viewData.Shutdown(); - - // write view version - File.WriteAllText(_Path + _view.Name + ".version", _view.Version.ToString()); - - File.WriteAllText(_Path + _RaptorDBVersionFilename, _RaptorDBVersion.ToString()); - // remove dirty file - if (File.Exists(_Path + _dirtyFilename)) - File.Delete(_Path + _dirtyFilename); - _log.Debug("Viewhandler shutdown done."); - } - catch (Exception ex) - { - _log.Error(ex); - } - } - - internal void Delete(Guid docid) - { - DeleteRowsWith(docid); - - InvalidateSortCache(); - } - - #region [ private methods ] - - private void CreateResultRowFiller() - { - _rowfiller = (RowFill)CreateRowFillerDelegate(_view.Schema); - // init the row create - _createrow = null; - FastCreateObject(_view.Schema); - } - - public delegate object RowFill(object o, object[] data); - public static RowFill CreateRowFillerDelegate(Type objtype) - { - DynamicMethod dynMethod = new DynamicMethod("_", typeof(object), new Type[] { typeof(object), typeof(object[]) }); - ILGenerator il = dynMethod.GetILGenerator(); - var local = il.DeclareLocal(objtype); - il.Emit(OpCodes.Ldarg_0); - il.Emit(OpCodes.Castclass, objtype); - il.Emit(OpCodes.Stloc, local); - int i = 1; - - foreach (var c in objtype.GetFields()) - { - il.Emit(OpCodes.Ldloc, local); - il.Emit(OpCodes.Ldarg_1); - if (c.Name != "docid") - il.Emit(OpCodes.Ldc_I4, i++); - else - il.Emit(OpCodes.Ldc_I4, 0); - - il.Emit(OpCodes.Ldelem_Ref); - il.Emit(OpCodes.Unbox_Any, c.FieldType); - il.Emit(OpCodes.Stfld, c); - } - - foreach (var c in objtype.GetProperties()) - { - MethodInfo setMethod = c.GetSetMethod(); - il.Emit(OpCodes.Ldloc, local); - il.Emit(OpCodes.Ldarg_1); - if (c.Name != "docid") - il.Emit(OpCodes.Ldc_I4, 
i++); - else - il.Emit(OpCodes.Ldc_I4, 0); - il.Emit(OpCodes.Ldelem_Ref); - il.Emit(OpCodes.Unbox_Any, c.PropertyType); - il.EmitCall(OpCodes.Callvirt, setMethod, null); - } - - il.Emit(OpCodes.Ldloc, local); - il.Emit(OpCodes.Ret); - - return (RowFill)dynMethod.CreateDelegate(typeof(RowFill)); - } - - private Result ReturnRows(WAHBitArray ba, List trows, int start, int count, List orderby, bool descending) - { - DateTime dt = FastDateTime.Now; - List rows = new List(); - Result ret = new Result(); - int skip = start; - int c = 0; - ret.TotalCount = (int)ba.CountOnes(); - if (count == -1) count = ret.TotalCount; - if (count > 0) - { - int len = orderby.Count; - if (len > 0) - { - if (descending == false) - { - for (int idx = 0; idx < len; idx++) - { - extractsortrowobject(ba, count, orderby, rows, ref skip, ref c, idx); - if (c == count) break; - } - } - else - { - for (int idx = len - 1; idx >= 0; idx--) - { - extractsortrowobject(ba, count, orderby, rows, ref skip, ref c, idx); - if (c == count) break; - } - } - } - foreach (int i in ba.GetBitIndexes()) - { - if (c < count) - { - if (skip > 0) - skip--; - else - { - bool b = OutputRow(rows, i); - if (b && count > 0) - c++; - } - if (c == count) break; - } - } - } - if (trows != null) // TODO : move to start and decrement in count - foreach (var o in trows) - rows.Add(o); - _log.Debug("query rows fetched (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - _log.Debug("query rows count : " + rows.Count.ToString("#,0")); - ret.OK = true; - ret.Count = rows.Count; - ret.Rows = rows; - return ret; - } - - private void extractsortrowobject(WAHBitArray ba, int count, List orderby, List rows, ref int skip, ref int c, int idx) - { - int i = orderby[idx]; - if (ba.Get(i)) - { - if (skip > 0) - skip--; - else - { - bool b = OutputRow(rows, i); - if (b && count > 0) - c++; - } - ba.Set(i, false); - } - } - - private bool OutputRow(List rows, int i) - { - byte[] b = _viewData.ViewReadRawBytes(i); - if (b != null) - 
{ - object o = FastCreateObject(_view.Schema); - object[] data = (object[])fastBinaryJSON.BJSON.ToObject(b); - rows.Add((T)_rowfiller(o, data)); - return true; - } - return false; - } - - private Result ReturnRows2(WAHBitArray ba, List trows, int start, int count, List orderby, bool descending) - { - DateTime dt = FastDateTime.Now; - List rows = new List(); - Result ret = new Result(); - int skip = start; - int c = 0; - ret.TotalCount = (int)ba.CountOnes(); - if (count == -1) count = ret.TotalCount; - if (count > 0) - { - int len = orderby.Count; - if (len > 0) - { - if (descending == false) - { - for (int idx = 0; idx < len; idx++) //foreach (int i in orderby) - { - extractsortrowT(ba, count, orderby, rows, ref skip, ref c, idx); - if (c == count) break; - } - } - else - { - for (int idx = len - 1; idx >= 0; idx--) //foreach (int i in orderby) - { - extractsortrowT(ba, count, orderby, rows, ref skip, ref c, idx); - if (c == count) break; - } - } - } - foreach (int i in ba.GetBitIndexes()) - { - if (c < count) - { - if (skip > 0) - skip--; - else - { - bool b = OutputRow(rows, i); - if (b && count > 0) - c++; - } - if (c == count) break; - } - } - } - if (trows != null)// TODO : move to start and decrement in count - foreach (var o in trows) - rows.Add(o); - _log.Debug("query rows fetched (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - _log.Debug("query rows count : " + rows.Count.ToString("#,0")); - ret.OK = true; - ret.Count = rows.Count; - ret.Rows = rows; - return ret; - } - - private void extractsortrowT(WAHBitArray ba, int count, List orderby, List rows, ref int skip, ref int c, int idx) - { - int i = orderby[idx]; - if (ba.Get(i)) - { - if (skip > 0) - skip--; - else - { - bool b = OutputRow(rows, i); - if (b && count > 0) - c++; - } - ba.Set(i, false); - } - } - - private CreateRow _createrow = null; - private delegate object CreateRow(); - private object FastCreateObject(Type objtype) - { - try - { - if (_createrow != null) - return 
_createrow(); - else - { - DynamicMethod dynMethod = new DynamicMethod("_", objtype, null); - ILGenerator ilGen = dynMethod.GetILGenerator(); - - ilGen.Emit(OpCodes.Newobj, objtype.GetConstructor(Type.EmptyTypes)); - ilGen.Emit(OpCodes.Ret); - _createrow = (CreateRow)dynMethod.CreateDelegate(typeof(CreateRow)); - return _createrow(); - } - } - catch (Exception exc) - { - throw new Exception(string.Format("Failed to fast create instance for type '{0}' from assemebly '{1}'", - objtype.FullName, objtype.AssemblyQualifiedName), exc); - } - } - - MethodInfo insertmethod = null; - bool _rebuilding = false; - private void RebuildFromScratch(IDocStorage docs) - { - _rebuilding = true; - try - { - insertmethod = this.GetType().GetMethod("Insert", BindingFlags.Instance | BindingFlags.NonPublic); - _log.Debug("Rebuilding view from scratch..."); - _log.Debug("View = " + _view.Name); - DateTime dt = FastDateTime.Now; - - int c = docs.RecordCount(); - for (int i = 0; i < c; i++) - { - StorageItem meta = null; - object b = docs.GetObject(i, out meta); - if (meta != null && meta.isDeleted) - Delete(meta.key); - else - { - if (b != null) - { - object obj = b; - Type t = obj.GetType(); - if (t == typeof(View_delete)) - { - View_delete vd = (View_delete)obj; - if (vd.Viewname.ToLower() == this._view.Name.ToLower()) - ViewDelete(vd.Filter); - } - else if (t == typeof(View_insert)) - { - View_insert vi = (View_insert)obj; - if (vi.Viewname.ToLower() == this._view.Name.ToLower()) - ViewInsert(vi.ID, vi.RowObject); - } - else if (t.IsSubclassOf(basetype) || t == basetype) - { - var m = insertmethod.MakeGenericMethod(new Type[] { obj.GetType() }); - m.Invoke(this, new object[] { meta.key, obj }); - } - } - else - _log.Error("Doc is null : " + meta.key); - } - } - _log.Debug("rebuild view '" + _view.Name + "' done (s) = " + FastDateTime.Now.Subtract(dt).TotalSeconds); - - // write version.dat file when done - File.WriteAllText(_Path + _view.Name + ".version", _view.Version.ToString()); - } 
- catch (Exception ex) - { - _log.Error("Rebuilding View failed : " + _view.Name, ex); - } - _rebuilding = false; - } - - private object CreateObject(byte[] b) - { - if (b[0] < 32) - return fastBinaryJSON.BJSON.ToObject(b); - else - return fastJSON.JSON.ToObject(Encoding.ASCII.GetString(b)); - } - - private void CreateLoadIndexes(ViewRowDefinition viewRowDefinition) - { - int i = 0; - _indexes.Add(_docid, new TypeIndexes(_Path, _docid, 16)); - // load indexes - foreach (var c in viewRowDefinition.Columns) - { - if (c.Key != "docid") - _indexes.Add(_schema.Columns[i].Key, - CreateIndex( - _schema.Columns[i].Key, - _schema.Columns[i].Value)); - i++; - } - } - - private void GenerateSchemaColumns(ViewBase _view) - { - // generate schema columns from schema - _schema = new ViewRowDefinition(); - _schema.Name = _view.Name; - - foreach (var p in _view.Schema.GetProperties()) - { - Type t = p.PropertyType; - - if (_view.NoIndexingColumns.Contains(p.Name) || _view.NoIndexingColumns.Contains(p.Name.ToLower())) - { - _schema.Add(p.Name, typeof(NoIndexing)); - } - else - { - if (p.GetCustomAttributes(typeof(FullTextAttribute), true).Length > 0) - t = typeof(FullTextString); - if (_view.FullTextColumns.Contains(p.Name) || _view.FullTextColumns.Contains(p.Name.ToLower())) - t = typeof(FullTextString); - if (p.Name != "docid") - _schema.Add(p.Name, t); - - if (p.GetCustomAttributes(typeof(CaseInsensitiveAttribute), true).Length > 0) - _nocase.Add(p.Name, 0); - if (_view.CaseInsensitiveColumns.Contains(p.Name) || _view.CaseInsensitiveColumns.Contains(p.Name.ToLower())) - _nocase.Add(p.Name, 0); - - var a = p.GetCustomAttributes(typeof(StringIndexLength), false); - if (a.Length > 0) - { - byte l = (a[0] as StringIndexLength).Length; - _idxlen.Add(p.Name, l); - } - if (_view.StringIndexLength.ContainsKey(p.Name) || _view.StringIndexLength.ContainsKey(p.Name.ToLower())) - { - byte b = 0; - if (_view.StringIndexLength.TryGetValue(p.Name, out b)) - _idxlen.Add(p.Name, b); - if 
(_view.StringIndexLength.TryGetValue(p.Name.ToLower(), out b)) - _idxlen.Add(p.Name, b); - } - } - } - - foreach (var f in _view.Schema.GetFields()) - { - Type t = f.FieldType; - if (_view.NoIndexingColumns.Contains(f.Name) || _view.NoIndexingColumns.Contains(f.Name.ToLower())) - { - _schema.Add(f.Name, typeof(NoIndexing)); - } - else - { - if (f.GetCustomAttributes(typeof(FullTextAttribute), true).Length > 0) - t = typeof(FullTextString); - if (_view.FullTextColumns.Contains(f.Name) || _view.FullTextColumns.Contains(f.Name.ToLower())) - t = typeof(FullTextString); - if (f.Name != "docid") - _schema.Add(f.Name, t); - - if (f.GetCustomAttributes(typeof(CaseInsensitiveAttribute), true).Length > 0) - _nocase.Add(f.Name, 0); - if (_view.CaseInsensitiveColumns.Contains(f.Name) || _view.CaseInsensitiveColumns.Contains(f.Name.ToLower())) - _nocase.Add(f.Name, 0); - - var a = f.GetCustomAttributes(typeof(StringIndexLength), false); - if (a.Length > 0) - { - byte l = (a[0] as StringIndexLength).Length; - _idxlen.Add(f.Name, l); - } - if (_view.StringIndexLength.ContainsKey(f.Name) || _view.StringIndexLength.ContainsKey(f.Name.ToLower())) - { - byte b = 0; - if (_view.StringIndexLength.TryGetValue(f.Name, out b)) - _idxlen.Add(f.Name, b); - if (_view.StringIndexLength.TryGetValue(f.Name.ToLower(), out b)) - _idxlen.Add(f.Name, b); - } - } - } - _schema.Add("docid", typeof(Guid)); - - foreach (var s in _schema.Columns) - _colnames.Add(s.Key); - - // set column index for nocase - for (int i = 0; i < _colnames.Count; i++) - { - int j = 0; - if (_nocase.TryGetValue(_colnames[i], out j)) - _nocase[_colnames[i]] = i; - } - } - - private void InsertRowsWithIndexUpdate(Guid guid, List rows) - { - if (_isDirty == false) - WriteDirtyFile(); - - foreach (var row in rows) - { - object[] r = new object[row.Length + 1]; - r[0] = guid; - Array.Copy(row, 0, r, 1, row.Length); - byte[] b = fastBinaryJSON.BJSON.ToBJSON(r); - - int rownum = (int)_viewData.WriteRawData(b); - - // case 
insensitve columns here - foreach (var kv in _nocase) - row[kv.Value] = ("" + row[kv.Value]).ToLowerInvariant(); - - IndexRow(guid, row, rownum); - } - } - - private List ExtractRows(List rows) - { - List output = new List(); - // reflection match object properties to the schema row - - int colcount = _schema.Columns.Count; - - foreach (var obj in rows) - { - object[] r = new object[colcount]; - Getters[] getters = Reflection.Instance.GetGetters(obj.GetType(), true, null); - - for (int i = 0; i < colcount; i++) - { - var c = _schema.Columns[i]; - foreach (var g in getters) - { - //var g = getters[ii]; - if (g.Name == c.Key) - { - r[i] = g.Getter(obj); - break; - } - } - } - output.Add(r); - } - - return output; - } - - private void IndexRow(Guid docid, object[] row, int rownum) - { - int c = _colnames.Count - 1; // skip last docid - _indexes[_docid].Set(docid, rownum); - - for (int i = 0; i < c; i++) - { - object d = row[i]; - var idx = _indexes[_colnames[i]]; - if (idx != null) - idx.Set(d, rownum); - } - } - - private IIndex CreateIndex(string name, Type type) - { - if (type == typeof(NoIndexing)) - return new NoIndex(); - - if (type == typeof(FullTextString)) - return new FullTextIndex(_Path, name, false, true); - - else if (type == typeof(string)) - { - byte len = Global.DefaultStringKeySize; - if (_idxlen.TryGetValue(name, out len) == false) - len = Global.DefaultStringKeySize; - return new TypeIndexes(_Path, name, len); - } - - else if (type == typeof(bool) || type == typeof(bool?)) - return new BoolIndex(_Path, name , ".idx"); - - else if (type.IsEnum) - return (IIndex)Activator.CreateInstance( - typeof(EnumIndex<>).MakeGenericType(type), - new object[] { _Path, name }); - - else - return (IIndex)Activator.CreateInstance( - typeof(TypeIndexes<>).MakeGenericType(type), - new object[] { _Path, name, Global.DefaultStringKeySize }); - } - - private void DeleteRowsWith(Guid guid) - { - // find bitmap for guid column - WAHBitArray gc = 
QueryColumnExpression(_docid, RDBExpression.Equal, guid); - _deletedRows.InPlaceOR(gc); - } - - private WAHBitArray QueryColumnExpression(string colname, RDBExpression exp, object from) - { - int i = 0; - if (_nocase.TryGetValue(colname, out i)) // no case query - return _indexes[colname].Query(exp, ("" + from).ToLowerInvariant(), _viewData.Count()); - else - return _indexes[colname].Query(exp, from, _viewData.Count()); - } - #endregion - - internal int Count(Expression> filter) - { - int totcount = 0; - DateTime dt = FastDateTime.Now; - if (filter == null) - totcount = internalCount(); - else - { - WAHBitArray ba = new WAHBitArray(); - - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(filter); - var delbits = _deletedRows.GetBits(); - ba = ((WAHBitArray)qv._bitmap.Pop()).AndNot(delbits); - - totcount = (int)ba.CountOnes(); - } - _log.Debug("Count items = " + totcount); - _log.Debug("Count time (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - return totcount; - } - - internal int Count(string filter) - { - int totcount = 0; - DateTime dt = FastDateTime.Now; - filter = filter.Trim(); - if (filter == null || filter == "") - totcount = internalCount(); - else - { - _log.Debug("Count filter : " + filter); - WAHBitArray ba = new WAHBitArray(); - - LambdaExpression le = null; - if (_lambdacache.TryGetValue(filter, out le) == false) - { - le = System.Linq.Dynamic.DynamicExpression.ParseLambda(_view.Schema, typeof(bool), filter, null); - _lambdacache.Add(filter, le); - } - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(le.Body); - var delbits = _deletedRows.GetBits(); - ba = ((WAHBitArray)qv._bitmap.Pop()).AndNot(delbits); - - totcount = (int)ba.CountOnes(); - } - _log.Debug("Count items = " + totcount); - _log.Debug("Count time (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - return totcount; - } - - private int internalCount() - { - int c = _viewData.Count(); - int cc = 
(int)_deletedRows.GetBits().CountOnes(); - return c - cc; - } - - internal Result Query2(Expression> filter, int start, int count) - { - return Query2(filter, start, count, ""); - } - - internal Result Query2(Expression> filter, int start, int count, string orderby) - { - DateTime dt = FastDateTime.Now; - _log.Debug("query : " + _view.Name); - - WAHBitArray ba = new WAHBitArray(); - - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(filter); - var delbits = _deletedRows.GetBits(); - if (qv._bitmap.Count > 0) - { - WAHBitArray qbits = (WAHBitArray)qv._bitmap.Pop(); - ba = qbits.AndNot(delbits); - } - List trows = null; - if (_viewmanager.inTransaction()) - { - // query from transactions own data - tran_data data = null; - if (_transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out data)) - { - List rrows = new List(); - foreach (var kv in data.rows) - { - foreach (var r in kv.Value) - { - object o = FastCreateObject(_view.Schema); - rrows.Add((T)_rowfiller(o, r)); - } - } - trows = rrows.FindAll(filter.Compile()); - } - } - var order = SortBy(orderby); - bool desc = false; - if (orderby.ToLower().Contains(" desc")) - desc = true; - _log.Debug("query bitmap done (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - dt = FastDateTime.Now; - // exec query return rows - return ReturnRows2(ba, trows, start, count, order, desc); - } - - internal Result Query2(string filter, int start, int count) - { - return Query2(filter, start, count, ""); - } - - internal Result Query2(string filter, int start, int count, string orderby) - { - DateTime dt = FastDateTime.Now; - _log.Debug("query : " + _view.Name); - _log.Debug("query : " + filter); - _log.Debug("order by : " + orderby); - - WAHBitArray ba = new WAHBitArray(); - var delbits = _deletedRows.GetBits(); - - if (filter != "") - { - LambdaExpression le = null; - if (_lambdacache.TryGetValue(filter, out le) == false) - { - le = 
System.Linq.Dynamic.DynamicExpression.ParseLambda(_view.Schema, typeof(bool), filter, null); - _lambdacache.Add(filter, le); - } - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(le.Body); - - ba = ((WAHBitArray)qv._bitmap.Pop()).AndNot(delbits); - } - else - ba = WAHBitArray.Fill(_viewData.Count()).AndNot(delbits); - - var order = SortBy(orderby); - bool desc = false; - if (orderby.ToLower().Contains(" desc")) - desc = true; - _log.Debug("query bitmap done (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - dt = FastDateTime.Now; - // exec query return rows - return ReturnRows2(ba, null, start, count, order, desc); - } - - private SafeDictionary> _sortcache = new SafeDictionary>(); - - internal List SortBy(string sortcol) - { - List sortlist = new List(); - if (sortcol == "") - return sortlist; - string col = ""; - foreach (var c in _schema.Columns) - if (sortcol.ToLower().Contains(c.Key.ToLower())) - { - col = c.Key; - break; - } - if (col == "") - { - _log.Debug("sort column not recognized : " + sortcol); - return sortlist; - } - - DateTime dt = FastDateTime.Now; - - if (_sortcache.TryGetValue(col, out sortlist) == false) - { - sortlist = new List(); - int count = _viewData.Count(); - IIndex idx = _indexes[col]; - object[] keys = idx.GetKeys(); - Array.Sort(keys); - - foreach (var k in keys) - { - var bi = idx.Query(RDBExpression.Equal, k, count).GetBitIndexes(); - foreach (var i in bi) - sortlist.Add(i); - } - _sortcache.Add(col, sortlist); - } - _log.Debug("Sort column = " + col + ", time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); - return sortlist; - } - - internal object GetAssembly(out string typename) - { - typename = _view.Schema.AssemblyQualifiedName; - return File.ReadAllBytes(_view.Schema.Assembly.Location); - } - - public ViewRowDefinition GetSchema() - { - return _schema; - } - - int _lastrownumber = -1; - object _rowlock = new object(); - internal int NextRowNumber() - { - lock (_rowlock) - { - if 
(_lastrownumber == -1) - _lastrownumber = internalCount(); - return ++_lastrownumber; - } - } - - internal int ViewDelete(Expression> filter) - { - _log.Debug("delete : " + _view.Name); - if (_isDirty == false) - WriteDirtyFile(); - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(filter); - var delbits = _deletedRows.GetBits(); - int count = qv._bitmap.Count; - if (count > 0) - { - WAHBitArray qbits = (WAHBitArray)qv._bitmap.Pop(); - _deletedRows.InPlaceOR(qbits); - count = (int)qbits.CountOnes(); - } - _log.Debug("Deleted rows = " + count); - - InvalidateSortCache(); - return count; - } - - private object _dfile = new object(); - private void WriteDirtyFile() - { - lock (_dfile) - { - _isDirty = true; - if (File.Exists(_Path + _dirtyFilename) == false) - File.WriteAllText(_Path + _dirtyFilename, "dirty"); - } - } - - internal int ViewDelete(string filter) - { - _log.Debug("delete : " + _view.Name); - if (_isDirty == false) - WriteDirtyFile(); - int count = 0; - if (filter != "") - { - LambdaExpression le = null; - if (_lambdacache.TryGetValue(filter, out le) == false) - { - le = System.Linq.Dynamic.DynamicExpression.ParseLambda(_view.Schema, typeof(bool), filter, null); - _lambdacache.Add(filter, le); - } - QueryVisitor qv = new QueryVisitor(QueryColumnExpression); - qv.Visit(le.Body); - count = qv._bitmap.Count; - if (count > 0) - { - WAHBitArray qbits = (WAHBitArray)qv._bitmap.Pop(); - _deletedRows.InPlaceOR(qbits); - count = (int)qbits.CountOnes(); - } - } - - InvalidateSortCache(); - return count; - } - - internal bool ViewInsert(Guid id, object row) - { - List l = new List(); - l.Add(row); - - var r = ExtractRows(l); - InsertRowsWithIndexUpdate(id, r); - - InvalidateSortCache(); - return true; - } - - private void InvalidateSortCache() - { - _sortcache = new SafeDictionary>(); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.IO; +using System.Collections; +using 
System.Linq.Expressions; +using System.Threading.Tasks; +using System.Reflection; +using System.Reflection.Emit; +using Microsoft.CSharp; +using System.CodeDom.Compiler; +using System.ComponentModel; +using RaptorDB.Common; +using System.Threading; +using fastJSON; +using System.Runtime.InteropServices; + +namespace RaptorDB.Views +{ + internal class tran_data + { + public Guid docid; + public Dictionary> rows; + } + + public interface IViewHandler + { + int NextRowNumber(); + ViewBase View { get; } + Type GetFireOnType(); + void FreeMemory(); + void Commit(int id); + void RollBack(int id); + void Insert(Guid docid, object doc); + bool InsertTransaction(Guid docid, object doc); + void Shutdown(); + void Delete(Guid docid); + int Count(string filter); + IResult Query(int start, int count); + IResult Query(int start, int count, string orderby); + IResult Query(string filter, int start, int count, string orderby); + int ViewDelete(string filter); + bool ViewInsert(Guid id, object row); + + bool IsActive { get; } + bool BackgroundIndexing { get; } + + ViewRowDefinition GetSchema(); + void SetView(ViewBase view, IDocStorage objStore); + } + + internal interface IViewHandler : IViewHandler + { + Result Query(Expression> filter, int start, int count, string orderby); + new Result Query(string filter, int start, int count, string orderby); + int Count(Expression> filter); + int ViewDelete(Expression> filter); + } + + internal class ViewHandler : IViewHandler + { + private View _view; + ViewBase.MapFunctionDelgate mapper; + protected static readonly string _S = Path.DirectorySeparatorChar.ToString(); + + protected ILog _log = LogManager.GetLogger(typeof(ViewHandler)); + protected string _path; + protected ViewManager _viewmanager; + protected IEqualsQueryIndex idIndex; + protected Dictionary _indexes = new Dictionary(); + protected StorageFile _viewData; + protected BoolIndex _deletedRows; + protected string _docid = "docid"; + protected string[] _colnames; + protected 
ViewRowDefinition _schema; + protected SafeDictionary _transactions = new SafeDictionary(); + protected SafeDictionary _nocase = new SafeDictionary(); + protected Dictionary _idxlen = new Dictionary(); + RowFill _rowfill; + + protected System.Timers.Timer _saveTimer; + + protected Type basetype; // used for mapper + + bool _isDirty = false; + protected bool _stsaving = false; + + protected const string _dirtyFilename = "temp.$"; + protected const int _RaptorDBVersion = 4; // used for engine changes to views + protected const string _RaptorDBVersionFilename = "RaptorDB.version"; + + public ViewBase View { get { return _view; } } + public bool BackgroundIndexing { get { return _view.BackgroundIndexing; } } + public bool IsActive { get { return _view.isActive; } } + + public ViewHandler(string path, ViewManager manager) + { + _path = path; + _viewmanager = manager; + } + + + private object _stlock = new object(); + void _saveTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e) + { + lock (_stlock) + { + _stsaving = true; + foreach (var i in _indexes) + i.Value.SaveIndex(); + + _deletedRows.SaveIndex(); + _stsaving = false; + } + } + + public Type GetFireOnType() + { + return basetype; + } + + /// + /// Inits directory structure, deleted index, storage and checks if rebuild + /// + /// rebuild + protected bool InitStorage(string viewName, int viewVersion) + { + bool rebuild = false; + + if (!_path.EndsWith(_S)) _path += _S; + _path += viewName + _S; + if (Directory.Exists(_path) == false) + { + Directory.CreateDirectory(_path); + rebuild = true; + } + else + { + // read version file and check with view + int version = 0; + if (File.Exists(_path + viewName + ".version")) + { + int.TryParse(File.ReadAllText(_path + viewName + ".version"), out version); + if (version != viewVersion) + { + _log.Debug("Newer view version detected"); + rebuild = true; + } + } + } + + if (File.Exists(_path + _dirtyFilename)) + { + _log.Debug("Last shutdown failed, rebuilding view : 
" + viewName); + rebuild = true; + } + + if (File.Exists(_path + _RaptorDBVersionFilename)) + { + // check view engine version + string s = File.ReadAllText(_path + _RaptorDBVersionFilename); + int version = 0; + int.TryParse(s, out version); + if (version != _RaptorDBVersion) + { + _log.Debug("RaptorDB view engine upgrade, rebuilding view : " + viewName); + rebuild = true; + } + } + else + { + _log.Debug("RaptorDB view engine upgrade, rebuilding view : " + viewName); + rebuild = true; + } + + if (rebuild) + { + _log.Debug("Deleting old view data folder = " + viewName); + Directory.Delete(_path, true); + Directory.CreateDirectory(_path); + } + + + _deletedRows = new BoolIndex(_path, viewName, ".deleted"); + + _viewData = new StorageFile(_path + viewName + ".mgdat"); + + return rebuild; + } + + void IViewHandler.SetView(ViewBase view, IDocStorage objStore) + { + SetView((View)view, objStore); + } + public void SetView(View view, IDocStorage docs) + { + this._view = view; + view.AutoInitIndexDefinitions(); + _schema = new ViewRowDefinition() + { + Name = _view.Name, + Columns = view.IndexDefinitions.ToList() + }; + _colnames = _schema.Columns.Select(v => v.Key).Where(c => c != _docid).ToArray(); + //Array.Sort(_colnames); + _rowfill = ViewSchemaHelper.CreateRowFiller(_colnames); + + var rebuild = InitStorage(view.Name, view.Version); + + CreateLoadIndexes(_schema); + + mapper = view.Mapper; + + basetype = view.GetDocType(); + + if (rebuild) + Task.Factory.StartNew(() => RebuildFromScratch(docs)); + + _saveTimer = new System.Timers.Timer(); + _saveTimer.AutoReset = true; + _saveTimer.Elapsed += new System.Timers.ElapsedEventHandler(_saveTimer_Elapsed); + _saveTimer.Interval = Global.SaveIndexToDiskTimerSeconds * 1000; + _saveTimer.Start(); + } + + public void FreeMemory() + { + _log.Debug("free memory : " + _view.Name); + foreach (var i in _indexes) + i.Value.FreeMemory(); + + _deletedRows.FreeMemory(); + InvalidateSortCache(); + } + + public void Commit(int id) + { + 
tran_data data = null; + // save data to indexes + if (_transactions.TryGetValue(id, out data)) + { + // delete any items with docid in view + if (_view.DeleteBeforeInsert) + DeleteRowsWith(data.docid); + SaveAndIndex(data.rows); + } + // remove in memory data + _transactions.Remove(id); + } + + public void RollBack(int ID) + { + // remove in memory data + _transactions.Remove(ID); + } + + void IViewHandler.Insert(Guid docid, object doc) + { + Insert(docid, (TDoc)doc); + } + public void Insert(Guid guid, TDoc doc) + { + // TODO: optimize (allocation) + apimapper api = new apimapper(_viewmanager, this); + + if (basetype == doc.GetType()) + { + if (_view.Mapper != null) + _view.Mapper(api, guid, doc); + } + else if (mapper != null) + mapper(api, guid, doc); + + // map objects to rows + foreach (var d in api.emitobj) + api.emit.Add(d.Key, ViewSchemaHelper.ExtractRows(d.Value, _colnames)); + + // delete any items with docid in view + if (_view.DeleteBeforeInsert) + DeleteRowsWith(guid); + + SaveAndIndex(api.emit); + } + + private void SaveAndIndex(Dictionary> rows) + { + foreach (var d in rows) + { + // insert new items into view + InsertRowsWithIndexUpdate(d.Key, d.Value); + } + InvalidateSortCache(); + } + + bool IViewHandler.InsertTransaction(Guid docid, object doc) + { + return InsertTransaction(docid, (TDoc)doc); + } + + public bool InsertTransaction(Guid docid, TDoc doc) + { + apimapper api = new apimapper(_viewmanager, this); + if (basetype == doc.GetType()) + { + var view = _view; + + try + { + if (view.Mapper != null) + view.Mapper(api, docid, doc); + } + catch (Exception ex) + { + _log.Error(ex); + return false; + } + } + else if (mapper != null) + mapper(api, docid, doc); + + if (api._RollBack == true) + return false; + + // map emitobj -> rows + foreach (var d in api.emitobj) + api.emit.Add(d.Key, ViewSchemaHelper.ExtractRows(d.Value, _colnames)); + + //Dictionary> rows = new Dictionary>(); + tran_data data; + if 
(_transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out data)) + { + // TODO : exists -> merge data?? + } + else + { + data = new tran_data(); + data.docid = docid; + data.rows = api.emit; + _transactions.Add(Thread.CurrentThread.ManagedThreadId, data); + } + + return true; + } + + // FEATURE : add query caching here + public Result Query(string filter, int start, int count) + { + return Query(filter, start, count, null); + } + + IResult IViewHandler.Query(string filter, int start, int count, string orderby) + { + return Query(filter, start, count, orderby); + } + public Result Query(string filter, int start, int count, string orderby) + { + return Query(ParseFilter(filter), start, count, orderby); + } + + public Result Query(Expression> filter, int start, int count) + { + return Query(filter, start, count, null); + } + + // FEATURE : add query caching here + public Result Query(Expression> filter, int start, int count, string orderby) + { + if (filter == null) + return Query(start, count); + + DateTime dt = FastDateTime.Now; + _log.Debug("query : " + _view.Name); + + QueryVisitor qv = new QueryVisitor(QueryColumnExpression); + qv.Visit(filter); + var delbits = _deletedRows.GetBits(); + var ba = ((WahBitArray)qv._bitmap.Pop()).AndNot(delbits); + List trows = null; + if (_view.TransactionMode) + { + // query from transaction own data + tran_data data = null; + if (_transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out data)) + { + var rrows = new List(); + foreach (var kv in data.rows) + { + foreach (var r in kv.Value) + { + rrows.Add(_rowfill(r)); + } + } + trows = rrows.FindAll(filter.Compile()); + } + } + + var order = SortBy(orderby); + bool desc = orderby != null && orderby.EndsWith(" desc", StringComparison.InvariantCultureIgnoreCase); + _log.Debug("query bitmap done (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + // exec query return rows + return ReturnRows(ba, trows, start, count, order, desc); + } + + IResult 
IViewHandler.Query(int start, int count) + { + return Query(start, count); + } + public Result Query(int start, int count) + { + return Query(start, count, null); + } + + IResult IViewHandler.Query(int start, int count, string orderby) + { + return Query(start, count, orderby); + } + public Result Query(int start, int count, string orderby) + { + // no filter query -> just show all the data + DateTime dt = FastDateTime.Now; + _log.Debug("query : " + _view.Name); + int totalviewrows = _viewData.Count(); + var rows = new List(); + var ret = new Result(); + int skip = start; + int cc = 0; + WahBitArray del = _deletedRows.GetBits(); + ret.TotalCount = totalviewrows - (int)del.CountOnes(); + + var order = SortBy(orderby); + bool desc = false; + if (orderby.ToLower().Contains(" desc")) + desc = true; + if (order.Count == 0) + for (int i = 0; i < totalviewrows; i++) + order.Add(i); + + if (count == -1) + count = totalviewrows; + int len = order.Count; + if (desc == false) + { + for (int idx = 0; idx < len; idx++) + { + OutputRow(rows, idx, count, ref skip, ref cc, del, order); + if (cc == count) break; + } + } + else + { + for (int idx = len - 1; idx >= 0; idx--) + { + OutputRow(rows, idx, count, ref skip, ref cc, del, order); + if (cc == count) break; + } + } + + _log.Debug("query rows fetched (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + _log.Debug("query rows count : " + rows.Count.ToString("#,0")); + ret.OK = true; + ret.Count = rows.Count; + //ret.TotalCount = rows.Count; + ret.Rows = rows; + return ret; + } + + + /// + /// Adds item with idnex 'idx' to 'rows' + /// + /// if something was added + private bool OutputRow(List rows, int idx) + { + byte[] b = _viewData.ViewReadRawBytes(idx); + if (b != null) + { + object[] data = (object[])fastBinaryJSON.BJSON.ToObject(b); + rows.Add(_rowfill(data)); + return true; + } + return false; + } + + /// + /// Adds item with idnex 'order[idx]' to 'rows' if skipped == 0 and not in del + /// + private void 
OutputRow(List rows, int idx, int count, ref int skip, ref int currentCount, WahBitArray del, List order) + { + int i = order[idx]; + if (del.Get(i) == false) + { + if (skip > 0) + skip--; + else + { + bool b = OutputRow(rows, i); + if (b && count > 0) + currentCount++; + } + } + } + + private void extractsortrowobject(WahBitArray ba, int count, List orderby, List rows, ref int skip, ref int c, int idx) + { + int i = orderby[idx]; + if (ba.Get(i)) + { + if (skip > 0) + skip--; + else + { + bool b = OutputRow(rows, i); + if (b && count > 0) + c++; + } + ba.Set(i, false); + } + } + + public void Shutdown() + { + try + { + _saveTimer.Enabled = false; + while (_stsaving) + Thread.Sleep(1); + + if (_rebuilding) + _log.Debug("Waiting for view rebuild to finish... : " + _view.Name); + + while (_rebuilding) + Thread.Sleep(50); + + _log.Debug("Shutting down Viewhandler"); + // shutdown indexes + foreach (var v in _indexes) + { + _log.Debug("Shutting down view index : " + v.Key); + v.Value.Dispose(); + } + // save deletedbitmap + _deletedRows.Dispose(); + + _viewData.Shutdown(); + + // write view version + File.WriteAllText(_path + _view.Name + ".version", _view.Version.ToString()); + + File.WriteAllText(_path + _RaptorDBVersionFilename, _RaptorDBVersion.ToString()); + // remove dirty file + if (File.Exists(_path + _dirtyFilename)) + File.Delete(_path + _dirtyFilename); + _log.Debug("Viewhandler shutdown done."); + } + catch (Exception ex) + { + _log.Error(ex); + } + } + + public void Delete(Guid docid) + { + DeleteRowsWith(docid); + + InvalidateSortCache(); + } + + #region [ private methods ] + + private Result ReturnRows(WahBitArray ba, List trows, int start, int count, List orderby, bool descending) + { + DateTime dt = FastDateTime.Now; + List rows = new List(); + Result ret = new Result(); + int skip = start; + int c = 0; + ret.TotalCount = (int)ba.CountOnes(); + if (count == -1) count = ret.TotalCount; + if (count > 0) + { + if (orderby != null && orderby.Count > 0) + { 
+ int len = orderby.Count; + if (descending == false) + { + for (int idx = 0; idx < len; idx++) + { + extractsortrowobject(ba, count, orderby, rows, ref skip, ref c, idx); + if (c == count) break; + } + } + else + { + for (int idx = len - 1; idx >= 0; idx--) + { + extractsortrowobject(ba, count, orderby, rows, ref skip, ref c, idx); + if (c == count) break; + } + } + } + foreach (int i in ba.GetBitIndexes()) + { + if (c < count) + { + if (skip > 0) + skip--; + else + { + bool b = OutputRow(rows, i); + if (b && count > 0) + c++; + } + if (c == count) break; + } + } + } + if (trows != null) // TODO : move to start and decrement in count + foreach (var o in trows) + rows.Add(o); + _log.Debug("query rows fetched (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + _log.Debug("query rows count : " + rows.Count.ToString("#,0")); + ret.OK = true; + ret.Count = rows.Count; + ret.Rows = rows; + return ret; + } + + MethodInfo insertmethod = null; + bool _rebuilding = false; + private void RebuildFromScratch(IDocStorage docs) + { + _rebuilding = true; + try + { + _log.Debug("Rebuilding view from scratch..."); + _log.Debug("View = " + _view.Name); + DateTime dt = FastDateTime.Now; + + int c = docs.RecordCount(); + for (int i = 0; i < c; i++) + { + StorageItem meta = null; + object obj = docs.GetObject(i, out meta); + if (meta != null && meta.isDeleted) + Delete(meta.key); + else if (obj is View_delete) + { + View_delete vd = (View_delete)obj; + if (vd.Viewname.Equals(this._view.Name, StringComparison.InvariantCultureIgnoreCase)) + ViewDelete(vd.Filter); + } + else if (obj is View_insert) + { + View_insert vi = (View_insert)obj; + if (vi.Viewname.Equals(this._view.Name.ToLower(), StringComparison.InvariantCultureIgnoreCase)) + ViewInsert(vi.ID, vi.RowObject); + } + else if (obj is TDoc) + { + Insert(meta.key, (TDoc)obj); + } + } + _log.Debug("rebuild view '" + _view.Name + "' done (s) = " + FastDateTime.Now.Subtract(dt).TotalSeconds); + + // write version.dat file 
when done + File.WriteAllText(_path + _view.Name + ".version", _view.Version.ToString()); + } + catch (Exception ex) + { + _log.Error("Rebuilding View failed : " + _view.Name, ex); + } + _rebuilding = false; + } + + private void CreateLoadIndexes(ViewRowDefinition viewRowDefinition) + { + idIndex = new TypeIndexes(_path, _docid, 16/*, allowDups: !_view.DeleteBeforeInsert*/); + _indexes.Add(_docid, idIndex); + // load indexes + foreach (var c in viewRowDefinition.Columns) + { + if (c.Key != "docid") + _indexes.Add(c.Key, c.Value.CreateIndex(_path, c.Key)); + } + } + + private void InsertRowsWithIndexUpdate(Guid guid, List rows) + { + if (_isDirty == false) + WriteDirtyFile(); + + foreach (var row in rows) + { + object[] r = new object[row.Length + 1]; + r[0] = guid; + Array.Copy(row, 0, r, 1, row.Length); + byte[] b = fastBinaryJSON.BJSON.ToBJSON(r); + + int rownum = (int)_viewData.WriteRawData(b); + + IndexRow(guid, row, rownum); + } + } + + private void IndexRow(Guid id, object[] row, int rownum) + { + idIndex.Set(id, rownum); + for (int i = 0; i < row.Length; i++) + { + _indexes[_colnames[i]].Set(row[i], rownum); + } + } + + + private void DeleteRowsWith(Guid guid) + { + // find bitmap for guid column + WahBitArray gc = QueryColumnExpression(_docid, RDBExpression.Equal, guid); + _deletedRows.InPlaceOR(gc); + } + + private WahBitArray QueryColumnExpression(string colname, RDBExpression exp, object from) + { + var index = _indexes[colname]; //.Query(exp, from, _viewData.Count()); + var qia = new QueryIA() + { + Expression = exp, + Key = from + }; + return index.Accept(qia); + } + + SafeDictionary>> _lambdacache = new SafeDictionary>>(); + private Expression> ParseFilter(string filter) + { + if (filter == null) return null; + filter = filter.Trim(); + if (filter.Length == 0) return null; + Expression> le = null; + if (_lambdacache.TryGetValue(filter, out le) == false) + { + le = System.Linq.Dynamic.DynamicExpression.ParseLambda>(_view.Schema, typeof(bool), filter, 
null); + _lambdacache.Add(filter, le); + } + return le; + } + + #endregion + + #region Count + public int Count(Expression> filter) + { + int totcount = 0; + DateTime dt = FastDateTime.Now; + if (filter == null) + totcount = TotalCount(); + else + { + WahBitArray ba = new WahBitArray(); + + QueryVisitor qv = new QueryVisitor(QueryColumnExpression); + qv.Visit(filter.Body); + var delbits = _deletedRows.GetBits(); + ba = ((WahBitArray)qv._bitmap.Pop()).AndNot(delbits); + + totcount = (int)ba.CountOnes(); + } + _log.Debug("Count items = " + totcount); + _log.Debug("Count time (ms) : " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + return totcount; + } + + public int Count(string filter) + { + return Count(ParseFilter(filter)); + } + + public int TotalCount() + { + int c = _viewData.Count(); + int cc = (int)_deletedRows.GetBits().CountOnes(); + return c - cc; + } + #endregion + + private SafeDictionary> _sortcache = new SafeDictionary>(); + + private List SortBy(string sortcol) + { + if (string.IsNullOrEmpty(sortcol)) + return null; + // TODO: sorting + throw new NotImplementedException("sorting not implemented"); + //string col = _colnames.FirstOrDefault(c => sortcol.StartsWith(sortcol, StringComparison.InvariantCultureIgnoreCase)); + //if (col == null) + //{ + // _log.Debug("sort column not recognized : " + sortcol); + // return null; + //} + + //DateTime dt = FastDateTime.Now; + + //List sortlist; + //if (!_sortcache.TryGetValue(col, out sortlist)) + //{ + // sortlist = new List(); + // int count = _viewData.Count(); + // IIndex idx = _indexes[col]; + // object[] keys = idx.GetKeys(); + // Array.Sort(keys); + + // foreach (var k in keys) + // { + // var bi = idx.Query(RDBExpression.Equal, k, count).GetBitIndexes(); + // foreach (var i in bi) + // sortlist.Add(i); + // } + // _sortcache.Add(col, sortlist); + //} + //_log.Debug("Sort column = " + col + ", time (ms) = " + FastDateTime.Now.Subtract(dt).TotalMilliseconds); + //return sortlist; + } + + public 
ViewRowDefinition GetSchema() + { + return _schema; + } + + int _lastrownumber = -1; + object _rowlock = new object(); + public int NextRowNumber() + { + // TODO: interlocked + lock (_rowlock) + { + if (_lastrownumber == -1) + _lastrownumber = TotalCount(); + return ++_lastrownumber; + } + } + + /// + /// marks matching items as removed + /// + /// Count of removed items + public int ViewDelete(Expression> filter) + { + if (filter == null) return 0; + _log.Debug("delete : " + _view.Name); + if (_isDirty == false) + WriteDirtyFile(); + QueryVisitor qv = new QueryVisitor(QueryColumnExpression); + qv.Visit(filter.Body); + var delbits = _deletedRows.GetBits(); + int count = qv._bitmap.Count; + if (count > 0) + { + WahBitArray qbits = (WahBitArray)qv._bitmap.Pop(); + _deletedRows.InPlaceOR(qbits); + count = (int)qbits.CountOnes(); + } + _log.Debug("Deleted rows = " + count); + + InvalidateSortCache(); + return count; + } + /// + /// marks matching items as removed + /// + /// Count of removed items + public int ViewDelete(string filter) + { + return ViewDelete(ParseFilter(filter)); + } + + private object _dfile = new object(); + private void WriteDirtyFile() + { + lock (_dfile) + { + _isDirty = true; + if (File.Exists(_path + _dirtyFilename) == false) + File.WriteAllText(_path + _dirtyFilename, "dirty"); + } + } + + + public bool ViewInsert(Guid id, object row) + { + List l = new List(); + l.Add(row); + + var r = ViewSchemaHelper.ExtractRows(l, _colnames); + InsertRowsWithIndexUpdate(id, r); + + InvalidateSortCache(); + return true; + } + + private void InvalidateSortCache() + { + _sortcache = new SafeDictionary>(); + } + + class QueryIA : IIndexAcceptable + { + public RDBExpression Expression; + public object Key; + public WahBitArray Accept(IIndex item) + { + if (!(Key is T)) return null; + var key = (T)Key; + switch (Expression) + { + case RDBExpression.Equal: + return (item as IEqualsQueryIndex)?.QueryEquals(key); + case RDBExpression.Greater: + return (item as 
IComparisonIndex)?.QueryGreater(key); + case RDBExpression.GreaterEqual: + return (item as IComparisonIndex)?.QueryGreaterEquals(key); + case RDBExpression.Less: + return (item as IComparisonIndex)?.QueryLess(key); + case RDBExpression.LessEqual: + return (item as IComparisonIndex)?.QueryLessEquals(key); + case RDBExpression.NotEqual: + return (item as IEqualsQueryIndex)?.QueryNotEquals(key); + case RDBExpression.Contains: + return (item as IContainsIndex)?.QueryContains(key); + default: + return null; + } + } + } + } +} diff --git a/RaptorDB/Views/ViewHelpers.cs b/RaptorDB/Views/ViewHelpers.cs new file mode 100644 index 0000000..0f8490d --- /dev/null +++ b/RaptorDB/Views/ViewHelpers.cs @@ -0,0 +1,187 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Reflection.Emit; +using System.Text; +using fastJSON; + +namespace RaptorDB.Views +{ + internal delegate T RowFill(object[] data); + public delegate object[] RowExtract(T obj); + static class ViewSchemaHelper + { + public static readonly Type Type = typeof(T); + [Obsolete] + public static readonly RowFill RowFill = CreateRowFillerDelegate(); + [Obsolete] + public static RowFill CreateRowFillerDelegate() + { + var objtype = Type; + // TODO: use Linq.Expressions + DynamicMethod dynMethod = new DynamicMethod("_", objtype, new Type[] { typeof(object[]) }); + ILGenerator il = dynMethod.GetILGenerator(); + var local = il.DeclareLocal(objtype); + il.Emit(OpCodes.Newobj, objtype.GetConstructor(Type.EmptyTypes)); + il.Emit(OpCodes.Castclass, objtype); + il.Emit(OpCodes.Stloc, local); + int i = 1; + + foreach (var c in objtype.GetFields()) + { + il.Emit(OpCodes.Ldloc, local); + il.Emit(OpCodes.Ldarg_1); + if (c.Name != "docid") + il.Emit(OpCodes.Ldc_I4, i++); + else + il.Emit(OpCodes.Ldc_I4, 0); + + il.Emit(OpCodes.Ldelem_Ref); + il.Emit(OpCodes.Unbox_Any, c.FieldType); + il.Emit(OpCodes.Stfld, c); + } + + foreach (var c in 
objtype.GetProperties()) + { + MethodInfo setMethod = c.GetSetMethod(); + il.Emit(OpCodes.Ldloc, local); + il.Emit(OpCodes.Ldarg_1); + if (c.Name != "docid") + il.Emit(OpCodes.Ldc_I4, i++); + else + il.Emit(OpCodes.Ldc_I4, 0); + il.Emit(OpCodes.Ldelem_Ref); + il.Emit(OpCodes.Unbox_Any, c.PropertyType); + il.EmitCall(OpCodes.Callvirt, setMethod, null); + } + + il.Emit(OpCodes.Ldloc, local); + il.Emit(OpCodes.Ret); + + return (RowFill)dynMethod.CreateDelegate(typeof(RowFill)); + + //var objtype = Type; + //// TODO: use Linq.Expressions + //var + //var statements = new List(); + //ILGenerator il = dynMethod.GetILGenerator(); + //var local = il.DeclareLocal(objtype); + //il.Emit(OpCodes.Newobj, objtype.GetConstructor(Type.EmptyTypes)); + //il.Emit(OpCodes.Castclass, objtype); + //il.Emit(OpCodes.Stloc, local); + //int i = 1; + + //foreach (var c in objtype.GetFields()) + //{ + // il.Emit(OpCodes.Ldloc, local); + // il.Emit(OpCodes.Ldarg_1); + // if (c.Name != "docid") + // il.Emit(OpCodes.Ldc_I4, i++); + // else + // il.Emit(OpCodes.Ldc_I4, 0); + + // il.Emit(OpCodes.Ldelem_Ref); + // il.Emit(OpCodes.Unbox_Any, c.FieldType); + // il.Emit(OpCodes.Stfld, c); + //} + + //foreach (var c in objtype.GetProperties()) + //{ + // MethodInfo setMethod = c.GetSetMethod(); + // il.Emit(OpCodes.Ldloc, local); + // il.Emit(OpCodes.Ldarg_1); + // if (c.Name != "docid") + // il.Emit(OpCodes.Ldc_I4, i++); + // else + // il.Emit(OpCodes.Ldc_I4, 0); + // il.Emit(OpCodes.Ldelem_Ref); + // il.Emit(OpCodes.Unbox_Any, c.PropertyType); + // il.EmitCall(OpCodes.Callvirt, setMethod, null); + //} + + //il.Emit(OpCodes.Ldloc, local); + //il.Emit(OpCodes.Ret); + + //return (RowFill)dynMethod.CreateDelegate(typeof(RowFill)); + } + } + + static class ViewSchemaHelper + { + public static List ExtractRows(List rows, string[] columnNames) + { + // TODO: precompile this like RowFiller + List output = new List(); + // reflection match object properties to the schema row + var colcount = 
columnNames.Length; + foreach (var obj in rows) + { + object[] r = new object[colcount]; + Getters[] getters = Reflection.Instance.GetGetters(obj.GetType(), true, null); + + for (int i = 0; i < colcount; i++) + { + var c = columnNames[i]; + foreach (var g in getters) + { + if (g.Name == c) + { + r[i] = g.Getter(obj); + break; + } + } + } + output.Add(r); + } + + return output; + } + + public static RowFill CreateRowFiller(string[] columns) + { + var values = Expression.Parameter(typeof(object[]), "columns"); + var row = Expression.Variable(typeof(T), "row"); + var block = new List(); + + if (typeof(T).IsClass) + block.Add(Expression.Assign(row, Expression.New(typeof(T)))); + + block.Add(ConvertAndAssign(row, "docid", + Expression.ArrayIndex(values, Expression.Constant(0)))); + int i = 1; + foreach (var col in columns) + { + block.Add(ConvertAndAssign(row, col, + Expression.ArrayIndex(values, Expression.Constant(i)))); + i++; + } + + // return row; + block.Add(row); + + return Expression.Lambda>(Expression.Block(typeof(T), new[] { row }, block), values).Compile(); + } + + private static Expression ConvertAndAssign(Expression obj, string propName, Expression value) + { + // TODO: generic property API + var property = obj.Type.GetProperty(propName); + if (property != null) + { + return Expression.Assign( + Expression.Property(obj, property), + Expression.Convert(value, property.PropertyType)); + } + var field = obj.Type.GetField(propName); + if (field != null) + { + return Expression.Assign( + Expression.Field(obj, field), + Expression.Convert(value, field.FieldType)); + } + throw new ArgumentException("specified property does not exist"); + } + } +} diff --git a/RaptorDB/Views/ViewIndexDefinitionHelpers.cs b/RaptorDB/Views/ViewIndexDefinitionHelpers.cs new file mode 100644 index 0000000..907d634 --- /dev/null +++ b/RaptorDB/Views/ViewIndexDefinitionHelpers.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; 
+using RaptorDB.Common; +using RaptorDB.Views; +using System.Runtime.InteropServices; + +namespace RaptorDB +{ + public static class ViewIndexDefinitionHelpers + { + public static void SetStringIndex( + this View view, + System.Linq.Expressions.Expression> selector, + byte length = 60, + bool ignoreCase = false) + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new StringIndexColumnDefinition(length); + } + + public static void SetObjectToStringIndex( + this View view, + System.Linq.Expressions.Expression> selector, + byte length = 60, + bool ignoreCase = false) + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new ObjectToStringColumnDefinition(length); + } + + public static void SetMGIndex( + this View view, + System.Linq.Expressions.Expression> selector, + byte length = 60) + where TProp : struct, IComparable + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new MGIndexColumnDefinition(length); + } + + public static void SetMMIndex( + this View view, + System.Linq.Expressions.Expression> selector, + int pageSize = 8192, + IPageSerializer keySerializer = null) + where TProp: IComparable + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new MMIndexColumnDefinition() + { + PageSize = pageSize, + KeySerializer = keySerializer + }; + } + + public static void SetFullTextIndex( + this View view, + System.Linq.Expressions.Expression> selector) + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new FullTextIndexColumnDefinition(); + } + + public static void SetEnumIndex( + this View view, + System.Linq.Expressions.Expression> selector) + where TProp : struct, IConvertible + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new EnumIndexColumnDefinition(); + } + + public static void SetHashIndex( + this View view, + 
System.Linq.Expressions.Expression> selector, + long defaultSize = 4096, + IPageSerializer serializer = null) + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new HashIndexColumnDefinition() { DefaultSize = defaultSize, KeySerializer = serializer }; + } + + public static void SetNoIndexing( + this View view, + System.Linq.Expressions.Expression> selector) + { + var name = ExpressionHelper.GetPropertyName(selector); + view.IndexDefinitions[name] = new NoIndexColumnDefinition(); + } + + public static IViewColumnIndexDefinition GetDefaultForType(bool allowDups = true) + { + if (typeof(T).IsValueType) + return Activator.CreateInstance(typeof(MMIndexColumnDefinition<>).MakeGenericType(typeof(T)), new object[] { }) as IViewColumnIndexDefinition; + throw new NotImplementedException(); + } + } +} diff --git a/RaptorDB/Views/ViewManager.cs b/RaptorDB/Views/ViewManager.cs index ecf500d..fbe4d01 100644 --- a/RaptorDB/Views/ViewManager.cs +++ b/RaptorDB/Views/ViewManager.cs @@ -1,407 +1,363 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Linq.Expressions; -using System.Threading.Tasks; -using System.Threading; -using RaptorDB.Common; - -namespace RaptorDB.Views -{ - internal class ViewManager - { - public ViewManager(string viewfolder, IDocStorage objstore) - { - _Path = viewfolder; - _objectStore = objstore; - } - - private IDocStorage _objectStore; - private ILog _log = LogManager.GetLogger(typeof(ViewManager)); - private string _Path = ""; - // list of views - private SafeDictionary _views = new SafeDictionary(); - // primary view list - private SafeDictionary _primaryView = new SafeDictionary(); - // like primary view list - private SafeDictionary _otherViewTypes = new SafeDictionary(); - // consistent views - private SafeDictionary> _consistentViews = new SafeDictionary>(); - // other views type->list of view names to call - private SafeDictionary> _otherViews = new 
SafeDictionary>(); - private TaskQueue _que = new TaskQueue(); - private SafeDictionary _transactions = new SafeDictionary(); - - internal int Count(string viewname, string filter) - { - ViewHandler view = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out view)) - return view.Count(filter); - - _log.Error("view not found", viewname); - return 0; - } - - internal Result Query(string viewname, string filter, int start, int count) - { - return Query(viewname, filter, start, count, ""); - } - - internal Result Query(string viewname, int start, int count) - { - ViewHandler view = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out view)) - return view.Query(start, count); - - _log.Error("view not found", viewname); - return new Result(false, new Exception("view not found : " + viewname)); - } - - internal void Insert(string viewname, Guid docid, T data) - { - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out vman)) - { - if (vman._view.isActive == false) - { - _log.Debug("view is not active, skipping insert : " + viewname); - return; - } - if (vman._view.BackgroundIndexing) - _que.AddTask(() => vman.Insert(docid, data)); - else - vman.Insert(docid, data); - - return; - } - _log.Error("view not found", viewname); - } - - internal bool InsertTransaction(string viewname, Guid docid, T data) - { - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out vman)) - { - if (vman._view.isActive == false) - { - _log.Debug("view is not active, skipping insert : " + viewname); - return false; - } - - return vman.InsertTransaction(docid, data); - } - _log.Error("view not found", viewname); - return false; - } - - internal object Fetch(Guid guid) - { - object b = null; - _objectStore.GetObject(guid, out b); - - return b; - } - - internal string GetPrimaryViewForType(Type type) - { - string vn = ""; - if (type == null || type == 
typeof(object)) // reached the end - return vn; - // find direct - if (_primaryView.TryGetValue(type, out vn)) - return vn; - // recurse basetype - return GetPrimaryViewForType(type.BaseType); - } - - internal List GetOtherViewsList(Type type) - { - List list = new List(); - _otherViews.TryGetValue(type, out list); - return list; - } - - internal string GetViewName(Type type) // used for queries - { - string viewname = null; - // find view from name - - viewname = GetPrimaryViewForType(type); - if (viewname != "") - return viewname; - - // search for viewtype here - if (_otherViewTypes.TryGetValue(type, out viewname)) - return viewname; - - return ""; - } - - internal void RegisterView(View view) - { - view.Verify(); - - ViewHandler vh = null; - if (_views.TryGetValue(view.Name.ToLower(), out vh)) - { - _log.Error("View already added and exists : " + view.Name); - } - else - { - vh = new ViewHandler(_Path, this); - vh.SetView(view, _objectStore); - _views.Add(view.Name.ToLower(), vh); - _otherViewTypes.Add(view.GetType(), view.Name.ToLower()); - - // add view schema mapping - _otherViewTypes.Add(view.Schema, view.Name.ToLower()); - - Type basetype = vh.GetFireOnType(); - if (view.isPrimaryList) - { - _primaryView.Add(basetype, view.Name.ToLower()); - } - else - { - if (view.ConsistentSaveToThisView) - AddToViewList(_consistentViews, basetype, view.Name); - else - AddToViewList(_otherViews, basetype, view.Name); - } - } - } - - internal void ShutDown() - { - _log.Debug("View Manager shutdown"); - // shutdown views - foreach (var v in _views) - { - try - { - _log.Debug(" shutting down view : " + v.Value._view.Name); - v.Value.Shutdown(); - } - catch (Exception ex) - { - _log.Error(ex); - } - } - _que.Shutdown(); - } - - internal List GetConsistentViews(Type type) - { - List list = new List(); - _consistentViews.TryGetValue(type, out list); - return list; - } - - private void AddToViewList(SafeDictionary> diclist, Type fireontype, string viewname) - { - //foreach (var 
tn in view.FireOnTypes) - { - List list = null; - Type t = fireontype;// Type.GetType(tn); - if (diclist.TryGetValue(t, out list)) - list.Add(viewname); - else - { - list = new List(); - list.Add(viewname); - diclist.Add(t, list); - } - } - } - - internal void Delete(Guid docid) - { - // remove from all views - foreach (var v in _views) - v.Value.Delete(docid); - } - - internal void Rollback(int ID) - { - _log.Debug("ROLLBACK"); - // rollback all views with tran id - foreach (var v in _views) - v.Value.RollBack(ID); - - _transactions.Remove(ID); - } - - internal void Commit(int ID) - { - _log.Debug("COMMIT"); - // commit all data in vews with tran id - foreach (var v in _views) - v.Value.Commit(ID); - - _transactions.Remove(ID); - } - - internal bool isTransaction(string viewname) - { - return _views[viewname.ToLower()]._view.TransactionMode; - } - - internal bool inTransaction() - { - bool b = false; - return _transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out b); - } - - internal void StartTransaction() - { - _transactions.Add(Thread.CurrentThread.ManagedThreadId, false); - } - - internal Result Query(Expression> filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - internal Result Query(Expression> filter, int start, int count, string orderby) - { - string view = GetViewName(typeof(T)); - - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(view.ToLower(), out vman)) - { - return vman.Query2(filter, start, count, orderby); - } - return new Result(false, new Exception("View not found")); - } - - internal Result Query(string filter, int start, int count) - { - return Query(filter, start, count, ""); - } - - internal Result Query(string filter, int start, int count, string orderby) - { - string view = GetViewName(typeof(T)); - - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(view.ToLower(), out vman)) - { - return vman.Query2(filter, start, count, orderby); - } - 
return new Result(false, new Exception("View not found")); - } - - internal int Count(Expression> filter) - { - string view = GetViewName(typeof(T)); - - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(view.ToLower(), out vman)) - { - return vman.Count(filter); - } - return 0; - } - - internal void FreeMemory() - { - foreach (var v in _views) - v.Value.FreeMemory(); - } - - internal object GetAssemblyForView(string viewname, out string typename) - { - ViewHandler view = null; - typename = ""; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out view)) - { - return view.GetAssembly(out typename); - } - return null; - } - - internal List GetViews() - { - List o = new List(); - foreach (var i in _views) - o.Add(i.Value._view); - return o; - } - - internal ViewRowDefinition GetSchema(string view) - { - ViewHandler v = null; - if (_views.TryGetValue(view.ToLower(), out v)) - { - return v.GetSchema(); - } - return null; - } - - internal Result Query(string viewname, string filter, int start, int count, string orderby) - { - ViewHandler view = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out view)) - return view.Query(filter, start, count, orderby); - - _log.Error("view not found", viewname); - return new Result(false, new Exception("view not found : " + viewname)); - } - - internal int ViewDelete(Expression> filter) - { - string view = GetViewName(typeof(T)); - - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(view.ToLower(), out vman)) - { - return vman.ViewDelete(filter); - } - return -1; - } - - internal int ViewDelete(string viewname, string filter) - { - ViewHandler view = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out view)) - return view.ViewDelete(filter); - return -1; - } - - internal bool ViewInsert(Guid id, T row) - { - string view = GetViewName(typeof(T)); - - ViewHandler vman = null; - // find view from name - if 
(_views.TryGetValue(view.ToLower(), out vman)) - { - return vman.ViewInsert(id, row); - } - return false; - } - - internal bool ViewInsert(string viewname, Guid id, object row) - { - ViewHandler vman = null; - // find view from name - if (_views.TryGetValue(viewname.ToLower(), out vman)) - { - return vman.ViewInsert(id, row); - } - return false; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Linq.Expressions; +using System.Threading.Tasks; +using System.Threading; +using RaptorDB.Common; +using System.Reflection; + +namespace RaptorDB.Views +{ + public class ViewManager + { + public ViewManager(string viewfolder, IDocStorage objstore) + { + _Path = viewfolder; + _objectStore = objstore; + } + + private IDocStorage _objectStore; + private ILog _log = LogManager.GetLogger(typeof(ViewManager)); + private string _Path = ""; + // list of views + private Dictionary _views = new Dictionary(StringComparer.InvariantCultureIgnoreCase); + // primary view list + private Dictionary _primaryView = new Dictionary(); + // like primary view list + private Dictionary _otherViewTypes = new Dictionary(); + // consistent views + private Dictionary> _consistentViews = new Dictionary>(); + // other views type->list of view names to call + private Dictionary> _otherViews = new Dictionary>(); + private TaskQueue _que = new TaskQueue(); + private SafeDictionary _transactions = new SafeDictionary(); + + IViewHandler GetHandler(string name) + { + IViewHandler view = null; + if (_views.TryGetValue(name, out view)) + return view; + throw new ViewNotFoundException(name); + } + + IViewHandler GetHandler(string name) + { + IViewHandler view = null; + if (_views.TryGetValue(name, out view)) + return (IViewHandler)view; + throw new ViewNotFoundException(name); + } + + public int Count(string viewname, string filter) + { + return GetHandler(viewname).Count(filter); + } + + public IResult Query(string viewname, string filter, int 
start, int count) + { + return Query(viewname, filter, start, count, null); + } + + public IResult Query(string viewname, int start, int count) + { + return GetHandler(viewname).Query(start, count); + } + + public void Insert(string viewname, Guid docid, T data) + { + var handler = GetHandler(viewname); + if (!handler.IsActive) + { + _log.Debug("view is not active, skipping insert : " + viewname); + } + else if (handler.BackgroundIndexing) + _que.AddTask(() => handler.Insert(docid, data)); + else + handler.Insert(docid, data); + + return; + } + + public bool InsertTransaction(string viewname, Guid docid, T data) + { + IViewHandler vman = GetHandler(viewname); + if (!vman.IsActive) + { + _log.Debug("view is not active, skipping insert : " + viewname); + return false; + } + + return vman.InsertTransaction(docid, data); + } + + public object Fetch(Guid guid) + { + object b = null; + _objectStore.GetObject(guid, out b); + + return b; + } + + public string GetPrimaryViewForType(Type type) + { + string vn; + if (type == null || type == typeof(object)) // reached the end + return null; + // find direct + if (_primaryView.TryGetValue(type, out vn)) + return vn; + // recurse basetype + return GetPrimaryViewForType(type.BaseType); + } + + public List GetOtherViewsList(Type type) + { + List list = new List(); + _otherViews.TryGetValue(type, out list); + return list; + } + + public string GetViewName(Type type) // used for queries + { + string viewname = GetPrimaryViewForType(type); + if (viewname != null) + return viewname; + + // search for viewtype here + if (_otherViewTypes.TryGetValue(type, out viewname)) + return viewname; + + return null; + } + + public void RegisterView(View view) + { + view.Verify(); + if (_views.ContainsKey(view.Name)) + { + _log.Error("View already added and exists : " + view.Name); + } + else + { + var vh = new ViewHandler(_Path, this); + vh.SetView(view, _objectStore); + _views.Add(view.Name, vh); + _otherViewTypes.Add(view.GetType(), view.Name); 
+ + // add view schema mapping + _otherViewTypes.Add(view.Schema, view.Name); + + Type basetype = vh.GetFireOnType(); + if (view.isPrimaryList) + { + _primaryView.Add(basetype, view.Name); + } + else + { + if (view.ConsistentSaveToThisView) + AddToViewList(_consistentViews, basetype, view.Name); + else + AddToViewList(_otherViews, basetype, view.Name); + } + } + } + public void RegisterView(View view) + { + view.Verify(); + if (_views.ContainsKey(view.Name)) + { + _log.Error("View already added and exists : " + view.Name); + } + else + { + var type = typeof(ViewHandler<,>).MakeGenericType(typeof(TDoc), view.Schema); + var vh = Activator.CreateInstance(type, _Path, this) as IViewHandler; + vh.SetView(view, _objectStore); + _views.Add(view.Name, vh); + _otherViewTypes.Add(view.GetType(), view.Name); + + // add view schema mapping + _otherViewTypes.Add(view.Schema, view.Name); + + Type basetype = vh.GetFireOnType(); + if (view.isPrimaryList) + { + _primaryView.Add(basetype, view.Name); + } + else + { + if (view.ConsistentSaveToThisView) + AddToViewList(_consistentViews, basetype, view.Name); + else + AddToViewList(_otherViews, basetype, view.Name); + } + } + } + + public void ShutDown() + { + _log.Debug("View Manager shutdown"); + // shutdown views + foreach (var v in _views) + { + try + { + _log.Debug(" shutting down view : " + v.Value.View.Name); + v.Value.Shutdown(); + } + catch (Exception ex) + { + _log.Error(ex); + } + } + _que.Shutdown(); + } + + public List GetConsistentViews(Type type) + { + List list = new List(); + _consistentViews.TryGetValue(type, out list); + return list; + } + + private static void AddToViewList(IDictionary> diclist, Type fireontype, string viewname) + { + List list = null; + Type t = fireontype;// Type.GetType(tn); + if (diclist.TryGetValue(t, out list)) + list.Add(viewname); + else + { + list = new List(); + list.Add(viewname); + diclist.Add(t, list); + } + } + + public void Delete(Guid docid) + { + // remove from all views + foreach 
(var v in _views) + v.Value.Delete(docid); + } + + public void Rollback(int ID) + { + _log.Debug("ROLLBACK"); + // rollback all views with tran id + foreach (var v in _views) + v.Value.RollBack(ID); + + _transactions.Remove(ID); + } + + public void Commit(int ID) + { + _log.Debug("COMMIT"); + // commit all data in vews with tran id + foreach (var v in _views) + v.Value.Commit(ID); + + _transactions.Remove(ID); + } + + public bool isTransaction(string viewname) + { + return _views[viewname.ToLower()].View.TransactionMode; + } + + public bool inTransaction() + { + bool b = false; + return _transactions.TryGetValue(Thread.CurrentThread.ManagedThreadId, out b); + } + + public void StartTransaction() + { + _transactions.Add(Thread.CurrentThread.ManagedThreadId, false); + } + + public Result Query(Expression> filter, int start, int count) + { + return Query(filter, start, count, null); + } + + public Result Query(Expression> filter, int start, int count, string orderby) + { + string view = GetViewName(typeof(T)); + return GetHandler(view).Query(filter, start, count, orderby); + } + + public Result Query(string filter, int start, int count) + { + return Query(filter, start, count, null); + } + + public Result Query(string filter, int start, int count, string orderby) + { + string view = GetViewName(typeof(T)); + + return GetHandler(view).Query(filter, start, count, orderby); + } + + public int Count(Expression> filter) + { + string view = GetViewName(typeof(T)); + return GetHandler(view).Count(filter); + } + + public void FreeMemory() + { + foreach (var v in _views) + v.Value.FreeMemory(); + } + + public object GetAssemblyForView(string viewname, out string typename) + { + var schema = GetHandler(viewname).View.Schema; + typename = schema.AssemblyQualifiedName; + return System.IO.File.ReadAllBytes(schema.Assembly.Location); + } + + public List GetViews() + { + return _views.Values.Select(v => v.View).ToList(); + } + + public ViewRowDefinition GetSchema(string view) + { + 
return GetHandler(view).GetSchema(); + } + + public IResult Query(string viewname, string filter, int start, int count, string orderby) + { + return GetHandler(viewname).Query(filter, start, count, orderby); + } + + public int ViewDelete(Expression> filter) + { + string view = GetViewName(typeof(T)); + + return GetHandler(view).ViewDelete(filter); + } + + public int ViewDelete(string viewname, string filter) + { + return GetHandler(viewname).ViewDelete(filter); + } + + public bool ViewInsert(Guid id, T row) + { + string view = GetViewName(typeof(T)); + + return GetHandler(view).ViewInsert(id, row); + } + + public bool ViewInsert(string viewname, Guid id, object row) + { + return GetHandler(viewname).ViewInsert(id, row); + } + } +} diff --git a/RaptorDB/Views/ViewNotFoundException.cs b/RaptorDB/Views/ViewNotFoundException.cs new file mode 100644 index 0000000..11e473b --- /dev/null +++ b/RaptorDB/Views/ViewNotFoundException.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Views +{ + public class ViewNotFoundException: Exception + { + public ViewNotFoundException(string viewName) : base(string.Format("view '{0}' was not found", viewName)) { } + } +} diff --git a/RaptorDB/Views/ViewRowDefinition.cs b/RaptorDB/Views/ViewRowDefinition.cs new file mode 100644 index 0000000..1ff7136 --- /dev/null +++ b/RaptorDB/Views/ViewRowDefinition.cs @@ -0,0 +1,185 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using RaptorDB.Indexes; +using RaptorDB.Common; + +namespace RaptorDB.Views +{ + + public class ViewRowDefinition + { + public ViewRowDefinition() + { + Columns = new List>(); + } + public string Name { get; set; } + public List> Columns { get; set; } + + public void Add(string name, IViewColumnIndexDefinition type) + { + Columns.Add(new KeyValuePair(name, type)); + } + } + + public interface 
IViewColumnIndexDefinition + { + IIndex CreateIndex(string path, string name); + } + + public interface IViewColumnIndexDefinition: IViewColumnIndexDefinition + { + new IIndex CreateIndex(string path, string name); + } + public class MGIndexColumnDefinition : IViewColumnIndexDefinition + { + public Type Type { get; protected set; } + public byte KeySize { get; protected set; } + public bool AllowDuplicates { get; set; } + public MGIndexColumnDefinition(Type type, byte keySize, bool allowDups = true) + { + Type = type; KeySize = keySize; + AllowDuplicates = allowDups; + } + public MGIndexColumnDefinition(Type type) + : this(type, (byte)Marshal.SizeOf(type)) + { } + public virtual IIndex CreateIndex(string path, string name) + { + return (IIndex)Activator.CreateInstance( + typeof(TypeIndexes<>).MakeGenericType(Type), + new object[] { path, name, KeySize, AllowDuplicates }); + } + } + + public class MGIndexColumnDefinition : MGIndexColumnDefinition, IViewColumnIndexDefinition + where T : IComparable + { + public MGIndexColumnDefinition(byte keySize) : base(typeof(T), keySize) { } + public MGIndexColumnDefinition() : base(typeof(T)) { } + public override IIndex CreateIndex(string path, string name) + { + return new TypeIndexes(path, name, KeySize, AllowDuplicates); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return new TypeIndexes(path, name, KeySize, AllowDuplicates); + } + } + + public class MMIndexColumnDefinition : IViewColumnIndexDefinition + where T : IComparable + { + public int PageSize { get; set; } = 8192; + public IPageSerializer KeySerializer { get; set; } + public IIndex CreateIndex(string path, string name) + { + return new MMIndex(path, name, PageSize, KeySerializer); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return CreateIndex(path, name); + } + } + + public class EnumIndexColumnDefinition : IViewColumnIndexDefinition + { + public Type Type { get; protected set; } + 
public EnumIndexColumnDefinition(Type type) + { + this.Type = type; + } + public virtual IIndex CreateIndex(string path, string name) + { + return (IIndex)Activator.CreateInstance( + typeof(EnumIntIndex<>).MakeGenericType(Type), + new object[] { path, name }); + } + } + + public class EnumIndexColumnDefinition : EnumIndexColumnDefinition, IViewColumnIndexDefinition + where T : struct, IConvertible + { + public EnumIndexColumnDefinition() : base(typeof(T)) { } + + public override IIndex CreateIndex(string path, string name) + { + return new EnumIntIndex(path, name); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return new EnumIntIndex(path, name); + } + } + + public class BoolIndexColumnDefinition : IViewColumnIndexDefinition + { + public IIndex CreateIndex(string path, string name) + { + return new BoolIndex(path, name, ".idx"); + } + } + + public class StringIndexColumnDefinition : MGIndexColumnDefinition + { + public StringIndexColumnDefinition(byte length) : base(length) { } + } + public class FullTextIndexColumnDefinition : IViewColumnIndexDefinition + { + public IIndex CreateIndex(string path, string name) + { + return new FullTextIndex(path, name, false, true); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return CreateIndex(path, name); + } + } + + public class ObjectToStringColumnDefinition : IViewColumnIndexDefinition + { + public ObjectToStringColumnDefinition(byte length) + { + this.MaxLength = length; + } + public byte MaxLength { get; set; } + public IIndex CreateIndex(string path, string name) + { + return new ObjectToStringIndex(path, name, MaxLength); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return CreateIndex(path, name); + } + } + + public class HashIndexColumnDefinition : IViewColumnIndexDefinition + { + public long DefaultSize { get; set; } = 4096; + public IPageSerializer KeySerializer { get; set; } + public IIndex 
CreateIndex(string path, string name) + { + return new HashIndex(path, name, DefaultSize, KeySerializer); + } + + IIndex IViewColumnIndexDefinition.CreateIndex(string path, string name) + { + return CreateIndex(path, name); + } + } + + public class NoIndexColumnDefinition : IViewColumnIndexDefinition + { + public IIndex CreateIndex(string path, string name) + { + return NoIndex.Instance; + } + } +} diff --git a/RaptorDB/Views/apimapper.cs b/RaptorDB/Views/apimapper.cs index 1aec737..b68d77f 100644 --- a/RaptorDB/Views/apimapper.cs +++ b/RaptorDB/Views/apimapper.cs @@ -1,131 +1,131 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Linq.Expressions; - -namespace RaptorDB.Views -{ - internal class apimapper : IMapAPI - { - public apimapper(ViewManager man, ViewHandler vhandler) - { - _viewmanager = man; - _viewhandler = vhandler; - } - - ViewManager _viewmanager; - ViewHandler _viewhandler; - private ILog _log = LogManager.GetLogger(typeof(apimapper)); - internal Dictionary> emit = new Dictionary>(); - internal Dictionary> emitobj = new Dictionary>(); - internal bool _RollBack = false; - - public void Log(string message) - { - _log.Debug(message); - } - - public object Fetch(Guid guid) - { - return _viewmanager.Fetch(guid); - } - - public void Emit(Guid docid, params object[] data) - { - if (data == null) - return; - List d = null; - if (emit.Count == 0) - { - d = new List(); - d.Add(data); - emit.Add(docid, d); - } - else - { - if (emit.TryGetValue(docid, out d)) - { - d.Add(data); - } - else - { - d = new List(); - d.Add(data); - emit.Add(docid, d); - } - } - } - - public void EmitObject(Guid docid, T doc) - { - if (doc == null) - return; - List d = null; - if (emitobj.Count == 0) - { - d = new List(); - d.Add(doc); - emitobj.Add(docid, d); - } - else - { - if (emitobj.TryGetValue(docid, out d)) - { - d.Add(doc); - } - else - { - d = new List(); - d.Add(doc); - emitobj.Add(docid, d); - } - } - } - - public 
void RollBack() - { - _RollBack = true; - } - - public int Count(string viewname) - { - return _viewmanager.Count(viewname, ""); - } - - public int Count(string ViewName, string Filter) - { - return _viewmanager.Count(ViewName, Filter); - } - - public Result Query(Expression> Filter) - { - return _viewmanager.Query(Filter, 0, -1); - } - - public Result Query(Expression> Filter, int start, int count) - { - return _viewmanager.Query(Filter, start, count); - } - - public Result Query(string Filter) - { - return _viewmanager.Query(Filter, 0, -1); - } - - public Result Query(string Filter, int start, int count) - { - return _viewmanager.Query(Filter, start, count); - } - - public int Count(Expression> Filter) - { - return _viewmanager.Count(Filter); - } - - public int NextRowNumber() - { - return _viewhandler.NextRowNumber(); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Linq.Expressions; + +namespace RaptorDB.Views +{ + internal class apimapper : IMapAPI + { + public apimapper(ViewManager man, IViewHandler vhandler) + { + _viewmanager = man; + _viewhandler = vhandler; + } + + ViewManager _viewmanager; + IViewHandler _viewhandler; + private ILog _log = LogManager.GetLogger(typeof(apimapper)); + internal Dictionary> emit = new Dictionary>(); + internal Dictionary> emitobj = new Dictionary>(); + internal bool _RollBack = false; + + public void Log(string message) + { + _log.Debug(message); + } + + public object Fetch(Guid guid) + { + return _viewmanager.Fetch(guid); + } + + public void Emit(Guid docid, params object[] data) + { + if (data == null) + return; + List d = null; + if (emit.Count == 0) + { + d = new List(); + d.Add(data); + emit.Add(docid, d); + } + else + { + if (emit.TryGetValue(docid, out d)) + { + d.Add(data); + } + else + { + d = new List(); + d.Add(data); + emit.Add(docid, d); + } + } + } + + public void EmitObject(Guid docid, T doc) + { + if (doc == null) + return; + List d = null; + if 
(emitobj.Count == 0) + { + d = new List(); + d.Add(doc); + emitobj.Add(docid, d); + } + else + { + if (emitobj.TryGetValue(docid, out d)) + { + d.Add(doc); + } + else + { + d = new List(); + d.Add(doc); + emitobj.Add(docid, d); + } + } + } + + public void RollBack() + { + _RollBack = true; + } + + public int Count(string viewname) + { + return _viewmanager.Count(viewname, null); + } + + public int Count(string ViewName, string Filter) + { + return _viewmanager.Count(ViewName, Filter); + } + + public Result Query(Expression> Filter) + { + return _viewmanager.Query(Filter, 0, -1); + } + + public Result Query(Expression> Filter, int start, int count) + { + return _viewmanager.Query(Filter, start, count); + } + + public Result Query(string Filter) + { + return _viewmanager.Query(Filter, 0, -1); + } + + public Result Query(string Filter, int start, int count) + { + return _viewmanager.Query(Filter, start, count); + } + + public int Count(Expression> Filter) + { + return _viewmanager.Count(Filter); + } + + public int NextRowNumber() + { + return _viewhandler.NextRowNumber(); + } + } +} diff --git a/RaptorDB/cron/CronDaemon.cs b/RaptorDB/cron/CronDaemon.cs index 09b3d59..a63601d 100644 --- a/RaptorDB/cron/CronDaemon.cs +++ b/RaptorDB/cron/CronDaemon.cs @@ -1,56 +1,56 @@ -using System; -using System.Collections.Generic; -using System.Timers; -using System.Threading; - -namespace RaptorDB -{ - internal class CronDaemon - { - private readonly System.Timers.Timer timer = new System.Timers.Timer(30000); - private readonly List cron_jobs = new List(); - private DateTime _last= DateTime.Now; - - public CronDaemon() - { - timer.AutoReset = true; - timer.Elapsed += timer_elapsed; - timer.Start(); - } - - public void AddJob(string schedule, ThreadStart action) - { - var cj = new CronJob(schedule, action); - cron_jobs.Add(cj); - } - - //public void RemoveJob(string schedule)//, ThreadStart action) - //{ - // var f = cron_jobs.Find((x) => { return x._cron_schedule._expression == 
schedule; }); - // if(f!=null) - // cron_jobs.Remove(f); - //} - //public void Start() - //{ - // timer.Start(); - //} - - public void Stop() - { - timer.Stop(); - - foreach (CronJob job in cron_jobs) - job.abort(); - } - - private void timer_elapsed(object sender, ElapsedEventArgs e) - { - if (DateTime.Now.Minute != _last.Minute) - { - _last = DateTime.Now; - foreach (CronJob job in cron_jobs) - job.execute(DateTime.Now); - } - } - } -} +using System; +using System.Collections.Generic; +using System.Timers; +using System.Threading; + +namespace RaptorDB +{ + internal class CronDaemon + { + private readonly System.Timers.Timer timer = new System.Timers.Timer(30000); + private readonly List cron_jobs = new List(); + private DateTime _last= DateTime.Now; + + public CronDaemon() + { + timer.AutoReset = true; + timer.Elapsed += timer_elapsed; + timer.Start(); + } + + public void AddJob(string schedule, ThreadStart action) + { + var cj = new CronJob(schedule, action); + cron_jobs.Add(cj); + } + + //public void RemoveJob(string schedule)//, ThreadStart action) + //{ + // var f = cron_jobs.Find((x) => { return x._cron_schedule._expression == schedule; }); + // if(f!=null) + // cron_jobs.Remove(f); + //} + //public void Start() + //{ + // timer.Start(); + //} + + public void Stop() + { + timer.Stop(); + + foreach (CronJob job in cron_jobs) + job.abort(); + } + + private void timer_elapsed(object sender, ElapsedEventArgs e) + { + if (DateTime.Now.Minute != _last.Minute) + { + _last = DateTime.Now; + foreach (CronJob job in cron_jobs) + job.execute(DateTime.Now); + } + } + } +} diff --git a/RaptorDB/cron/CronJob.cs b/RaptorDB/cron/CronJob.cs index 55b2b5a..9bb16c3 100644 --- a/RaptorDB/cron/CronJob.cs +++ b/RaptorDB/cron/CronJob.cs @@ -1,41 +1,41 @@ -using System; -using System.Threading; - -namespace RaptorDB -{ - internal class CronJob - { - internal readonly CronSchedule _cron_schedule = new CronSchedule(); - private readonly ThreadStart _thread_start; - internal Thread 
_thread; - - public CronJob(string schedule, ThreadStart thread_start) - { - _cron_schedule = new CronSchedule(schedule); - _thread_start = thread_start; - _thread = new Thread(thread_start); - } - - private object _lock = new object(); - public void execute(DateTime date_time) - { - lock (_lock) - { - if (!_cron_schedule.isTime(date_time)) - return; - - if (_thread.ThreadState == ThreadState.Running) - return; - - _thread = new Thread(_thread_start); - _thread.Start(); - } - } - - public void abort() - { - _thread.Abort(); - } - - } -} +using System; +using System.Threading; + +namespace RaptorDB +{ + internal class CronJob + { + internal readonly CronSchedule _cron_schedule = new CronSchedule(); + private readonly ThreadStart _thread_start; + internal Thread _thread; + + public CronJob(string schedule, ThreadStart thread_start) + { + _cron_schedule = new CronSchedule(schedule); + _thread_start = thread_start; + _thread = new Thread(thread_start); + } + + private object _lock = new object(); + public void execute(DateTime date_time) + { + lock (_lock) + { + if (!_cron_schedule.isTime(date_time)) + return; + + if (_thread.ThreadState == ThreadState.Running) + return; + + _thread = new Thread(_thread_start); + _thread.Start(); + } + } + + public void abort() + { + _thread.Abort(); + } + + } +} diff --git a/RaptorDB/cron/CronSchedule.cs b/RaptorDB/cron/CronSchedule.cs index d89d6a1..73e181b 100644 --- a/RaptorDB/cron/CronSchedule.cs +++ b/RaptorDB/cron/CronSchedule.cs @@ -1,205 +1,205 @@ -using System; -using System.Collections.Generic; -using System.Text.RegularExpressions; - -namespace RaptorDB -{ - internal class CronSchedule - { - #region Readonly Class Members - - readonly static Regex divided_regex = new Regex(@"(\*/\d+)"); - readonly static Regex range_regex = new Regex(@"(\d+\-\d+)\/?(\d+)?"); - readonly static Regex wild_regex = new Regex(@"(\*)"); - readonly static Regex list_regex = new Regex(@"(((\d+,)*\d+)+)"); - readonly static Regex validation_regex = 
new Regex(divided_regex + "|" + range_regex + "|" + wild_regex + "|" + list_regex); - - #endregion - - #region Private Instance Members - - internal readonly string _expression; - public List minutes; - public List hours; - public List days_of_month; - public List months; - public List days_of_week; - - #endregion - - #region Public Constructors - - public CronSchedule() - { - } - - public CronSchedule(string expressions) - { - this._expression = expressions; - generate(); - } - - #endregion - - #region Public Methods - - private bool isValid() - { - return isValid(this._expression); - } - - public bool isValid(string expression) - { - MatchCollection matches = validation_regex.Matches(expression); - return matches.Count > 0;//== 5; - } - - public bool isTime(DateTime date_time) - { - return minutes.Contains(date_time.Minute) && - hours.Contains(date_time.Hour) && - days_of_month.Contains(date_time.Day) && - months.Contains(date_time.Month) && - days_of_week.Contains((int)date_time.DayOfWeek); - } - - private void generate() - { - if (!isValid()) return; - - MatchCollection matches = validation_regex.Matches(this._expression); - - generate_minutes(matches[0].ToString()); - - if (matches.Count > 1) - generate_hours(matches[1].ToString()); - else - generate_hours("*"); - - if (matches.Count > 2) - generate_days_of_month(matches[2].ToString()); - else - generate_days_of_month("*"); - - if (matches.Count > 3) - generate_months(matches[3].ToString()); - else - generate_months("*"); - - if (matches.Count > 4) - generate_days_of_weeks(matches[4].ToString()); - else - generate_days_of_weeks("*"); - } - - private void generate_minutes(string match) - { - this.minutes = generate_values(match, 0, 60); - } - - private void generate_hours(string match) - { - this.hours = generate_values(match, 0, 24); - } - - private void generate_days_of_month(string match) - { - this.days_of_month = generate_values(match, 1, 32); - } - - private void generate_months(string match) - { - 
this.months = generate_values(match, 1, 13); - } - - private void generate_days_of_weeks(string match) - { - this.days_of_week = generate_values(match, 0, 7); - } - - private List generate_values(string configuration, int start, int max) - { - if (divided_regex.IsMatch(configuration)) return divided_array(configuration, start, max); - if (range_regex.IsMatch(configuration)) return range_array(configuration); - if (wild_regex.IsMatch(configuration)) return wild_array(configuration, start, max); - if (list_regex.IsMatch(configuration)) return list_array(configuration); - - return new List(); - } - - private List divided_array(string configuration, int start, int max) - { - if (!divided_regex.IsMatch(configuration)) - return new List(); - - List ret = new List(); - string[] split = configuration.Split("/".ToCharArray()); - int divisor = int.Parse(split[1]); - - for (int i = start; i < max; ++i) - if (i % divisor == 0) - ret.Add(i); - - return ret; - } - - private List range_array(string configuration) - { - if (!range_regex.IsMatch(configuration)) - return new List(); - - List ret = new List(); - string[] split = configuration.Split("-".ToCharArray()); - int start = int.Parse(split[0]); - int end = 0; - if (split[1].Contains("/")) - { - split = split[1].Split("/".ToCharArray()); - end = int.Parse(split[0]); - int divisor = int.Parse(split[1]); - - for (int i = start; i < end; ++i) - if (i % divisor == 0) - ret.Add(i); - return ret; - } - else - end = int.Parse(split[1]); - - for (int i = start; i <= end; ++i) - ret.Add(i); - - return ret; - } - - private List wild_array(string configuration, int start, int max) - { - if (!wild_regex.IsMatch(configuration)) - return new List(); - - List ret = new List(); - - for (int i = start; i < max; ++i) - ret.Add(i); - - return ret; - } - - private List list_array(string configuration) - { - if (!list_regex.IsMatch(configuration)) - return new List(); - - List ret = new List(); - - string[] split = 
configuration.Split(",".ToCharArray()); - - foreach (string s in split) - ret.Add(int.Parse(s)); - - return ret; - } - - #endregion - } -} +using System; +using System.Collections.Generic; +using System.Text.RegularExpressions; + +namespace RaptorDB +{ + internal class CronSchedule + { + #region Readonly Class Members + + readonly static Regex divided_regex = new Regex(@"(\*/\d+)"); + readonly static Regex range_regex = new Regex(@"(\d+\-\d+)\/?(\d+)?"); + readonly static Regex wild_regex = new Regex(@"(\*)"); + readonly static Regex list_regex = new Regex(@"(((\d+,)*\d+)+)"); + readonly static Regex validation_regex = new Regex(divided_regex + "|" + range_regex + "|" + wild_regex + "|" + list_regex); + + #endregion + + #region Private Instance Members + + internal readonly string _expression; + public List minutes; + public List hours; + public List days_of_month; + public List months; + public List days_of_week; + + #endregion + + #region Public Constructors + + public CronSchedule() + { + } + + public CronSchedule(string expressions) + { + this._expression = expressions; + generate(); + } + + #endregion + + #region Public Methods + + private bool isValid() + { + return isValid(this._expression); + } + + public bool isValid(string expression) + { + MatchCollection matches = validation_regex.Matches(expression); + return matches.Count > 0;//== 5; + } + + public bool isTime(DateTime date_time) + { + return minutes.Contains(date_time.Minute) && + hours.Contains(date_time.Hour) && + days_of_month.Contains(date_time.Day) && + months.Contains(date_time.Month) && + days_of_week.Contains((int)date_time.DayOfWeek); + } + + private void generate() + { + if (!isValid()) return; + + MatchCollection matches = validation_regex.Matches(this._expression); + + generate_minutes(matches[0].ToString()); + + if (matches.Count > 1) + generate_hours(matches[1].ToString()); + else + generate_hours("*"); + + if (matches.Count > 2) + generate_days_of_month(matches[2].ToString()); + else + 
generate_days_of_month("*"); + + if (matches.Count > 3) + generate_months(matches[3].ToString()); + else + generate_months("*"); + + if (matches.Count > 4) + generate_days_of_weeks(matches[4].ToString()); + else + generate_days_of_weeks("*"); + } + + private void generate_minutes(string match) + { + this.minutes = generate_values(match, 0, 60); + } + + private void generate_hours(string match) + { + this.hours = generate_values(match, 0, 24); + } + + private void generate_days_of_month(string match) + { + this.days_of_month = generate_values(match, 1, 32); + } + + private void generate_months(string match) + { + this.months = generate_values(match, 1, 13); + } + + private void generate_days_of_weeks(string match) + { + this.days_of_week = generate_values(match, 0, 7); + } + + private List generate_values(string configuration, int start, int max) + { + if (divided_regex.IsMatch(configuration)) return divided_array(configuration, start, max); + if (range_regex.IsMatch(configuration)) return range_array(configuration); + if (wild_regex.IsMatch(configuration)) return wild_array(configuration, start, max); + if (list_regex.IsMatch(configuration)) return list_array(configuration); + + return new List(); + } + + private List divided_array(string configuration, int start, int max) + { + if (!divided_regex.IsMatch(configuration)) + return new List(); + + List ret = new List(); + string[] split = configuration.Split("/".ToCharArray()); + int divisor = int.Parse(split[1]); + + for (int i = start; i < max; ++i) + if (i % divisor == 0) + ret.Add(i); + + return ret; + } + + private List range_array(string configuration) + { + if (!range_regex.IsMatch(configuration)) + return new List(); + + List ret = new List(); + string[] split = configuration.Split("-".ToCharArray()); + int start = int.Parse(split[0]); + int end = 0; + if (split[1].Contains("/")) + { + split = split[1].Split("/".ToCharArray()); + end = int.Parse(split[0]); + int divisor = int.Parse(split[1]); + + for (int i = 
start; i < end; ++i) + if (i % divisor == 0) + ret.Add(i); + return ret; + } + else + end = int.Parse(split[1]); + + for (int i = start; i <= end; ++i) + ret.Add(i); + + return ret; + } + + private List wild_array(string configuration, int start, int max) + { + if (!wild_regex.IsMatch(configuration)) + return new List(); + + List ret = new List(); + + for (int i = start; i < max; ++i) + ret.Add(i); + + return ret; + } + + private List list_array(string configuration) + { + if (!list_regex.IsMatch(configuration)) + return new List(); + + List ret = new List(); + + string[] split = configuration.Split(",".ToCharArray()); + + foreach (string s in split) + ret.Add(int.Parse(s)); + + return ret; + } + + #endregion + } +} diff --git a/RaptorDBServer/Installer.cs b/RaptorDBServer/Installer.cs index 3c90e7d..2ecfaac 100644 --- a/RaptorDBServer/Installer.cs +++ b/RaptorDBServer/Installer.cs @@ -1,36 +1,36 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.ComponentModel; -using System.Configuration.Install; -using System.ServiceProcess; - -namespace RaptorDBServer -{ - [RunInstaller(true)] - public class CustomServiceInstaller : Installer - { - private ServiceProcessInstaller process; - private ServiceInstaller service; - - public CustomServiceInstaller() - { - process = new ServiceProcessInstaller(); - - process.Account = ServiceAccount.LocalSystem; - - service = new ServiceInstaller(); - service.ServiceName = Program.InstallServiceName; - - Installers.Add(process); - Installers.Add(service); - } - - protected override void OnBeforeInstall(System.Collections.IDictionary savedState) - { - Context.Parameters["assemblypath"] = "\"" + this.GetType().Assembly.Location + "\" -p " + Program.Port + " -f \"" + Program.Path + "\""; - base.OnBeforeInstall(savedState); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.ComponentModel; +using 
System.Configuration.Install; +using System.ServiceProcess; + +namespace RaptorDBServer +{ + [RunInstaller(true)] + public class CustomServiceInstaller : Installer + { + private ServiceProcessInstaller process; + private ServiceInstaller service; + + public CustomServiceInstaller() + { + process = new ServiceProcessInstaller(); + + process.Account = ServiceAccount.LocalSystem; + + service = new ServiceInstaller(); + service.ServiceName = Program.InstallServiceName; + + Installers.Add(process); + Installers.Add(service); + } + + protected override void OnBeforeInstall(System.Collections.IDictionary savedState) + { + Context.Parameters["assemblypath"] = "\"" + this.GetType().Assembly.Location + "\" -p " + Program.Port + " -f \"" + Program.Path + "\""; + base.OnBeforeInstall(savedState); + } + } +} diff --git a/RaptorDBServer/Program.cs b/RaptorDBServer/Program.cs index 487eb77..6dfaace 100644 --- a/RaptorDBServer/Program.cs +++ b/RaptorDBServer/Program.cs @@ -1,103 +1,109 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.ServiceProcess; -using System.Text; -using System.IO; -using System.Reflection; -using System.Configuration.Install; - -namespace RaptorDBServer -{ - static class Program - { - public static string InstallServiceName; - public static int Port = 90; - public static string Path = ""; - /// - /// The main entry point for the application. 
- /// - static void Main(string[] args) - { - if (args.Length == 0) - { - Console.WriteLine(@" -Run with : - -i install service - -u uninstall service - -n [default = RaptorDB] - -p [default = 90] - -f -"); - return; - } - - string name = "RaptorDB"; - string path = Directory.GetCurrentDirectory(); - int port = 90; - bool install = false; - bool uninstall = false; - - for (int i = 0; i < args.Length; i++) - { - if (args[i].Trim() == "-i") install = true; - if (args[i].Trim() == "-u") uninstall = true; - if (args[i].Trim() == "-p") port = int.Parse(args[++i]); - if (args[i].Trim() == "-f") path = args[++i].Trim(); - if (args[i].Trim() == "-n") name = "RaptorDB - " + args[++i].Trim(); - } - - InstallServiceName = name; - Port = port; - Path = path; - - if (install) - { - if (IsServiceInstalled(name)) - { - Console.WriteLine(); - Console.WriteLine("Service exists : " + name); - return; - } - // Install service - ManagedInstallerClass.InstallHelper(new string[] { Assembly.GetExecutingAssembly().Location }); - return; - } - else if (uninstall) - { - if (IsServiceInstalled(name) == false) - return; - // Uninstall service - ManagedInstallerClass.InstallHelper(new string[] { "/u", Assembly.GetExecutingAssembly().Location }); - return; - } - - if (Environment.UserInteractive == false) - ServiceBase.Run(new Service1()); - else - Dostart(); - } - - private static void Dostart() - { - var _raptor = new RaptorDB.RaptorDBServer(Program.Port, Program.Path); - Console.WriteLine("Press Enter to shutdown..."); - Console.ReadLine(); - _raptor.Shutdown(); - } - - private static bool IsServiceInstalled(string serviceName) - { - // Get a list of current services - ServiceController[] services = ServiceController.GetServices(); - - // Look for our service - foreach (ServiceController service in services) - if (String.Compare(serviceName, service.ServiceName, true) == 0) - return true; - - // Return - return false; - } - } -} +using System; +using System.Collections.Generic; +using 
System.Linq; +using System.ServiceProcess; +using System.Text; +using System.IO; +using System.Reflection; +using System.Configuration.Install; + +namespace RaptorDBServer +{ + static class Program + { + public static string InstallServiceName; + public static int Port = 90; + public static string Path = ""; + /// + /// The main entry point for the application. + /// + static void Main(string[] args) + { + if (args.Length == 0) + { + Console.WriteLine(@" +Run with : + -i install service + -u uninstall service + -n [default = RaptorDB] + -p [default = 90] + -f +"); + if (Environment.UserInteractive) + { + Console.Write("wating for enter to exit ..."); + Console.ReadLine(); + } + return; + } + + string name = "RaptorDB"; + string path = Directory.GetCurrentDirectory(); + int port = 90; + bool install = false; + bool uninstall = false; + + for (int i = 0; i < args.Length; i++) + { + if (args[i].Trim() == "-i") install = true; + if (args[i].Trim() == "-u") uninstall = true; + if (args[i].Trim() == "-p") port = int.Parse(args[++i]); + if (args[i].Trim() == "-f") path = args[++i].Trim(); + if (args[i].Trim() == "-n") name = "RaptorDB - " + args[++i].Trim(); + } + + InstallServiceName = name; + Port = port; + Path = path; + + if (install) + { + if (IsServiceInstalled(name)) + { + Console.WriteLine(); + Console.WriteLine("Service exists : " + name); + return; + } + // Install service + ManagedInstallerClass.InstallHelper(new string[] { Assembly.GetExecutingAssembly().Location }); + return; + } + else if (uninstall) + { + if (IsServiceInstalled(name) == false) + return; + // Uninstall service + ManagedInstallerClass.InstallHelper(new string[] { "/u", Assembly.GetExecutingAssembly().Location }); + return; + } + + if (Environment.UserInteractive == false) + ServiceBase.Run(new Service1()); + else + Dostart(); + } + + private static void Dostart() + { + Console.WriteLine("Starting db engine, path: {0}", System.IO.Path.GetFullPath(Path)); + var _raptor = new 
RaptorDB.RaptorDBServer(Program.Port, Program.Path); + Console.WriteLine("Press Enter to shutdown..."); + Console.ReadLine(); + _raptor.Shutdown(); + } + + private static bool IsServiceInstalled(string serviceName) + { + // Get a list of current services + ServiceController[] services = ServiceController.GetServices(); + + // Look for our service + foreach (ServiceController service in services) + if (String.Compare(serviceName, service.ServiceName, true) == 0) + return true; + + // Return + return false; + } + } +} diff --git a/RaptorDBServer/Properties/AssemblyInfo.cs b/RaptorDBServer/Properties/AssemblyInfo.cs index 900e527..bb19e06 100644 --- a/RaptorDBServer/Properties/AssemblyInfo.cs +++ b/RaptorDBServer/Properties/AssemblyInfo.cs @@ -1,11 +1,11 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -[assembly: AssemblyTitle("RaptorDBServer")] -[assembly: AssemblyDescription("Stand alone server or Windows service loader")] -[assembly: AssemblyProduct("RaptorDBServer")] - - - - +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +[assembly: AssemblyTitle("RaptorDBServer")] +[assembly: AssemblyDescription("Stand alone server or Windows service loader")] +[assembly: AssemblyProduct("RaptorDBServer")] + + + + diff --git a/RaptorDBServer/RaptorDBServer.csproj b/RaptorDBServer/RaptorDBServer.csproj index b6b7f56..87e53b1 100644 --- a/RaptorDBServer/RaptorDBServer.csproj +++ b/RaptorDBServer/RaptorDBServer.csproj @@ -1,92 +1,110 @@ - - - - Debug - x86 - 8.0.30703 - 2.0 - {3EEB5C76-8216-4013-915D-94402BB320F6} - Exe - Properties - RaptorDBServer - RaptorDBServer - v4.0 - 512 - - - - AnyCPU - true - full - true - ..\Output\server\ - DEBUG;TRACE - prompt - 4 - false - false - - - AnyCPU - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - - - - - true - - - ..\raptordb.snk - - - - - - - - - BuildVersion.cs - - - Component - - - Component - - - 
Service1.cs - - - - - - - Service1.cs - - - - - {45F6BE30-989A-4749-B6A0-69099C8661F4} - RaptorDB - - - - - md "$(SolutionDir)nuget" -copy "$(TargetPath)" "$(SolutionDir)nuget\$(TargetFileName)" - - + + + + Debug + x86 + 8.0.30703 + 2.0 + {3EEB5C76-8216-4013-915D-94402BB320F6} + Exe + Properties + RaptorDBServer + RaptorDBServer + v4.0 + 512 + + + + AnyCPU + true + full + false + ..\Output\server\ + DEBUG;TRACE + prompt + 4 + false + false + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + false + + + + + + true + + + ..\raptordb.snk + + + true + bin\x64\Debug\ + DEBUG;TRACE + full + x64 + prompt + MinimumRecommendedRules.ruleset + + + bin\x64\Release\ + TRACE + true + pdbonly + x64 + prompt + MinimumRecommendedRules.ruleset + + + + + + + + + BuildVersion.cs + + + Component + + + Component + + + Service1.cs + + + + + + + Service1.cs + + + + + {45F6BE30-989A-4749-B6A0-69099C8661F4} + RaptorDB + + + + + md "$(SolutionDir)nuget" +copy "$(TargetPath)" "$(SolutionDir)nuget\$(TargetFileName)" + + \ No newline at end of file diff --git a/RaptorDBServer/Service1.Designer.cs b/RaptorDBServer/Service1.Designer.cs index 7f13c46..0ba54b9 100644 --- a/RaptorDBServer/Service1.Designer.cs +++ b/RaptorDBServer/Service1.Designer.cs @@ -1,40 +1,40 @@ -namespace RaptorDBServer -{ - partial class Service1 - { - /// - /// Required designer variable. - /// - private System.ComponentModel.IContainer components = null; - - /// - /// Clean up any resources being used. - /// - /// true if managed resources should be disposed; otherwise, false. - protected override void Dispose(bool disposing) - { - if (disposing && (components != null)) - { - components.Dispose(); - } - base.Dispose(disposing); - } - - #region Component Designer generated code - - /// - /// Required method for Designer support - do not modify - /// the contents of this method with the code editor. 
- /// - private void InitializeComponent() - { - // - // Service1 - // - this.ServiceName = "RaptorDB"; - - } - - #endregion - } -} +namespace RaptorDBServer +{ + partial class Service1 + { + /// + /// Required designer variable. + /// + private System.ComponentModel.IContainer components = null; + + /// + /// Clean up any resources being used. + /// + /// true if managed resources should be disposed; otherwise, false. + protected override void Dispose(bool disposing) + { + if (disposing && (components != null)) + { + components.Dispose(); + } + base.Dispose(disposing); + } + + #region Component Designer generated code + + /// + /// Required method for Designer support - do not modify + /// the contents of this method with the code editor. + /// + private void InitializeComponent() + { + // + // Service1 + // + this.ServiceName = "RaptorDB"; + + } + + #endregion + } +} diff --git a/RaptorDBServer/Service1.cs b/RaptorDBServer/Service1.cs index 3d0a5a5..d7b5988 100644 --- a/RaptorDBServer/Service1.cs +++ b/RaptorDBServer/Service1.cs @@ -1,38 +1,38 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics; -using System.Linq; -using System.ServiceProcess; -using System.Text; -using System.IO; -using System.Threading.Tasks; - -namespace RaptorDBServer -{ - public partial class Service1 : ServiceBase - { - public Service1() - { - InitializeComponent(); - } - - RaptorDB.RaptorDBServer _raptor; - - protected override void OnStart(string[] args) - { - Directory.SetCurrentDirectory(Path.GetDirectoryName(this.GetType().Assembly.Location)); - _raptor = new RaptorDB.RaptorDBServer(Program.Port, Program.Path); - } - - protected override void OnStop() - { - _raptor.Shutdown(); - } - - protected override void OnShutdown() - { - _raptor.Shutdown(); - } - } -} +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Linq; +using System.ServiceProcess; +using 
System.Text; +using System.IO; +using System.Threading.Tasks; + +namespace RaptorDBServer +{ + public partial class Service1 : ServiceBase + { + public Service1() + { + InitializeComponent(); + } + + RaptorDB.RaptorDBServer _raptor; + + protected override void OnStart(string[] args) + { + Directory.SetCurrentDirectory(Path.GetDirectoryName(this.GetType().Assembly.Location)); + _raptor = new RaptorDB.RaptorDBServer(Program.Port, Program.Path); + } + + protected override void OnStop() + { + _raptor.Shutdown(); + } + + protected override void OnShutdown() + { + _raptor.Shutdown(); + } + } +} diff --git a/RaptorDBServer/Service1.resx b/RaptorDBServer/Service1.resx index e5858cc..34987b2 100644 --- a/RaptorDBServer/Service1.resx +++ b/RaptorDBServer/Service1.resx @@ -1,123 +1,123 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/microsoft-resx - - - 2.0 - - - System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - False - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + False + \ No newline at end of file diff --git a/RaptorDBTest.sln b/RaptorDBTest.sln index 1ff0cf9..de0135f 100644 --- a/RaptorDBTest.sln +++ b/RaptorDBTest.sln @@ -1,92 +1,159 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 2013 -VisualStudioVersion = 12.0.31101.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"RaptorDB", "RaptorDB\RaptorDB.csproj", "{45F6BE30-989A-4749-B6A0-69099C8661F4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tests", "testing\tests.csproj", "{C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "datagridbinding", "datagridbinding\datagridbinding.csproj", "{4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RaptorDB.Common", "RaptorDB.Common\RaptorDB.Common.csproj", "{32331D51-5BE0-41E2-AF1A-9B086C5AE809}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Views", "Views\Views.csproj", "{A1347486-8D54-4E17-8A22-76EFE61BF37B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RaptorDBServer", "RaptorDBServer\RaptorDBServer.csproj", "{3EEB5C76-8216-4013-915D-94402BB320F6}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|Mixed Platforms = Debug|Mixed Platforms - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|Mixed Platforms = Release|Mixed Platforms - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|x86.ActiveCfg = Debug|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Any CPU.Build.0 = Release|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Mixed Platforms.Build.0 = Release|Any 
CPU - {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|x86.ActiveCfg = Release|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|x86.ActiveCfg = Debug|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Any CPU.Build.0 = Release|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|x86.ActiveCfg = Release|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|x86.ActiveCfg = Debug|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Any CPU.Build.0 = Release|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|x86.ActiveCfg = Release|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Any CPU.Build.0 = Debug|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Mixed 
Platforms.ActiveCfg = Debug|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|x86.ActiveCfg = Debug|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Any CPU.ActiveCfg = Release|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Any CPU.Build.0 = Release|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|x86.ActiveCfg = Release|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|x86.ActiveCfg = Debug|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Any CPU.Build.0 = Release|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|x86.ActiveCfg = Release|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|x86.ActiveCfg = Debug|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Any CPU.Build.0 = Release|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|x86.ActiveCfg = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 14 +VisualStudioVersion = 14.0.23107.0 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RaptorDB", "RaptorDB\RaptorDB.csproj", "{45F6BE30-989A-4749-B6A0-69099C8661F4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tests", "testing\tests.csproj", "{C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "datagridbinding", "datagridbinding\datagridbinding.csproj", "{4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RaptorDB.Common", "RaptorDB.Common\RaptorDB.Common.csproj", "{32331D51-5BE0-41E2-AF1A-9B086C5AE809}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Views", "Views\Views.csproj", "{A1347486-8D54-4E17-8A22-76EFE61BF37B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RaptorDBServer", "RaptorDBServer\RaptorDBServer.csproj", "{3EEB5C76-8216-4013-915D-94402BB320F6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "playground", "playground\playground.csproj", "{7B90D541-C37E-44D6-B344-35ECA59E9A14}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F7A768F5-717F-40D5-9E15-C87ABE1F1E5E}" + ProjectSection(SolutionItems) = preProject + Performance1.psess = Performance1.psess + EndProjectSection +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GenericPointerHelpers", "GenericPointerHelpers\GenericPointerHelpers.csproj", "{FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|Mixed Platforms = Debug|Mixed Platforms + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|Mixed Platforms = Release|Mixed Platforms + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|x64.ActiveCfg = Debug|x64 + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|x64.Build.0 = Debug|x64 + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Debug|x86.ActiveCfg = Debug|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Any CPU.Build.0 = Release|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|x64.ActiveCfg = Release|x64 + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|x64.Build.0 = Release|x64 + {45F6BE30-989A-4749-B6A0-69099C8661F4}.Release|x86.ActiveCfg = Release|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU 
+ {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|x64.ActiveCfg = Debug|x64 + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|x64.Build.0 = Debug|x64 + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Debug|x86.ActiveCfg = Debug|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Any CPU.Build.0 = Release|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|x64.ActiveCfg = Release|x64 + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|x64.Build.0 = Release|x64 + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90}.Release|x86.ActiveCfg = Release|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|x64.ActiveCfg = Debug|x64 + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|x64.Build.0 = Debug|x64 + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Debug|x86.ActiveCfg = Debug|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Any CPU.Build.0 = Release|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|x64.ActiveCfg = Release|x64 + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|x64.Build.0 = Release|x64 + 
{4B90D800-C8C9-45CA-BD8C-DD5B47F78C41}.Release|x86.ActiveCfg = Release|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Any CPU.Build.0 = Debug|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|x64.ActiveCfg = Debug|x64 + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|x64.Build.0 = Debug|x64 + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Debug|x86.ActiveCfg = Debug|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Any CPU.ActiveCfg = Release|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Any CPU.Build.0 = Release|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|x64.ActiveCfg = Release|x64 + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|x64.Build.0 = Release|x64 + {32331D51-5BE0-41E2-AF1A-9B086C5AE809}.Release|x86.ActiveCfg = Release|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|x64.ActiveCfg = Debug|x64 + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|x64.Build.0 = Debug|x64 + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Debug|x86.ActiveCfg = Debug|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Any CPU.Build.0 = Release|Any CPU + 
{A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|x64.ActiveCfg = Release|x64 + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|x64.Build.0 = Release|x64 + {A1347486-8D54-4E17-8A22-76EFE61BF37B}.Release|x86.ActiveCfg = Release|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|x64.ActiveCfg = Debug|x64 + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|x64.Build.0 = Debug|x64 + {3EEB5C76-8216-4013-915D-94402BB320F6}.Debug|x86.ActiveCfg = Debug|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Any CPU.Build.0 = Release|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|x64.ActiveCfg = Release|x64 + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|x64.Build.0 = Release|x64 + {3EEB5C76-8216-4013-915D-94402BB320F6}.Release|x86.ActiveCfg = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|x64.ActiveCfg = Debug|x64 + 
{7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|x64.Build.0 = Debug|x64 + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|x86.ActiveCfg = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Debug|x86.Build.0 = Debug|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|Any CPU.Build.0 = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|x64.ActiveCfg = Release|x64 + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|x64.Build.0 = Release|x64 + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|x86.ActiveCfg = Release|Any CPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14}.Release|x86.Build.0 = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|x64.ActiveCfg = Debug|x64 + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|x64.Build.0 = Debug|x64 + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|x86.ActiveCfg = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Debug|x86.Build.0 = Debug|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|Any CPU.Build.0 = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|x64.ActiveCfg = Release|x64 + 
{FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|x64.Build.0 = Release|x64 + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|x86.ActiveCfg = Release|Any CPU + {FF66CACB-E66E-4DB6-86F6-4F69D5E40B2B}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/RaptorDB_Doc.nuspec b/RaptorDB_Doc.nuspec index f521e2e..d292c01 100644 --- a/RaptorDB_Doc.nuspec +++ b/RaptorDB_Doc.nuspec @@ -1,18 +1,18 @@ - - - - RaptorDB_doc - 3.2.12 - RaptorDB Document Database - mgholam - - http://raptordb.codeplex.com/ - false - NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication, works on Linux) - - - - - - + + + + RaptorDB_doc + 3.2.13 + RaptorDB Document Database + mgholam + + http://raptordb.codeplex.com/ + false + NoSql, JSON based, Document store database with compiled .net map functions and automatic hybrid bitmap indexing and LINQ query filters (now with standalone Server mode, Backup and Active Restore, Transactions, Server side queries, MonoDroid support, HQ-Branch Replication, works on Linux) + + + + + + \ No newline at end of file diff --git a/Views/Class1.cs b/Views/Class1.cs index ddef335..7f48e1e 100644 --- a/Views/Class1.cs +++ b/Views/Class1.cs @@ -1,154 +1,148 @@ -using System; -using System.Collections.Generic; -using RaptorDB; - - -namespace SampleViews -{ - #region [ class definitions ] - //public enum State - //{ - // Open, - // Closed, - // Approved - //} - - public class LineItem - { - public decimal QTY { get; set; } - public string Product { get; set; } - public decimal Price { get; set; } - public decimal Discount { get; set; } - } - - public class SalesInvoice - { - public SalesInvoice() - { - ID = Guid.NewGuid(); - } - - public 
Guid ID { get; set; } - public string CustomerName { get; set; } - public string NoCase { get; set; } - public string Address { get; set; } - public List Items { get; set; } - public DateTime Date { get; set; } - public int Serial { get; set; } - public byte Status { get; set; } - public bool Approved { get; set; } - //public State InvoiceState { get; set; } - } - #endregion - - #region [ views ] - - public class SalesInvoiceViewRowSchema : RDBSchema - { - //[FullText] - public string CustomerName; - [CaseInsensitive] - [StringIndexLength(255)] - public string NoCase; - public DateTime Date; - public string Address; - public int Serial; - public byte Status; - public bool? Approved; - //public State InvoiceState; - } - - [RegisterView] - public class SalesInvoiceView : View - { - public SalesInvoiceView() - { - this.Name = "SalesInvoice"; - this.Description = "A primary view for SalesInvoices"; - this.isPrimaryList = true; - this.isActive = true; - this.BackgroundIndexing = true; - this.Version = 6; - //// uncomment the following for transaction mode - //this.TransactionMode = true; - - this.Schema = typeof(SalesInvoiceViewRowSchema); - - this.FullTextColumns.Add("customername"); // this or the attribute - this.FullTextColumns.Add("address"); - - this.CaseInsensitiveColumns.Add("nocase"); // this or the attribute - //this.StringIndexLength.Add("nocase", 255); - - this.Mapper = (api, docid, doc) => - { - //int c = api.Count("SalesItemRows", "product = \"prod 1\""); - if (doc.Serial == 0) - api.RollBack(); - api.EmitObject(docid, doc); - }; - } - } - - public class SalesItemRowsViewRowSchema : RDBSchema - { - public string Product; - public decimal QTY; - public decimal Price; - public decimal Discount; - } - - [RegisterView] - public class SalesItemRowsView : View - { - public SalesItemRowsView() - { - this.Name = "SalesItemRows"; - this.Description = ""; - this.isPrimaryList = false; - this.isActive = true; - this.BackgroundIndexing = true; - - this.Schema = 
typeof(SalesItemRowsViewRowSchema); - - this.Mapper = (api, docid, doc) => - { - if (doc.Status == 3 && doc.Items != null) - foreach (var item in doc.Items) - api.EmitObject(docid, item); - }; - } - } - - public class NewViewRowSchema : RDBSchema - { - public string Product; - public decimal QTY; - public decimal Price; - public decimal Discount; - } - - [RegisterView] - public class newview : View - { - public newview() - { - this.Name = "newview"; - this.Description = ""; - this.isPrimaryList = false; - this.isActive = true; - this.BackgroundIndexing = true; - this.Version = 1; - - this.Schema = typeof(NewViewRowSchema); - - this.Mapper = (api, docid, doc) => - { - if (doc.Status == 3 && doc.Items != null) - foreach (var i in doc.Items) - api.EmitObject(docid, i); - }; - } - } - #endregion -} +using System; +using System.Collections.Generic; +using RaptorDB; +using RaptorDB.Views; + + +namespace SampleViews +{ + #region [ class definitions ] + //public enum State + //{ + // Open, + // Closed, + // Approved + //} + + public class LineItem + { + public decimal QTY { get; set; } + public string Product { get; set; } + public decimal Price { get; set; } + public decimal Discount { get; set; } + } + + public class SalesInvoice + { + public SalesInvoice() + { + ID = Guid.NewGuid(); + } + + public Guid ID { get; set; } + public string CustomerName { get; set; } + public string NoCase { get; set; } + public string Address { get; set; } + public List Items { get; set; } + public DateTime Date { get; set; } + public int Serial { get; set; } + public byte Status { get; set; } + public bool Approved { get; set; } + //public State InvoiceState { get; set; } + } + #endregion + + #region [ views ] + + public class SalesInvoiceViewRowSchema : RDBSchema + { + //[FullText] + public string CustomerName; + [CaseInsensitive] + [StringIndexLengthAttribute(255)] + public string NoCase; + public DateTime Date; + public string Address; + public int Serial; + public byte Status; + public 
bool Approved; + //public State InvoiceState; + } + + [RegisterView] + public class SalesInvoiceView : View + { + public SalesInvoiceView() + { + this.Name = "SalesInvoice"; + this.Description = "A primary view for SalesInvoices"; + this.isPrimaryList = true; + this.isActive = true; + this.BackgroundIndexing = true; + this.Version = 6; + //// uncomment the following for transaction mode + //this.TransactionMode = true; + + this.SetFullTextIndex(s => s.CustomerName); + this.SetFullTextIndex(s => s.Address); + + this.SetStringIndex(s => s.NoCase, length: 255, ignoreCase: true); + + this.Mapper = (api, docid, doc) => + { + //int c = api.Count("SalesItemRows", "product = \"prod 1\""); + if (doc.Serial == 0) + api.RollBack(); + api.EmitObject(docid, doc); + }; + } + } + + public class SalesItemRowsViewRowSchema : RDBSchema + { + public string Product; + public decimal QTY; + public decimal Price; + public decimal Discount; + } + + [RegisterView] + public class SalesItemRowsView : View + { + public SalesItemRowsView() + { + this.Name = "SalesItemRows"; + this.Description = ""; + this.isPrimaryList = false; + this.isActive = true; + this.BackgroundIndexing = true; + + this.Mapper = (api, docid, doc) => + { + if (doc.Status == 3 && doc.Items != null) + foreach (var item in doc.Items) + api.EmitObject(docid, item); + }; + } + } + + public class NewViewRowSchema : RDBSchema + { + public string Product; + public decimal QTY; + public decimal Price; + public decimal Discount; + } + + [RegisterView] + public class newview : View + { + public newview() + { + this.Name = "newview"; + this.Description = ""; + this.isPrimaryList = false; + this.isActive = true; + this.BackgroundIndexing = true; + this.Version = 1; + + this.Mapper = (api, docid, doc) => + { + if (doc.Status == 3 && doc.Items != null) + foreach (var i in doc.Items) + api.EmitObject(docid, i); + }; + } + } + #endregion +} diff --git a/Views/Properties/AssemblyInfo.cs b/Views/Properties/AssemblyInfo.cs index 
e2c85f2..d5e4fb9 100644 --- a/Views/Properties/AssemblyInfo.cs +++ b/Views/Properties/AssemblyInfo.cs @@ -1,36 +1,36 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("Views")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("Microsoft")] -[assembly: AssemblyProduct("Views")] -[assembly: AssemblyCopyright("Copyright © Microsoft 2012")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. -[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("b3b61807-a394-421f-88f3-a340dc93f53b")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("Views")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Microsoft")] +[assembly: AssemblyProduct("Views")] +[assembly: AssemblyCopyright("Copyright © Microsoft 2012")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("b3b61807-a394-421f-88f3-a340dc93f53b")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/Views/ServerSide.cs b/Views/ServerSide.cs index e809829..5778875 100644 --- a/Views/ServerSide.cs +++ b/Views/ServerSide.cs @@ -1,37 +1,54 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using RaptorDB.Common; -using SampleViews; -using RaptorDB; - -namespace Views -{ - public class ServerSide - { - // so the result can be serialized and is not an anonymous type - // since this uses fields, derive from the BindableFields for data binding to work - public class sumtype : RaptorDB.BindableFields - { - public string Product; - public decimal TotalPrice; - public decimal TotalQTY; - } - - public static List Sum_Products_based_on_filter(IRaptorDB rap, string filter) - { - var q = rap.Query(filter); - - var res = from x in q.Rows - group x by x.Product into g - select new sumtype // avoid anonymous types - { - Product = g.Key, - TotalPrice = 
g.Sum(p => p.Price), - TotalQTY = g.Sum(p => p.QTY) - }; - - return res.ToList(); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using RaptorDB.Common; +using SampleViews; +using RaptorDB; + +namespace Views +{ + public class ServerSide + { + // so the result can be serialized and is not an anonymous type + // since this uses fields, derive from the BindableFields for data binding to work + public class sumtype : BindableFields + { + public string Product; + public decimal TotalPrice; + public decimal TotalQTY; + } + + public static List Sum_Products_based_on_filter(IRaptorDB rap, string filter) + { + var q = rap.Query(filter); + + var res = from x in q.Rows + group x by x.Product into g + select new sumtype // avoid anonymous types + { + Product = g.Key, + TotalPrice = g.Sum(p => p.Price), + TotalQTY = g.Sum(p => p.QTY) + }; + + return res.ToList(); + } + + public static List DoServerSideSumOnRaptor(IRaptorDB rap, string productName) + { + return rap.ServerSide((r, f) => + { + var q = r.Query(i => i.Product == productName); + var res = from x in q.Rows + group x by x.Product into g + select new sumtype + { + Product = g.Key, + TotalPrice = g.Sum(p => p.Price), + TotalQTY = g.Sum(p => p.QTY) + }; + return res.ToList(); + }, null).Cast().ToList(); + } + } +} diff --git a/Views/Views.csproj b/Views/Views.csproj index 9bf6120..8ed41a4 100644 --- a/Views/Views.csproj +++ b/Views/Views.csproj @@ -1,68 +1,90 @@ - - - - Debug - AnyCPU - 8.0.30703 - 2.0 - {A1347486-8D54-4E17-8A22-76EFE61BF37B} - Library - Properties - Views - Views - v4.0 - 512 - - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - - false - - - ..\key.snk - - - - - - - - - - - - {32331D51-5BE0-41E2-AF1A-9B086C5AE809} - RaptorDB.Common - - - - - md "$(SolutionDir)Output\server\Extensions" -xcopy /q /y "$(TargetDir)$(TargetFileName)" "$(SolutionDir)Output\server\Extensions\" - - + + + 
+ Debug + AnyCPU + 8.0.30703 + 2.0 + {A1347486-8D54-4E17-8A22-76EFE61BF37B} + Library + Properties + Views + Views + v4.0 + 512 + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + false + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + false + + + false + + + ..\key.snk + + + true + bin\x64\Debug\ + DEBUG;TRACE + full + x64 + prompt + MinimumRecommendedRules.ruleset + + + bin\x64\Release\ + TRACE + true + pdbonly + x64 + prompt + MinimumRecommendedRules.ruleset + + + + + + + + + + + + {32331D51-5BE0-41E2-AF1A-9B086C5AE809} + RaptorDB.Common + + + {45f6be30-989a-4749-b6a0-69099c8661f4} + RaptorDB + + + + + md "$(SolutionDir)Output\server\Extensions" +xcopy /q /y "$(TargetDir)$(TargetFileName)" "$(SolutionDir)Output\server\Extensions\" + + \ No newline at end of file diff --git a/datagridbinding/Program.cs b/datagridbinding/Program.cs index a701c5a..2f7f329 100644 --- a/datagridbinding/Program.cs +++ b/datagridbinding/Program.cs @@ -1,29 +1,29 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Windows.Forms; -using System.Reflection; - -namespace datagridbinding -{ - - static class Program - { - public static void DoubleBuffered(this DataGridView dgv, bool setting) - { - Type dgvType = dgv.GetType(); - PropertyInfo pi = dgvType.GetProperty("DoubleBuffered", BindingFlags.Instance | BindingFlags.NonPublic); - pi.SetValue(dgv, setting, null); - } - /// - /// The main entry point for the application. 
- /// - [STAThread] - static void Main() - { - Application.EnableVisualStyles(); - Application.SetCompatibleTextRenderingDefault(false); - Application.Run(new frmMain()); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Windows.Forms; +using System.Reflection; + +namespace datagridbinding +{ + + static class Program + { + public static void DoubleBuffered(this DataGridView dgv, bool setting) + { + Type dgvType = dgv.GetType(); + PropertyInfo pi = dgvType.GetProperty("DoubleBuffered", BindingFlags.Instance | BindingFlags.NonPublic); + pi.SetValue(dgv, setting, null); + } + /// + /// The main entry point for the application. + /// + [STAThread] + static void Main() + { + Application.EnableVisualStyles(); + Application.SetCompatibleTextRenderingDefault(false); + Application.Run(new frmMain()); + } + } +} diff --git a/datagridbinding/Properties/AssemblyInfo.cs b/datagridbinding/Properties/AssemblyInfo.cs index f3a2f58..1af632b 100644 --- a/datagridbinding/Properties/AssemblyInfo.cs +++ b/datagridbinding/Properties/AssemblyInfo.cs @@ -1,36 +1,36 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("datagridbinding")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("Microsoft")] -[assembly: AssemblyProduct("datagridbinding")] -[assembly: AssemblyCopyright("Copyright © Microsoft 2012")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("7231f003-9b1a-444b-b0db-a4109f304fd1")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("datagridbinding")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Microsoft")] +[assembly: AssemblyProduct("datagridbinding")] +[assembly: AssemblyCopyright("Copyright © Microsoft 2012")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("7231f003-9b1a-444b-b0db-a4109f304fd1")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/datagridbinding/app.config b/datagridbinding/app.config new file mode 100644 index 0000000..9c3d200 --- /dev/null +++ b/datagridbinding/app.config @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/datagridbinding/datagridbinding.csproj b/datagridbinding/datagridbinding.csproj index 16f0859..2c839bd 100644 --- a/datagridbinding/datagridbinding.csproj +++ b/datagridbinding/datagridbinding.csproj @@ -1,88 +1,106 @@ - - - - Debug - AnyCPU - 9.0.30729 - 2.0 - {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41} - WinExe - Properties - datagridbinding - datagridbinding - v4.0 - 512 - - - - true - full - false - ..\Output\ - DEBUG;TRACE - prompt - 4 - false - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - - - ..\Faker.dll - - - - - - - - Form - - - frmMain.cs - - - Form - - - frmStartup.cs - - - - - frmMain.cs - - - frmStartup.cs - - - - - {32331D51-5BE0-41E2-AF1A-9B086C5AE809} - RaptorDB.Common - - - {45F6BE30-989A-4749-B6A0-69099C8661F4} - RaptorDB - - - {A1347486-8D54-4E17-8A22-76EFE61BF37B} - Views - - - - + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {4B90D800-C8C9-45CA-BD8C-DD5B47F78C41} + WinExe + Properties + datagridbinding + datagridbinding + v4.0 + 512 + + + + true + full + false + ..\Output\ + DEBUG;TRACE + prompt + 4 + false + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + false + + + true + bin\x64\Debug\ + DEBUG;TRACE + full + x64 + prompt + 
MinimumRecommendedRules.ruleset + + + bin\x64\Release\ + TRACE + true + pdbonly + x64 + prompt + MinimumRecommendedRules.ruleset + + + + ..\Faker.dll + + + + + + + + Form + + + frmMain.cs + + + Form + + + frmStartup.cs + + + + + frmMain.cs + + + frmStartup.cs + + + + + {32331D51-5BE0-41E2-AF1A-9B086C5AE809} + RaptorDB.Common + + + {45F6BE30-989A-4749-B6A0-69099C8661F4} + RaptorDB + + + {A1347486-8D54-4E17-8A22-76EFE61BF37B} + Views + + + + \ No newline at end of file diff --git a/datagridbinding/frmMain.Designer.cs b/datagridbinding/frmMain.Designer.cs index ec646ab..354bf31 100644 --- a/datagridbinding/frmMain.Designer.cs +++ b/datagridbinding/frmMain.Designer.cs @@ -1,229 +1,229 @@ -namespace datagridbinding -{ - partial class frmMain - { - /// - /// Required designer variable. - /// - private System.ComponentModel.IContainer components = null; - - /// - /// Clean up any resources being used. - /// - /// true if managed resources should be disposed; otherwise, false. - protected override void Dispose(bool disposing) - { - if (disposing && (components != null)) - { - components.Dispose(); - } - base.Dispose(disposing); - } - - #region Windows Form Designer generated code - - /// - /// Required method for Designer support - do not modify - /// the contents of this method with the code editor. 
- /// - private void InitializeComponent() - { - this.components = new System.ComponentModel.Container(); - System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle2 = new System.Windows.Forms.DataGridViewCellStyle(); - this.bindingSource1 = new System.Windows.Forms.BindingSource(this.components); - this.statusStrip1 = new System.Windows.Forms.StatusStrip(); - this.toolStripStatusLabel1 = new System.Windows.Forms.ToolStripStatusLabel(); - this.toolStripStatusLabel2 = new System.Windows.Forms.ToolStripStatusLabel(); - this.toolStripProgressBar1 = new System.Windows.Forms.ToolStripProgressBar(); - this.menuStrip1 = new System.Windows.Forms.MenuStrip(); - this.sumQueryToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.serverSideSumQueryToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.insert100000DocumentsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.backupToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.restoreToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.exitToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.testToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); - this.textBox1 = new System.Windows.Forms.TextBox(); - this.dataGridView1 = new System.Windows.Forms.DataGridView(); - this.stsError = new System.Windows.Forms.ToolStripStatusLabel(); - ((System.ComponentModel.ISupportInitialize)(this.bindingSource1)).BeginInit(); - this.statusStrip1.SuspendLayout(); - this.menuStrip1.SuspendLayout(); - ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).BeginInit(); - this.SuspendLayout(); - // - // statusStrip1 - // - this.statusStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { - this.toolStripStatusLabel1, - this.toolStripStatusLabel2, - this.toolStripProgressBar1, - this.stsError}); - this.statusStrip1.Location = new System.Drawing.Point(0, 364); - this.statusStrip1.Name = 
"statusStrip1"; - this.statusStrip1.Padding = new System.Windows.Forms.Padding(1, 0, 7, 0); - this.statusStrip1.Size = new System.Drawing.Size(693, 22); - this.statusStrip1.TabIndex = 3; - this.statusStrip1.Text = "statusStrip1"; - // - // toolStripStatusLabel1 - // - this.toolStripStatusLabel1.Name = "toolStripStatusLabel1"; - this.toolStripStatusLabel1.Size = new System.Drawing.Size(60, 17); - this.toolStripStatusLabel1.Text = "Count = 0"; - // - // toolStripStatusLabel2 - // - this.toolStripStatusLabel2.Name = "toolStripStatusLabel2"; - this.toolStripStatusLabel2.Size = new System.Drawing.Size(48, 17); - this.toolStripStatusLabel2.Text = "time =0"; - // - // toolStripProgressBar1 - // - this.toolStripProgressBar1.Name = "toolStripProgressBar1"; - this.toolStripProgressBar1.Size = new System.Drawing.Size(100, 16); - // - // menuStrip1 - // - this.menuStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { - this.sumQueryToolStripMenuItem, - this.serverSideSumQueryToolStripMenuItem, - this.insert100000DocumentsToolStripMenuItem, - this.backupToolStripMenuItem, - this.restoreToolStripMenuItem, - this.exitToolStripMenuItem, - this.testToolStripMenuItem}); - this.menuStrip1.Location = new System.Drawing.Point(0, 0); - this.menuStrip1.Name = "menuStrip1"; - this.menuStrip1.Size = new System.Drawing.Size(693, 24); - this.menuStrip1.TabIndex = 5; - this.menuStrip1.Text = "menuStrip1"; - // - // sumQueryToolStripMenuItem - // - this.sumQueryToolStripMenuItem.Name = "sumQueryToolStripMenuItem"; - this.sumQueryToolStripMenuItem.Size = new System.Drawing.Size(78, 20); - this.sumQueryToolStripMenuItem.Text = "Sum Query"; - this.sumQueryToolStripMenuItem.Click += new System.EventHandler(this.sumQueryToolStripMenuItem_Click); - // - // serverSideSumQueryToolStripMenuItem - // - this.serverSideSumQueryToolStripMenuItem.Name = "serverSideSumQueryToolStripMenuItem"; - this.serverSideSumQueryToolStripMenuItem.Size = new System.Drawing.Size(138, 20); - 
this.serverSideSumQueryToolStripMenuItem.Text = "Server Side Sum Query"; - this.serverSideSumQueryToolStripMenuItem.Click += new System.EventHandler(this.serverSideSumQueryToolStripMenuItem_Click); - // - // insert100000DocumentsToolStripMenuItem - // - this.insert100000DocumentsToolStripMenuItem.Name = "insert100000DocumentsToolStripMenuItem"; - this.insert100000DocumentsToolStripMenuItem.Size = new System.Drawing.Size(154, 20); - this.insert100000DocumentsToolStripMenuItem.Text = "Insert 100,000 Documents"; - this.insert100000DocumentsToolStripMenuItem.Click += new System.EventHandler(this.insert100000DocumentsToolStripMenuItem_Click); - // - // backupToolStripMenuItem - // - this.backupToolStripMenuItem.Name = "backupToolStripMenuItem"; - this.backupToolStripMenuItem.Size = new System.Drawing.Size(58, 20); - this.backupToolStripMenuItem.Text = "Backup"; - this.backupToolStripMenuItem.Click += new System.EventHandler(this.backupToolStripMenuItem_Click); - // - // restoreToolStripMenuItem - // - this.restoreToolStripMenuItem.Name = "restoreToolStripMenuItem"; - this.restoreToolStripMenuItem.Size = new System.Drawing.Size(58, 20); - this.restoreToolStripMenuItem.Text = "Restore"; - this.restoreToolStripMenuItem.Click += new System.EventHandler(this.restoreToolStripMenuItem_Click); - // - // exitToolStripMenuItem - // - this.exitToolStripMenuItem.Name = "exitToolStripMenuItem"; - this.exitToolStripMenuItem.Size = new System.Drawing.Size(37, 20); - this.exitToolStripMenuItem.Text = "Exit"; - this.exitToolStripMenuItem.Click += new System.EventHandler(this.exitToolStripMenuItem_Click); - // - // testToolStripMenuItem - // - this.testToolStripMenuItem.Name = "testToolStripMenuItem"; - this.testToolStripMenuItem.Size = new System.Drawing.Size(38, 20); - this.testToolStripMenuItem.Text = "test"; - this.testToolStripMenuItem.Click += new System.EventHandler(this.testToolStripMenuItem_Click); - // - // textBox1 - // - this.textBox1.Dock = 
System.Windows.Forms.DockStyle.Top; - this.textBox1.Location = new System.Drawing.Point(0, 24); - this.textBox1.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2); - this.textBox1.Name = "textBox1"; - this.textBox1.Size = new System.Drawing.Size(693, 23); - this.textBox1.TabIndex = 6; - this.textBox1.Text = "salesinvoice, serial<100"; - this.textBox1.KeyPress += new System.Windows.Forms.KeyPressEventHandler(this.TextBox1KeyPress); - // - // dataGridView1 - // - dataGridViewCellStyle2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(192)))), ((int)(((byte)(255)))), ((int)(((byte)(192))))); - this.dataGridView1.AlternatingRowsDefaultCellStyle = dataGridViewCellStyle2; - this.dataGridView1.AutoSizeColumnsMode = System.Windows.Forms.DataGridViewAutoSizeColumnsMode.DisplayedCells; - this.dataGridView1.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize; - this.dataGridView1.Dock = System.Windows.Forms.DockStyle.Fill; - this.dataGridView1.EditMode = System.Windows.Forms.DataGridViewEditMode.EditProgrammatically; - this.dataGridView1.Location = new System.Drawing.Point(0, 47); - this.dataGridView1.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2); - this.dataGridView1.Name = "dataGridView1"; - this.dataGridView1.Size = new System.Drawing.Size(693, 317); - this.dataGridView1.TabIndex = 7; - // - // stsError - // - this.stsError.Font = new System.Drawing.Font("Segoe UI", 9F, System.Drawing.FontStyle.Bold); - this.stsError.ForeColor = System.Drawing.Color.Red; - this.stsError.Name = "stsError"; - this.stsError.Size = new System.Drawing.Size(0, 17); - // - // Form1 - // - this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 16F); - this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; - this.ClientSize = new System.Drawing.Size(693, 386); - this.Controls.Add(this.dataGridView1); - this.Controls.Add(this.textBox1); - this.Controls.Add(this.statusStrip1); - this.Controls.Add(this.menuStrip1); - this.Font 
= new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(178))); - this.MainMenuStrip = this.menuStrip1; - this.Margin = new System.Windows.Forms.Padding(3, 4, 3, 4); - this.Name = "Form1"; - this.Text = "Query Viewer"; - this.Load += new System.EventHandler(this.Form1_Load); - ((System.ComponentModel.ISupportInitialize)(this.bindingSource1)).EndInit(); - this.statusStrip1.ResumeLayout(false); - this.statusStrip1.PerformLayout(); - this.menuStrip1.ResumeLayout(false); - this.menuStrip1.PerformLayout(); - ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).EndInit(); - this.ResumeLayout(false); - this.PerformLayout(); - - } - private System.Windows.Forms.ToolStripStatusLabel toolStripStatusLabel2; - private System.Windows.Forms.ToolStripStatusLabel toolStripStatusLabel1; - private System.Windows.Forms.StatusStrip statusStrip1; - - #endregion - - private System.Windows.Forms.BindingSource bindingSource1; - private System.Windows.Forms.MenuStrip menuStrip1; - private System.Windows.Forms.ToolStripMenuItem sumQueryToolStripMenuItem; - private System.Windows.Forms.TextBox textBox1; - private System.Windows.Forms.DataGridView dataGridView1; - private System.Windows.Forms.ToolStripMenuItem exitToolStripMenuItem; - private System.Windows.Forms.ToolStripMenuItem insert100000DocumentsToolStripMenuItem; - private System.Windows.Forms.ToolStripProgressBar toolStripProgressBar1; - private System.Windows.Forms.ToolStripMenuItem backupToolStripMenuItem; - private System.Windows.Forms.ToolStripMenuItem restoreToolStripMenuItem; - private System.Windows.Forms.ToolStripMenuItem serverSideSumQueryToolStripMenuItem; - private System.Windows.Forms.ToolStripMenuItem testToolStripMenuItem; - private System.Windows.Forms.ToolStripStatusLabel stsError; - } -} - +namespace datagridbinding +{ + partial class frmMain + { + /// + /// Required designer variable. 
+ /// + private System.ComponentModel.IContainer components = null; + + /// + /// Clean up any resources being used. + /// + /// true if managed resources should be disposed; otherwise, false. + protected override void Dispose(bool disposing) + { + if (disposing && (components != null)) + { + components.Dispose(); + } + base.Dispose(disposing); + } + + #region Windows Form Designer generated code + + /// + /// Required method for Designer support - do not modify + /// the contents of this method with the code editor. + /// + private void InitializeComponent() + { + this.components = new System.ComponentModel.Container(); + System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle2 = new System.Windows.Forms.DataGridViewCellStyle(); + this.bindingSource1 = new System.Windows.Forms.BindingSource(this.components); + this.statusStrip1 = new System.Windows.Forms.StatusStrip(); + this.toolStripStatusLabel1 = new System.Windows.Forms.ToolStripStatusLabel(); + this.toolStripStatusLabel2 = new System.Windows.Forms.ToolStripStatusLabel(); + this.toolStripProgressBar1 = new System.Windows.Forms.ToolStripProgressBar(); + this.menuStrip1 = new System.Windows.Forms.MenuStrip(); + this.sumQueryToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.serverSideSumQueryToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.insert100000DocumentsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.backupToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.restoreToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.exitToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.testToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); + this.textBox1 = new System.Windows.Forms.TextBox(); + this.dataGridView1 = new System.Windows.Forms.DataGridView(); + this.stsError = new System.Windows.Forms.ToolStripStatusLabel(); + 
((System.ComponentModel.ISupportInitialize)(this.bindingSource1)).BeginInit(); + this.statusStrip1.SuspendLayout(); + this.menuStrip1.SuspendLayout(); + ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).BeginInit(); + this.SuspendLayout(); + // + // statusStrip1 + // + this.statusStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { + this.toolStripStatusLabel1, + this.toolStripStatusLabel2, + this.toolStripProgressBar1, + this.stsError}); + this.statusStrip1.Location = new System.Drawing.Point(0, 364); + this.statusStrip1.Name = "statusStrip1"; + this.statusStrip1.Padding = new System.Windows.Forms.Padding(1, 0, 7, 0); + this.statusStrip1.Size = new System.Drawing.Size(693, 22); + this.statusStrip1.TabIndex = 3; + this.statusStrip1.Text = "statusStrip1"; + // + // toolStripStatusLabel1 + // + this.toolStripStatusLabel1.Name = "toolStripStatusLabel1"; + this.toolStripStatusLabel1.Size = new System.Drawing.Size(60, 17); + this.toolStripStatusLabel1.Text = "Count = 0"; + // + // toolStripStatusLabel2 + // + this.toolStripStatusLabel2.Name = "toolStripStatusLabel2"; + this.toolStripStatusLabel2.Size = new System.Drawing.Size(48, 17); + this.toolStripStatusLabel2.Text = "time =0"; + // + // toolStripProgressBar1 + // + this.toolStripProgressBar1.Name = "toolStripProgressBar1"; + this.toolStripProgressBar1.Size = new System.Drawing.Size(100, 16); + // + // menuStrip1 + // + this.menuStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { + this.sumQueryToolStripMenuItem, + this.serverSideSumQueryToolStripMenuItem, + this.insert100000DocumentsToolStripMenuItem, + this.backupToolStripMenuItem, + this.restoreToolStripMenuItem, + this.exitToolStripMenuItem, + this.testToolStripMenuItem}); + this.menuStrip1.Location = new System.Drawing.Point(0, 0); + this.menuStrip1.Name = "menuStrip1"; + this.menuStrip1.Size = new System.Drawing.Size(693, 24); + this.menuStrip1.TabIndex = 5; + this.menuStrip1.Text = "menuStrip1"; + // + // 
sumQueryToolStripMenuItem + // + this.sumQueryToolStripMenuItem.Name = "sumQueryToolStripMenuItem"; + this.sumQueryToolStripMenuItem.Size = new System.Drawing.Size(78, 20); + this.sumQueryToolStripMenuItem.Text = "Sum Query"; + this.sumQueryToolStripMenuItem.Click += new System.EventHandler(this.sumQueryToolStripMenuItem_Click); + // + // serverSideSumQueryToolStripMenuItem + // + this.serverSideSumQueryToolStripMenuItem.Name = "serverSideSumQueryToolStripMenuItem"; + this.serverSideSumQueryToolStripMenuItem.Size = new System.Drawing.Size(138, 20); + this.serverSideSumQueryToolStripMenuItem.Text = "Server Side Sum Query"; + this.serverSideSumQueryToolStripMenuItem.Click += new System.EventHandler(this.serverSideSumQueryToolStripMenuItem_Click); + // + // insert100000DocumentsToolStripMenuItem + // + this.insert100000DocumentsToolStripMenuItem.Name = "insert100000DocumentsToolStripMenuItem"; + this.insert100000DocumentsToolStripMenuItem.Size = new System.Drawing.Size(154, 20); + this.insert100000DocumentsToolStripMenuItem.Text = "Insert 100,000 Documents"; + this.insert100000DocumentsToolStripMenuItem.Click += new System.EventHandler(this.insert100000DocumentsToolStripMenuItem_Click); + // + // backupToolStripMenuItem + // + this.backupToolStripMenuItem.Name = "backupToolStripMenuItem"; + this.backupToolStripMenuItem.Size = new System.Drawing.Size(58, 20); + this.backupToolStripMenuItem.Text = "Backup"; + this.backupToolStripMenuItem.Click += new System.EventHandler(this.backupToolStripMenuItem_Click); + // + // restoreToolStripMenuItem + // + this.restoreToolStripMenuItem.Name = "restoreToolStripMenuItem"; + this.restoreToolStripMenuItem.Size = new System.Drawing.Size(58, 20); + this.restoreToolStripMenuItem.Text = "Restore"; + this.restoreToolStripMenuItem.Click += new System.EventHandler(this.restoreToolStripMenuItem_Click); + // + // exitToolStripMenuItem + // + this.exitToolStripMenuItem.Name = "exitToolStripMenuItem"; + this.exitToolStripMenuItem.Size = new 
System.Drawing.Size(37, 20); + this.exitToolStripMenuItem.Text = "Exit"; + this.exitToolStripMenuItem.Click += new System.EventHandler(this.exitToolStripMenuItem_Click); + // + // testToolStripMenuItem + // + this.testToolStripMenuItem.Name = "testToolStripMenuItem"; + this.testToolStripMenuItem.Size = new System.Drawing.Size(38, 20); + this.testToolStripMenuItem.Text = "test"; + this.testToolStripMenuItem.Click += new System.EventHandler(this.testToolStripMenuItem_Click); + // + // textBox1 + // + this.textBox1.Dock = System.Windows.Forms.DockStyle.Top; + this.textBox1.Location = new System.Drawing.Point(0, 24); + this.textBox1.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2); + this.textBox1.Name = "textBox1"; + this.textBox1.Size = new System.Drawing.Size(693, 23); + this.textBox1.TabIndex = 6; + this.textBox1.Text = "salesinvoice, serial<100"; + this.textBox1.KeyPress += new System.Windows.Forms.KeyPressEventHandler(this.TextBox1KeyPress); + // + // dataGridView1 + // + dataGridViewCellStyle2.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(192)))), ((int)(((byte)(255)))), ((int)(((byte)(192))))); + this.dataGridView1.AlternatingRowsDefaultCellStyle = dataGridViewCellStyle2; + this.dataGridView1.AutoSizeColumnsMode = System.Windows.Forms.DataGridViewAutoSizeColumnsMode.DisplayedCells; + this.dataGridView1.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize; + this.dataGridView1.Dock = System.Windows.Forms.DockStyle.Fill; + this.dataGridView1.EditMode = System.Windows.Forms.DataGridViewEditMode.EditProgrammatically; + this.dataGridView1.Location = new System.Drawing.Point(0, 47); + this.dataGridView1.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2); + this.dataGridView1.Name = "dataGridView1"; + this.dataGridView1.Size = new System.Drawing.Size(693, 317); + this.dataGridView1.TabIndex = 7; + // + // stsError + // + this.stsError.Font = new System.Drawing.Font("Segoe UI", 9F, 
System.Drawing.FontStyle.Bold); + this.stsError.ForeColor = System.Drawing.Color.Red; + this.stsError.Name = "stsError"; + this.stsError.Size = new System.Drawing.Size(0, 17); + // + // Form1 + // + this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 16F); + this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + this.ClientSize = new System.Drawing.Size(693, 386); + this.Controls.Add(this.dataGridView1); + this.Controls.Add(this.textBox1); + this.Controls.Add(this.statusStrip1); + this.Controls.Add(this.menuStrip1); + this.Font = new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(178))); + this.MainMenuStrip = this.menuStrip1; + this.Margin = new System.Windows.Forms.Padding(3, 4, 3, 4); + this.Name = "Form1"; + this.Text = "Query Viewer"; + this.Load += new System.EventHandler(this.Form1_Load); + ((System.ComponentModel.ISupportInitialize)(this.bindingSource1)).EndInit(); + this.statusStrip1.ResumeLayout(false); + this.statusStrip1.PerformLayout(); + this.menuStrip1.ResumeLayout(false); + this.menuStrip1.PerformLayout(); + ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).EndInit(); + this.ResumeLayout(false); + this.PerformLayout(); + + } + private System.Windows.Forms.ToolStripStatusLabel toolStripStatusLabel2; + private System.Windows.Forms.ToolStripStatusLabel toolStripStatusLabel1; + private System.Windows.Forms.StatusStrip statusStrip1; + + #endregion + + private System.Windows.Forms.BindingSource bindingSource1; + private System.Windows.Forms.MenuStrip menuStrip1; + private System.Windows.Forms.ToolStripMenuItem sumQueryToolStripMenuItem; + private System.Windows.Forms.TextBox textBox1; + private System.Windows.Forms.DataGridView dataGridView1; + private System.Windows.Forms.ToolStripMenuItem exitToolStripMenuItem; + private System.Windows.Forms.ToolStripMenuItem insert100000DocumentsToolStripMenuItem; + private System.Windows.Forms.ToolStripProgressBar 
toolStripProgressBar1; + private System.Windows.Forms.ToolStripMenuItem backupToolStripMenuItem; + private System.Windows.Forms.ToolStripMenuItem restoreToolStripMenuItem; + private System.Windows.Forms.ToolStripMenuItem serverSideSumQueryToolStripMenuItem; + private System.Windows.Forms.ToolStripMenuItem testToolStripMenuItem; + private System.Windows.Forms.ToolStripStatusLabel stsError; + } +} + diff --git a/datagridbinding/frmMain.cs b/datagridbinding/frmMain.cs index 4edd7b5..d4228b5 100644 --- a/datagridbinding/frmMain.cs +++ b/datagridbinding/frmMain.cs @@ -1,272 +1,274 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Drawing; -using System.Linq; -using System.Text; -using System.Windows.Forms; -using System.Reflection; -using RaptorDB; -using RaptorDB.Common; -using SampleViews; -using System.Linq.Expressions; -using System.IO; - -namespace datagridbinding -{ - public partial class frmMain : Form - { - public frmMain() - { - InitializeComponent(); - } - - - IRaptorDB rap; - - private void Form1_Load(object sender, EventArgs e) - { - dataGridView1.DoubleBuffered(true); - frmStartup f = new frmStartup(); - if (f.ShowDialog() == System.Windows.Forms.DialogResult.OK) - { - rap = f._rap; - - Query(); - } - } - - void TextBox1KeyPress(object sender, KeyPressEventArgs e) - { - if (e.KeyChar == (char)Keys.Return) - Query(); - } - - private void Query() - { - string[] s = textBox1.Text.Split(','); - - try - { - DateTime dt = FastDateTime.Now; - var q = rap.Query(s[0].Trim(), s[1].Trim()); - toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; - dataGridView1.DataSource = q.Rows; - toolStripStatusLabel1.Text = "Count = " + q.Count.ToString("#,0"); - stsError.Text = ""; - } - catch (Exception ex) - { - stsError.Text = ex.Message; - dataGridView1.DataSource = null; - toolStripStatusLabel1.Text = "Count = 0"; - toolStripStatusLabel2.Text = "Query time (sec) = 0"; - } - } - - 
private void sumQueryToolStripMenuItem_Click(object sender, EventArgs e) - { - int c = rap.Count("SalesItemRows", "product = \"prod 1\""); - - DateTime dt = FastDateTime.Now; - var q = //rap.Query(typeof(SalesItemRowsView), (LineItem l) => (l.Product == "prod 1" || l.Product == "prod 3")); - rap.Query(x => x.Product == "prod 1" || x.Product == "prod 3"); - //List list = q.Rows.Cast().ToList(); - var res = from item in q.Rows//list - group item by item.Product into grouped - select new - { - Product = grouped.Key, - TotalPrice = grouped.Sum(product => product.Price), - TotalQTY = grouped.Sum(product => product.QTY) - }; - - var reslist = res.ToList(); - dataGridView1.DataSource = reslist; - toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; - toolStripStatusLabel1.Text = "Count = " + q.Count.ToString("#,0"); - } - - private void exitToolStripMenuItem_Click(object sender, EventArgs e) - { - if (rap != null) - rap.Shutdown(); - this.Close(); - } - - private object _lock = new object(); - private void insert100000DocumentsToolStripMenuItem_Click(object sender, EventArgs e) - { - //RaptorDB.Global.SplitStorageFilesMegaBytes = 50; - lock (_lock) - { - DialogResult dr = MessageBox.Show("Do you want to insert?", "Continue?", MessageBoxButtons.OKCancel, MessageBoxIcon.Stop, MessageBoxDefaultButton.Button2); - if (dr == System.Windows.Forms.DialogResult.Cancel) - return; - toolStripProgressBar1.Value = 0; - DateTime dt = FastDateTime.Now; - int count = 100000; - int step = 5000; - toolStripProgressBar1.Maximum = (count / step) + 1; - Random r = new Random(); - for (int i = 0; i < count; i++) - { - var inv = CreateInvoice(i); - if (i % step == 0) - toolStripProgressBar1.Value++; - rap.Save(inv.ID, inv); - } - MessageBox.Show("Insert done in (sec) : " + FastDateTime.Now.Subtract(dt).TotalSeconds); - toolStripProgressBar1.Value = 0; - } - } - - private static SalesInvoice CreateInvoice(int i) - { - var inv = new SalesInvoice() - { - 
Date = Faker.DateTimeFaker.BirthDay(),// FastDateTime.Now.AddMinutes(r.Next(60)), - Serial = i % 10000, - CustomerName = Faker.NameFaker.Name(),// "Me " + i % 10, - NoCase = "Me " + i % 10, - Status = (byte)(i % 4), - Address = Faker.LocationFaker.Street(), //"df asd sdf asdf asdf", - Approved = i % 100 == 0 ? true : false - }; - inv.Items = new List(); - for (int k = 0; k < 5; k++) - inv.Items.Add(new LineItem() { Product = "prod " + k, Discount = 0, Price = 10 + k, QTY = 1 + k }); - return inv; - } - - private void backupToolStripMenuItem_Click(object sender, EventArgs e) - { - bool b = rap.Backup(); - MessageBox.Show("Backup done"); - } - - private void restoreToolStripMenuItem_Click(object sender, EventArgs e) - { - rap.Restore(); - } - - public class objclass - { - public string val; - } - string prod3 = "prod 3"; - private void serverSideSumQueryToolStripMenuItem_Click(object sender, EventArgs e) - { - //string prod1 = "prod 1"; - objclass c = new objclass() { val = "prod 3" }; - //decimal i = 20; - - //var q = rap.Count(typeof(SalesItemRowsView), - // (LineItem l) => (l.Product == prod1 || l.Product == prod3) && l.Price.Between(10,i) - // ); - - DateTime dt = FastDateTime.Now; - - var qq = rap.ServerSide(Views.ServerSide.Sum_Products_based_on_filter, - //"product = \"prod 1\"" - //(LineItem l) => (l.Product == c.val || l.Product == prod3 ) - x => x.Product == c.val || x.Product == prod3 - ).ToList(); - dataGridView1.DataSource = qq; - toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; - toolStripStatusLabel1.Text = "Count = " + qq.Count.ToString("#,0"); - } - - private void KVHFtest() - { - //var r = (rap as RaptorDB.RaptorDB); - var kv = rap.GetKVHF(); - - DateTime dt = DateTime.Now; - for (int i = 0; i < 100000; i++) - { - var o = CreateInvoice(i); - kv.SetObjectHF(i.ToString(), o);// new byte[100000]); - } - MessageBox.Show("time = " + DateTime.Now.Subtract(dt).TotalSeconds); - - var g = 
kv.GetObjectHF("1009"); - - for (int i = 0; i < 100000; i++) - kv.DeleteKeyHF(i.ToString()); - - g = kv.GetObjectHF("1009"); - MessageBox.Show(""+kv.CountHF()); - - foreach (var f in Directory.GetFiles("d:\\pp", "*.*")) - { - kv.SetObjectHF(f, File.ReadAllBytes(f)); - } - - kv.CompactStorageHF(); - - foreach (var f in Directory.GetFiles("d:\\pp", "*.*")) - { - var o = kv.GetObjectHF(f); - File.WriteAllBytes(f.Replace("\\pp\\", "\\ppp\\"), o as byte[]); - } - bool b = kv.ContainsHF("aa"); - var keys = kv.GetKeysHF(); - //foreach(var o in r.KVHF.EnumerateObjects()) - //{ - // string s = o.GetType().ToString(); - //} - } - - private void testToolStripMenuItem_Click(object sender, EventArgs e) - { - GC.Collect(2); - //KVHFtest(); - - - int c = rap.Count(x => x.Serial < 100); - c = rap.Count(x => x.Serial != 100); - c = rap.Count("SalesInvoice", "serial != 100"); - var q = rap.Query(x => x.Serial < 100, 0, 10, "serial desc"); - //var p = rap.Query("SalesInvoice"); - //var pp = rap.Query(typeof(SalesInvoiceView)); - //var ppp = rap.Query(typeof(SalesItemRowsView.RowSchema)); - //var pppp = rap.Query(typeof(SalesInvoiceView), (SalesInvoiceView.RowSchema r) => r.Serial < 10); - //var ppppp = rap.Query(typeof(SalesInvoiceView.RowSchema), (SalesInvoiceView.RowSchema r) => r.Serial < 10); - //var pppppp = rap.Query("serial <10"); - //Guid g = new Guid("82997e60-f8f4-4b37-ae35-02d033512673"); - var qq = rap.Query(x => x.docid == new Guid("82997e60-f8f4-4b37-ae35-02d033512673")); - dataGridView1.DataSource = q.Rows; - - //int i = rap.ViewDelete(x => x.Serial == 0); - - //var qqq= rap.Query(x => ); - //SalesInvoiceViewRowSchema s = new SalesInvoiceViewRowSchema(); - //s.docid = Guid.NewGuid(); - //s.CustomerName = "hello"; - //rap.ViewInsert(s.docid, s); - //q= rap.Query("serial <100"); - //string s = q.Rows[0].CustomerName; - - //perftest(); - } - - //private void perftest() - //{ - // DateTime dt = DateTime.Now; - - // for (int i = 0; i < 100000; i++) - // { - // var s = new 
SalesInvoiceViewRowSchema(); - // s.docid = Guid.NewGuid(); - // s.Address = Faker.LocationFaker.Street(); - // s.CustomerName = Faker.NameFaker.Name(); - // s.Date = Faker.DateTimeFaker.BirthDay(); - // s.Serial = i % 1000; - // s.Status = (byte)(i % 5); - // rap.ViewInsert(s.docid, s); - // } - // MessageBox.Show("time = " + DateTime.Now.Subtract(dt).TotalSeconds); - //} - } +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Drawing; +using System.Linq; +using System.Text; +using System.Windows.Forms; +using System.Reflection; +using RaptorDB; +using RaptorDB.Common; +using SampleViews; +using System.Linq.Expressions; +using Views; +using System.IO; + +namespace datagridbinding +{ + public partial class frmMain : Form + { + public frmMain() + { + InitializeComponent(); + } + + + IRaptorDB rap; + + private void Form1_Load(object sender, EventArgs e) + { + dataGridView1.DoubleBuffered(true); + frmStartup f = new frmStartup(); + if (f.ShowDialog() == System.Windows.Forms.DialogResult.OK) + { + rap = f._rap; + + Query(); + } + } + + void TextBox1KeyPress(object sender, KeyPressEventArgs e) + { + if (e.KeyChar == (char)Keys.Return) + Query(); + } + + private void Query() + { + string[] s = textBox1.Text.Split(','); + + try + { + DateTime dt = FastDateTime.Now; + var q = rap.Query(s[0].Trim(), s[1].Trim()); + toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; + dataGridView1.DataSource = q.Rows; + toolStripStatusLabel1.Text = "Count = " + q.Count.ToString("#,0"); + stsError.Text = ""; + } + catch (Exception ex) + { + stsError.Text = ex.Message; + dataGridView1.DataSource = null; + toolStripStatusLabel1.Text = "Count = 0"; + toolStripStatusLabel2.Text = "Query time (sec) = 0"; + } + } + + private void sumQueryToolStripMenuItem_Click(object sender, EventArgs e) + { + int c = rap.Count("SalesItemRows", "product = \"prod 1\""); + + DateTime dt = FastDateTime.Now; + var q = 
//rap.Query(typeof(SalesItemRowsView), (LineItem l) => (l.Product == "prod 1" || l.Product == "prod 3")); + rap.Query(x => x.Product == "prod 1" || x.Product == "prod 3"); + //List list = q.Rows.Cast().ToList(); + var res = from item in q.Rows//list + group item by item.Product into grouped + select new + { + Product = grouped.Key, + TotalPrice = grouped.Sum(product => product.Price), + TotalQTY = grouped.Sum(product => product.QTY) + }; + + var reslist = res.ToList(); + dataGridView1.DataSource = reslist; + toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; + toolStripStatusLabel1.Text = "Count = " + q.Count.ToString("#,0"); + } + + private void exitToolStripMenuItem_Click(object sender, EventArgs e) + { + if (rap != null) + rap.Shutdown(); + this.Close(); + } + + private object _lock = new object(); + private void insert100000DocumentsToolStripMenuItem_Click(object sender, EventArgs e) + { + //RaptorDB.Global.SplitStorageFilesMegaBytes = 50; + lock (_lock) + { + DialogResult dr = MessageBox.Show("Do you want to insert?", "Continue?", MessageBoxButtons.OKCancel, MessageBoxIcon.Stop, MessageBoxDefaultButton.Button2); + if (dr == System.Windows.Forms.DialogResult.Cancel) + return; + toolStripProgressBar1.Value = 0; + DateTime dt = FastDateTime.Now; + int count = 100000; + int step = 5000; + toolStripProgressBar1.Maximum = (count / step) + 1; + Random r = new Random(); + for (int i = 0; i < count; i++) + { + var inv = CreateInvoice(i); + if (i % step == 0) + toolStripProgressBar1.Value++; + rap.Save(inv.ID, inv); + } + MessageBox.Show("Insert done in (sec) : " + FastDateTime.Now.Subtract(dt).TotalSeconds); + toolStripProgressBar1.Value = 0; + } + } + + private static SalesInvoice CreateInvoice(int i) + { + var inv = new SalesInvoice() + { + Date = Faker.DateTimeFaker.BirthDay(),// FastDateTime.Now.AddMinutes(r.Next(60)), + Serial = i % 10000, + CustomerName = Faker.NameFaker.Name(),// "Me " + i % 10, + NoCase = "Me " + i 
% 10, + Status = (byte)(i % 4), + Address = Faker.LocationFaker.Street(), //"df asd sdf asdf asdf", + Approved = i % 100 == 0 ? true : false + }; + inv.Items = new List(); + for (int k = 0; k < 5; k++) + inv.Items.Add(new LineItem() { Product = "prod " + k, Discount = 0, Price = 10 + k, QTY = 1 + k }); + return inv; + } + + private void backupToolStripMenuItem_Click(object sender, EventArgs e) + { + bool b = rap.Backup(); + MessageBox.Show("Backup done"); + } + + private void restoreToolStripMenuItem_Click(object sender, EventArgs e) + { + rap.Restore(); + } + + public class objclass + { + public string val; + } + string prod3 = "prod 3"; + private void serverSideSumQueryToolStripMenuItem_Click(object sender, EventArgs e) + { + //string prod1 = "prod 1"; + objclass c = new objclass() { val = "prod 1" }; + //decimal i = 20; + + //var q = rap.Count(typeof(SalesItemRowsView), + // (LineItem l) => (l.Product == prod1 || l.Product == prod3) && l.Price.Between(10,i) + // ); + + DateTime dt = FastDateTime.Now; + + //var qq = rap.ServerSide((r, filter) => Views.ServerSide.Sum_Products_based_on_filter(r, filter), + // //"product = \"prod 1\"" + // //(LineItem l) => (l.Product == c.val || l.Product == prod3 ) + // x => x.Product == c.val || x.Product == prod3 + // ).ToList(); + var qq = ServerSide.DoServerSideSumOnRaptor(rap, prod3); + dataGridView1.DataSource = qq; + toolStripStatusLabel2.Text = "Query time (sec) = " + FastDateTime.Now.Subtract(dt).TotalSeconds; + toolStripStatusLabel1.Text = "Count = " + qq.Count.ToString("#,0"); + } + + private void KVHFtest() + { + //var r = (rap as RaptorDB.RaptorDB); + var kv = rap.GetKVHF(); + + DateTime dt = DateTime.Now; + for (int i = 0; i < 100000; i++) + { + var o = CreateInvoice(i); + kv.SetObjectHF(i.ToString(), o);// new byte[100000]); + } + MessageBox.Show("time = " + DateTime.Now.Subtract(dt).TotalSeconds); + + var g = kv.GetObjectHF("1009"); + + for (int i = 0; i < 100000; i++) + kv.DeleteKeyHF(i.ToString()); + + g = 
kv.GetObjectHF("1009"); + MessageBox.Show(""+kv.CountHF()); + + foreach (var f in Directory.GetFiles("d:\\pp", "*.*")) + { + kv.SetObjectHF(f, File.ReadAllBytes(f)); + } + + kv.CompactStorageHF(); + + foreach (var f in Directory.GetFiles("d:\\pp", "*.*")) + { + var o = kv.GetObjectHF(f); + File.WriteAllBytes(f.Replace("\\pp\\", "\\ppp\\"), o as byte[]); + } + bool b = kv.ContainsHF("aa"); + var keys = kv.GetKeysHF(); + //foreach(var o in r.KVHF.EnumerateObjects()) + //{ + // string s = o.GetType().ToString(); + //} + } + + private void testToolStripMenuItem_Click(object sender, EventArgs e) + { + GC.Collect(2); + //KVHFtest(); + + + int c = rap.Count(x => x.Serial < 100); + c = rap.Count(x => x.Serial != 100); + c = rap.Count("SalesInvoice", "serial != 100"); + var q = rap.Query(x => x.Serial < 100, 0, 10, "serial desc"); + //var p = rap.Query("SalesInvoice"); + //var pp = rap.Query(typeof(SalesInvoiceView)); + //var ppp = rap.Query(typeof(SalesItemRowsView.RowSchema)); + //var pppp = rap.Query(typeof(SalesInvoiceView), (SalesInvoiceView.RowSchema r) => r.Serial < 10); + //var ppppp = rap.Query(typeof(SalesInvoiceView.RowSchema), (SalesInvoiceView.RowSchema r) => r.Serial < 10); + //var pppppp = rap.Query("serial <10"); + //Guid g = new Guid("82997e60-f8f4-4b37-ae35-02d033512673"); + var qq = rap.Query(x => x.docid == new Guid("82997e60-f8f4-4b37-ae35-02d033512673")); + dataGridView1.DataSource = q.Rows; + + //int i = rap.ViewDelete(x => x.Serial == 0); + + //var qqq= rap.Query(x => ); + //SalesInvoiceViewRowSchema s = new SalesInvoiceViewRowSchema(); + //s.docid = Guid.NewGuid(); + //s.CustomerName = "hello"; + //rap.ViewInsert(s.docid, s); + //q= rap.Query("serial <100"); + //string s = q.Rows[0].CustomerName; + + //perftest(); + } + + //private void perftest() + //{ + // DateTime dt = DateTime.Now; + + // for (int i = 0; i < 100000; i++) + // { + // var s = new SalesInvoiceViewRowSchema(); + // s.docid = Guid.NewGuid(); + // s.Address = 
Faker.LocationFaker.Street(); + // s.CustomerName = Faker.NameFaker.Name(); + // s.Date = Faker.DateTimeFaker.BirthDay(); + // s.Serial = i % 1000; + // s.Status = (byte)(i % 5); + // rap.ViewInsert(s.docid, s); + // } + // MessageBox.Show("time = " + DateTime.Now.Subtract(dt).TotalSeconds); + //} + } } \ No newline at end of file diff --git a/datagridbinding/frmMain.resx b/datagridbinding/frmMain.resx index a0d61cd..27e9f85 100644 --- a/datagridbinding/frmMain.resx +++ b/datagridbinding/frmMain.resx @@ -1,129 +1,129 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/microsoft-resx - - - 2.0 - - - System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - 17, 17 - - - 147, 17 - - - 263, 17 - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + 17, 17 + + + 147, 17 + + + 263, 17 + \ No newline at end of file diff --git a/datagridbinding/frmStartup.Designer.cs b/datagridbinding/frmStartup.Designer.cs index 1f68a9a..37a57e8 100644 --- a/datagridbinding/frmStartup.Designer.cs +++ b/datagridbinding/frmStartup.Designer.cs @@ -1,284 +1,284 @@ -namespace datagridbinding -{ - partial class frmStartup - { - /// - /// Required designer variable. - /// - private System.ComponentModel.IContainer components = null; - - /// - /// Clean up any resources being used. - /// - /// true if managed resources should be disposed; otherwise, false. 
- protected override void Dispose(bool disposing) - { - if (disposing && (components != null)) - { - components.Dispose(); - } - base.Dispose(disposing); - } - - #region Windows Form Designer generated code - - /// - /// Required method for Designer support - do not modify - /// the contents of this method with the code editor. - /// - private void InitializeComponent() - { - this.groupBox1 = new System.Windows.Forms.GroupBox(); - this.label1 = new System.Windows.Forms.Label(); - this.button3 = new System.Windows.Forms.Button(); - this.txtFolder = new System.Windows.Forms.TextBox(); - this.groupBox2 = new System.Windows.Forms.GroupBox(); - this.txtPassword = new System.Windows.Forms.TextBox(); - this.label5 = new System.Windows.Forms.Label(); - this.txtUser = new System.Windows.Forms.TextBox(); - this.label4 = new System.Windows.Forms.Label(); - this.txtPort = new System.Windows.Forms.TextBox(); - this.label3 = new System.Windows.Forms.Label(); - this.txtServer = new System.Windows.Forms.TextBox(); - this.label2 = new System.Windows.Forms.Label(); - this.button1 = new System.Windows.Forms.Button(); - this.button2 = new System.Windows.Forms.Button(); - this.groupBox3 = new System.Windows.Forms.GroupBox(); - this.radioButton2 = new System.Windows.Forms.RadioButton(); - this.radioButton1 = new System.Windows.Forms.RadioButton(); - this.groupBox1.SuspendLayout(); - this.groupBox2.SuspendLayout(); - this.groupBox3.SuspendLayout(); - this.SuspendLayout(); - // - // groupBox1 - // - this.groupBox1.Controls.Add(this.label1); - this.groupBox1.Controls.Add(this.button3); - this.groupBox1.Controls.Add(this.txtFolder); - this.groupBox1.Dock = System.Windows.Forms.DockStyle.Top; - this.groupBox1.Location = new System.Drawing.Point(0, 45); - this.groupBox1.Name = "groupBox1"; - this.groupBox1.Size = new System.Drawing.Size(341, 63); - this.groupBox1.TabIndex = 0; - this.groupBox1.TabStop = false; - // - // label1 - // - this.label1.AutoSize = true; - this.label1.Location = new 
System.Drawing.Point(6, 18); - this.label1.Name = "label1"; - this.label1.Size = new System.Drawing.Size(67, 14); - this.label1.TabIndex = 3; - this.label1.Text = "Data folder"; - // - // button3 - // - this.button3.Location = new System.Drawing.Point(297, 35); - this.button3.Name = "button3"; - this.button3.Size = new System.Drawing.Size(36, 22); - this.button3.TabIndex = 2; - this.button3.Text = "..."; - this.button3.UseVisualStyleBackColor = true; - // - // txtFolder - // - this.txtFolder.Location = new System.Drawing.Point(3, 35); - this.txtFolder.Name = "txtFolder"; - this.txtFolder.Size = new System.Drawing.Size(288, 22); - this.txtFolder.TabIndex = 1; - this.txtFolder.Text = "..\\RaptorDBdata"; - // - // groupBox2 - // - this.groupBox2.Controls.Add(this.txtPassword); - this.groupBox2.Controls.Add(this.label5); - this.groupBox2.Controls.Add(this.txtUser); - this.groupBox2.Controls.Add(this.label4); - this.groupBox2.Controls.Add(this.txtPort); - this.groupBox2.Controls.Add(this.label3); - this.groupBox2.Controls.Add(this.txtServer); - this.groupBox2.Controls.Add(this.label2); - this.groupBox2.Dock = System.Windows.Forms.DockStyle.Top; - this.groupBox2.Location = new System.Drawing.Point(0, 108); - this.groupBox2.Name = "groupBox2"; - this.groupBox2.Size = new System.Drawing.Size(341, 137); - this.groupBox2.TabIndex = 1; - this.groupBox2.TabStop = false; - this.groupBox2.Visible = false; - // - // txtPassword - // - this.txtPassword.Location = new System.Drawing.Point(79, 99); - this.txtPassword.Name = "txtPassword"; - this.txtPassword.PasswordChar = '*'; - this.txtPassword.Size = new System.Drawing.Size(240, 22); - this.txtPassword.TabIndex = 11; - this.txtPassword.Text = "admin"; - // - // label5 - // - this.label5.AutoSize = true; - this.label5.Location = new System.Drawing.Point(15, 102); - this.label5.Name = "label5"; - this.label5.Size = new System.Drawing.Size(58, 14); - this.label5.TabIndex = 10; - this.label5.Text = "Password"; - // - // txtUser - // - 
this.txtUser.Location = new System.Drawing.Point(79, 71); - this.txtUser.Name = "txtUser"; - this.txtUser.Size = new System.Drawing.Size(240, 22); - this.txtUser.TabIndex = 9; - this.txtUser.Text = "admin"; - // - // label4 - // - this.label4.AutoSize = true; - this.label4.Location = new System.Drawing.Point(12, 74); - this.label4.Name = "label4"; - this.label4.Size = new System.Drawing.Size(61, 14); - this.label4.TabIndex = 8; - this.label4.Text = "Username"; - // - // txtPort - // - this.txtPort.Location = new System.Drawing.Point(79, 43); - this.txtPort.Name = "txtPort"; - this.txtPort.Size = new System.Drawing.Size(240, 22); - this.txtPort.TabIndex = 7; - this.txtPort.Text = "90"; - // - // label3 - // - this.label3.AutoSize = true; - this.label3.Location = new System.Drawing.Point(38, 46); - this.label3.Name = "label3"; - this.label3.Size = new System.Drawing.Size(30, 14); - this.label3.TabIndex = 6; - this.label3.Text = "Port"; - // - // txtServer - // - this.txtServer.Location = new System.Drawing.Point(79, 15); - this.txtServer.Name = "txtServer"; - this.txtServer.Size = new System.Drawing.Size(240, 22); - this.txtServer.TabIndex = 5; - this.txtServer.Text = "127.0.0.1"; - // - // label2 - // - this.label2.AutoSize = true; - this.label2.Location = new System.Drawing.Point(31, 18); - this.label2.Name = "label2"; - this.label2.Size = new System.Drawing.Size(42, 14); - this.label2.TabIndex = 4; - this.label2.Text = "Server"; - // - // button1 - // - this.button1.DialogResult = System.Windows.Forms.DialogResult.OK; - this.button1.Location = new System.Drawing.Point(7, 254); - this.button1.Name = "button1"; - this.button1.Size = new System.Drawing.Size(117, 34); - this.button1.TabIndex = 2; - this.button1.Text = "Start"; - this.button1.UseVisualStyleBackColor = true; - this.button1.Click += new System.EventHandler(this.button1_Click); - // - // button2 - // - this.button2.DialogResult = System.Windows.Forms.DialogResult.Cancel; - this.button2.Location = new 
System.Drawing.Point(217, 254); - this.button2.Name = "button2"; - this.button2.Size = new System.Drawing.Size(117, 34); - this.button2.TabIndex = 3; - this.button2.Text = "Cancel"; - this.button2.UseVisualStyleBackColor = true; - this.button2.Click += new System.EventHandler(this.button2_Click); - // - // groupBox3 - // - this.groupBox3.Controls.Add(this.radioButton2); - this.groupBox3.Controls.Add(this.radioButton1); - this.groupBox3.Dock = System.Windows.Forms.DockStyle.Top; - this.groupBox3.Location = new System.Drawing.Point(0, 0); - this.groupBox3.Name = "groupBox3"; - this.groupBox3.Size = new System.Drawing.Size(341, 45); - this.groupBox3.TabIndex = 4; - this.groupBox3.TabStop = false; - // - // radioButton2 - // - this.radioButton2.AutoSize = true; - this.radioButton2.Dock = System.Windows.Forms.DockStyle.Left; - this.radioButton2.Location = new System.Drawing.Point(121, 18); - this.radioButton2.Name = "radioButton2"; - this.radioButton2.Size = new System.Drawing.Size(94, 24); - this.radioButton2.TabIndex = 2; - this.radioButton2.Text = "Server Mode"; - this.radioButton2.UseVisualStyleBackColor = true; - // - // radioButton1 - // - this.radioButton1.AutoSize = true; - this.radioButton1.Checked = true; - this.radioButton1.Dock = System.Windows.Forms.DockStyle.Left; - this.radioButton1.Location = new System.Drawing.Point(3, 18); - this.radioButton1.Name = "radioButton1"; - this.radioButton1.Size = new System.Drawing.Size(118, 24); - this.radioButton1.TabIndex = 1; - this.radioButton1.TabStop = true; - this.radioButton1.Text = "Embedded Mode"; - this.radioButton1.UseVisualStyleBackColor = true; - this.radioButton1.CheckedChanged += new System.EventHandler(this.radioButton1_CheckedChanged); - // - // Form2 - // - this.AcceptButton = this.button1; - this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 14F); - this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; - this.CancelButton = this.button2; - this.ClientSize = new System.Drawing.Size(341, 
293); - this.Controls.Add(this.button2); - this.Controls.Add(this.button1); - this.Controls.Add(this.groupBox2); - this.Controls.Add(this.groupBox1); - this.Controls.Add(this.groupBox3); - this.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(178))); - this.Name = "Form2"; - this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; - this.Text = "Startup"; - this.groupBox1.ResumeLayout(false); - this.groupBox1.PerformLayout(); - this.groupBox2.ResumeLayout(false); - this.groupBox2.PerformLayout(); - this.groupBox3.ResumeLayout(false); - this.groupBox3.PerformLayout(); - this.ResumeLayout(false); - - } - - #endregion - - private System.Windows.Forms.GroupBox groupBox1; - private System.Windows.Forms.Button button3; - private System.Windows.Forms.TextBox txtFolder; - private System.Windows.Forms.GroupBox groupBox2; - private System.Windows.Forms.Button button1; - private System.Windows.Forms.Button button2; - private System.Windows.Forms.Label label1; - private System.Windows.Forms.TextBox txtPassword; - private System.Windows.Forms.Label label5; - private System.Windows.Forms.TextBox txtUser; - private System.Windows.Forms.Label label4; - private System.Windows.Forms.TextBox txtPort; - private System.Windows.Forms.Label label3; - private System.Windows.Forms.TextBox txtServer; - private System.Windows.Forms.Label label2; - private System.Windows.Forms.GroupBox groupBox3; - private System.Windows.Forms.RadioButton radioButton2; - private System.Windows.Forms.RadioButton radioButton1; - } +namespace datagridbinding +{ + partial class frmStartup + { + /// + /// Required designer variable. + /// + private System.ComponentModel.IContainer components = null; + + /// + /// Clean up any resources being used. + /// + /// true if managed resources should be disposed; otherwise, false. 
+ protected override void Dispose(bool disposing) + { + if (disposing && (components != null)) + { + components.Dispose(); + } + base.Dispose(disposing); + } + + #region Windows Form Designer generated code + + /// + /// Required method for Designer support - do not modify + /// the contents of this method with the code editor. + /// + private void InitializeComponent() + { + this.groupBox1 = new System.Windows.Forms.GroupBox(); + this.label1 = new System.Windows.Forms.Label(); + this.button3 = new System.Windows.Forms.Button(); + this.txtFolder = new System.Windows.Forms.TextBox(); + this.groupBox2 = new System.Windows.Forms.GroupBox(); + this.txtPassword = new System.Windows.Forms.TextBox(); + this.label5 = new System.Windows.Forms.Label(); + this.txtUser = new System.Windows.Forms.TextBox(); + this.label4 = new System.Windows.Forms.Label(); + this.txtPort = new System.Windows.Forms.TextBox(); + this.label3 = new System.Windows.Forms.Label(); + this.txtServer = new System.Windows.Forms.TextBox(); + this.label2 = new System.Windows.Forms.Label(); + this.button1 = new System.Windows.Forms.Button(); + this.button2 = new System.Windows.Forms.Button(); + this.groupBox3 = new System.Windows.Forms.GroupBox(); + this.radioButton2 = new System.Windows.Forms.RadioButton(); + this.radioButton1 = new System.Windows.Forms.RadioButton(); + this.groupBox1.SuspendLayout(); + this.groupBox2.SuspendLayout(); + this.groupBox3.SuspendLayout(); + this.SuspendLayout(); + // + // groupBox1 + // + this.groupBox1.Controls.Add(this.label1); + this.groupBox1.Controls.Add(this.button3); + this.groupBox1.Controls.Add(this.txtFolder); + this.groupBox1.Dock = System.Windows.Forms.DockStyle.Top; + this.groupBox1.Location = new System.Drawing.Point(0, 45); + this.groupBox1.Name = "groupBox1"; + this.groupBox1.Size = new System.Drawing.Size(341, 63); + this.groupBox1.TabIndex = 0; + this.groupBox1.TabStop = false; + // + // label1 + // + this.label1.AutoSize = true; + this.label1.Location = new 
System.Drawing.Point(6, 18); + this.label1.Name = "label1"; + this.label1.Size = new System.Drawing.Size(67, 14); + this.label1.TabIndex = 3; + this.label1.Text = "Data folder"; + // + // button3 + // + this.button3.Location = new System.Drawing.Point(297, 35); + this.button3.Name = "button3"; + this.button3.Size = new System.Drawing.Size(36, 22); + this.button3.TabIndex = 2; + this.button3.Text = "..."; + this.button3.UseVisualStyleBackColor = true; + // + // txtFolder + // + this.txtFolder.Location = new System.Drawing.Point(3, 35); + this.txtFolder.Name = "txtFolder"; + this.txtFolder.Size = new System.Drawing.Size(288, 22); + this.txtFolder.TabIndex = 1; + this.txtFolder.Text = "..\\RaptorDBdata"; + // + // groupBox2 + // + this.groupBox2.Controls.Add(this.txtPassword); + this.groupBox2.Controls.Add(this.label5); + this.groupBox2.Controls.Add(this.txtUser); + this.groupBox2.Controls.Add(this.label4); + this.groupBox2.Controls.Add(this.txtPort); + this.groupBox2.Controls.Add(this.label3); + this.groupBox2.Controls.Add(this.txtServer); + this.groupBox2.Controls.Add(this.label2); + this.groupBox2.Dock = System.Windows.Forms.DockStyle.Top; + this.groupBox2.Location = new System.Drawing.Point(0, 108); + this.groupBox2.Name = "groupBox2"; + this.groupBox2.Size = new System.Drawing.Size(341, 137); + this.groupBox2.TabIndex = 1; + this.groupBox2.TabStop = false; + this.groupBox2.Visible = false; + // + // txtPassword + // + this.txtPassword.Location = new System.Drawing.Point(79, 99); + this.txtPassword.Name = "txtPassword"; + this.txtPassword.PasswordChar = '*'; + this.txtPassword.Size = new System.Drawing.Size(240, 22); + this.txtPassword.TabIndex = 11; + this.txtPassword.Text = "admin"; + // + // label5 + // + this.label5.AutoSize = true; + this.label5.Location = new System.Drawing.Point(15, 102); + this.label5.Name = "label5"; + this.label5.Size = new System.Drawing.Size(58, 14); + this.label5.TabIndex = 10; + this.label5.Text = "Password"; + // + // txtUser + // + 
this.txtUser.Location = new System.Drawing.Point(79, 71); + this.txtUser.Name = "txtUser"; + this.txtUser.Size = new System.Drawing.Size(240, 22); + this.txtUser.TabIndex = 9; + this.txtUser.Text = "admin"; + // + // label4 + // + this.label4.AutoSize = true; + this.label4.Location = new System.Drawing.Point(12, 74); + this.label4.Name = "label4"; + this.label4.Size = new System.Drawing.Size(61, 14); + this.label4.TabIndex = 8; + this.label4.Text = "Username"; + // + // txtPort + // + this.txtPort.Location = new System.Drawing.Point(79, 43); + this.txtPort.Name = "txtPort"; + this.txtPort.Size = new System.Drawing.Size(240, 22); + this.txtPort.TabIndex = 7; + this.txtPort.Text = "90"; + // + // label3 + // + this.label3.AutoSize = true; + this.label3.Location = new System.Drawing.Point(38, 46); + this.label3.Name = "label3"; + this.label3.Size = new System.Drawing.Size(30, 14); + this.label3.TabIndex = 6; + this.label3.Text = "Port"; + // + // txtServer + // + this.txtServer.Location = new System.Drawing.Point(79, 15); + this.txtServer.Name = "txtServer"; + this.txtServer.Size = new System.Drawing.Size(240, 22); + this.txtServer.TabIndex = 5; + this.txtServer.Text = "127.0.0.1"; + // + // label2 + // + this.label2.AutoSize = true; + this.label2.Location = new System.Drawing.Point(31, 18); + this.label2.Name = "label2"; + this.label2.Size = new System.Drawing.Size(42, 14); + this.label2.TabIndex = 4; + this.label2.Text = "Server"; + // + // button1 + // + this.button1.DialogResult = System.Windows.Forms.DialogResult.OK; + this.button1.Location = new System.Drawing.Point(7, 254); + this.button1.Name = "button1"; + this.button1.Size = new System.Drawing.Size(117, 34); + this.button1.TabIndex = 2; + this.button1.Text = "Start"; + this.button1.UseVisualStyleBackColor = true; + this.button1.Click += new System.EventHandler(this.button1_Click); + // + // button2 + // + this.button2.DialogResult = System.Windows.Forms.DialogResult.Cancel; + this.button2.Location = new 
System.Drawing.Point(217, 254); + this.button2.Name = "button2"; + this.button2.Size = new System.Drawing.Size(117, 34); + this.button2.TabIndex = 3; + this.button2.Text = "Cancel"; + this.button2.UseVisualStyleBackColor = true; + this.button2.Click += new System.EventHandler(this.button2_Click); + // + // groupBox3 + // + this.groupBox3.Controls.Add(this.radioButton2); + this.groupBox3.Controls.Add(this.radioButton1); + this.groupBox3.Dock = System.Windows.Forms.DockStyle.Top; + this.groupBox3.Location = new System.Drawing.Point(0, 0); + this.groupBox3.Name = "groupBox3"; + this.groupBox3.Size = new System.Drawing.Size(341, 45); + this.groupBox3.TabIndex = 4; + this.groupBox3.TabStop = false; + // + // radioButton2 + // + this.radioButton2.AutoSize = true; + this.radioButton2.Dock = System.Windows.Forms.DockStyle.Left; + this.radioButton2.Location = new System.Drawing.Point(121, 18); + this.radioButton2.Name = "radioButton2"; + this.radioButton2.Size = new System.Drawing.Size(94, 24); + this.radioButton2.TabIndex = 2; + this.radioButton2.Text = "Server Mode"; + this.radioButton2.UseVisualStyleBackColor = true; + // + // radioButton1 + // + this.radioButton1.AutoSize = true; + this.radioButton1.Checked = true; + this.radioButton1.Dock = System.Windows.Forms.DockStyle.Left; + this.radioButton1.Location = new System.Drawing.Point(3, 18); + this.radioButton1.Name = "radioButton1"; + this.radioButton1.Size = new System.Drawing.Size(118, 24); + this.radioButton1.TabIndex = 1; + this.radioButton1.TabStop = true; + this.radioButton1.Text = "Embedded Mode"; + this.radioButton1.UseVisualStyleBackColor = true; + this.radioButton1.CheckedChanged += new System.EventHandler(this.radioButton1_CheckedChanged); + // + // Form2 + // + this.AcceptButton = this.button1; + this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 14F); + this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + this.CancelButton = this.button2; + this.ClientSize = new System.Drawing.Size(341, 
293); + this.Controls.Add(this.button2); + this.Controls.Add(this.button1); + this.Controls.Add(this.groupBox2); + this.Controls.Add(this.groupBox1); + this.Controls.Add(this.groupBox3); + this.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(178))); + this.Name = "Form2"; + this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; + this.Text = "Startup"; + this.groupBox1.ResumeLayout(false); + this.groupBox1.PerformLayout(); + this.groupBox2.ResumeLayout(false); + this.groupBox2.PerformLayout(); + this.groupBox3.ResumeLayout(false); + this.groupBox3.PerformLayout(); + this.ResumeLayout(false); + + } + + #endregion + + private System.Windows.Forms.GroupBox groupBox1; + private System.Windows.Forms.Button button3; + private System.Windows.Forms.TextBox txtFolder; + private System.Windows.Forms.GroupBox groupBox2; + private System.Windows.Forms.Button button1; + private System.Windows.Forms.Button button2; + private System.Windows.Forms.Label label1; + private System.Windows.Forms.TextBox txtPassword; + private System.Windows.Forms.Label label5; + private System.Windows.Forms.TextBox txtUser; + private System.Windows.Forms.Label label4; + private System.Windows.Forms.TextBox txtPort; + private System.Windows.Forms.Label label3; + private System.Windows.Forms.TextBox txtServer; + private System.Windows.Forms.Label label2; + private System.Windows.Forms.GroupBox groupBox3; + private System.Windows.Forms.RadioButton radioButton2; + private System.Windows.Forms.RadioButton radioButton1; + } } \ No newline at end of file diff --git a/datagridbinding/frmStartup.cs b/datagridbinding/frmStartup.cs index 8107843..e402b9c 100644 --- a/datagridbinding/frmStartup.cs +++ b/datagridbinding/frmStartup.cs @@ -1,58 +1,58 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Drawing; -using System.Linq; -using System.Text; -using System.Windows.Forms; 
-using RaptorDB.Common; -using SampleViews; - -namespace datagridbinding -{ - public partial class frmStartup : Form - { - public frmStartup() - { - InitializeComponent(); - } - - public IRaptorDB _rap; - - private void button2_Click(object sender, EventArgs e) - { - this.Close(); - Application.Exit(); - } - - private void button1_Click(object sender, EventArgs e) - { - if (radioButton1.Checked) - { - var p = RaptorDB.RaptorDB.Open(txtFolder.Text); - p.RegisterView(new SalesInvoiceView()); - p.RegisterView(new SalesItemRowsView()); - p.RegisterView(new newview()); - _rap = p; - } - else - { - _rap = new RaptorDB.RaptorDBClient(txtServer.Text, int.Parse(txtPort.Text), txtUser.Text, txtPassword.Text); - } - } - - private void radioButton1_CheckedChanged(object sender, EventArgs e) - { - if (radioButton1.Checked) - { - groupBox1.Visible = true; - groupBox2.Visible = false; - } - else - { - groupBox1.Visible = false; - groupBox2.Visible = true; - } - } - } -} +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Drawing; +using System.Linq; +using System.Text; +using System.Windows.Forms; +using RaptorDB.Common; +using SampleViews; + +namespace datagridbinding +{ + public partial class frmStartup : Form + { + public frmStartup() + { + InitializeComponent(); + } + + public IRaptorDB _rap; + + private void button2_Click(object sender, EventArgs e) + { + this.Close(); + Application.Exit(); + } + + private void button1_Click(object sender, EventArgs e) + { + if (radioButton1.Checked) + { + var p = RaptorDB.RaptorDB.Open(txtFolder.Text); + p.RegisterView(new SalesInvoiceView()); + p.RegisterView(new SalesItemRowsView()); + p.RegisterView(new newview()); + _rap = p; + } + else + { + _rap = new RaptorDB.RaptorDBClient(txtServer.Text, int.Parse(txtPort.Text), txtUser.Text, txtPassword.Text); + } + } + + private void radioButton1_CheckedChanged(object sender, EventArgs e) + { + if (radioButton1.Checked) + { + groupBox1.Visible = true; + 
groupBox2.Visible = false; + } + else + { + groupBox1.Visible = false; + groupBox2.Visible = true; + } + } + } +} diff --git a/datagridbinding/frmStartup.resx b/datagridbinding/frmStartup.resx index 1af7de1..29dcb1b 100644 --- a/datagridbinding/frmStartup.resx +++ b/datagridbinding/frmStartup.resx @@ -1,120 +1,120 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/microsoft-resx - - - 2.0 - - - System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + \ No newline at end of file diff --git a/history.txt b/history.txt index 230916a..33df2e8 100644 --- a/history.txt +++ b/history.txt @@ -1,337 +1,345 @@ -v3.2.12 -------- -- code cleanup -- bug fix full text searching with + - prefixes -- upgrade to fastJSON v2.1.13 -- upgrade to fastBinaryJSON v1.4.10 - -v3.2.11 -------- -- bug fix sorting cache - -v3.2.10 -------- -- renamed Form1 to frmMain -- added sortable fulltext indexes -- fixed path names for linux systems -- changed default save and free memory timers to 30 min instead of 60 sec -- optimized query sorting with internal cache ~100x faster - -v3.2.9 ------- -- upgrade to fastJSON v2.1.11 -- upgrade to fastBinaryJSON v1.4.8 -- added support for vb.net string linq queries -- added vb test project - -v3.2.8 ------- -- bug fix duplicates showing in queries related to the deleted bitmap index - -v3.2.7 ------- -- bug fix 
wait on view rebuild while Shutdown() was being cut off in 2 secs mid process (ProcessExit) - -v3.2.6 ------- -- optimizations done by Stainslav Lukeš -- upgrade to fastJSON v2.1.10 -- upgrade to fastBinaryJSON v1.4.7 -- bug fix bitmap indexes -- bug fix file name conflicts with deleted bitmap indexes -- added version checking of views and RaptorDB engine with auto rebuild for engine upgrades -- changed deleted bitmap indexes to .deleted extension -- changed version number files to text mode with .version extension - -v3.2.5 ------- -- new optimized storage for string key MGIndex file -- added Global.EnableOptimizedStringIndex flag to control the new index usage - -v3.2.0 ------- -- you can compress the documents in the storage file with Global.CompressDocumentOverKiloBytes configuration -- Upgrade to fastJSON v2.1.9 -- added integrity check for views with auto rebuild if not shutdown cleanly -- bug fix disable timers before Shutdown() -- added high frequency update key/value storage file - - -v3.1.6 ------- -- document storage files can now be split with Global.SplitStorageFilesMegaBytes configuration -- refactoring StorageFile.cs -- Upgrade to fastJSON v2.1.8 -- Upgrade to fastBinaryJSON v1.4.6 -- bug fix .config files were not saved correctly - -v3.1.5 ------- -- added View.NoIndexingColumns definition to override indexing of selected columns -- Upgrade to fastJSON v2.1.7 -- Upgrade to fastBinaryJSON v1.4.5 -- added DocumentCount() to get how many items in the storage file -- Shutdown() now waits for View rebuilds to finish -- more intellisense help - - -v3.1.4 ------- -- added StringIndexLength attribute for view schema to control string index size for the index file -- added ViewDelete() to delete directly from views -- added ViewInsert() to insert directly into views -- added Faker.dll (http://faker.codeplex.com) to generate nicer data -- FreeMemory() will save indexes to disk also -- moved server mode files to output\server so you don't get conflicts loading 
views.dll -- page list is also saved to disk on SaveIndex() -- bug fix view schema when not inheriting from RDBSchema -- replaced T with more meaningful TRowSchema in code intellisense - -v3.1.3 ------- -- added FetchHistoryInfo() and FetchBytesHistoryInfo() with date change information -- added api.NextRowNumber() -- moved all config files to the data folder which you should have write access to (thanks to Detlef Kroll) -- bug fixed delete before insert with no rows - -v3.1.2 ------- -- Upgrade to fastJSON v2.1.1 -- Upgrade to fastBinaryJSON v1.4.1 -- bug fixes in WAH and Query2 from Richard Hauer -- changed all singleton implementations -- bug fix indexing String.Empty -- *breaking change* removed FireOnType from view definitions -- Views can now correctly work with subclass of the T defined (i.e. SpecialInvoice : Invoice) -- bug fix index bitmap.Not(size) - -v3.1.1 ------- -- added signed assemblies the assembly version will stay at 3.0.0.0 and the file version will increment -- added nuget build - -v3.1.0 ------- -- added sort for queries -- removed extra query overloads in favour of the new model - -v3.0.6 ------- -- Result.TotalCount reflects the original row count and differs from Result.Count when paging -- internal changed FireOnType to handle Type instead of strings -- Query() can now handle empty filter strings correctly -- Upgrade to fastJSON v2.0.24 -- Upgrade to fastBinaryJSON v1.3.11 - -v3.0.5 ------- -- bug fix saving page list to disk for counts > 50 million items - -v3.0.1 ------- -- upgrade to fastJSON v2.0.22 -- upgrade to fastBinaryJSON v1.3.10 -- detect process exit and shutdown cleanly so you can omit the explicit Shutdown() -- bug fix WAH bitarray - -v3.0.0 ------- -- index files are opened in share mode for online copy -- add cron daemon (thanks to Kevin Coylar) -- backups are now on a cron schedule -- restructured storage file for future proofing and replication support -- storage files now store meta data about objects stored -- * storage 
files are not backward compatible * -- dirty index pages are sorted on save for read performance -- restore is now resumable after a shutdown -- you can disable the primary view to be defined on save with Global.RequirePrimaryView (K/V mode) -- view rebuilds are now done in the background (non-blocking on restart) -- you can define views in c# script format (*.view) to be compiled at runtime in 'datafolder\views' -- row schema defined in script views will be transferred to the client if they don't exist -- fastJSON now serializes static properties -- upgrade to fastJSON v2.0.18 -- upgrade to fastBinaryJSON v1.3.8 -- added HQ-Branch replication feature -- automatic generate config files if they don't exist with a '-' prefix -- 'output' in the root of the solution folder is the new build destination of projects for easy access - -v2.0.6.1 --------- -- bug fix WAHBitArray - -v2.0.6 ------- -- bug fix WAHBitArray -- upgrade to fastJSON v2.0.15 -- bug fix hoot fulltext index on last word -- save deleted items bitmap on save timer - -v2.0.5 ------- -- added FreeMemory to classes -- memory limiting and free memory timer added -- views background save indexes to disk on timer -- fixed RaptorDBServer.csproj to AnyCPU build - -v2.0.0 ------- -- added more method documentations -- * breaking change in doc storage file from hashed guid to guid keys * -- added FetchHistory() and FetchVersion() for docs and files to get revisions -- upgrade to fastJSON v2.0.14 -- upgrade to fastBinaryJSON v1.3.7 -- full text indexing and search for the entire original document -- bug fix linq query with boolean parameter - -v1.9.2 ------- -- SafeDictionary.Add() will update if item exists -- BitmapIndex using new lock mechanism -- CaseInsensitive attribute -- bug fix lowercase hoot indexing -- case insensitive string indexing and searching -- nocase samples -- fixed handling != (not equal) in linq query - -v1.9.1 ------- -- bug fix edge case WAHBitarray -- sync code with changes in hOOt -- bug 
fix missing server mode SaveBytes() -- bug fix server side queries in server mode -- bug fix embedded guid in query : v => v.docid == new Guid("...") - -v1.9.0 ------- -- speed increase writing bitmap indexes to disk -- bug fix hoot search with wildcards -- bug fix datetime indexing with UTC time (all times are localtime) -- upgrade to fastJSON v2.0.9 -- upgrade to fastBinaryJSON v1.3.5 -- changed CodeDOM to Reflection.Emit for MonoDroid compatibility -- more optimized bitmap storage format (save offsets if smaller than WAH) -- fixed path seperator character for monodroid and windows compatibility changed to Path.DirectorySeparatorChar -- new generic Query interface with typed results (thanks to seer_tenedos2) -- changed to Result -- WAH bitcount speed increase -- bitmap index uses buffered stream for speed -- added between query (work in progress) -- bug fix storage file and deleted items -- new query model for mapper api interface -- you can now define your own schema for rows with caveats -- bug fix NOT on bitmap indexes to resize to the total row count first -- when defining your own schema you can define the fulltext columns in the view without attributes - -v1.8.3 ------- -- upgrade to fastJSON v2.0.6 -- upgrade to fastBinaryJSON v1.3.4 -- bug fix linq2string with date,guid parameters -- added double,float types to the indexer valid data types -- added a lock to the IndexFile for concurrency issues (thanks to Antonello Oliveri) -- fixed lock on _que in the logger for concurrency (thanks to Antonello Oliveri) -- fixed the reflection binding to the insert method (thanks to Antonello Oliveri) -- added Count() on views -- added support for paging of results -- the mapper can now see changes it has made in it's own thread in transaction mode while quering - -v1.8.2 ------- -- bug fix linq binding -> c.val == obj.property (Thanks to Joe Dluzen) -- added lock to the bitmap index for concurrency -- optimized $types output in JSON and BJSON -- bug fix null check for 
SafeSortedList.Remove -- bug fix server mode data transfer - -v1.8.1 ------- -- speed increase WAH bitmap Set() code -- bug fix concurrent save bitmap index to disk -- upgrade to fastBinaryJSON v1.3 -- upgrade to fastJSON v2.0.1 - -v1.8 ----- -- upgrade to fastBinaryJSON v1.2 with struct support -- upgrade to fastJSON v1.9.9 with struct support -- bug fix hoot index loadwords when file size is zero -- bug fix linq binding ServerSide -> c.val == stringvariable -- bug fix linq binding -> c.val == stringvariable -- bug fix reflection code in serializers - -v1.7 ----- -- server side aggregate queries -- fixed the build script for views to copy the dll to the extensions folder -- server side queries can have filters -- login form default buttons fix - -v1.6 ----- -- query lambda caching -- transaction support -- bug in datetime serialization - -v1.5 ----- -- compressing network traffic over Param.CompressDataOver limit with MiniLZO -- added Delete(docid) and DeleteBytes(fileid) -- added ability to query Guid and DateTime in string form -- bug fix reading boolean indexes -- rebuild view and background indexer handles deleted docs -- added authentication via users.config file in server mode -- Backup & Restore data -- AddUser() method for user -- handle isDeleted when restoring data and rebuilding View -- Auto backup in server mode @ 00:00 time - -v1.4 ----- -- break up the source into projects -- created client, server dlls -- upgrade to fastBinaryJSON v1.1 -- changed to SafeSortedList for thread safe indexes -- add auto installer RaptorDBServer service -- performance optimized tcp network layer -- added dual mode usage to the windows application (embedded, server) -- code cleanup -- added IRaptorDB interface to allow you to switch between embedded and client seamlessly -- load views from the Extensions folder in server mode - -v1.3 ----- -- Results.Rows are now row schema objects and bindable (even when fields) -- View.Schema must now derive from RaptorDB.RDBSchema -- 
removed columns from Result (not needed anymore) -- RegisterView throws exceptions instead of returning a Result -- added a rudimentary query viewer project -- null values are ignored when indexing -- bool index filename will end in ".idx" -- sample apps will create data files in the main soultion folder for easy sharing -- you can now do aggregate queries on the results on the client side -- added api.EmitObject for easier mapping (less code to write) -- upgrade to fastJSON v1.9.8 -- bug fix datetime in fastBinaryJSON -- added ConsistentView - -v1.2 ----- -- View versioning and rebuild -- code cleanup -- removed indent logic from fastJSON -- added schema of the query to the Result - -v1.1 ----- -- fulltext indexing via attribute -- string query parser -- fix shutdown flusing indexes to disk -- rudimentary console application -- lowercase viewnames for string queries -- fulltext search defaults to AND if + - characters not present in query -- Query now works when suppling the view type -- save pauses indexer for better insert performance ~30% faster - -v1.0 ----- +v3.2.13 +------- +- code refactoring +- bug fix full text index search with leading not "-oak hill" +- fix time output in logs +- upgrade to fastJSON v2.1.14 +- upgrade to fastBinaryJSON v1.4.11 + +v3.2.12 +------- +- code cleanup +- bug fix full text searching with + - prefixes +- upgrade to fastJSON v2.1.13 +- upgrade to fastBinaryJSON v1.4.10 + +v3.2.11 +------- +- bug fix sorting cache + +v3.2.10 +------- +- renamed Form1 to frmMain +- added sortable fulltext indexes +- fixed path names for linux systems +- changed default save and free memory timers to 30 min instead of 60 sec +- optimized query sorting with internal cache ~100x faster + +v3.2.9 +------ +- upgrade to fastJSON v2.1.11 +- upgrade to fastBinaryJSON v1.4.8 +- added support for vb.net string linq queries +- added vb test project + +v3.2.8 +------ +- bug fix duplicates showing in queries related to the deleted bitmap index + +v3.2.7 
+------ +- bug fix wait on view rebuild while Shutdown() was being cut off in 2 secs mid process (ProcessExit) + +v3.2.6 +------ +- optimizations done by Stainslav Lukeš +- upgrade to fastJSON v2.1.10 +- upgrade to fastBinaryJSON v1.4.7 +- bug fix bitmap indexes +- bug fix file name conflicts with deleted bitmap indexes +- added version checking of views and RaptorDB engine with auto rebuild for engine upgrades +- changed deleted bitmap indexes to .deleted extension +- changed version number files to text mode with .version extension + +v3.2.5 +------ +- new optimized storage for string key MGIndex file +- added Global.EnableOptimizedStringIndex flag to control the new index usage + +v3.2.0 +------ +- you can compress the documents in the storage file with Global.CompressDocumentOverKiloBytes configuration +- Upgrade to fastJSON v2.1.9 +- added integrity check for views with auto rebuild if not shutdown cleanly +- bug fix disable timers before Shutdown() +- added high frequency update key/value storage file + + +v3.1.6 +------ +- document storage files can now be split with Global.SplitStorageFilesMegaBytes configuration +- refactoring StorageFile.cs +- Upgrade to fastJSON v2.1.8 +- Upgrade to fastBinaryJSON v1.4.6 +- bug fix .config files were not saved correctly + +v3.1.5 +------ +- added View.NoIndexingColumns definition to override indexing of selected columns +- Upgrade to fastJSON v2.1.7 +- Upgrade to fastBinaryJSON v1.4.5 +- added DocumentCount() to get how many items in the storage file +- Shutdown() now waits for View rebuilds to finish +- more intellisense help + + +v3.1.4 +------ +- added StringIndexLength attribute for view schema to control string index size for the index file +- added ViewDelete() to delete directly from views +- added ViewInsert() to insert directly into views +- added Faker.dll (http://faker.codeplex.com) to generate nicer data +- FreeMemory() will save indexes to disk also +- moved server mode files to output\server so you don't 
get conflicts loading views.dll +- page list is also saved to disk on SaveIndex() +- bug fix view schema when not inheriting from RDBSchema +- replaced T with more meaningful TRowSchema in code intellisense + +v3.1.3 +------ +- added FetchHistoryInfo() and FetchBytesHistoryInfo() with date change information +- added api.NextRowNumber() +- moved all config files to the data folder which you should have write access to (thanks to Detlef Kroll) +- bug fixed delete before insert with no rows + +v3.1.2 +------ +- Upgrade to fastJSON v2.1.1 +- Upgrade to fastBinaryJSON v1.4.1 +- bug fixes in WAH and Query2 from Richard Hauer +- changed all singleton implementations +- bug fix indexing String.Empty +- *breaking change* removed FireOnType from view definitions +- Views can now correctly work with subclass of the T defined (i.e. SpecialInvoice : Invoice) +- bug fix index bitmap.Not(size) + +v3.1.1 +------ +- added signed assemblies the assembly version will stay at 3.0.0.0 and the file version will increment +- added nuget build + +v3.1.0 +------ +- added sort for queries +- removed extra query overloads in favour of the new model + +v3.0.6 +------ +- Result.TotalCount reflects the original row count and differs from Result.Count when paging +- internal changed FireOnType to handle Type instead of strings +- Query() can now handle empty filter strings correctly +- Upgrade to fastJSON v2.0.24 +- Upgrade to fastBinaryJSON v1.3.11 + +v3.0.5 +------ +- bug fix saving page list to disk for counts > 50 million items + +v3.0.1 +------ +- upgrade to fastJSON v2.0.22 +- upgrade to fastBinaryJSON v1.3.10 +- detect process exit and shutdown cleanly so you can omit the explicit Shutdown() +- bug fix WAH bitarray + +v3.0.0 +------ +- index files are opened in share mode for online copy +- add cron daemon (thanks to Kevin Coylar) +- backups are now on a cron schedule +- restructured storage file for future proofing and replication support +- storage files now store meta data about 
objects stored +- * storage files are not backward compatible * +- dirty index pages are sorted on save for read performance +- restore is now resumable after a shutdown +- you can disable the primary view to be defined on save with Global.RequirePrimaryView (K/V mode) +- view rebuilds are now done in the background (non-blocking on restart) +- you can define views in c# script format (*.view) to be compiled at runtime in 'datafolder\views' +- row schema defined in script views will be transferred to the client if they don't exist +- fastJSON now serializes static properties +- upgrade to fastJSON v2.0.18 +- upgrade to fastBinaryJSON v1.3.8 +- added HQ-Branch replication feature +- automatic generate config files if they don't exist with a '-' prefix +- 'output' in the root of the solution folder is the new build destination of projects for easy access + +v2.0.6.1 +-------- +- bug fix WAHBitArray + +v2.0.6 +------ +- bug fix WAHBitArray +- upgrade to fastJSON v2.0.15 +- bug fix hoot fulltext index on last word +- save deleted items bitmap on save timer + +v2.0.5 +------ +- added FreeMemory to classes +- memory limiting and free memory timer added +- views background save indexes to disk on timer +- fixed RaptorDBServer.csproj to AnyCPU build + +v2.0.0 +------ +- added more method documentations +- * breaking change in doc storage file from hashed guid to guid keys * +- added FetchHistory() and FetchVersion() for docs and files to get revisions +- upgrade to fastJSON v2.0.14 +- upgrade to fastBinaryJSON v1.3.7 +- full text indexing and search for the entire original document +- bug fix linq query with boolean parameter + +v1.9.2 +------ +- SafeDictionary.Add() will update if item exists +- BitmapIndex using new lock mechanism +- CaseInsensitive attribute +- bug fix lowercase hoot indexing +- case insensitive string indexing and searching +- nocase samples +- fixed handling != (not equal) in linq query + +v1.9.1 +------ +- bug fix edge case WAHBitarray +- sync code 
with changes in hOOt +- bug fix missing server mode SaveBytes() +- bug fix server side queries in server mode +- bug fix embedded guid in query : v => v.docid == new Guid("...") + +v1.9.0 +------ +- speed increase writing bitmap indexes to disk +- bug fix hoot search with wildcards +- bug fix datetime indexing with UTC time (all times are localtime) +- upgrade to fastJSON v2.0.9 +- upgrade to fastBinaryJSON v1.3.5 +- changed CodeDOM to Reflection.Emit for MonoDroid compatibility +- more optimized bitmap storage format (save offsets if smaller than WAH) +- fixed path seperator character for monodroid and windows compatibility changed to Path.DirectorySeparatorChar +- new generic Query interface with typed results (thanks to seer_tenedos2) +- changed to Result +- WAH bitcount speed increase +- bitmap index uses buffered stream for speed +- added between query (work in progress) +- bug fix storage file and deleted items +- new query model for mapper api interface +- you can now define your own schema for rows with caveats +- bug fix NOT on bitmap indexes to resize to the total row count first +- when defining your own schema you can define the fulltext columns in the view without attributes + +v1.8.3 +------ +- upgrade to fastJSON v2.0.6 +- upgrade to fastBinaryJSON v1.3.4 +- bug fix linq2string with date,guid parameters +- added double,float types to the indexer valid data types +- added a lock to the IndexFile for concurrency issues (thanks to Antonello Oliveri) +- fixed lock on _que in the logger for concurrency (thanks to Antonello Oliveri) +- fixed the reflection binding to the insert method (thanks to Antonello Oliveri) +- added Count() on views +- added support for paging of results +- the mapper can now see changes it has made in it's own thread in transaction mode while quering + +v1.8.2 +------ +- bug fix linq binding -> c.val == obj.property (Thanks to Joe Dluzen) +- added lock to the bitmap index for concurrency +- optimized $types output in JSON and BJSON 
+- bug fix null check for SafeSortedList.Remove +- bug fix server mode data transfer + +v1.8.1 +------ +- speed increase WAH bitmap Set() code +- bug fix concurrent save bitmap index to disk +- upgrade to fastBinaryJSON v1.3 +- upgrade to fastJSON v2.0.1 + +v1.8 +---- +- upgrade to fastBinaryJSON v1.2 with struct support +- upgrade to fastJSON v1.9.9 with struct support +- bug fix hoot index loadwords when file size is zero +- bug fix linq binding ServerSide -> c.val == stringvariable +- bug fix linq binding -> c.val == stringvariable +- bug fix reflection code in serializers + +v1.7 +---- +- server side aggregate queries +- fixed the build script for views to copy the dll to the extensions folder +- server side queries can have filters +- login form default buttons fix + +v1.6 +---- +- query lambda caching +- transaction support +- bug in datetime serialization + +v1.5 +---- +- compressing network traffic over Param.CompressDataOver limit with MiniLZO +- added Delete(docid) and DeleteBytes(fileid) +- added ability to query Guid and DateTime in string form +- bug fix reading boolean indexes +- rebuild view and background indexer handles deleted docs +- added authentication via users.config file in server mode +- Backup & Restore data +- AddUser() method for user +- handle isDeleted when restoring data and rebuilding View +- Auto backup in server mode @ 00:00 time + +v1.4 +---- +- break up the source into projects +- created client, server dlls +- upgrade to fastBinaryJSON v1.1 +- changed to SafeSortedList for thread safe indexes +- add auto installer RaptorDBServer service +- performance optimized tcp network layer +- added dual mode usage to the windows application (embedded, server) +- code cleanup +- added IRaptorDB interface to allow you to switch between embedded and client seamlessly +- load views from the Extensions folder in server mode + +v1.3 +---- +- Results.Rows are now row schema objects and bindable (even when fields) +- View.Schema must now derive 
from RaptorDB.RDBSchema +- removed columns from Result (not needed anymore) +- RegisterView throws exceptions instead of returning a Result +- added a rudimentary query viewer project +- null values are ignored when indexing +- bool index filename will end in ".idx" +- sample apps will create data files in the main soultion folder for easy sharing +- you can now do aggregate queries on the results on the client side +- added api.EmitObject for easier mapping (less code to write) +- upgrade to fastJSON v1.9.8 +- bug fix datetime in fastBinaryJSON +- added ConsistentView + +v1.2 +---- +- View versioning and rebuild +- code cleanup +- removed indent logic from fastJSON +- added schema of the query to the Result + +v1.1 +---- +- fulltext indexing via attribute +- string query parser +- fix shutdown flusing indexes to disk +- rudimentary console application +- lowercase viewnames for string queries +- fulltext search defaults to AND if + - characters not present in query +- Query now works when suppling the view type +- save pauses indexer for better insert performance ~30% faster + +v1.0 +---- - initial release \ No newline at end of file diff --git a/playground/App.config b/playground/App.config new file mode 100644 index 0000000..877624e --- /dev/null +++ b/playground/App.config @@ -0,0 +1,6 @@ + + + + + + diff --git a/playground/ProfilingSessions/Session20150614_154524.sdps b/playground/ProfilingSessions/Session20150614_154524.sdps new file mode 100644 index 0000000..66c1ef0 Binary files /dev/null and b/playground/ProfilingSessions/Session20150614_154524.sdps differ diff --git a/playground/Program.cs b/playground/Program.cs new file mode 100644 index 0000000..4614f00 --- /dev/null +++ b/playground/Program.cs @@ -0,0 +1,494 @@ +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Faker; +using System.Diagnostics; +using System.Threading; +using 
System.Collections.Concurrent; +using GenericPointerHelpers; +using System.Collections; +using System.Runtime.CompilerServices; + +namespace playground +{ + static class Program + { + static void Main(string[] args) + { + new RRRandom().HackToFaker(); + + GphTest(); + + //Console.WriteLine(GenericPointerHelper.SuperCoder_Decode("\f")); + //while(true) + //{ + // var line = Console.ReadLine(); + // while(line.EndsWith("\\")) + // { + // line = line.Remove(line.Length - 1) + Console.ReadLine(); + // } + // string str; + // if (line.StartsWith(".")) + // { + // str = line.Remove(0, 1); + // } + // else + // { + // var bytes = StringToByteArray(line); + // str = Encoding.Unicode.GetString(bytes); + // } + // Console.WriteLine("\"{0}\"", str); + // Console.WriteLine("\"{0}\"", GenericPointerHelper.SuperCoder_Decode(str)); + //} + + //TestGrowingArray(); + //TestGrowingArray(); + + //TestMemcp(16); + //TestMultiHashtable(); + //TestMultiHashtable(); + //TestMultiHashtable(); + //TestMultiHashtable(); + //TestMultiHashtable(); + //TestMultiHashtable(); + //Console.ReadLine(); + //return; + + Console.WriteLine("opening db"); + var rap = OpenDB(); + if (rap.Count("ModelItem") == 0) Insert(rap, 30000); + // UpdateHF(rap, 3); + + QueryTest(rap); + // rap.Shutdown(); + } + + public unsafe static void GphTest() + { + var array = new byte[] { 1, 1, 0, 1, 0x12 }; + fixed (byte* ptr = array) + { + var i = 654; + var ptr1 = &i; + var ptr2 = GenericPointerHelper.AddrOf(ref i); + Console.WriteLine(); + + + //var read = GenericPointerHelper.Read(ptr); + //var sw = Stopwatch.StartNew(); + //int result; + //for (int i = 0; i < 10000000; i++) + //{ + // result = GenericPointerHelper.Read(ptr); + //} + //Console.WriteLine(sw.Elapsed); + //sw.Restart(); + //for (int i = 0; i < 10000000; i++) + //{ + // result = GenericPointerHelper.ReadLimited(ptr, 1); + //} + //Console.WriteLine(sw.Elapsed); + //sw.Restart(); + //for (int i = 0; i < 10000000; i++) + //{ + // if (i > 0) + // { + // result 
= GenericPointerHelper.Read(ptr); + // } + // else result = GenericPointerHelper.ReadLimited(ptr, 4); + //} + //Console.WriteLine(sw.Elapsed); + //sw.Restart(); + //for (int i = 0; i < 10000000; i++) + //{ + // result = BitConverter.ToInt32(array, 0); + //} + //Console.WriteLine(sw.Elapsed); + } + Console.ReadLine(); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static T Test(ref int p) + { + var def = default(T); + return def; + } + + public static byte[] StringToByteArray(string hex) + { + hex = hex.Replace(" ", ""); + return Enumerable.Range(0, hex.Length) + .Where(x => x % 2 == 0) + .Select(x => Convert.ToByte(hex.Substring(x, 2), 16)) + .ToArray(); + } + + static void TestGrowingArray() + { + var sw = Stopwatch.StartNew(); + var list = new List(1024); + for (int i = 0; i < 0x100000; i++) + { + list.Add(i); + } + for (int i = 0; i < list.Count; i++) + { + list[i] *= 2; + } + Console.WriteLine("list: {0}", sw.Elapsed); + sw.Restart(); + var array = new int[0x100000]; + var len = array.Length; + for (int i = 0; i < 0x100000; i++) + { + if (i == len) Array.Resize(ref array, array.Length * 2); + array[i] = i; + } + for (int i = 0; i < array.Length; i++) + { + array[i] *= 2; + } + Console.WriteLine("array.resize: {0}", sw.Elapsed); + sw.Restart(); + var arrList = new List(512); + len = 1024; + var curArr = new int[len]; + int index = 0; + for (int i = 0; i < 0x100000; i++) + { + if (curArr.Length == index) + { + curArr = new int[len]; + arrList.Add(curArr); + index = 0; + } + curArr[index++] = i; + } + for (int i1 = 0; i1 < arrList.Count; i1++) + { + curArr = arrList[i1]; + for (int i = 0; i < curArr.Length; i++) + { + curArr[i] *= 2; + } + } + Console.WriteLine("list: {0}", sw.Elapsed); + } + + static unsafe void TestHashtable() + { + var names = Enumerable.Repeat(0, 100000).Select(i => Guid.NewGuid()).Distinct().ToArray(); + var dictionary = new Dictionary(190000); + var pht = PageHashTableHelper.CreateStructStruct(12503 * 16); + 
Console.WriteLine("testing"); + var sw = Stopwatch.StartNew(); + foreach (var name in names) + { + dictionary.Add(name, 120); + } + Console.WriteLine("dictionary write: {0}", sw.Elapsed); + sw.Restart(); + foreach (var name in names) + { + var i = dictionary[name]; + } + Console.WriteLine("dictionary read: {0}", sw.Elapsed); + sw.Restart(); + foreach (var name in names) + { + pht.Set(name, 120); + } + Console.WriteLine("PageHashTable write: {0}", sw.Elapsed); + sw.Restart(); + foreach (var name in names) + { + pht.FirstOrDefault(name); + } + Console.WriteLine("PageHashTable read: {0}", sw.Elapsed); + HashtableDiagnostic(pht.GetBlockUsageBitmap()); + pht.Dispose(); + } + + static unsafe void TestMultiHashtable() + { + var random = new Random(); + var keys = Enumerable.Repeat(0, 100000).Select(i => random.Next(0, 20000)).ToArray(); + var dictionary = new Dictionary(131072); + var pht = PageHashTableHelper.CreateStructStructMulti(131072); + Console.WriteLine("testing"); + var sw = Stopwatch.StartNew(); + for (int i = 0; i < keys.Length; i++) + { + dictionary[keys[i]] = i; + } + Console.WriteLine("dictionary write: {0}", sw.Elapsed); + sw.Restart(); + foreach (var name in keys) + { + var i = dictionary[name]; + } + Console.WriteLine("dictionary read: {0}", sw.Elapsed); + sw.Restart(); + for (int i = 0; i < keys.Length; i++) + { + pht.Set(keys[i], i); + } + Console.WriteLine("PageHashTable write: {0}", sw.Elapsed); + sw.Restart(); + foreach (var name in keys) + { + var i = pht.FirstOrDefault(name); + } + Console.WriteLine("PageHashTable read: {0}", sw.Elapsed); + HashtableDiagnostic(pht.GetBlockUsageBitmap()); + pht.Dispose(); + } + + public static void HashtableDiagnostic(BitArray ba) + { + List clusters = new List(); + int count = 0; + bool value = false; + for (int i = 1; i < ba.Count; i++) + { + var cv = ba.Get(i); + if (cv == value) count++; + else + { + clusters.Add(count); + value = cv; + count = 1; + } + } + Console.WriteLine("max: {0}", clusters.Max()); + 
Console.WriteLine("avg: {0}", clusters.Average()); + File.WriteAllLines("hashtableDiagnostic.txt", clusters.Select(c => c.ToString())); + } + + static unsafe void TestMemcp(uint size) + { + const int iter = 100000; + var from = new byte[size]; + var to = new byte[size]; + fixed (byte* fromPtr = from) + { + fixed (byte* toPtr = to) + { + Console.WriteLine("testing memcpy, {0} bytes", size); + var sw = Stopwatch.StartNew(); + for (int i = 0; i < iter; i++) + { + GenericPointerHelper.CopyBytes(fromPtr, toPtr, size); + } + Console.WriteLine("unaligned: {0}", sw.Elapsed); + sw.Restart(); + for (int i = 0; i < iter; i++) + { + GenericPointerHelper.CopyBytesAlligned(fromPtr, toPtr, size); + } + Console.WriteLine("alligned: {0}", sw.Elapsed); + sw.Restart(); + for (int i = 0; i < iter; i++) + { + for (int j = 0; j < size; j++) + { + *(toPtr + j) = *(fromPtr + j); + } + } + Console.WriteLine("stupidcopy: {0}", sw.Elapsed); + sw.Restart(); + for (int i = 0; i < iter; i++) + { + Buffer.BlockCopy(from, 0, to, 0, (int)size); + } + Console.WriteLine("Buffer.BlockCopy: {0}", sw.Elapsed); + sw.Restart(); + for (int i = 0; i < iter; i++) + { + Array.Copy(from, to, (int)size); + } + Console.WriteLine("Array.Copy: {0}", sw.Elapsed); + } + } + } + + static void QueryTest(IRaptorDB rap) + { + while (true) + { + Console.Write("> "); + var query = Console.ReadLine().Split(':'); + if (query.Length == 0) continue; + var sw = new Stopwatch(); + if (query[0] == "q" || query[0] == "c") + { + string view = "ModelItem"; + string filter = ""; + int skip = 0; + int limit = 10; + if (query.Length > 2) + { + view = query[1]; + filter = query[2]; + } + else if (query.Length == 2) + { + filter = query[1]; + } + if (query.Length >= 5) + { + if (!int.TryParse(query[3], out skip)) + skip = 0; + if (!int.TryParse(query[4], out limit)) + limit = 10; + } + if (query[0] == "q") + { + sw.Start(); + var result = rap.Query(view, filter, skip, limit); + sw.Stop(); + if (result.OK) + { + foreach (var r in 
result.Rows) + { + Print(r); + } + } + } + else if (query[0] == "c") + { + sw.Start(); + var c = rap.Count(view, filter); + sw.Stop(); + Console.WriteLine(c); + } + } + else if (query[0] == "hf") + { + if (query.Length == 2) + { + sw.Start(); + var doc = rap.GetKVHF().GetObjectHF(query[1]); + sw.Stop(); + Print(doc); + } + else if (query.Length == 1) + { + sw.Start(); + var doc = rap.GetKVHF().GetKeysHF(); + sw.Stop(); + Print(doc); + } + } + else if (query[0] == "f") + { + var id = Guid.Parse(query[1]); + sw.Start(); + var doc = rap.Fetch(id); + sw.Stop(); + Print(doc); + } + else if (query[0] == "exit") return; + Console.WriteLine("{0}", sw.Elapsed.ToString()); + } + } + + static void Print(object o) + { + Console.WriteLine(fastJSON.JSON.ToNiceJSON(o, new fastJSON.JSONParameters + { + UseFastGuid = false, + UsingGlobalTypes = false + })); + } + + static void Insert(IRaptorDB rap, int count) + { + int i = 0; + Console.WriteLine("generating items"); + var items = GenerateItems(count); + var sw = Stopwatch.StartNew(); + Console.WriteLine("inserting items"); + Parallel.ForEach(items, new ParallelOptions() { MaxDegreeOfParallelism = 1 }, item => + //foreach(var item in items) + { + var j = Interlocked.Increment(ref i); + rap.Save(item.Id, item); + if (j % 500 == 0) Console.WriteLine("{0} items inserted in {1:N1}s", j, sw.ElapsedMilliseconds / 1000.0); + }); + } + + static void UpdateHF(IRaptorDB rap, int count) + { + var hf = rap.GetKVHF(); + for (int i = 0; i < count; i++) + { + var name = NameFaker.Name(); + var obj = new OtherItem() + { + Numbers = new bool[3].Select(n => NumberFaker.Number()).ToArray(), + Bytes = new Dictionary() { + { + "a", + fastBinaryJSON.BJSON.ToBJSON("{}") + } + } + }; + hf.SetObjectHF(name, obj); + } + } + + public static RaptorDB.RaptorDB OpenDB() + { + RaptorDB.Global.EarlyPageSplitSize = 50; + if (Directory.Exists("rdb")) Directory.Delete("rdb", recursive: true); + var r = RaptorDB.RaptorDB.Open("rdb"); + r.RegisterView(new 
DefaultModelItemView()); + r.RegisterView(new FriendsModelItemView()); + return r; + } + + public static IEnumerable OneThreadBuffered(this IEnumerable source, int buffer = 1000) + { + int i = 0; + var arr = new T[buffer]; + foreach (var el in source) + { + arr[i++] = el; + if (i == buffer) + { + foreach (var a in arr) + yield return a; + arr = new T[buffer]; + i = 0; + } + } + + for (int j = 0; j < i; j++) + { + yield return arr[j]; + } + } + + public static IEnumerable GenerateItems(int count) + { + return new bool[count].Select(a => + { + + var i = new ModelItem(); + i.Id = Guid.NewGuid(); + i.Name = NameFaker.Name(); + i.Number = Faker.NumberFaker.Number(1, 500); + i.Friends = new bool[NumberFaker.Number(1, 400)] + .Select(_ => NameFaker.Name()).ToArray(); + i.WebSite = Faker.InternetFaker.Domain(); + return i; + }); + } + } +} diff --git a/playground/Properties/AssemblyInfo.cs b/playground/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..297f6eb --- /dev/null +++ b/playground/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("playground")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("playground")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("7b90d541-c37e-44d6-b344-35eca59e9a14")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/playground/RRRandom.cs b/playground/RRRandom.cs new file mode 100644 index 0000000..da7f80c --- /dev/null +++ b/playground/RRRandom.cs @@ -0,0 +1,57 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading; + +namespace playground +{ + + public class RRRandom : Random + { + int a; + + public RRRandom() + { + a = base.Next(); + } + + public override int Next() + { + unchecked + { + var ticks = (int)(DateTime.UtcNow.Ticks * 101701); + var i = Interlocked.Increment(ref a); + return ticks * i; + } + } + + public override int Next(int maxValue) + { + int bits = -1; + var n = maxValue; + while (n > 0) + { + n = n >> 1; + bits++; + } + uint result; + while (true) + { + result = ((uint)this.Next()) >> (32 - bits); + if (result < maxValue) return (int)result; + } + } + + public override int Next(int minValue, int maxValue) + { + return Next(maxValue - minValue) + minValue; + } + + public void HackToFaker() + { + var f = typeof(Faker.NumberFaker).GetField("_random", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.NonPublic); + f.SetValue(null, this); + } + } +} diff --git a/playground/playground.csproj b/playground/playground.csproj new file mode 100644 index 0000000..c413acc --- /dev/null +++ b/playground/playground.csproj @@ -0,0 +1,108 @@ + + + + + Debug + AnyCPU + {7B90D541-C37E-44D6-B344-35ECA59E9A14} + Exe 
+ Properties + playground + playground + v4.5 + 512 + + + + x64 + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + true + false + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + true + false + + + true + bin\x64\Debug\ + DEBUG;TRACE + true + full + x64 + prompt + MinimumRecommendedRules.ruleset + true + + + bin\x64\Release\ + TRACE + true + true + pdbonly + x64 + prompt + MinimumRecommendedRules.ruleset + true + + + + ..\Faker.dll + + + False + ..\GenericPointerHelpers\GenericPointerHelpers.dll + + + + + + + + + + + + + + + + + + + + + {32331d51-5be0-41e2-af1a-9b086c5ae809} + RaptorDB.Common + + + {45f6be30-989a-4749-b6a0-69099c8661f4} + RaptorDB + + + + + + + + + \ No newline at end of file diff --git a/playground/views.cs b/playground/views.cs new file mode 100644 index 0000000..efe1ab4 --- /dev/null +++ b/playground/views.cs @@ -0,0 +1,89 @@ +using RaptorDB; +using RaptorDB.Common; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace playground +{ + public class ModelItem + { + public string Name { get; set; } + public int Number { get; set; } + public string WebSite { get; set; } + public Guid Id { get; set; } + public string[] Friends { get; set; } + + public ModelItem() + { + Friends = new string[0]; + } + } + + public class OtherItem + { + public int[] Numbers; + public Dictionary Bytes; + } + + public class FriendsModelItemView : View + { + public class RowSchema : RDBSchema + { + public string Name; + } + public FriendsModelItemView() + { + this.Name = "friends"; + this.Description = "ModelItems friens"; + this.ConsistentSaveToThisView = true; + this.isActive = true; + this.BackgroundIndexing = false; + this.Version = 1; + // // uncomment the following for transaction mode + //this.TransactionMode = true; + + this.SetStringIndex(s => s.Name, length: 255); + //this.SetMMIndex(s => s.Name, keySerializer: new 
PageHashTableHelper.StringPageSerializer(255)); + + this.Mapper = (api, docid, doc) => + { + foreach (var f in doc.Friends) + { + api.Emit(docid, f); + } + }; + } + + } + + public class DefaultModelItemView : RaptorDB.View + { + public class RowSchema : RDBSchema + { + public string Name; + public int Number; + } + public DefaultModelItemView() + { + this.Name = "ModelItem"; + this.Description = "A primary view for ModelItem"; + this.isPrimaryList = true; + this.isActive = true; + this.BackgroundIndexing = false; + this.Version = 1; + //// uncomment the following for transaction mode + //this.TransactionMode = true; + + this.SetStringIndex(s => s.Name, ignoreCase: true); + this.SetMMIndex(s => s.Number); + + this.Mapper = (api, docid, doc) => + { + api.EmitObject(docid, doc); + }; + } + } +} diff --git a/removed/DataRow.cs b/removed/DataRow.cs index c8541f2..290f339 100644 --- a/removed/DataRow.cs +++ b/removed/DataRow.cs @@ -1,44 +1,44 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; - -namespace RaptorDB.Views -{ - public struct Column - { - public string Name { get; set; } - public Type DataType { get; set; } - } - - public class DataRow - { - public DataRow() - { - Columns = new List(); - RowData = new object[0]; - } - - public object[] RowData { get; set; } - public List Columns { get; set; } - - public object this[int index] - { - get { return RowData[index]; } - set { RowData[index] = value; } - } - - public object this[string name] - { - get { return RowData[columnindex(name)]; } - set { RowData[columnindex(name)] = value; } - } - - private int columnindex(string name) - { - int i = -1; - i = Columns.FindIndex(delegate(Column c) { return c.Name.ToLower() == name.ToLower(); }); - return i; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace RaptorDB.Views +{ + public struct Column + { + public string Name { get; set; } + public Type DataType { get; set; 
} + } + + public class DataRow + { + public DataRow() + { + Columns = new List(); + RowData = new object[0]; + } + + public object[] RowData { get; set; } + public List Columns { get; set; } + + public object this[int index] + { + get { return RowData[index]; } + set { RowData[index] = value; } + } + + public object this[string name] + { + get { return RowData[columnindex(name)]; } + set { RowData[columnindex(name)] = value; } + } + + private int columnindex(string name) + { + int i = -1; + i = Columns.FindIndex(delegate(Column c) { return c.Name.ToLower() == name.ToLower(); }); + return i; + } + } +} diff --git a/removed/MapEngine.cs b/removed/MapEngine.cs index 4de8878..39307bd 100644 --- a/removed/MapEngine.cs +++ b/removed/MapEngine.cs @@ -1,184 +1,184 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.CodeDom.Compiler; -using System.IO; -using System.Reflection; -using RaptorDB.Views; - -namespace RaptorDB.Mapping -{ - public class MapEngine - { - public MapEngine(IMapAPI vm) - { - _api = vm; - } - - private IMapAPI _api; - SafeDictionary _mapcache = new SafeDictionary(); - public bool DebugMode = false; - private ILog _log = LogManager.GetLogger(typeof(MapEngine)); - - -// public void test() -// { -// Views.View v = new Views.View(); -// v.SchemaColumns = new ViewRowDefinition(); -// v.Name = "test"; -// v.FireOnTypes = new List(); -// v.FireOnTypes.Add("BizFX.Entity.Return, BizFX.Entity, Version=2.0.0.0, Culture=neutral, PublicKeyToken=e5d192f5e46064af"); -// v.SchemaColumns.Columns.Add("Name", DataTypes.String); -// v.SchemaColumns.Columns.Add("BirthDay", DataTypes.DateTime); -// v.SchemaColumns.Columns.Add("Address", DataTypes.String); -// v.SchemaColumns.Columns.Add("Code", DataTypes.Int); -// v.MapFunctionCode = @" -//List q = api.Query(""someview"",""a=1"",0, -1); -//foreach(object[] rr in q) -//{ -// testRow r = new testRow(rr); -// emit(data.GUID, r.Name, r.BirthDay, r.Address, r.Code*2); -//} 
-//if(data.IsOK) -// emit(data.GUID, ""ppp"",DateTime.Now,""hello"",123); -//else -// api.Log(""error""); -//"; -// //v.SchemaColumns = v.Name; -// //v.ViewsUsed = new List(); -// //v.ViewsUsed.Add(v.SchemaColumns); -// Compile(v,"RaptorDB\\Views\\"); -// } - - #region [ M A P C O M P I L E R ] - public void Compile(Views.ViewBase view, string mapfolder) - { - Directory.CreateDirectory(mapfolder); - CodeDomProvider cs = CodeDomProvider.CreateProvider("CSharp"); - var _Parameters = new CompilerParameters(); - - // create source file - string code = CreateSourceFile(view, _Parameters); - - // Compile code - _Parameters.IncludeDebugInformation = false; - _Parameters.GenerateExecutable = false; - _Parameters.CompilerOptions = "/optimize"; - _Parameters.OutputAssembly = mapfolder + - view.Name + - //DocID.ToString().Replace("-", "") + - ".dll"; - var compilerresult = cs.CompileAssemblyFromSource(_Parameters, code); - - if (compilerresult.Errors.HasErrors) - { - foreach (var p in compilerresult.Errors) - _log.Error(p.ToString()); - } - else - { - _mapcache.Remove(mapfolder + view.Name + ".dll"); - } - } - - private string CreateSourceFile(Views.ViewBase view, CompilerParameters _Parameters) - { - // create source file from template - string code = Properties.Resources.CodeTemplate - .Replace("%USER_CODE%", view.MapFunctionCode) - .Replace("%COLUMN_COUNT%", view.SchemaColumns.Columns.Count.ToString()) - .Replace("%VIEW_ROW%", CreateViewRows(view.ViewsUsed)) - .Replace("%BASE_TYPE%", Type.GetType(view.FireOnTypes[0]).FullName) - .Replace("%COLUMNS_PARAMS%", CreateColumnParams(view.SchemaColumns)) - .Replace("%DATA_ASSIGN%", CreateDataAssign(view.SchemaColumns)) - ; - - // extract type information for compile references - Dictionary references = new Dictionary(); - - foreach (string aqn in view.FireOnTypes) - { - Type t = Type.GetType(aqn); - if (references.ContainsKey(t.FullName) == false) - references.Add(t.FullName, t.Assembly.Location); - // TODO :traverse hierarchy and 
add all references - } - - _Parameters.ReferencedAssemblies.Add(this.GetType().Assembly.Location); - // set reference assemblies - foreach (string s in references.Values) - _Parameters.ReferencedAssemblies.Add(s); - - return code; - } - - private string CreateViewRows(List list) - { - StringBuilder sb = new StringBuilder(); - - string temp = Properties.Resources.ViewRow; - - foreach (var v in list) - { - sb.Append( ViewBase.GenerateViewRow(v)); - } - - return sb.ToString(); - } - - private string CreateColumnParams(Views.ViewRowDefinition viewRowDefinition) - { - StringBuilder sb = new StringBuilder(); - - int i = 0; - foreach (KeyValuePair kv in viewRowDefinition.Columns) - { - sb.Append(kv.Value); - sb.Append(" "); - sb.Append(kv.Key); - i++; - if (i < viewRowDefinition.Columns.Count) - sb.Append(","); - } - - return sb.ToString(); - } - - private string CreateDataAssign(Views.ViewRowDefinition viewRowDefinition) - { - StringBuilder sb = new StringBuilder(); - int i = 1; - foreach (KeyValuePair kv in viewRowDefinition.Columns) - { - sb.Append("data["); - sb.Append(i.ToString()); - sb.Append("] = "); - sb.Append(kv.Key); - sb.AppendLine(";"); - i++; - } - - return sb.ToString(); - } - #endregion - - public DataList Execute(string filename,Guid docid, object data) - { - IMAPFunction map; - if (_mapcache.TryGetValue(filename, out map)==false) - { - byte[] b = File.ReadAllBytes(filename); - Assembly a= Assembly.Load(b); - map = (IMAPFunction)a.CreateInstance("mapnamespace.mapfunction"); - _mapcache.Add(filename, map); - } - if (map != null) - { - map.CallMapper(docid, data, _api); - return map.GetRows(); - } - return null; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.CodeDom.Compiler; +using System.IO; +using System.Reflection; +using RaptorDB.Views; + +namespace RaptorDB.Mapping +{ + public class MapEngine + { + public MapEngine(IMapAPI vm) + { + _api = vm; + } + + private IMapAPI _api; + 
SafeDictionary _mapcache = new SafeDictionary(); + public bool DebugMode = false; + private ILog _log = LogManager.GetLogger(typeof(MapEngine)); + + +// public void test() +// { +// Views.View v = new Views.View(); +// v.SchemaColumns = new ViewRowDefinition(); +// v.Name = "test"; +// v.FireOnTypes = new List(); +// v.FireOnTypes.Add("BizFX.Entity.Return, BizFX.Entity, Version=2.0.0.0, Culture=neutral, PublicKeyToken=e5d192f5e46064af"); +// v.SchemaColumns.Columns.Add("Name", DataTypes.String); +// v.SchemaColumns.Columns.Add("BirthDay", DataTypes.DateTime); +// v.SchemaColumns.Columns.Add("Address", DataTypes.String); +// v.SchemaColumns.Columns.Add("Code", DataTypes.Int); +// v.MapFunctionCode = @" +//List q = api.Query(""someview"",""a=1"",0, -1); +//foreach(object[] rr in q) +//{ +// testRow r = new testRow(rr); +// emit(data.GUID, r.Name, r.BirthDay, r.Address, r.Code*2); +//} +//if(data.IsOK) +// emit(data.GUID, ""ppp"",DateTime.Now,""hello"",123); +//else +// api.Log(""error""); +//"; +// //v.SchemaColumns = v.Name; +// //v.ViewsUsed = new List(); +// //v.ViewsUsed.Add(v.SchemaColumns); +// Compile(v,"RaptorDB\\Views\\"); +// } + + #region [ M A P C O M P I L E R ] + public void Compile(Views.ViewBase view, string mapfolder) + { + Directory.CreateDirectory(mapfolder); + CodeDomProvider cs = CodeDomProvider.CreateProvider("CSharp"); + var _Parameters = new CompilerParameters(); + + // create source file + string code = CreateSourceFile(view, _Parameters); + + // Compile code + _Parameters.IncludeDebugInformation = false; + _Parameters.GenerateExecutable = false; + _Parameters.CompilerOptions = "/optimize"; + _Parameters.OutputAssembly = mapfolder + + view.Name + + //DocID.ToString().Replace("-", "") + + ".dll"; + var compilerresult = cs.CompileAssemblyFromSource(_Parameters, code); + + if (compilerresult.Errors.HasErrors) + { + foreach (var p in compilerresult.Errors) + _log.Error(p.ToString()); + } + else + { + _mapcache.Remove(mapfolder + view.Name + 
".dll"); + } + } + + private string CreateSourceFile(Views.ViewBase view, CompilerParameters _Parameters) + { + // create source file from template + string code = Properties.Resources.CodeTemplate + .Replace("%USER_CODE%", view.MapFunctionCode) + .Replace("%COLUMN_COUNT%", view.SchemaColumns.Columns.Count.ToString()) + .Replace("%VIEW_ROW%", CreateViewRows(view.ViewsUsed)) + .Replace("%BASE_TYPE%", Type.GetType(view.FireOnTypes[0]).FullName) + .Replace("%COLUMNS_PARAMS%", CreateColumnParams(view.SchemaColumns)) + .Replace("%DATA_ASSIGN%", CreateDataAssign(view.SchemaColumns)) + ; + + // extract type information for compile references + Dictionary references = new Dictionary(); + + foreach (string aqn in view.FireOnTypes) + { + Type t = Type.GetType(aqn); + if (references.ContainsKey(t.FullName) == false) + references.Add(t.FullName, t.Assembly.Location); + // TODO :traverse hierarchy and add all references + } + + _Parameters.ReferencedAssemblies.Add(this.GetType().Assembly.Location); + // set reference assemblies + foreach (string s in references.Values) + _Parameters.ReferencedAssemblies.Add(s); + + return code; + } + + private string CreateViewRows(List list) + { + StringBuilder sb = new StringBuilder(); + + string temp = Properties.Resources.ViewRow; + + foreach (var v in list) + { + sb.Append( ViewBase.GenerateViewRow(v)); + } + + return sb.ToString(); + } + + private string CreateColumnParams(Views.ViewRowDefinition viewRowDefinition) + { + StringBuilder sb = new StringBuilder(); + + int i = 0; + foreach (KeyValuePair kv in viewRowDefinition.Columns) + { + sb.Append(kv.Value); + sb.Append(" "); + sb.Append(kv.Key); + i++; + if (i < viewRowDefinition.Columns.Count) + sb.Append(","); + } + + return sb.ToString(); + } + + private string CreateDataAssign(Views.ViewRowDefinition viewRowDefinition) + { + StringBuilder sb = new StringBuilder(); + int i = 1; + foreach (KeyValuePair kv in viewRowDefinition.Columns) + { + sb.Append("data["); + sb.Append(i.ToString()); 
+ sb.Append("] = "); + sb.Append(kv.Key); + sb.AppendLine(";"); + i++; + } + + return sb.ToString(); + } + #endregion + + public DataList Execute(string filename,Guid docid, object data) + { + IMAPFunction map; + if (_mapcache.TryGetValue(filename, out map)==false) + { + byte[] b = File.ReadAllBytes(filename); + Assembly a= Assembly.Load(b); + map = (IMAPFunction)a.CreateInstance("mapnamespace.mapfunction"); + _mapcache.Add(filename, map); + } + if (map != null) + { + map.CallMapper(docid, data, _api); + return map.GetRows(); + } + return null; + } + } +} diff --git a/removed/removed code.txt b/removed/removed code.txt index 9f48314..4d40e1a 100644 --- a/removed/removed code.txt +++ b/removed/removed code.txt @@ -1,238 +1,238 @@ -raptordb.cs-------------------------------------------------------------------------------------------------------------------------- - //private bool GetDocID(object data, out Guid docid) - //{ - // docid = Guid.Empty; - // Type t = data.GetType(); - // string propname = ""; - // if (_docIDProperty.TryGetValue(t, out propname) == false) - // { - // // type not found try all subclasses - // foreach (var p in _docIDProperty) - // { - // if (t.IsSubclassOf(p.Key) && p.Key != typeof(object)) - // { - // // add type to dictionary for later - // _docIDProperty.Add(t, p.Value); - // propname = p.Value; - // break; - // } - // } - // if (propname == "") - // return false; - // } - - // var props = fastJSON.JSON.Instance.Getproperties(t, fastJSON.JSON.Instance.GetTypeAssemblyName(t)); - // fastJSON.JSON.myPropInfo mpi; - // if (props.TryGetValue(propname, out mpi) == true) - // { - // docid = (Guid)mpi.getter(data); - // return true; - // } - // return false; - //} - - //public void SetDocumentIDProperty(Type type, string IDproperty) - //{ - // _docIDProperty.Add(type, IDproperty); - //} - - //public void CompactView(string viewname) - //{ - - //} - - //public void RebuildView(string viewname) - //{ - - //} - - - - 
-view.cs------------------------------------------------------------------------------------------------------------------------- -// /// -// /// C# code generator for the DataList class -// /// -// /// string -// public string GenerateDataList() -// { -// string str = @" -//public class {0}EmitList : EmitList -//{{ -// public void Emit(Guid docid, {1}) -// {{ -// object[] row = new object[{2}]; -// row[0] = docid; -//{3} -// Data.Add(row); -// }} -//}} -//"; -// StringBuilder rows = new StringBuilder(); -// int i = 1; -// foreach (var s in SchemaColumns.Columns) -// { -// rows.AppendLine("\t\trow[" + i++ + "] = " + s.Key + ";"); -// } -// StringBuilder columnlist = new StringBuilder(); -// bool prepend = false; -// foreach (var s in SchemaColumns.Columns) -// { -// if (prepend) columnlist.Append(", "); -// prepend = true; -// columnlist.Append(ConvertDataType(s.Value) + " " + s.Key); -// } -// string n = Name; -// if (n == null || n == "") -// n = "unnamed"; - -// return string.Format(str, n, columnlist.ToString(), (SchemaColumns.Columns.Count + 1).ToString(), rows.ToString()); -// } - - ///// - ///// C# code generator for the ViewRow class - ///// - ///// string - //public string GenerateViewRow() - //{ - // return "";//GenerateViewRow(this.SchemaColumns); - //} - - //internal static string GenerateViewRow(ViewRowDefinition viewdef) - //{ - // StringBuilder sb = new StringBuilder(); - - // string temp = Properties.Resources.ViewRow; - // sb.Append(temp.Replace("%VIEWNAME%", viewdef.Name)); - // int i = 0; - // foreach (var c in viewdef.Columns) - // { - // i++; - // string row = "\tpublic %value% %name% { get { return (%value%)d[%i%]; } set { d[%i%] = value; } }"; - // sb.AppendLine(row.Replace("%name%", c.Key).Replace("%value%", ConvertDataType(c.Value)).Replace("%i%", i.ToString())); - // } - // sb.Append("}"); - - // return sb.ToString(); - //} - - //private static string ConvertDataType(Type dt) - //{ - // string str = "int"; - // str = dt.ToString(); - // 
return str; - //} - - //public enum DataTypes - //{ - // Boolean, - // Int, - // DateTime, - // Long, - // Byte, - // Short, - // Guid, - // String, - // Decimal, - // Float - //} - - -viewhandler.cs ----------------------------------------------------------------------------------------------------------------- - //private Type GetType(Type dataTypes) - //{ - // Type t = null; - // switch (dataTypes) - // { - // case DataTypes.Boolean: t = typeof(bool); break; - // case DataTypes.Byte: t = typeof(byte); break; - // case DataTypes.DateTime: t = typeof(DateTime); break; - // case DataTypes.Guid: t = typeof(Guid); break; - // case DataTypes.Int: t = typeof(int); break; - // case DataTypes.Long: t = typeof(long); break; - // case DataTypes.Short: t = typeof(short); break; - // case DataTypes.String: t = typeof(string); break; - // case DataTypes.Decimal: t = typeof(decimal); break; - // case DataTypes.Float: t = typeof(float); break; - // } - - // return t; - //} - - //private byte[] GenerateRowBytes(object[] data) - //{ - // MemoryStream ms = new MemoryStream(); - // BinaryWriter bw = new BinaryWriter(ms, Encoding.Unicode); - // // write null value bitmap - // BitArray ba = new BitArray(data.Length); - // for (int i = 1; i < data.Length; i++) - // if (data[i] == null) ba.Set(i - 1, true); - - // byte[] nulls = new byte[data.Length / 8 + 1]; - // ba.CopyTo(nulls, 0); - // bw.Write(nulls); - - // foreach (object o in data) - // { - // if (o != null) - // { - // if (o is bool) bw.Write((bool)o); - // else if (o is int) bw.Write((int)o); - // else if (o is Guid) bw.Write(((Guid)o).ToByteArray()); - // else if (o is long) bw.Write((long)o); - // else if (o is DateTime) bw.Write(((DateTime)o).ToBinary()); - // else if (o is string) bw.Write((string)o); - // else if (o is byte) bw.Write((byte)o); - // else if (o is short) bw.Write((short)o); - // else if (o is double) bw.Write((double)o); - // else if (o is decimal) bw.Write((decimal)o); - // else if (o is float) 
bw.Write((float)o); - // } - // } - - // return ms.ToArray(); - //} - - //private void Test() - //{ - // byte[] b = GenerateRowBytes(new object[] { 1, null, null, "asdasdasd asdAS" }); - - // object[] cols = ReadRowFromBytes(new Type[] { typeof(int), typeof(bool), typeof(long), typeof(string) }, b); - // int i = cols.Length; - //} - - //private object[] ReadRowFromBytes(Type[] schema, byte[] bytes) - //{ - // MemoryStream ms = new MemoryStream(bytes); - // BinaryReader br = new BinaryReader(ms, Encoding.Unicode); - // byte[] nulls = new byte[schema.Length / 8 + 1]; - // List obj = new List(10); - // br.Read(nulls, 0, schema.Length / 8 + 1); - // BitArray ba = new BitArray(nulls); - - // // first is the docID - // obj.Add(new Guid(br.ReadBytes(16))); - - // for (int i = 0; i < schema.Length; i++) - // { - // if (ba.Get(i) == false) - // { - // Type o = schema[i]; - // if (o == typeof(bool)) obj.Add(br.ReadBoolean()); - // else if (o == typeof(int)) obj.Add(br.ReadInt32()); - // else if (o == typeof(Guid)) obj.Add(new Guid(br.ReadBytes(16))); - // else if (o == typeof(long)) obj.Add(br.ReadInt64()); - // else if (o == typeof(DateTime)) obj.Add(DateTime.FromBinary(br.ReadInt64())); - // else if (o == typeof(string)) obj.Add(br.ReadString()); - // else if (o == typeof(byte)) obj.Add(br.ReadByte()); - // else if (o == typeof(short)) obj.Add(br.ReadInt16()); - // else if (o == typeof(double)) obj.Add(br.ReadDouble()); - // else if (o == typeof(decimal)) obj.Add(br.ReadDecimal()); - // else if (o == typeof(float)) obj.Add(br.ReadSingle()); - // } - // else - // obj.Add(null); - // } - - // return obj.ToArray(); +raptordb.cs-------------------------------------------------------------------------------------------------------------------------- + //private bool GetDocID(object data, out Guid docid) + //{ + // docid = Guid.Empty; + // Type t = data.GetType(); + // string propname = ""; + // if (_docIDProperty.TryGetValue(t, out propname) == false) + // { + // // type not 
found try all subclasses + // foreach (var p in _docIDProperty) + // { + // if (t.IsSubclassOf(p.Key) && p.Key != typeof(object)) + // { + // // add type to dictionary for later + // _docIDProperty.Add(t, p.Value); + // propname = p.Value; + // break; + // } + // } + // if (propname == "") + // return false; + // } + + // var props = fastJSON.JSON.Instance.Getproperties(t, fastJSON.JSON.Instance.GetTypeAssemblyName(t)); + // fastJSON.JSON.myPropInfo mpi; + // if (props.TryGetValue(propname, out mpi) == true) + // { + // docid = (Guid)mpi.getter(data); + // return true; + // } + // return false; + //} + + //public void SetDocumentIDProperty(Type type, string IDproperty) + //{ + // _docIDProperty.Add(type, IDproperty); + //} + + //public void CompactView(string viewname) + //{ + + //} + + //public void RebuildView(string viewname) + //{ + + //} + + + + +view.cs------------------------------------------------------------------------------------------------------------------------- +// /// +// /// C# code generator for the DataList class +// /// +// /// string +// public string GenerateDataList() +// { +// string str = @" +//public class {0}EmitList : EmitList +//{{ +// public void Emit(Guid docid, {1}) +// {{ +// object[] row = new object[{2}]; +// row[0] = docid; +//{3} +// Data.Add(row); +// }} +//}} +//"; +// StringBuilder rows = new StringBuilder(); +// int i = 1; +// foreach (var s in SchemaColumns.Columns) +// { +// rows.AppendLine("\t\trow[" + i++ + "] = " + s.Key + ";"); +// } +// StringBuilder columnlist = new StringBuilder(); +// bool prepend = false; +// foreach (var s in SchemaColumns.Columns) +// { +// if (prepend) columnlist.Append(", "); +// prepend = true; +// columnlist.Append(ConvertDataType(s.Value) + " " + s.Key); +// } +// string n = Name; +// if (n == null || n == "") +// n = "unnamed"; + +// return string.Format(str, n, columnlist.ToString(), (SchemaColumns.Columns.Count + 1).ToString(), rows.ToString()); +// } + + ///// + ///// C# code 
generator for the ViewRow class + ///// + ///// string + //public string GenerateViewRow() + //{ + // return "";//GenerateViewRow(this.SchemaColumns); + //} + + //internal static string GenerateViewRow(ViewRowDefinition viewdef) + //{ + // StringBuilder sb = new StringBuilder(); + + // string temp = Properties.Resources.ViewRow; + // sb.Append(temp.Replace("%VIEWNAME%", viewdef.Name)); + // int i = 0; + // foreach (var c in viewdef.Columns) + // { + // i++; + // string row = "\tpublic %value% %name% { get { return (%value%)d[%i%]; } set { d[%i%] = value; } }"; + // sb.AppendLine(row.Replace("%name%", c.Key).Replace("%value%", ConvertDataType(c.Value)).Replace("%i%", i.ToString())); + // } + // sb.Append("}"); + + // return sb.ToString(); + //} + + //private static string ConvertDataType(Type dt) + //{ + // string str = "int"; + // str = dt.ToString(); + // return str; + //} + + //public enum DataTypes + //{ + // Boolean, + // Int, + // DateTime, + // Long, + // Byte, + // Short, + // Guid, + // String, + // Decimal, + // Float + //} + + +viewhandler.cs ----------------------------------------------------------------------------------------------------------------- + //private Type GetType(Type dataTypes) + //{ + // Type t = null; + // switch (dataTypes) + // { + // case DataTypes.Boolean: t = typeof(bool); break; + // case DataTypes.Byte: t = typeof(byte); break; + // case DataTypes.DateTime: t = typeof(DateTime); break; + // case DataTypes.Guid: t = typeof(Guid); break; + // case DataTypes.Int: t = typeof(int); break; + // case DataTypes.Long: t = typeof(long); break; + // case DataTypes.Short: t = typeof(short); break; + // case DataTypes.String: t = typeof(string); break; + // case DataTypes.Decimal: t = typeof(decimal); break; + // case DataTypes.Float: t = typeof(float); break; + // } + + // return t; + //} + + //private byte[] GenerateRowBytes(object[] data) + //{ + // MemoryStream ms = new MemoryStream(); + // BinaryWriter bw = new BinaryWriter(ms, 
Encoding.Unicode); + // // write null value bitmap + // BitArray ba = new BitArray(data.Length); + // for (int i = 1; i < data.Length; i++) + // if (data[i] == null) ba.Set(i - 1, true); + + // byte[] nulls = new byte[data.Length / 8 + 1]; + // ba.CopyTo(nulls, 0); + // bw.Write(nulls); + + // foreach (object o in data) + // { + // if (o != null) + // { + // if (o is bool) bw.Write((bool)o); + // else if (o is int) bw.Write((int)o); + // else if (o is Guid) bw.Write(((Guid)o).ToByteArray()); + // else if (o is long) bw.Write((long)o); + // else if (o is DateTime) bw.Write(((DateTime)o).ToBinary()); + // else if (o is string) bw.Write((string)o); + // else if (o is byte) bw.Write((byte)o); + // else if (o is short) bw.Write((short)o); + // else if (o is double) bw.Write((double)o); + // else if (o is decimal) bw.Write((decimal)o); + // else if (o is float) bw.Write((float)o); + // } + // } + + // return ms.ToArray(); + //} + + //private void Test() + //{ + // byte[] b = GenerateRowBytes(new object[] { 1, null, null, "asdasdasd asdAS" }); + + // object[] cols = ReadRowFromBytes(new Type[] { typeof(int), typeof(bool), typeof(long), typeof(string) }, b); + // int i = cols.Length; + //} + + //private object[] ReadRowFromBytes(Type[] schema, byte[] bytes) + //{ + // MemoryStream ms = new MemoryStream(bytes); + // BinaryReader br = new BinaryReader(ms, Encoding.Unicode); + // byte[] nulls = new byte[schema.Length / 8 + 1]; + // List obj = new List(10); + // br.Read(nulls, 0, schema.Length / 8 + 1); + // BitArray ba = new BitArray(nulls); + + // // first is the docID + // obj.Add(new Guid(br.ReadBytes(16))); + + // for (int i = 0; i < schema.Length; i++) + // { + // if (ba.Get(i) == false) + // { + // Type o = schema[i]; + // if (o == typeof(bool)) obj.Add(br.ReadBoolean()); + // else if (o == typeof(int)) obj.Add(br.ReadInt32()); + // else if (o == typeof(Guid)) obj.Add(new Guid(br.ReadBytes(16))); + // else if (o == typeof(long)) obj.Add(br.ReadInt64()); + // else if (o 
== typeof(DateTime)) obj.Add(DateTime.FromBinary(br.ReadInt64())); + // else if (o == typeof(string)) obj.Add(br.ReadString()); + // else if (o == typeof(byte)) obj.Add(br.ReadByte()); + // else if (o == typeof(short)) obj.Add(br.ReadInt16()); + // else if (o == typeof(double)) obj.Add(br.ReadDouble()); + // else if (o == typeof(decimal)) obj.Add(br.ReadDecimal()); + // else if (o == typeof(float)) obj.Add(br.ReadSingle()); + // } + // else + // obj.Add(null); + // } + + // return obj.ToArray(); //} \ No newline at end of file diff --git a/testing.view b/testing.view index 438ae26..23b3407 100644 --- a/testing.view +++ b/testing.view @@ -1,41 +1,41 @@ -// ref: views.dll -using System; -using System.Collections.Generic; -using RaptorDB; - -namespace SampleViews -{ - [RegisterView] - public class testing : View - { - // row schema defined in the script file - // and will be transferred to the client when needed - public class RowSchema : RDBSchema - { - public string Product; - public decimal QTY; - public decimal Price; - public decimal Discount; - } - - public testing() - { - this.Name = "testing"; - this.Description = ""; - this.isPrimaryList = false; - this.isActive = true; - this.BackgroundIndexing = true; - this.Version = 3; - - this.Schema = typeof(RowSchema); - - this.AddFireOnTypes(typeof(SalesInvoice)); - - this.Mapper = (api, docid, doc) => - { - foreach (var i in doc.Items) - api.EmitObject(docid, i); - }; - } - } +// ref: views.dll +using System; +using System.Collections.Generic; +using RaptorDB; + +namespace SampleViews +{ + [RegisterView] + public class testing : View + { + // row schema defined in the script file + // and will be transferred to the client when needed + public class RowSchema : RDBSchema + { + public string Product; + public decimal QTY; + public decimal Price; + public decimal Discount; + } + + public testing() + { + this.Name = "testing"; + this.Description = ""; + this.isPrimaryList = false; + this.isActive = true; + 
this.BackgroundIndexing = true; + this.Version = 3; + + this.Schema = typeof(RowSchema); + + this.AddFireOnTypes(typeof(SalesInvoice)); + + this.Mapper = (api, docid, doc) => + { + foreach (var i in doc.Items) + api.EmitObject(docid, i); + }; + } + } } \ No newline at end of file diff --git a/testing/AssemblyInfo.cs b/testing/AssemblyInfo.cs index eecf4de..1a314b0 100644 --- a/testing/AssemblyInfo.cs +++ b/testing/AssemblyInfo.cs @@ -1,32 +1,32 @@ -using System.Reflection; -using System.Runtime.CompilerServices; - -// -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -// -[assembly: AssemblyTitle("")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("")] -[assembly: AssemblyCopyright("")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Revision and Build Numbers -// by using the '*' as shown below: - -[assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyDelaySign(false)] -[assembly: AssemblyKeyFile("")] -[assembly: AssemblyKeyName("")] +using System.Reflection; +using System.Runtime.CompilerServices; + +// +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+// +[assembly: AssemblyTitle("")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("")] +[assembly: AssemblyCopyright("")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Revision and Build Numbers +// by using the '*' as shown below: + +[assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyDelaySign(false)] +[assembly: AssemblyKeyFile("")] +[assembly: AssemblyKeyName("")] diff --git a/testing/Class1.cs b/testing/Class1.cs index 8711dd8..0b83888 100644 --- a/testing/Class1.cs +++ b/testing/Class1.cs @@ -1,364 +1,364 @@ -using System; -using System.Diagnostics; -using System.Collections; -using System.IO; -using System.Text; -using System.Threading; -using RaptorDB; -using System.Collections.Generic; -using System.Runtime.InteropServices; -using System.Reflection; -using RaptorDB.Views; -using System.Linq.Expressions; -using System.Linq; -using System.Collections.ObjectModel; -using System.Dynamic; -using RaptorDB.Common; -using System.Threading.Tasks; - - -namespace testing -{ - //class Program3 - //{ - // static int waitTime = 5; - // static string dataPath = "_data"; - // static RaptorDB.RaptorDB DB = null; - // static Sample[] SampleArray = - // { - // new Sample - // { - // ID = Guid.NewGuid(), - // Name = "Object 1", - // OtherValue = "other value for object 1" - // }, - // new Sample - // { - // ID = Guid.NewGuid(), - // Name = "Object 2", - // OtherValue = "other value for object 2" - // } - // }; - - // public static void Main3(string[] args) - // { - // //start with a clean slate - // if (Directory.Exists(dataPath)) - // { - // Console.WriteLine("Delete database directory if it already exists"); - // Directory.Delete(dataPath, true); 
- // } - // DBInit(); - - // //Write initial data and verify - // WriteSampleData(); - // Wait(); - // ReadAllObjects(); - - // //Delete 1 object and verify - // if (DB.Delete(SampleArray[0].ID)) - // Console.WriteLine("Deleted {0}", SampleArray[0].Name); - // else - // { - // Console.WriteLine("Unable to delete object"); - // return; - // } - // Wait(); - // ReadAllObjects(); - - // //Remove view folder and demonstrate the bug - // RemoveViewFolder(); - // Wait(); - // ReadAllObjects(); - - // Console.Write("Press any key to continue . . . "); - // Console.ReadKey(true); - // } - - // static void DBInit() - // { - // Console.WriteLine("Initialise database"); - // DB = RaptorDB.RaptorDB.Open(dataPath); - // DB.RegisterView(new SampleView()); - // } - - // static void WriteSampleData() - // { - // foreach (Sample obj in SampleArray) - // { - // Console.WriteLine("{0} : {1} : {2}", obj.ID.ToString(), obj.Name, obj.OtherValue); - // DB.Save(obj.ID, obj); - // } - // } - - // static void ReadAllObjects() - // { - // Console.WriteLine("Read back all data from DB"); - - // var list = DB.Query("SampleView"); - // foreach (SampleView.RowSchema row in list.Rows) - // { - // Console.WriteLine("ViewRow {0}: {1}", row.docid.ToString(), row.Name); - // Sample obj = (Sample)DB.Fetch(row.docid); - // if (obj != null) - // Console.WriteLine("Object {0} : {1} : {2}", obj.ID.ToString(), obj.Name, obj.OtherValue); - // else - // Console.WriteLine("Can't retrieve original Object"); - - // Console.WriteLine(""); - // } - // } - - // static void Wait() - // { - // Console.Write("\nWait a few seconds to make sure records are written"); - // for (int i = 0; i < waitTime; i++) - // { - // Console.Write("."); - // System.Threading.Thread.Sleep(1000); - // } - // Console.WriteLine("\n"); - // } - - // static void RemoveViewFolder() - // { - // Console.WriteLine("Close connection to DB"); - // DB.Shutdown(); - // DB = null; - - // Console.WriteLine("Remove 'Views' folder from DB folder to 
force the views to be re-populated"); - // Directory.Delete(dataPath + @"\Views", true); - - // Console.WriteLine("Re-connect to database"); - // //Re-initialise DB - // DBInit(); - // } - //} - - //public class Sample - //{ - // public Guid ID; - // public string Name; - // public string OtherValue; - - // public Sample() - // { - // } - //} - - //public class SampleView : View - //{ - // public class RowSchema : RDBSchema - // { - // public string Name; - // } - - // public SampleView() - // { - // this.Name = "SampleView"; - // this.Description = "A primary view for sample objects"; - // this.isPrimaryList = true; - // this.isActive = true; - // this.BackgroundIndexing = false; - - // this.Schema = typeof(SampleView.RowSchema); - - // this.AddFireOnTypes(typeof(Sample)); - - // this.Mapper = (api, docid, doc) => - // { - // api.EmitObject(docid, doc); - // }; - // } - //} - - ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - /* - public class program2 - { - static RaptorDB.RaptorDB rd; - public static void Main2(string[] args) - { - if (args.Length < 2) - { - System.Console.WriteLine("Params: numthreads insertsforthreads (numthreads = 0 for insertion in the main thread )"); - return; - } - DateTime dt = FastDateTime.Now; - int maxThread = int.Parse(args[0]); - int maxDataToInsert = int.Parse(args[1]); - RepositoryStart(); - System.Console.WriteLine("Starting inserts " + maxDataToInsert + " objects for " + maxThread + " thread/s...."); - if (maxThread == 0) - InsertInvoice(maxDataToInsert); - else - { - System.Threading.Tasks.Task[] tasks = new System.Threading.Tasks.Task[maxThread]; - Action act = InsertInvoice; - - for (int i = 0; i < maxThread; i++) - { - tasks[i] = System.Threading.Tasks.Task.Factory.StartNew(act, maxDataToInsert); - } - Task.WaitAll(tasks); - } - //System.Console.Write((maxDataToInsert * (maxThread == 0 ? 
1 : maxThread)).ToString("#,###") + " inserted press a key to continue... "); - //System.Console.ReadLine(); - RepositoryStop(); - System.Console.WriteLine("Re-opening RaptorDB for objects count...."); - RepositoryStart(); - System.Console.WriteLine(rd.Count(typeof(InvoiceView))); - RepositoryStop(); - Console.WriteLine("time = " + FastDateTime.Now.Subtract(dt).TotalSeconds); - return; - } - - - public static void RepositoryStart() - { - rd = RaptorDB.RaptorDB.Open(@"C:\temp\RaptorDBData\DMDATA"); - System.Console.WriteLine("Registering indexes...."); - rd.RegisterView(new InvoiceView()); - rd.RegisterView(new InvoiceAdditionalView()); - rd.RegisterView(new InvoiceItemView()); - System.Console.WriteLine("RaptorDB is Up...."); - } - - public static void RepositoryStop() - { - System.Console.WriteLine("RaptorDB shutdown...."); - rd.Shutdown(); - rd.Dispose(); - rd = null; - System.Console.WriteLine("RaptorDB halted...."); - } - - - - public static void InsertInvoice(object maxDataToInsert) - { - System.Console.WriteLine("Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId + " started..."); - int maxVal = (int)maxDataToInsert; - for (int i = 1; i <= maxVal; i++) - { - - Invoice invoice = new Invoice(); - invoice.UNIQUEID = Guid.NewGuid(); - invoice.Customer = "Customer1"; - invoice.InvoiceNumber = 10; - invoice.InvoiceDate = DateTime.Now; - invoice.items = new List(); - invoice.items.Add(new InvoiceItem() { SKU = "ART" + i.ToString(), qty = 10.50M, UnitPrice = i, TotalPrice = (10.50M * i) }); - rd.Save(invoice.UNIQUEID, invoice); - // System.Console.WriteLine(" Writing Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + " Inserted doc " + i.ToString()); - } - System.Console.WriteLine("Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId + " ended with " + maxVal + " objects"); - } - - public static void SearchInvoice() - { - int count = rd.Query(typeof(InvoiceView)).Count; - } - - - - public class BaseStore - { - 
public Guid UNIQUEID { get; set; } - - } - - - - public class Invoice : BaseStore - { - public DateTime InvoiceDate { get; set; } - public String Customer { get; set; } - public int InvoiceNumber { get; set; } - public List items { get; set; } - public String TAG { get; set; } - } - - - public class InvoiceItem : BaseStore - { - public string SKU { get; set; } - public Decimal qty { get; set; } - public Decimal UnitPrice { get; set; } - public Decimal TotalPrice { get; set; } - } - - - - - public class InvoiceView : RaptorDB.View - { - public class RowSchema : RaptorDB.RDBSchema - { - public DateTime InvoiceDate { get; set; } - public String Customer { get; set; } - public int InvoiceNumber { get; set; } - - } - - public InvoiceView() - { - this.Name = "InvoiceView"; - this.Schema = typeof(InvoiceView.RowSchema); - this.isActive = true; - this.isPrimaryList = true; - this.ConsistentSaveToThisView = true; - this.AddFireOnTypes(typeof(Invoice)); - this.Mapper = (api, docid, doc) => { api.Emit(docid, doc.InvoiceDate, doc.Customer, doc.InvoiceNumber); }; - } - } - - public class InvoiceAdditionalView : RaptorDB.View - { - public class RowSchema : RaptorDB.RDBSchema - { - public String TAG { get; set; } - } - - public InvoiceAdditionalView() - { - this.Name = "InvoiceAdditionalView"; - this.Schema = typeof(InvoiceView.RowSchema); - this.isActive = true; - this.isPrimaryList = false; - this.ConsistentSaveToThisView = true; - this.AddFireOnTypes(typeof(Invoice)); - this.Mapper = (api, docid, doc) => { api.Emit(docid, doc.TAG); }; - } - } - - [RegisterView] - public class InvoiceItemView : RaptorDB.View - { - public class RowSchema : RDBSchema - { - public string SKU { get; set; } - public Decimal UnitPrice { get; set; } - public Decimal qty { get; set; } - } - - public InvoiceItemView() - { - this.Name = "InvoiceItem View"; - this.Schema = typeof(InvoiceItemView.RowSchema); - this.isActive = true; - this.isPrimaryList = false; - this.AddFireOnTypes(typeof(Invoice)); - 
this.Mapper = (api, docid, doc) => - { - foreach (InvoiceItem item in doc.items) - { - api.Emit(docid, item.SKU, item.UnitPrice, item.qty); - } - }; - } - } - } - */ +using System; +using System.Diagnostics; +using System.Collections; +using System.IO; +using System.Text; +using System.Threading; +using RaptorDB; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using System.Reflection; +using RaptorDB.Views; +using System.Linq.Expressions; +using System.Linq; +using System.Collections.ObjectModel; +using System.Dynamic; +using RaptorDB.Common; +using System.Threading.Tasks; + + +namespace testing +{ + //class Program3 + //{ + // static int waitTime = 5; + // static string dataPath = "_data"; + // static RaptorDB.RaptorDB DB = null; + // static Sample[] SampleArray = + // { + // new Sample + // { + // ID = Guid.NewGuid(), + // Name = "Object 1", + // OtherValue = "other value for object 1" + // }, + // new Sample + // { + // ID = Guid.NewGuid(), + // Name = "Object 2", + // OtherValue = "other value for object 2" + // } + // }; + + // public static void Main3(string[] args) + // { + // //start with a clean slate + // if (Directory.Exists(dataPath)) + // { + // Console.WriteLine("Delete database directory if it already exists"); + // Directory.Delete(dataPath, true); + // } + // DBInit(); + + // //Write initial data and verify + // WriteSampleData(); + // Wait(); + // ReadAllObjects(); + + // //Delete 1 object and verify + // if (DB.Delete(SampleArray[0].ID)) + // Console.WriteLine("Deleted {0}", SampleArray[0].Name); + // else + // { + // Console.WriteLine("Unable to delete object"); + // return; + // } + // Wait(); + // ReadAllObjects(); + + // //Remove view folder and demonstrate the bug + // RemoveViewFolder(); + // Wait(); + // ReadAllObjects(); + + // Console.Write("Press any key to continue . . . 
"); + // Console.ReadKey(true); + // } + + // static void DBInit() + // { + // Console.WriteLine("Initialise database"); + // DB = RaptorDB.RaptorDB.Open(dataPath); + // DB.RegisterView(new SampleView()); + // } + + // static void WriteSampleData() + // { + // foreach (Sample obj in SampleArray) + // { + // Console.WriteLine("{0} : {1} : {2}", obj.ID.ToString(), obj.Name, obj.OtherValue); + // DB.Save(obj.ID, obj); + // } + // } + + // static void ReadAllObjects() + // { + // Console.WriteLine("Read back all data from DB"); + + // var list = DB.Query("SampleView"); + // foreach (SampleView.RowSchema row in list.Rows) + // { + // Console.WriteLine("ViewRow {0}: {1}", row.docid.ToString(), row.Name); + // Sample obj = (Sample)DB.Fetch(row.docid); + // if (obj != null) + // Console.WriteLine("Object {0} : {1} : {2}", obj.ID.ToString(), obj.Name, obj.OtherValue); + // else + // Console.WriteLine("Can't retrieve original Object"); + + // Console.WriteLine(""); + // } + // } + + // static void Wait() + // { + // Console.Write("\nWait a few seconds to make sure records are written"); + // for (int i = 0; i < waitTime; i++) + // { + // Console.Write("."); + // System.Threading.Thread.Sleep(1000); + // } + // Console.WriteLine("\n"); + // } + + // static void RemoveViewFolder() + // { + // Console.WriteLine("Close connection to DB"); + // DB.Shutdown(); + // DB = null; + + // Console.WriteLine("Remove 'Views' folder from DB folder to force the views to be re-populated"); + // Directory.Delete(dataPath + @"\Views", true); + + // Console.WriteLine("Re-connect to database"); + // //Re-initialise DB + // DBInit(); + // } + //} + + //public class Sample + //{ + // public Guid ID; + // public string Name; + // public string OtherValue; + + // public Sample() + // { + // } + //} + + //public class SampleView : View + //{ + // public class RowSchema : RDBSchema + // { + // public string Name; + // } + + // public SampleView() + // { + // this.Name = "SampleView"; + // 
this.Description = "A primary view for sample objects"; + // this.isPrimaryList = true; + // this.isActive = true; + // this.BackgroundIndexing = false; + + // this.Schema = typeof(SampleView.RowSchema); + + // this.AddFireOnTypes(typeof(Sample)); + + // this.Mapper = (api, docid, doc) => + // { + // api.EmitObject(docid, doc); + // }; + // } + //} + + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + /* + public class program2 + { + static RaptorDB.RaptorDB rd; + public static void Main2(string[] args) + { + if (args.Length < 2) + { + System.Console.WriteLine("Params: numthreads insertsforthreads (numthreads = 0 for insertion in the main thread )"); + return; + } + DateTime dt = FastDateTime.Now; + int maxThread = int.Parse(args[0]); + int maxDataToInsert = int.Parse(args[1]); + RepositoryStart(); + System.Console.WriteLine("Starting inserts " + maxDataToInsert + " objects for " + maxThread + " thread/s...."); + if (maxThread == 0) + InsertInvoice(maxDataToInsert); + else + { + System.Threading.Tasks.Task[] tasks = new System.Threading.Tasks.Task[maxThread]; + Action act = InsertInvoice; + + for (int i = 0; i < maxThread; i++) + { + tasks[i] = System.Threading.Tasks.Task.Factory.StartNew(act, maxDataToInsert); + } + Task.WaitAll(tasks); + } + //System.Console.Write((maxDataToInsert * (maxThread == 0 ? 1 : maxThread)).ToString("#,###") + " inserted press a key to continue... 
"); + //System.Console.ReadLine(); + RepositoryStop(); + System.Console.WriteLine("Re-opening RaptorDB for objects count...."); + RepositoryStart(); + System.Console.WriteLine(rd.Count(typeof(InvoiceView))); + RepositoryStop(); + Console.WriteLine("time = " + FastDateTime.Now.Subtract(dt).TotalSeconds); + return; + } + + + public static void RepositoryStart() + { + rd = RaptorDB.RaptorDB.Open(@"C:\temp\RaptorDBData\DMDATA"); + System.Console.WriteLine("Registering indexes...."); + rd.RegisterView(new InvoiceView()); + rd.RegisterView(new InvoiceAdditionalView()); + rd.RegisterView(new InvoiceItemView()); + System.Console.WriteLine("RaptorDB is Up...."); + } + + public static void RepositoryStop() + { + System.Console.WriteLine("RaptorDB shutdown...."); + rd.Shutdown(); + rd.Dispose(); + rd = null; + System.Console.WriteLine("RaptorDB halted...."); + } + + + + public static void InsertInvoice(object maxDataToInsert) + { + System.Console.WriteLine("Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId + " started..."); + int maxVal = (int)maxDataToInsert; + for (int i = 1; i <= maxVal; i++) + { + + Invoice invoice = new Invoice(); + invoice.UNIQUEID = Guid.NewGuid(); + invoice.Customer = "Customer1"; + invoice.InvoiceNumber = 10; + invoice.InvoiceDate = DateTime.Now; + invoice.items = new List(); + invoice.items.Add(new InvoiceItem() { SKU = "ART" + i.ToString(), qty = 10.50M, UnitPrice = i, TotalPrice = (10.50M * i) }); + rd.Save(invoice.UNIQUEID, invoice); + // System.Console.WriteLine(" Writing Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + " Inserted doc " + i.ToString()); + } + System.Console.WriteLine("Thread " + System.Threading.Thread.CurrentThread.ManagedThreadId + " ended with " + maxVal + " objects"); + } + + public static void SearchInvoice() + { + int count = rd.Query(typeof(InvoiceView)).Count; + } + + + + public class BaseStore + { + public Guid UNIQUEID { get; set; } + + } + + + + public class Invoice : 
BaseStore + { + public DateTime InvoiceDate { get; set; } + public String Customer { get; set; } + public int InvoiceNumber { get; set; } + public List items { get; set; } + public String TAG { get; set; } + } + + + public class InvoiceItem : BaseStore + { + public string SKU { get; set; } + public Decimal qty { get; set; } + public Decimal UnitPrice { get; set; } + public Decimal TotalPrice { get; set; } + } + + + + + public class InvoiceView : RaptorDB.View + { + public class RowSchema : RaptorDB.RDBSchema + { + public DateTime InvoiceDate { get; set; } + public String Customer { get; set; } + public int InvoiceNumber { get; set; } + + } + + public InvoiceView() + { + this.Name = "InvoiceView"; + this.Schema = typeof(InvoiceView.RowSchema); + this.isActive = true; + this.isPrimaryList = true; + this.ConsistentSaveToThisView = true; + this.AddFireOnTypes(typeof(Invoice)); + this.Mapper = (api, docid, doc) => { api.Emit(docid, doc.InvoiceDate, doc.Customer, doc.InvoiceNumber); }; + } + } + + public class InvoiceAdditionalView : RaptorDB.View + { + public class RowSchema : RaptorDB.RDBSchema + { + public String TAG { get; set; } + } + + public InvoiceAdditionalView() + { + this.Name = "InvoiceAdditionalView"; + this.Schema = typeof(InvoiceView.RowSchema); + this.isActive = true; + this.isPrimaryList = false; + this.ConsistentSaveToThisView = true; + this.AddFireOnTypes(typeof(Invoice)); + this.Mapper = (api, docid, doc) => { api.Emit(docid, doc.TAG); }; + } + } + + [RegisterView] + public class InvoiceItemView : RaptorDB.View + { + public class RowSchema : RDBSchema + { + public string SKU { get; set; } + public Decimal UnitPrice { get; set; } + public Decimal qty { get; set; } + } + + public InvoiceItemView() + { + this.Name = "InvoiceItem View"; + this.Schema = typeof(InvoiceItemView.RowSchema); + this.isActive = true; + this.isPrimaryList = false; + this.AddFireOnTypes(typeof(Invoice)); + this.Mapper = (api, docid, doc) => + { + foreach (InvoiceItem item in 
doc.items) + { + api.Emit(docid, item.SKU, item.UnitPrice, item.qty); + } + }; + } + } + } + */ } \ No newline at end of file diff --git a/testing/program.cs b/testing/program.cs index 2a7ede7..fd741d0 100644 --- a/testing/program.cs +++ b/testing/program.cs @@ -1,48 +1,48 @@ -using System; -using System.Diagnostics; -using System.Collections; -using System.IO; -using System.Text; -using System.Threading; -using RaptorDB; -using System.Collections.Generic; -using System.Runtime.InteropServices; -using System.Reflection; -using RaptorDB.Views; -using System.Linq.Expressions; -using System.Linq; -using System.Collections.ObjectModel; -using System.Dynamic; -using RaptorDB.Common; - -namespace testing -{ - public class program - { - static RaptorDBServer server; - public static void Main(string[] args) - { - AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException); - server = new RaptorDBServer(90, @"..\..\RaptorDBdata"); - - Console.WriteLine("Server started on port 90"); - Console.WriteLine("Press Enter to exit..."); - Console.CancelKeyPress += new ConsoleCancelEventHandler(Console_CancelKeyPress); - Console.ReadLine(); - server.Shutdown(); - - return; - } - - static void Console_CancelKeyPress(object sender, ConsoleCancelEventArgs e) - { - Console.WriteLine("Shutting down..."); - server.Shutdown(); - } - - static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e) - { - File.WriteAllText("error.txt", "" + e.ExceptionObject); - } - } -} +using System; +using System.Diagnostics; +using System.Collections; +using System.IO; +using System.Text; +using System.Threading; +using RaptorDB; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using System.Reflection; +using RaptorDB.Views; +using System.Linq.Expressions; +using System.Linq; +using System.Collections.ObjectModel; +using System.Dynamic; +using RaptorDB.Common; + +namespace testing +{ + public class 
program + { + static RaptorDBServer server; + public static void Main(string[] args) + { + AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(CurrentDomain_UnhandledException); + server = new RaptorDBServer(90, @"..\..\RaptorDBdata"); + + Console.WriteLine("Server started on port 90"); + Console.WriteLine("Press Enter to exit..."); + Console.CancelKeyPress += new ConsoleCancelEventHandler(Console_CancelKeyPress); + Console.ReadLine(); + server.Shutdown(); + + return; + } + + static void Console_CancelKeyPress(object sender, ConsoleCancelEventArgs e) + { + Console.WriteLine("Shutting down..."); + server.Shutdown(); + } + + static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e) + { + File.WriteAllText("error.txt", "" + e.ExceptionObject); + } + } +} diff --git a/testing/tests.csproj b/testing/tests.csproj index 7e455ff..65bac06 100644 --- a/testing/tests.csproj +++ b/testing/tests.csproj @@ -1,159 +1,181 @@ - - - - Local - 9.0.30729 - 2.0 - {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90} - Debug - AnyCPU - - - testing - - - JScript - Grid - IE50 - false - Exe - testing - OnBuildSuccess - - - - - 0.0 - v4.0 - publish\ - true - Disk - false - Foreground - 7 - Days - false - false - true - 0 - 1.0.0.%2a - false - false - true - - - - - ..\Output\server\ - true - 285212672 - false - - - DEBUG;TRACE - true - 4096 - false - false - false - false - false - 4 - Full - prompt - false - - - bin\Release\ - false - 285212672 - false - - - TRACE - - - false - 4096 - false - - - true - false - false - false - 4 - none - prompt - false - - - - {45F6BE30-989A-4749-B6A0-69099C8661F4} - RaptorDB - - - {A1347486-8D54-4E17-8A22-76EFE61BF37B} - Views - False - - - System - - - {32331D51-5BE0-41E2-AF1A-9B086C5AE809} - RaptorDB.Common - - - - - Code - - - Code - - - - - False - .NET Framework Client Profile - false - - - False - .NET Framework 2.0 %28x86%29 - false - - - False - .NET Framework 3.0 %28x86%29 - false - - - False - .NET 
Framework 3.5 - false - - - False - .NET Framework 3.5 SP1 - true - - - False - Windows Installer 3.1 - true - - - - - - - - - - - Auto - AnyCPU - + + + + Local + 9.0.30729 + 2.0 + {C6DA7503-3BCF-4688-ADD7-1CB6EDCE5E90} + Debug + AnyCPU + + + testing + + + JScript + Grid + IE50 + false + Exe + testing + OnBuildSuccess + + + + + 0.0 + v4.0 + publish\ + true + Disk + false + Foreground + 7 + Days + false + false + true + 0 + 1.0.0.%2a + false + false + true + + + + + ..\Output\server\ + true + 285212672 + false + + + DEBUG;TRACE + true + 4096 + false + false + false + false + false + 4 + Full + prompt + false + + + bin\Release\ + false + 285212672 + false + + + TRACE + + + false + 4096 + false + + + true + false + false + false + 4 + none + prompt + false + + + true + bin\x64\Debug\ + DEBUG;TRACE + 285212672 + true + 4096 + Full + x64 + prompt + MinimumRecommendedRules.ruleset + + + bin\x64\Release\ + TRACE + 285212672 + true + 4096 + x64 + prompt + MinimumRecommendedRules.ruleset + + + + {45F6BE30-989A-4749-B6A0-69099C8661F4} + RaptorDB + + + {A1347486-8D54-4E17-8A22-76EFE61BF37B} + Views + False + + + System + + + {32331D51-5BE0-41E2-AF1A-9B086C5AE809} + RaptorDB.Common + + + + + Code + + + Code + + + + + False + .NET Framework Client Profile + false + + + False + .NET Framework 2.0 %28x86%29 + false + + + False + .NET Framework 3.0 %28x86%29 + false + + + False + .NET Framework 3.5 + false + + + False + .NET Framework 3.5 SP1 + true + + + False + Windows Installer 3.1 + true + + + + + + + + + + + Auto + AnyCPU + \ No newline at end of file diff --git a/vbTestConsole/App.config b/vbTestConsole/App.config index 8e15646..fad249e 100644 --- a/vbTestConsole/App.config +++ b/vbTestConsole/App.config @@ -1,6 +1,6 @@ - - - - - + + + + + \ No newline at end of file diff --git a/vbTestConsole/Module1.vb b/vbTestConsole/Module1.vb index 89aab65..3b346fd 100644 --- a/vbTestConsole/Module1.vb +++ b/vbTestConsole/Module1.vb @@ -1,10 +1,10 @@ -Module Module1 - - Sub Main() - Dim 
rdb As RaptorDB.RaptorDB = RaptorDB.RaptorDB.Open("..\..\..\RaptorDBdata") - rdb.RegisterView(New SampleViews.SalesInvoiceView()) - Dim r = rdb.Query(Of SampleViews.SalesInvoiceViewRowSchema)(Function(x) x.NoCase = "Me 4" And x.Serial < 10) - Console.WriteLine(fastJSON.JSON.ToNiceJSON(r.Rows, New fastJSON.JSONParameters With {.UseExtensions = False})) - End Sub - -End Module +Module Module1 + + Sub Main() + Dim rdb As RaptorDB.RaptorDB = RaptorDB.RaptorDB.Open("..\..\..\RaptorDBdata") + rdb.RegisterView(New SampleViews.SalesInvoiceView()) + Dim r = rdb.Query(Of SampleViews.SalesInvoiceViewRowSchema)(Function(x) x.NoCase = "Me 4" And x.Serial < 10) + Console.WriteLine(fastJSON.JSON.ToNiceJSON(r.Rows, New fastJSON.JSONParameters With {.UseExtensions = False})) + End Sub + +End Module diff --git a/vbTestConsole/My Project/Application.Designer.vb b/vbTestConsole/My Project/Application.Designer.vb index 9faf70b..d4afad1 100644 --- a/vbTestConsole/My Project/Application.Designer.vb +++ b/vbTestConsole/My Project/Application.Designer.vb @@ -1,13 +1,13 @@ -'------------------------------------------------------------------------------ -' -' This code was generated by a tool. -' Runtime Version:4.0.30319.34014 -' -' Changes to this file may cause incorrect behavior and will be lost if -' the code is regenerated. -' -'------------------------------------------------------------------------------ - -Option Strict On -Option Explicit On - +'------------------------------------------------------------------------------ +' +' This code was generated by a tool. +' Runtime Version:4.0.30319.34014 +' +' Changes to this file may cause incorrect behavior and will be lost if +' the code is regenerated. 
+' +'------------------------------------------------------------------------------ + +Option Strict On +Option Explicit On + diff --git a/vbTestConsole/My Project/Application.myapp b/vbTestConsole/My Project/Application.myapp index e62f1a5..23b627f 100644 --- a/vbTestConsole/My Project/Application.myapp +++ b/vbTestConsole/My Project/Application.myapp @@ -1,10 +1,10 @@ - - - false - false - 0 - true - 0 - 2 - true - + + + false + false + 0 + true + 0 + 2 + true + diff --git a/vbTestConsole/My Project/AssemblyInfo.vb b/vbTestConsole/My Project/AssemblyInfo.vb index 6f7f075..58c5795 100644 --- a/vbTestConsole/My Project/AssemblyInfo.vb +++ b/vbTestConsole/My Project/AssemblyInfo.vb @@ -1,35 +1,35 @@ -Imports System -Imports System.Reflection -Imports System.Runtime.InteropServices - -' General Information about an assembly is controlled through the following -' set of attributes. Change these attribute values to modify the information -' associated with an assembly. - -' Review the values of the assembly attributes - - - - - - - - - - -'The following GUID is for the ID of the typelib if this project is exposed to COM - - -' Version information for an assembly consists of the following four values: -' -' Major Version -' Minor Version -' Build Number -' Revision -' -' You can specify all the values or you can default the Build and Revision Numbers -' by using the '*' as shown below: -' - - - +Imports System +Imports System.Reflection +Imports System.Runtime.InteropServices + +' General Information about an assembly is controlled through the following +' set of attributes. Change these attribute values to modify the information +' associated with an assembly. 
+ +' Review the values of the assembly attributes + + + + + + + + + + +'The following GUID is for the ID of the typelib if this project is exposed to COM + + +' Version information for an assembly consists of the following four values: +' +' Major Version +' Minor Version +' Build Number +' Revision +' +' You can specify all the values or you can default the Build and Revision Numbers +' by using the '*' as shown below: +' + + + diff --git a/vbTestConsole/My Project/Resources.Designer.vb b/vbTestConsole/My Project/Resources.Designer.vb index 5616380..b5a39e8 100644 --- a/vbTestConsole/My Project/Resources.Designer.vb +++ b/vbTestConsole/My Project/Resources.Designer.vb @@ -1,62 +1,62 @@ -'------------------------------------------------------------------------------ -' -' This code was generated by a tool. -' Runtime Version:4.0.30319.34014 -' -' Changes to this file may cause incorrect behavior and will be lost if -' the code is regenerated. -' -'------------------------------------------------------------------------------ - -Option Strict On -Option Explicit On - - -Namespace My.Resources - - 'This class was auto-generated by the StronglyTypedResourceBuilder - 'class via a tool like ResGen or Visual Studio. - 'To add or remove a member, edit your .ResX file then rerun ResGen - 'with the /str option, or rebuild your VS project. - ''' - ''' A strongly-typed resource class, for looking up localized strings, etc. - ''' - _ - Friend Module Resources - - Private resourceMan As Global.System.Resources.ResourceManager - - Private resourceCulture As Global.System.Globalization.CultureInfo - - ''' - ''' Returns the cached ResourceManager instance used by this class. 
- ''' - _ - Friend ReadOnly Property ResourceManager() As Global.System.Resources.ResourceManager - Get - If Object.ReferenceEquals(resourceMan, Nothing) Then - Dim temp As Global.System.Resources.ResourceManager = New Global.System.Resources.ResourceManager("ConsoleApplication1.Resources", GetType(Resources).Assembly) - resourceMan = temp - End If - Return resourceMan - End Get - End Property - - ''' - ''' Overrides the current thread's CurrentUICulture property for all - ''' resource lookups using this strongly typed resource class. - ''' - _ - Friend Property Culture() As Global.System.Globalization.CultureInfo - Get - Return resourceCulture - End Get - Set(ByVal value As Global.System.Globalization.CultureInfo) - resourceCulture = value - End Set - End Property - End Module -End Namespace +'------------------------------------------------------------------------------ +' +' This code was generated by a tool. +' Runtime Version:4.0.30319.34014 +' +' Changes to this file may cause incorrect behavior and will be lost if +' the code is regenerated. +' +'------------------------------------------------------------------------------ + +Option Strict On +Option Explicit On + + +Namespace My.Resources + + 'This class was auto-generated by the StronglyTypedResourceBuilder + 'class via a tool like ResGen or Visual Studio. + 'To add or remove a member, edit your .ResX file then rerun ResGen + 'with the /str option, or rebuild your VS project. + ''' + ''' A strongly-typed resource class, for looking up localized strings, etc. + ''' + _ + Friend Module Resources + + Private resourceMan As Global.System.Resources.ResourceManager + + Private resourceCulture As Global.System.Globalization.CultureInfo + + ''' + ''' Returns the cached ResourceManager instance used by this class. 
+ ''' + _ + Friend ReadOnly Property ResourceManager() As Global.System.Resources.ResourceManager + Get + If Object.ReferenceEquals(resourceMan, Nothing) Then + Dim temp As Global.System.Resources.ResourceManager = New Global.System.Resources.ResourceManager("ConsoleApplication1.Resources", GetType(Resources).Assembly) + resourceMan = temp + End If + Return resourceMan + End Get + End Property + + ''' + ''' Overrides the current thread's CurrentUICulture property for all + ''' resource lookups using this strongly typed resource class. + ''' + _ + Friend Property Culture() As Global.System.Globalization.CultureInfo + Get + Return resourceCulture + End Get + Set(ByVal value As Global.System.Globalization.CultureInfo) + resourceCulture = value + End Set + End Property + End Module +End Namespace diff --git a/vbTestConsole/My Project/Resources.resx b/vbTestConsole/My Project/Resources.resx index af7dbeb..ffecec8 100644 --- a/vbTestConsole/My Project/Resources.resx +++ b/vbTestConsole/My Project/Resources.resx @@ -1,117 +1,117 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/microsoft-resx - - - 2.0 - - - System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - - - System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + \ No newline at end of file diff --git a/vbTestConsole/My Project/Settings.Designer.vb b/vbTestConsole/My Project/Settings.Designer.vb index 2ba4249..40e8be4 100644 --- 
a/vbTestConsole/My Project/Settings.Designer.vb +++ b/vbTestConsole/My Project/Settings.Designer.vb @@ -1,73 +1,73 @@ -'------------------------------------------------------------------------------ -' -' This code was generated by a tool. -' Runtime Version:4.0.30319.34014 -' -' Changes to this file may cause incorrect behavior and will be lost if -' the code is regenerated. -' -'------------------------------------------------------------------------------ - -Option Strict On -Option Explicit On - - -Namespace My - - _ - Partial Friend NotInheritable Class MySettings - Inherits Global.System.Configuration.ApplicationSettingsBase - - Private Shared defaultInstance As MySettings = CType(Global.System.Configuration.ApplicationSettingsBase.Synchronized(New MySettings), MySettings) - -#Region "My.Settings Auto-Save Functionality" -#If _MyType = "WindowsForms" Then - Private Shared addedHandler As Boolean - - Private Shared addedHandlerLockObject As New Object - - _ - Private Shared Sub AutoSaveSettings(ByVal sender As Global.System.Object, ByVal e As Global.System.EventArgs) - If My.Application.SaveMySettingsOnExit Then - My.Settings.Save() - End If - End Sub -#End If -#End Region - - Public Shared ReadOnly Property [Default]() As MySettings - Get - -#If _MyType = "WindowsForms" Then - If Not addedHandler Then - SyncLock addedHandlerLockObject - If Not addedHandler Then - AddHandler My.Application.Shutdown, AddressOf AutoSaveSettings - addedHandler = True - End If - End SyncLock - End If -#End If - Return defaultInstance - End Get - End Property - End Class -End Namespace - -Namespace My - - _ - Friend Module MySettingsProperty - - _ - Friend ReadOnly Property Settings() As Global.ConsoleApplication1.My.MySettings - Get - Return Global.ConsoleApplication1.My.MySettings.Default - End Get - End Property - End Module -End Namespace +'------------------------------------------------------------------------------ +' +' This code was generated by a tool. 
+' Runtime Version:4.0.30319.34014 +' +' Changes to this file may cause incorrect behavior and will be lost if +' the code is regenerated. +' +'------------------------------------------------------------------------------ + +Option Strict On +Option Explicit On + + +Namespace My + + _ + Partial Friend NotInheritable Class MySettings + Inherits Global.System.Configuration.ApplicationSettingsBase + + Private Shared defaultInstance As MySettings = CType(Global.System.Configuration.ApplicationSettingsBase.Synchronized(New MySettings), MySettings) + +#Region "My.Settings Auto-Save Functionality" +#If _MyType = "WindowsForms" Then + Private Shared addedHandler As Boolean + + Private Shared addedHandlerLockObject As New Object + + _ + Private Shared Sub AutoSaveSettings(ByVal sender As Global.System.Object, ByVal e As Global.System.EventArgs) + If My.Application.SaveMySettingsOnExit Then + My.Settings.Save() + End If + End Sub +#End If +#End Region + + Public Shared ReadOnly Property [Default]() As MySettings + Get + +#If _MyType = "WindowsForms" Then + If Not addedHandler Then + SyncLock addedHandlerLockObject + If Not addedHandler Then + AddHandler My.Application.Shutdown, AddressOf AutoSaveSettings + addedHandler = True + End If + End SyncLock + End If +#End If + Return defaultInstance + End Get + End Property + End Class +End Namespace + +Namespace My + + _ + Friend Module MySettingsProperty + + _ + Friend ReadOnly Property Settings() As Global.ConsoleApplication1.My.MySettings + Get + Return Global.ConsoleApplication1.My.MySettings.Default + End Get + End Property + End Module +End Namespace diff --git a/vbTestConsole/My Project/Settings.settings b/vbTestConsole/My Project/Settings.settings index 85b890b..377f56d 100644 --- a/vbTestConsole/My Project/Settings.settings +++ b/vbTestConsole/My Project/Settings.settings @@ -1,7 +1,7 @@ - - - - - - - + + + + + + + diff --git a/vbTestConsole/vbtestconsole.vbproj b/vbTestConsole/vbtestconsole.vbproj index 7546d0a..245c903 
100644 --- a/vbTestConsole/vbtestconsole.vbproj +++ b/vbTestConsole/vbtestconsole.vbproj @@ -1,128 +1,128 @@ - - - - - Debug - AnyCPU - {D3A7445B-4D0D-4989-9EEF-334821D70894} - Exe - ConsoleApplication1.Module1 - ConsoleApplication1 - ConsoleApplication1 - 512 - Console - v4.5 - - - AnyCPU - true - full - true - true - bin\Debug\ - ConsoleApplication1.xml - 42016,41999,42017,42018,42019,42032,42036,42020,42021,42022 - - - AnyCPU - pdbonly - false - true - true - bin\Release\ - ConsoleApplication1.xml - 42016,41999,42017,42018,42019,42032,42036,42020,42021,42022 - - - On - - - Binary - - - Off - - - On - - - - - - - - - - - - - - - - - - - - - - - - - - True - Application.myapp - - - True - True - Resources.resx - - - True - Settings.settings - True - - - - - VbMyResourcesResXFileCodeGenerator - Resources.Designer.vb - My.Resources - Designer - - - - - MyApplicationCodeGenerator - Application.Designer.vb - - - SettingsSingleFileGenerator - My - Settings.Designer.vb - - - - - - {32331d51-5be0-41e2-af1a-9b086c5ae809} - RaptorDB.Common - - - {45f6be30-989a-4749-b6a0-69099c8661f4} - RaptorDB - - - {a1347486-8d54-4e17-8a22-76efe61bf37b} - Views - - - - + + + + + Debug + AnyCPU + {D3A7445B-4D0D-4989-9EEF-334821D70894} + Exe + ConsoleApplication1.Module1 + ConsoleApplication1 + ConsoleApplication1 + 512 + Console + v4.5 + + + AnyCPU + true + full + true + true + bin\Debug\ + ConsoleApplication1.xml + 42016,41999,42017,42018,42019,42032,42036,42020,42021,42022 + + + AnyCPU + pdbonly + false + true + true + bin\Release\ + ConsoleApplication1.xml + 42016,41999,42017,42018,42019,42032,42036,42020,42021,42022 + + + On + + + Binary + + + Off + + + On + + + + + + + + + + + + + + + + + + + + + + + + + + True + Application.myapp + + + True + True + Resources.resx + + + True + Settings.settings + True + + + + + VbMyResourcesResXFileCodeGenerator + Resources.Designer.vb + My.Resources + Designer + + + + + MyApplicationCodeGenerator + Application.Designer.vb + + + 
SettingsSingleFileGenerator + My + Settings.Designer.vb + + + + + + {32331d51-5be0-41e2-af1a-9b086c5ae809} + RaptorDB.Common + + + {45f6be30-989a-4749-b6a0-69099c8661f4} + RaptorDB + + + {a1347486-8d54-4e17-8a22-76efe61bf37b} + Views + + + + \ No newline at end of file