diff --git a/@Scripts/Debug.kbs b/@Scripts/Debug.kbs
index 09c11de2..6c385718 100644
--- a/@Scripts/Debug.kbs
+++ b/@Scripts/Debug.kbs
@@ -1,472 +1,5 @@
---Test 'SELECT *'
-SELECT
-    *
-FROM
-    WordList:Language
+EXEC ClearCacheAllocations
+begin transaction
+delete from Benchmarking:Payload_100000 where Id = 4
+rollback transaction
 
---Test WHERE equals.
-SELECT
-    *
-FROM
-    WordList:Language
-WHERE
-    Name = 'French'
-
---Test WHERE with equals.
-SELECT
-    *
-FROM
-    WordList:Language as L
-WHERE
-    L.Name = 'French'
-
---Test SELECT * with alias.
-SELECT
-    L.*
-FROM
-    WordList:Language as L
-WHERE
-    L.Name = 'French'
-
---Test WHERE LIKE '%...'.
-SELECT
-    *
-FROM
-    WordList:Language
-WHERE
-    Name LIKE 'F%'
-
---Test WHERE LIKE '...%'.
-SELECT
-    *
-FROM
-    WordList:Language
-WHERE
-    Name LIKE '%h'
-
---Test WHERE NOT LIKE '%...'.
-SELECT
-    *
-FROM
-    WordList:Language
-WHERE
-    Name NOT LIKE 'F%'
-
---Test WHERE NOT LIKE '...%'.
-SELECT
-    Name
-FROM
-    WordList:Language
-WHERE
-    Name = 'French'
-
---Test SELECT column which does not exist.
-SELECT
-    Id,
-    ColumnWhichDoesNotExist,
-    Name
-FROM
-    WordList:Language
-WHERE
-    Name = 'French'
-
---Test WHERE column which does not exist.
-SELECT
-    Name
-FROM
-    WordList:Language
-WHERE
-    ColumnWhichDoesNotExist = 'French'
-
---Test WHERE column which does not exist OR valid condition.
-SELECT
-    Name
-FROM
-    WordList:Language
-WHERE
-    Name = 'French'
-    OR ColumnWhichDoesNotExist = 'French'
-
---Test WHERE column which does not exist AND valid condition
-SELECT
-    Name
-FROM
-    WordList:Language
-WHERE
-    Name = 'French'
-    OR ColumnWhichDoesNotExist = 'French'
-
---Test GROUP BY with ORDER BY using aggregate ORDER BY.
-SELECT
-    sw.Text as SourceWord,
-    tw.Text as TargetWord,
-    Avg(sw.Id) as Average,
-    Count(0) as CountOf
-FROM
-    WordList:Word as sw
-INNER JOIN WordList:Language as sl
-    ON sl.Id = sw.LanguageId
-INNER JOIN WordList:Synonym as s
-    ON s.SourceWordId = sw.Id
-INNER JOIN WordList:Word as tw
-    ON tw.Id = s.TargetWordId
-INNER JOIN WordList:Language as tl
-    ON tl.Id = tw.LanguageId
-WHERE
-    sw.Text = 'Car'
-    OR sw.Text = 'Gym'
-GROUP BY
-    sw.Text,
-    tw.Text
-ORDER BY
-    Avg(sw.Id / 60) asc,
-    sw.Text desc
-
---Test aggregate without group by.
-SELECT
-    Avg(sw.Id) as Average,
-    Count(0) as CountOf
-FROM
-    WordList:Word as sw
-INNER JOIN WordList:Language as sl
-    ON sl.Id = sw.LanguageId
-INNER JOIN WordList:Synonym as s
-    ON s.SourceWordId = sw.Id
-INNER JOIN WordList:Word as tw
-    ON tw.Id = s.TargetWordId
-INNER JOIN WordList:Language as tl
-    ON tl.Id = tw.LanguageId
-WHERE
-    sw.Text = 'Car'
-    OR sw.Text = 'Gym'
-
---Test TOP.
-SELECT TOP 10
-    *
-FROM
-    WordList:Word as sw
-INNER JOIN WordList:Language as sl
-    ON sl.Id = sw.LanguageId
-INNER JOIN WordList:Synonym as s
-    ON s.SourceWordId = sw.Id
-INNER JOIN WordList:Word as tw
-    ON tw.Id = s.TargetWordId
-INNER JOIN WordList:Language as tl
-    ON tl.Id = tw.LanguageId
-WHERE
-    sw.Text = 'Car'
-    OR sw.Text = 'Gym'
-
---Test TOP with OFFSET.
-SELECT TOP 10
-    *
-FROM
-    WordList:Word as sw
-INNER JOIN WordList:Language as sl
-    ON sl.Id = sw.LanguageId
-INNER JOIN WordList:Synonym as s
-    ON s.SourceWordId = sw.Id
-INNER JOIN WordList:Word as tw
-    ON tw.Id = s.TargetWordId
-INNER JOIN WordList:Language as tl
-    ON tl.Id = tw.LanguageId
-WHERE
-    sw.Text = 'Car'
-    OR sw.Text = 'Gym'
-OFFSET 100
-
-DROP SCHEMA Temporary:Payload1
-DROP SCHEMA Temporary:Payload2
-
---Test SELECT INTO.
-SELECT
-    sw.Id as SourceWordId,
-    sw.Text as SourceWord,
-    sl.Name as SourceLanguage,
-    tw.Id as TargetWordId,
-    tw.Text as TargetWord,
-    tl.Name as TargetLanguage
-INTO
-    Temporary:Payload1
-FROM
-    WordList:Word as sw
-INNER JOIN WordList:Language as sl
-    ON sl.Id = sw.LanguageId
-INNER JOIN WordList:Synonym as s
-    ON s.SourceWordId = sw.Id
-INNER JOIN WordList:Word as tw
-    ON tw.Id = s.TargetWordId
-INNER JOIN WordList:Language as tl
-    ON tl.Id = tw.LanguageId
-WHERE
-    sw.Text = 'Car'
-    OR sw.Text = 'Gym'
-    OR sw.Text = 'Moon'
-
-SELECT * INTO Temporary:Payload2 FROM Temporary:Payload1
-
---Test DELETE from schema.
-DELETE FROM Temporary:Payload1 WHERE TargetLanguage = 'French'
-
---Test DELETE from alias.
-DELETE
-    p1
-FROM
-    Temporary:Payload1 as p1
-WHERE
-    p1.TargetLanguage = 'German'
-
---Test DELETE from alias with JOIN.
-DELETE
-    p1
-FROM
-    Temporary:Payload1 as p1
-INNER JOIN WordList:Language as L
-    ON L.Name = P1.TargetLanguage
-WHERE
-    L.Id = 1 --English
-
---Test JOIN.
-SELECT
-    T1.*
-FROM
-    Temporary:Payload1 as T1
-INNER JOIN WordList:Word as T2
-    ON T2.Id = T1.SourceWordId
-
---Test UPDATE.
-UPDATE
-    Temporary:Payload1
-SET
-    TargetLanguage = 'UPDATED ' + TargetLanguage
-WHERE
-    TargetLanguage = 'Finnish'
-
---Test UPDATE with join.
-UPDATE
-    p1
-SET
-    TargetLanguage = l.Name
-FROM
-    Temporary:Payload1 as p1
-INNER JOIN WordList:Word as w
-    ON w.Id = p1.TargetWordId
-INNER JOIN WordList:Language as l
-    ON l.Id = W.LanguageId
-WHERE
-    p1.TargetLanguage LIKE 'Updated %'
-
-DROP SCHEMA Temporary:InsertTest1
-CREATE SCHEMA Temporary:InsertTest1
-
---Test VALUES INSERT.
-INSERT INTO Temporary:InsertTest1(FirstName, LastName)
-VALUES('Jane', 'Doe'),('John', 'Doe')
-
---Test VALUES INSERT with expression.
-INSERT INTO Temporary:InsertTest1(Id, FirstName, LastName)
-VALUES(1, 'Jane', 'Doe'),(2, 'John', 'Doe'),(3, 'Test', Guid() + ' (some expression)')
-
-DROP SCHEMA Temporary:InsertTest2
-CREATE SCHEMA Temporary:InsertTest2
-
---Test RAGGED INSERT.
-INSERT INTO Temporary:InsertTest2
-(FirstName = 'John', LastName = 'Doe'),
-(FirstName = 'Fred', MiddleName = 'Joe', LastName = 'Doe'),
-(FirstName = 'Jane', LastName = 'Doe')
-
-
---Test RAGGED INSERT with expression
-INSERT INTO Temporary:InsertTest2
-(FirstName = 'John', LastName = 'Doe'),
-(FirstName = 'Fred', MiddleName = Guid(), LastName = 'Doe'),
-(FirstName = 'Jane', LastName = 'Doe')
-
----Test complex SELECT and complex WHERE.
-SELECT
-    Sha1('t'),
-    11 ^ (2 + 1) + 'ten' + (Length('A10CharStr') * 10 + 2),
-    6 * -1 as Negative,
-    10 + 10 + (11 ^ 3) + 10 + '->' + Guid(),
-    10 + 10 + 'ten' + 10 * 10,
-    'ten (' + 10 * 10 + ') : ' + DateTimeUTC('yyyy/MM/dd hh:mm:ss tt')
-FROM
-    Single
-WHERE
-    1 = 1
-    and 1 != 3
-    and (
-        10 = 10
-        and 5 = 5
-        and (
-            10 = 10
-            and length(guid()) = 36
-            and 5 = 5
-        )
-    )
-ORDER BY
-    10
-
---Test index creation.
-CREATE INDEX ix_Temporary_SourceWord_TargetWord
-(
-    SourceWord,
-    TargetWord
-) ON Temporary:Payload1
-
---Test index rebuild.
-REBUILD INDEX ix_Temporary_SourceWord_TargetWord ON Temporary:Payload1
-REBUILD INDEX ix_Temporary_SourceWord_TargetWord ON Temporary:Payload1 WITH (Partitions = 1)
-REBUILD INDEX ix_Temporary_SourceWord_TargetWord ON Temporary:Payload1 WITH (Partitions = 10)
-REBUILD INDEX ix_Temporary_SourceWord_TargetWord ON Temporary:Payload1 WITH (Partitions = 100)
-
---Test index analyze.
-ANALYZE INDEX ix_Temporary_SourceWord_TargetWord ON Temporary:Payload1
-
---Test unique key creation.
-CREATE UNIQUEKEY uk_Temporary_SourceWordId_TargetWordId
-(
-    SourceWordId,
-    TargetWordId
-) ON Temporary:Payload1
-
---Test unique key rebuild.
-REBUILD INDEX uk_Temporary_SourceWordId_TargetWordId ON Temporary:Payload1
-REBUILD INDEX uk_Temporary_SourceWordId_TargetWordId ON Temporary:Payload1 WITH (Partitions = 1)
-REBUILD INDEX uk_Temporary_SourceWordId_TargetWordId ON Temporary:Payload1 WITH (Partitions = 10)
-REBUILD INDEX uk_Temporary_SourceWordId_TargetWordId ON Temporary:Payload1 WITH (Partitions = 100)
-
---Test unique key analyze.
-ANALYZE INDEX uk_Temporary_SourceWordId_TargetWordId ON Temporary:Payload1
-
---Test analysis.
-ANALYZE SCHEMA Temporary:Payload1
-ANALYZE SCHEMA Temporary:Payload1 WITH (IncludePhysicalPages = false)
-ANALYZE SCHEMA Temporary:Payload1 WITH (IncludePhysicalPages = true)
-
---Test ALTER schema.
-ALTER schema Temporary:Payload1 WITH (PageSize = 100)
-
---Test functions:
-
-exec CheckpointHealthCounters()
-exec ClearCacheAllocations()
-exec ClearHealthCounters()
-exec ReleaseCacheAllocations()
-exec ShowAggregateFunctions()
-exec ShowBlocks()
-exec ShowBlocks(1)
-exec ShowBlocks(null)
-exec ShowBlockTree()
-exec ShowCacheAllocations()
-exec ShowCachePages()
-exec ShowCachePartitions()
-exec ShowHealthCounters()
-exec ShowLocks()
-exec ShowLocks(1)
-exec ShowLocks(null)
-exec ShowMemoryUtilization()
-exec ShowProcesses()
-exec ShowProcesses(1)
-exec ShowProcesses(null)
-exec ShowScalarFunctions()
-exec ShowSystemFunctions()
-exec ShowTransactions()
-exec ShowTransactions(1)
-exec ShowTransactions(null)
-exec ShowVersion()
-exec ShowVersion(true)
-exec ShowVersion(false)
-exec ShowWaitingLocks()
-exec ShowWaitingLocks(1)
-exec ShowWaitingLocks(null)
-exec Cancel(0) --pid:0 never exists, so this is not a great test.
-exec Terminate(0) --pid:0 never exists, so this is not a great test.
-exec ShowThreadPools()
-
---Test scalar functions:
-SELECT IsBetween(50, 10, 100) FROM Single
-SELECT IsBetween(200, 10, 100) FROM Single
-SELECT IsEqual('string1', 'string2') FROM Single
-SELECT IsGreater(10, 5) FROM Single
-SELECT IsGreaterOrEqual(10, 5) FROM Single
-SELECT IsLess(10, 5) FROM Single
-SELECT IsLessOrEqual(10, 5) FROM Single
-SELECT IsLike('Hello World', '%o w%') FROM Single
-SELECT IsNotBetween(10, 5, 100) FROM Single
-SELECT IsNotEqual('string1', 'string2') FROM Single
-SELECT IsNotLike('Hello World', '%o w%') FROM Single
-SELECT IsInteger('123456') FROM Single
-SELECT IsString('123456') FROM Single
-SELECT IsDouble('123456') FROM Single
-SELECT Checksum('hello world') FROM Single
-SELECT LastIndexOf('text', 'This is some text and this is more text.') FROM Single
-SELECT Length('Hello World') FROM Single
-SELECT DateTime('yyyy-MM-dd HH:mm:ss.ms') FROM Single
-SELECT DateTimeUTC('yyyy-MM-dd HH:mm:ss.ms') FROM Single
-SELECT DocumentID('h') FROM Single
-SELECT DocumentPage('') FROM Single
-SELECT DocumentUID('') FROM Single
-SELECT Guid() FROM Single
-SELECT IndexOf('text', 'This is some text and this is more text.') FROM Single
-SELECT Left('Hello world', 5) FROM Single
-SELECT Right('Hello world', 5) FROM Single
-SELECT Sha1('Hello world') FROM Single
-SELECT Sha256('Hello world') FROM Single
-SELECT Sha512('Hello world') FROM Single
-SELECT SubString('Hello cruel world', 6, 5) FROM Single
-SELECT ToLower('Hello cruel world') FROM Single
-SELECT ToProper('Hello cruel world') FROM Single
-SELECT ToUpper('Hello cruel world') FROM Single
-SELECT Concat('hello', ' ', 'world') FROM Single
-SELECT Coalesce(null, null, 'hello', 'world') FROM Single
-SELECT Trim('hello world', 'dhle') FROM Single
-SELECT Trim(' hello world ') FROM Single
-SELECT IIF(IsGreater(Length ('Hello World'), 10), 'Is True', 'Is False') FROM Single
-
---Test aggregate functions
-SELECT
-    Avg(L.Id) as Avg,
-    Count(L.Id) as Count,
-    CountDistinct(L.Name, true) as CaseSensitiveDistinct,
-    CountDistinct(L.Name, false) as CaseInsensitiveDistinct,
-    GeometricMean(L.Id) as GeometricMean,
-    Min(L.Id) as Min,
-    Max(L.Id) as Max,
-    Mean(L.Id) as Mean,
-    Median(L.Id) as Median,
-    Mode(L.Id) as Mode,
-    Sum(L.Id) as Sum,
-    Variance(L.Id) as Variance,
-    MinString(L.Name) as MinString,
-    MaxString(L.Name) as MaxString,
-    Sha1Agg(L.Name) as Sha1Agg,
-    Sha256Agg(L.Name) as Sha256Agg,
-    Sha512Agg(L.Name) as Sha512Agg
-FROM
-    WordList:Language as L
-
---Test aggregate functions
-SELECT
-    Avg(L.Id) as Avg,
-    Count(L.Id) as Count,
-    CountDistinct(L.Text, true) as CaseSensitiveDistinct,
-    CountDistinct(L.Text, false) as CaseInsensitiveDistinct,
-    GeometricMean(L.Id) as GeometricMean,
-    Min(L.Id) as Min,
-    Max(L.Id) as Max,
-    Mean(L.Id) as Mean,
-    Median(L.Id) as Median,
-    Mode(L.Id) as Mode,
-    Sum(L.Id) as Sum,
-    Variance(L.Id) as Variance,
-    MinString(L.Text) as MinString,
-    MaxString(L.Text) as MaxString,
-    Sha1Agg(L.Text) as Sha1Agg,
-    Sha256Agg(L.Text) as Sha256Agg,
-    Sha512Agg(L.Text) as Sha512Agg
-FROM
-    WordList:Word as L
-WHERE
-    L.Text LIKE '%oad%'
diff --git a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowAggregateFunctions.cs b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowAggregateFunctions.cs
index d3fd7869..b39e6ff3 100644
--- a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowAggregateFunctions.cs
+++ b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowAggregateFunctions.cs
@@ -3,7 +3,6 @@ using NTDLS.Katzebase.Parsers.Functions.Aggregate;
 using NTDLS.Katzebase.Parsers.Functions.System;
 using System.Text;
-using static NTDLS.Katzebase.Api.KbConstants;
 
 namespace NTDLS.Katzebase.Engine.Functions.System.Implementations
 {
diff --git a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowScalarFunctions.cs b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowScalarFunctions.cs
index 6187fbb7..499a6a5c 100644
--- a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowScalarFunctions.cs
+++ b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowScalarFunctions.cs
@@ -3,7 +3,6 @@ using NTDLS.Katzebase.Parsers.Functions.Scalar;
 using NTDLS.Katzebase.Parsers.Functions.System;
 using System.Text;
-using static NTDLS.Katzebase.Api.KbConstants;
 
 namespace NTDLS.Katzebase.Engine.Functions.System.Implementations
 {
diff --git a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowSystemFunctions.cs b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowSystemFunctions.cs
index b8abf245..641429e9 100644
--- a/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowSystemFunctions.cs
+++ b/NTDLS.Katzebase.Engine/Functions/System/Implementations/SystemShowSystemFunctions.cs
@@ -2,7 +2,6 @@ using NTDLS.Katzebase.Engine.Atomicity;
 using NTDLS.Katzebase.Parsers.Functions.System;
 using System.Text;
-using static NTDLS.Katzebase.Api.KbConstants;
 
 namespace NTDLS.Katzebase.Engine.Functions.System.Implementations
 {
diff --git a/TestApplications/Benchmark/Program.cs b/TestApplications/Benchmark/Program.cs
index 2924bcd2..f28c3315 100644
--- a/TestApplications/Benchmark/Program.cs
+++ b/TestApplications/Benchmark/Program.cs
@@ -32,82 +32,164 @@ static void Main()
         ExecuteBenchmark_Inserts();
 
         Console.WriteLine("Executing updates:");
-        //TODO: implement.
+        ExecuteBenchmark_Updates();
 
         Console.WriteLine("Executing deletes:");
         //TODO: implement.
     }
 
-    private static void ExecuteBenchmark_Inserts()
-    {
-        ExecuteBenchmark_Inserts("Payload.gz", "Benchmarking:Insertion_tx10", 10000, 10);
-        ExecuteBenchmark_Inserts("Payload.gz", "Benchmarking:Payload_Insertion_tx100", 10000, 100);
-        ExecuteBenchmark_Inserts("Payload.gz", "Benchmarking:Insertion_tx1000", 10000, 1000);
-    }
-
-    private static void ExecuteBenchmark_Inserts(string fileName, string schemaName, int maxCount, int rowsPerTransaction)
+    private static void ExecuteBenchmark_Updates()
     {
-        Console.WriteLine($"ExecuteBenchmark_Inserts: {schemaName}");
+        Updates("Payload.gz", "Benchmarking:Payload_1000", 100, 100, 100);
+        Updates("Payload.gz", "Benchmarking:Payload_10000", 100, 100, 1000);
+        Updates("Payload.gz", "Benchmarking:Payload_100000", 100, 100, 1000);
 
-        var process = StartService();
-        using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword("")))
+        static void Updates(string fileName, string schemaName, int maxCount, int rowsPerTransaction, int mod)
        {
-            client.Schema.DropIfExists(schemaName);
-            client.Schema.Create(schemaName);
+            Console.WriteLine($"Updates: {schemaName}");
 
-            var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, fileName)));
-            var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject<List<Dictionary<string, object?>>>(bytes).EnsureNotNull();
+            var statement = $"UPDATE {schemaName} SET\r\nViewCount = @AcceptedAnswerId,\r\nTitle = @AnswerCount,\r\nTags = @Body,\r\nScore = @ClosedDate,\r\nPostTypeId = @CommentCount,\r\nParentId = @CommunityOwnedDate,\r\nOwnerUserId = @CreationDate,\r\nLastEditorUserId = @FavoriteCount,\r\nLastEditorDisplayName = @LastActivityDate,\r\nLastEditDate = @LastEditDate,\r\nLastActivityDate = @LastEditorDisplayName,\r\nFavoriteCount = @LastEditorUserId,\r\nCreationDate = @OwnerUserId,\r\nCommunityOwnedDate = @ParentId,\r\nCommentCount = @PostTypeId,\r\nClosedDate = @Score,\r\nBody = @Tags,\r\nAnswerCount = @Title,\r\nAcceptedAnswerId = @ViewCount\r\nWHERE\r\nId = @Id";
 
-        double previousTotalProcessorTime = 0;
-        for (int i = 0; i < _iterationsPerTest; i++)
+            var process = StartService();
+            using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword("")))
             {
-            var startTime = DateTime.Now;
+                Console.WriteLine("Extracting payload.");
+                var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, fileName)));
+                Console.WriteLine("Deserializing payload.");
+                var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject<List<Dictionary<string, object?>>>(bytes).EnsureNotNull();
 
-            int rowCount = 0;
+                Console.WriteLine($"Creating required indexes on {schemaName}.");
+                client.Query.ExecuteNonQuery($"DROP INDEX IX_{schemaName}_Id ON {schemaName}\r\nCREATE INDEX IX_{schemaName}_Id(\r\n\tId\r\n) ON {schemaName} WITH (PARTITIONS=1000)\r\n");
 
-            client.Transaction.Begin();
+                Console.WriteLine($"Starting updates...");
 
-            foreach (var row in payloadRows)
+                double previousTotalProcessorTime = 0;
+                for (int i = 0; i < _iterationsPerTest; i++)
                 {
-                if (rowCount > maxCount)
-                {
-                    break;
-                }
+                    var startTime = DateTime.UtcNow;
 
-                if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0)
-                {
-                    client.Transaction.Commit();
-                    client.Transaction.Begin();
-                }
+                    int rowCount = 0;
+                    int modCount = 0;
 
-                try
-                {
-                    client.Document.Store(schemaName, new KbDocument(row));
-                }
-                catch (Exception ex)
+                    client.Transaction.Begin();
+
+                    foreach (var row in payloadRows)
                     {
-                    Console.WriteLine(ex.Message);
+                        if ((modCount % mod) == 0)
+                        {
+                            if (rowCount > maxCount)
+                            {
+                                break;
+                            }
+
+                            if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0)
+                            {
Console.WriteLine("Committing..."); + client.Transaction.Commit(); + client.Transaction.Begin(); + } + + try + { + client.Query.ExecuteNonQuery(statement, row); + } + catch (Exception ex) + { + Console.WriteLine(ex.Message); + } + rowCount++; + } + + modCount++; } - rowCount++; + Console.WriteLine("Committing..."); + client.Transaction.Commit(); + + double thisTotalProcessorTime = process.TotalProcessorTime.TotalSeconds; + double deltaTotalProcessorTime = thisTotalProcessorTime - previousTotalProcessorTime; + previousTotalProcessorTime = thisTotalProcessorTime; + + WriteMetrics(schemaName, i, (DateTime.UtcNow - startTime).TotalMilliseconds, process.PeakWorkingSet64, deltaTotalProcessorTime); } + } + Thread.Sleep(1000); + process.Kill(); + } + } - client.Transaction.Commit(); + private static void ExecuteBenchmark_Inserts() + { + Inserts("Payload.gz", "Benchmarking:Insertion_tx10", 1000, 10); + Inserts("Payload.gz", "Benchmarking:Insertion_tx100", 1000, 100); + Inserts("Payload.gz", "Benchmarking:Insertion_tx1000", 1000, 100); - double thisTotalProcessorTime = process.TotalProcessorTime.TotalSeconds; - double deltaTotalProcessorTime = thisTotalProcessorTime - previousTotalProcessorTime; - previousTotalProcessorTime = thisTotalProcessorTime; + static void Inserts(string fileName, string schemaName, int maxCount, int rowsPerTransaction) + { + Console.WriteLine($"Inserts: {schemaName}"); - WriteMetrics(schemaName, i, (DateTime.Now - startTime).TotalMilliseconds, process.PeakWorkingSet64, deltaTotalProcessorTime); + var process = StartService(); + using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword(""))) + { + client.Schema.DropIfExists(schemaName); + client.Schema.Create(schemaName); + + Console.WriteLine("Extracting payload."); + var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, fileName))); + Console.WriteLine("Deserializing payload."); + var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject>>(bytes).EnsureNotNull(); + + double previousTotalProcessorTime = 0; + for (int i = 0; i < _iterationsPerTest; i++) + { + var startTime = DateTime.UtcNow; + + int rowCount = 0; + + client.Transaction.Begin(); + + foreach (var row in payloadRows) + { + if (rowCount > maxCount) + { + break; + } + + if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0) + { + client.Transaction.Commit(); + client.Transaction.Begin(); + } + + try + { + client.Document.Store(schemaName, new KbDocument(row)); + } + catch (Exception ex) + { + Console.WriteLine(ex.Message); + } + + rowCount++; + } + + client.Transaction.Commit(); + + double thisTotalProcessorTime = process.TotalProcessorTime.TotalSeconds; + double deltaTotalProcessorTime = thisTotalProcessorTime - previousTotalProcessorTime; + previousTotalProcessorTime = thisTotalProcessorTime; + + WriteMetrics(schemaName, i, (DateTime.UtcNow - startTime).TotalMilliseconds, process.PeakWorkingSet64, deltaTotalProcessorTime); + } } + Thread.Sleep(1000); + process.Kill(); } - Thread.Sleep(1000); - process.Kill(); } private static void ExecuteBenchmark_Scripts() { - var scriptFiles = GetBenchmarkScripts(); + var scriptFiles = Directory.EnumerateFiles(_ScriptsPath, "*.kbs").ToList(); foreach (var scriptFile in scriptFiles) { @@ -140,12 +222,17 @@ private static void CreatePayloadData() var process = StartService(); using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword(""))) { + Console.WriteLine("Dropping benchmarking schema."); client.Schema.DropIfExists("Benchmarking"); + 
+ Console.WriteLine("Dropping benchmarking schemas."); client.Schema.Create("Benchmarking:Payload_1000"); client.Schema.Create("Benchmarking:Payload_10000"); client.Schema.Create("Benchmarking:Payload_100000"); + Console.WriteLine("Extracting payload."); var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, "Payload.gz"))); + Console.WriteLine("Deserializing payload."); var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject>>(bytes).EnsureNotNull(); int rowCount = 0; @@ -156,8 +243,8 @@ private static void CreatePayloadData() { if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0) { - client.Transaction.Commit(); Console.Write($"Committing {rowsPerTransaction:n0} rows, total: {rowCount:n0} ({((rowCount / (double)payloadRows.Count) * 100.0):n2}%)...\r"); + client.Transaction.Commit(); client.Transaction.Begin(); } @@ -196,59 +283,59 @@ private static void CreatePayloadData() Thread.Sleep(1000); process.Kill(); - } - private static void InsertPayloadData(string fileName, string schemaName, int maxCount) - { - var process = StartService(); - using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword(""))) - { - client.Schema.DropIfExists(schemaName); - client.Schema.Create(schemaName); - - var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, fileName))); - var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject>>(bytes).EnsureNotNull(); - for (int i = 0; i < _iterationsPerTest; i++) + static void InsertPayloadData(string fileName, string schemaName, int maxCount) + { + var process = StartService(); + using (var client = new KbClient(_serverHost, _serverPort, "admin", KbClient.HashPassword(""))) { - int rowCount = 0; - int rowsPerTransaction = 1000; + client.Schema.DropIfExists(schemaName); + client.Schema.Create(schemaName); - client.Transaction.Begin(); - foreach (var row in payloadRows) - { - if (rowCount > maxCount) - { - break; - } + Console.WriteLine("Extracting payload."); + var bytes = DecompressToString(File.ReadAllBytes(Path.Combine(_DataPath, fileName))); + Console.WriteLine("Deserializing payload."); + var payloadRows = Newtonsoft.Json.JsonConvert.DeserializeObject>>(bytes).EnsureNotNull(); - if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0) - { - client.Transaction.Commit(); - client.Transaction.Begin(); - } + for (int i = 0; i < _iterationsPerTest; i++) + { + int rowCount = 0; + int rowsPerTransaction = 1000; - try - { - client.Document.Store(schemaName, new KbDocument(row)); - } - catch (Exception ex) + client.Transaction.Begin(); + foreach (var row in payloadRows) { - Console.WriteLine(ex.Message); + if (rowCount > maxCount) + { + break; + } + + if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0) + { + Console.WriteLine("Committing..."); + client.Transaction.Commit(); + client.Transaction.Begin(); + } + + try + { + client.Document.Store(schemaName, new KbDocument(row)); + } + catch (Exception ex) + { + Console.WriteLine(ex.Message); + } + + rowCount++; } - - rowCount++; + Console.WriteLine("Committing..."); + client.Transaction.Commit(); } - client.Transaction.Commit(); } + Thread.Sleep(1000); + process.Kill(); } - Thread.Sleep(1000); - process.Kill(); - } - - private static List GetBenchmarkScripts() - { - return Directory.EnumerateFiles(_ScriptsPath, "*.kbs").ToList(); } private static Process StartService() @@ -261,7 +348,7 @@ private static Process StartService() return process; } - public static byte[] Decompress(byte[] bytes) + public static string 
     {
         using var msi = new MemoryStream(bytes);
         using var mso = new MemoryStream();
@@ -269,11 +356,9 @@ public static byte[] Decompress(byte[] bytes)
         using (var gs = new GZipStream(msi, CompressionMode.Decompress))
         {
             gs.CopyTo(mso);
         }
-        return mso.ToArray();
+        return Encoding.UTF8.GetString(mso.ToArray());
     }
 
-    public static string DecompressToString(byte[] bytes) => Encoding.UTF8.GetString(Decompress(bytes));
-
     static void WriteMetrics(string name, int iteration, double durationMs, double peakMemory, double cpuTime)
     {
         File.AppendAllText(OutputFileName, $"{StartDateTime}\t{name}\t{iteration}\t{durationMs:n2}\t{(peakMemory / 1024.0 / 1024.0):n2}\t{cpuTime:n2}\r\n");
diff --git a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - 1.kbs b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - 1.kbs
similarity index 96%
rename from TestApplications/Benchmark/Scripts/0000.0 - Create Index - 1.kbs
rename to TestApplications/Benchmark/Scripts/0000.1 - Create Index - 1.kbs
index ae252f9a..f841632d 100644
--- a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - 1.kbs
+++ b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - 1.kbs
@@ -1,7 +1,7 @@
-EXEC ClearCacheAllocations
-
-DROP INDEX IX_Payload_tx100000_01_1000 ON Benchmarking:Payload_100000
-
-CREATE INDEX IX_Payload_tx100000_01_1000(
-    Score
-) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
+EXEC ClearCacheAllocations
+
+DROP INDEX IX_Payload_tx100000_01_1000 ON Benchmarking:Payload_100000
+
+CREATE INDEX IX_Payload_tx100000_01_1000(
+    Score
+) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
diff --git a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - 2.kbs b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - 2.kbs
similarity index 96%
rename from TestApplications/Benchmark/Scripts/0000.0 - Create Index - 2.kbs
rename to TestApplications/Benchmark/Scripts/0000.1 - Create Index - 2.kbs
index e515a7d8..cefa1069 100644
--- a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - 2.kbs
+++ b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - 2.kbs
@@ -1,8 +1,8 @@
-EXEC ClearCacheAllocations
-
-DROP INDEX IX_Payload_tx100000_02_1000 ON Benchmarking:Payload_100000
-
-CREATE INDEX IX_Payload_tx100000_02_1000(
-    OwnerUserId,
-    ParentId
-) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
+EXEC ClearCacheAllocations
+
+DROP INDEX IX_Payload_tx100000_02_1000 ON Benchmarking:Payload_100000
+
+CREATE INDEX IX_Payload_tx100000_02_1000(
+    OwnerUserId,
+    ParentId
+) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
diff --git a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - Key.kbs b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - Key.kbs
similarity index 96%
rename from TestApplications/Benchmark/Scripts/0000.0 - Create Index - Key.kbs
rename to TestApplications/Benchmark/Scripts/0000.1 - Create Index - Key.kbs
index cded0e93..a9df9b2c 100644
--- a/TestApplications/Benchmark/Scripts/0000.0 - Create Index - Key.kbs
+++ b/TestApplications/Benchmark/Scripts/0000.1 - Create Index - Key.kbs
@@ -1,7 +1,7 @@
-EXEC ClearCacheAllocations
-
-DROP INDEX IX_Payload_tx100000_03_1000 ON Benchmarking:Payload_100000
-
-CREATE UNIQUEKEY IX_Payload_tx100000_03_1000(
-    Id
-) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
+EXEC ClearCacheAllocations
+
+DROP INDEX IX_Payload_tx100000_03_1000 ON Benchmarking:Payload_100000
+
+CREATE UNIQUEKEY IX_Payload_tx100000_03_1000(
+    Id
+) ON Benchmarking:Payload_100000 WITH (PARTITIONS=1000)
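Reviewer note: both the insert and the new update benchmarks above rely on the same batching pattern - commit every rowsPerTransaction rows, then immediately begin a new transaction - so no single transaction grows unbounded. Below is a minimal standalone sketch of that pattern in C#. It uses only the KbClient calls that appear in this diff (Transaction.Begin/Commit, Document.Store, KbClient.HashPassword); the namespace import, host, port, and schema name are placeholder assumptions for illustration, not part of this change.

using System;
using System.Collections.Generic;
using NTDLS.Katzebase.Api; // assumed namespace for KbClient/KbDocument

internal static class BatchingSketch // hypothetical helper, not in this PR
{
    public static void StoreInBatches(List<Dictionary<string, object?>> rows, int rowsPerTransaction)
    {
        // Placeholder connection values; the benchmark itself reads these
        // from _serverHost/_serverPort and an empty admin password.
        using var client = new KbClient("127.0.0.1", 6858, "admin", KbClient.HashPassword(""));

        client.Transaction.Begin();
        int rowCount = 0;
        foreach (var row in rows)
        {
            // Commit every N rows, then open the next batch - the same
            // guard the insert/update loops in Program.cs use.
            if (rowCount > 0 && (rowCount % rowsPerTransaction) == 0)
            {
                client.Transaction.Commit();
                client.Transaction.Begin();
            }

            client.Document.Store("Benchmarking:Example", new KbDocument(row)); // hypothetical schema name
            rowCount++;
        }
        client.Transaction.Commit(); // flush the final, possibly partial, batch
    }
}

The trade-off the benchmarks measure is exactly this batch size: larger rowsPerTransaction values amortize commit overhead across more rows, at the cost of holding locks and buffered work longer per transaction.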