Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -224,9 +224,9 @@ private int ReadBytes(byte[] buffer, int offset, int count)

// we are guaranteed that cb is < Int32.Max since we always pass in count which is of type Int32 to
// our getbytes interface
count -= (int)cb;
offset += (int)cb;
intCount += (int)cb;
count -= cb;
offset += cb;
intCount += cb;
}
else
{
Expand Down Expand Up @@ -387,9 +387,9 @@ public override int Read(byte[] buffer, int offset, int count)

Buffer.BlockCopy(_cachedBytes[_currentArrayIndex], _currentPosition, buffer, offset, cb);
_currentPosition += cb;
count -= (int)cb;
offset += (int)cb;
intCount += (int)cb;
count -= cb;
offset += cb;
intCount += cb;
}

return intCount;
Expand Down Expand Up @@ -477,13 +477,14 @@ private long TotalLength

sealed internal class SqlStreamingXml
{
private static readonly XmlWriterSettings s_writerSettings = new() { CloseOutput = true, ConformanceLevel = ConformanceLevel.Fragment };
private static readonly XmlWriterSettings s_writerSettings = new() { CloseOutput = true, ConformanceLevel = ConformanceLevel.Fragment, Encoding = new UTF8Encoding(false) };

private readonly int _columnOrdinal;
private SqlDataReader _reader;
private XmlReader _xmlReader;
private bool _canReadChunk;
private XmlWriter _xmlWriter;
private StringWriter _strWriter;
private MemoryStream _memoryStream;
private long _charsRemoved;

public SqlStreamingXml(int i, SqlDataReader reader)
Expand All @@ -495,11 +496,12 @@ public SqlStreamingXml(int i, SqlDataReader reader)
public void Close()
{
((IDisposable)_xmlWriter).Dispose();
((IDisposable)_memoryStream).Dispose();
((IDisposable)_xmlReader).Dispose();
_reader = null;
_xmlReader = null;
_xmlWriter = null;
_strWriter = null;
_memoryStream = null;
}

public int ColumnOrdinal => _columnOrdinal;
Expand All @@ -508,14 +510,15 @@ public long GetChars(long dataIndex, char[] buffer, int bufferIndex, int length)
{
if (_xmlReader == null)
{
SqlStream sqlStream = new(_columnOrdinal, _reader, addByteOrderMark: true, processAllRows:false, advanceReader:false);
SqlStream sqlStream = new(_columnOrdinal, _reader, addByteOrderMark: true, processAllRows: false, advanceReader: false);
_xmlReader = sqlStream.ToXmlReader();
_strWriter = new StringWriter((System.IFormatProvider)null);
_xmlWriter = XmlWriter.Create(_strWriter, s_writerSettings);
_canReadChunk = _xmlReader.CanReadValueChunk;
_memoryStream = new MemoryStream();
_xmlWriter = XmlWriter.Create(_memoryStream, s_writerSettings);
}

int charsToSkip = 0;
int cnt = 0;
long charsToSkip = 0;
long cnt = 0;
if (dataIndex < _charsRemoved)
{
throw ADP.NonSeqByteAccess(dataIndex, _charsRemoved, nameof(GetChars));
Expand All @@ -529,72 +532,73 @@ public long GetChars(long dataIndex, char[] buffer, int bufferIndex, int length)
// total size up front without reading and converting the XML.
if (buffer == null)
{
return (long)(-1);
return -1;
}

StringBuilder strBldr = _strWriter.GetStringBuilder();
while (!_xmlReader.EOF)
long memoryStreamRemaining = _memoryStream.Length - _memoryStream.Position;
while (memoryStreamRemaining < (length + charsToSkip) && !_xmlReader.EOF)
{
if (strBldr.Length >= (length + charsToSkip))
// Check whether the MemoryStream has been fully read.
// If so, reset the MemoryStream for reuse and to avoid growing size too much.
if (_memoryStream.Length > 0 && memoryStreamRemaining == 0)
{
break;
// This also sets the Position back to 0.
_memoryStream.SetLength(0);
}
// Can't call _xmlWriter.WriteNode here, since it reads all of the data in before returning the first char.
// Do own implementation of WriteNode instead that reads just enough data to return the required number of chars
//_xmlWriter.WriteNode(_xmlReader, true);
// _xmlWriter.Flush();
WriteXmlElement();
// Update memoryStreamRemaining based on the number of chars just written to the MemoryStream
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What's written to the MemoryStream is bytes, the number of bytes and chars can be quite different depending on encoding. It might be clearer to just refer to byte counts in the comment.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I agree for a MemoryStream, but within method GetChars() the stream is passed into XmlWriter.Create() with specific Encoding = new UTF8Encoding(false) to prevent the BOM, as well as meet the XML expectation.
The only use for writing is within WriteXmlElement() consistent with only XML characters being written. Changing the comment for this specific method might dilute that only chars are written. Thoughts?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ultimately, the comment should assist future maintenance, so I'm good with modifying it to achieve that goal while also being more in line with MemoryStream semantics.

Something felt unexpected about the prior StringWriter using the default UTF-8 encoding, which is why I included "Note: UTF8Encoding(false)" within the PR comment. Perhaps there is a different, existing issue with SqlClient support of other encodings. I've seen references to using FOR XML with non-XML columns causing concerns.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's hard to tell what the right thing to do is. I'll go with your intuition as you currently know the code better.

do the new tests you've added work on the old implementation as well? What sort of code coverage do we have?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I will modify the comment on Mon. and also comment regarding the above "UTF8Encoding(false) addition in s_writerSettings" as the PR note is not as good as within the actual code.

  • LinearSingleNode failed with prior code base as it most directly replicated the logic within SqlBenchmarker - time delta more than 20% of linear.
  • LinearMultipleNodes passes with both versions - time delta within 20% of linear for 1MB vs. 5 x 1MB.

TDD-wise, I was using a locally modified version of SqlBenchmarker testing extremes - up to 500MB single node & up to 500 x 1MB nodes. Both ran longer than I cared to wait on the prior SqlClient code and completed under 3 minutes on the new one. Minimal time difference between single and multiple nodes, but a noticeable memory usage difference through Task Manager.

I'm not sure on code coverage tests. I will review them before pushing comment changes. I did not run the PerformanceTests, so I will have to get my environment configured for them.

memoryStreamRemaining = _memoryStream.Length - _memoryStream.Position;
if (charsToSkip > 0)
{
// Aggressively remove the characters we want to skip to avoid growing StringBuilder size too much
cnt = strBldr.Length < charsToSkip ? strBldr.Length : charsToSkip;
strBldr.Remove(0, cnt);
cnt = memoryStreamRemaining < charsToSkip ? memoryStreamRemaining : charsToSkip;
// Move the Position forward
_memoryStream.Seek(cnt, SeekOrigin.Current);
memoryStreamRemaining -= cnt;
charsToSkip -= cnt;
_charsRemoved += (long)cnt;
_charsRemoved += cnt;
}
}

if (charsToSkip > 0)
{
cnt = strBldr.Length < charsToSkip ? strBldr.Length : charsToSkip;
strBldr.Remove(0, cnt);
cnt = memoryStreamRemaining < charsToSkip ? memoryStreamRemaining : charsToSkip;
// Move the Position forward
_memoryStream.Seek(cnt, SeekOrigin.Current);
memoryStreamRemaining -= cnt;
charsToSkip -= cnt;
_charsRemoved += (long)cnt;
_charsRemoved += cnt;
}

if (strBldr.Length == 0)
if (memoryStreamRemaining == 0)
{
return 0;
}
// At this point charsToSkip must be 0
Debug.Assert(charsToSkip == 0);

cnt = strBldr.Length < length ? strBldr.Length : length;
cnt = memoryStreamRemaining < length ? memoryStreamRemaining : length;
for (int i = 0; i < cnt; i++)
{
buffer[bufferIndex + i] = strBldr[i];
buffer[bufferIndex + i] = (char)_memoryStream.ReadByte();
}
// Remove the characters we have already returned
strBldr.Remove(0, cnt);
_charsRemoved += (long)cnt;
return (long)cnt;
_charsRemoved += cnt;
return cnt;
}

// This method duplicates the work of XmlWriter.WriteNode except that it reads one element at a time
// instead of reading the entire node like XmlWriter.
// Caller already ensures !_xmlReader.EOF
private void WriteXmlElement()
{
if (_xmlReader.EOF)
{
return;
}

bool canReadChunk = _xmlReader.CanReadValueChunk;
char[] writeNodeBuffer = null;

// Constants
const int WriteNodeBufferSize = 1024;

long memoryStreamPosition = _memoryStream.Position;

_xmlReader.Read();
switch (_xmlReader.NodeType)
{
Expand All @@ -608,12 +612,9 @@ private void WriteXmlElement()
}
break;
case XmlNodeType.Text:
if (canReadChunk)
if (_canReadChunk)
{
if (writeNodeBuffer == null)
{
writeNodeBuffer = new char[WriteNodeBufferSize];
}
char[] writeNodeBuffer = new char[WriteNodeBufferSize];
int read;
while ((read = _xmlReader.ReadValueChunk(writeNodeBuffer, 0, WriteNodeBufferSize)) > 0)
{
Expand Down Expand Up @@ -650,6 +651,7 @@ private void WriteXmlElement()
break;
}
_xmlWriter.Flush();
_memoryStream.Position = memoryStreamPosition;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -218,6 +218,7 @@
<Compile Include="SQL\SqlNotificationTest\SqlNotificationTest.cs" />
<Compile Include="SQL\SqlSchemaInfoTest\SqlSchemaInfoTest.cs" />
<Compile Include="SQL\SqlStatisticsTest\SqlStatisticsTest.cs" />
<Compile Include="SQL\SqlStreamingXmlTest\SqlStreamingXmlTest.cs" />
<Compile Include="SQL\TransactionTest\DistributedTransactionTest.cs" />
<Compile Include="SQL\TransactionTest\TransactionEnlistmentTest.cs" />
<Compile Include="SQL\TransactionTest\TransactionTest.cs" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Data;
using System.Diagnostics;
using System.Globalization;
using Xunit;

namespace Microsoft.Data.SqlClient.ManualTesting.Tests
{
public static class SqlStreamingXmlTest
{
    /// <summary>
    /// Verifies that reading one large XML node via sequential GetChars scales roughly
    /// linearly with the node size (regression guard for the quadratic behavior in issue #1877).
    /// </summary>
    [ConditionalFact(typeof(DataTestUtility), nameof(DataTestUtility.AreConnStringsSetup))]
    public static void LinearSingleNode()
    {
        // Use a literal XML column of the specified size. The XML is constructed by replicating
        // a string of 'B' characters to reach the desired size, and wrapping it in XML tags.
        const string commandTextBase = "SELECT Convert(xml, N'<foo>' + REPLICATE(CAST('' AS nvarchar(max)) +N'B', ({0} * 1024 * 1024) - 11) + N'</foo>')";

        TimeSpan time1 = TimedExecution(commandTextBase, 1);
        TimeSpan time5 = TimedExecution(commandTextBase, 5);

        // Compare linear time for 1MB vs 5MB. We expect the time to be at most 6 times higher for 5MB, which permits additional 20% for any noise in the measurements.
        Assert.True(time5.TotalMilliseconds <= (time1.TotalMilliseconds * 6), $"Execution time did not follow linear scale: 1MB={time1.TotalMilliseconds}ms vs. 5MB={time5.TotalMilliseconds}ms");
    }

    /// <summary>
    /// Verifies that reading many 1MB XML nodes scales roughly linearly with the node count.
    /// </summary>
    [ConditionalFact(typeof(DataTestUtility), nameof(DataTestUtility.AreConnStringsSetup))]
    public static void LinearMultipleNodes()
    {
        // Use a literal XML column with the specified number of 1MB elements. The XML is constructed by replicating a string of 'B' characters to reach 1MB, then replicating to the desired number of elements.
        const string commandTextBase = "SELECT Convert(xml, REPLICATE(N'<foo>' + REPLICATE(CAST('' AS nvarchar(max)) + N'B', (1024 * 1024) - 11) + N'</foo>', {0}))";

        TimeSpan time1 = TimedExecution(commandTextBase, 1);
        TimeSpan time5 = TimedExecution(commandTextBase, 5);

        // Compare linear time for 1MB vs 5MB. We expect the time to be at most 6 times higher for 5MB, which permits additional 20% for any noise in the measurements.
        Assert.True(time5.TotalMilliseconds <= (time1.TotalMilliseconds * 6), $"Execution time did not follow linear scale: 1x={time1.TotalMilliseconds}ms vs. 5x={time5.TotalMilliseconds}ms");
    }

    /// <summary>
    /// Executes <paramref name="commandTextBase"/> (formatted with <paramref name="scale"/>)
    /// and measures only the time spent reading the XML column character by character.
    /// Connection, command, and reader are disposed deterministically.
    /// </summary>
    /// <param name="commandTextBase">Composite format string producing the SELECT statement.</param>
    /// <param name="scale">Size/count multiplier substituted into the query (in MB or node count).</param>
    /// <returns>Elapsed time of the read loop only (query execution time excluded).</returns>
    private static TimeSpan TimedExecution(string commandTextBase, int scale)
    {
        var stopwatch = new Stopwatch();

        // using blocks guarantee disposal of the connection, command, and reader even if the
        // read loop throws; an explicit Close() is then unnecessary.
        using (SqlConnection connection = new(DataTestUtility.TCPConnectionString))
        using (SqlCommand command = connection.CreateCommand())
        {
            connection.Open();
            command.CommandText = string.Format(CultureInfo.InvariantCulture, commandTextBase, scale);

            using (SqlDataReader sqlDataReader = command.ExecuteReader(CommandBehavior.SequentialAccess))
            {
                if (sqlDataReader.Read())
                {
                    // Time only the character-streaming portion; this is the code path under test.
                    stopwatch.Start();
                    ReadAllChars(sqlDataReader, scale);
                    stopwatch.Stop();
                }
            }
        }

        return stopwatch.Elapsed;
    }

    /// <summary>
    /// Replicate the reading approach used with issue #1877: pull the XML column one character
    /// at a time via sequential GetChars, then assert the total count matches the expected size.
    /// </summary>
    /// <param name="sqlDataReader">Reader positioned on a row; column 0 must be the XML value.</param>
    /// <param name="expectedMB">Expected total size of the column in megabytes (of chars).</param>
    private static void ReadAllChars(SqlDataReader sqlDataReader, int expectedMB)
    {
        var expectedSize = expectedMB * 1024 * 1024;
        var text = new char[expectedSize];
        var buffer = new char[1];

        long position = 0;
        long numCharsRead;
        do
        {
            // GetChars returns 0 once the value is exhausted; each call advances by one char.
            numCharsRead = sqlDataReader.GetChars(0, position, buffer, 0, 1);
            if (numCharsRead > 0)
            {
                text[position] = buffer[0];
                position += numCharsRead;
            }
        }
        while (numCharsRead > 0);

        Assert.Equal(expectedSize, position);
    }
}
}