Skip to content

Commit f3aaa08

Browse files
committed
fix: Retry after partial file reads.
1 parent faf51dc commit f3aaa08

1 file changed

Lines changed: 44 additions & 0 deletions

File tree

pkgs/sdk/server/src/Internal/DataSources/FileDataSource.cs

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,10 @@ internal sealed class FileDataSource : IDataSource
2828
private volatile int _lastVersion;
2929
private object _updateLock = new object();
3030

31+
private const int MaxRetries = 5;
32+
private readonly TimeSpan RetryDelay = TimeSpan.FromSeconds(0.6);
33+
private readonly Dictionary<string, int> _retryCounts = new Dictionary<string, int>();
34+
3135
public FileDataSource(IDataSourceUpdates dataSourceUpdates, FileDataTypes.IFileReader fileReader,
3236
List<string> paths, bool autoUpdate, Func<string, object> alternateParser, bool skipMissingPaths,
3337
FileDataTypes.DuplicateKeysHandling duplicateKeysHandling,
@@ -94,6 +98,7 @@ private void LoadAll()
9498
var version = Interlocked.Increment(ref _lastVersion);
9599
var flags = new Dictionary<string, ItemDescriptor>();
96100
var segments = new Dictionary<string, ItemDescriptor>();
101+
var shouldRetry = false;
97102
foreach (var path in _paths)
98103
{
99104
try
@@ -102,17 +107,56 @@ private void LoadAll()
102107
_logger.Debug("file data: {0}", content);
103108
var data = _parser.Parse(content, version);
104109
_dataMerger.AddToData(data, flags, segments);
110+
// Remove any retry count associated with this path.
111+
_retryCounts.Remove(path);
105112
}
106113
catch (FileNotFoundException) when (_skipMissingPaths)
107114
{
108115
_logger.Debug("{0}: {1}", path, "File not found");
109116
}
117+
catch (System.Text.Json.JsonException)
118+
{
119+
// We may have received the notification of a file change while the file was being written.
120+
// As a result, we may read an empty or partially written file. When we encounter a JSON parsing issue
121+
// we will retry after a short delay.
122+
// We will retry up to MaxRetries times before giving up.
123+
if (!_retryCounts.ContainsKey(path))
124+
{
125+
_retryCounts[path] = 0;
126+
}
127+
_retryCounts[path]++;
128+
129+
if (_retryCounts[path] < MaxRetries)
130+
{
131+
shouldRetry = true;
132+
_logger.Warn("{0}: {1}", path, "Failed to parse file, retrying in " + RetryDelay.TotalMilliseconds + " milliseconds");
133+
}
134+
else
135+
{
136+
_logger.Error("{0}: {1}", path, "Failed to parse file after " + MaxRetries + " retries");
137+
}
138+
139+
break;
140+
}
110141
catch (Exception e)
111142
{
112143
LogHelpers.LogException(_logger, "Failed to load " + path, e);
113144
return;
114145
}
115146
}
147+
148+
if (shouldRetry)
149+
{
150+
Task.Run(async () =>
151+
{
152+
await Task.Delay(RetryDelay);
153+
LoadAll();
154+
});
155+
return;
156+
}
157+
// If any file failed to load for a reason other than not existing, then that
158+
// update would fail. This behavior is retained with the addition of the retry, but it should be
159+
// examined.
116160

117161
var allData = new FullDataSet<ItemDescriptor>(
118162
ImmutableDictionary.Create<DataKind, KeyedItems<ItemDescriptor>>()

0 commit comments

Comments
 (0)