From f74f39c4580e62fa99ada2156ab42f8d816e6acd Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Sat, 21 Dec 2024 21:34:43 -0700 Subject: [PATCH 1/7] Add using statements where possible, per CA2000, #265 --- .../Analysis/Hunspell/Dictionary.cs | 4 +- .../Analysis/Synonym/SynonymMap.cs | 3 ++ .../Dict/BinaryDictionary.cs | 8 ++-- .../Dict/CharacterDefinition.cs | 2 +- .../Dict/ConnectionCosts.cs | 2 +- .../Tools/BinaryDictionaryWriter.cs | 6 +-- .../Tools/CharacterDefinitionWriter.cs | 2 +- .../Tools/ConnectionCostsBuilder.cs | 2 +- .../Tools/ConnectionCostsWriter.cs | 4 +- .../Tools/TokenInfoDictionaryBuilder.cs | 2 +- .../Memory/DirectPostingsFormat.cs | 2 +- src/Lucene.Net.Misc/Index/IndexSplitter.cs | 2 +- .../Index/MultiPassIndexSplitter.cs | 2 +- src/Lucene.Net.Misc/Misc/GetTermInfo.cs | 40 +++++++++++-------- src/Lucene.Net.Misc/Misc/HighFreqTerms.cs | 2 +- .../Query/SpatialArgsParser.cs | 2 +- .../Search/ShardSearchingTestBase.cs | 2 +- src/Lucene.Net.TestFramework/Util/TestUtil.cs | 5 ++- .../Lucene45/Lucene45DocValuesConsumer.cs | 2 +- src/Lucene.Net/Index/FlushPolicy.cs | 4 +- src/Lucene.Net/Store/LockStressTest.cs | 4 +- src/Lucene.Net/Store/LockVerifyServer.cs | 6 +-- src/Lucene.Net/Util/Constants.cs | 5 ++- src/Lucene.Net/Util/Fst/FST.cs | 2 +- 24 files changed, 63 insertions(+), 52 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs index c77fd262cb..ceeb7eb532 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs @@ -295,7 +295,7 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder) [""] = 0 }; - var reader = new StreamReader(affixStream, decoder); + using var reader = new StreamReader(affixStream, decoder); // LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed string line; // LUCENENET: Removed unnecessary null assignment int lineNumber = 0; while ((line = reader.ReadLine()) != null) @@ -910,7 +910,7 @@ private void ReadDictionaryFiles(IList dictionaries, Encoding decoder, B { foreach (Stream dictionary in dictionaries) { - var lines = new StreamReader(dictionary, decoder); + using var lines = new StreamReader(dictionary, decoder); // LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed string line = lines.ReadLine(); // first line is number of entries (approximately, sometimes) while ((line = lines.ReadLine()) != null) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs index 4f3f66adee..c319c7d79c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs @@ -348,6 +348,9 @@ protected Parser(bool dedup, Analyzer analyzer) // LUCENENET: CA1012: Abstract t /// /// Parse the given input, adding synonyms to the inherited . /// The input to parse + /// + /// LUCENENET NOTE: Implementations are expected to dispose of the parameter. 
+ /// public abstract void Parse(TextReader @in); /// diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs index 13c530afd6..f43c79190c 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs @@ -101,8 +101,8 @@ protected BinaryDictionary() ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value assignment using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX)) + using (var @in = new InputStreamDataInput(mapIS)) // LUCENENET: CA2000: Use using statement { - DataInput @in = new InputStreamDataInput(mapIS); CodecUtil.CheckHeader(@in, TARGETMAP_HEADER, VERSION, VERSION); targetMap = new int[@in.ReadVInt32()]; targetMapOffsets = new int[@in.ReadVInt32()]; @@ -124,8 +124,8 @@ protected BinaryDictionary() } using (Stream posIS = GetResource(POSDICT_FILENAME_SUFFIX)) + using (var @in = new InputStreamDataInput(posIS)) // LUCENENET: CA2000: Use using statement { - DataInput @in = new InputStreamDataInput(posIS); CodecUtil.CheckHeader(@in, POSDICT_HEADER, VERSION, VERSION); int posSize = @in.ReadVInt32(); posDict = new string[posSize]; @@ -151,9 +151,9 @@ protected BinaryDictionary() ByteBuffer tmpBuffer; using (Stream dictIS = GetResource(DICT_FILENAME_SUFFIX)) + // no buffering here, as we load in one large buffer + using (var @in = new InputStreamDataInput(dictIS)) // LUCENENET: CA2000: Use using statement { - // no buffering here, as we load in one large buffer - DataInput @in = new InputStreamDataInput(dictIS); CodecUtil.CheckHeader(@in, DICT_HEADER, VERSION, VERSION); int size = @in.ReadVInt32(); tmpBuffer = ByteBuffer.Allocate(size); // AllocateDirect..? diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs index 5288cd03d4..6c32db4847 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs @@ -61,7 +61,7 @@ private enum CharacterClass : byte private CharacterDefinition() { using Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX); - DataInput @in = new InputStreamDataInput(@is); + using var @in = new InputStreamDataInput(@is); // LUCENENET: CA2000: Use using statement CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length); for (int i = 0; i < CLASS_COUNT; i++) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs index 9065f1228a..af4d61bba3 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs @@ -39,8 +39,8 @@ private ConnectionCosts() short[][] costs = null; using (Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX)) + using (var @in = new InputStreamDataInput(@is)) // LUCENENET: CA2000: Use using statement { - DataInput @in = new InputStreamDataInput(@is); CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); int forwardSize = @in.ReadVInt32(); int backwardSize = @in.ReadVInt32(); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs index 2c7078b3a7..2d9221bf60 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs +++ 
b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs @@ -298,7 +298,7 @@ protected virtual void WriteTargetMap(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - DataOutput @out = new OutputStreamDataOutput(os); + using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION); int numSourceIds = lastSourceId + 1; @@ -328,7 +328,7 @@ protected virtual void WritePosDict(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - DataOutput @out = new OutputStreamDataOutput(os); + using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION); @out.WriteVInt32(posDict.Count); foreach (string s in posDict) @@ -355,7 +355,7 @@ protected virtual void WriteDictionary(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - DataOutput @out = new OutputStreamDataOutput(os); + using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION); @out.WriteVInt32(m_buffer.Position); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs index d963d1252e..bf21ccde92 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs @@ -80,7 +80,7 @@ public void Write(string baseDir) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - DataOutput @out = new OutputStreamDataOutput(os); + using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION); @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length); for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs index d15ddb9aef..0e2a218b8d 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs @@ -31,7 +31,7 @@ public static class ConnectionCostsBuilder // LUCENENET specific: CA1052 Static public static ConnectionCostsWriter Build(string filename) { using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read); - StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII); + using StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII); // LUCENENET: CA2000: Use using statement string line = streamReader.ReadLine(); string[] dimensions = 
whiteSpaceRegex.Split(line).TrimEnd(); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs index b6c6c1bd5c..aa80a75fa4 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs @@ -54,11 +54,11 @@ public void Write(string baseDir) // LUCENENET specific: we don't need to do a "classpath" output directory, since we // are changing the implementation to read files dynamically instead of making the // user recompile with the new files. - string filename = System.IO.Path.Combine(baseDir, typeof(ConnectionCosts).Name + CharacterDefinition.FILENAME_SUFFIX); + string filename = System.IO.Path.Combine(baseDir, nameof(ConnectionCosts) + CharacterDefinition.FILENAME_SUFFIX); //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - DataOutput @out = new OutputStreamDataOutput(os); + using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION); @out.WriteVInt32(forwardSize); @out.WriteVInt32(backwardSize); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs index 3ba3ac3a5b..6f85b44c20 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs @@ -72,7 +72,7 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList csvFiles) { using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read); Encoding decoder = Encoding.GetEncoding(encoding); - TextReader reader = new StreamReader(inputStream, decoder); + using TextReader reader = new StreamReader(inputStream, decoder); // LUCENENET: CA2000: Use using statement string line = null; while ((line = reader.ReadLine()) != null) diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs index c3f19ac735..b58e645113 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -401,7 +401,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS Int32ArrayWriter scratch = new Int32ArrayWriter(); // Used for payloads, if any: - RAMOutputStream ros = new RAMOutputStream(); + using RAMOutputStream ros = new RAMOutputStream(); // LUCENENET specific - dispose when done // if (DEBUG) { // System.out.println("\nLOAD terms seg=" + state.segmentInfo.name + " field=" + field + " hasOffsets=" + hasOffsets + " hasFreq=" + hasFreq + " hasPos=" + hasPos + " hasPayloads=" + hasPayloads); diff --git a/src/Lucene.Net.Misc/Index/IndexSplitter.cs b/src/Lucene.Net.Misc/Index/IndexSplitter.cs index 26f51ed5f4..5a842805d5 100644 --- a/src/Lucene.Net.Misc/Index/IndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/IndexSplitter.cs @@ -167,7 +167,7 @@ public virtual void Remove(ICollection segs) // LUCENENET specific - cha public virtual void Split(DirectoryInfo destDir, ICollection segs) // LUCENENET specific - changed to ICollection to reduce copy operations { destDir.Create(); - FSDirectory destFSDir = FSDirectory.Open(destDir); + using FSDirectory destFSDir = 
FSDirectory.Open(destDir); // LUCENENET specific - CA2000: dispose of destFSDir when finished SegmentInfos destInfos = new SegmentInfos(); destInfos.Counter = Infos.Counter; foreach (string n in segs) diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs index 99910ec5ac..a0495b7e2e 100644 --- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs @@ -73,7 +73,7 @@ public virtual void Split(LuceneVersion version, IndexReader @in, Store.Director // wrap a potentially read-only input // this way we don't have to preserve original deletions because neither // deleteDocument(int) or undeleteAll() is applied to the wrapped input index. - FakeDeleteIndexReader input = new FakeDeleteIndexReader(@in); + using FakeDeleteIndexReader input = new FakeDeleteIndexReader(@in); // LUCENENET: CA2000: Dispose FakeDeleteIndexReader int maxDoc = input.MaxDoc; int partLen = maxDoc / numParts; for (int i = 0; i < numParts; i++) diff --git a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs index cd682a7b14..a5caf8d698 100644 --- a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs +++ b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs @@ -50,31 +50,37 @@ public static class GetTermInfo // LUCENENET specific: CA1052 Static holder type /// Thrown if the incorrect number of arguments are provided public static void Main(string[] args) { - - FSDirectory dir; // LUCENENET: IDE0059: Remove unnecessary value assignment - string inputStr; // LUCENENET: IDE0059: Remove unnecessary value assignment - string field; // LUCENENET: IDE0059: Remove unnecessary value assignment - - if (args.Length == 3) + // LUCENENET specific - CA2000: dispose of directory when finished + FSDirectory dir = null; + try { - dir = FSDirectory.Open(new DirectoryInfo(args[0])); - field = args[1]; - inputStr = args[2]; + string inputStr; // LUCENENET: IDE0059: Remove unnecessary value assignment + string field; // LUCENENET: IDE0059: Remove unnecessary value assignment + if (args.Length == 3) + { + dir = FSDirectory.Open(new DirectoryInfo(args[0])); + field = args[1]; + inputStr = args[2]; + } + else + { + // LUCENENET specific - our wrapper console shows the correct usage + throw new ArgumentException("GetTermInfo requires 3 arguments", nameof(args)); + //Usage(); + //Environment.Exit(1); + } + + TermInfo(dir, new Term(field, inputStr)); } - else + finally { - // LUCENENET specific - our wrapper console shows the correct usage - throw new ArgumentException("GetTermInfo requires 3 arguments", nameof(args)); - //Usage(); - //Environment.Exit(1); + dir?.Dispose(); } - - TermInfo(dir, new Term(field, inputStr)); } public static void TermInfo(Store.Directory dir, Term term) { - IndexReader reader = DirectoryReader.Open(dir); + using IndexReader reader = DirectoryReader.Open(dir); Console.WriteLine("{0}:{1} \t totalTF = {2:#,##0} \t doc freq = {3:#,##0} \n", term.Field, term.Text, reader.TotalTermFreq(term), reader.DocFreq(term)); } diff --git a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs index 133f7fe82b..afc30f40c4 100644 --- a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs +++ b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs @@ -73,7 +73,7 @@ public static void Main(string[] args) //Environment.Exit(1); } - Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[0])); + using Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[0])); IComparer comparer = 
DocFreqComparer.Default; diff --git a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs index 42e16c7fd6..b423ed4830 100644 --- a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs +++ b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs @@ -181,7 +181,7 @@ protected static IDictionary ParseMap(string body) throw new ArgumentNullException(nameof(body)); var map = new Dictionary(); - StringTokenizer st = new StringTokenizer(body, " \n\t"); + using StringTokenizer st = new StringTokenizer(body, " \n\t"); while (st.MoveNext()) { diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs index 72986281bd..6ce386b1ef 100644 --- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs +++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs @@ -704,7 +704,7 @@ public override void Run() { try { - LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues); + using LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues); int numDocs = 0; while (J2N.Time.NanoTime() < outerInstance.endTimeNanos) { diff --git a/src/Lucene.Net.TestFramework/Util/TestUtil.cs b/src/Lucene.Net.TestFramework/Util/TestUtil.cs index 1c4bd1b6a0..dffaf2c7de 100644 --- a/src/Lucene.Net.TestFramework/Util/TestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/TestUtil.cs @@ -201,8 +201,9 @@ public static void CheckReader(IndexReader reader) public static void CheckReader(AtomicReader reader, bool crossCheckTermVectors) { - ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); - StreamWriter infoStream = new StreamWriter(bos, Encoding.UTF8); + // LUCENENET: dispose the StreamWriter and ByteArrayOutputStream when done + using ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); + using StreamWriter infoStream = new StreamWriter(bos, Encoding.UTF8); reader.CheckIntegrity(); CheckIndex.Status.FieldNormStatus fieldNormStatus = Index.CheckIndex.TestFieldNorms(reader, infoStream); diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs index d4a68a4e67..2351e4aa25 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs @@ -384,7 +384,7 @@ protected virtual void AddTermsDict(FieldInfo field, IEnumerable value long startFP = data.Position; // LUCENENET specific: Renamed from getFilePointer() to match FileStream // currently, we have to store the delta from expected for every 1/nth term // we could avoid this, but its not much and less overall RAM than the previous approach! 
- RAMOutputStream addressBuffer = new RAMOutputStream(); + using RAMOutputStream addressBuffer = new RAMOutputStream(); MonotonicBlockPackedWriter termAddresses = new MonotonicBlockPackedWriter(addressBuffer, BLOCK_SIZE); BytesRef lastTerm = new BytesRef(); long count = 0; diff --git a/src/Lucene.Net/Index/FlushPolicy.cs b/src/Lucene.Net/Index/FlushPolicy.cs index 51280b06ff..69b45c3c6c 100644 --- a/src/Lucene.Net/Index/FlushPolicy.cs +++ b/src/Lucene.Net/Index/FlushPolicy.cs @@ -121,7 +121,7 @@ protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushCo // the dwpt which needs to be flushed eventually ThreadState maxRamUsingThreadState = perThreadState; if (Debugging.AssertsEnabled) Debugging.Assert(!perThreadState.flushPending, "DWPT should have flushed"); - IEnumerator activePerThreadsIterator = control.AllActiveThreadStates(); + using IEnumerator activePerThreadsIterator = control.AllActiveThreadStates(); while (activePerThreadsIterator.MoveNext()) { ThreadState next = activePerThreadsIterator.Current; @@ -158,4 +158,4 @@ public virtual object Clone() return clone; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Store/LockStressTest.cs b/src/Lucene.Net/Store/LockStressTest.cs index 2614a64340..485a2f190d 100644 --- a/src/Lucene.Net/Store/LockStressTest.cs +++ b/src/Lucene.Net/Store/LockStressTest.cs @@ -144,8 +144,8 @@ public static void Main(string[] args) socket.Connect(verifierHost, verifierPort); using Stream stream = new NetworkStream(socket); - BinaryReader intReader = new BinaryReader(stream); - BinaryWriter intWriter = new BinaryWriter(stream); + using BinaryReader intReader = new BinaryReader(stream); + using BinaryWriter intWriter = new BinaryWriter(stream); intWriter.Write(myID); stream.Flush(); diff --git a/src/Lucene.Net/Store/LockVerifyServer.cs b/src/Lucene.Net/Store/LockVerifyServer.cs index cf087d7543..8b79e51259 100644 --- a/src/Lucene.Net/Store/LockVerifyServer.cs +++ b/src/Lucene.Net/Store/LockVerifyServer.cs @@ -90,7 +90,7 @@ public static void Main(string[] args) object localLock = new object(); int[] lockedID = new int[1]; lockedID[0] = -1; - CountdownEvent startingGun = new CountdownEvent(1); + using CountdownEvent startingGun = new CountdownEvent(1); // LUCENENET specific - dispose when finished ThreadJob[] threads = new ThreadJob[maxClients]; for (int count = 0; count < maxClients; count++) @@ -134,8 +134,8 @@ public ThreadAnonymousClass(object localLock, int[] lockedID, CountdownEvent sta public override void Run() { using Stream stream = new NetworkStream(cs); - BinaryReader intReader = new BinaryReader(stream); - BinaryWriter intWriter = new BinaryWriter(stream); + using BinaryReader intReader = new BinaryReader(stream); + using BinaryWriter intWriter = new BinaryWriter(stream); try { int id = intReader.ReadInt32(); diff --git a/src/Lucene.Net/Util/Constants.cs b/src/Lucene.Net/Util/Constants.cs index a49694c944..cd30a3e487 100644 --- a/src/Lucene.Net/Util/Constants.cs +++ b/src/Lucene.Net/Util/Constants.cs @@ -178,9 +178,10 @@ private static string GetFramework45PlusFromRegistry() { const string subkey = @"SOFTWARE\Microsoft\NET Framework Setup\NDP\v4\Full\"; - // As an alternative, if you know the computers you will query are running .NET Framework 4.5 + // As an alternative, if you know the computers you will query are running .NET Framework 4.5 // or later, you can use: - using RegistryKey ndpKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32).OpenSubKey(subkey); + using RegistryKey 
baseKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32); + using RegistryKey ndpKey = baseKey.OpenSubKey(subkey); object releaseValue; if (ndpKey != null && (releaseValue = ndpKey.GetValue("Release")) != null) { diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index c10f55aefa..d3b85e8734 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -511,7 +511,7 @@ public void Save(DataOutput @out) @out.WriteByte(1); // Serialize empty-string output: - var ros = new RAMOutputStream(); + using var ros = new RAMOutputStream(); Outputs.WriteFinalOutput(emptyOutput, ros); var emptyOutputBytes = new byte[(int)ros.Position]; // LUCENENET specific: Renamed from getFilePointer() to match FileStream From 6121f1cb1eefbe6eca8105e23a95356af1f1799d Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Sun, 22 Dec 2024 09:50:29 -0700 Subject: [PATCH 2/7] Fix file handle leaks in demo code, #615 --- src/Lucene.Net.Demo/SearchFiles.cs | 71 ++++++++++--------- .../Suggest/Fst/LargeInputFST.cs | 31 ++++---- 2 files changed, 55 insertions(+), 47 deletions(-) diff --git a/src/Lucene.Net.Demo/SearchFiles.cs b/src/Lucene.Net.Demo/SearchFiles.cs index b001396bfe..01e71eed28 100644 --- a/src/Lucene.Net.Demo/SearchFiles.cs +++ b/src/Lucene.Net.Demo/SearchFiles.cs @@ -112,57 +112,64 @@ public static void Main(string[] args) // :Post-Release-Update-Version.LUCENE_XY: Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48); + FileStream fileStream = null; // LUCENENET specific - keep track of the FileStream so we can dispose of it TextReader input = null; if (queries != null) { - input = new StreamReader(new FileStream(queries, FileMode.Open, FileAccess.Read), Encoding.UTF8); + fileStream = new FileStream(queries, FileMode.Open, FileAccess.Read); + input = new StreamReader(fileStream, Encoding.UTF8); } else { input = Console.In; } - // :Post-Release-Update-Version.LUCENE_XY: - QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer); - while (true) + + using (fileStream) // LUCENENET specific - dispose of the FileStream when we are done with it { - if (queries is null && queryString is null) + // :Post-Release-Update-Version.LUCENE_XY: + QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer); + while (true) { - // prompt the user - Console.WriteLine("Enter query (or press Enter to exit): "); - } + if (queries is null && queryString is null) + { + // prompt the user + Console.WriteLine("Enter query (or press Enter to exit): "); + } - string line = queryString ?? input.ReadLine(); + string line = queryString ?? 
input.ReadLine(); - if (line is null || line.Length == 0) - { - break; - } + if (line is null || line.Length == 0) + { + break; + } - line = line.Trim(); - if (line.Length == 0) - { - break; - } + line = line.Trim(); + if (line.Length == 0) + { + break; + } - Query query = parser.Parse(line); - Console.WriteLine("Searching for: " + query.ToString(field)); + Query query = parser.Parse(line); + Console.WriteLine("Searching for: " + query.ToString(field)); - if (repeat > 0) // repeat & time as benchmark - { - DateTime start = DateTime.UtcNow; - for (int i = 0; i < repeat; i++) + if (repeat > 0) // repeat & time as benchmark { - searcher.Search(query, null, 100); + DateTime start = DateTime.UtcNow; + for (int i = 0; i < repeat; i++) + { + searcher.Search(query, null, 100); + } + + DateTime end = DateTime.UtcNow; + Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms"); } - DateTime end = DateTime.UtcNow; - Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms"); - } - DoPagingSearch(searcher, query, hitsPerPage, raw, queries is null && queryString is null); + DoPagingSearch(searcher, query, hitsPerPage, raw, queries is null && queryString is null); - if (queryString != null) - { - break; + if (queryString != null) + { + break; + } } } } diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs index aedaa40d17..98af153937 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs @@ -32,30 +32,31 @@ public static class LargeInputFST // LUCENENET specific: CA1052 Static holder ty // LUCENENET specific - renaming from Main() because we must only have 1 entry point. // Not sure why this utility is in a test project anyway - this seems like something that should // be in Lucene.Net.Suggest so we can put it into the lucene-cli tool. 
- public static void Main2(string[] args) + public static void Main2(string[] args) { FileInfo input = new FileInfo("/home/dweiss/tmp/shuffled.dict"); - int buckets = 20; - int shareMaxTail = 10; + const int buckets = 20; + const int shareMaxTail = 10; ExternalRefSorter sorter = new ExternalRefSorter(new OfflineSorter()); FSTCompletionBuilder builder = new FSTCompletionBuilder(buckets, sorter, shareMaxTail); - TextReader reader = - new StreamReader( - new FileStream(input.FullName, FileMode.Open), Encoding.UTF8); - - BytesRef scratch = new BytesRef(); - string line; - int count = 0; - while ((line = reader.ReadLine()) != null) + // LUCENENET specific - dispose of fileStream and reader when done + using (FileStream fileStream = new FileStream(input.FullName, FileMode.Open)) + using (TextReader reader = new StreamReader(fileStream, Encoding.UTF8)) { - scratch.CopyChars(line); - builder.Add(scratch, count % buckets); - if ((count++ % 100000) == 0) + BytesRef scratch = new BytesRef(); + string line; + int count = 0; + while ((line = reader.ReadLine()) != null) { - Console.WriteLine("Line: " + count); + scratch.CopyChars(line); + builder.Add(scratch, count % buckets); + if ((count++ % 100000) == 0) + { + Console.WriteLine("Line: " + count); + } } } From 760d01d09ff4d3b50adb43d0ba298a07bc767785 Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Sun, 22 Dec 2024 10:22:19 -0700 Subject: [PATCH 3/7] Remove using from LineFileDocs use --- src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs index 6ce386b1ef..72986281bd 100644 --- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs +++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs @@ -704,7 +704,7 @@ public override void Run() { try { - using LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues); + LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues); int numDocs = 0; while (J2N.Time.NanoTime() < outerInstance.endTimeNanos) { From ef239954b371468ce6461b76e42dd6e84b95f79f Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Tue, 24 Dec 2024 20:41:56 -0700 Subject: [PATCH 4/7] Add leaveOpen parameter to InputStreamDataInput, #265 --- .../Dict/BinaryDictionary.cs | 6 ++-- .../Dict/CharacterDefinition.cs | 2 +- .../Dict/ConnectionCosts.cs | 2 +- .../Directory/DirectoryTaxonomyWriter.cs | 5 ++-- src/Lucene.Net/Store/InputStreamDataInput.cs | 29 +++++++++++++++++-- 5 files changed, 34 insertions(+), 10 deletions(-) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs index f43c79190c..c4b6b77eb1 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs @@ -101,7 +101,7 @@ protected BinaryDictionary() ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value assignment using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX)) - using (var @in = new InputStreamDataInput(mapIS)) // LUCENENET: CA2000: Use using statement + using (var @in = new InputStreamDataInput(mapIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement { CodecUtil.CheckHeader(@in, TARGETMAP_HEADER, VERSION, VERSION); targetMap = new int[@in.ReadVInt32()]; @@ -124,7 +124,7 @@ protected BinaryDictionary() } using (Stream posIS = 
GetResource(POSDICT_FILENAME_SUFFIX)) - using (var @in = new InputStreamDataInput(posIS)) // LUCENENET: CA2000: Use using statement + using (var @in = new InputStreamDataInput(posIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement { CodecUtil.CheckHeader(@in, POSDICT_HEADER, VERSION, VERSION); int posSize = @in.ReadVInt32(); @@ -152,7 +152,7 @@ protected BinaryDictionary() using (Stream dictIS = GetResource(DICT_FILENAME_SUFFIX)) // no buffering here, as we load in one large buffer - using (var @in = new InputStreamDataInput(dictIS)) // LUCENENET: CA2000: Use using statement + using (var @in = new InputStreamDataInput(dictIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement { CodecUtil.CheckHeader(@in, DICT_HEADER, VERSION, VERSION); int size = @in.ReadVInt32(); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs index 6c32db4847..c9fccc184c 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs @@ -61,7 +61,7 @@ private enum CharacterClass : byte private CharacterDefinition() { using Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX); - using var @in = new InputStreamDataInput(@is); // LUCENENET: CA2000: Use using statement + using var @in = new InputStreamDataInput(@is, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length); for (int i = 0; i < CLASS_COUNT; i++) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs index af4d61bba3..3095f39830 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs @@ -39,7 +39,7 @@ private ConnectionCosts() short[][] costs = null; using (Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX)) - using (var @in = new InputStreamDataInput(@is)) // LUCENENET: CA2000: Use using statement + using (var @in = new InputStreamDataInput(@is, leaveOpen: true)) // LUCENENET: CA2000: Use using statement { CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); int forwardSize = @in.ReadVInt32(); diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs index 857a30b96f..8007532ad8 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs @@ -1170,8 +1170,9 @@ public int[] GetMap() } AddDone(); // in case this wasn't previously called - var ifs = new FileStream(tmpfile, FileMode.OpenOrCreate, FileAccess.Read); - using (var @in = new InputStreamDataInput(ifs)) + // LUCENENET specific - dispose of resources + using (var ifs = new FileStream(tmpfile, FileMode.OpenOrCreate, FileAccess.Read)) + using (var @in = new InputStreamDataInput(ifs, leaveOpen: true)) { map = new int[@in.ReadInt32()]; // NOTE: The current code assumes here that the map is complete, diff --git a/src/Lucene.Net/Store/InputStreamDataInput.cs b/src/Lucene.Net/Store/InputStreamDataInput.cs index 8b450561b6..0a31c45409 100644 --- a/src/Lucene.Net/Store/InputStreamDataInput.cs +++ b/src/Lucene.Net/Store/InputStreamDataInput.cs @@ -27,13 +27,33 @@ namespace Lucene.Net.Store public class InputStreamDataInput : DataInput, 
IDisposable { private readonly Stream _is; - private int disposed = 0; // LUCENENET specific - allow double-dispose + private int disposed; // LUCENENET specific - allow double-dispose + private readonly bool leaveOpen; // LUCENENET specific - added to allow the stream to be left open + /// + /// Initializes a new instance of with the specified (input stream). + /// + /// The input stream to read from. + /// If is null. public InputStreamDataInput(Stream @is) { this._is = @is ?? throw new ArgumentNullException(nameof(@is)); // LUCENENET specific - added null guard clause } + /// + /// + /// Initializes a new instance of with the specified (input stream) and flag. + /// + /// If true, the stream will not be disposed when this instance is disposed. + /// + /// LUCENENET specific - added to allow the stream to be left open. + /// + public InputStreamDataInput(Stream @is, bool leaveOpen) + : this(@is) + { + this.leaveOpen = leaveOpen; + } + public override byte ReadByte() { int v = _is.ReadByte(); @@ -71,8 +91,11 @@ protected virtual void Dispose(bool disposing) if (disposing) { - _is.Dispose(); + if (!leaveOpen) + { + _is.Dispose(); + } } } } -} \ No newline at end of file +} From 86e18f0178a94c153e353df5c5150aaa435d707a Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Wed, 25 Dec 2024 16:02:57 -0700 Subject: [PATCH 5/7] Add leaveOpen parameter to OutputStreamDataOutput, #265 --- .../Tools/BinaryDictionaryWriter.cs | 6 ++-- .../Tools/CharacterDefinitionWriter.cs | 2 +- .../Tools/ConnectionCostsWriter.cs | 2 +- src/Lucene.Net.Suggest/Suggest/Lookup.cs | 4 +-- .../Store/OutputStreamDataOutput.cs | 29 +++++++++++++++++-- 5 files changed, 33 insertions(+), 10 deletions(-) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs index 2d9221bf60..16d2b5634c 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs @@ -298,7 +298,7 @@ protected virtual void WriteTargetMap(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement + using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION); int numSourceIds = lastSourceId + 1; @@ -328,7 +328,7 @@ protected virtual void WritePosDict(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement + using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION); @out.WriteVInt32(posDict.Count); foreach (string s in posDict) @@ -355,7 +355,7 @@ protected virtual void WriteDictionary(string filename) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - using var 
@out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement + using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION); @out.WriteVInt32(m_buffer.Position); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs index bf21ccde92..140349113d 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs @@ -80,7 +80,7 @@ public void Write(string baseDir) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement + using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION); @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length); for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs index aa80a75fa4..486b5b0503 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs @@ -58,7 +58,7 @@ public void Write(string baseDir) //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); - using var @out = new OutputStreamDataOutput(os); // LUCENENET: CA2000: Use using statement + using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION); @out.WriteVInt32(forwardSize); @out.WriteVInt32(backwardSize); diff --git a/src/Lucene.Net.Suggest/Suggest/Lookup.cs b/src/Lucene.Net.Suggest/Suggest/Lookup.cs index a644eece33..fb4969d2b6 100644 --- a/src/Lucene.Net.Suggest/Suggest/Lookup.cs +++ b/src/Lucene.Net.Suggest/Suggest/Lookup.cs @@ -274,14 +274,14 @@ public virtual bool Load(Stream input) /// public virtual bool Store(Stream output) { - DataOutput dataOut = new OutputStreamDataOutput(output); + var dataOut = new OutputStreamDataOutput(output, leaveOpen: true); try { return Store(dataOut); } finally { - IOUtils.Dispose(output); + IOUtils.Dispose(dataOut, output); // LUCENENET specific - dispose of dataOut } } diff --git a/src/Lucene.Net/Store/OutputStreamDataOutput.cs b/src/Lucene.Net/Store/OutputStreamDataOutput.cs index 7a96de6a83..6949ea4810 100644 --- a/src/Lucene.Net/Store/OutputStreamDataOutput.cs +++ b/src/Lucene.Net/Store/OutputStreamDataOutput.cs @@ -27,13 +27,33 @@ namespace Lucene.Net.Store public class OutputStreamDataOutput : DataOutput, IDisposable { private readonly Stream _os; - private int disposed = 0; // LUCENENET specific - allow double-dispose + private int disposed; // LUCENENET specific - allow double-dispose + private readonly bool leaveOpen; // LUCENENET specific - added to allow the stream to be left open + /// + /// Initializes a new instance of with the specified 
(output stream). + /// + /// The output stream to write to. + /// If is null. public OutputStreamDataOutput(Stream os) { this._os = os ?? throw new ArgumentNullException(nameof(os)); // LUCENENET specific - added null guard clause } + /// + /// + /// Initializes a new instance of with the specified (output stream) and flag. + /// + /// If true, the stream will not be disposed when this instance is disposed. + /// + /// LUCENENET specific - added to allow the stream to be left open. + /// + public OutputStreamDataOutput(Stream os, bool leaveOpen) + : this(os) + { + this.leaveOpen = leaveOpen; + } + public override void WriteByte(byte b) { _os.WriteByte(b); @@ -66,8 +86,11 @@ protected virtual void Dispose(bool disposing) if (disposing) { - _os.Dispose(); + if (!leaveOpen) + { + _os.Dispose(); + } } } } -} \ No newline at end of file +} From a450c37f9b17c184550a90e9f81916e524f8091f Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Wed, 25 Dec 2024 16:34:57 -0700 Subject: [PATCH 6/7] PR feedback --- .../Tools/ConnectionCostsBuilder.cs | 2 +- .../Tools/TokenInfoDictionaryBuilder.cs | 10 +++++----- src/Lucene.Net.TestFramework/Util/TestUtil.cs | 14 +++++++------- .../Suggest/Fst/LargeInputFST.cs | 2 +- src/Lucene.Net/Store/LockStressTest.cs | 5 +++-- src/Lucene.Net/Store/LockVerifyServer.cs | 5 +++-- 6 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs index 0e2a218b8d..5d5f1d4c7d 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs @@ -31,7 +31,7 @@ public static class ConnectionCostsBuilder // LUCENENET specific: CA1052 Static public static ConnectionCostsWriter Build(string filename) { using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read); - using StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII); // LUCENENET: CA2000: Use using statement + using StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true); // LUCENENET: CA2000: Use using statement string line = streamReader.ReadLine(); string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd(); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs index 6f85b44c20..7371829619 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs @@ -72,7 +72,7 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList csvFiles) { using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read); Encoding decoder = Encoding.GetEncoding(encoding); - using TextReader reader = new StreamReader(inputStream, decoder); // LUCENENET: CA2000: Use using statement + using TextReader reader = new StreamReader(inputStream, decoder, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true); // LUCENENET: CA2000: Use using statement string line = null; while ((line = reader.ReadLine()) != null) @@ -159,10 +159,10 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList csvFiles) return dictionary; } - + /// /// IPADIC features - /// + /// /// 0 - surface /// 1 - left cost /// 2 - right cost @@ -171,9 +171,9 @@ public virtual TokenInfoDictionaryWriter 
BuildDictionary(IList csvFiles) /// 10 - base form /// 11 - reading /// 12 - pronounciation - /// + /// /// UniDic features - /// + /// /// 0 - surface /// 1 - left cost /// 2 - right cost diff --git a/src/Lucene.Net.TestFramework/Util/TestUtil.cs b/src/Lucene.Net.TestFramework/Util/TestUtil.cs index dffaf2c7de..d270d069e0 100644 --- a/src/Lucene.Net.TestFramework/Util/TestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/TestUtil.cs @@ -99,7 +99,7 @@ private static ISet Rm(ISet unremoved, params Fi /// /// Convenience method unzipping into , cleaning up - /// first. + /// first. /// public static void Unzip(Stream zipFileStream, DirectoryInfo destDir) { @@ -180,7 +180,7 @@ public static CheckIndex.Status CheckIndex(Directory dir, bool crossCheckTermVec { if (LuceneTestCase.UseInfoStream) { - checker.FlushInfoStream(); + checker.FlushInfoStream(); Console.WriteLine(bos.ToString()); } return indexStatus; @@ -203,7 +203,7 @@ public static void CheckReader(AtomicReader reader, bool crossCheckTermVectors) { // LUCENENET: dispose the StreamWriter and ByteArrayOutputStream when done using ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); - using StreamWriter infoStream = new StreamWriter(bos, Encoding.UTF8); + using StreamWriter infoStream = new StreamWriter(bos, Encoding.UTF8, leaveOpen: true, bufferSize: 1024); reader.CheckIntegrity(); CheckIndex.Status.FieldNormStatus fieldNormStatus = Index.CheckIndex.TestFieldNorms(reader, infoStream); @@ -592,8 +592,8 @@ public static string GetDocValuesFormat(Codec codec, string field) public static bool FieldSupportsHugeBinaryDocValues(string field) { string dvFormat = GetDocValuesFormat(field); - if (dvFormat.Equals("Lucene40", StringComparison.Ordinal) - || dvFormat.Equals("Lucene42", StringComparison.Ordinal) + if (dvFormat.Equals("Lucene40", StringComparison.Ordinal) + || dvFormat.Equals("Lucene42", StringComparison.Ordinal) || dvFormat.Equals("Memory", StringComparison.Ordinal)) { return false; @@ -869,7 +869,7 @@ public static ICharSequence BytesToCharSequence(BytesRef @ref, Random random) /// Returns a valid (compiling) instance with random stuff inside. Be careful /// when applying random patterns to longer strings as certain types of patterns /// may explode into exponential times in backtracking implementations (such as Java's). 
- /// + /// public static Regex RandomRegex(Random random) // LUCENENET specific - renamed from RandomPattern() { return RandomizedTesting.Generators.RandomExtensions.NextRegex(random); // LUCENENET: Moved general random data generation to RandomizedTesting.Generators @@ -1060,4 +1060,4 @@ public static string RandomSubString(Random random, int wordLength, bool simple) '\u3000' }; } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs index 98af153937..b514f3ce96 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs @@ -44,7 +44,7 @@ public static void Main2(string[] args) // LUCENENET specific - dispose of fileStream and reader when done using (FileStream fileStream = new FileStream(input.FullName, FileMode.Open)) - using (TextReader reader = new StreamReader(fileStream, Encoding.UTF8)) + using (TextReader reader = new StreamReader(fileStream, Encoding.UTF8, leaveOpen: true)) { BytesRef scratch = new BytesRef(); string line; diff --git a/src/Lucene.Net/Store/LockStressTest.cs b/src/Lucene.Net/Store/LockStressTest.cs index 485a2f190d..1ef271052f 100644 --- a/src/Lucene.Net/Store/LockStressTest.cs +++ b/src/Lucene.Net/Store/LockStressTest.cs @@ -4,6 +4,7 @@ using System.IO; using System.Net; using System.Net.Sockets; +using System.Text; using System.Threading; using Console = Lucene.Net.Util.SystemConsole; @@ -144,8 +145,8 @@ public static void Main(string[] args) socket.Connect(verifierHost, verifierPort); using Stream stream = new NetworkStream(socket); - using BinaryReader intReader = new BinaryReader(stream); - using BinaryWriter intWriter = new BinaryWriter(stream); + using BinaryReader intReader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true); + using BinaryWriter intWriter = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true); intWriter.Write(myID); stream.Flush(); diff --git a/src/Lucene.Net/Store/LockVerifyServer.cs b/src/Lucene.Net/Store/LockVerifyServer.cs index 8b79e51259..876da8ebc9 100644 --- a/src/Lucene.Net/Store/LockVerifyServer.cs +++ b/src/Lucene.Net/Store/LockVerifyServer.cs @@ -6,6 +6,7 @@ using System.IO; using System.Net; using System.Net.Sockets; +using System.Text; using System.Threading; using Console = Lucene.Net.Util.SystemConsole; @@ -134,8 +135,8 @@ public ThreadAnonymousClass(object localLock, int[] lockedID, CountdownEvent sta public override void Run() { using Stream stream = new NetworkStream(cs); - using BinaryReader intReader = new BinaryReader(stream); - using BinaryWriter intWriter = new BinaryWriter(stream); + using BinaryReader intReader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true); + using BinaryWriter intWriter = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true); try { int id = intReader.ReadInt32(); From 98509e79b2e23cc5a1624cba99ebf7ee3d788305 Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Wed, 25 Dec 2024 16:39:27 -0700 Subject: [PATCH 7/7] Fix build error on .NET Framework --- src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs index b514f3ce96..19a6a45870 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs @@ -44,7 +44,7 @@ public static void 
Main2(string[] args) // LUCENENET specific - dispose of fileStream and reader when done using (FileStream fileStream = new FileStream(input.FullName, FileMode.Open)) - using (TextReader reader = new StreamReader(fileStream, Encoding.UTF8, leaveOpen: true)) + using (TextReader reader = new StreamReader(fileStream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true)) { BytesRef scratch = new BytesRef(); string line;
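
Usage note (appended after the patch, not part of any hunk above): a minimal sketch of how the new leaveOpen constructors introduced in this change are meant to be used, assuming only the InputStreamDataInput(Stream, bool) and OutputStreamDataOutput(Stream, bool) signatures and the WriteVInt32/ReadVInt32 calls that appear in the hunks. The file name and the value written are illustrative, not taken from the patch.

using System.IO;
using Lucene.Net.Store;

// Write through OutputStreamDataOutput while keeping ownership of the underlying stream.
using (FileStream os = new FileStream("example.bin", FileMode.Create, FileAccess.Write))
{
    // leaveOpen: true -- disposing the wrapper does not dispose the wrapped stream.
    using (var @out = new OutputStreamDataOutput(os, leaveOpen: true))
    {
        @out.WriteVInt32(42);
    }
    os.Flush(); // the stream is still valid here; the outer using block disposes it
}

// Read it back through InputStreamDataInput, again leaving the stream open.
using (FileStream @is = new FileStream("example.bin", FileMode.Open, FileAccess.Read))
using (var @in = new InputStreamDataInput(@is, leaveOpen: true))
{
    int value = @in.ReadVInt32(); // 42
}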