Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Dispose of some disposables, #265, #615 #1074

Merged
merged 7 commits into from
Dec 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder)
[""] = 0
};

var reader = new StreamReader(affixStream, decoder);
using var reader = new StreamReader(affixStream, decoder); // LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed
string line; // LUCENENET: Removed unnecessary null assignment
int lineNumber = 0;
while ((line = reader.ReadLine()) != null)
Expand Down Expand Up @@ -910,7 +910,7 @@ private void ReadDictionaryFiles(IList<Stream> dictionaries, Encoding decoder, B
{
foreach (Stream dictionary in dictionaries)
{
var lines = new StreamReader(dictionary, decoder);
using var lines = new StreamReader(dictionary, decoder); // LUCENENET specific - CA2000: Use using pattern to ensure reader is disposed
string line = lines.ReadLine(); // first line is number of entries (approximately, sometimes)

while ((line = lines.ReadLine()) != null)
Expand Down
3 changes: 3 additions & 0 deletions src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,9 @@ protected Parser(bool dedup, Analyzer analyzer) // LUCENENET: CA1012: Abstract t
/// <summary>
/// Parse the given input, adding synonyms to the inherited <see cref="Builder"/>. </summary>
/// <param name="in"> The input to parse </param>
/// <remarks>
/// LUCENENET NOTE: Implementations are expected to dispose of the <paramref name="in"/> parameter.
/// </remarks>
public abstract void Parse(TextReader @in);

/// <summary>
Expand Down
8 changes: 4 additions & 4 deletions src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,8 @@ protected BinaryDictionary()
ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value assignment

using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX))
using (var @in = new InputStreamDataInput(mapIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement
{
DataInput @in = new InputStreamDataInput(mapIS);
CodecUtil.CheckHeader(@in, TARGETMAP_HEADER, VERSION, VERSION);
targetMap = new int[@in.ReadVInt32()];
targetMapOffsets = new int[@in.ReadVInt32()];
Expand All @@ -124,8 +124,8 @@ protected BinaryDictionary()
}

using (Stream posIS = GetResource(POSDICT_FILENAME_SUFFIX))
using (var @in = new InputStreamDataInput(posIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement
{
DataInput @in = new InputStreamDataInput(posIS);
CodecUtil.CheckHeader(@in, POSDICT_HEADER, VERSION, VERSION);
int posSize = @in.ReadVInt32();
posDict = new string[posSize];
Expand All @@ -151,9 +151,9 @@ protected BinaryDictionary()
ByteBuffer tmpBuffer;

using (Stream dictIS = GetResource(DICT_FILENAME_SUFFIX))
// no buffering here, as we load in one large buffer
using (var @in = new InputStreamDataInput(dictIS, leaveOpen: true)) // LUCENENET: CA2000: Use using statement
{
paulirwin marked this conversation as resolved.
Show resolved Hide resolved
// no buffering here, as we load in one large buffer
DataInput @in = new InputStreamDataInput(dictIS);
CodecUtil.CheckHeader(@in, DICT_HEADER, VERSION, VERSION);
int size = @in.ReadVInt32();
tmpBuffer = ByteBuffer.Allocate(size); // AllocateDirect..?
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ private enum CharacterClass : byte
private CharacterDefinition()
{
using Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX);
DataInput @in = new InputStreamDataInput(@is);
using var @in = new InputStreamDataInput(@is, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
@in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length);
for (int i = 0; i < CLASS_COUNT; i++)
Expand Down
2 changes: 1 addition & 1 deletion src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@ private ConnectionCosts()
short[][] costs = null;

using (Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX))
using (var @in = new InputStreamDataInput(@is, leaveOpen: true)) // LUCENENET: CA2000: Use using statement
{
DataInput @in = new InputStreamDataInput(@is);
CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION);
int forwardSize = @in.ReadVInt32();
int backwardSize = @in.ReadVInt32();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ protected virtual void WriteTargetMap(string filename)
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
DataOutput @out = new OutputStreamDataOutput(os);
using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION);

int numSourceIds = lastSourceId + 1;
Expand Down Expand Up @@ -328,7 +328,7 @@ protected virtual void WritePosDict(string filename)
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
DataOutput @out = new OutputStreamDataOutput(os);
using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
@out.WriteVInt32(posDict.Count);
foreach (string s in posDict)
Expand All @@ -355,7 +355,7 @@ protected virtual void WriteDictionary(string filename)
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
DataOutput @out = new OutputStreamDataOutput(os);
using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION);
@out.WriteVInt32(m_buffer.Position);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ public void Write(string baseDir)
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir));
using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
DataOutput @out = new OutputStreamDataOutput(os);
using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
@out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length);
for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public static class ConnectionCostsBuilder // LUCENENET specific: CA1052 Static
public static ConnectionCostsWriter Build(string filename)
{
using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read);
StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII);
using StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true); // LUCENENET: CA2000: Use using statement

string line = streamReader.ReadLine();
string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,11 +54,11 @@ public void Write(string baseDir)
// LUCENENET specific: we don't need to do a "classpath" output directory, since we
// are changing the implementation to read files dynamically instead of making the
// user recompile with the new files.
string filename = System.IO.Path.Combine(baseDir, typeof(ConnectionCosts).Name + CharacterDefinition.FILENAME_SUFFIX);
string filename = System.IO.Path.Combine(baseDir, nameof(ConnectionCosts) + CharacterDefinition.FILENAME_SUFFIX);
paulirwin marked this conversation as resolved.
Show resolved Hide resolved
//new File(filename).getParentFile().mkdirs();
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write);
DataOutput @out = new OutputStreamDataOutput(os);
using var @out = new OutputStreamDataOutput(os, leaveOpen: true); // LUCENENET: CA2000: Use using statement
CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION);
@out.WriteVInt32(forwardSize);
@out.WriteVInt32(backwardSize);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList<string> csvFiles)
{
using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read);
Encoding decoder = Encoding.GetEncoding(encoding);
TextReader reader = new StreamReader(inputStream, decoder);
using TextReader reader = new StreamReader(inputStream, decoder, detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true); // LUCENENET: CA2000: Use using statement

string line = null;
while ((line = reader.ReadLine()) != null)
Expand Down Expand Up @@ -159,10 +159,10 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList<string> csvFiles)

return dictionary;
}

/// <summary>
/// IPADIC features
///
///
/// 0 - surface
/// 1 - left cost
/// 2 - right cost
Expand All @@ -171,9 +171,9 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList<string> csvFiles)
/// 10 - base form
/// 11 - reading
/// 12 - pronunciation
///
///
/// UniDic features
///
///
/// 0 - surface
/// 1 - left cost
/// 2 - right cost
Expand Down
2 changes: 1 addition & 1 deletion src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS
Int32ArrayWriter scratch = new Int32ArrayWriter();

// Used for payloads, if any:
RAMOutputStream ros = new RAMOutputStream();
using RAMOutputStream ros = new RAMOutputStream(); // LUCENENET specific - dispose when done

// if (DEBUG) {
// System.out.println("\nLOAD terms seg=" + state.segmentInfo.name + " field=" + field + " hasOffsets=" + hasOffsets + " hasFreq=" + hasFreq + " hasPos=" + hasPos + " hasPayloads=" + hasPayloads);
Expand Down
71 changes: 39 additions & 32 deletions src/Lucene.Net.Demo/SearchFiles.cs
Original file line number Diff line number Diff line change
Expand Up @@ -112,57 +112,64 @@ public static void Main(string[] args)
// :Post-Release-Update-Version.LUCENE_XY:
Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);

FileStream fileStream = null; // LUCENENET specific - keep track of the FileStream so we can dispose of it
TextReader input = null;
if (queries != null)
{
input = new StreamReader(new FileStream(queries, FileMode.Open, FileAccess.Read), Encoding.UTF8);
fileStream = new FileStream(queries, FileMode.Open, FileAccess.Read);
input = new StreamReader(fileStream, Encoding.UTF8);
}
else
{
input = Console.In;
}
// :Post-Release-Update-Version.LUCENE_XY:
QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer);
while (true)

using (fileStream) // LUCENENET specific - dispose of the FileStream when we are done with it
{
if (queries is null && queryString is null)
// :Post-Release-Update-Version.LUCENE_XY:
QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer);
while (true)
{
// prompt the user
Console.WriteLine("Enter query (or press Enter to exit): ");
}
if (queries is null && queryString is null)
{
// prompt the user
Console.WriteLine("Enter query (or press Enter to exit): ");
}

string line = queryString ?? input.ReadLine();
string line = queryString ?? input.ReadLine();

if (line is null || line.Length == 0)
{
break;
}
if (line is null || line.Length == 0)
{
break;
}

line = line.Trim();
if (line.Length == 0)
{
break;
}
line = line.Trim();
if (line.Length == 0)
{
break;
}

Query query = parser.Parse(line);
Console.WriteLine("Searching for: " + query.ToString(field));
Query query = parser.Parse(line);
Console.WriteLine("Searching for: " + query.ToString(field));

if (repeat > 0) // repeat & time as benchmark
{
DateTime start = DateTime.UtcNow;
for (int i = 0; i < repeat; i++)
if (repeat > 0) // repeat & time as benchmark
{
searcher.Search(query, null, 100);
DateTime start = DateTime.UtcNow;
for (int i = 0; i < repeat; i++)
{
searcher.Search(query, null, 100);
}

DateTime end = DateTime.UtcNow;
Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms");
}
DateTime end = DateTime.UtcNow;
Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms");
}

DoPagingSearch(searcher, query, hitsPerPage, raw, queries is null && queryString is null);
DoPagingSearch(searcher, query, hitsPerPage, raw, queries is null && queryString is null);

if (queryString != null)
{
break;
if (queryString != null)
{
break;
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1170,8 +1170,9 @@ public int[] GetMap()
}
AddDone(); // in case this wasn't previously called

var ifs = new FileStream(tmpfile, FileMode.OpenOrCreate, FileAccess.Read);
using (var @in = new InputStreamDataInput(ifs))
// LUCENENET specific - dispose of resources
using (var ifs = new FileStream(tmpfile, FileMode.OpenOrCreate, FileAccess.Read))
using (var @in = new InputStreamDataInput(ifs, leaveOpen: true))
{
map = new int[@in.ReadInt32()];
// NOTE: The current code assumes here that the map is complete,
Expand Down
2 changes: 1 addition & 1 deletion src/Lucene.Net.Misc/Index/IndexSplitter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ public virtual void Remove(ICollection<string> segs) // LUCENENET specific - cha
public virtual void Split(DirectoryInfo destDir, ICollection<string> segs) // LUCENENET specific - changed to ICollection to reduce copy operations
{
destDir.Create();
FSDirectory destFSDir = FSDirectory.Open(destDir);
using FSDirectory destFSDir = FSDirectory.Open(destDir); // LUCENENET specific - CA2000: dispose of destFSDir when finished
SegmentInfos destInfos = new SegmentInfos();
destInfos.Counter = Infos.Counter;
foreach (string n in segs)
Expand Down
2 changes: 1 addition & 1 deletion src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ public virtual void Split(LuceneVersion version, IndexReader @in, Store.Director
// wrap a potentially read-only input
// this way we don't have to preserve original deletions because neither
// deleteDocument(int) or undeleteAll() is applied to the wrapped input index.
FakeDeleteIndexReader input = new FakeDeleteIndexReader(@in);
using FakeDeleteIndexReader input = new FakeDeleteIndexReader(@in); // LUCENENET: CA2000: Dispose FakeDeleteIndexReader
int maxDoc = input.MaxDoc;
int partLen = maxDoc / numParts;
for (int i = 0; i < numParts; i++)
Expand Down
40 changes: 23 additions & 17 deletions src/Lucene.Net.Misc/Misc/GetTermInfo.cs
Original file line number Diff line number Diff line change
Expand Up @@ -50,31 +50,37 @@ public static class GetTermInfo // LUCENENET specific: CA1052 Static holder type
/// <exception cref="ArgumentException">Thrown if the incorrect number of arguments are provided</exception>
public static void Main(string[] args)
{

FSDirectory dir; // LUCENENET: IDE0059: Remove unnecessary value assignment
string inputStr; // LUCENENET: IDE0059: Remove unnecessary value assignment
string field; // LUCENENET: IDE0059: Remove unnecessary value assignment

if (args.Length == 3)
// LUCENENET specific - CA2000: dispose of directory when finished
FSDirectory dir = null;
try
{
dir = FSDirectory.Open(new DirectoryInfo(args[0]));
field = args[1];
inputStr = args[2];
string inputStr; // LUCENENET: IDE0059: Remove unnecessary value assignment
string field; // LUCENENET: IDE0059: Remove unnecessary value assignment
if (args.Length == 3)
{
dir = FSDirectory.Open(new DirectoryInfo(args[0]));
field = args[1];
inputStr = args[2];
}
else
{
// LUCENENET specific - our wrapper console shows the correct usage
throw new ArgumentException("GetTermInfo requires 3 arguments", nameof(args));
//Usage();
//Environment.Exit(1);
}

TermInfo(dir, new Term(field, inputStr));
}
else
finally
{
// LUCENENET specific - our wrapper console shows the correct usage
throw new ArgumentException("GetTermInfo requires 3 arguments", nameof(args));
//Usage();
//Environment.Exit(1);
dir?.Dispose();
}

TermInfo(dir, new Term(field, inputStr));
}

public static void TermInfo(Store.Directory dir, Term term)
{
IndexReader reader = DirectoryReader.Open(dir);
using IndexReader reader = DirectoryReader.Open(dir);
Console.WriteLine("{0}:{1} \t totalTF = {2:#,##0} \t doc freq = {3:#,##0} \n", term.Field, term.Text, reader.TotalTermFreq(term), reader.DocFreq(term));
}

Expand Down
2 changes: 1 addition & 1 deletion src/Lucene.Net.Misc/Misc/HighFreqTerms.cs
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ public static void Main(string[] args)
//Environment.Exit(1);
}

Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[0]));
using Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[0]));

IComparer<TermStats> comparer = DocFreqComparer.Default;

Expand Down
Loading
Loading