From 9b863cce6f8881364994859af2a5ed21fda5d899 Mon Sep 17 00:00:00 2001
From: Paul Irwin
Date: Fri, 20 Dec 2024 21:40:10 -0700
Subject: [PATCH] Replace SetOnce Get/Set with Value, #1069

---
 .../Index/MockRandomMergePolicy.cs            |  3 +-
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs |  6 +--
 src/Lucene.Net.Tests/Util/TestSetOnce.cs      | 16 +++---
 src/Lucene.Net/Index/IndexWriterConfig.cs     |  2 +-
 src/Lucene.Net/Index/LogMergePolicy.cs        | 42 ++++++++-------
 src/Lucene.Net/Index/MergePolicy.cs           |  7 +--
 src/Lucene.Net/Index/TieredMergePolicy.cs     | 49 +++++++++--------
 .../Index/UpgradeIndexMergePolicy.cs          |  4 +-
 src/Lucene.Net/Support/ObsoleteAPI/SetOnce.cs | 43 +++++++++++++++
 src/Lucene.Net/Util/SetOnce.cs                | 52 ++++++++++---------
 10 files changed, 138 insertions(+), 86 deletions(-)
 create mode 100644 src/Lucene.Net/Support/ObsoleteAPI/SetOnce.cs

diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
index ed20cb2a22..c9a205b25c 100644
--- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
+++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
@@ -47,7 +47,8 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment
             int numSegments/* = segmentInfos.Count*/; // LUCENENET: IDE0059: Remove unnecessary value assignment
 
             JCG.List<SegmentCommitInfo> segments = new JCG.List<SegmentCommitInfo>();
-            ICollection<SegmentCommitInfo> merging = base.m_writer.Get().MergingSegments;
+            ICollection<SegmentCommitInfo> merging = base.m_writer.Value?.MergingSegments
+                ?? throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null
 
             foreach (SegmentCommitInfo sipc in segmentInfos.Segments)
             {
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
index 7e8db747d5..bda15ce0fe 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -2877,7 +2877,7 @@ public virtual void TestMergeAllDeleted()
             SetOnce<IndexWriter> iwRef = new SetOnce<IndexWriter>();
             iwc.SetInfoStream(new TestPointInfoStream(iwc.InfoStream, new TestPointAnonymousClass(iwRef)));
             IndexWriter evilWriter = new IndexWriter(dir, iwc);
-            iwRef.Set(evilWriter);
+            iwRef.Value = evilWriter;
             for (int i = 0; i < 1000; i++)
             {
                 AddDoc(evilWriter);
@@ -2905,11 +2905,11 @@ public void Apply(string message)
             {
                 if ("startCommitMerge".Equals(message, StringComparison.Ordinal))
                 {
-                    iwRef.Get().KeepFullyDeletedSegments = false;
+                    iwRef.Value!.KeepFullyDeletedSegments = false;
                 }
                 else if ("startMergeInit".Equals(message, StringComparison.Ordinal))
                 {
-                    iwRef.Get().KeepFullyDeletedSegments = true;
+                    iwRef.Value!.KeepFullyDeletedSegments = true;
                 }
             }
         }
diff --git a/src/Lucene.Net.Tests/Util/TestSetOnce.cs b/src/Lucene.Net.Tests/Util/TestSetOnce.cs
index 4af4b812c2..a6d6b3d20b 100644
--- a/src/Lucene.Net.Tests/Util/TestSetOnce.cs
+++ b/src/Lucene.Net.Tests/Util/TestSetOnce.cs
@@ -56,7 +56,7 @@ public override void Run()
                 try
                 {
                     Sleep(RAND.Next(10)); // sleep for a short time
-                    set.Set(new Integer(Convert.ToInt32(Name.Substring(2), CultureInfo.InvariantCulture)));
+                    set.Value = new Integer(Convert.ToInt32(Name.Substring(2), CultureInfo.InvariantCulture));
                     success = true;
                 }
                 catch (Exception e) when (e.IsInterruptedException())
@@ -76,24 +76,24 @@ public override void Run()
         public virtual void TestEmptyCtor()
         {
             SetOnce<Integer> set = new SetOnce<Integer>();
-            Assert.IsNull(set.Get());
+            Assert.IsNull(set.Value);
         }
 
         [Test]
         public virtual void TestSettingCtor()
        {
             SetOnce<Integer> set = new SetOnce<Integer>(new Integer(5));
-
Assert.AreEqual(5, set.Get().value); - Assert.Throws(() => set.Set(new Integer(7))); + Assert.AreEqual(5, set.Value?.value); + Assert.Throws(() => set.Value = new Integer(7)); } [Test] public virtual void TestSetOnce_mem() { SetOnce set = new SetOnce(); - set.Set(new Integer(5)); - Assert.AreEqual(5, set.Get().value); - Assert.Throws(() => set.Set(new Integer(7))); + set.Value = new Integer(5); + Assert.AreEqual(5, set.Value.value); + Assert.Throws(() => set.Value = new Integer(7)); } [Test] @@ -124,7 +124,7 @@ public virtual void TestSetMultiThreaded() if (t.success) { int expectedVal = Convert.ToInt32(t.Name.Substring(2)); - Assert.AreEqual(expectedVal, t.set.Get().value, "thread " + t.Name); + Assert.AreEqual(expectedVal, t.set.Value?.value, "thread " + t.Name); } } } diff --git a/src/Lucene.Net/Index/IndexWriterConfig.cs b/src/Lucene.Net/Index/IndexWriterConfig.cs index 8ac9cdbe9f..6025b670b6 100644 --- a/src/Lucene.Net/Index/IndexWriterConfig.cs +++ b/src/Lucene.Net/Index/IndexWriterConfig.cs @@ -150,7 +150,7 @@ public static long DefaultWriteLockTimeout /// if this config is already attached to a writer. internal IndexWriterConfig SetIndexWriter(IndexWriter writer) { - this.writer.Set(writer); + this.writer.Value = writer; return this; } diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs b/src/Lucene.Net/Index/LogMergePolicy.cs index 8ed22752d9..3946e0f48f 100644 --- a/src/Lucene.Net/Index/LogMergePolicy.cs +++ b/src/Lucene.Net/Index/LogMergePolicy.cs @@ -61,8 +61,8 @@ public abstract class LogMergePolicy : MergePolicy public static readonly int DEFAULT_MERGE_FACTOR = 10; /// - /// Default maximum segment size. A segment of this size - /// or larger will never be merged. + /// Default maximum segment size. A segment of this size + /// or larger will never be merged. /// public static readonly int DEFAULT_MAX_MERGE_DOCS = int.MaxValue; @@ -125,7 +125,7 @@ protected virtual bool IsVerbose { get { - IndexWriter w = m_writer.Get(); + IndexWriter w = m_writer.Value; return w != null && w.infoStream.IsEnabled("LMP"); } } @@ -137,7 +137,7 @@ protected virtual void Message(string message) { if (IsVerbose) { - m_writer.Get().infoStream.Message("LMP", message); + m_writer.Value?.infoStream.Message("LMP", message); } } @@ -184,16 +184,17 @@ protected override void Dispose(bool disposing) } /// - /// Return the number of documents in the provided + /// Return the number of documents in the provided /// , pro-rated by percentage of - /// non-deleted documents if + /// non-deleted documents if /// is set. /// protected virtual long SizeDocs(SegmentCommitInfo info) { if (m_calibrateSizeByDeletes) { - int delCount = m_writer.Get().NumDeletedDocs(info); + int delCount = m_writer.Value?.NumDeletedDocs(info) + ?? throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount); return (info.Info.DocCount - (long)delCount); } @@ -204,9 +205,9 @@ protected virtual long SizeDocs(SegmentCommitInfo info) } /// - /// Return the byte size of the provided + /// Return the byte size of the provided /// , pro-rated by percentage of - /// non-deleted documents if + /// non-deleted documents if /// is set. 
/// protected virtual long SizeBytes(SegmentCommitInfo info) @@ -220,7 +221,7 @@ protected virtual long SizeBytes(SegmentCommitInfo info) /// /// Returns true if the number of segments eligible for - /// merging is less than or equal to the specified + /// merging is less than or equal to the specified /// . /// protected virtual bool IsMerged(SegmentInfos infos, int maxNumSegments, IDictionary segmentsToMerge) @@ -381,7 +382,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxN if (Debugging.AssertsEnabled) Debugging.Assert(maxNumSegments > 0); if (IsVerbose) { - Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + + Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", segmentsToMerge)); } @@ -468,7 +469,7 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentI var spec = new MergeSpecification(); int firstSegmentWithDeletions = -1; - IndexWriter w = m_writer.Get(); + IndexWriter w = m_writer.Value; if (Debugging.AssertsEnabled) Debugging.Assert(w != null); for (int i = 0; i < numSegments; i++) { @@ -545,10 +546,10 @@ public virtual int CompareTo(SegmentInfoAndLevel other) /// /// Checks if any merges are now necessary and returns a /// if so. A merge - /// is necessary when there are more than + /// is necessary when there are more than /// segments at a given level. When /// multiple levels have too many segments, this method - /// will return multiple merges, allowing the + /// will return multiple merges, allowing the /// to use concurrency. /// public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, SegmentInfos infos) @@ -564,7 +565,8 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment IList levels = new JCG.List(); var norm = (float)Math.Log(m_mergeFactor); - ICollection mergingSegments = m_writer.Get().MergingSegments; + ICollection mergingSegments = m_writer.Value?.MergingSegments + ?? throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null for (int i = 0; i < numSegments; i++) { @@ -588,7 +590,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment { extra += " [skip: too large]"; } - Message("seg=" + m_writer.Get().SegString(info) + " level=" + infoLevel.level + " size=" + String.Format(CultureInfo.InvariantCulture, "{0:0.00} MB", segBytes / 1024 / 1024.0) + extra); + Message("seg=" + m_writer.Value?.SegString(info) + " level=" + infoLevel.level + " size=" + string.Format(CultureInfo.InvariantCulture, "{0:0.00} MB", segBytes / 1024 / 1024.0) + extra); } } @@ -696,7 +698,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment } if (IsVerbose) { - Message(" add merge=" + m_writer.Get().SegString(mergeInfos) + " start=" + start + " end=" + end); + Message(" add merge=" + m_writer.Value?.SegString(mergeInfos) + " start=" + start + " end=" + end); } spec.Add(new OneMerge(mergeInfos)); } @@ -726,9 +728,9 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment /// /// The default value is . /// - /// The default merge policy + /// The default merge policy /// () also allows you to set this - /// limit by net size (in MB) of the segment, using + /// limit by net size (in MB) of the segment, using /// . 
/// public virtual int MaxMergeDocs @@ -752,4 +754,4 @@ public override string ToString() return sb.ToString(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs index 325eef7f2c..c697bbad9f 100644 --- a/src/Lucene.Net/Index/MergePolicy.cs +++ b/src/Lucene.Net/Index/MergePolicy.cs @@ -658,7 +658,7 @@ protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) /// public virtual void SetIndexWriter(IndexWriter writer) { - this.m_writer.Set(writer); + this.m_writer.Value = writer; } /// @@ -754,7 +754,8 @@ public virtual bool UseCompoundFile(SegmentInfos infos, SegmentCommitInfo merged protected virtual long Size(SegmentCommitInfo info) { long byteSize = info.GetSizeInBytes(); - int delCount = m_writer.Get().NumDeletedDocs(info); + int delCount = m_writer.Value?.NumDeletedDocs(info) + ?? throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null double delRatio = (info.Info.DocCount <= 0 ? 0.0f : ((float)delCount / (float)info.Info.DocCount)); if (Debugging.AssertsEnabled) Debugging.Assert(delRatio <= 1.0); return (info.Info.DocCount <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio))); @@ -767,7 +768,7 @@ protected virtual long Size(SegmentCommitInfo info) /// protected bool IsMerged(SegmentInfos infos, SegmentCommitInfo info) { - IndexWriter w = m_writer.Get(); + IndexWriter w = m_writer.Value; if (Debugging.AssertsEnabled) Debugging.Assert(w != null); bool hasDeletions = w.NumDeletedDocs(info) > 0; return !hasDeletions diff --git a/src/Lucene.Net/Index/TieredMergePolicy.cs b/src/Lucene.Net/Index/TieredMergePolicy.cs index a44f8da6cb..a989ab18d2 100644 --- a/src/Lucene.Net/Index/TieredMergePolicy.cs +++ b/src/Lucene.Net/Index/TieredMergePolicy.cs @@ -31,7 +31,7 @@ namespace Lucene.Net.Index /// an allowed number of segments per tier. This is similar /// to , except this merge /// policy is able to merge non-adjacent segment, and - /// separates how many segments are merged at once () + /// separates how many segments are merged at once () /// from how many segments are allowed /// per tier (). This merge /// policy also does not over-merge (i.e. cascade merges). @@ -100,8 +100,8 @@ public TieredMergePolicy() /// /// Gets or sets maximum number of segments to be merged at a time /// during "normal" merging. For explicit merging (eg, - /// or - /// was called), see + /// or + /// was called), see /// . Default is 10. /// public virtual int MaxMergeAtOnce @@ -122,7 +122,7 @@ public virtual int MaxMergeAtOnce /// /// Gets or sets maximum number of segments to be merged at a time, - /// during or + /// during or /// . Default is 30. /// public virtual int MaxMergeAtOnceExplicit @@ -223,11 +223,11 @@ public virtual double ForceMergeDeletesPctAllowed /// /// Gets or sets the allowed number of segments per tier. Smaller /// values mean more merging but fewer segments. - /// - /// NOTE: this value should be >= the + /// + /// NOTE: this value should be >= the /// otherwise you'll force too much /// merging to occur. - /// + /// /// Default is 10.0. /// public virtual double SegmentsPerTier @@ -315,7 +315,8 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment { return null; } - ICollection merging = m_writer.Get().MergingSegments; + ICollection merging = m_writer.Value?.MergingSegments + ?? 
throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null ICollection toBeMerged = new JCG.HashSet(); JCG.List infosSorted = new JCG.List(infos.AsList()); @@ -338,7 +339,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment { extra += " [floored]"; } - Message(" seg=" + m_writer.Get().SegString(info) + " size=" + string.Format("{0:0.000}", segBytes / 1024 / 1024.0) + " MB" + extra); + Message(" seg=" + m_writer.Value?.SegString(info) + " size=" + string.Format("{0:0.000}", segBytes / 1024 / 1024.0) + " MB" + extra); } minSegmentBytes = Math.Min(segBytes, minSegmentBytes); @@ -449,7 +450,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment MergeScore score = Score(candidate, hitTooLarge, mergingBytes); if (Verbose()) { - Message(" maybe=" + m_writer.Get().SegString(candidate) + " score=" + score.Score + " " + score.Explanation + " tooLarge=" + hitTooLarge + " size=" + string.Format("{0:0.000} MB", totAfterMergeBytes / 1024.0 / 1024.0)); + Message(" maybe=" + m_writer.Value?.SegString(candidate) + " score=" + score.Score + " " + score.Explanation + " tooLarge=" + hitTooLarge + " size=" + string.Format("{0:0.000} MB", totAfterMergeBytes / 1024.0 / 1024.0)); } // If we are already running a max sized merge @@ -479,7 +480,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment if (Verbose()) { - Message(" add merge=" + m_writer.Get().SegString(merge.Segments) + " size=" + string.Format("{0:0.000} MB", bestMergeBytes / 1024.0 / 1024.0) + " score=" + string.Format("{0:0.000}", bestScore.Score) + " " + bestScore.Explanation + (bestTooLarge ? " [max merge]" : "")); + Message(" add merge=" + m_writer.Value?.SegString(merge.Segments) + " size=" + string.Format("{0:0.000} MB", bestMergeBytes / 1024.0 / 1024.0) + " score=" + string.Format("{0:0.000}", bestScore.Score) + " " + bestScore.Explanation + (bestTooLarge ? " [max merge]" : "")); } } else @@ -570,14 +571,15 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxS { if (Verbose()) { - Message("FindForcedMerges maxSegmentCount=" + maxSegmentCount + - " infos=" + m_writer.Get().SegString(infos.Segments) + + Message("FindForcedMerges maxSegmentCount=" + maxSegmentCount + + " infos=" + m_writer.Value?.SegString(infos.Segments) + " segmentsToMerge=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", segmentsToMerge)); } JCG.List eligible = new JCG.List(); bool forceMergeRunning = false; - ICollection merging = m_writer.Get().MergingSegments; + ICollection merging = m_writer.Value?.MergingSegments + ?? 
throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null bool segmentIsOriginal = false; foreach (SegmentCommitInfo info in infos.Segments) { @@ -631,7 +633,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxS OneMerge merge = new OneMerge(eligible.GetView(end - maxMergeAtOnceExplicit, maxMergeAtOnceExplicit)); // LUCENENET: Converted end index to length if (Verbose()) { - Message("add merge=" + m_writer.Get().SegString(merge.Segments)); + Message("add merge=" + m_writer.Value?.SegString(merge.Segments)); } spec.Add(merge); end -= maxMergeAtOnceExplicit; @@ -644,7 +646,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxS OneMerge merge = new OneMerge(eligible.GetView(end - numToMerge, numToMerge)); // LUCENENET: Converted end index to length if (Verbose()) { - Message("add final merge=" + merge.SegString(m_writer.Get().Directory)); + Message("add final merge=" + merge.SegString(m_writer.Value?.Directory)); } spec = new MergeSpecification(); spec.Add(merge); @@ -657,13 +659,14 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos infos) { if (Verbose()) { - Message("findForcedDeletesMerges infos=" + m_writer.Get().SegString(infos.Segments) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed); + Message("findForcedDeletesMerges infos=" + m_writer.Value?.SegString(infos.Segments) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed); } JCG.List eligible = new JCG.List(); - ICollection merging = m_writer.Get().MergingSegments; + ICollection merging = m_writer.Value?.MergingSegments + ?? throw new InvalidOperationException("The writer has not been initialized"); // LUCENENET specific - throw exception if writer is null foreach (SegmentCommitInfo info in infos.Segments) { - double pctDeletes = 100.0 * ((double)m_writer.Get().NumDeletedDocs(info)) / info.Info.DocCount; + double pctDeletes = 100.0 * ((double)m_writer.Value.NumDeletedDocs(info)) / info.Info.DocCount; if (pctDeletes > forceMergeDeletesPctAllowed && !merging.Contains(info)) { eligible.Add(info); @@ -699,7 +702,7 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos infos) OneMerge merge = new OneMerge(eligible.GetView(start, end - start)); // LUCENENET: Converted end index to length if (Verbose()) { - Message("add merge=" + m_writer.Get().SegString(merge.Segments)); + Message("add merge=" + m_writer.Value.SegString(merge.Segments)); } spec.Add(merge); start = end; @@ -719,13 +722,13 @@ private long FloorSize(long bytes) private bool Verbose() { - IndexWriter w = m_writer.Get(); + IndexWriter w = m_writer.Value; return w != null && w.infoStream.IsEnabled("TMP"); } private void Message(string message) { - m_writer.Get().infoStream.Message("TMP", message); + m_writer.Value?.infoStream.Message("TMP", message); } public override string ToString() @@ -742,4 +745,4 @@ public override string ToString() return sb.ToString(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs index 62a1e359c6..2a9c84bd92 100644 --- a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs +++ b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs @@ -175,13 +175,13 @@ public override string ToString() private bool Verbose() { - IndexWriter w = m_writer.Get(); + IndexWriter w = m_writer.Value; return w != null && w.infoStream.IsEnabled("UPGMP"); } private void 
Message(string message)
         {
-            m_writer.Get().infoStream.Message("UPGMP", message);
+            m_writer.Value?.infoStream.Message("UPGMP", message);
         }
     }
 }
diff --git a/src/Lucene.Net/Support/ObsoleteAPI/SetOnce.cs b/src/Lucene.Net/Support/ObsoleteAPI/SetOnce.cs
new file mode 100644
index 0000000000..7b9f11205b
--- /dev/null
+++ b/src/Lucene.Net/Support/ObsoleteAPI/SetOnce.cs
@@ -0,0 +1,43 @@
+using System;
+#nullable enable
+
+namespace Lucene.Net.Util
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements. See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License. You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public partial class SetOnce<T>
+    {
+        /// <summary>
+        /// Sets the given object. If the object has already been set, an exception is thrown.
+        /// </summary>
+        [Obsolete("Use Value property instead. This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
+        public void Set(T? obj)
+        {
+            Value = obj;
+        }
+
+        /// <summary>
+        /// Returns the object set by <see cref="Set(T)"/> or <see cref="Value"/>.
+        /// </summary>
+        [Obsolete("Use Value property instead. This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
+        public T? Get()
+        {
+            return Value;
+        }
+    }
+}
diff --git a/src/Lucene.Net/Util/SetOnce.cs b/src/Lucene.Net/Util/SetOnce.cs
index fbf38c66ef..6f76b15388 100644
--- a/src/Lucene.Net/Util/SetOnce.cs
+++ b/src/Lucene.Net/Util/SetOnce.cs
@@ -5,6 +5,7 @@
 using System.ComponentModel;
 using System.Runtime.Serialization;
 #endif
+#nullable enable
 
 namespace Lucene.Net.Util
 {
@@ -28,21 +29,21 @@ namespace Lucene.Net.Util
     /// <summary>
     /// A convenient class which offers a semi-immutable object wrapper
     /// implementation which allows one to set the value of an object exactly once,
-    /// and retrieve it many times. If <see cref="Set(T)"/> is called more than once,
+    /// and retrieve it many times. If the setter is called more than once,
     /// <see cref="AlreadySetException"/> is thrown and the operation
     /// will fail.
     ///
     /// @lucene.experimental
     /// </summary>
-    public sealed class SetOnce<T> // LUCENENET specific: Not implementing ICloneable per Microsoft's recommendation
+    public sealed partial class SetOnce<T> // LUCENENET specific: Not implementing ICloneable per Microsoft's recommendation
         where T : class // LUCENENET specific - added class constraint so we don't accept value types (which cannot be volatile)
     {
-        private volatile T obj = default;
+        private volatile T? obj;
         private readonly AtomicBoolean set;
 
         /// <summary>
         /// A default constructor which does not set the internal object, and allows
-        /// setting it by calling <see cref="Set(T)"/>.
+        /// setting it via <see cref="Value"/>.
         /// </summary>
         public SetOnce()
         {
@@ -51,39 +52,40 @@ public SetOnce()
 
         /// <summary>
         /// Creates a new instance with the internal object set to the given object.
- /// Note that any calls to afterwards will result in + /// Note that any calls to the setter afterwards will result in /// /// /// if called more than once - /// - public SetOnce(T obj) + /// + public SetOnce(T? obj) { this.obj = obj; set = new AtomicBoolean(true); } /// - /// Sets the given object. If the object has already been set, an exception is thrown. - public void Set(T obj) + /// Gets or sets the object. + /// + /// + /// This property's getter and setter replace the Get() and Set(T) methods in the Java version. + /// + /// Thrown if the object has already been set. + public T? Value { - if (set.CompareAndSet(false, true)) - { - this.obj = obj; - } - else + get => obj; + set { - throw new AlreadySetException(); + if (set.CompareAndSet(false, true)) + { + this.obj = value; + } + else + { + throw new AlreadySetException(); + } } } - /// - /// Returns the object set by . - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public T Get() - { - return obj; - } - [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { @@ -92,10 +94,10 @@ public object Clone() } /// - /// Thrown when is called more than once. + /// Thrown when the setter is called more than once. // LUCENENET specific - de-nested the class from SetOnce to allow the test // framework to serialize it without the generic type. - // LUCENENET: It is no longer good practice to use binary serialization. + // LUCENENET: It is no longer good practice to use binary serialization. // See: https://github.com/dotnet/corefx/issues/23584#issuecomment-325724568 #if FEATURE_SERIALIZABLE_EXCEPTIONS [Serializable]