Make structs readonly #2147

Open · wants to merge 1 commit into master
1 change: 1 addition & 0 deletions src/Confluent.Kafka/Confluent.Kafka.csproj
@@ -18,6 +18,7 @@
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<SignAssembly>true</SignAssembly>
<AssemblyOriginatorKeyFile>Confluent.Kafka.snk</AssemblyOriginatorKeyFile>
+ <LangVersion>9.0</LangVersion>
</PropertyGroup>

<ItemGroup>
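The `<LangVersion>9.0</LangVersion>` addition is what unlocks the newer syntax used in the rest of this diff: target-typed `new(...)` is a C# 9 feature, switch expressions arrived in C# 8, and `readonly struct` itself only requires C# 7.2. A minimal sketch of target-typed `new`, using a hypothetical `Point` type rather than anything from this repository:

```csharp
// Hypothetical type; not part of Confluent.Kafka.
public readonly struct Point
{
    public Point(int x, int y) { X = x; Y = y; }

    public int X { get; }
    public int Y { get; }

    // C# 9 target-typed new: the constructed type is inferred from the
    // field's declared type, so it is not repeated on the right-hand side.
    public static readonly Point Origin = new(0, 0);

    // Pre-C# 9 equivalent:
    // public static readonly Point Origin = new Point(0, 0);
}
```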
37 changes: 16 additions & 21 deletions src/Confluent.Kafka/Offset.cs
@@ -22,13 +22,13 @@ namespace Confluent.Kafka
{
/// <summary>
/// Represents a Kafka partition offset value.
- /// </summary>
+ /// </summary>
/// <remarks>
- /// This structure is the same size as a long -
- /// its purpose is to add some syntactical sugar
+ /// This structure is the same size as a long -
+ /// its purpose is to add some syntactical sugar
/// related to special values.
/// </remarks>
- public struct Offset : IEquatable<Offset>
+ public readonly struct Offset : IEquatable<Offset>
{
private const long RD_KAFKA_OFFSET_BEGINNING = -2;
private const long RD_KAFKA_OFFSET_END = -1;
@@ -38,22 +38,22 @@ public struct Offset : IEquatable<Offset>
/// <summary>
/// A special value that refers to the beginning of a partition.
/// </summary>
- public static readonly Offset Beginning = new Offset(RD_KAFKA_OFFSET_BEGINNING);
+ public static readonly Offset Beginning = new(RD_KAFKA_OFFSET_BEGINNING);

/// <summary>
/// A special value that refers to the end of a partition.
/// </summary>
- public static readonly Offset End = new Offset(RD_KAFKA_OFFSET_END);
+ public static readonly Offset End = new(RD_KAFKA_OFFSET_END);

/// <summary>
/// A special value that refers to the stored offset for a partition.
/// </summary>
- public static readonly Offset Stored = new Offset(RD_KAFKA_OFFSET_STORED);
+ public static readonly Offset Stored = new(RD_KAFKA_OFFSET_STORED);

/// <summary>
/// A special value that refers to an invalid, unassigned or default partition offset.
/// </summary>
- public static readonly Offset Unset = new Offset(RD_KAFKA_OFFSET_INVALID);
+ public static readonly Offset Unset = new(RD_KAFKA_OFFSET_INVALID);

/// <summary>
/// Initializes a new instance of the Offset structure.
@@ -72,7 +72,7 @@ public Offset(long offset)
public long Value { get; }

/// <summary>
- /// Gets whether or not this is one of the special
+ /// Gets whether or not this is one of the special
/// offset values.
/// </summary>
public bool IsSpecial
@@ -273,19 +273,14 @@ public override int GetHashCode()
/// </returns>
public override string ToString()
{
- switch (Value)
+ return Value switch
{
- case RD_KAFKA_OFFSET_BEGINNING:
- return $"Beginning [{RD_KAFKA_OFFSET_BEGINNING}]";
- case RD_KAFKA_OFFSET_END:
- return $"End [{RD_KAFKA_OFFSET_END}]";
- case RD_KAFKA_OFFSET_STORED:
- return $"Stored [{RD_KAFKA_OFFSET_STORED}]";
- case RD_KAFKA_OFFSET_INVALID:
- return $"Unset [{RD_KAFKA_OFFSET_INVALID}]";
- default:
- return Value.ToString();
- }
+ RD_KAFKA_OFFSET_BEGINNING => $"Beginning [{RD_KAFKA_OFFSET_BEGINNING}]",
+ RD_KAFKA_OFFSET_END => $"End [{RD_KAFKA_OFFSET_END}]",
+ RD_KAFKA_OFFSET_STORED => $"Stored [{RD_KAFKA_OFFSET_STORED}]",
+ RD_KAFKA_OFFSET_INVALID => $"Unset [{RD_KAFKA_OFFSET_INVALID}]",
+ _ => Value.ToString(),
+ };
}
}
}
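Marking `Offset` as a `readonly struct` does more than document intent: the compiler now verifies that every field and auto-property is read-only and that no member mutates `this`, and it can skip the defensive copies it otherwise makes when a non-readonly struct is accessed through a `readonly` field or an `in` parameter. A hedged sketch of the defensive-copy pitfall that `readonly struct` rules out, using hypothetical `MutableCounter` and `Holder` types:

```csharp
// Hypothetical types; not part of Confluent.Kafka.
struct MutableCounter
{
    private int count;
    public int Count => count;
    public void Increment() => count++;
}

class Holder
{
    // A readonly field of a NON-readonly struct: every member access below
    // operates on a hidden defensive copy made by the compiler.
    private readonly MutableCounter counter;

    public int Bump()
    {
        counter.Increment();   // increments the copy, not the stored field
        return counter.Count;  // still 0 on every call
    }
}
```

Declaring the struct `readonly` (and keeping its state immutable, as this PR does) removes both the surprise and the copying overhead.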
24 changes: 11 additions & 13 deletions src/Confluent.Kafka/Partition.cs
@@ -22,20 +22,20 @@ namespace Confluent.Kafka
{
/// <summary>
/// Represents a Kafka partition.
- /// </summary>
+ /// </summary>
/// <remarks>
- /// This structure is the same size as an int -
- /// its purpose is to add some syntactical sugar
+ /// This structure is the same size as an int -
+ /// its purpose is to add some syntactical sugar
/// related to special values.
/// </remarks>
- public struct Partition : IEquatable<Partition>
+ public readonly struct Partition : IEquatable<Partition>
{
private const int RD_KAFKA_PARTITION_UA = -1;

/// <summary>
/// A special value that refers to an unspecified / unknown partition.
/// </summary>
- public static readonly Partition Any = new Partition(RD_KAFKA_PARTITION_UA);
+ public static readonly Partition Any = new(RD_KAFKA_PARTITION_UA);

/// <summary>
/// Initializes a new instance of the Partition structure.
@@ -54,7 +54,7 @@ public Partition(int partition)
public int Value { get; }

/// <summary>
- /// Gets whether or not this is one of the special
+ /// Gets whether or not this is one of the special
/// partition values.
/// </summary>
public bool IsSpecial
@@ -197,7 +197,7 @@ public override int GetHashCode()
/// The int value to convert.
/// </param>
public static implicit operator Partition(int v)
- => new Partition(v);
+ => new(v);

/// <summary>
/// Converts the specified Partition value to an int value.
@@ -216,13 +216,11 @@ public override int GetHashCode()
/// </returns>
public override string ToString()
{
- switch (Value)
+ return Value switch
{
- case RD_KAFKA_PARTITION_UA:
- return $"[Any]";
- default:
- return $"[{Value}]";
- }
+ RD_KAFKA_PARTITION_UA => $"[Any]",
+ _ => $"[{Value}]",
+ };
}
}
}
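Both `ToString` rewrites in this PR swap a `switch` statement for a C# 8 switch expression. Besides being shorter, a switch expression is itself a value, so it can be returned or assigned inline, and if no arm matches at runtime it throws `SwitchExpressionException` instead of silently falling through. A small sketch with hypothetical values; the `-1` arm mirrors the `RD_KAFKA_PARTITION_UA` case above:

```csharp
using System;

static string Describe(int partition) =>
    partition switch
    {
        -1 => "[Any]",          // the special "unassigned" value, as above
        _  => $"[{partition}]", // any concrete partition number
    };

Console.WriteLine(Describe(-1)); // prints: [Any]
Console.WriteLine(Describe(5));  // prints: [5]
```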
14 changes: 7 additions & 7 deletions src/Confluent.Kafka/SerializationContext.cs
@@ -20,13 +20,13 @@ namespace Confluent.Kafka
/// <summary>
/// Context relevant to a serialization or deserialization operation.
/// </summary>
- public struct SerializationContext
+ public readonly struct SerializationContext
{
/// <summary>
/// The default SerializationContext value (representing no context defined).
/// </summary>
public static SerializationContext Empty
- => default(SerializationContext);
+ => default;

/// <summary>
/// Create a new SerializationContext object instance.
@@ -41,7 +41,7 @@ public static SerializationContext Empty
/// The collection of message headers (or null). Specifying null or an
/// empty list are equivalent. The order of headers is maintained, and
/// duplicate header keys are allowed.
- /// </param>
+ /// </param>
public SerializationContext(MessageComponentType component, string topic, Headers headers = null)
{
Component = component;
@@ -52,18 +52,18 @@ public SerializationContext(MessageComponentType component, string topic, Header
/// <summary>
/// The topic the data is being written to or read from.
/// </summary>
- public string Topic { get; private set; }
+ public string Topic { get; }

/// <summary>
/// The component of the message the serialization operation relates to.
/// </summary>
- public MessageComponentType Component { get; private set; }
+ public MessageComponentType Component { get; }

/// <summary>
/// The collection of message headers (or null). Specifying null or an
/// empty list are equivalent. The order of headers is maintained, and
/// duplicate header keys are allowed.
/// </summary>
- public Headers Headers { get; private set; }
+ public Headers Headers { get; }
}
}
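The `{ get; private set; }` → `{ get; }` changes are a prerequisite for `readonly struct`: auto-properties inside a readonly struct cannot have a plain `set` accessor, so get-only properties assigned from the constructor are the usual replacement, turning the immutability that `private set` merely suggested into something the compiler enforces. A short sketch of the pattern, using a hypothetical type:

```csharp
// Hypothetical type; not part of Confluent.Kafka.
public readonly struct RequestContext
{
    public RequestContext(string topic, int attempt)
    {
        Topic = topic;      // get-only auto-properties can be assigned
        Attempt = attempt;  // only in a constructor
    }

    public string Topic { get; }
    public int Attempt { get; }

    // A `set` (or `private set`) accessor on an auto-property here would
    // not compile inside a readonly struct.
}
```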
27 changes: 13 additions & 14 deletions src/Confluent.Kafka/Timestamp.cs
@@ -24,7 +24,7 @@ namespace Confluent.Kafka
/// <summary>
/// Encapsulates a Kafka timestamp and its type.
/// </summary>
- public struct Timestamp : IEquatable<Timestamp>
+ public readonly struct Timestamp : IEquatable<Timestamp>
{
private const long RD_KAFKA_NO_TIMESTAMP = 0;

@@ -37,17 +37,16 @@ public static Timestamp Default
}

/// <summary>
- /// Unix epoch as a UTC DateTime. Unix time is defined as
- /// the number of seconds past this UTC time, excluding
+ /// Unix epoch as a UTC DateTime. Unix time is defined as
+ /// the number of seconds past this UTC time, excluding
/// leap seconds.
/// </summary>
- public static readonly DateTime UnixTimeEpoch
- = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
+ public static readonly DateTime UnixTimeEpoch
+ = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

- private const long UnixTimeEpochMilliseconds
+ private const long UnixTimeEpochMilliseconds
= 62135596800000; // = UnixTimeEpoch.TotalMiliseconds


/// <summary>
/// Initializes a new instance of the Timestamp structure.
/// </summary>
@@ -65,7 +64,7 @@ public Timestamp(long unixTimestampMs, TimestampType type)

/// <summary>
/// Initializes a new instance of the Timestamp structure.
- /// Note: <paramref name="dateTime"/> is first converted to UTC
+ /// Note: <paramref name="dateTime"/> is first converted to UTC
/// if it is not already.
/// </summary>
/// <param name="dateTime">
@@ -90,7 +89,7 @@ public Timestamp(DateTime dateTime, TimestampType type)
/// The DateTime value corresponding to the timestamp.
/// </param>
public Timestamp(DateTime dateTime)
- : this(dateTime, TimestampType.CreateTime)
+ : this(dateTime, TimestampType.CreateTime)
{}

/// <summary>
@@ -101,7 +100,7 @@ public Timestamp(DateTime dateTime)
/// The DateTimeOffset value corresponding to the timestamp.
/// </param>
public Timestamp(DateTimeOffset dateTimeOffset)
- : this(dateTimeOffset.UtcDateTime, TimestampType.CreateTime)
+ : this(dateTimeOffset.UtcDateTime, TimestampType.CreateTime)
{}

/// <summary>
@@ -124,12 +123,12 @@ public DateTime UtcDateTime
/// Determines whether two Timestamps have the same value.
/// </summary>
/// <param name="obj">
- /// Determines whether this instance and a specified object,
+ /// Determines whether this instance and a specified object,
/// which must also be a Timestamp object, have the same value.
/// </param>
/// <returns>
- /// true if obj is a Timestamp and its value is the same as
- /// this instance; otherwise, false. If obj is null, the method
+ /// true if obj is a Timestamp and its value is the same as
+ /// this instance; otherwise, false. If obj is null, the method
/// returns false.
/// </returns>
public override bool Equals(object obj)
@@ -195,7 +194,7 @@ public override int GetHashCode()

/// <summary>
/// Convert a DateTime instance to a milliseconds unix timestamp.
- /// Note: <paramref name="dateTime"/> is first converted to UTC
+ /// Note: <paramref name="dateTime"/> is first converted to UTC
/// if it is not already.
/// </summary>
/// <param name="dateTime">
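The `UnixTimeEpoch` field that becomes a target-typed `new(...)` above anchors `Timestamp`'s millisecond conversions. A hedged sketch of the round trip it implies; the helper class and method names below are hypothetical, not part of the library's API:

```csharp
using System;

static class UnixTimeSketch
{
    // 1970-01-01T00:00:00Z, mirroring Timestamp.UnixTimeEpoch above.
    private static readonly DateTime UnixTimeEpoch =
        new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    // DateTime -> milliseconds since the Unix epoch (UTC, ignoring leap seconds).
    public static long ToUnixTimestampMs(DateTime dateTime) =>
        (long)(dateTime.ToUniversalTime() - UnixTimeEpoch).TotalMilliseconds;

    // Milliseconds since the Unix epoch -> UTC DateTime.
    public static DateTime FromUnixTimestampMs(long unixTimestampMs) =>
        UnixTimeEpoch.AddMilliseconds(unixTimestampMs);
}
```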