Deprecate the extension methods #18

Merged · 1 commit · Jun 21, 2024
9 changes: 4 additions & 5 deletions README.md
@@ -19,7 +19,7 @@ var example = "Hello, 🌏 world. 你好,世界.";
// The tokenizer can split words, graphemes or sentences.
// It operates on strings, UTF-8 bytes, and streams.

var words = example.GetWords();
var words = Tokenizer.GetWords(example);

// Iterate over the tokens
foreach (var word in words)
@@ -46,9 +46,8 @@ world
.
*/


var utf8bytes = Encoding.UTF8.GetBytes(example);
var graphemes = utf8bytes.GetGraphemes();
var graphemes = Tokenizer.GetGraphemes(utf8bytes);

// Iterate over the tokens
foreach (var grapheme in graphemes)
@@ -109,9 +108,9 @@ If you are using v1.x of this package, v2 has been renamed:

We now use extension methods:

`Tokenizer.Create(input)` → `input.GetWords()`
`Tokenizer.Create(input)` → `Tokenizer.GetWords(input)`

`Tokenizer.Create(input, TokenType.Graphemes)` → `input.GetGraphemes()`
`Tokenizer.Create(input, TokenType.Graphemes)` → `Tokenizer.GetGraphemes(input)`

### Performance

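To make the change concrete, here is a minimal sketch of the v2 calling style shown in this diff, using the README's example string. The package namespace import and the printing inside the loops are assumptions for illustration, not taken from this PR.

```csharp
using System;
using System.Text;
using UAX29; // assumed package namespace; adjust to the actual one

var example = "Hello, 🌏 world. 你好,世界.";

// v1.x:  Tokenizer.Create(example)
// v2.x (this PR): static methods on Tokenizer, no extension methods.
var words = Tokenizer.GetWords(example);

foreach (var word in words)
{
    // Assumed: each token is a char span over the input; ToString() copies it out.
    Console.WriteLine(word.ToString());
}

// The same static calls work on UTF-8 bytes.
var utf8bytes = Encoding.UTF8.GetBytes(example);
var graphemes = Tokenizer.GetGraphemes(utf8bytes);

foreach (var grapheme in graphemes)
{
    // Assumed: byte-based tokens are UTF-8 spans; decode to print.
    Console.WriteLine(Encoding.UTF8.GetString(grapheme));
}
```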
4 changes: 2 additions & 2 deletions uax29/Examples.Test.cs
@@ -21,7 +21,7 @@ public void Readme()
// The tokenizer can split words, graphemes or sentences.
// It operates on strings, UTF-8 bytes, and streams.

var words = example.GetWords();
var words = Tokenizer.GetWords(example);

// Iterate over the tokens
foreach (var word in words)
@@ -49,7 +49,7 @@ public void Readme()
*/

var utf8bytes = Encoding.UTF8.GetBytes(example);
var graphemes = utf8bytes.GetGraphemes();
var graphemes = Tokenizer.GetGraphemes(utf8bytes);

// Iterate over the tokens
foreach (var grapheme in graphemes)
9 changes: 4 additions & 5 deletions uax29/README.md
@@ -19,7 +19,7 @@ var example = "Hello, 🌏 world. 你好,世界.";
// The tokenizer can split words, graphemes or sentences.
// It operates on strings, UTF-8 bytes, and streams.

var words = example.GetWords();
var words = Tokenizer.GetWords(example);

// Iterate over the tokens
foreach (var word in words)
@@ -46,9 +46,8 @@ world
.
*/


var utf8bytes = Encoding.UTF8.GetBytes(example);
var graphemes = utf8bytes.GetGraphemes();
var graphemes = Tokenizer.GetGraphemes(utf8bytes);

// Iterate over the tokens
foreach (var grapheme in graphemes)
@@ -109,9 +108,9 @@ If you are using v1.x of this package, v2 has been renamed:

We now use extension methods:

`Tokenizer.Create(input)` → `input.GetWords()`
`Tokenizer.Create(input)` → `Tokenizer.GetWords(input)`

`Tokenizer.Create(input, TokenType.Graphemes)` → `input.GetGraphemes()`
`Tokenizer.Create(input, TokenType.Graphemes)` → `Tokenizer.GetGraphemes(input)`

### Performance

4 changes: 2 additions & 2 deletions uax29/StreamTokenizer.Test.cs
@@ -27,10 +27,10 @@ public void Stream()
foreach (var input in examples)
{
var bytes = Encoding.UTF8.GetBytes(input);
var staticTokens = bytes.GetWords();
var staticTokens = Tokenizer.GetWords(bytes);

using var stream = new MemoryStream(bytes);
var streamTokens = stream.GetWords();
var streamTokens = Tokenizer.GetWords(stream);

foreach (var streamToken in streamTokens)
{
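The test above relies on byte and stream tokenization agreeing. A hedged sketch of the same idea outside the test harness follows; the token type and the printing are assumptions.

```csharp
using System;
using System.IO;
using System.Text;

var input = "Hello, 🌏 world. 你好,世界.";
var bytes = Encoding.UTF8.GetBytes(input);

// Tokenize the in-memory bytes directly...
var staticTokens = Tokenizer.GetWords(bytes);

// ...or wrap the same bytes in a stream; both enumerations should yield the same words.
using var stream = new MemoryStream(bytes);
var streamTokens = Tokenizer.GetWords(stream);

foreach (var word in streamTokens)
{
    // Assumed: stream tokens are spans of UTF-8 bytes; decode to print.
    Console.WriteLine(Encoding.UTF8.GetString(word));
}
```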
26 changes: 13 additions & 13 deletions uax29/Tokenizer.Graphemes.cs
@@ -11,7 +11,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<byte> GetGraphemes(this Span<byte> input) => new(input, Graphemes.SplitUtf8Bytes);
public static Tokenizer<byte> GetGraphemes(Span<byte> input) => new(input, Graphemes.SplitUtf8Bytes);

/// <summary>
/// Split the graphemes in the given <see cref="ReadOnlySpan"/> of UTF-8 encoded bytes, according to the Unicode UAX #29 spec. https://unicode.org/reports/tr29/
@@ -20,7 +20,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<byte> GetGraphemes(this ReadOnlySpan<byte> input) => new(input, Graphemes.SplitUtf8Bytes);
public static Tokenizer<byte> GetGraphemes(ReadOnlySpan<byte> input) => new(input, Graphemes.SplitUtf8Bytes);

/// <summary>
/// Split the graphemes in the given <see cref="Memory"/> of UTF-8 encoded bytes.
@@ -29,7 +29,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<byte> GetGraphemes(this Memory<byte> input) => new(input.Span, Graphemes.SplitUtf8Bytes);
public static Tokenizer<byte> GetGraphemes(Memory<byte> input) => new(input.Span, Graphemes.SplitUtf8Bytes);

/// <summary>
/// Split the graphemes in the given <see cref="ReadOnlyMemory"/> of UTF-8 encoded bytes.
@@ -38,7 +38,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<byte> GetGraphemes(this ReadOnlyMemory<byte> input) => new(input.Span, Graphemes.SplitUtf8Bytes);
public static Tokenizer<byte> GetGraphemes(ReadOnlyMemory<byte> input) => new(input.Span, Graphemes.SplitUtf8Bytes);

/// <summary>
/// Split the graphemes in the given array of UTF-8 encoded bytes.
@@ -47,7 +47,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<byte> GetGraphemes(this byte[] input) => new(input.AsSpan(), Graphemes.SplitUtf8Bytes);
public static Tokenizer<byte> GetGraphemes(byte[] input) => new(input.AsSpan(), Graphemes.SplitUtf8Bytes);

/// <summary>
/// Split the graphemes in the given string.
@@ -56,7 +56,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<char> GetGraphemes(this string input) => new(input.AsSpan(), Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(string input) => new(input.AsSpan(), Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given string.
@@ -65,7 +65,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<char> GetGraphemes(this char[] input) => new(input.AsSpan(), Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(char[] input) => new(input.AsSpan(), Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given <see cref="Span"/> of <see cref="char"/>.
@@ -75,7 +75,7 @@ public static partial class Tokenizer
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
///
public static Tokenizer<char> GetGraphemes(this Span<char> input) => new(input, Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(Span<char> input) => new(input, Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given <see cref="ReadOnlySpan"/> of <see cref="char"/>.
@@ -84,7 +84,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<char> GetGraphemes(this ReadOnlySpan<char> input) => new(input, Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(ReadOnlySpan<char> input) => new(input, Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given <see cref="Memory"/> of <see cref="char"/>.
@@ -93,7 +93,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<char> GetGraphemes(this Memory<char> input) => new(input.Span, Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(Memory<char> input) => new(input.Span, Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given <see cref="ReadOnlyMemory"/> of <see cref="char"/>.
@@ -102,7 +102,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static Tokenizer<char> GetGraphemes(this ReadOnlyMemory<char> input) => new(input.Span, Graphemes.SplitChars);
public static Tokenizer<char> GetGraphemes(ReadOnlyMemory<char> input) => new(input.Span, Graphemes.SplitChars);

/// <summary>
/// Split the graphemes in the given <see cref="Stream"/> of UTF-8 encoded bytes.
@@ -127,7 +127,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static StreamTokenizer<byte> GetGraphemes(this Stream stream, int minBufferBytes = 1024, byte[]? bufferStorage = null)
public static StreamTokenizer<byte> GetGraphemes(Stream stream, int minBufferBytes = 1024, byte[]? bufferStorage = null)
{
bufferStorage ??= new byte[minBufferBytes * 2];
var buffer = new Buffer<byte>(stream.Read, minBufferBytes, bufferStorage);
@@ -157,7 +157,7 @@ public static StreamTokenizer<byte> GetGraphemes(this Stream stream, int minBuff
/// <returns>
/// An enumerator of graphemes. Use foreach (var grapheme in graphemes).
/// </returns>
public static StreamTokenizer<char> GetGraphemes(this TextReader stream, int minBufferChars = 1024, char[]? bufferStorage = null)
public static StreamTokenizer<char> GetGraphemes(TextReader stream, int minBufferChars = 1024, char[]? bufferStorage = null)
{
bufferStorage ??= new char[minBufferChars * 2];
var buffer = new Buffer<char>(stream.Read, minBufferChars, bufferStorage);
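The Stream and TextReader overloads above accept a minimum buffer size and optional caller-supplied storage. Below is a sketch of reusing one buffer across several streams; the file paths are hypothetical and the loop body is a placeholder.

```csharp
using System.IO;

// One reusable buffer; the overload above allocates minBufferBytes * 2 when
// bufferStorage is null, so a caller-supplied buffer should match that sizing.
var bufferStorage = new byte[2048];

foreach (var path in new[] { "a.txt", "b.txt" }) // hypothetical files
{
    using var stream = File.OpenRead(path);
    var graphemes = Tokenizer.GetGraphemes(stream, minBufferBytes: 1024, bufferStorage: bufferStorage);

    foreach (var grapheme in graphemes)
    {
        // Process each grapheme cluster here. Assumed: each token is a span
        // over the shared buffer, so copy it out if you need to keep it.
    }
}
```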
26 changes: 13 additions & 13 deletions uax29/Tokenizer.Sentences.cs
@@ -11,7 +11,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<byte> GetSentences(this Span<byte> input) => new(input, Sentences.SplitUtf8Bytes);
public static Tokenizer<byte> GetSentences(Span<byte> input) => new(input, Sentences.SplitUtf8Bytes);

/// <summary>
/// Split the sentences in the given <see cref="ReadOnlySpan"/> of UTF-8 encoded bytes, according to the Unicode UAX #29 spec. https://unicode.org/reports/tr29/
Expand All @@ -20,7 +20,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<byte> GetSentences(this ReadOnlySpan<byte> input) => new(input, Sentences.SplitUtf8Bytes);
public static Tokenizer<byte> GetSentences(ReadOnlySpan<byte> input) => new(input, Sentences.SplitUtf8Bytes);

/// <summary>
/// Split the sentences in the given <see cref="Memory"/> of UTF-8 encoded bytes.
Expand All @@ -29,7 +29,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<byte> GetSentences(this Memory<byte> input) => new(input.Span, Sentences.SplitUtf8Bytes);
public static Tokenizer<byte> GetSentences(Memory<byte> input) => new(input.Span, Sentences.SplitUtf8Bytes);

/// <summary>
/// Split the sentences in the given <see cref="ReadOnlyMemory"/> of UTF-8 encoded bytes.
Expand All @@ -38,7 +38,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<byte> GetSentences(this ReadOnlyMemory<byte> input) => new(input.Span, Sentences.SplitUtf8Bytes);
public static Tokenizer<byte> GetSentences(ReadOnlyMemory<byte> input) => new(input.Span, Sentences.SplitUtf8Bytes);

/// <summary>
/// Split the sentences in the given array of UTF-8 encoded bytes.
Expand All @@ -47,7 +47,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<byte> GetSentences(this byte[] input) => new(input.AsSpan(), Sentences.SplitUtf8Bytes);
public static Tokenizer<byte> GetSentences(byte[] input) => new(input.AsSpan(), Sentences.SplitUtf8Bytes);

/// <summary>
/// Split the sentences in the given string.
Expand All @@ -56,7 +56,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<char> GetSentences(this string input) => new(input.AsSpan(), Sentences.SplitChars);
public static Tokenizer<char> GetSentences(string input) => new(input.AsSpan(), Sentences.SplitChars);

/// <summary>
/// Split the sentences in the given string.
Expand All @@ -65,7 +65,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<char> GetSentences(this char[] input) => new(input.AsSpan(), Sentences.SplitChars);
public static Tokenizer<char> GetSentences(char[] input) => new(input.AsSpan(), Sentences.SplitChars);

/// <summary>
/// Split the sentences in the given <see cref="Span"/> of <see cref="char"/>.
Expand All @@ -75,7 +75,7 @@ public static partial class Tokenizer
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
///
public static Tokenizer<char> GetSentences(this Span<char> input) => new(input, Sentences.SplitChars);
public static Tokenizer<char> GetSentences(Span<char> input) => new(input, Sentences.SplitChars);

/// <summary>
/// Split the sentences in the given <see cref="ReadOnlySpan"/> of <see cref="char"/>.
Expand All @@ -84,7 +84,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<char> GetSentences(this ReadOnlySpan<char> input) => new(input, Sentences.SplitChars);
public static Tokenizer<char> GetSentences(ReadOnlySpan<char> input) => new(input, Sentences.SplitChars);

/// <summary>
/// Split the sentences in the given <see cref="Memory"/> of <see cref="char"/>.
Expand All @@ -93,7 +93,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<char> GetSentences(this Memory<char> input) => new(input.Span, Sentences.SplitChars);
public static Tokenizer<char> GetSentences(Memory<char> input) => new(input.Span, Sentences.SplitChars);

/// <summary>
/// Split the sentences in the given <see cref="ReadOnlyMemory"/> of <see cref="char"/>.
Expand All @@ -102,7 +102,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static Tokenizer<char> GetSentences(this ReadOnlyMemory<char> input) => new(input.Span, Sentences.SplitChars);
public static Tokenizer<char> GetSentences(ReadOnlyMemory<char> input) => new(input.Span, Sentences.SplitChars);



@@ -129,7 +129,7 @@ public static partial class Tokenizer
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static StreamTokenizer<byte> GetSentences(this Stream stream, int minBufferBytes = 1024, byte[]? bufferStorage = null)
public static StreamTokenizer<byte> GetSentences(Stream stream, int minBufferBytes = 1024, byte[]? bufferStorage = null)
{
bufferStorage ??= new byte[minBufferBytes * 2];
var buffer = new Buffer<byte>(stream.Read, minBufferBytes, bufferStorage);
@@ -159,7 +159,7 @@ public static StreamTokenizer<byte> GetSentences(this Stream stream, int minBuff
/// <returns>
/// An enumerator of sentences. Use foreach (var sentence in sentences).
/// </returns>
public static StreamTokenizer<char> GetSentences(this TextReader stream, int minBufferChars = 1024, char[]? bufferStorage = null)
public static StreamTokenizer<char> GetSentences(TextReader stream, int minBufferChars = 1024, char[]? bufferStorage = null)
{
bufferStorage ??= new char[minBufferChars * 2];
var buffer = new Buffer<char>(stream.Read, minBufferChars, bufferStorage);
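The same pattern applies to sentences read from a TextReader. A minimal sketch follows; the StringReader input and the printing are illustrative assumptions.

```csharp
using System;
using System.IO;

// Split sentences from a character stream using the static API above.
using var reader = new StringReader("Hello, 🌏 world. 你好,世界.");
var sentences = Tokenizer.GetSentences(reader);

foreach (var sentence in sentences)
{
    // Assumed: char-based tokens; ToString() materializes each sentence.
    Console.WriteLine(sentence.ToString());
}
```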