File tree Expand file tree Collapse file tree 3 files changed +66
-0
lines changed
Expand file tree Collapse file tree 3 files changed +66
-0
lines changed Original file line number Diff line number Diff line change 2424
# Re-exported alias kept for backward compatibility with callers that
# import FormatType from this module rather than from `types`.
FormatType = types.FormatType
2626
# Public API of this module; keeps the compatibility shim's surface explicit.
__all__ = [
    "AlignmentStatus",
    "CharInterval",
    "Extraction",
    "Document",
    "AnnotatedDocument",
    "ExampleData",
    "FormatType",
]
36+
2737
2838class AlignmentStatus (enum .Enum ):
2939 MATCH_EXACT = "match_exact"
Original file line number Diff line number Diff line change 3131from langextract .core import debug_utils
3232from langextract .core import exceptions
3333
# Names exported by the tokenizer module: error types, interval/token
# datatypes, and the tokenization helper functions.
__all__ = [
    "BaseTokenizerError",
    "InvalidTokenIntervalError",
    "SentenceRangeError",
    "CharInterval",
    "TokenInterval",
    "TokenType",
    "Token",
    "TokenizedText",
    "tokenize",
    "tokens_text",
    "find_sentence_range",
]
47+
3448
class BaseTokenizerError(exceptions.LangExtractError):
  """Base class for all tokenizer-related errors."""
Original file line number Diff line number Diff line change @@ -191,6 +191,48 @@ def test_extract_custom_params_reach_inference(
191191 _ , kwargs = mock_model .infer .call_args
192192 self .assertEqual (kwargs .get ("max_workers" ), 5 )
193193
194+ def test_data_module_exports_via_compatibility_shim (self ):
195+ """Verify data module exports are accessible via lx.data."""
196+ expected_exports = [
197+ "AlignmentStatus" ,
198+ "CharInterval" ,
199+ "Extraction" ,
200+ "Document" ,
201+ "AnnotatedDocument" ,
202+ "ExampleData" ,
203+ "FormatType" ,
204+ ]
205+
206+ for name in expected_exports :
207+ with self .subTest (export = name ):
208+ self .assertTrue (
209+ hasattr (lx .data , name ),
210+ f"lx.data.{ name } not accessible via compatibility shim" ,
211+ )
212+
213+ def test_tokenizer_module_exports_via_compatibility_shim (self ):
214+ """Verify tokenizer module exports are accessible via lx.tokenizer."""
215+ expected_exports = [
216+ "BaseTokenizerError" ,
217+ "InvalidTokenIntervalError" ,
218+ "SentenceRangeError" ,
219+ "CharInterval" ,
220+ "TokenInterval" ,
221+ "TokenType" ,
222+ "Token" ,
223+ "TokenizedText" ,
224+ "tokenize" ,
225+ "tokens_text" ,
226+ "find_sentence_range" ,
227+ ]
228+
229+ for name in expected_exports :
230+ with self .subTest (export = name ):
231+ self .assertTrue (
232+ hasattr (lx .tokenizer , name ),
233+ f"lx.tokenizer.{ name } not accessible via compatibility shim" ,
234+ )
235+
194236
195237if __name__ == "__main__" :
196238 absltest .main ()
You can’t perform that action at this time.
0 commit comments