Skip to content

Commit f33f1e5

Browse files
committed
Re-organized the package:
Renamed the module and exposed only the minimum necessary symbols at the package level.
1 parent eb42b08 commit f33f1e5

File tree

6 files changed

+11
-10
lines changed

6 files changed

+11
-10
lines changed

sqlitefts/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
from .tokenizer import Tokenizer, make_tokenizer_module, register_tokenizer
2+
from . import tokenizer
3+
4+
__all__ = ["Tokenizer", "make_tokenizer_module", "register_tokenizer", "tokenizer"]
File renamed without changes.

tests/test_base.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import struct
66
import re
77

8-
import sqlitefts.sqlite_tokenizer as fts
8+
import sqlitefts as fts
99

1010

1111
class SimpleTokenizer(fts.Tokenizer):
@@ -20,7 +20,7 @@ def tokenize(self, text):
2020
def test_make_tokenizer():
2121
c = sqlite3.connect(':memory:')
2222
tokenizer_module = fts.make_tokenizer_module(SimpleTokenizer())
23-
assert fts.sqlite3_tokenizer_module == type(tokenizer_module)
23+
assert fts.tokenizer.sqlite3_tokenizer_module == type(tokenizer_module)
2424
c.close()
2525

2626

tests/test_base2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import sqlite3
66
import re
77

8-
import sqlitefts.sqlite_tokenizer as fts
8+
import sqlitefts as fts
99

1010

1111
class BaseTokenizer(fts.Tokenizer):

tests/test_igo.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import ctypes
55
import struct
66

7-
import sqlitefts.sqlite_tokenizer as fts
7+
import sqlitefts as fts
88

99
import pytest
1010
igo = pytest.importorskip('igo')
@@ -25,7 +25,7 @@ def tokenize(self, text):
2525
def test_make_tokenizer():
2626
c = sqlite3.connect(':memory:')
2727
tokenizer_module = fts.make_tokenizer_module(t)
28-
assert fts.sqlite3_tokenizer_module == type(tokenizer_module)
28+
assert fts.tokenizer.sqlite3_tokenizer_module == type(tokenizer_module)
2929
c.close()
3030

3131

tests/test_tinysegmenter.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,10 @@
11
# coding: utf-8
22
from __future__ import print_function, unicode_literals
3-
import sys
4-
import os
53
import sqlite3
64
import ctypes
75
import struct
86

9-
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
10-
import sqlitefts.sqlite_tokenizer as fts
7+
import sqlitefts as fts
118

129
import pytest
1310
ts = pytest.importorskip('tinysegmenter')
@@ -31,7 +28,7 @@ def tokenize(self, text):
3128
def test_make_tokenizer():
3229
c = sqlite3.connect(':memory:')
3330
tokenizer_module = fts.make_tokenizer_module(t)
34-
assert fts.sqlite3_tokenizer_module == type(tokenizer_module)
31+
assert fts.tokenizer.sqlite3_tokenizer_module == type(tokenizer_module)
3532
c.close()
3633

3734

0 commit comments

Comments
 (0)