# coding: utf-8
"""
PoC SQLite FTS5 tokenizer in Python
"""
from __future__ import print_function, unicode_literals
import struct

from .tokenizer import ffi, SQLITE_OK

FTS5_TOKENIZE_QUERY = 0x0001
FTS5_TOKENIZE_PREFIX = 0x0002
FTS5_TOKENIZE_DOCUMENT = 0x0004
FTS5_TOKENIZE_AUX = 0x0008
FTS5_TOKEN_COLOCATED = 0x0001

# cdef with just the FTS5 declarations this PoC needs; fts5_api is truncated
# to its leading members (the full struct in fts5.h declares more functions).
ffi.cdef('''
typedef struct fts5_api fts5_api;
typedef struct fts5_tokenizer fts5_tokenizer;
typedef struct Fts5Tokenizer Fts5Tokenizer;

struct fts5_api {
    int iVersion;
    int (*xCreateTokenizer)(
        fts5_api *pApi, const char *zName, void *pContext,
        fts5_tokenizer *pTokenizer, void (*xDestroy)(void*));
};

struct fts5_tokenizer {
    int (*xCreate)(void*, const char **azArg, int nArg, Fts5Tokenizer **ppOut);
    void (*xDelete)(Fts5Tokenizer*);
    int (*xTokenize)(
        Fts5Tokenizer*, void *pCtx, int flags, const char *pText, int nText,
        int (*xToken)(
            void *pCtx, int tflags, const char *pToken,
            int nToken, int iStart, int iEnd));
};
''')

fts5_tokenizers = {}
"""Keep references to registered tokenizers so they are not garbage collected."""


def fts5_api_from_db(c):
    """Fetch the fts5_api pointer from a connection.

    ``SELECT fts5()`` returns the pointer packed into a blob, which is
    unpacked and cast back to ``fts5_api*``.
    """
    cur = c.cursor()
    try:
        cur.execute('SELECT fts5()')
        blob = cur.fetchone()[0]
        pRet = ffi.cast('fts5_api*', struct.unpack("P", blob)[0])
    finally:
        cur.close()
    return pRet


def register_tokenizer(c, name, tokenizer, context=None, on_destroy=None):
    """Register a tokenizer; references to context and on_destroy are kept alive."""
    fts5api = fts5_api_from_db(c)
    pContext = ffi.new_handle(context)
    if on_destroy is None:
        xDestroy = ffi.NULL
    else:

        @ffi.callback('void(void*)')
        def xDestroy(context):
            on_destroy(ffi.from_handle(context))

    fts5_tokenizers[name] = (tokenizer, pContext, xDestroy)
    r = fts5api.xCreateTokenizer(fts5api, name.encode('utf-8'), pContext,
                                 tokenizer, xDestroy)
    return r == SQLITE_OK


def make_fts5_tokenizer(tokenizer):
    """Build an fts5_tokenizer struct wrapping the given tokenizer instance."""
    t = ffi.new_handle(tokenizer)
    tokenizers = {}

    @ffi.callback('int(void*, const char **, int, Fts5Tokenizer **)')
    def xcreate(ctx, argv, argc, ppOut):
        # would keeping a constructor instead of a tokenizer instance be a
        # better idea? i.e. t = ctor(context, argv)
        tkn = ffi.cast('Fts5Tokenizer *', t)
        tokenizers[int(ffi.cast('intptr_t', tkn))] = tkn
        ppOut[0] = tkn
        return SQLITE_OK

    @ffi.callback('void(Fts5Tokenizer *)')
    def xdelete(pTokenizer):
        del tokenizers[int(ffi.cast('intptr_t', pTokenizer))]
        return None

    @ffi.callback('int(Fts5Tokenizer *, void *, int, const char *, int, '
                  'int(void*, int, const char *, int, int, int))')
    def xtokenize(pTokenizer, pCtx, flags, pText, nText, xToken):
        tokenizer = ffi.from_handle(ffi.cast('void *', pTokenizer))
        text = ffi.string(pText[0:nText]).decode('utf-8')
        for normalized, inputBegin, inputEnd in tokenizer.tokenize(text):
            normalized = normalized.encode('utf-8')
            if not normalized:
                continue

            # TODO: Synonym Support
            r = xToken(pCtx, 0, ffi.new('char[]', normalized), len(normalized),
                       inputBegin, inputEnd)
            if r != SQLITE_OK:
                return r
        return SQLITE_OK

    fts5_tokenizer = ffi.new("fts5_tokenizer *", [xcreate, xdelete, xtokenize])
    fts5_tokenizers[tokenizer] = (fts5_tokenizer, xcreate, xdelete, xtokenize)
    return fts5_tokenizer


__all__ = ["register_tokenizer", "make_fts5_tokenizer"]
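
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the PoC): names such as
# ``SimpleTokenizer`` and the tokenizer name ``'simple'`` are made up here.
# It assumes a tokenizer object exposing ``tokenize(text)`` that yields
# ``(normalized_token, input_begin, input_end)`` tuples (the contract consumed
# by ``xtokenize`` above), and an SQLite build whose ``SELECT fts5()`` returns
# the API pointer as a blob, as ``fts5_api_from_db`` expects.
#
#   import re
#   import sqlite3
#
#   class SimpleTokenizer(object):
#       """Lowercase word tokenizer based on a regular expression."""
#       def tokenize(self, text):
#           for m in re.finditer(r'\w+', text):
#               yield m.group(0).lower(), m.start(), m.end()
#
#   conn = sqlite3.connect(':memory:')
#   tokenizer_module = make_fts5_tokenizer(SimpleTokenizer())
#   assert register_tokenizer(conn, 'simple', tokenizer_module)
#   conn.execute("CREATE VIRTUAL TABLE doc USING fts5(body, tokenize='simple')")
#   conn.execute("INSERT INTO doc VALUES ('Hello FTS5 tokenizers')")
#   print(conn.execute("SELECT body FROM doc WHERE doc MATCH 'hello'").fetchall())
# ---------------------------------------------------------------------------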