# 2007 June 21
#
# The author disclaims copyright to this source code.  In place of
# a legal notice, here is a blessing:
#
#    May you do good and not evil.
#    May you find forgiveness for yourself and forgive others.
#    May you share freely, never taking more than you give.
#
#*************************************************************************
# This file implements regression tests for SQLite library. The focus
# of this script is testing the pluggable tokeniser feature of the
# FTS2 module.
#
# $Id: fts2token.test,v 1.3 2007/06/25 12:05:40 danielk1977 Exp $
#

set testdir [file dirname $argv0]
source $testdir/tester.tcl

# If SQLITE_ENABLE_FTS2 is not defined, omit this file.
ifcapable !fts2 {
  finish_test
  return
}

# Return a copy of $str in which each character with a code point
# above 0x7f has been replaced by a \xNNNN escape sequence, so that
# strings containing non-ASCII text can be displayed as printable ASCII.
proc escape_string {str} {
  set out ""
  foreach char [split $str ""] {
    scan $char %c i
    if {$i<=127} {
      append out $char
    } else {
      append out [format {\x%.4x} $i]
    }
  }
  set out
}
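
# A quick illustration of escape_string (an illustrative sketch; the
# test name below is ours, not part of the original numbering): ASCII
# passes through unchanged, everything else becomes a \xNNNN escape.
do_test fts2token-0.1 {
  escape_string "abc\u0e2d"
} "abc\\x0e2d"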

#--------------------------------------------------------------------------
# Test cases fts2token-1.* are the warm-body test for the SQL scalar
# function fts2_tokenizer(). The procedure is as follows:
#
#   1: Verify that there is no such fts2 tokenizer as 'blah'.
#
#   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
#      retrieved value as tokenizer 'blah'.
#
#   3: Test that the value returned for tokenizer 'blah' is now the
#      same as that retrieved for 'simple'.
#
#   4: Test that it is now possible to create an fts2 table using
#      tokenizer 'blah' (it was not possible in step 1).
#
#   5: Test that the table created to use tokenizer 'blah' is usable.
#
do_test fts2token-1.1 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts2(content, tokenize blah);
  }
} {1 {unknown tokenizer: blah}}
do_test fts2token-1.2 {
  execsql {
    SELECT fts2_tokenizer('blah', fts2_tokenizer('simple')) IS NULL;
  }
} {0}
do_test fts2token-1.3 {
  execsql {
    SELECT fts2_tokenizer('blah') == fts2_tokenizer('simple');
  }
} {1}
do_test fts2token-1.4 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts2(content, tokenize blah);
  }
} {0 {}}
do_test fts2token-1.5 {
  execsql {
    INSERT INTO t1(content) VALUES('There was movement at the station');
    INSERT INTO t1(content) VALUES('For the word has passed around');
    INSERT INTO t1(content) VALUES('That the colt from ol regret had got away');
    SELECT content FROM t1 WHERE content MATCH 'movement'
  }
} {{There was movement at the station}}

#--------------------------------------------------------------------------
# Test cases fts2token-2.* test error cases in the scalar-function-based
# API for getting and setting tokenizers.
#
do_test fts2token-2.1 {
  catchsql {
    SELECT fts2_tokenizer('nosuchtokenizer');
  }
} {1 {unknown tokenizer: nosuchtokenizer}}
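
# For contrast (an illustrative sketch; the test name below is ours, not
# part of the original numbering): querying a tokenizer that is
# registered succeeds, returning a pointer-sized blob rather than
# raising an error.
do_test fts2token-2.2 {
  execsql { SELECT length(fts2_tokenizer('simple')) > 0 }
} {1}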

#--------------------------------------------------------------------------
# Test cases fts2token-3.* test the three built-in tokenizers with a
# simple input string via the built-in test function. This is as much
# to test the test function as the tokenizer implementations.
#
do_test fts2token-3.1 {
  execsql {
    SELECT fts2_tokenizer_test('simple', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
do_test fts2token-3.2 {
  execsql {
    SELECT fts2_tokenizer_test('porter', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
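
# fts2_tokenizer_test returns a flat list of (position, token, source
# word) triples, as seen in the expected results above. A small helper
# (an illustrative sketch, not used by the tests) to render that list
# one token per line:
#
#   print_tokens [db onecolumn {
#     SELECT fts2_tokenizer_test('simple', 'I don''t see how')
#   }]
#
proc print_tokens {tlist} {
  foreach {pos tok word} $tlist {
    puts [format "%d: %s (from %s)" $pos $tok $word]
  }
}
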
ifcapable icu {
  do_test fts2token-3.3 {
    execsql {
      SELECT fts2_tokenizer_test('icu', 'I don''t see how');
    }
  } {{0 i I 1 don't don't 2 see see 3 how how}}
}

#--------------------------------------------------------------------------
# Test cases fts2token-4.* test the ICU tokenizer. In practice, this
# tokenizer has only two modes: "thai" and "everybody else". Some other
# Asian languages (Lao, Khmer etc.) require the same special treatment
# as Thai, but ICU doesn't support them yet.
#
ifcapable icu {

  # Tokenize $input with the ICU tokenizer under $locale, and check
  # that the single value returned matches $output.
  proc do_icu_test {name locale input output} {
    set ::out [db eval { SELECT fts2_tokenizer_test('icu', $locale, $input) }]
    do_test $name {
      lindex $::out 0
    } $output
  }

  do_icu_test fts2token-4.1 en_US  {} {}
  do_icu_test fts2token-4.2 en_US {Test cases fts2} [list \
    0 test Test 1 cases cases 2 fts2 fts2
  ]

  # The following test shows that ICU is smart enough to recognise
  # Thai characters, even when the locale is set to English/United
  # States.
  #
  set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
  set output    "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
  append output "1 \u0e19\u0e30 \u0e19\u0e30 "
  append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"

  do_icu_test fts2token-4.3 th_TH  $input $output
  do_icu_test fts2token-4.4 en_US  $input $output

  # ICU handles an unknown locale by falling back to the default.
  # So this is not an error.
  do_icu_test fts2token-4.5 MiddleOfTheOcean  $input $output

  set    longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
  append longtoken "AReallocInTheIcuTokenizerCode"

  set    input "short tokens then "
  append input $longtoken
  set    output "0 short short "
  append output "1 tokens tokens "
  append output "2 then then "
  append output "3 [string tolower $longtoken] $longtoken"

  do_icu_test fts2token-4.6 MiddleOfTheOcean $input $output
  do_icu_test fts2token-4.7 th_TH            $input $output
  do_icu_test fts2token-4.8 en_US            $input $output
}

do_test fts2token-internal {
  execsql { SELECT fts2_tokenizer_internal_test() }
} {ok}

finish_test