1616from datetime import datetime
1717
1818import pytest
19+ from pygridgain .exceptions import SQLError
1920
2021from pygridgain .api import (
2122 sql_fields , sql_fields_cursor_get_page ,
2223 sql , sql_cursor_get_page ,
2324 cache_get_configuration ,
2425)
26+ from pygridgain .datatypes .cache_config import CacheMode
2527from pygridgain .datatypes .prop_codes import *
2628from pygridgain .utils import entity_id
2729from pygridgain .binary import unwrap_binary
@@ -60,9 +62,10 @@ def test_sql(client):
6062
6163 result = sql_fields (
6264 conn ,
63- 'PUBLIC' ,
65+ 0 ,
6466 create_query ,
6567 page_size ,
68+ schema = 'PUBLIC' ,
6669 include_field_names = True
6770 )
6871 assert result .status == 0 , result .message
@@ -71,9 +74,10 @@ def test_sql(client):
7174 fname , lname , grade = data_line
7275 result = sql_fields (
7376 conn ,
74- 'PUBLIC' ,
77+ 0 ,
7578 insert_query ,
7679 page_size ,
80+ schema = 'PUBLIC' ,
7781 query_args = [i , fname , lname , grade ],
7882 include_field_names = True
7983 )
@@ -109,7 +113,7 @@ def test_sql(client):
109113 assert data .type_id == entity_id (binary_type_name )
110114
111115 # repeat cleanup
112- result = sql_fields (conn , 'PUBLIC' , drop_query , page_size )
116+ result = sql_fields (conn , 0 , drop_query , page_size , schema = 'PUBLIC' )
113117 assert result .status == 0
114118
115119
@@ -122,9 +126,10 @@ def test_sql_fields(client):
122126
123127 result = sql_fields (
124128 conn ,
125- 'PUBLIC' ,
129+ 0 ,
126130 create_query ,
127131 page_size ,
132+ schema = 'PUBLIC' ,
128133 include_field_names = True
129134 )
130135 assert result .status == 0 , result .message
@@ -133,19 +138,21 @@ def test_sql_fields(client):
133138 fname , lname , grade = data_line
134139 result = sql_fields (
135140 conn ,
136- 'PUBLIC' ,
141+ 0 ,
137142 insert_query ,
138143 page_size ,
144+ schema = 'PUBLIC' ,
139145 query_args = [i , fname , lname , grade ],
140146 include_field_names = True
141147 )
142148 assert result .status == 0 , result .message
143149
144150 result = sql_fields (
145151 conn ,
146- 'PUBLIC' ,
152+ 0 ,
147153 select_query ,
148154 page_size ,
155+ schema = 'PUBLIC' ,
149156 include_field_names = True
150157 )
151158 assert result .status == 0
@@ -160,7 +167,7 @@ def test_sql_fields(client):
160167 assert result .value ['more' ] is False
161168
162169 # repeat cleanup
163- result = sql_fields (conn , 'PUBLIC' , drop_query , page_size )
170+ result = sql_fields (conn , 0 , drop_query , page_size , schema = 'PUBLIC' )
164171 assert result .status == 0
165172
166173
@@ -177,7 +184,7 @@ def test_long_multipage_query(client):
177184
178185 client .sql ('DROP TABLE LongMultipageQuery IF EXISTS' )
179186
180- client .sql ("CREATE TABLE LongMultiPageQuery (%s, %s)" % \
187+ client .sql ("CREATE TABLE LongMultiPageQuery (%s, %s)" %
181188 (fields [0 ] + " INT(11) PRIMARY KEY" , "," .join (map (lambda f : f + " INT(11)" , fields [1 :]))))
182189
183190 for id in range (1 , 21 ):
@@ -216,3 +223,65 @@ def test_server_in_different_timezone(start_ignite_server, start_client, timezon
216223 client .close ()
217224 finally :
218225 kill_process_tree (server .pid )
226+
227+
def test_sql_not_create_cache_with_schema(client):
    """Querying a non-existent cache must raise rather than implicitly create it.

    NOTE(review): the name says 'with_schema' but the body exercises the
    ``cache`` parameter (its sibling test exercises ``schema``) — confirm the
    two names are not swapped.
    """
    with pytest.raises(SQLError, match=r".*Cache does not exist.*"):
        client.sql(query_str='select * from NotExisting', cache='NOT_EXISTING', schema=None)
231+
232+
def test_sql_not_create_cache_with_cache(client):
    """Querying against a non-existent SQL schema must fail with a schema error.

    NOTE(review): the name says 'with_cache' but the body exercises the
    ``schema`` parameter (its sibling test exercises ``cache``) — confirm the
    two names are not swapped.
    """
    with pytest.raises(SQLError, match=r".*Failed to set schema.*"):
        client.sql(query_str='select * from NotExisting', schema='NOT_EXISTING')
236+
237+
def test_query_with_cache(client):
    """A value written through the cache API is readable via SQL, whichever way
    the target is addressed: by schema name, by cache object, by cache name, or
    by numeric cache id.
    """
    key, expected = 42, 'Lorem ipsum'

    cache_name = test_query_with_cache.__name__.upper()
    schema_name = f'{cache_name}_schema'.upper()
    table_name = f'{cache_name}_table'.upper()

    # SQL-enabled cache: one table with a Long key and a String value column.
    query_entity = {
        'table_name': table_name,
        'key_field_name': 'KEY',
        'value_field_name': 'VALUE',
        'key_type_name': 'java.lang.Long',
        'value_type_name': 'java.lang.String',
        'query_indexes': [],
        'field_name_aliases': [],
        'query_fields': [
            {
                'name': 'KEY',
                'type_name': 'java.lang.Long',
                'is_key_field': True,
                'is_notnull_constraint_field': True,
            },
            {
                'name': 'VALUE',
                'type_name': 'java.lang.String',
            },
        ],
    }
    cache = client.create_cache({
        PROP_NAME: cache_name,
        PROP_SQL_SCHEMA: schema_name,
        PROP_CACHE_MODE: CacheMode.PARTITIONED,
        PROP_QUERY_ENTITIES: [query_entity],
    })

    cache.put(key, expected)

    # Each (keyword, value) pair is an alternative way to address the same cache.
    for kwarg, target in (
        ('schema', schema_name),
        ('cache', cache),
        ('cache', cache.name),
        ('cache', cache.cache_id),
    ):
        cursor = client.sql(f'select value from {table_name}', **{kwarg: target})
        row = next(cursor)
        assert row[0] == expected
0 commit comments