package com.dqops.connectors.duckdb;

import com.dqops.BaseTest;
- import com.dqops.core.filesystem.virtual.FolderTreeNode;
import com.dqops.core.secrets.SecretValueLookupContext;
import com.dqops.metadata.sources.*;
import com.dqops.metadata.sources.fileformat.FileFormatSpec;
import com.dqops.metadata.sources.fileformat.FileFormatSpecObjectMother;
- import com.dqops.metadata.storage.localfiles.sources.FileConnectionListImpl;
- import com.dqops.metadata.storage.localfiles.sources.FileConnectionWrapperImpl;
import com.dqops.metadata.storage.localfiles.userhome.UserHomeContext;
import com.dqops.metadata.storage.localfiles.userhome.UserHomeContextObjectMother;
import com.dqops.metadata.userhome.UserHome;
import com.dqops.sampledata.SampleCsvFileNames;
- import com.dqops.utils.serialization.YamlSerializer;
- import com.dqops.utils.serialization.YamlSerializerObjectMother;
+ import com.dqops.sampledata.SampleJsonFileNames;
+ import com.dqops.sampledata.SampleParquetFileNames;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class DuckdbSourceConnectionTest extends BaseTest {

    private DuckdbSourceConnection sut;
-     private FileConnectionWrapperImpl fileConnectionWrapper;
+     private ConnectionWrapperImpl connectionWrapper;
    private SecretValueLookupContext secretValueLookupContext;
+     private String connectionName = "test-connection";
+     private String tableSchemaName = "example_schema";
+     private String tableName = "example_table";

    @BeforeEach
    void setUp() {
-         String connectionName = "test-connection";
        this.sut = DuckdbSourceConnectionObjectMother.getDuckdbSourceConnection();
        UserHomeContext userHomeContext = UserHomeContextObjectMother.createTemporaryFileHomeContext(true);
        UserHome userHome = userHomeContext.getUserHome();
        userHome.getConnections().createAndAddNew(connectionName);
        secretValueLookupContext = new SecretValueLookupContext(userHome);
-         FileConnectionListImpl connections = (FileConnectionListImpl) userHome.getConnections();
-
-         FolderTreeNode connectionFolder = connections.getSourcesFolder().getOrAddDirectFolder(connectionName);
-         YamlSerializer yamlSerializer = YamlSerializerObjectMother.createNew();
-         this.fileConnectionWrapper = new FileConnectionWrapperImpl(connectionFolder, yamlSerializer);
+         this.connectionWrapper = new ConnectionWrapperImpl();
    }

    @Test
    void retrieveTableMetadata_fromTableSpecWithCsvFilePath_readColumnTypes() {
-         ConnectionSpec spec = DuckdbConnectionSpecObjectMother.createForCsv();
+         ConnectionSpec spec = DuckdbConnectionSpecObjectMother.createForFiles(DuckdbSourceFilesType.csv);
        this.sut.setConnectionSpec(spec);
-         this.fileConnectionWrapper.setSpec(spec);
-
-         String tableSchemaName = "example_schema";
-         String tableName = "example_table";
-         TableWrapper tableWrapper = fileConnectionWrapper.getTables().createAndAddNew(new PhysicalTableName(tableSchemaName, tableName));
-
+         this.connectionWrapper.setSpec(spec);
+         TableWrapper tableWrapper = connectionWrapper.getTables().createAndAddNew(new PhysicalTableName(tableSchemaName, tableName));
        FileFormatSpec fileFormatSpec = FileFormatSpecObjectMother.createForCsvFile(SampleCsvFileNames.continuous_days_one_row_per_day);
        tableWrapper.getSpec().setFileFormat(fileFormatSpec);
+         List<String> tableNames = connectionWrapper
+                 .getTables().toList().stream()
+                 .map(tw -> tw.getPhysicalTableName().toString())
+                 .collect(Collectors.toList());
+
+
+         this.sut.open(secretValueLookupContext);
+         List<TableSpec> tableSpecs = sut.retrieveTableMetadata(tableSchemaName, tableNames, connectionWrapper);
+
+
+         ColumnSpecMap firstTableColumns = tableSpecs.get(0).getColumns();
+         ColumnSpec idColumn = firstTableColumns.get("id:INTEGER");
+         Assertions.assertEquals("BIGINT", idColumn.getTypeSnapshot().getColumnType());
+         Assertions.assertTrue(idColumn.getTypeSnapshot().getNullable());
+
+         ColumnSpec dateColumn = firstTableColumns.get("date:LOCAL_DATE");
+         Assertions.assertEquals("DATE", dateColumn.getTypeSnapshot().getColumnType());

-         List<String> tableNames = fileConnectionWrapper
+         ColumnSpec valueColumn = firstTableColumns.get("value:STRING");
+         Assertions.assertEquals("VARCHAR", valueColumn.getTypeSnapshot().getColumnType());
+     }
+
+     @Test
+     void retrieveTableMetadata_fromTableSpecWithJsonFilePath_readColumnTypes() {
+         ConnectionSpec spec = DuckdbConnectionSpecObjectMother.createForFiles(DuckdbSourceFilesType.json);
+         this.sut.setConnectionSpec(spec);
+         this.connectionWrapper.setSpec(spec);
+         TableWrapper tableWrapper = connectionWrapper.getTables().createAndAddNew(new PhysicalTableName(tableSchemaName, tableName));
+         FileFormatSpec fileFormatSpec = FileFormatSpecObjectMother.createForJsonFile(SampleJsonFileNames.continuous_days_one_row_per_day);
+         tableWrapper.getSpec().setFileFormat(fileFormatSpec);
+         List<String> tableNames = connectionWrapper
                .getTables().toList().stream()
                .map(tw -> tw.getPhysicalTableName().toString())
                .collect(Collectors.toList());


        this.sut.open(secretValueLookupContext);
-         List<TableSpec> tableSpecs = sut.retrieveTableMetadata(tableSchemaName, tableNames, fileConnectionWrapper);
+         List<TableSpec> tableSpecs = sut.retrieveTableMetadata(tableSchemaName, tableNames, connectionWrapper);


        ColumnSpecMap firstTableColumns = tableSpecs.get(0).getColumns();
@@ -77,6 +97,36 @@ void retrieveTableMetadata_fromTableSpecWithCsvFilePath_readColumnTypes() {

        ColumnSpec valueColumn = firstTableColumns.get("value:STRING");
        Assertions.assertEquals("VARCHAR", valueColumn.getTypeSnapshot().getColumnType());
+     }
+
+     @Test
+     void retrieveTableMetadata_fromTableSpecWithParquetFilePath_readColumnTypes() {
+         ConnectionSpec spec = DuckdbConnectionSpecObjectMother.createForFiles(DuckdbSourceFilesType.parquet);
+         this.sut.setConnectionSpec(spec);
+         this.connectionWrapper.setSpec(spec);
+         TableWrapper tableWrapper = connectionWrapper.getTables().createAndAddNew(new PhysicalTableName(tableSchemaName, tableName));
+         FileFormatSpec fileFormatSpec = FileFormatSpecObjectMother.createForParquetFile(SampleParquetFileNames.continuous_days_one_row_per_day);
+         tableWrapper.getSpec().setFileFormat(fileFormatSpec);
+         List<String> tableNames = connectionWrapper
+                 .getTables().toList().stream()
+                 .map(tw -> tw.getPhysicalTableName().toString())
+                 .collect(Collectors.toList());
+
+
+         this.sut.open(secretValueLookupContext);
+         List<TableSpec> tableSpecs = sut.retrieveTableMetadata(tableSchemaName, tableNames, connectionWrapper);

+
+         ColumnSpecMap firstTableColumns = tableSpecs.get(0).getColumns();
+         ColumnSpec idColumn = firstTableColumns.get("id:INTEGER");
+         Assertions.assertEquals("BIGINT", idColumn.getTypeSnapshot().getColumnType());
+         Assertions.assertTrue(idColumn.getTypeSnapshot().getNullable());
+
+         ColumnSpec dateColumn = firstTableColumns.get("date:LOCAL_DATE");
+         Assertions.assertEquals("DATE", dateColumn.getTypeSnapshot().getColumnType());
+
+         ColumnSpec valueColumn = firstTableColumns.get("value:STRING");
+         Assertions.assertEquals("VARCHAR", valueColumn.getTypeSnapshot().getColumnType());
    }
+
}