/*
* This file is part of vospace-rest
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.oats.vospace.parent.persistence;
import com.opentable.db.postgres.embedded.EmbeddedPostgres;
import com.opentable.db.postgres.embedded.PgBinaryResolver;
import com.opentable.db.postgres.embedded.UncompressBundleDirectoryResolver;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.sql.DataSource;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Scope;
import org.springframework.core.io.ClassPathResource;
/**
 * Generates a DataSource that can be used for testing DAO classes. It loads an
 * embedded Postgres database and fills it using the data from the
 * vospace-transfer-service repository (the folder must exist; its location can
 * be configured using the init_database_scripts_path in test.properties).
 */
@TestConfiguration
public class DataSourceConfig {

    /**
     * Matches one dollar-quoted function body ($func$ ... $func$). Non-greedy
     * and DOTALL so the body may span multiple lines. Compiled once: Pattern
     * instances are immutable and thread-safe, so a static constant avoids
     * recompiling on every statement.
     */
    private static final Pattern FUNC_PATTERN =
            Pattern.compile("\\$func\\$(.*?)\\$func\\$", Pattern.DOTALL);

    /** Location of the vospace-file-catalog SQL scripts, relative to the test classpath root. */
    @Value("${init_database_scripts_path}")
    private String scriptPath;

    /**
     * Using the prototype scope we are generating a different database in each
     * test.
     *
     * @return a DataSource backed by a freshly started embedded Postgres,
     *         already initialized with the repository schema and test data
     * @throws Exception if the embedded server cannot start or a script fails
     */
    @Bean
    @Scope("prototype")
    @Primary
    public DataSource dataSource() throws Exception {
        DataSource embeddedPostgresDS = EmbeddedPostgres.builder()
                .setPgDirectoryResolver(new UncompressBundleDirectoryResolver(new CustomPostgresBinaryResolver()))
                .start().getPostgresDatabase();
        initDatabase(embeddedPostgresDS);
        return embeddedPostgresDS;
    }

    private class CustomPostgresBinaryResolver implements PgBinaryResolver {

        /**
         * Loads the embedded Postgres binary bundle for the given platform
         * from the classpath (e.g. postgres-linux-x86_64.txz).
         */
        @Override
        public InputStream getPgBinary(String system, String architecture) throws IOException {
            ClassPathResource resource = new ClassPathResource(
                    String.format("postgres-%s-%s.txz", system.toLowerCase(), architecture));
            return resource.getInputStream();
        }
    }

    /**
     * Loads SQL scripts for database initialization from the
     * vospace-transfer-service repo directory, executes them in alphabetical
     * order, then applies the local test-data.sql fixture.
     */
    private void initDatabase(DataSource dataSource) throws Exception {
        try (Connection conn = dataSource.getConnection()) {
            File currentDir = new File(DataSourceConfig.class.getClassLoader().getResource(".").getFile());
            File scriptDir = currentDir.toPath().resolve(scriptPath).toFile().getCanonicalFile();
            assertTrue(scriptDir.exists(), "DAO tests require " + scriptDir.getAbsolutePath() + " to exists.\n"
                    + "Please clone the repository from https://www.ict.inaf.it/gitlab/vospace/vospace-file-catalog.git");
            // load all sql files in vospace-file-catalog repo
            File[] repoScripts = scriptDir.listFiles(f -> f.getName().endsWith(".sql"));
            // listFiles returns null if the path is not a directory or an I/O
            // error occurs; fail with a clear message instead of an NPE.
            if (repoScripts == null) {
                throw new IllegalStateException("Unable to list SQL scripts in " + scriptDir.getAbsolutePath());
            }
            Arrays.sort(repoScripts); // sort alphabetically
            // add test-data.sql
            List<File> scripts = new ArrayList<>(Arrays.asList(repoScripts));
            scripts.add(new ClassPathResource("test-data.sql").getFile());
            for (File script : scripts) {
                String scriptContent = Files.readString(script.toPath());
                for (String sql : splitScript(scriptContent)) {
                    executeSql(conn, replaceDollarQuoting(sql));
                }
            }
        }
    }

    /**
     * Splits a SQL script into individual statements. Spring ScriptUtils is
     * not able to correctly split the SQL statements if a function definition
     * contains semicolon characters, so this method is used instead of it:
     * semicolons between $func$ delimiters are not treated as statement
     * separators.
     */
    private List<String> splitScript(String script) {
        List<String> parts = new ArrayList<>();
        StringBuilder sb = new StringBuilder();
        boolean insideFunc = false;
        for (int i = 0; i < script.length(); i++) {
            char c = script.charAt(i);
            sb.append(c);
            // A "$func$" delimiter ends at the previous character when the six
            // characters before position i spell it out. Using i >= 6 (not
            // i > 6) fixes an off-by-one that missed a delimiter at offset 0.
            boolean delimiterJustEnded = i >= 6 && "$func$".equals(script.substring(i - 6, i));
            if (insideFunc) {
                if (delimiterJustEnded) {
                    insideFunc = false;
                    // The current character already lies outside the function
                    // body, so a ';' here terminates the statement. The
                    // original code dropped this case and merged statements.
                    if (c == ';') {
                        parts.add(sb.toString());
                        sb = new StringBuilder();
                    }
                }
            } else {
                if (delimiterJustEnded) {
                    insideFunc = true;
                } else if (c == ';') {
                    parts.add(sb.toString());
                    sb = new StringBuilder();
                }
            }
        }
        return parts;
    }

    /** Runs a single SQL statement on the given connection. */
    private void executeSql(Connection conn, String sqlStatement) throws SQLException {
        try (Statement stat = conn.createStatement()) {
            stat.execute(sqlStatement);
        }
    }

    /**
     * It seems that dollar quoting (used in UDF) is broken in JDBC. Replacing
     * it with single quotes solves the problem. We replace the quoting here
     * instead of inside the original files because dollar quoting provides a
     * better visibility. Loops so that every $func$-quoted body in the
     * statement is converted, not just the first one.
     */
    private String replaceDollarQuoting(String scriptContent) {
        while (scriptContent.contains("$func$")) {
            String func = extractFunctionDefinition(scriptContent);
            String originalFunction = "$func$" + func + "$func$";
            // Literal replace (not replaceAll): the body may contain regex
            // metacharacters. Single quotes inside the body are doubled.
            String newFunction = "'" + func.replace("'", "''") + "'";
            scriptContent = scriptContent.replace(originalFunction, newFunction);
        }
        return scriptContent;
    }

    /**
     * Returns the body of the first $func$ ... $func$ pair in the statement.
     *
     * @throws IllegalArgumentException if no complete pair is present
     */
    private String extractFunctionDefinition(String scriptContent) {
        Matcher matcher = FUNC_PATTERN.matcher(scriptContent);
        if (matcher.find()) {
            return matcher.group(1);
        }
        throw new IllegalArgumentException(scriptContent + " doesn't contain $func$");
    }
}