Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
VOSpace INAF
vospace-parent
Commits
e0dc7f3f
Commit
e0dc7f3f
authored
Oct 20, 2021
by
Nicola Fulvio Calabria
Browse files
Added support for linked service table + tests
parent
ae07dc83
Pipeline
#4530
failed with stages
in 18 seconds
Changes
6
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
pom.xml
View file @
e0dc7f3f
...
...
@@ -10,4 +10,53 @@
</parent>
<version>
0.0.2-SNAPSHOT
</version>
<packaging>
jar
</packaging>
<properties>
<!-- File catalog repository directory -->
<init_database_scripts_path>
../../../vospace-file-catalog
</init_database_scripts_path>
</properties>
<build>
    <testResources>
        <!-- Filter test.properties so Maven substitutes the @...@ placeholders
             (e.g. init_database_scripts_path) at build time. -->
        <testResource>
            <directory>src/test/resources</directory>
            <filtering>true</filtering>
            <includes>
                <include>test.properties</include>
            </includes>
        </testResource>
        <!-- Copy every other test resource verbatim, without filtering. -->
        <testResource>
            <directory>src/test/resources</directory>
            <filtering>false</filtering>
            <includes>
                <include>**/*</include>
            </includes>
            <excludes>
                <exclude>test.properties</exclude>
            </excludes>
        </testResource>
    </testResources>
    <plugins>
        <!-- JaCoCo code coverage: attach the agent before tests,
             generate the report in the test phase. -->
        <plugin>
            <groupId>org.jacoco</groupId>
            <artifactId>jacoco-maven-plugin</artifactId>
            <executions>
                <execution>
                    <goals>
                        <goal>prepare-agent</goal>
                    </goals>
                </execution>
                <execution>
                    <id>report</id>
                    <phase>test</phase>
                    <goals>
                        <goal>report</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
</project>
\ No newline at end of file
src/main/java/it/inaf/oats/vospace/parent/persistence/LinkedServiceDAO.java
0 → 100644
View file @
e0dc7f3f
/*
* This file is part of vospace-rest
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package
it.inaf.oats.vospace.parent.persistence
;
import
com.fasterxml.jackson.databind.ObjectMapper
;
import
javax.sql.DataSource
;
import
org.springframework.beans.factory.annotation.Autowired
;
import
org.springframework.jdbc.core.JdbcTemplate
;
import
org.springframework.stereotype.Repository
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
@Repository
public class LinkedServiceDAO {

    private final JdbcTemplate jdbcTemplate;

    @Autowired
    public LinkedServiceDAO(DataSource dataSource) {
        jdbcTemplate = new JdbcTemplate(dataSource);
    }

    /**
     * Tells whether the given URL belongs to one of the registered linked
     * services, i.e. whether it starts with any {@code service_base_url}
     * stored in the {@code linked_service} table.
     *
     * @param targetUrl the URL to check; it is compared against
     *                  {@code service_base_url || '%'} using LIKE
     * @return true if the URL starts with a known linked-service base URL
     */
    public boolean isLinkedServiceUrl(String targetUrl) {
        String sql = " SELECT COUNT(*) > 0\n"
                + "FROM linked_service\n"
                + "WHERE ? LIKE service_base_url || '%'";

        // COUNT(*) always yields exactly one row, so queryForObject is safe;
        // Boolean.TRUE.equals guards against a (theoretical) null result.
        return Boolean.TRUE.equals(
                jdbcTemplate.queryForObject(sql, Boolean.class, targetUrl));
    }
}
src/test/java/it/inaf/oats/vospace/parent/persistence/DataSourceConfig.java
0 → 100644
View file @
e0dc7f3f
/*
* This file is part of vospace-rest
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package
it.inaf.oats.vospace.parent.persistence
;
import
com.opentable.db.postgres.embedded.EmbeddedPostgres
;
import
com.opentable.db.postgres.embedded.PgBinaryResolver
;
import
com.opentable.db.postgres.embedded.UncompressBundleDirectoryResolver
;
import
java.io.File
;
import
java.io.IOException
;
import
java.io.InputStream
;
import
java.nio.file.Files
;
import
java.sql.Connection
;
import
java.sql.SQLException
;
import
java.sql.Statement
;
import
java.util.ArrayList
;
import
java.util.Arrays
;
import
java.util.List
;
import
java.util.regex.Matcher
;
import
java.util.regex.Pattern
;
import
javax.sql.DataSource
;
import
static
org
.
junit
.
jupiter
.
api
.
Assertions
.
assertTrue
;
import
org.springframework.beans.factory.annotation.Value
;
import
org.springframework.boot.test.context.TestConfiguration
;
import
org.springframework.context.annotation.Bean
;
import
org.springframework.context.annotation.Primary
;
import
org.springframework.context.annotation.Scope
;
import
org.springframework.core.io.ClassPathResource
;
/**
* Generates a DataSource that can be used for testing DAO classes. It loads an
* embedded Postgres database and fills it using the data from
* vospace-transfer-service repository (folder must exists; it location can be
* configured using the init_database_scripts_path in test.properties).
*/
/**
 * Generates a DataSource that can be used for testing DAO classes. It loads an
 * embedded Postgres database and fills it using the data from the
 * vospace-file-catalog repository (folder must exist; its location can be
 * configured using the init_database_scripts_path property in test.properties).
 */
@TestConfiguration
public class DataSourceConfig {

    @Value("${init_database_scripts_path}")
    private String scriptPath;

    /**
     * Using the prototype scope we are generating a different database in each
     * test.
     *
     * @return a DataSource backed by a freshly initialized embedded Postgres
     * @throws Exception if the database cannot be started or initialized
     */
    @Bean
    @Scope("prototype")
    @Primary
    public DataSource dataSource() throws Exception {
        DataSource embeddedPostgresDS = EmbeddedPostgres.builder()
                .setPgDirectoryResolver(new UncompressBundleDirectoryResolver(
                        new CustomPostgresBinaryResolver()))
                .start()
                .getPostgresDatabase();
        initDatabase(embeddedPostgresDS);
        return embeddedPostgresDS;
    }

    private class CustomPostgresBinaryResolver implements PgBinaryResolver {

        /**
         * Loads a specific embedded Postgres version bundled on the classpath
         * as postgres-&lt;system&gt;-&lt;architecture&gt;.txz.
         */
        @Override
        public InputStream getPgBinary(String system, String architecture) throws IOException {
            ClassPathResource resource = new ClassPathResource(
                    String.format("postgres-%s-%s.txz", system.toLowerCase(), architecture));
            return resource.getInputStream();
        }
    }

    /**
     * Loads the SQL initialization scripts from the vospace-file-catalog repo
     * directory (sorted alphabetically), then applies test-data.sql, executing
     * every statement on a single connection.
     */
    private void initDatabase(DataSource dataSource) throws Exception {
        try (Connection conn = dataSource.getConnection()) {

            File currentDir = new File(DataSourceConfig.class.getClassLoader()
                    .getResource(".").getFile());
            File scriptDir = currentDir.toPath().resolve(scriptPath)
                    .toFile().getCanonicalFile();

            assertTrue(scriptDir.exists(),
                    "DAO tests require " + scriptDir.getAbsolutePath()
                    + " to exist.\n"
                    + "Please clone the repository from https://www.ict.inaf.it/gitlab/vospace/vospace-file-catalog.git");

            // load all sql files in vospace-file-catalog repo
            File[] repoScripts = scriptDir.listFiles(f -> f.getName().endsWith(".sql"));
            Arrays.sort(repoScripts); // sort alphabetically

            // add test-data.sql
            List<File> scripts = new ArrayList<>(Arrays.asList(repoScripts));
            scripts.add(new ClassPathResource("test-data.sql").getFile());

            for (File script : scripts) {
                String scriptContent = Files.readString(script.toPath());
                for (String sql : splitScript(scriptContent)) {
                    executeSql(conn, replaceDollarQuoting(sql));
                }
            }
        }
    }

    /**
     * Spring ScriptUtils is not able to correctly split the SQL statements if a
     * function definition contains semicolon characters, so this method is used
     * instead of it. Semicolons inside $func$ ... $func$ bodies do not
     * terminate a statement.
     */
    private List<String> splitScript(String script) {
        List<String> parts = new ArrayList<>();
        StringBuilder sb = new StringBuilder();
        boolean insideFunc = false;
        for (int i = 0; i < script.length(); i++) {
            char c = script.charAt(i);
            sb.append(c);
            // True when the text consumed so far ends with the $func$ tag.
            // Note: substring(i - 5, i + 1) includes the current character;
            // the previous substring(i - 6, i) detected the tag one character
            // late and missed a ';' placed right after a closing $func$.
            boolean atFuncTag = i >= 5
                    && "$func$".equals(script.substring(i - 5, i + 1));
            if (atFuncTag) {
                insideFunc = !insideFunc;
            } else if (!insideFunc && c == ';') {
                parts.add(sb.toString());
                sb = new StringBuilder();
            }
        }
        return parts;
    }

    /** Executes a single SQL statement on the given connection. */
    private void executeSql(Connection conn, String sqlStatement) throws SQLException {
        try (Statement stat = conn.createStatement()) {
            stat.execute(sqlStatement);
        }
    }

    /**
     * It seems that dollar quoting (used in UDF) is broken in JDBC. Replacing
     * it with single quotes solves the problem. We replace the quoting here
     * instead of inside the original files because dollar quoting provides a
     * better visibility.
     */
    private String replaceDollarQuoting(String scriptContent) {
        if (scriptContent.contains("$func$")) {
            String func = extractFunctionDefinition(scriptContent);
            String originalFunction = "$func$" + func + "$func$";
            // single-quote the body, escaping embedded quotes by doubling them
            String newFunction = "'" + func.replaceAll("'", "''") + "'";
            scriptContent = scriptContent.replace(originalFunction, newFunction);
        }
        return scriptContent;
    }

    /**
     * Returns the text between the first pair of $func$ delimiters.
     *
     * @throws IllegalArgumentException if the content has no $func$ section
     */
    private String extractFunctionDefinition(String scriptContent) {
        Pattern pattern = Pattern.compile("\\$func\\$(.*?)\\$func\\$", Pattern.DOTALL);
        Matcher matcher = pattern.matcher(scriptContent);
        if (matcher.find()) {
            return matcher.group(1);
        }
        throw new IllegalArgumentException(scriptContent + " doesn't contain $func$");
    }
}
src/test/java/it/inaf/oats/vospace/parent/persistence/LinkedServiceDAOTest.java
0 → 100644
View file @
e0dc7f3f
/*
* This file is part of vospace-rest
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package
it.inaf.oats.vospace.parent.persistence
;
import
javax.sql.DataSource
;
import
static
org
.
junit
.
jupiter
.
api
.
Assertions
.
assertTrue
;
import
org.junit.jupiter.api.BeforeEach
;
import
org.junit.jupiter.api.Test
;
import
org.junit.jupiter.api.extension.ExtendWith
;
import
org.springframework.beans.factory.annotation.Autowired
;
import
org.springframework.test.context.ContextConfiguration
;
import
org.springframework.test.context.TestPropertySource
;
import
org.springframework.test.context.junit.jupiter.SpringExtension
;
import
static
org
.
junit
.
jupiter
.
api
.
Assertions
.
assertFalse
;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {DataSourceConfig.class})
@TestPropertySource(locations = "classpath:test.properties")
public class LinkedServiceDAOTest {

    @Autowired
    private DataSource dataSource;

    private LinkedServiceDAO dao;

    /** Builds a fresh DAO against the embedded test database before each test. */
    @BeforeEach
    public void init() {
        dao = new LinkedServiceDAO(dataSource);
    }

    @Test
    void testIsLinkedService() {
        // URL under the base URL registered in test-data.sql -> recognized
        String registeredUrl = "http://archives.ia2.inaf.it/files/aao/pippofile.fits.gz";
        // URL not matching any registered service base URL -> rejected
        String unrelatedUrl = "http://noportal.ia2.inaf.it/files/nop/nopippofile.tar.gz";

        assertTrue(dao.isLinkedServiceUrl(registeredUrl));
        assertFalse(dao.isLinkedServiceUrl(unrelatedUrl));
    }
}
src/test/resources/test-data.sql
0 → 100644
View file @
e0dc7f3f
-- Linked services (used by LinkedServiceDAOTest)
INSERT INTO linked_service (service_base_url) VALUES ('http://archives.ia2.inaf.it/files/aao');

-- Storages (ids 1..5, in insertion order)
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('cold', '/ia2_tape/users', NULL, 'tape-server');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('hot', '/mnt/hot_storage/users', NULL, 'server');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home/vospace/upload', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('portal', NULL, '/files/lbt', 'archive.lbto.org');

-- Locations (ids 1..4), referencing the storages above
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 1, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 2, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('user', 4, 4);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('portal', 5, 5);

-- Node tree; restart the sequence so parent_path ltree ids are predictable
DELETE FROM node;
ALTER SEQUENCE node_node_id_seq RESTART WITH 1;

INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, location_id, is_public) VALUES (NULL, NULL, '', 'container', '0', 1, true);
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('', NULL, 'test1', 'container', 'user1', '{"group1","group2"}', '{"group2"}', 1); -- /test1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, location_id) VALUES ('2', '', 'f1', 'container', 'user1', 1); -- /test1/f1 (rel: /f1)
INSERT INTO node (parent_path, parent_relative_path, name, os_name, type, creator_id, location_id, quota) VALUES ('2.3', '3', 'f2_renamed', 'f2', 'container', 'user1', 1, 50000); -- /test1/f1/f2_renamed (rel: /f1/f2)
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, location_id, content_md5, content_length) VALUES ('2.3.4', '3.4', 'f3', 'data', 'user1', 1, '<md5sum>', 4000); -- /test1/f1/f2_renamed/f3 (rel: /f1/f2/f3)
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('', NULL, 'test2', 'container', 'user2', true, 1); -- /test2
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('6', '', 'f4', 'container', 'user2', true, 1); -- /test2/f4 (rel: /f4)
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('6', '', 'f5', 'container', 'user2', true, 1); -- /test2/f5 (rel: /f5)
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('', NULL, 'test3', 'container', 'user3', false, 3); -- /test3
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('9', '', 'm1', 'container', 'user3', false, 3); -- /test3/m1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('9.10', '', 'm2', 'container', 'user3', false, 3); -- /test3/m1/m2
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('', NULL, 'test4', 'container', 'user3', false, 3); -- /test4
INSERT INTO node (parent_path, parent_relative_path, name, sticky, type, creator_id, is_public, location_id) VALUES ('9', '', 'mstick', true, 'container', 'user3', false, 3); -- /test3/mstick
INSERT INTO node (parent_path, parent_relative_path, name, job_id, type, creator_id, is_public, location_id) VALUES ('9', '', 'mbusy', 'job1234', 'container', 'user3', false, 3); -- /test3/mbusy
INSERT INTO node (parent_path, parent_relative_path, name, async_trans, type, creator_id, is_public, location_id) VALUES ('9', '', 'masynctrans', true, 'container', 'user3', false, 3); -- /test3/masynctrans
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, is_public, location_id) VALUES ('9', '', 'asyncloc', 'container', 'user3', false, 1); -- /test3/asyncloc
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_write, group_read, is_public, location_id) VALUES ('9', '', 'group1', 'container', 'user3', '{"group1"}', '{"group1"}', false, 3); -- /test3/group1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_write, group_read, is_public, location_id, target) VALUES ('9.10', '', 'link1', 'link', 'user3', '{"group1"}', '{"group1"}', false, 3, 'vos://authority/dummy/link'); -- /test3/m1/link1
INSERT INTO node (parent_path, parent_relative_path, name, sticky, type, creator_id, group_write, group_read, is_public, location_id) VALUES ('', NULL, 'mycontainer', true, 'container', 'user3', '{"group1"}', '{"group1"}', false, 3); -- /mycontainer
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_write, group_read, is_public, location_id) VALUES ('19', '', 'container1', 'container', 'user3', '{"group1"}', '{"group1"}', false, 3); -- /mycontainer/container1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_write, group_read, is_public, location_id) VALUES ('19', '', 'destination2', 'container', 'user3', '{"group1"}', '{"group1"}', false, 3); -- /mycontainer/destination2
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_write, group_read, is_public, location_id) VALUES ('19.21', '20', 'control', 'container', 'user3', '{"group1"}', '{"group1"}', false, 3); -- /mycontainer/destination2/control

-- Jobs
DELETE FROM job;

INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo1', 'user1', 'pullFromVoSpace', 'ARCHIVED', NULL, NULL, '2011-06-22 19:10:25', NULL, NULL);
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo2', 'user1', 'pullToVoSpace', 'PENDING', NULL, NULL, '2012-06-22 19:10:25', NULL, NULL);
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo3', 'user1', 'pullFromVoSpace', 'QUEUED', NULL, NULL, '2013-06-22 19:10:25', '{"transfer": {"view": {"uri": "ivo://ia2.inaf.it/vospace/views#zip"}}}', NULL);
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo4', 'user2', 'copyNode', 'PENDING', NULL, NULL, '2014-06-22 19:10:25', NULL, NULL);
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo5', 'user1', 'pushToVoSpace', 'EXECUTING', NULL, NULL, '2015-06-22 19:10:25', NULL, NULL);
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo6', 'user2', 'pullFromVoSpace', 'PENDING', NULL, NULL, '2015-06-22 19:10:25', NULL, NULL);
src/test/resources/test.properties
0 → 100644
View file @
e0dc7f3f
# File catalog repository directory (filled by pom.xml, overridable passing environment variable)
init_database_scripts_path=@init_database_scripts_path@
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment