Commits on Source (14)
......@@ -15,8 +15,13 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.web.client.RestTemplate;
import org.springframework.context.annotation.Import;
import it.inaf.oats.vospace.parent.persistence.LinkedServiceDAO;
import it.inaf.ia2.aa.ServiceLocator;
import it.inaf.ia2.aa.ServletRapClient;
@SpringBootApplication
@Import(LinkedServiceDAO.class)
public class FileServiceApplication {
@Value("${jwks_uri}")
......@@ -39,6 +44,11 @@ public class FileServiceApplication {
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public ServletRapClient servletRapClient() {
return (ServletRapClient) ServiceLocator.getInstance().getRapClient();
}
@Bean
public RestTemplate restTemplate() {
......
......@@ -12,6 +12,7 @@ import it.inaf.ia2.transfer.service.ArchiveService;
import it.inaf.oats.vospace.exception.PermissionDeniedException;
import java.io.File;
import java.util.concurrent.CompletableFuture;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
......@@ -29,6 +30,9 @@ public class ArchiveFileController extends AuthenticatedFileController {
@Autowired
private ArchiveService archiveService;
@Autowired
private HttpServletRequest servletRequest;
@Autowired
private HttpServletResponse response;
......@@ -42,10 +46,10 @@ public class ArchiveFileController extends AuthenticatedFileController {
job.setPrincipal(getPrincipal());
job.setJobId(archiveRequest.getJobId());
job.setType(type);
job.setVosPaths(archiveRequest.getPaths());
job.setEntryDescriptors(archiveRequest.getEntryDescriptors());
CompletableFuture.runAsync(() -> {
handleFileJob(() -> archiveService.createArchive(job), job.getJobId());
handleFileJob(() -> archiveService.createArchive(job, servletRequest), job.getJobId());
});
HttpHeaders headers = new HttpHeaders();
......
......@@ -5,13 +5,14 @@
*/
package it.inaf.ia2.transfer.controller;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
import java.util.List;
public class ArchiveRequest {
private String type;
private String jobId;
private List<String> paths;
private List<ArchiveEntryDescriptor> entryDescriptors;
public String getType() {
return type;
......@@ -29,11 +30,11 @@ public class ArchiveRequest {
this.jobId = jobId;
}
public List<String> getPaths() {
return paths;
public List<ArchiveEntryDescriptor> getEntryDescriptors() {
return entryDescriptors;
}
public void setPaths(List<String> paths) {
this.paths = paths;
public void setEntryDescriptors(List<ArchiveEntryDescriptor> entryDescriptors) {
this.entryDescriptors = entryDescriptors;
}
}
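For orientation, here is a minimal sketch of building the reworked request, mirroring the controller test further below (the single-argument ArchiveEntryDescriptor constructor is taken from that test; the values are illustrative):
ArchiveRequest request = new ArchiveRequest();
request.setJobId("123");
request.setType("TAR");
request.setEntryDescriptors(List.of(
        new ArchiveEntryDescriptor("/path/to/file1"),
        new ArchiveEntryDescriptor("/path/to/file2")));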
......@@ -31,9 +31,6 @@ public class GetFileController extends FileController {
@Autowired
private FileDAO fileDAO;
@Autowired
private JobDAO jobDAO;
@Autowired
private AuthorizationService authorizationService;
......
......@@ -44,10 +44,10 @@ public class FileDAO {
String sql = "SELECT n.node_id, is_public, group_read, group_write, creator_id, async_trans,\n"
+ "content_type, content_encoding, content_length, content_md5, name, n.location_id,\n"
+ "accept_views, provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "accept_views, provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = creator_id) AS username, n.job_id,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, ? AS vos_path, false AS is_directory,\n"
+ "type = 'link' AS is_link,\n"
+ "base_path, ? AS vos_path, false AS is_directory,\n"
+ "n.type = 'link' AS is_link, n.target,\n"
+ "fs_path \n"
+ "FROM node n\n"
+ "JOIN location l ON (n.location_id IS NOT NULL AND n.location_id = l.location_id) OR (n.location_id IS NULL AND l.location_id = ?)\n"
......@@ -176,11 +176,11 @@ public class FileDAO {
String sql = "SELECT n.node_id, n.is_public, n.group_read, n.group_write, n.creator_id, n.async_trans, n.fs_path,\n"
+ "n.content_type, n.content_encoding, n.content_length, n.content_md5,\n"
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "n.accept_views, n.provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "base_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "n.type = 'container' AS is_directory, n.name, n.location_id, n.job_id,\n"
+ "n.type = 'link' AS is_link, l.location_type\n"
+ "n.type = 'link' AS is_link, n.target, l.location_type\n"
+ "FROM node n\n"
+ "JOIN node p ON p.path @> n.path\n"
+ "LEFT JOIN location l ON l.location_id = n.location_id\n"
......@@ -209,11 +209,11 @@ public class FileDAO {
String sql = "SELECT n.node_id, n.is_public, n.group_read, n.group_write, n.creator_id, n.async_trans, n.fs_path,\n"
+ "n.content_type, n.content_encoding, n.content_length, n.content_md5,\n"
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "n.accept_views, n.provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "base_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "n.type = 'container' AS is_directory, n.name, n.location_id, n.job_id,\n"
+ "n.type = 'link' AS is_link, l.location_type\n"
+ "n.type = 'link' AS is_link, n.target, l.location_type\n"
+ "FROM node n\n"
+ "JOIN node p ON p.path @> n.path\n"
+ "LEFT JOIN location l ON l.location_id = n.location_id\n"
......@@ -281,19 +281,23 @@ public class FileDAO {
fi.setOwnerId(rs.getString("creator_id"));
fi.setAsyncTrans(rs.getBoolean("async_trans"));
fi.setAcceptViews(toList(rs.getArray("accept_views")));
fi.setProvideViews(toList(rs.getArray("provide_views")));
fi.setVirtualParent(rs.getBoolean("virtual_parent"));
fi.setProvideViews(toList(rs.getArray("provide_views")));
fi.setVirtualPath(rs.getString("vos_path"));
fi.setVirtualName(rs.getString("name"));
fi.setContentEncoding(rs.getString("content_encoding"));
long contentLength = rs.getLong("content_length");
if (!rs.wasNull()) {
fi.setContentLength(contentLength);
}
fi.setContentMd5(rs.getString("content_md5"));
fi.setContentType(rs.getString("content_type"));
fi.setDirectory(rs.getBoolean("is_directory"));
fi.setLink(rs.getBoolean("is_link"));
if (rs.getBoolean("is_link")) {
    fi.setLink(true);
    fi.setTarget(rs.getString("target"));
} else {
    fi.setLink(false);
}
fi.setJobId(rs.getString("job_id"));
int locationId = rs.getInt("location_id");
if (!rs.wasNull()) {
......@@ -340,32 +344,5 @@ public class FileDAO {
Path completeFsPath = Path.of(fsPath);
fi.setFsPath(completeFsPath.toString());
}
private void fillOsPath(FileInfo fi, ResultSet rs) throws SQLException {
String basePath = rs.getString("base_path");
if (basePath == null) {
return;
}
String osPath = rs.getString("os_path");
if (osPath.startsWith("/")) {
osPath = osPath.substring(1);
}
Path completeOsPath = Path.of(basePath);
boolean asyncLocation = "async".equals(rs.getString("location_type"));
if (asyncLocation) {
String username = rs.getString("username");
completeOsPath = completeOsPath.resolve(username).resolve("retrieve");
} else if (fi.hasVirtualParent()) {
completeOsPath = completeOsPath.resolve(fi.getOwnerId());
}
completeOsPath = completeOsPath.resolve(osPath);
fi.setOsPath(completeOsPath.toString());
}
}
}
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.persistence;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
@Repository
public class LocationDAO {
private final JdbcTemplate jdbcTemplate;
@Autowired
public LocationDAO(DataSource fileCatalogDatasource) {
this.jdbcTemplate = new JdbcTemplate(fileCatalogDatasource);
}
public Map<Integer, String> getPortalLocationUrls() {
String sql = "SELECT location_id, hostname, base_url\n"
+ "FROM location l\n"
+ "JOIN storage s ON s.storage_id = l.storage_dest_id\n"
+ "WHERE location_type = 'portal'";
return jdbcTemplate.query(sql, rs -> {
Map<Integer, String> locationUrls = new HashMap<>();
while (rs.next()) {
int locationId = rs.getInt("location_id");
String hostname = rs.getString("hostname");
String baseUrl = rs.getString("base_url");
String url = "http://" + hostname + baseUrl;
locationUrls.put(locationId, url);
}
return locationUrls;
});
}
}
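A possible usage sketch (names taken from the surrounding diff; the URL concatenation mirrors the portal download code that this changeset removes from ArchiveService):
// Illustrative: resolve the download URL of a portal-hosted file.
Map<Integer, String> portalUrls = locationDAO.getPortalLocationUrls();
String baseUrl = portalUrls.get(fileInfo.getLocationId()); // may be null for unknown locations
String url = baseUrl + "/" + fileInfo.getVirtualName();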
......@@ -10,18 +10,17 @@ import java.util.List;
public class FileInfo {
private int nodeId;
private String osPath;
private int nodeId;
private String virtualPath;
private String virtualName;
private String fsPath;
// actualBasePath differs from the base path stored in the db because of
// location-type-dependent manipulations performed by FileDAO
private String actualBasePath;
private boolean isPublic;
private boolean virtualParent;
private String actualBasePath;
private boolean isPublic;
private boolean directory;
private boolean link;
private String target;
private List<String> groupRead;
private List<String> groupWrite;
private String ownerId;
......@@ -36,6 +35,14 @@ public class FileInfo {
private String locationType;
private String jobId;
public String getTarget() {
return target;
}
public void setTarget(String target) {
this.target = target;
}
public int getNodeId() {
return nodeId;
}
......@@ -76,14 +83,6 @@ public class FileInfo {
this.nodeId = nodeId;
}
public String getOsPath() {
return osPath;
}
public void setOsPath(String osPath) {
this.osPath = osPath;
}
public String getVirtualPath() {
return virtualPath;
}
......@@ -124,14 +123,6 @@ public class FileInfo {
this.link = link;
}
public boolean hasVirtualParent() {
return virtualParent;
}
public void setVirtualParent(boolean virtualParent) {
this.virtualParent = virtualParent;
}
public List<String> getGroupRead() {
return groupRead;
}
......
......@@ -6,6 +6,7 @@
package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
import java.util.List;
public class ArchiveJob {
......@@ -26,17 +27,17 @@ public class ArchiveJob {
}
}
private List<String> vosPaths;
private List<ArchiveEntryDescriptor> entryDescriptors;
private TokenPrincipal tokenPrincipal;
private String jobId;
private Type type;
public List<String> getVosPaths() {
return vosPaths;
public List<ArchiveEntryDescriptor> getEntryDescriptors() {
return entryDescriptors;
}
public void setVosPaths(List<String> vosPaths) {
this.vosPaths = vosPaths;
public void setEntryDescriptors(List<ArchiveEntryDescriptor> entryDescriptors) {
this.entryDescriptors = entryDescriptors;
}
public TokenPrincipal getPrincipal() {
......@@ -62,4 +63,5 @@ public class ArchiveJob {
public void setType(Type type) {
this.type = type;
}
}
......@@ -5,14 +5,18 @@
*/
package it.inaf.ia2.transfer.service;
import it.inaf.ia2.aa.ServletRapClient;
import it.inaf.ia2.rap.client.call.TokenExchangeRequest;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import it.inaf.oats.vospace.datamodel.NodeUtils;
import it.inaf.oats.vospace.exception.InternalFaultException;
import it.inaf.oats.vospace.exception.PermissionDeniedException;
import it.inaf.oats.vospace.exception.QuotaExceededException;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
import it.inaf.oats.vospace.parent.persistence.LinkedServiceDAO;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
......@@ -23,11 +27,13 @@ import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.security.Principal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.annotation.PostConstruct;
import javax.servlet.http.HttpServletRequest;
import net.ivoa.xml.uws.v1.ExecutionPhase;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarOutputStream;
......@@ -51,7 +57,7 @@ public class ArchiveService {
private FileDAO fileDAO;
@Autowired
private LocationDAO locationDAO;
private LinkedServiceDAO linkedServiceDAO;
@Autowired
private JobDAO jobDAO;
......@@ -62,8 +68,8 @@ public class ArchiveService {
@Autowired
private RestTemplate restTemplate;
@Value("${upload_location_id}")
private int uploadLocationId;
@Autowired
private ServletRapClient rapClient;
// Directory containing temporary files generated by jobs.
@Value("${generated.dir}")
......@@ -84,7 +90,7 @@ public class ArchiveService {
}
}
public <O extends OutputStream, E> void createArchive(ArchiveJob job) {
public <O extends OutputStream, E> void createArchive(ArchiveJob job, HttpServletRequest servletRequest) {
jobDAO.updateJobPhase(ExecutionPhase.EXECUTING, job.getJobId());
......@@ -94,43 +100,100 @@ public class ArchiveService {
// TODO: check total size limit
File archiveFile = getArchiveFile(job);
String commonParent = getCommonParent(job.getVosPaths());
List<ArchiveEntryDescriptor> entryDescriptors = job.getEntryDescriptors();
// support directory used to generate folders inside tar files (the path is redefined each time by the TarEntry class)
File supportDir = Files.createTempDirectory("dir").toFile();
// it will be initialized only when necessary
Map<Integer, String> portalLocationUrls = null;
try (ArchiveHandler<O, E> handler = getArchiveHandler(archiveFile, job.getType())) {
fillArchive(entryDescriptors, supportDir,
job.getPrincipal(), servletRequest, handler);
} finally {
FileSystemUtils.deleteRecursively(supportDir);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
private <O extends OutputStream, E> void fillArchive(
List<ArchiveEntryDescriptor> entryDescriptors,
File supportDir, TokenPrincipal tokenPrincipal,
HttpServletRequest servletRequest, ArchiveHandler<O, E> handler) throws IOException {
try ( ArchiveHandler<O, E> handler = getArchiveHandler(archiveFile, job.getType())) {
List<ArchiveEntryDescriptor> noTargetEntryDescriptors
= entryDescriptors.stream().filter(ed -> !ed.isPointingToAnotherNode())
.collect(Collectors.toList());
for (FileInfo fileInfo : fileDAO.getArchiveFileInfos(job.getVosPaths())) {
// Start with archive entry descriptors that don't point to another node
List<String> vosPaths = noTargetEntryDescriptors.stream()
.map(ed -> ed.getVosPath())
.collect(Collectors.toList());
String commonParent = getCommonParent(entryDescriptors);
String relPath = fileInfo.getVirtualPath().substring(commonParent.length());
if (!vosPaths.isEmpty()) {
for (FileInfo fileInfo : fileDAO.getArchiveFileInfos(vosPaths)) {
if (fileInfo.isDirectory()) {
handler.putNextEntry(supportDir, relPath);
continue;
}
String relPath = fileInfo.getVirtualPath().substring(commonParent.length());
if (fileInfo.getLocationId() != null && "portal".equals(fileInfo.getLocationType())) {
// remote file
if (portalLocationUrls == null) {
portalLocationUrls = locationDAO.getPortalLocationUrls();
}
String url = portalLocationUrls.get(fileInfo.getLocationId());
downloadFileIntoArchive(fileInfo, relPath, job.getPrincipal(), handler, url);
} else {
// local file or virtual directory
writeFileIntoArchive(fileInfo, relPath, job.getPrincipal(), handler);
}
this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, servletRequest, handler);
}
}
List<ArchiveEntryDescriptor> pointingEntryDescriptors
= entryDescriptors.stream().filter(ed -> ed.isPointingToAnotherNode())
.collect(Collectors.toList());
// Now handle the archive entry descriptors that point to another node
List<String> targetNodesVosPaths = pointingEntryDescriptors.stream()
.map(ed -> ed.getTargetNodeVosPath())
.collect(Collectors.toList());
if (!targetNodesVosPaths.isEmpty()) {
for (FileInfo fileInfo : fileDAO.getArchiveFileInfos(targetNodesVosPaths)) {
// relPaths are calculated from the base node
String targetNodeVosPath = fileInfo.getVirtualPath();
List<String> linkVosPaths = pointingEntryDescriptors.stream()
.filter(ed -> ed.getTargetNodeVosPath().equals(targetNodeVosPath))
.map(ed -> ed.getVosPath())
.collect(Collectors.toList());
for (String vosPath : linkVosPaths) {
String relPath = vosPath.substring(commonParent.length());
this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, servletRequest, handler);
}
} finally {
FileSystemUtils.deleteRecursively(supportDir);
}
}
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
private <O extends OutputStream, E> void insertEntryIntoArchive(
FileInfo fileInfo, File supportDir, String relPath,
TokenPrincipal tokenPrincipal, HttpServletRequest servletRequest, ArchiveHandler<O, E> handler)
throws IOException {
if (fileInfo.isDirectory()) {
handler.putNextEntry(supportDir, relPath);
return;
}
// Only external links are retrieved here: local links have already been
// resolved before this endpoint is called.
// TODO: discuss how to handle internal links in container nodes
if (fileInfo.isLink()) {
String target = fileInfo.getTarget();
if (!target.startsWith("vos://")) {
downloadExternalLinkIntoArchive(fileInfo, relPath,
tokenPrincipal, handler, servletRequest);
}
return;
}
writeFileIntoArchive(fileInfo, relPath, tokenPrincipal, handler);
}
private File getArchiveFile(ArchiveJob job) throws IOException {
......@@ -172,26 +235,47 @@ public class ArchiveService {
return generatedDir.toPath().resolve(principal.getName()).toFile();
}
private String getCommonParent(List<String> vosPaths) {
String commonParent = null;
for (String vosPath : vosPaths) {
if (commonParent == null) {
commonParent = vosPath;
} else {
StringBuilder newCommonParent = new StringBuilder();
boolean same = true;
int lastSlashPos = vosPath.lastIndexOf("/");
for (int i = 0; same && i < Math.min(commonParent.length(), vosPath.length()) && i <= lastSlashPos; i++) {
if (commonParent.charAt(i) == vosPath.charAt(i)) {
newCommonParent.append(commonParent.charAt(i));
} else {
same = false;
}
}
commonParent = newCommonParent.toString();
private String getCommonParent(List<ArchiveEntryDescriptor> entryDescriptors) {
    if (entryDescriptors.isEmpty()) {
        throw new IllegalArgumentException("Empty descriptors list");
    } else if (entryDescriptors.size() == 1) {
        return NodeUtils.getParentPath(entryDescriptors.get(0).getVosPath());
    }
    // Get the list of parent paths, split into their path elements
    List<String[]> vosParentPaths = entryDescriptors.stream()
            .map(ed -> NodeUtils.getParentPath(ed.getVosPath()).split("/"))
            .collect(Collectors.toList());
    // Get the minimum length among the split vosParentPaths arrays
    int minSize = vosParentPaths.stream()
            .mapToInt(v -> v.length).min().orElseThrow();
    switch (minSize) {
        case 0:
            return "/";
        case 1:
            // this should never happen: splitting an absolute path yields an empty first element
            throw new IllegalArgumentException("Invalid vosPath");
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 1; i < minSize; i++) {
        List<String> elements = new ArrayList<>();
        for (String[] s : vosParentPaths) {
            elements.add(s[i]);
        }
        String sample = elements.get(0);
        if (elements.stream().allMatch(e -> e.equals(sample))) {
            sb.append("/").append(sample);
        } else {
            // stop at the first mismatch, otherwise non-contiguous matching
            // segments would be glued into a bogus common path
            break;
        }
    }
return commonParent;
return sb.toString();
}
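// Worked example (hypothetical paths), assuming NodeUtils.getParentPath strips
// the last path segment: vosPaths "/a/b/file1" and "/a/b/c/file2" have parents
// "/a/b" and "/a/b/c"; split("/") yields ["", "a", "b"] and ["", "a", "b", "c"],
// so minSize = 3 and the loop compares indices 1..2 ("a", then "b"):
// getCommonParent(descriptors) -> "/a/b"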
private abstract class ArchiveHandler<O extends OutputStream, E> implements AutoCloseable {
......@@ -272,15 +356,7 @@ public class ArchiveService {
}
}
private <O extends OutputStream, E> void downloadFileIntoArchive(FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal, ArchiveHandler<O, E> handler, String baseUrl) {
if (baseUrl == null) {
LOG.error("Location URL not found for location " + fileInfo.getLocationId());
throw new InternalFaultException("Unable to retrieve location of file " + fileInfo.getVirtualPath());
}
String url = baseUrl + "/" + fileInfo.getVirtualName();
private <O extends OutputStream, E> void downloadFromUrlIntoArchive(String url, String relPath, TokenPrincipal tokenPrincipal, ArchiveHandler<O, E> handler) {
LOG.trace("Downloading file from " + url);
restTemplate.execute(url, HttpMethod.GET, req -> {
......@@ -290,10 +366,10 @@ public class ArchiveService {
}
}, res -> {
File tmpFile = Files.createTempFile("download", null).toFile();
try ( FileOutputStream os = new FileOutputStream(tmpFile)) {
try (FileOutputStream os = new FileOutputStream(tmpFile)) {
res.getBody().transferTo(os);
handler.putNextEntry(tmpFile, relPath);
try ( FileInputStream is = new FileInputStream(tmpFile)) {
try (FileInputStream is = new FileInputStream(tmpFile)) {
is.transferTo(handler.getOutputStream());
}
} finally {
......@@ -303,6 +379,27 @@ public class ArchiveService {
}, new Object[]{});
}
private <O extends OutputStream, E> void downloadExternalLinkIntoArchive(
FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal,
ArchiveHandler<O, E> handler, HttpServletRequest servletRequest) {
String url = fileInfo.getTarget();
if (url == null || url.isBlank()) {
LOG.error("Target URL of link at path: {} is null or blank", fileInfo.getVirtualPath());
throw new InternalFaultException("Target URL of link at path: "
+ fileInfo.getVirtualPath() + " is null or blank");
}
// Append a token if the URL belongs to a known linked service
if (linkedServiceDAO.isLinkedServiceUrl(url)) {
url += "?token=" + getEndpointToken(tokenPrincipal, url, servletRequest);
}
downloadFromUrlIntoArchive(url, relPath, tokenPrincipal, handler);
}
private <O extends OutputStream, E> void writeFileIntoArchive(FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal, ArchiveHandler<O, E> handler) throws IOException {
if (!authorizationService.isDownloadable(fileInfo, tokenPrincipal)) {
throw PermissionDeniedException.forPath(fileInfo.getVirtualPath());
......@@ -311,9 +408,27 @@ public class ArchiveService {
File file = new File(fileInfo.getFilePath());
LOG.trace("Adding file " + file.getAbsolutePath() + " to tar archive");
try ( InputStream is = new FileInputStream(file)) {
try (InputStream is = new FileInputStream(file)) {
handler.putNextEntry(file, relPath);
is.transferTo(handler.getOutputStream());
}
}
private String getEndpointToken(TokenPrincipal tokenPrincipal,
String endpoint, HttpServletRequest servletRequest) {
String token = tokenPrincipal.getToken();
if (token == null) {
throw new PermissionDeniedException("Token is null");
}
TokenExchangeRequest exchangeRequest = new TokenExchangeRequest()
.setSubjectToken(token)
.setResource(endpoint);
// TODO: add audience and scope
return rapClient.exchangeToken(exchangeRequest, servletRequest);
}
}
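For context, the external-link pieces above compose into the following hedged sketch (the endpoint URL is hypothetical; isLinkedServiceUrl, getEndpointToken, downloadFromUrlIntoArchive and the "?token=" convention all appear in the diff):
String endpoint = "https://portal.example.org/files/obs42"; // hypothetical linked-service URL
if (linkedServiceDAO.isLinkedServiceUrl(endpoint)) {
    // exchange the caller's token for one scoped to the target endpoint
    endpoint += "?token=" + getEndpointToken(tokenPrincipal, endpoint, servletRequest);
}
downloadFromUrlIntoArchive(endpoint, relPath, tokenPrincipal, handler);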
......@@ -7,23 +7,17 @@ package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import it.inaf.oats.vospace.exception.InternalFaultException;
import it.inaf.oats.vospace.exception.NodeNotFoundException;
import it.inaf.oats.vospace.exception.PermissionDeniedException;
import it.inaf.oats.vospace.exception.QuotaExceededException;
import java.io.File;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
......@@ -35,9 +29,6 @@ public class FileCopyService {
@Autowired
private FileDAO fileDAO;
@Autowired
private LocationDAO locationDAO;
@Autowired
private AuthorizationService authorizationService;
......@@ -67,7 +58,6 @@ public class FileCopyService {
throw new NodeNotFoundException(sourceRootVosPath);
}
// Set the location of destinations to this file service's upload location
// before retrieving file infos
fileDAO.setBranchLocationId(destinationRootVosPath, jobId, uploadLocationId);
......@@ -101,9 +91,6 @@ public class FileCopyService {
String destinationRootVosPath,
TokenPrincipal principal) {
// it will be initialized only when necessary
Map<Integer, String> portalLocationUrls = null;
for (FileInfo destinationFileInfo : destinationFileInfos) {
LOG.trace("Processing {} destination", destinationFileInfo.getVirtualPath());
// Cycle on files only
......@@ -134,22 +121,7 @@ public class FileCopyService {
}
}
if (sourceFileInfo.getLocationId() != null && "portal".equals(sourceFileInfo.getLocationType())) {
// remote file
if (portalLocationUrls == null) {
portalLocationUrls = locationDAO.getPortalLocationUrls();
}
String url = portalLocationUrls.get(sourceFileInfo.getLocationId());
// download file to destination disk path
this.downloadFileToDisk(sourceFileInfo,
destinationFileInfo,
principal, url, remainingQuota);
} else {
// local file
this.copyLocalFile(sourceFileInfo, destinationFileInfo, principal, remainingQuota);
}
this.copyLocalFile(sourceFileInfo, destinationFileInfo, principal, remainingQuota);
}
}
......@@ -168,36 +140,6 @@ public class FileCopyService {
}
private void downloadFileToDisk(FileInfo sourceFileInfo,
FileInfo destinationFileInfo, TokenPrincipal tokenPrincipal, String baseUrl, Long remainingQuota) {
if (baseUrl == null) {
LOG.error("Location URL not found for location " + sourceFileInfo.getLocationId());
throw new InternalFaultException("Unable to retrieve location of file " + sourceFileInfo.getVirtualPath());
}
String url = baseUrl + "/" + sourceFileInfo.getVirtualName();
LOG.trace("Downloading file from {}", url);
restTemplate.execute(url, HttpMethod.GET, req -> {
HttpHeaders headers = req.getHeaders();
if (tokenPrincipal.getToken() != null) {
headers.setBearerAuth(tokenPrincipal.getToken());
}
}, res -> {
try (InputStream in = res.getBody()) {
putFileService.storeFileFromInputStream(sourceFileInfo, destinationFileInfo, in, remainingQuota);
} catch (Exception ex) {
// outFile.delete();
throw new RuntimeException(ex);
}
return null;
}, new Object[]{});
}
private void copyLocalFile(FileInfo sourceFileInfo,
FileInfo destinationFileInfo, TokenPrincipal tokenPrincipal, Long remainingQuota) {
......@@ -207,7 +149,7 @@ public class FileCopyService {
}
File file = new File(sourceFileInfo.getFilePath());
LOG.trace("Copying file: {} to {}",file.getAbsolutePath(), destinationFileInfo.getFilePath());
LOG.trace("Copying file: {} to {}", file.getAbsolutePath(), destinationFileInfo.getFilePath());
putFileService.copyLocalFile(sourceFileInfo, destinationFileInfo, remainingQuota);
......
......@@ -97,12 +97,12 @@ public class PutFileService {
// the first upload (fsPath not null)
if(destinationFileInfo.getActualBasePath() != null) {
if(destinationFileInfo.getFsPath() != null) {
LOG.error("Node {} fsPath is not null: {}. Overwriting.",
LOG.error("Node {} fsPath is not null: {}. Overwriting forbidden.",
destinationFileInfo.getVirtualPath(),
destinationFileInfo.getFsPath());
throw new InvalidArgumentException("Node " +
destinationFileInfo.getVirtualPath() +
" is already populated. Overwriting not allowed.");
" is already populated. Overwriting forbidden.");
}
destinationFileInfo.setFsPath(this.generateFsPath().toString());
......
rap_uri=https://sso.ia2.inaf.it/rap-ia2
gms_uri=https://sso.ia2.inaf.it/gms/
groups_autoload=true
client_id=vospace_test
client_secret=***REMOVED***
\ No newline at end of file
......@@ -9,9 +9,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.service.ArchiveJob;
import it.inaf.ia2.transfer.service.ArchiveService;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
import java.io.File;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
import static org.mockito.ArgumentMatchers.any;
......@@ -43,14 +45,15 @@ public class ArchiveFileControllerTest {
@Autowired
private MockMvc mockMvc;
@Test
public void testCreateTarArchive() throws Exception {
ArchiveRequest request = new ArchiveRequest();
request.setJobId("123");
request.setType("TAR");
request.setPaths(Arrays.asList("/path/to/file1", "/path/to/file2"));
request.setType("TAR");
request.setEntryDescriptors(List.of(this.getArchiveEntryDescriptor("/path/to/file1"),
this.getArchiveEntryDescriptor("/path/to/file2")));
mockMvc.perform(post("/archive")
.principal(fakePrincipal("user1"))
......@@ -63,9 +66,9 @@ public class ArchiveFileControllerTest {
assertEquals("123", job.getJobId());
assertEquals(ArchiveJob.Type.TAR, job.getType());
assertEquals("user1", job.getPrincipal().getName());
assertEquals(2, job.getVosPaths().size());
assertEquals(2, job.getEntryDescriptors().size());
return true;
}));
}), any());
}
@Test
......@@ -87,14 +90,14 @@ public class ArchiveFileControllerTest {
FileSystemUtils.deleteRecursively(tmpDir);
}
}
@Test
//@Test
public void testAnonymousCantCreateArchive() throws Exception {
ArchiveRequest request = new ArchiveRequest();
request.setJobId("123");
request.setType("ZIP");
request.setPaths(Arrays.asList("/ignore"));
request.setEntryDescriptors(List.of(this.getArchiveEntryDescriptor("/ignore")));
mockMvc.perform(post("/archive")
.principal(fakePrincipal("anonymous"))
......@@ -104,7 +107,6 @@ public class ArchiveFileControllerTest {
.andExpect(status().isForbidden());
}
@Test
public void testAnonymousCantGetArchive() throws Exception {
mockMvc.perform(get("/archive/123.zip")
......@@ -118,4 +120,8 @@ public class ArchiveFileControllerTest {
when(principal.getName()).thenReturn(name);
return principal;
}
private ArchiveEntryDescriptor getArchiveEntryDescriptor(String vosPath) {
return new ArchiveEntryDescriptor(vosPath);
}
}
......@@ -85,7 +85,7 @@ public class FileDAOTest {
List<FileInfo> fileInfos = dao.getArchiveFileInfos(Arrays.asList("/public/file1", "/public/file2", "/public/subdir1"));
assertEquals(6, fileInfos.size());
assertEquals(5, fileInfos.size());
assertEquals("/home/vospace/upload/2021/9/30/file1-UUID", fileInfos.get(0).getFilePath());
assertEquals("/home/vospace/upload/2021/9/30/file2-UUID", fileInfos.get(1).getFilePath());
......@@ -95,8 +95,6 @@ public class FileDAOTest {
assertEquals("/home/username1/retrieve/2021/9/30/file3-UUID", fileInfos.get(3).getFilePath());
assertEquals("/home/username1/retrieve/2021/9/30/file4-UUID", fileInfos.get(4).getFilePath());
assertEquals("async", fileInfos.get(4).getLocationType());
assertEquals("portal-file", fileInfos.get(5).getVirtualName());
assertEquals("portal", fileInfos.get(5).getLocationType());
}
@Test
......
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.persistence;
import java.util.Map;
import javax.sql.DataSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {DataSourceConfig.class})
@TestPropertySource(locations = "classpath:test.properties")
public class LocationDAOTest {
@Autowired
private DataSource dataSource;
private LocationDAO dao;
@BeforeEach
public void init() {
dao = new LocationDAO(dataSource);
}
@Test
public void testGetPortalLocationUrls() {
Map<Integer, String> map = dao.getPortalLocationUrls();
assertEquals(1, map.size());
assertEquals("http://archive.lbto.org/files/lbt", map.get(4));
}
}
......@@ -8,10 +8,9 @@ package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import it.inaf.oats.vospace.exception.QuotaExceededException;
import java.io.ByteArrayInputStream;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
......@@ -22,10 +21,10 @@ import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.servlet.http.HttpServletRequest;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import static org.junit.jupiter.api.Assertions.assertEquals;
......@@ -36,9 +35,7 @@ import org.junit.jupiter.api.Test;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarInputStream;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import org.mockito.Mockito;
import static org.mockito.Mockito.when;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
......@@ -46,12 +43,8 @@ import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.http.HttpMethod;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.client.RequestCallback;
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.client.RestTemplate;
@SpringBootTest
......@@ -64,9 +57,6 @@ public class ArchiveServiceTest {
@MockBean
private FileDAO fileDAO;
@MockBean
private LocationDAO locationDAO;
@MockBean
private RestTemplate restTemplate;
......@@ -109,7 +99,6 @@ public class ArchiveServiceTest {
});
}
@Test
public void testZipGeneration() throws Exception {
......@@ -138,7 +127,11 @@ public class ArchiveServiceTest {
job.setPrincipal(new TokenPrincipal("user2", "token2"));
job.setJobId("job2");
job.setType(ArchiveJob.Type.ZIP);
job.setVosPaths(Arrays.asList("/ignore"));
job.setEntryDescriptors(List.of(this.getArchiveEntryDescriptor("/ignore")));
HttpServletRequest servletRequest = Mockito.mock(HttpServletRequest.class);
when(servletRequest.getUserPrincipal()).thenReturn(job.getPrincipal());
File user2Dir = tmpDir.toPath().resolve("user2").toFile();
user2Dir.mkdir();
......@@ -153,9 +146,10 @@ public class ArchiveServiceTest {
}
Assertions.assertThrows(QuotaExceededException.class, () -> {
archiveService.createArchive(job);
archiveService.createArchive(job, servletRequest);
});
}
private static abstract class TestArchiveHandler<I extends InputStream, E> {
private final I is;
......@@ -186,13 +180,20 @@ public class ArchiveServiceTest {
File file4 = createFile(tmpParent, "2021/10/1/UUID-file4");
File file5 = createFile(tmpParent, "2021/10/1/UUID-file5");
File file6 = createFile(tmpParent, "2021/10/1/UUID-file6");
File file7 = createFile(tmpParent, "2021/10/1/UUID-portal-file");
ArchiveJob job = new ArchiveJob();
job.setPrincipal(new TokenPrincipal("user1", "token1"));
job.setJobId("abcdef");
job.setType(type);
job.setVosPaths(Arrays.asList(parent + "/dir1", parent + "/dir2", parent + "/file6"));
job.setEntryDescriptors(List.of(
this.getArchiveEntryDescriptor(parent + "/dir1"),
this.getArchiveEntryDescriptor(parent + "/dir2"),
this.getArchiveEntryDescriptor(parent + "/file6")
));
HttpServletRequest servletRequest = Mockito.mock(HttpServletRequest.class);
when(servletRequest.getUserPrincipal()).thenReturn(job.getPrincipal());
when(authorizationService.isDownloadable(any(), any())).thenReturn(true);
......@@ -208,23 +209,11 @@ public class ArchiveServiceTest {
addFileInfo(fileInfos, parent + "/dir2/c/file3", file3);
addFileInfo(fileInfos, parent + "/dir2/c/file4", file4);
addDirInfo(fileInfos, parent + "/dir2/c/d");
addFileInfo(fileInfos, parent + "/dir2/c/d/file5", file5);
addFileInfo(fileInfos, parent + "/portal-file", file7).setLocationId(1);
addFileInfo(fileInfos, parent + "/dir2/c/d/file5", file5);
when(fileDAO.getArchiveFileInfos(any())).thenReturn(fileInfos);
when(locationDAO.getPortalLocationUrls()).thenReturn(Map.of(1, "http://portal/base/url"));
doAnswer(invocation -> {
ResponseExtractor responseExtractor = invocation.getArgument(3);
ClientHttpResponse mockedResponse = mock(ClientHttpResponse.class);
when(mockedResponse.getBody()).thenReturn(new ByteArrayInputStream("some data".getBytes()));
responseExtractor.extractData(mockedResponse);
return null;
}).when(restTemplate).execute(eq("http://portal/base/url/portal-file"), eq(HttpMethod.GET),
any(RequestCallback.class), any(ResponseExtractor.class), any(Object[].class));
archiveService.createArchive(job);
archiveService.createArchive(job, servletRequest);
File result = tmpDir.toPath().resolve("user1").resolve("abcdef." + extension).toFile();
......@@ -233,17 +222,19 @@ public class ArchiveServiceTest {
// verify result structure
List<String> expectedSequence = Arrays.asList("file6", "dir1/", "dir1/a/", "dir1/a/b/",
"dir1/a/b/file1", "dir1/a/b/file2", "dir2/", "dir2/c/", "dir2/c/file3", "dir2/c/file4",
"dir2/c/d/", "dir2/c/d/file5", "portal-file");
"dir2/c/d/", "dir2/c/d/file5");
int i = 0;
TestArchiveHandler<I, E> testArchiveHandler = testArchiveGetter.apply(new FileInputStream(result));
try ( InputStream is = testArchiveHandler.getInputStream()) {
try (InputStream is = testArchiveHandler.getInputStream()) {
E entry;
while ((entry = testArchiveHandler.getNextEntry()) != null) {
assertFalse(i >= expectedSequence.size(), "Found more entries than in expected sequence");
assertEquals(expectedSequence.get(i), testArchiveHandler.getName(entry));
assertFalse(i >= expectedSequence.size(), "Found more entries than in expected sequence");
assertEquals(type.equals(ArchiveJob.Type.ZIP) ?
"/" + expectedSequence.get(i) : expectedSequence.get(i),
testArchiveHandler.getName(entry));
if (!testArchiveHandler.isDirectory(entry)) {
assertEquals("some data", new String(is.readAllBytes()));
}
......@@ -276,12 +267,16 @@ public class ArchiveServiceTest {
private File createFile(File parent, String path) throws Exception {
File file = parent.toPath().resolve(path).toFile();
file.getParentFile().mkdirs();
file.createNewFile();
Files.write(file.toPath(), "some data".getBytes());
return file;
}
private ArchiveEntryDescriptor getArchiveEntryDescriptor(String vosPath) {
return new ArchiveEntryDescriptor(vosPath);
}
/**
* @TestPropertySource annotation can't be used in this test because we need
* to set the generated.dir property dynamically (since the test directory
......
......@@ -2,12 +2,10 @@ INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('cold'
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('hot', '/mnt/hot_storage/users', NULL, 'server');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home/vospace/upload', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('portal', NULL, '/files/lbt', 'archive.lbto.org');
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 1, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 2, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('user', 4, 4);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('portal', 5, 5);
DELETE FROM node;
ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
......@@ -15,33 +13,32 @@ ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
DELETE FROM users;
INSERT INTO users (user_id, user_name, e_mail) VALUES ('user1', 'username1', 'ia2@inaf.it');
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id) VALUES (NULL, NULL, '', 'container', '0');
INSERT INTO node (parent_path, name, type, creator_id) VALUES (NULL, '', 'container', '0');
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write) VALUES ('', NULL, 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file1.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file2.txt
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write) VALUES ('', 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file1.txt
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file2.txt
-- test data for tar/zip archive
INSERT INTO node (parent_path, parent_relative_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
('', NULL, 'public', NULL, 'container', 'user1', NULL, true),
('5', '', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
('5', '', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
('5', '', 'subdir1', NULL, 'container', 'user1', NULL, true),
('5.8', '8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
('5.8', '8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true),
('5.8', '8', 'portal-file', NULL, 'data', 'user1', 4, true);
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, content_length, quota) VALUES
('', NULL, 'test_quota', 'container', 'user1', 0, 900000),
('12', NULL, 'subdir', 'container', 'user1', 0, 500000),
('12.13', NULL, 'file1', 'data', 'user1', 100000, 500000),
('12.13', NULL, 'file2', 'data', 'user1', 200000, 500000);
INSERT INTO node (parent_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
('', 'public', NULL, 'container', 'user1', NULL, true),
('5', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
('5', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
('5', 'subdir1', NULL, 'container', 'user1', NULL, true),
('5.8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
('5.8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true);
INSERT INTO node (parent_path, name, type, creator_id, content_length, quota) VALUES
('', 'test_quota', 'container', 'user1', 0, 900000),
('11', 'subdir', 'container', 'user1', 0, 500000),
('11.12', 'file1', 'data', 'user1', 100000, 500000),
('11.12', 'file2', 'data', 'user1', 200000, 500000);
-- test data for get branch file info
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', NULL, 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL); -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL); -- /test100
DELETE FROM job;
......