Commits on Source (6)
......@@ -31,9 +31,6 @@ public class GetFileController extends FileController {
@Autowired
private FileDAO fileDAO;
- @Autowired
- private JobDAO jobDAO;
@Autowired
private AuthorizationService authorizationService;
......
......@@ -44,9 +44,9 @@ public class FileDAO {
String sql = "SELECT n.node_id, is_public, group_read, group_write, creator_id, async_trans,\n"
+ "content_type, content_encoding, content_length, content_md5, name, n.location_id,\n"
+ "accept_views, provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "accept_views, provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = creator_id) AS username, n.job_id,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, ? AS vos_path, false AS is_directory,\n"
+ "base_path, ? AS vos_path, false AS is_directory,\n"
+ "n.type = 'link' AS is_link, n.target,\n"
+ "fs_path \n"
+ "FROM node n\n"
......@@ -176,9 +176,9 @@ public class FileDAO {
String sql = "SELECT n.node_id, n.is_public, n.group_read, n.group_write, n.creator_id, n.async_trans, n.fs_path,\n"
+ "n.content_type, n.content_encoding, n.content_length, n.content_md5,\n"
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "n.accept_views, n.provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "base_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "n.type = 'container' AS is_directory, n.name, n.location_id, n.job_id,\n"
+ "n.type = 'link' AS is_link, n.target, l.location_type\n"
+ "FROM node n\n"
......@@ -209,9 +209,9 @@ public class FileDAO {
String sql = "SELECT n.node_id, n.is_public, n.group_read, n.group_write, n.creator_id, n.async_trans, n.fs_path,\n"
+ "n.content_type, n.content_encoding, n.content_length, n.content_md5,\n"
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "n.accept_views, n.provide_views, l.location_type,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "base_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "n.type = 'container' AS is_directory, n.name, n.location_id, n.job_id,\n"
+ "n.type = 'link' AS is_link, n.target, l.location_type\n"
+ "FROM node n\n"
......@@ -281,8 +281,7 @@ public class FileDAO {
fi.setOwnerId(rs.getString("creator_id"));
fi.setAsyncTrans(rs.getBoolean("async_trans"));
fi.setAcceptViews(toList(rs.getArray("accept_views")));
fi.setProvideViews(toList(rs.getArray("provide_views")));
- fi.setVirtualParent(rs.getBoolean("virtual_parent"));
fi.setVirtualPath(rs.getString("vos_path"));
fi.setVirtualName(rs.getString("name"));
fi.setContentEncoding(rs.getString("content_encoding"));
......@@ -345,32 +344,5 @@ public class FileDAO {
Path completeFsPath = Path.of(fsPath);
fi.setFsPath(completeFsPath.toString());
}
- private void fillOsPath(FileInfo fi, ResultSet rs) throws SQLException {
- String basePath = rs.getString("base_path");
- if (basePath == null) {
- return;
- }
- String osPath = rs.getString("os_path");
- if (osPath.startsWith("/")) {
- osPath = osPath.substring(1);
- }
- Path completeOsPath = Path.of(basePath);
- boolean asyncLocation = "async".equals(rs.getString("location_type"));
- if (asyncLocation) {
- String username = rs.getString("username");
- completeOsPath = completeOsPath.resolve(username).resolve("retrieve");
- } else if (fi.hasVirtualParent()) {
- completeOsPath = completeOsPath.resolve(fi.getOwnerId());
- }
- completeOsPath = completeOsPath.resolve(osPath);
- fi.setOsPath(completeOsPath.toString());
- }
}
}
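The removed fillOsPath assembled the physical path as base_path, plus the owner's user name and a "retrieve" segment for async locations (or the owner id when the node had a virtual parent), plus os_path. A minimal runnable illustration of the async case, with values matching the FileDAOTest assertions further down (OsPathExample is not a project class):

    import java.nio.file.Path;

    public class OsPathExample {
        public static void main(String[] args) {
            // async location: base_path / username / "retrieve" / os_path
            Path p = Path.of("/home").resolve("username1").resolve("retrieve")
                    .resolve("2021/9/30/file3-UUID");
            System.out.println(p); // /home/username1/retrieve/2021/9/30/file3-UUID
        }
    }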
- /*
- * This file is part of vospace-file-service
- * Copyright (C) 2021 Istituto Nazionale di Astrofisica
- * SPDX-License-Identifier: GPL-3.0-or-later
- */
- package it.inaf.ia2.transfer.persistence;
- import java.util.HashMap;
- import java.util.Map;
- import javax.sql.DataSource;
- import org.springframework.beans.factory.annotation.Autowired;
- import org.springframework.jdbc.core.JdbcTemplate;
- import org.springframework.stereotype.Repository;
- @Repository
- public class LocationDAO {
- private final JdbcTemplate jdbcTemplate;
- @Autowired
- public LocationDAO(DataSource fileCatalogDatasource) {
- this.jdbcTemplate = new JdbcTemplate(fileCatalogDatasource);
- }
- public Map<Integer, String> getPortalLocationUrls() {
- String sql = "SELECT location_id, hostname, base_url\n"
- + "FROM location l\n"
- + "JOIN storage s ON s.storage_id = l.storage_dest_id\n"
- + "WHERE location_type = 'portal'";
- return jdbcTemplate.query(sql, rs -> {
- Map<Integer, String> locationUrls = new HashMap<>();
- while (rs.next()) {
- int locationId = rs.getInt("location_id");
- String hostname = rs.getString("hostname");
- String baseUrl = rs.getString("base_url");
- String url = "http://" + hostname + baseUrl;
- locationUrls.put(locationId, url);
- }
- return locationUrls;
- });
- }
- }
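This whole DAO is deleted along with portal support: getPortalLocationUrls() keyed each portal location id to "http://" + hostname + base_url, and callers appended the node's virtual name to build the download URL. A small sketch of that contract, with the URL taken from the LocationDAOTest fixture below (PortalUrlExample is illustrative, not a project class):

    import java.util.Map;

    public class PortalUrlExample {
        public static void main(String[] args) {
            // shape of the map getPortalLocationUrls() returned
            Map<Integer, String> urls = Map.of(4, "http://archive.lbto.org/files/lbt");
            // ArchiveService/FileCopyService appended the node's virtual name
            String fileUrl = urls.get(4) + "/" + "portal-file";
            System.out.println(fileUrl); // http://archive.lbto.org/files/lbt/portal-file
        }
    }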
......@@ -10,16 +10,14 @@ import java.util.List;
public class FileInfo {
private int nodeId;
- private String osPath;
private String virtualPath;
private String virtualName;
private String fsPath;
// actualBasePath differs from base path in db due to some location type
// dependent manipulations (performed by FileDAO)
private String actualBasePath;
private boolean isPublic;
- private boolean virtualParent;
private boolean directory;
private boolean link;
private String target;
......@@ -85,14 +83,6 @@ public class FileInfo {
this.nodeId = nodeId;
}
- public String getOsPath() {
- return osPath;
- }
- public void setOsPath(String osPath) {
- this.osPath = osPath;
- }
public String getVirtualPath() {
return virtualPath;
}
......@@ -133,14 +123,6 @@ public class FileInfo {
this.link = link;
}
- public boolean hasVirtualParent() {
- return virtualParent;
- }
- public void setVirtualParent(boolean virtualParent) {
- this.virtualParent = virtualParent;
- }
public List<String> getGroupRead() {
return groupRead;
}
......
......@@ -6,13 +6,12 @@
package it.inaf.ia2.transfer.service;
import it.inaf.ia2.aa.ServletRapClient;
import it.inaf.ia2.aa.data.User;
import it.inaf.ia2.rap.client.call.TokenExchangeRequest;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
- import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
+ import it.inaf.oats.vospace.datamodel.NodeUtils;
import it.inaf.oats.vospace.exception.InternalFaultException;
import it.inaf.oats.vospace.exception.PermissionDeniedException;
import it.inaf.oats.vospace.exception.QuotaExceededException;
......@@ -28,8 +27,8 @@ import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.security.Principal;
+ import java.util.ArrayList;
import java.util.List;
- import java.util.Map;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
......@@ -57,9 +56,6 @@ public class ArchiveService {
@Autowired
private FileDAO fileDAO;
- @Autowired
- private LocationDAO locationDAO;
@Autowired
private LinkedServiceDAO linkedServiceDAO;
......@@ -75,9 +71,6 @@ public class ArchiveService {
@Autowired
private ServletRapClient rapClient;
- @Value("${upload_location_id}")
- private int uploadLocationId;
// Directory containing temporary files generated by jobs.
@Value("${generated.dir}")
private String generatedDirString;
......@@ -109,13 +102,11 @@ public class ArchiveService {
List<ArchiveEntryDescriptor> entryDescriptors = job.getEntryDescriptors();
- String commonParent = getCommonParent(entryDescriptors);
// support directory used to generate folder inside tar files (path is redefined each time by TarEntry class)
File supportDir = Files.createTempDirectory("dir").toFile();
- try ( ArchiveHandler<O, E> handler = getArchiveHandler(archiveFile, job.getType())) {
- fillArchive(entryDescriptors, commonParent, supportDir,
+ try (ArchiveHandler<O, E> handler = getArchiveHandler(archiveFile, job.getType())) {
+ fillArchive(entryDescriptors, supportDir,
job.getPrincipal(), servletRequest, handler);
} finally {
FileSystemUtils.deleteRecursively(supportDir);
......@@ -127,13 +118,10 @@ public class ArchiveService {
}
private <O extends OutputStream, E> void fillArchive(
- List<ArchiveEntryDescriptor> entryDescriptors, String commonParent,
+ List<ArchiveEntryDescriptor> entryDescriptors,
File supportDir, TokenPrincipal tokenPrincipal,
HttpServletRequest servletRequest, ArchiveHandler<O, E> handler) throws IOException {
- // it will be initialized only when necessary
- Map<Integer, String> portalLocationUrls = null;
List<ArchiveEntryDescriptor> noTargetEntryDescriptors
= entryDescriptors.stream().filter(ed -> !ed.isPointingToAnotherNode())
.collect(Collectors.toList());
......@@ -142,13 +130,15 @@ public class ArchiveService {
List<String> vosPaths = noTargetEntryDescriptors.stream()
.map(ed -> ed.getVosPath())
.collect(Collectors.toList());
+ String commonParent = getCommonParent(entryDescriptors);
if (!vosPaths.isEmpty()) {
for (FileInfo fileInfo : fileDAO.getArchiveFileInfos(vosPaths)) {
String relPath = fileInfo.getVirtualPath().substring(commonParent.length());
- this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, portalLocationUrls, servletRequest, handler);
+ this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, servletRequest, handler);
}
}
......@@ -175,7 +165,7 @@ public class ArchiveService {
for (String vosPath : linkVosPaths) {
String relPath = vosPath.substring(commonParent.length());
- this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, portalLocationUrls, servletRequest, handler);
+ this.insertEntryIntoArchive(fileInfo, supportDir, relPath, tokenPrincipal, servletRequest, handler);
}
}
}
......@@ -183,8 +173,7 @@ public class ArchiveService {
private <O extends OutputStream, E> void insertEntryIntoArchive(
FileInfo fileInfo, File supportDir, String relPath,
- TokenPrincipal tokenPrincipal, Map<Integer, String> portalLocationUrls,
- HttpServletRequest servletRequest, ArchiveHandler<O, E> handler)
+ TokenPrincipal tokenPrincipal, HttpServletRequest servletRequest, ArchiveHandler<O, E> handler)
throws IOException {
if (fileInfo.isDirectory()) {
handler.putNextEntry(supportDir, relPath);
......@@ -202,18 +191,9 @@ public class ArchiveService {
}
return;
}
- if (fileInfo.getLocationId() != null && "portal".equals(fileInfo.getLocationType())) {
- // remote file
- if (portalLocationUrls == null) {
- portalLocationUrls = locationDAO.getPortalLocationUrls();
- }
- String url = portalLocationUrls.get(fileInfo.getLocationId());
- downloadRemoteLocationFileIntoArchive(fileInfo, relPath, tokenPrincipal, handler, url);
- } else {
- // local file or virtual directory
- writeFileIntoArchive(fileInfo, relPath, tokenPrincipal, handler);
- }
+ writeFileIntoArchive(fileInfo, relPath, tokenPrincipal, handler);
}
private File getArchiveFile(ArchiveJob job) throws IOException {
......@@ -256,34 +236,46 @@ public class ArchiveService {
}
private String getCommonParent(List<ArchiveEntryDescriptor> entryDescriptors) {
- List<String> vosPaths = entryDescriptors.stream().map(ed -> ed.getVosPath())
- .collect(Collectors.toList());
- if (vosPaths.size() == 1) {
- String vosPath = vosPaths.get(0);
- return vosPath.substring(0, vosPath.lastIndexOf("/"));
- }
- String commonParent = null;
- for (String vosPath : vosPaths) {
- if (commonParent == null) {
- commonParent = vosPath;
- } else {
- StringBuilder newCommonParent = new StringBuilder();
- boolean same = true;
- int lastSlashPos = vosPath.lastIndexOf("/");
- for (int i = 0; same && i < Math.min(commonParent.length(), vosPath.length()) && i <= lastSlashPos; i++) {
- if (commonParent.charAt(i) == vosPath.charAt(i)) {
- newCommonParent.append(commonParent.charAt(i));
- } else {
- same = false;
- }
- }
- commonParent = newCommonParent.toString();
- }
- }
- return commonParent;
+ if(entryDescriptors.isEmpty()) {
+ throw new IllegalArgumentException("Empty descriptors list");
+ } else if(entryDescriptors.size() == 1) {
+ return NodeUtils.getParentPath(entryDescriptors.get(0).getVosPath());
+ }
+ // Get list of parent paths
+ List<String[]> vosParentPaths = entryDescriptors.stream().map(
+ ed -> NodeUtils.getParentPath(ed.getVosPath()).split("/"))
+ .collect(Collectors.toList());
+ // Get minimum size of split vosParentPaths arrays
+ int minSize = vosParentPaths.stream()
+ .mapToInt(v->v.length).min().orElseThrow();
+ switch(minSize) {
+ case 0:
+ return "/";
+ case 1:
+ // this should never happen
+ throw new IllegalArgumentException("Invalid vosPath");
+ }
+ StringBuilder sb = new StringBuilder();
+ for(int i = 1; i < minSize; i++) {
+ List<String> elements = new ArrayList<>();
+ for(String[] s : vosParentPaths) {
+ elements.add(s[i]);
+ }
+ String sample = elements.get(0);
+ if(elements.stream().allMatch(e->e.equals(sample)))
+ sb.append("/").append(sample);
+ }
+ return sb.toString();
}
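The rewritten getCommonParent walks parent-path segments instead of comparing raw strings character by character. A self-contained sketch of the same walk (illustrative only: getParentPath stands in for NodeUtils.getParentPath and is assumed to return "/" for root-level nodes; unlike this sketch, the committed loop has no early break and simply skips indices whose segments differ):

    import java.util.ArrayList;
    import java.util.List;

    public class CommonParentSketch {

        // assumed equivalent of NodeUtils.getParentPath
        static String getParentPath(String vosPath) {
            int cut = vosPath.lastIndexOf('/');
            return cut == 0 ? "/" : vosPath.substring(0, cut);
        }

        static String getCommonParent(List<String> vosPaths) {
            if (vosPaths.isEmpty()) {
                throw new IllegalArgumentException("Empty descriptors list");
            } else if (vosPaths.size() == 1) {
                return getParentPath(vosPaths.get(0));
            }
            // split every parent path into segments: "/a/b" -> ["", "a", "b"]
            List<String[]> parents = new ArrayList<>();
            for (String p : vosPaths) {
                parents.add(getParentPath(p).split("/"));
            }
            // the shortest parent bounds the walk; "/".split("/") is empty,
            // so any root-level entry forces the common parent to "/"
            int minSize = parents.stream().mapToInt(v -> v.length).min().orElseThrow();
            if (minSize == 0) {
                return "/";
            }
            StringBuilder sb = new StringBuilder();
            for (int i = 1; i < minSize; i++) {
                final int idx = i;
                String sample = parents.get(0)[i];
                if (parents.stream().allMatch(s -> s[idx].equals(sample))) {
                    sb.append("/").append(sample);
                } else {
                    break; // first mismatch ends the common prefix
                }
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            // archive entries are stored under vosPath minus this common prefix
            System.out.println(getCommonParent(
                    List.of("/public/file1", "/public/subdir1/file3"))); // /public
            System.out.println(getCommonParent(
                    List.of("/public/file1", "/test1/file1.txt"))); // "" (nothing shared below root)
        }
    }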
private abstract class ArchiveHandler<O extends OutputStream, E> implements AutoCloseable {
......@@ -374,10 +366,10 @@ public class ArchiveService {
}
}, res -> {
File tmpFile = Files.createTempFile("download", null).toFile();
- try ( FileOutputStream os = new FileOutputStream(tmpFile)) {
+ try (FileOutputStream os = new FileOutputStream(tmpFile)) {
res.getBody().transferTo(os);
handler.putNextEntry(tmpFile, relPath);
- try ( FileInputStream is = new FileInputStream(tmpFile)) {
+ try (FileInputStream is = new FileInputStream(tmpFile)) {
is.transferTo(handler.getOutputStream());
}
} finally {
......@@ -387,21 +379,6 @@ public class ArchiveService {
}, new Object[]{});
}
- private <O extends OutputStream, E> void downloadRemoteLocationFileIntoArchive(
- FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal,
- ArchiveHandler<O, E> handler, String baseUrl) {
- if (baseUrl == null) {
- LOG.error("Location URL not found for location " + fileInfo.getLocationId());
- throw new InternalFaultException("Unable to retrieve location of file "
- + fileInfo.getVirtualPath());
- }
- String url = baseUrl + "/" + fileInfo.getVirtualName();
- downloadFromUrlIntoArchive(url, relPath, tokenPrincipal, handler);
- }
private <O extends OutputStream, E> void downloadExternalLinkIntoArchive(
FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal,
ArchiveHandler<O, E> handler, HttpServletRequest servletRequest) {
......@@ -431,7 +408,7 @@ public class ArchiveService {
File file = new File(fileInfo.getFilePath());
LOG.trace("Adding file " + file.getAbsolutePath() + " to tar archive");
- try ( InputStream is = new FileInputStream(file)) {
+ try (InputStream is = new FileInputStream(file)) {
handler.putNextEntry(file, relPath);
is.transferTo(handler.getOutputStream());
}
......
......@@ -7,23 +7,17 @@ package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
- import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
- import it.inaf.oats.vospace.exception.InternalFaultException;
import it.inaf.oats.vospace.exception.NodeNotFoundException;
import it.inaf.oats.vospace.exception.PermissionDeniedException;
import it.inaf.oats.vospace.exception.QuotaExceededException;
import java.io.File;
- import java.io.InputStream;
import java.util.List;
- import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
- import org.springframework.http.HttpHeaders;
- import org.springframework.http.HttpMethod;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
......@@ -35,9 +29,6 @@ public class FileCopyService {
@Autowired
private FileDAO fileDAO;
- @Autowired
- private LocationDAO locationDAO;
@Autowired
private AuthorizationService authorizationService;
......@@ -67,7 +58,6 @@ public class FileCopyService {
throw new NodeNotFoundException(sourceRootVosPath);
}
// Set location of destinations to this file service update location
// before retrieving file infos
fileDAO.setBranchLocationId(destinationRootVosPath, jobId, uploadLocationId);
......@@ -101,9 +91,6 @@ public class FileCopyService {
String destinationRootVosPath,
TokenPrincipal principal) {
- // it will be initialized only when necessary
- Map<Integer, String> portalLocationUrls = null;
for (FileInfo destinationFileInfo : destinationFileInfos) {
LOG.trace("Processing {} destination", destinationFileInfo.getVirtualPath());
// Cycle on files only
......@@ -134,22 +121,7 @@ public class FileCopyService {
}
}
- if (sourceFileInfo.getLocationId() != null && "portal".equals(sourceFileInfo.getLocationType())) {
- // remote file
- if (portalLocationUrls == null) {
- portalLocationUrls = locationDAO.getPortalLocationUrls();
- }
- String url = portalLocationUrls.get(sourceFileInfo.getLocationId());
- // download file to destination disk path
- this.downloadFileToDisk(sourceFileInfo,
- destinationFileInfo,
- principal, url, remainingQuota);
- } else {
- // local file
- this.copyLocalFile(sourceFileInfo, destinationFileInfo, principal, remainingQuota);
- }
+ this.copyLocalFile(sourceFileInfo, destinationFileInfo, principal, remainingQuota);
}
}
......@@ -168,36 +140,6 @@ public class FileCopyService {
}
- private void downloadFileToDisk(FileInfo sourceFileInfo,
- FileInfo destinationFileInfo, TokenPrincipal tokenPrincipal, String baseUrl, Long remainingQuota) {
- if (baseUrl == null) {
- LOG.error("Location URL not found for location " + sourceFileInfo.getLocationId());
- throw new InternalFaultException("Unable to retrieve location of file " + sourceFileInfo.getVirtualPath());
- }
- String url = baseUrl + "/" + sourceFileInfo.getVirtualName();
- LOG.trace("Downloading file from {}", url);
- restTemplate.execute(url, HttpMethod.GET, req -> {
- HttpHeaders headers = req.getHeaders();
- if (tokenPrincipal.getToken() != null) {
- headers.setBearerAuth(tokenPrincipal.getToken());
- }
- }, res -> {
- try (InputStream in = res.getBody()) {
- putFileService.storeFileFromInputStream(sourceFileInfo, destinationFileInfo, in, remainingQuota);
- } catch (Exception ex) {
- // outFile.delete();
- throw new RuntimeException(ex);
- }
- return null;
- }, new Object[]{});
- }
private void copyLocalFile(FileInfo sourceFileInfo,
FileInfo destinationFileInfo, TokenPrincipal tokenPrincipal, Long remainingQuota) {
......@@ -207,7 +149,7 @@ public class FileCopyService {
}
File file = new File(sourceFileInfo.getFilePath());
LOG.trace("Copying file: {} to {}",file.getAbsolutePath(), destinationFileInfo.getFilePath());
LOG.trace("Copying file: {} to {}", file.getAbsolutePath(), destinationFileInfo.getFilePath());
putFileService.copyLocalFile(sourceFileInfo, destinationFileInfo, remainingQuota);
......
......@@ -97,12 +97,12 @@ public class PutFileService {
// the first upload (fsPath not null)
if(destinationFileInfo.getActualBasePath() != null) {
if(destinationFileInfo.getFsPath() != null) {
- LOG.error("Node {} fsPath is not null: {}. Overwriting.",
+ LOG.error("Node {} fsPath is not null: {}. Overwriting forbidden.",
destinationFileInfo.getVirtualPath(),
destinationFileInfo.getFsPath());
throw new InvalidArgumentException("Node " +
destinationFileInfo.getVirtualPath() +
" is already populated. Overwriting not allowed.");
" is already populated. Overwriting forbidden.");
}
destinationFileInfo.setFsPath(this.generateFsPath().toString());
......
......@@ -85,7 +85,7 @@ public class FileDAOTest {
List<FileInfo> fileInfos = dao.getArchiveFileInfos(Arrays.asList("/public/file1", "/public/file2", "/public/subdir1"));
- assertEquals(6, fileInfos.size());
+ assertEquals(5, fileInfos.size());
assertEquals("/home/vospace/upload/2021/9/30/file1-UUID", fileInfos.get(0).getFilePath());
assertEquals("/home/vospace/upload/2021/9/30/file2-UUID", fileInfos.get(1).getFilePath());
......@@ -95,8 +95,6 @@ public class FileDAOTest {
assertEquals("/home/username1/retrieve/2021/9/30/file3-UUID", fileInfos.get(3).getFilePath());
assertEquals("/home/username1/retrieve/2021/9/30/file4-UUID", fileInfos.get(4).getFilePath());
assertEquals("async", fileInfos.get(4).getLocationType());
assertEquals("portal-file", fileInfos.get(5).getVirtualName());
assertEquals("portal", fileInfos.get(5).getLocationType());
}
@Test
......
- /*
- * This file is part of vospace-file-service
- * Copyright (C) 2021 Istituto Nazionale di Astrofisica
- * SPDX-License-Identifier: GPL-3.0-or-later
- */
- package it.inaf.ia2.transfer.persistence;
- import java.util.Map;
- import javax.sql.DataSource;
- import static org.junit.jupiter.api.Assertions.assertEquals;
- import org.junit.jupiter.api.BeforeEach;
- import org.junit.jupiter.api.Test;
- import org.junit.jupiter.api.extension.ExtendWith;
- import org.springframework.beans.factory.annotation.Autowired;
- import org.springframework.test.context.ContextConfiguration;
- import org.springframework.test.context.TestPropertySource;
- import org.springframework.test.context.junit.jupiter.SpringExtension;
- @ExtendWith(SpringExtension.class)
- @ContextConfiguration(classes = {DataSourceConfig.class})
- @TestPropertySource(locations = "classpath:test.properties")
- public class LocationDAOTest {
- @Autowired
- private DataSource dataSource;
- private LocationDAO dao;
- @BeforeEach
- public void init() {
- dao = new LocationDAO(dataSource);
- }
- @Test
- public void testGetPortalLocationUrls() {
- Map<Integer, String> map = dao.getPortalLocationUrls();
- assertEquals(1, map.size());
- assertEquals("http://archive.lbto.org/files/lbt", map.get(4));
- }
- }
......@@ -8,11 +8,9 @@ package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
- import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import it.inaf.oats.vospace.exception.QuotaExceededException;
import it.inaf.oats.vospace.parent.exchange.ArchiveEntryDescriptor;
- import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
......@@ -23,7 +21,6 @@ import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
- import java.util.Map;
import java.util.function.Function;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
......@@ -38,10 +35,7 @@ import org.junit.jupiter.api.Test;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarInputStream;
import static org.mockito.ArgumentMatchers.any;
- import static org.mockito.ArgumentMatchers.eq;
import org.mockito.Mockito;
- import static org.mockito.Mockito.doAnswer;
- import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
......@@ -49,13 +43,8 @@ import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
- import org.springframework.context.annotation.Primary;
- import org.springframework.http.HttpMethod;
- import org.springframework.http.client.ClientHttpResponse;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.util.FileSystemUtils;
- import org.springframework.web.client.RequestCallback;
- import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.client.RestTemplate;
@SpringBootTest
......@@ -68,9 +57,6 @@ public class ArchiveServiceTest {
@MockBean
private FileDAO fileDAO;
- @MockBean
- private LocationDAO locationDAO;
@MockBean
private RestTemplate restTemplate;
......@@ -92,7 +78,6 @@ public class ArchiveServiceTest {
FileSystemUtils.deleteRecursively(tmpDir);
}
- // TODO: refactor tests
@Test
public void testTarGeneration() throws Exception {
......@@ -195,7 +180,6 @@ public class ArchiveServiceTest {
File file4 = createFile(tmpParent, "2021/10/1/UUID-file4");
File file5 = createFile(tmpParent, "2021/10/1/UUID-file5");
File file6 = createFile(tmpParent, "2021/10/1/UUID-file6");
- File file7 = createFile(tmpParent, "2021/10/1/UUID-portal-file");
ArchiveJob job = new ArchiveJob();
job.setPrincipal(new TokenPrincipal("user1", "token1"));
......@@ -225,22 +209,10 @@ public class ArchiveServiceTest {
addFileInfo(fileInfos, parent + "/dir2/c/file3", file3);
addFileInfo(fileInfos, parent + "/dir2/c/file4", file4);
addDirInfo(fileInfos, parent + "/dir2/c/d");
addFileInfo(fileInfos, parent + "/dir2/c/d/file5", file5);
addFileInfo(fileInfos, parent + "/portal-file", file7).setLocationId(1);
addFileInfo(fileInfos, parent + "/dir2/c/d/file5", file5);
when(fileDAO.getArchiveFileInfos(any())).thenReturn(fileInfos);
- when(locationDAO.getPortalLocationUrls()).thenReturn(Map.of(1, "http://portal/base/url"));
- doAnswer(invocation -> {
- ResponseExtractor responseExtractor = invocation.getArgument(3);
- ClientHttpResponse mockedResponse = mock(ClientHttpResponse.class);
- when(mockedResponse.getBody()).thenReturn(new ByteArrayInputStream("some data".getBytes()));
- responseExtractor.extractData(mockedResponse);
- return null;
- }).when(restTemplate).execute(eq("http://portal/base/url/portal-file"), eq(HttpMethod.GET),
- any(RequestCallback.class), any(ResponseExtractor.class), any(Object[].class));
archiveService.createArchive(job, servletRequest);
File result = tmpDir.toPath().resolve("user1").resolve("abcdef." + extension).toFile();
......@@ -250,7 +222,7 @@ public class ArchiveServiceTest {
// verify result structure
List<String> expectedSequence = Arrays.asList("file6", "dir1/", "dir1/a/", "dir1/a/b/",
"dir1/a/b/file1", "dir1/a/b/file2", "dir2/", "dir2/c/", "dir2/c/file3", "dir2/c/file4",
"dir2/c/d/", "dir2/c/d/file5", "portal-file");
"dir2/c/d/", "dir2/c/d/file5");
int i = 0;
......@@ -259,8 +231,10 @@ public class ArchiveServiceTest {
try (InputStream is = testArchiveHandler.getInputStream()) {
E entry;
while ((entry = testArchiveHandler.getNextEntry()) != null) {
assertFalse(i >= expectedSequence.size(), "Found more entries than in expected sequence");
- assertEquals(expectedSequence.get(i), testArchiveHandler.getName(entry));
+ assertEquals(type.equals(ArchiveJob.Type.ZIP) ?
+ "/" + expectedSequence.get(i) : expectedSequence.get(i),
+ testArchiveHandler.getName(entry));
if (!testArchiveHandler.isDirectory(entry)) {
assertEquals("some data", new String(is.readAllBytes()));
}
......
......@@ -2,12 +2,10 @@ INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('cold'
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('hot', '/mnt/hot_storage/users', NULL, 'server');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home/vospace/upload', NULL, 'localhost');
- INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('portal', NULL, '/files/lbt', 'archive.lbto.org');
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 1, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 2, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('user', 4, 4);
- INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('portal', 5, 5);
DELETE FROM node;
ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
......@@ -15,33 +13,32 @@ ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
DELETE FROM users;
INSERT INTO users (user_id, user_name, e_mail) VALUES ('user1', 'username1', 'ia2@inaf.it');
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id) VALUES (NULL, NULL, '', 'container', '0');
+ INSERT INTO node (parent_path, name, type, creator_id) VALUES (NULL, '', 'container', '0');
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write) VALUES ('', NULL, 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file1.txt
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file2.txt
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write) VALUES ('', 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file1.txt
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file2.txt
-- test data for tar/zip archive
- INSERT INTO node (parent_path, parent_relative_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
- ('', NULL, 'public', NULL, 'container', 'user1', NULL, true),
- ('5', '', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
- ('5', '', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
- ('5', '', 'subdir1', NULL, 'container', 'user1', NULL, true),
- ('5.8', '8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
- ('5.8', '8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true),
- ('5.8', '8', 'portal-file', NULL, 'data', 'user1', 4, true);
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, content_length, quota) VALUES
- ('', NULL, 'test_quota', 'container', 'user1', 0, 900000),
- ('12', NULL, 'subdir', 'container', 'user1', 0, 500000),
- ('12.13', NULL, 'file1', 'data', 'user1', 100000, 500000),
- ('12.13', NULL, 'file2', 'data', 'user1', 200000, 500000);
+ INSERT INTO node (parent_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
+ ('', 'public', NULL, 'container', 'user1', NULL, true),
+ ('5', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
+ ('5', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
+ ('5', 'subdir1', NULL, 'container', 'user1', NULL, true),
+ ('5.8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
+ ('5.8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true);
+ INSERT INTO node (parent_path, name, type, creator_id, content_length, quota) VALUES
+ ('', 'test_quota', 'container', 'user1', 0, 900000),
+ ('11', 'subdir', 'container', 'user1', 0, 500000),
+ ('11.12', 'file1', 'data', 'user1', 100000, 500000),
+ ('11.12', 'file2', 'data', 'user1', 200000, 500000);
-- test data for get branch file info
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', NULL, 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
- INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL); -- /test100
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
+ INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL); -- /test100
DELETE FROM job;
......