Commit 772b00ce authored by Sonia Zorba

Tar/Zip archive generation: added endpoint for retrieving generated files, modified generation endpoint payload and added tests
parent 1af31ba0
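
For context, a minimal client-side sketch of the reworked flow (not part of this commit): the generation endpoint now takes a JSON payload matching ArchiveRequest (type, jobId, paths), answers 303 See Other with a Location header pointing at the new retrieval endpoint, and the generated file is then downloaded from /archive/{fileName}. The base URL, job id, paths and the omission of authentication headers are illustrative assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ArchiveClientSketch {

    public static void main(String[] args) throws Exception {
        // Assumed base URL of the file service; replace with the real deployment address.
        String baseUrl = "http://localhost:8080";

        // Payload matching ArchiveRequest: type is one of TAR, TGZ, ZIP.
        String json = "{\"type\":\"TAR\",\"jobId\":\"123\","
                + "\"paths\":[\"/path/to/file1\",\"/path/to/file2\"]}";

        HttpClient client = HttpClient.newHttpClient();

        // POST /archive starts the asynchronous generation and returns 303 See Other
        // with Location = <request URL>/<jobId>.<extension> (e.g. .../archive/123.tar).
        // HttpClient does not follow redirects by default, so the header is read manually.
        HttpRequest create = HttpRequest.newBuilder(URI.create(baseUrl + "/archive"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();
        HttpResponse<Void> created = client.send(create, HttpResponse.BodyHandlers.discarding());
        String location = created.headers().firstValue("Location").orElseThrow();

        // GET /archive/{fileName} returns the generated archive once the job has completed;
        // a real client would poll or otherwise wait before downloading.
        HttpRequest download = HttpRequest.newBuilder(URI.create(location)).GET().build();
        HttpResponse<byte[]> archive = client.send(download, HttpResponse.BodyHandlers.ofByteArray());
        System.out.println("HTTP " + archive.statusCode() + ", " + archive.body().length + " bytes");
    }
}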
@@ -148,6 +148,9 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.2</version>
<configuration>
<trimStackTrace>false</trimStackTrace>
</configuration>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
......
@@ -7,15 +7,21 @@ package it.inaf.ia2.transfer.controller;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.service.ArchiveJob;
import it.inaf.ia2.transfer.service.ArchiveJob.Type;
import it.inaf.ia2.transfer.service.ArchiveService;
import java.util.List;
import java.io.File;
import java.util.concurrent.CompletableFuture;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@RestController
@@ -27,21 +33,36 @@ public class ArchiveFileController {
@Autowired
private HttpServletRequest request;
@PostMapping(value = "/tar", consumes = MediaType.APPLICATION_JSON_VALUE)
public void createTarArchive(@RequestParam(value = "jobId", required = true) String jobId, @RequestBody List<String> vosPaths) {
@Autowired
private HttpServletResponse response;
@PostMapping(value = "/archive", consumes = MediaType.APPLICATION_JSON_VALUE)
public ResponseEntity<?> createArchiveFile(@RequestBody ArchiveRequest archiveRequest) {
Type type = Type.valueOf(archiveRequest.getType());
ArchiveJob job = new ArchiveJob();
job.setPrincipal((TokenPrincipal) request.getUserPrincipal());
job.setJobId(jobId);
job.setType(ArchiveJob.Type.TAR);
job.setVosPaths(vosPaths);
job.setJobId(archiveRequest.getJobId());
job.setType(type);
job.setVosPaths(archiveRequest.getPaths());
startArchiveJob(job);
}
private void startArchiveJob(ArchiveJob job) {
CompletableFuture.runAsync(() -> {
archiveService.createArchive(job);
});
HttpHeaders headers = new HttpHeaders();
headers.set("Location", request.getRequestURL() + "/" + archiveRequest.getJobId() + "." + type.getExtension());
return new ResponseEntity<>(headers, HttpStatus.SEE_OTHER);
}
@GetMapping(value = "/archive/{fileName}")
public ResponseEntity<?> getArchiveFile(@PathVariable("fileName") String fileName) {
TokenPrincipal principal = (TokenPrincipal) request.getUserPrincipal();
File file = archiveService.getArchiveParentDir(principal).toPath().resolve(fileName).toFile();
return FileResponseUtil.getFileResponse(response, file);
}
}
@@ -3,14 +3,37 @@
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.controller;
import java.util.List;
public class ArchiveRequest {
private List<String> vosPaths;
String jobId;
String type;
private String type;
private String jobId;
private List<String> paths;
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
public List<String> getPaths() {
return paths;
}
public void setPaths(List<String> paths) {
this.paths = paths;
}
}
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.controller;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
import static org.springframework.http.HttpStatus.NOT_FOUND;
import org.springframework.http.ResponseEntity;
public class FileResponseUtil {
private static final Logger LOG = LoggerFactory.getLogger(FileResponseUtil.class);
public static ResponseEntity<?> getFileResponse(HttpServletResponse response, File file) {
return getFileResponse(response, file, null);
}
public static ResponseEntity<?> getFileResponse(HttpServletResponse response, File file, String fileName) {
if (!file.exists()) {
LOG.error("File not found: " + file.getAbsolutePath());
return new ResponseEntity<>("File " + file.getName() + " not found", NOT_FOUND);
}
if (!file.canRead()) {
LOG.error("File not readable: " + file.getAbsolutePath());
return new ResponseEntity<>("File " + file.getName() + " is not readable", INTERNAL_SERVER_ERROR);
}
response.setHeader("Content-Disposition", "attachment; filename="
+ URLEncoder.encode(fileName == null ? file.getName() : fileName, StandardCharsets.UTF_8));
response.setHeader("Content-Length", String.valueOf(file.length()));
response.setCharacterEncoding("UTF-8");
byte[] bytes = new byte[1024];
try (OutputStream out = response.getOutputStream(); InputStream is = new FileInputStream(file)) {
int read;
while ((read = is.read(bytes)) != -1) {
out.write(bytes, 0, read);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
return null;
}
}
@@ -10,19 +10,11 @@ import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.service.AuthorizationService;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
import static org.springframework.http.HttpStatus.NOT_FOUND;
import static org.springframework.http.HttpStatus.UNAUTHORIZED;
import org.springframework.http.ResponseEntity;
@@ -69,33 +61,9 @@ public class GetFileController extends FileController {
private ResponseEntity<?> getFileResponse(FileInfo fileInfo) {
File file = new File(fileInfo.getOsPath());
String vosName = fileInfo.getVirtualPath() == null ? null
: fileInfo.getVirtualPath().substring(fileInfo.getVirtualPath().lastIndexOf("/") + 1);
if (!file.exists()) {
LOG.error("File not found: " + file.getAbsolutePath());
return new ResponseEntity<>("File " + file.getName() + " not found", NOT_FOUND);
}
if (!file.canRead()) {
LOG.error("File not readable: " + file.getAbsolutePath());
return new ResponseEntity<>("File " + file.getName() + " is not readable", INTERNAL_SERVER_ERROR);
}
String vosName = fileInfo.getVirtualPath().substring(fileInfo.getVirtualPath().lastIndexOf("/") + 1);
response.setHeader("Content-Disposition", "attachment; filename=" + URLEncoder.encode(vosName, StandardCharsets.UTF_8));
response.setHeader("Content-Length", String.valueOf(file.length()));
response.setCharacterEncoding("UTF-8");
byte[] bytes = new byte[1024];
try (OutputStream out = response.getOutputStream();
InputStream is = new FileInputStream(file)) {
int read;
while ((read = is.read(bytes)) != -1) {
out.write(bytes, 0, read);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
return null;
return FileResponseUtil.getFileResponse(response, file, vosName);
}
}
@@ -128,22 +128,22 @@ public class FileDAO {
throw new IllegalArgumentException("Received empty list of paths");
}
String sql = "SELECT n.node_id, is_public, group_read, group_write, creator_id, async_trans,\n"
+ "content_type, content_encoding, content_length, content_md5,\n"
+ "accept_views, provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "(SELECT user_name FROM users WHERE user_id = creator_id) AS username,\n"
String sql = "SELECT n.node_id, n.is_public, n.group_read, n.group_write, n.creator_id, n.async_trans,\n"
+ "n.content_type, n.content_encoding, n.content_length, n.content_md5,\n"
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "type = 'container' AS is_directory\n"
+ "n.type = 'container' AS is_directory\n"
+ "FROM node n\n"
+ "JOIN location l ON (n.location_id IS NOT NULL AND n.location_id = l.location_id) OR (n.location_id IS NULL AND l.location_id = ?)\n"
+ "JOIN node p ON p.path @> n.path\n"
+ "LEFT JOIN location l ON l.location_id = n.location_id\n"
+ "LEFT JOIN storage s ON s.storage_id = l.storage_dest_id\n"
+ "WHERE " + String.join(" OR ", Collections.nCopies(vosPaths.size(), "n.node_id = id_from_vos_path(?)"))
+ "WHERE " + String.join(" OR ", Collections.nCopies(vosPaths.size(), "p.node_id = id_from_vos_path(?)"))
+ "\nORDER BY vos_path ASC";
return jdbcTemplate.query(conn -> {
PreparedStatement ps = conn.prepareStatement(sql);
int i = 0;
ps.setInt(++i, uploadLocationId);
for (String vosPath : vosPaths) {
ps.setString(++i, vosPath);
}
@@ -183,6 +183,10 @@ public class FileDAO {
private void fillOsPath(FileInfo fi, ResultSet rs) throws SQLException {
String basePath = rs.getString("base_path");
if (basePath == null) {
return;
}
String osPath = rs.getString("os_path");
if (osPath.startsWith("/")) {
osPath = osPath.substring(1);
......
@@ -12,7 +12,21 @@ public class ArchiveJob {
public static enum Type {
TAR,
ZIP
TGZ,
ZIP;
public String getExtension() {
switch (this) {
case TAR:
return "tar";
case TGZ:
return "tar.gz";
case ZIP:
return "zip";
default:
throw new IllegalArgumentException("Extension not defined for type " + this);
}
}
}
private List<String> vosPaths;
......
@@ -15,6 +15,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.Principal;
import java.util.List;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarOutputStream;
@@ -56,7 +57,7 @@ public class ArchiveService {
try {
// TODO: check total size limit
// TODO: switch on archive type
File parentDir = generatedDir.toPath().resolve(job.getPrincipal().getName()).toFile();
File parentDir = getArchiveParentDir(job.getPrincipal());
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
@@ -64,7 +65,7 @@
}
}
File archiveFile = parentDir.toPath().resolve(job.getJobId() + ".tar").toFile();
File archiveFile = parentDir.toPath().resolve(job.getJobId() + "." + job.getType().getExtension()).toFile();
if (!archiveFile.createNewFile()) {
throw new IllegalStateException("Unable to create file " + archiveFile.getAbsolutePath());
}
@@ -105,6 +106,10 @@
}
}
public File getArchiveParentDir(Principal principal) {
return generatedDir.toPath().resolve(principal.getName()).toFile();
}
private String getCommonParent(List<String> vosPaths) {
String commonParent = null;
for (String vosPath : vosPaths) {
......
@@ -5,8 +5,10 @@
*/
package it.inaf.ia2.transfer.controller;
import com.fasterxml.jackson.databind.ObjectMapper;
import it.inaf.ia2.transfer.service.ArchiveJob;
import it.inaf.ia2.transfer.service.ArchiveService;
import java.util.Arrays;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
import static org.mockito.ArgumentMatchers.argThat;
@@ -26,6 +28,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
@AutoConfigureMockMvc
public class ArchiveFileControllerTest {
private static final ObjectMapper MAPPER = new ObjectMapper();
@MockBean
private ArchiveService archiveService;
@@ -35,11 +39,16 @@ public class ArchiveFileControllerTest {
@Test
public void testCreateTarArchive() throws Exception {
mockMvc.perform(post("/tar?jobId=123")
ArchiveRequest request = new ArchiveRequest();
request.setJobId("123");
request.setType("TAR");
request.setPaths(Arrays.asList("/path/to/file1", "/path/to/file2"));
mockMvc.perform(post("/archive")
.contentType(MediaType.APPLICATION_JSON)
.content("[\"/path/to/file1\", \"/path/to/file2\"]"))
.content(MAPPER.writeValueAsString(request)))
.andDo(print())
.andExpect(status().isOk());
.andExpect(status().is3xxRedirection());
verify(archiveService, times(1)).createArchive(argThat(job -> {
assertEquals("123", job.getJobId());
......
@@ -6,6 +6,8 @@
package it.inaf.ia2.transfer.persistence;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import javax.sql.DataSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -44,4 +46,18 @@ public class FileDAOTest {
assertEquals("/home/username1/retrieve/file1.txt", fileInfo.getOsPath());
}
@Test
public void testGetArchiveFileInfos() {
List<FileInfo> fileInfos = dao.getArchiveFileInfos(Arrays.asList("/public/file1", "/public/file2", "/public/subdir1"));
assertEquals(5, fileInfos.size());
assertEquals("/home/vospace/upload/user1/file1", fileInfos.get(0).getOsPath());
assertEquals("/home/vospace/upload/user1/file2", fileInfos.get(1).getOsPath());
assertTrue(fileInfos.get(2).isDirectory());
assertEquals("/home/username1/retrieve/subdir1/file3", fileInfos.get(3).getOsPath());
assertEquals("/home/username1/retrieve/subdir1/file4", fileInfos.get(4).getOsPath());
}
}
@@ -18,10 +18,18 @@ INSERT INTO users (user_id, user_name, e_mail) VALUES ('user1', 'username1', 'ia
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id) VALUES (NULL, NULL, '', 'container', '0');
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write) VALUES ('', NULL, 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write) VALUES ('2', NULL, '.tmp-123.txt', 'structured', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1/.tmp-123.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file1.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1); -- /test1/file2.txt
-- test data for tar/zip archive
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, location_id, is_public) VALUES
('', NULL, 'public', 'container', 'user1', NULL, true),
('5', '', 'file1', 'data', 'user1', 3, true),
('5', '', 'file2', 'data', 'user1', 3, true),
('5', '', 'subdir1', 'container', 'user1', NULL, true),
('5.8', '8', 'file3', 'data', 'user1', 1, true),
('5.8', '8', 'file4', 'data', 'user1', 1, true);
DELETE FROM job;
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo1', 'user1', 'pullFromVoSpace', 'ARCHIVED', NULL, NULL, '2011-06-22 19:10:25', NULL, NULL);
......