Commit ea16cc78 authored by Nicola Fulvio Calabria
Browse files

Cleaned test data script

parent ba05f979
Loading
Loading
Loading
Loading
+1 −3
Original line number Diff line number Diff line
@@ -85,7 +85,7 @@ public class FileDAOTest {

        List<FileInfo> fileInfos = dao.getArchiveFileInfos(Arrays.asList("/public/file1", "/public/file2", "/public/subdir1"));

        assertEquals(6, fileInfos.size());
        assertEquals(5, fileInfos.size());

        assertEquals("/home/vospace/upload/2021/9/30/file1-UUID", fileInfos.get(0).getFilePath());
        assertEquals("/home/vospace/upload/2021/9/30/file2-UUID", fileInfos.get(1).getFilePath());
@@ -95,8 +95,6 @@ public class FileDAOTest {
        assertEquals("/home/username1/retrieve/2021/9/30/file3-UUID", fileInfos.get(3).getFilePath());
        assertEquals("/home/username1/retrieve/2021/9/30/file4-UUID", fileInfos.get(4).getFilePath());
        assertEquals("async", fileInfos.get(4).getLocationType());
        assertEquals("portal-file", fileInfos.get(5).getVirtualName());
        assertEquals("portal", fileInfos.get(5).getLocationType());
    }

    @Test
+0 −11
Original line number Diff line number Diff line
@@ -223,17 +223,6 @@ public class ArchiveServiceTest {

        when(fileDAO.getArchiveFileInfos(any())).thenReturn(fileInfos);

        /*
        doAnswer(invocation -> {
            ResponseExtractor responseExtractor = invocation.getArgument(3);
            ClientHttpResponse mockedResponse = mock(ClientHttpResponse.class);
            when(mockedResponse.getBody()).thenReturn(new ByteArrayInputStream("some data".getBytes()));
            responseExtractor.extractData(mockedResponse);
            return null;
        }).when(restTemplate).execute(eq("http://portal/base/url/portal-file"), eq(HttpMethod.GET),
                any(RequestCallback.class), any(ResponseExtractor.class), any(Object[].class));
        */

        archiveService.createArchive(job, servletRequest);

        File result = tmpDir.toPath().resolve("user1").resolve("abcdef." + extension).toFile();
+21 −24
Original line number Diff line number Diff line
@@ -2,12 +2,10 @@ INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('cold'
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('hot', '/mnt/hot_storage/users', NULL, 'server');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('local', '/home/vospace/upload', NULL, 'localhost');
INSERT INTO storage (storage_type, base_path, base_url, hostname) VALUES ('portal', NULL, '/files/lbt', 'archive.lbto.org');

INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 1, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('async', 2, 3);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('user', 4, 4);
INSERT INTO location (location_type, storage_src_id, storage_dest_id) VALUES ('portal', 5, 5);

DELETE FROM node;
ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
@@ -15,33 +13,32 @@ ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
DELETE FROM users;
INSERT INTO users (user_id, user_name, e_mail) VALUES ('user1', 'username1', 'ia2@inaf.it');

INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id) VALUES (NULL, NULL, '', 'container', '0');
INSERT INTO node (parent_path, name, type, creator_id) VALUES (NULL, '', 'container', '0');

INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write) VALUES ('', NULL, 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}');      -- /test1
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1);      -- /test1/file1.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', '', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1);      -- /test1/file2.txt
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write) VALUES ('', 'test1', 'container', 'user1', '{"group1","group2"}','{"group2"}');      -- /test1
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file1.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1);      -- /test1/file1.txt
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, location_id) VALUES ('2', 'file2.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 1);      -- /test1/file2.txt

-- test data for tar/zip archive
INSERT INTO node (parent_path, parent_relative_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
('', NULL, 'public', NULL, 'container', 'user1', NULL, true),
('5', '', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
('5', '', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
('5', '', 'subdir1', NULL, 'container', 'user1', NULL, true),
('5.8', '8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
('5.8', '8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true),
('5.8', '8', 'portal-file', NULL, 'data', 'user1', 4, true);

INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, content_length, quota) VALUES
('', NULL, 'test_quota', 'container', 'user1', 0, 900000),
('12', NULL, 'subdir', 'container', 'user1', 0, 500000),
('12.13', NULL, 'file1', 'data', 'user1', 100000, 500000),
('12.13', NULL, 'file2', 'data', 'user1', 200000, 500000);
INSERT INTO node (parent_path, name, fs_path, type, creator_id, location_id, is_public) VALUES
('', 'public', NULL, 'container', 'user1', NULL, true),
('5', 'file1', '2021/9/30/file1-UUID', 'data', 'user1', 3, true),
('5', 'file2', '2021/9/30/file2-UUID', 'data', 'user1', 3, true),
('5', 'subdir1', NULL, 'container', 'user1', NULL, true),
('5.8', 'file3', '2021/9/30/file3-UUID', 'data', 'user1', 1, true),
('5.8', 'file4', '2021/9/30/file4-UUID', 'data', 'user1', 1, true);

INSERT INTO node (parent_path, name, type, creator_id, content_length, quota) VALUES
('', 'test_quota', 'container', 'user1', 0, 900000),
('11', 'subdir', 'container', 'user1', 0, 500000),
('11.12', 'file1', 'data', 'user1', 100000, 500000),
('11.12', 'file2', 'data', 'user1', 200000, 500000);

-- test data for get branch file info
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', NULL, 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL);      -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo');      -- /test100
INSERT INTO node (parent_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('15', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL);      -- /test100

DELETE FROM job;