 import pytest
-from plane.db.models import User, Workspace, Project, ProjectMember, Issue, FileAsset
+from plane.db.models import Project, ProjectMember, Issue, FileAsset
 from unittest.mock import patch, MagicMock
-from plane.bgtasks.copy_s3_object import copy_s3_objects
+from plane.bgtasks.copy_s3_object import (
+    copy_s3_objects_of_description_and_assets,
+    copy_assets,
+)
 import base64
 
 
 @pytest.mark.unit
 class TestCopyS3Objects:
-    """Test the copy_s3_objects function"""
-
-    @pytest.mark.django_db
-    @patch("plane.bgtasks.copy_s3_object.S3Storage")
-    def test_copy_s3_objects(self, mock_s3_storage):
-        # Create test data
-        test_user = User.objects.create(
-            email="[email protected]", first_name="Test", last_name="User"
-        )
-
-        workspace = Workspace.objects.create(
-            name="Test Workspace", slug="test-workspace", owner=test_user
-        )
+    """Test the copy_s3_objects_of_description_and_assets function"""
 
+    @pytest.fixture
+    def project(self, create_user, workspace):
         project = Project.objects.create(
             name="Test Project", identifier="test-project", workspace=workspace
         )
-        ProjectMember.objects.create(project=project, member=test_user)
 
-        issue = Issue.objects.create(
+        ProjectMember.objects.create(project=project, member=create_user)
+        return project
+
+    @pytest.fixture
+    def issue(self, workspace, project):
+        return Issue.objects.create(
             name="Test Issue",
             workspace=workspace,
-            project=project,
+            project_id=project.id,
             description_html=f'<div><image-component src="35e8b958-6ee5-43ce-ae56-fb0e776f421e"></image-component><image-component src="97988198-274f-4dfe-aa7a-4c0ffc684214"></image-component></div>',
         )
 
-        # Create test file assets
-        asset1 = FileAsset.objects.create(
+    @pytest.fixture
+    def file_asset(self, workspace, project, issue):
+        return FileAsset.objects.create(
             issue=issue,
             workspace=workspace,
             project=project,
@@ -45,10 +43,15 @@ def test_copy_s3_objects(self, mock_s3_storage):
                 "type": "image/jpeg",
             },
             id="35e8b958-6ee5-43ce-ae56-fb0e776f421e",
-            entity_type="ISSUE_DESCRIPTION",  # Set the correct entity type
+            entity_type="ISSUE_DESCRIPTION",
         )
 
-        asset2 = FileAsset.objects.create(
+    @pytest.mark.django_db
+    @patch("plane.bgtasks.copy_s3_object.S3Storage")
+    def test_copy_s3_objects_of_description_and_assets(
+        self, mock_s3_storage, create_user, workspace, project, issue, file_asset
+    ):
+        FileAsset.objects.create(
             issue=issue,
             workspace=workspace,
             project=project,
@@ -59,9 +62,11 @@ def test_copy_s3_objects(self, mock_s3_storage):
                 "type": "application/pdf",
             },
             id="97988198-274f-4dfe-aa7a-4c0ffc684214",
-            entity_type="ISSUE_DESCRIPTION",  # Set the correct entity type
+            entity_type="ISSUE_DESCRIPTION",
         )
 
+        issue.save()
+
         # Set up mock S3 storage
         mock_storage_instance = MagicMock()
         mock_s3_storage.return_value = mock_storage_instance
@@ -76,39 +81,102 @@ def test_copy_s3_objects(self, mock_s3_storage):
        }
 
         # Call the actual function (not .delay())
-        copy_s3_objects(
-            "ISSUE", issue.id, project.id, "test-workspace", test_user.id
+        copy_s3_objects_of_description_and_assets(
+            "ISSUE", issue.id, project.id, "test-workspace", create_user.id
         )
 
         # Assert that copy_object was called for each asset
         assert mock_storage_instance.copy_object.call_count == 2
 
-        # Get the copy operations that were performed
-        copy_calls = mock_storage_instance.copy_object.call_args_list
-
-        # Sort assets and calls by asset name to ensure they match
-        assets = sorted([asset1, asset2], key=lambda x: str(x.asset))
-        sorted_calls = sorted(
-            copy_calls, key=lambda x: str(x[0][0])
-        )  # Sort by source path as string
-
-        # Verify both assets were copied
-        for asset, call in zip(assets, sorted_calls):
-            args = call[0]
-            # Verify source path matches original asset
-            assert str(asset.asset) == str(
-                args[0]
-            )  # Convert both to strings for comparison
-            # Verify destination path
-            assert str(workspace.id) in str(args[1])
-            assert asset.attributes["name"] in str(args[1])
-
         # Get the updated issue and its new assets
         updated_issue = Issue.objects.get(id=issue.id)
         new_assets = FileAsset.objects.filter(
             issue=updated_issue,
-            entity_type="ISSUE_DESCRIPTION",  # Filter by the correct entity type
+            entity_type="ISSUE_DESCRIPTION",
         )
 
         # Verify new assets were created
-        assert new_assets.count() == 4
+        assert new_assets.count() == 4  # 2 original + 2 copied
+
+    @pytest.mark.django_db
+    @patch("plane.bgtasks.copy_s3_object.S3Storage")
+    def test_copy_assets_successful(
+        self, mock_s3_storage, workspace, project, issue, file_asset
+    ):
+        """Test successful copying of assets"""
+        # Arrange
+        mock_storage_instance = MagicMock()
+        mock_s3_storage.return_value = mock_storage_instance
+
+        # Act
+        result = copy_assets(
+            entity=issue,
+            entity_identifier=issue.id,
+            project_id=project.id,
+            asset_ids=[file_asset.id],
+            user_id=issue.created_by_id,
+        )
+
+        # Assert
+        # Verify S3 copy was called
+        mock_storage_instance.copy_object.assert_called_once()
+
+        # Verify new asset was created
+        assert len(result) == 1
+        new_asset_id = result[0]["new_asset_id"]
+        new_asset = FileAsset.objects.get(id=new_asset_id)
+
+        # Verify asset properties were copied correctly
+        assert new_asset.workspace == workspace
+        assert new_asset.project_id == project.id
+        assert new_asset.entity_type == file_asset.entity_type
+        assert new_asset.attributes == file_asset.attributes
+        assert new_asset.size == file_asset.size
+        assert new_asset.is_uploaded is True
+
+    @pytest.mark.django_db
+    @patch("plane.bgtasks.copy_s3_object.S3Storage")
+    def test_copy_assets_empty_asset_ids(
+        self, mock_s3_storage, workspace, project, issue
+    ):
+        """Test copying with empty asset_ids list"""
+        # Arrange
+        mock_storage_instance = MagicMock()
+        mock_s3_storage.return_value = mock_storage_instance
+
+        # Act
+        result = copy_assets(
+            entity=issue,
+            entity_identifier=issue.id,
+            project_id=project.id,
+            asset_ids=[],
+            user_id=issue.created_by_id,
+        )
+
+        # Assert
+        assert result == []
+        mock_storage_instance.copy_object.assert_not_called()
+
+    @pytest.mark.django_db
+    @patch("plane.bgtasks.copy_s3_object.S3Storage")
+    def test_copy_assets_nonexistent_asset(
+        self, mock_s3_storage, workspace, project, issue
+    ):
+        """Test copying with non-existent asset ID"""
+        # Arrange
+        mock_storage_instance = MagicMock()
+        mock_s3_storage.return_value = mock_storage_instance
+        non_existent_id = "00000000-0000-0000-0000-000000000000"
+
+        # Act
+        result = copy_assets(
+            entity=issue,
+            entity_identifier=issue.id,
+            project_id=project.id,
+            asset_ids=[non_existent_id],
+            user_id=issue.created_by_id,
+        )
+
+        # Assert
+        assert result == []
+        mock_storage_instance.copy_object.assert_not_called()
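
For context when reading this diff: the rewritten tests depend on create_user and workspace fixtures that are not defined in this file and are presumably provided by the repository's pytest conftest. Below is a minimal sketch of what such fixtures might look like, mirroring the setup the old test body created inline; the fixture bodies, the db dependency, and the example e-mail address are illustrative assumptions, not Plane's actual conftest.

# Hypothetical conftest.py sketch (assumed, not taken from the repository).
import pytest

from plane.db.models import User, Workspace


@pytest.fixture
def create_user(db):
    # A user the tests can use for project membership and workspace ownership.
    # The e-mail address is a placeholder.
    return User.objects.create(
        email="test-user@example.com", first_name="Test", last_name="User"
    )


@pytest.fixture
def workspace(db, create_user):
    # The slug matches the "test-workspace" string passed to the background task
    # in test_copy_s3_objects_of_description_and_assets above.
    return Workspace.objects.create(
        name="Test Workspace", slug="test-workspace", owner=create_user
    )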