|
1 |
| -from unittest.mock import patch, call, ANY |
2 |
| -from collections import defaultdict |
| 1 | +import json |
3 | 2 |
|
4 | 3 | import pytest
|
| 4 | +import responses |
5 | 5 |
|
6 | 6 | from filestack.uploads.intelligent_ingestion import upload_part
|
7 |
| -from tests.helpers import DummyHttpResponse |
8 | 7 |
|
9 | 8 |
|
10 |
@responses.activate
def test_upload_part_success():
    """Happy path: a part is uploaded in one chunk and then committed."""
    # Register the three mocked endpoints upload_part is expected to hit,
    # in call order: start-part POST, S3 PUT, commit POST.
    responses.add(
        responses.POST, 'https://fs-upload.com/multipart/upload',
        json={'url': 'http://s3.url', 'headers': {'filestack': 'headers'}}
    )
    responses.add(responses.PUT, 'http://s3.url')
    responses.add(responses.POST, 'https://fs-upload.com/multipart/commit')

    start_response = {
        'uri': 'fs-upload.com', 'location_url': 'fs-upload.com', 'region': 'region', 'upload_id': 'abc'
    }
    part = {'seek_point': 0, 'num': 1}
    upload_part('Aaaaapikey', 'file.txt', 'tests/data/doom.mp4', 1234, 's3', start_response, part)

    # Call 0: /multipart/upload must carry the full part metadata.
    upload_payload = json.loads(responses.calls[0].request.body.decode())
    assert upload_payload == {
        'apikey': 'Aaaaapikey', 'uri': 'fs-upload.com', 'region': 'region',
        'upload_id': 'abc', 'store': {'location': 's3'},
        'part': 1, 'size': 5415034, 'md5': 'IuNjhgPo2wbzGFo6f7WhUA==', 'offset': 0, 'fii': True
    }
    # Call 1: the PUT body must be exactly the file's bytes.
    with open('tests/data/doom.mp4', 'rb') as source_file:
        assert responses.calls[1].request.body == source_file.read()
    # Call 2: /multipart/commit with the declared part size.
    commit_payload = json.loads(responses.calls[2].request.body.decode())
    assert commit_payload == {
        'apikey': 'Aaaaapikey', 'uri': 'fs-upload.com', 'region': 'region',
        'upload_id': 'abc', 'store': {'location': 's3'}, 'part': 1, 'size': 1234
    }
46 | 36 |
|
47 | 37 |
|
48 |
@responses.activate
def test_upload_part_with_resize():
    """A failed PUT makes upload_part split the part into smaller chunks.

    The first S3 PUT answers 400; upload_part then retries the part as
    two smaller chunks, so three upload/PUT pairs happen in total.
    """
    responses.add(
        responses.POST, 'https://fs-upload.com/multipart/upload',
        json={'url': 'https://s3.url', 'headers': {'filestack': 'headers'}}
    )
    responses.add(responses.PUT, 'https://s3.url', status=400)
    responses.add(responses.PUT, 'https://s3.url')  # chunks 1 & 2 of part 1
    responses.add(responses.POST, 'https://fs-upload.com/multipart/commit')

    start_response = {
        'uri': 'fs-upload.com', 'location_url': 'fs-upload.com', 'region': 'region', 'upload_id': 'abc'
    }
    part = {'seek_point': 0, 'num': 1}
    upload_part('Aaaaapikey', 'file.txt', 'tests/data/doom.mp4', 5415034, 's3', start_response, part)

    responses.assert_call_count('https://fs-upload.com/multipart/upload', 3)
    responses.assert_call_count('https://s3.url', 3)
    # Calls alternate POST/PUT; the odd indices are the PUT bodies:
    # full part first, then the two retry chunks it was split into.
    expected_put_sizes = {1: 5415034, 3: 4194304, 5: 1220730}
    for call_index, expected_size in expected_put_sizes.items():
        assert len(responses.calls[call_index].request.body) == expected_size
77 | 59 |
|
78 | 60 |
|
79 |
@responses.activate
def test_min_chunk_size_exception():
    """upload_part raises once retries shrink chunks to the minimum size.

    Every PUT to the upload URL fails with a 400, so upload_part keeps
    retrying with smaller chunks; when even the minimal 32768-byte chunk
    fails it must raise instead of retrying forever.
    """
    # NOTE: the redundant responses.reset() was removed — @responses.activate
    # already starts each test with a fresh, empty registry.
    responses.add(
        responses.POST, 'https://fs-upload.com/multipart/upload',
        json={'url': 'https://upload.url', 'headers': {'filestack': 'headers'}}
    )
    responses.add(responses.PUT, 'https://upload.url', status=400)

    part = {'seek_point': 0, 'num': 1}
    start_response = {
        'uri': 'fs-upload.com', 'location_url': 'fs-upload.com', 'region': 'region', 'upload_id': 'abc'
    }
    with pytest.raises(Exception, match='Minimal chunk size failed'):
        upload_part('Aaaaapikey', 'file.txt', 'tests/data/doom.mp4', 5415034, 's3', start_response, part)

    chunk_sizes = [len(call.request.body) for call in responses.calls if call.request.method == 'PUT']
    assert chunk_sizes[-1] == 32768  # check size of last attempt
0 commit comments