source: OpenWorkouts-current/ow/tests/models/test_bulk.py @ 715671f

Last change on this file was 715671f, checked in by Borja Lopez <borja@…>, 5 years ago

(#77) Bulk workouts upload:

  • Added methods to extract files from compressed bulk files, then load workouts from those files (see the hypothetical sketch after the file info below).
  • Added a task to process/load workouts from "not loaded" bulk files.
  • Added full test coverage.
  • Property mode set to 100644
File size: 10.0 KB
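
For context on what the tests exercise: BulkFile.extract() is expected to unpack a compressed bulk file (zip/tgz) into a temporary directory and return the paths of the extracted workout files, and BulkFile.load() then turns those files into workouts for the owning user. The tests patch ow.models.bulk.unpack_archive, which suggests the model relies on shutil.unpack_archive. The snippet below is a minimal, hypothetical sketch of that extraction step only, not the actual OpenWorkouts implementation; the extract_bulk helper and its signature are made up for illustration.

import os
from shutil import unpack_archive
from tempfile import TemporaryDirectory


def extract_bulk(compressed_path, tmp_path):
    # Unpack a bulk archive (.zip or .tgz) into tmp_path and return the
    # full paths of the extracted workout files; if there is no archive
    # on disk, there is nothing to extract.
    if not os.path.isfile(compressed_path):
        return []
    unpack_archive(compressed_path, tmp_path)
    return [os.path.join(tmp_path, name) for name in os.listdir(tmp_path)]


if __name__ == '__main__':
    with TemporaryDirectory() as tmp_path:
        # 'bulk-fit.zip' is a placeholder path, used only for illustration
        print(extract_bulk('bulk-fit.zip', tmp_path))

Note how test_extract_none below checks the matching edge case on the real model: when a BulkFile has no compressed file attached, extract() returns an empty list and neither unpack_archive nor os.remove is called.
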
import os
from datetime import datetime, timezone, timedelta
from tempfile import TemporaryDirectory
from unittest.mock import patch

import pytest
import pytz
from pyramid.security import Allow, Everyone, Deny, ALL_PERMISSIONS

from ow.models.user import User
from ow.models.root import OpenWorkouts
from ow.models.bulk import BulkFile, BulkFiles
from ow.utilities import create_blob


class TestBulkFile(object):

    @pytest.fixture
    def root(self):
        root = OpenWorkouts()
        root['_bulk_files'] = BulkFiles()
        john = User(firstname='John', lastname='Doe',
                    email='john.doe@example.net')
        john.password = 's3cr3t'
        root.add_user(john)
        uid = str(john.uid)
        bulk_file = BulkFile(uid=uid)
        bulk_file_path = os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'fixtures/bulk-fit.zip')
        with open(bulk_file_path, 'rb') as _bulk_file:
            bulk_file.compressed_file = create_blob(
                _bulk_file.read(), file_extension='zip', binary=True)
        bulk_file.file_name = 'bulk-fit.zip'
        bulk_file.file_type = 'zip'
        root['_bulk_files'].add_bulk_file(bulk_file)
        return root

    def test__acl__(self, root):
        bulk_file = root['_bulk_files'].values()[0]
        permissions = [
            (Allow, str(bulk_file.uid), 'view'),
            (Allow, str(bulk_file.uid), 'edit'),
            (Allow, str(bulk_file.uid), 'delete'),
            (Deny, Everyone, ALL_PERMISSIONS)
        ]
        assert bulk_file.__acl__() == permissions

    def test_uploaded_in_timezone(self, root):
        bulk_file = root['_bulk_files'].values()[0]
        timezones = ['UTC', 'Europe/Madrid', 'Asia/Tokyo', 'Canada/Pacific']
        for _timezone in timezones:
            expected = bulk_file.uploaded.astimezone(pytz.timezone(_timezone))
            expected = expected.strftime('%d/%m/%Y %H:%M (%Z)')
            assert bulk_file.uploaded_in_timezone(_timezone) == expected

    def test_loaded_in_timezone(self, root):
        timezones = ['UTC', 'Europe/Madrid', 'Asia/Tokyo', 'Canada/Pacific']
        # first try a non-loaded bulk file
        bulk_file = root['_bulk_files'].values()[0]
        for _timezone in timezones:
            assert bulk_file.loaded_in_timezone(_timezone) == ''
        # now, "mark" it as loaded, try again
        bulk_file.loaded = datetime.now(timezone.utc) - timedelta(hours=5)
        for _timezone in timezones:
            expected = bulk_file.loaded.astimezone(pytz.timezone(_timezone))
            expected = expected.strftime('%d/%m/%Y %H:%M (%Z)')
            assert bulk_file.loaded_in_timezone(_timezone) == expected

    @patch('ow.models.bulk.os')
    @patch('ow.models.bulk.unpack_archive')
    def test_extract_none(self, unpack_archive, _os, root):
        """
        Call extract on a bulk file without an associated compressed file.
        """
        user = root.users[0]
        uid = str(user.uid)
        bulk_file = BulkFile(uid=uid)
        with TemporaryDirectory() as tmp_path:
            extracted = bulk_file.extract(tmp_path, tmp_path)
            assert extracted == []
            assert len(os.listdir(tmp_path)) == 0
            assert not unpack_archive.called
            assert not _os.path.join.called
            assert not _os.remove.called

    # (fixture archive, expected extracted file names) pairs used to
    # parametrize test_extract below
    params = (
        ('fixtures/bulk-fit.zip', {
            'extracted': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
        }),
        ('fixtures/bulk-fit.tgz', {
            'extracted': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
        }),
        ('fixtures/bulk-gpx.zip', {
            'extracted': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
        }),
        ('fixtures/bulk-gpx.tgz', {
            'extracted': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
        }),
        ('fixtures/bulk-empty.zip', {'extracted': []}),
    )

    @pytest.mark.parametrize(('filename', 'expected'), params)
    def test_extract(self, filename, expected, root):
        base_name, extension = os.path.splitext(filename)
        extension = extension.lstrip('.')

        user = root.users[0]
        uid = str(user.uid)

        bulk_file = BulkFile(uid=uid)
        bulk_file_path = os.path.join(
            os.path.dirname(os.path.dirname(__file__)), filename)
        with open(bulk_file_path, 'rb') as _bulk_file:
            bulk_file.compressed_file = create_blob(
                _bulk_file.read(), file_extension=extension, binary=True)
        bulk_file.file_name = os.path.basename(filename)
        bulk_file.file_type = extension

        root['_bulk_files'].add_bulk_file(bulk_file)

        with TemporaryDirectory() as tmp_path:
            extracted = bulk_file.extract(tmp_path, tmp_path)
            assert expected['extracted'] == os.listdir(tmp_path)
            expected_extracted = [
                os.path.join(tmp_path, p) for p in expected['extracted']]
            assert extracted == expected_extracted

    # rebinding params here is safe: the parametrize decorator on
    # test_extract above has already captured the previous value; these
    # cases also list the workout files expected to be loaded
    params = (
        ('fixtures/bulk-fit.zip', {
            'extracted': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
            'loaded': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
        }),
        ('fixtures/bulk-fit.tgz', {
            'extracted': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
            'loaded': [
                '2019-09-19-09-42-41.fit', '2019-09-17-09-42-50.fit'
            ],
        }),
        ('fixtures/bulk-gpx.zip', {
            'extracted': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
            'loaded': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
        }),
        ('fixtures/bulk-gpx.tgz', {
            'extracted': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
            'loaded': [
                '20181230_101115.gpx', '20181231_110728.gpx'
            ],
        }),
        ('fixtures/bulk-empty.zip', {'extracted': [], 'loaded': []}),
        ('fixtures/bulk-invalid.zip', {
            'extracted': [
                'empty.fit', 'empty.gpx', 'invalid.fit', 'invalid.gpx',
                '20181230_101115.gpx', '20181230_101115-duplicate.gpx'
            ],
            'loaded': ['empty.gpx', '20181230_101115.gpx'],
        }),
    )

    @pytest.mark.parametrize(('filename', 'expected'), params)
    def test_load(self, filename, expected, root):
        base_name, extension = os.path.splitext(filename)
        extension = extension.lstrip('.')

        user = root.users[0]
        uid = str(user.uid)

        bulk_file = BulkFile(uid=uid)
        bulk_file_path = os.path.join(
            os.path.dirname(os.path.dirname(__file__)), filename)
        with open(bulk_file_path, 'rb') as _bulk_file:
            bulk_file.compressed_file = create_blob(
                _bulk_file.read(), file_extension=extension, binary=True)
        bulk_file.file_name = os.path.basename(filename)
        bulk_file.file_type = extension

        root['_bulk_files'].add_bulk_file(bulk_file)

        assert list(user.workouts()) == []
        assert not bulk_file.loaded
        assert bulk_file.loaded_info == {}
        assert bulk_file.workout_ids == []

        num_extracted = len(expected['extracted'])
        num_loaded = len(expected['loaded'])
        with TemporaryDirectory() as tmp_path:
            bulk_file.load(root, tmp_path)
            assert isinstance(bulk_file.loaded, datetime)
            assert len(bulk_file.loaded_info.keys()) == num_extracted
            assert len(bulk_file.workout_ids) == num_loaded
            for key, value in bulk_file.loaded_info.items():
                if value['loaded']:
                    assert value['error'] is None
                    assert value['workout'] is not None
                else:
                    assert isinstance(value['error'], str)
                    assert len(value['error']) > 2
                    assert value['workout'] is None
            assert len(user.workouts()) == num_loaded

class TestBulkFiles(object):

    @pytest.fixture
    def root(self):
        root = OpenWorkouts()
        root['_bulk_files'] = BulkFiles()
        return root

    def test__acl__(self, root):
        permissions = [
            (Allow, Everyone, 'view'),
            (Allow, 'admins', 'edit'),
            (Deny, Everyone, ALL_PERMISSIONS)
        ]
        assert root['_bulk_files'].__acl__() == permissions

    def test_add_bulk_file(self, root):
        assert len(root['_bulk_files']) == 0
        bulk_file = BulkFile(uid='faked-uid')
        root['_bulk_files'].add_bulk_file(bulk_file)
        assert len(root['_bulk_files']) == 1
        assert list(root['_bulk_files'].keys()) == [str(bulk_file.bfid)]
        assert list(root['_bulk_files'].values()) == [bulk_file]

    def test_get_by_uid(self, root):
        # no bulk files uploaded, trying to get one for 'faked-uid'
        bulk_files = root['_bulk_files'].get_by_uid('faked-uid')
        assert bulk_files == []
        # add a bulk file, trying to get it back
        bulk_file = BulkFile(uid='faked-uid')
        root['_bulk_files'].add_bulk_file(bulk_file)
        bulk_files = root['_bulk_files'].get_by_uid('faked-uid')
        assert bulk_files == [bulk_file]
        # trying to get files for another user, who did not upload anything
        bulk_files = root['_bulk_files'].get_by_uid('other-faked-uid')
        assert bulk_files == []
        # add another bulk file, for the same user, and get both files
        other_bulk_file = BulkFile(uid='faked-uid')
        root['_bulk_files'].add_bulk_file(other_bulk_file)
        bulk_files = root['_bulk_files'].get_by_uid('faked-uid')
        assert bulk_file in bulk_files
        assert other_bulk_file in bulk_files
        assert len(bulk_files) == 2
        # the other user still did not upload anything, so no files are
        # returned for them
        bulk_files = root['_bulk_files'].get_by_uid('other-faked-uid')
        assert bulk_files == []