Skip to content

sitesync

Python 3 only implementation.

SiteSyncThread

Bases: Thread

Separate thread running synchronization server with asyncio loop. Stopped when tray is closed.

Source code in client/ayon_sitesync/sitesync.py
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
class SiteSyncThread(threading.Thread):
    """
        Separate thread running synchronization server with asyncio loop.
        Stopped when tray is closed.
    """
    def __init__(self, addon):
        self.log = Logger.get_logger(self.__class__.__name__)
        super(SiteSyncThread, self).__init__()
        # addon: SiteSyncAddon instance providing settings, DB access
        # and pause/resume state
        self.addon = addon
        # event loop owned by this thread, created in 'run'
        self.loop = None
        self.is_running = False
        # small pool used for blocking provider calls (upload/download)
        self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
        # asyncio task waiting between sync loops, cancellable by
        # 'reset_timer'
        self.timer = None

    def run(self):
        """Thread entry point - spins up a private asyncio loop."""
        self.is_running = True

        try:
            self.log.info("Starting SiteSync")
            self.loop = asyncio.new_event_loop()  # create new loop for thread
            asyncio.set_event_loop(self.loop)
            self.loop.set_default_executor(self.executor)

            asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
            asyncio.ensure_future(self.sync_loop(), loop=self.loop)
            self.log.info("SiteSync Started")
            self.loop.run_forever()
        except Exception:
            self.log.warning(
                "SiteSync service has failed", exc_info=True
            )
        finally:
            # 'self.loop' stays None if 'new_event_loop' raised; guard so
            # an AttributeError doesn't mask the original exception
            if self.loop is not None:
                self.loop.close()

    async def sync_loop(self):
        """
            Runs permanently, each time:
                - gets list of collections in DB
                - gets list of active remote providers (has configuration,
                    credentials)
                - for each project_name it looks for representations that
                  should be synced
                - synchronize found collections
                - update representations - fills error messages for exceptions
                - waits X seconds and repeat
        Returns:

        """
        while self.is_running and not self.addon.is_paused():
            try:
                start_time = time.time()
                self.addon.set_sync_project_settings()  # clean cache
                # 'project_name' intentionally survives the loop; delay is
                # read from settings of the last processed project (None
                # when no project is enabled)
                project_name = None
                enabled_projects = self.addon.get_enabled_projects()
                for project_name in enabled_projects:
                    await self._sync_project(project_name)

                duration = time.time() - start_time
                self.log.debug("One loop took {:.2f}s".format(duration))

                delay = self.addon.get_loop_delay(project_name)
                self.log.debug(
                    "Waiting for {} seconds to new loop".format(delay)
                )
                # store the timer task so 'reset_timer' can cancel the wait
                self.timer = asyncio.create_task(self.run_timer(delay))
                await asyncio.gather(self.timer)

            except ConnectionResetError:
                self.log.warning(
                    "ConnectionResetError in sync loop, trying next loop",
                    exc_info=True)
            except asyncio.exceptions.CancelledError:
                # cancelling timer
                pass
            except ResumableError:
                self.log.warning(
                    "ResumableError in sync loop, trying next loop",
                    exc_info=True)
            except Exception:
                self.stop()
                self.log.warning(
                    "Unhandled except. in sync loop, stopping server",
                    exc_info=True)

    def stop(self):
        """Sets is_running flag to false, 'check_shutdown' shuts server down"""
        self.is_running = False

    async def check_shutdown(self):
        """ Future that is running and checks if server should be running
            periodically.

            While running it also drains 'addon.long_running_tasks' - each
            popped task is executed in the default executor and its project
            is removed from 'addon.projects_processed' afterwards.
        """
        while self.is_running:
            if self.addon.long_running_tasks:
                task = self.addon.long_running_tasks.pop()
                self.log.info("starting long running")
                # run blocking func in executor to keep the loop responsive
                await self.loop.run_in_executor(None, task["func"])
                self.log.info("finished long running")
                self.addon.projects_processed.remove(task["project_name"])
            await asyncio.sleep(0.5)

        # 'is_running' went False >> cancel remaining tasks and stop loop
        tasks = [
            task
            for task in asyncio.all_tasks()
            if task is not asyncio.current_task()
        ]
        list(map(lambda task: task.cancel(), tasks))  # cancel all the tasks
        results = await asyncio.gather(*tasks, return_exceptions=True)
        self.log.debug(
            f"Finished awaiting cancelled tasks, results: {results}...")
        await self.loop.shutdown_asyncgens()
        # to really make sure everything else has time to stop
        self.executor.shutdown(wait=True)
        await asyncio.sleep(0.07)
        self.loop.stop()

    async def run_timer(self, delay):
        """Wait for 'delay' seconds to start next loop"""
        await asyncio.sleep(delay)

    def reset_timer(self):
        """Called when waiting for next loop should be skipped"""
        self.log.debug("Resetting timer")
        if self.timer:
            self.timer.cancel()
            self.timer = None

    def _working_sites(self, project_name, sync_config):
        """Return (local_site, remote_site) or (None, None) when sync
            should be skipped (paused project, same sites, broken site).
        """
        if self.addon.is_project_paused(project_name):
            # NOTE fixed log message - previously logged "Both sites same"
            self.log.debug("Project '{}' paused, skipping".format(
                project_name))
            return None, None

        local_site = self.addon.get_active_site(project_name)
        remote_site = self.addon.get_remote_site(project_name)
        if local_site == remote_site:
            self.log.debug("{}-{} sites same, skipping".format(
                local_site, remote_site))
            return None, None

        local_site_config = sync_config.get("sites")[local_site]
        remote_site_config = sync_config.get("sites")[remote_site]
        if not all([
            _site_is_working(
                self.addon, project_name, local_site, local_site_config
            ),
            _site_is_working(
                self.addon, project_name, remote_site, remote_site_config
            )
        ]):
            self.log.debug((
                "Some of the sites {} - {} in {} is not working properly"
            ).format(local_site, remote_site, project_name))

            return None, None

        return local_site, remote_site

    def _get_remote_provider_info(
        self, project_name, remote_site, site_preset
    ):
        """Return (handler, provider_name, batch_limit) for remote site."""
        remote_provider = self.addon.get_provider_for_site(site=remote_site)
        handler = lib.factory.get_provider(
            remote_provider,
            project_name,
            remote_site,
            presets=site_preset
        )
        limit = lib.factory.get_provider_batch_limit(remote_provider)

        return handler, remote_provider, limit

    async def _sync_project(self, project_name):
        """Find representations to sync in 'project_name' and run the
            upload/download tasks, then update DB with results.
        """
        self.log.info(f"Processing '{project_name}'")
        preset = self.addon.sync_project_settings[project_name]

        local_site, remote_site = self._working_sites(
            project_name, preset
        )
        if not local_site or not remote_site:
            return

        remote_site_preset = preset.get("sites")[remote_site]

        handler, remote_provider, limit = self._get_remote_provider_info(
            project_name,
            remote_site,
            remote_site_preset
        )

        repre_states = self.addon.get_sync_representations(
            project_name,
            local_site,
            remote_site,
            limit
        )

        task_files_to_process = []
        files_processed_info = []
        # process only unique file paths in one batch
        # multiple representation could have same file path
        # (textures),
        # upload process can find already uploaded file and
        # reuse same id
        processed_file_path = set()

        # first call to get_provider could be expensive, its
        # building folder tree structure in memory
        # call only if needed, eg. DO_UPLOAD or DO_DOWNLOAD
        for repre_state in repre_states:
            repre_id = repre_state["representationId"]
            # QUESTION Why is not project passed in?
            # QUESTION Why there is not option to check all representations
            #    in one batch?
            if self.addon.is_representation_paused(repre_id):
                continue
            file_states = repre_state.get("files") or []
            for file_state in file_states:
                # skip already processed files
                # WARNING Using empty string for path is dangerous!!!
                file_path = file_state.get("path", "")
                if file_path in processed_file_path:
                    continue
                status = self.addon.check_status(
                    file_state,
                    local_site,
                    remote_site,
                    preset.get("config")
                )
                if (status == SyncStatus.DO_UPLOAD and
                        len(task_files_to_process) < limit):
                    tree = handler.get_tree()
                    task = asyncio.create_task(
                        upload(
                            self.addon,
                            project_name,
                            file_state,
                            repre_state,
                            remote_provider,
                            remote_site,
                            tree,
                            remote_site_preset
                        )
                    )
                    task_files_to_process.append(task)
                    # store info for exception handling
                    files_processed_info.append((
                        file_state,
                        repre_state,
                        remote_site,
                        "remote",
                        project_name
                    ))
                    processed_file_path.add(file_path)

                if (status == SyncStatus.DO_DOWNLOAD and
                        len(task_files_to_process) < limit):
                    tree = handler.get_tree()
                    task = asyncio.create_task(
                        download(
                            self.addon,
                            project_name,
                            file_state,
                            repre_state,
                            remote_provider,
                            remote_site,
                            tree,
                            remote_site_preset
                        )
                    )
                    task_files_to_process.append(task)

                    files_processed_info.append((
                        file_state,
                        repre_state,
                        local_site,
                        "local",
                        project_name
                    ))
                    processed_file_path.add(file_path)

        self.log.debug("Sync tasks count {}".format(
            len(task_files_to_process)
        ))
        files_created = await asyncio.gather(
            *task_files_to_process,
            return_exceptions=True
        )

        for file_result, info in zip(files_created, files_processed_info):
            file_state, repre_status, site_name, side, project_name = info
            error = None
            if isinstance(file_result, BaseException):
                error = str(file_result)
                self.log.warning(error, exc_info=True)
                file_result = None  # it is exception >> no id >> reset

            self.addon.update_db(
                project_name=project_name,
                new_file_id=file_result,
                file=file_state,
                repre_status=repre_status,
                site_name=site_name,
                side=side,
                error=error
            )

            repre_id = repre_status["representationId"]
            self.addon.handle_alternate_site(
                project_name,
                repre_id,
                site_name,
                file_state["fileHash"]
            )

check_shutdown() async

Future that is running and checks if server should be running periodically.

Source code in client/ayon_sitesync/sitesync.py
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
async def check_shutdown(self):
    """ Future that is running and checks if server should be running
        periodically.

        While running it also drains 'addon.long_running_tasks': each
        popped task is executed in the default executor and its project
        is removed from 'addon.projects_processed' afterwards. Once
        'is_running' goes False it tears the event loop down.
    """
    while self.is_running:
        if self.addon.long_running_tasks:
            task = self.addon.long_running_tasks.pop()
            self.log.info("starting long running")
            # blocking callable runs in executor to keep loop responsive
            await self.loop.run_in_executor(None, task["func"])
            self.log.info("finished long running")
            self.addon.projects_processed.remove(task["project_name"])
        await asyncio.sleep(0.5)

    # 'is_running' went False >> cancel every other task and stop the loop
    tasks = [
        task
        for task in asyncio.all_tasks()
        if task is not asyncio.current_task()
    ]
    list(map(lambda task: task.cancel(), tasks))  # cancel all the tasks
    results = await asyncio.gather(*tasks, return_exceptions=True)
    self.log.debug(
        f"Finished awaiting cancelled tasks, results: {results}...")
    await self.loop.shutdown_asyncgens()
    # to really make sure everything else has time to stop
    self.executor.shutdown(wait=True)
    await asyncio.sleep(0.07)
    self.loop.stop()

reset_timer()

Called when waiting for next loop should be skipped

Source code in client/ayon_sitesync/sitesync.py
443
444
445
446
447
448
def reset_timer(self):
    """Skip the remaining wait before the next sync-loop iteration.

    Cancels the pending timer task (if any) so 'sync_loop' wakes up
    immediately instead of sleeping out its full delay.
    """
    self.log.debug("Resetting timer")
    pending = self.timer
    if pending:
        pending.cancel()
        self.timer = None

run_timer(delay) async

Wait for 'delay' seconds to start next loop

Source code in client/ayon_sitesync/sitesync.py
439
440
441
async def run_timer(self, delay):
    """Sleep for 'delay' seconds; cancelled by 'reset_timer' to skip."""
    await asyncio.sleep(delay)

stop()

Sets is_running flag to false, 'check_shutdown' shuts server down

Source code in client/ayon_sitesync/sitesync.py
407
408
409
def stop(self):
    """Request shutdown.

    Only flips the 'is_running' flag; the 'check_shutdown' coroutine
    notices it and performs the actual teardown of the event loop.
    """
    self.is_running = False

sync_loop() async

Runs permanently, each time:
    - gets list of collections in DB
    - gets list of active remote providers (has configuration,
        credentials)
    - for each project_name it looks for representations that
      should be synced
    - synchronize found collections
    - update representations - fills error messages for exceptions
    - waits X seconds and repeat

Returns:

Source code in client/ayon_sitesync/sitesync.py
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
async def sync_loop(self):
    """
        Runs permanently, each time:
            - gets list of collections in DB
            - gets list of active remote providers (has configuration,
                credentials)
            - for each project_name it looks for representations that
              should be synced
            - synchronize found collections
            - update representations - fills error messages for exceptions
            - waits X seconds and repeat
    Returns:

    """
    while self.is_running and not self.addon.is_paused():
        try:
            start_time = time.time()
            self.addon.set_sync_project_settings()  # clean cache
            # NOTE 'project_name' intentionally survives the for-loop:
            # delay is taken from the last processed project's settings
            # (None when no project is enabled)
            project_name = None
            enabled_projects = self.addon.get_enabled_projects()
            for project_name in enabled_projects:
                await self._sync_project(project_name)

            duration = time.time() - start_time
            self.log.debug("One loop took {:.2f}s".format(duration))

            delay = self.addon.get_loop_delay(project_name)
            self.log.debug(
                "Waiting for {} seconds to new loop".format(delay)
            )
            # timer task is stored so 'reset_timer' can cancel the wait
            self.timer = asyncio.create_task(self.run_timer(delay))
            await asyncio.gather(self.timer)

        except ConnectionResetError:
            self.log.warning(
                "ConnectionResetError in sync loop, trying next loop",
                exc_info=True)
        except asyncio.exceptions.CancelledError:
            # cancelling timer
            pass
        except ResumableError:
            self.log.warning(
                "ResumableError in sync loop, trying next loop",
                exc_info=True)
        except Exception:
            self.stop()
            self.log.warning(
                "Unhandled except. in sync loop, stopping server",
                exc_info=True)

download(addon, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None) async

Downloads file to local folder denoted in representation.Context.

Parameters:

Name Type Description Default
addon SiteSyncAddon

SiteSyncAddon object.

required
project_name str

Project name.

required
file dict

Info about processed file.

required
representation dict

repr that 'file' belongs to

required
provider_name str

'gdrive' etc

required
remote_site_name str

site on provider, single provider(gdrive) could have multiple sites (different accounts, credentials)

required
tree Optional[dict]

Injected memory structure for performance.

None
preset Optional[dict]

Site config ('credentials_url', 'root'...).

None

Returns:

Name Type Description
str

Name of local file

Source code in client/ayon_sitesync/sitesync.py
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
async def download(
    addon,
    project_name,
    file,
    representation,
    provider_name,
    remote_site_name,
    tree=None,
    preset=None
):
    """Download a single representation file to the local site.

    Target location is denoted in representation.Context.

    Args:
        addon (SiteSyncAddon): SiteSyncAddon object.
        project_name (str): Project name.
        file (dict): Info about processed file.
        representation (dict): Representation the 'file' belongs to.
        provider_name (str): Provider implementation id ('gdrive' etc.).
        remote_site_name (str): Site on provider; a single provider
            (gdrive) could have multiple sites (different accounts,
            credentials).
        tree (Optional[dict]): Injected memory structure for performance.
        preset (Optional[dict]): Site config ('credentials_url', 'root'...).

    Returns:
        str: Name of local file

    """
    # Path resolution and local folder creation are serialized under the
    # addon lock; the byte transfer itself happens outside of it.
    with addon.lock:
        handler = lib.factory.get_provider(
            provider_name,
            project_name,
            remote_site_name,
            tree=tree,
            presets=preset
        )

        source_path = file.get("path", "")
        local_path, remote_path = resolve_paths(
            addon, source_path, project_name, remote_site_name, handler
        )

        os.makedirs(os.path.dirname(local_path), exist_ok=True)

    active_site = addon.get_active_site(project_name)

    # Blocking provider call runs in the default executor so the event
    # loop stays responsive.
    return await asyncio.get_running_loop().run_in_executor(
        None,
        handler.download_file,
        remote_path,
        local_path,
        addon,
        project_name,
        file,
        representation,
        active_site,
        True
    )

download_last_published_workfile(host_name, project_name, task_name, workfile_representation, max_retries, anatomy=None, sitesync_addon=None)

Download the last published workfile

Parameters:

Name Type Description Default
host_name str

Host name.

required
project_name str

Project name.

required
task_name str

Task name.

required
workfile_representation dict

Workfile representation.

required
max_retries int

complete file failure only after so many attempts

required
anatomy Optional[Anatomy]

Project anatomy, used for optimization. Defaults to None.

None
sitesync_addon Optional[SiteSyncAddon]

Addons manager, used for optimization.

None

Returns:

Type Description
Union[str, None]

Union[str, None]: last published workfile path localized

Source code in client/ayon_sitesync/sitesync.py
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
def download_last_published_workfile(
    host_name: str,
    project_name: str,
    task_name: str,
    workfile_representation: dict,
    max_retries: int,
    anatomy: Anatomy = None,
    sitesync_addon=None,
) -> Union[str, None]:
    """Download the last published workfile

    Queues the workfile representation (and everything it references)
    for download to the local site, then blocks until the sync thread
    reports it present.

    Args:
        host_name (str): Host name.
        project_name (str): Project name.
        task_name (str): Task name.
        workfile_representation (dict): Workfile representation.
        max_retries (int): complete file failure only after so many attempts
        anatomy (Optional[Anatomy]): Project anatomy, used for optimization.
            Defaults to None.
        sitesync_addon (Optional[SiteSyncAddon]): Addons manager,
            used for optimization.

    Returns:
        Union[str, None]: last published workfile path localized

    """
    if not workfile_representation:
        print(
            "Not published workfile for task '{}' and host '{}'.".format(
                task_name, host_name
            )
        )
        return None

    if sitesync_addon is None:
        sitesync_addon = AddonsManager().addons_by_name.get("sitesync")

    # Addon must exist and be enabled to be able to sync anything
    if not sitesync_addon or not sitesync_addon.enabled:
        print("Site sync addon is disabled or unavailable.")
        return None

    anatomy = anatomy or Anatomy(project_name)

    workfile_path = get_representation_path_with_anatomy(
        workfile_representation, anatomy
    )
    if not workfile_path:
        return None

    repre_id = workfile_representation["id"]

    # If representation isn't available on remote site, there is nothing
    # to pull from.
    remote_site = sitesync_addon.get_remote_site(project_name)
    if not sitesync_addon.is_representation_on_site(
        project_name, repre_id, remote_site
    ):
        print(
            "Representation not available for task '{}', site '{}'".format(
                task_name, remote_site
            )
        )
        return None

    local_site_id = get_local_site_id()

    # Register workfile representation and all referenced ones on the
    # local site so the sync thread picks them up.
    wanted_ids = {repre_id}
    wanted_ids.update(
        get_linked_representation_id(
            project_name, workfile_representation, "reference"
        )
    )
    for wanted_id in wanted_ids:
        if sitesync_addon.is_representation_on_site(
            project_name, wanted_id, local_site_id
        ):
            continue
        sitesync_addon.add_site(
            project_name,
            wanted_id,
            local_site_id,
            force=True,
            # priority=99  TODO add when implemented
        )

    sitesync_addon.reset_timer()
    print("Starting to download:{}".format(workfile_path))

    # Poll until the sync thread reports the workfile present locally.
    while not sitesync_addon.is_representation_on_site(
        project_name,
        repre_id,
        local_site_id,
        max_retries=max_retries
    ):
        time.sleep(5)

    return workfile_path

resolve_paths(addon, file_path, project_name, remote_site_name=None, remote_handler=None)

Resolve local and remote full path.

Returns tuple of local and remote file paths with {root} placeholders replaced with proper values from Settings or Anatomy

Ejected here because of Python 2 hosts (GDriveHandler is an issue)

Parameters:

Name Type Description Default
addon SiteSyncAddon

object to run SiteSyncAddon API

required
file_path str

File path with {root}.

required
project_name str

Project name.

required
remote_site_name Optional[str]

Remote site name.

None
remote_handler Optional[AbstractProvider]

implementation

None

Returns:

Type Description

tuple[str, str]: Proper absolute paths, remote path is optional.

Source code in client/ayon_sitesync/sitesync.py
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
def resolve_paths(
    addon, file_path, project_name, remote_site_name=None, remote_handler=None
):
    """Resolve local and remote full path.

    Replaces the {root} placeholder in 'file_path' with proper values
    from Settings or Anatomy, once for the local site and once for the
    remote one.

    Ejected here because of Python 2 hosts (GDriveHandler is an issue)

    Args:
        addon (SiteSyncAddon): object to run SiteSyncAddon API
        file_path (str): File path with {root}.
        project_name (str): Project name.
        remote_site_name (Optional[str]): Remote site name.
        remote_handler (Optional[AbstractProvider]): implementation

    Returns:
        tuple[str, str]: Proper absolute paths, remote path is optional.

    """
    # Remote path is resolved only when a handler was provided.
    remote_resolved = (
        remote_handler.resolve_path(file_path) if remote_handler else ""
    )

    local_handler = lib.factory.get_provider(
        "local_drive", project_name, addon.get_active_site(project_name)
    )
    return local_handler.resolve_path(file_path), remote_resolved

upload(addon, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None) async

Upload representation file.

Upload single 'file' of a 'representation' to 'provider'. Source url is taken from 'file' portion, where {root} placeholder is replaced by 'representation.Context.root'. Provider could be one of those implemented in provider.py.

Updates database, fills in id of file from provider (ie. file_id from GDrive), 'created_dt' - time of upload

Value of 'provider_name' doesn't have to match to 'site_name', single provider (GDrive) might have multiple sites ('projectA', 'projectB')

Parameters:

Name Type Description Default
addon SiteSyncAddon

object to run SiteSyncAddon API

required
project_name str

Project name.

required
file dict[str, Any]

of file from representation in Mongo

required
representation dictionary

of representation

required
provider_name str

gdrive, gdc etc.

required
remote_site_name string

Site on provider, single provider(gdrive) could have multiple sites (different accounts, credentials)

required
tree Optional[dict]

Injected memory structure for performance.

None
preset Optional[dict]

site config ('credentials_url', 'root'...)

None
Source code in client/ayon_sitesync/sitesync.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
async def upload(
    addon,
    project_name,
    file,
    representation,
    provider_name,
    remote_site_name,
    tree=None,
    preset=None
):
    """Upload representation file.

    Uploads single 'file' of a 'representation' to 'provider'. Source
    url comes from the 'file' portion, where the {root} placeholder gets
    replaced by 'representation.Context.root'. Provider could be one of
    those implemented in provider.py.

    Updates database, fills in id of file from provider (ie. file_id
        from GDrive), 'created_dt' - time of upload

    Value of 'provider_name' doesn't have to match to 'site_name', single
    provider (GDrive) might have multiple sites ('projectA', 'projectB')

    Args:
        addon (SiteSyncAddon): object to run SiteSyncAddon API
        project_name (str): Project name.
        file (dict[str, Any]): of file from representation in Mongo
        representation (dictionary): of representation
        provider_name (str): gdrive, gdc etc.
        remote_site_name (string): Site on provider, single provider(gdrive)
            could have multiple sites (different accounts, credentials)
        tree (Optional[dict]): Injected memory structure for performance.
        preset (Optional[dict]): site config ('credentials_url', 'root'...)

    """
    # Remote folder structure is modified sequentially under the lock -
    # only a single thread may touch it at a time. The transfer to the
    # prepared structure then runs in parallel.
    with addon.lock:
        handler = lib.factory.get_provider(
            provider_name,
            project_name,
            remote_site_name,
            tree=tree,
            presets=preset
        )

        source_path = file.get("path", "")

        local_path, remote_path = resolve_paths(
            addon, source_path, project_name,
            remote_site_name, handler
        )

        target_folder = os.path.dirname(remote_path)
        folder_id = handler.create_folder(target_folder)
        if not folder_id:
            raise NotADirectoryError(
                "Folder {} wasn't created. Check permissions.".format(
                    target_folder
                )
            )

    # Blocking provider call runs in the default executor so the event
    # loop stays responsive.
    return await asyncio.get_running_loop().run_in_executor(
        None,
        handler.upload_file,
        local_path,
        remote_path,
        addon,
        project_name,
        file,
        representation,
        remote_site_name,
        True
    )