Skip to content

Distributask Class

distributask.Distributask

The Distributask class contains the core features of distributask, including creating and executing the task queue, managing workers using the Vast.ai API, and uploading files and directories using the Hugging Face API.

Source code in distributask/distributask.py
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
class Distributask:
    """
    The Distributask class contains the core features of distributask, including creating and
    executing the task queue, managing workers using the Vast.ai API, and uploading files and directories
    using the Hugging Face API.
    """

    # Celery application instance; created in __init__.
    app: Celery = None
    # Cached Redis connection; created lazily by get_redis_connection().
    redis_client: Redis = None
    # Mapping of function name -> callable, populated by register_function().
    registered_functions: dict = {}
    # Redis connection pool backing redis_client.
    pool: ConnectionPool = None

    def __init__(
        self,
        hf_repo_id=os.getenv("HF_REPO_ID"),
        hf_token=os.getenv("HF_TOKEN"),
        vast_api_key=os.getenv("VAST_API_KEY"),
        redis_host=os.getenv("REDIS_HOST", "localhost"),
        redis_password=os.getenv("REDIS_PASSWORD", ""),
        redis_port=os.getenv("REDIS_PORT", 6379),
        redis_username=os.getenv("REDIS_USER", "default"),
        broker_pool_limit=os.getenv("BROKER_POOL_LIMIT", 1),
    ) -> None:
        """
        Initialize the Distributask object with the provided configuration parameters. Also sets some
        default settings in Celery and handles cleanup of Celery queue and Redis server on exit.

        Args:
            hf_repo_id (str): Hugging Face repository ID.
            hf_token (str): Hugging Face API token.
            vast_api_key (str): Vast.ai API key.
            redis_host (str): Redis host. Defaults to "localhost".
            redis_password (str): Redis password. Defaults to an empty string.
            redis_port (int): Redis port. Defaults to 6379.
            redis_username (str): Redis username. Defaults to "default".
            broker_pool_limit (int): Celery broker pool limit. Defaults to 1.

        Raises:
            ValueError: If any of the required parameters (hf_repo_id, hf_token, vast_api_key) are not provided.
        """
        if hf_repo_id is None:
            raise ValueError(
                "HF_REPO_ID is not provided to the Distributask constructor"
            )

        if hf_token is None:
            raise ValueError("HF_TOKEN is not provided to the Distributask constructor")

        if vast_api_key is None:
            raise ValueError(
                "VAST_API_KEY is not provided to the Distributask constructor"
            )

        if redis_host == "localhost":
            print(
                "WARNING: Using default Redis host 'localhost'. This is not recommended for production use and won't work for distributed rendering."
            )

        self.settings = {
            "HF_REPO_ID": hf_repo_id,
            "HF_TOKEN": hf_token,
            "VAST_API_KEY": vast_api_key,
            "REDIS_HOST": redis_host,
            "REDIS_PASSWORD": redis_password,
            "REDIS_PORT": redis_port,
            "REDIS_USER": redis_username,
            "BROKER_POOL_LIMIT": broker_pool_limit,
        }

        redis_url = self.get_redis_url()
        # start Celery app instance
        self.app = Celery("distributask", broker=redis_url, backend=redis_url)
        self.app.conf.broker_pool_limit = self.settings["BROKER_POOL_LIMIT"]

        def cleanup_redis():
            """
            Deletes keys in redis related to Celery tasks and closes the Redis connection on exit
            """
            patterns = ["celery-task*", "task_status*"]
            redis_connection = self.get_redis_connection()
            for pattern in patterns:
                for key in redis_connection.scan_iter(match=pattern):
                    redis_connection.delete(key)
            print("Redis server cleared")

        def cleanup_celery():
            """
            Clears Celery task queue on exit
            """
            self.app.control.purge()
            print("Celery queue cleared")

        # At exit, close Celery instance, delete all previous task info from queue and Redis, and close Redis.
        # atexit runs these in reverse registration order: purge queue, clear Redis keys, then close the app.
        atexit.register(self.app.close)
        atexit.register(cleanup_redis)
        atexit.register(cleanup_celery)

        self.redis_client = self.get_redis_connection()

        # Tasks are acknowledged after they have been executed (safer against worker loss mid-task)
        self.app.conf.task_acks_late = True
        # Wrap the bound method as a named Celery task so workers can resolve it by name.
        self.call_function_task = self.app.task(
            bind=True, name="call_function_task", max_retries=3, default_retry_delay=30
        )(self.call_function_task)

    def __del__(self):
        """Destructor to clean up resources (connection pool, Redis client, Celery app)."""
        if self.pool is not None:
            self.pool.disconnect()
        if self.redis_client is not None:
            self.redis_client.close()
        if self.app is not None:
            self.app.close()

    def log(self, message: str, level: str = "info") -> None:
        """
        Log a message with the specified level.

        Args:
            message (str): The message to log.
            level (str): The logging level (any logger method name, e.g. "info",
                "warning", "error"). Defaults to "info".
        """
        logger = get_task_logger(__name__)
        # Dispatch to the logger method matching the requested level.
        getattr(logger, level)(message)

    def get_settings(self) -> dict:
        """
        Return settings of distributask instance.

        Returns:
            dict: The configuration dictionary built in __init__.
        """
        # Annotation fixed: this returns the settings dict, not a string.
        return self.settings

    def get_redis_url(self) -> str:
        """
        Construct a Redis URL from the configuration settings.

        Returns:
            str: A Redis URL string of the form redis://user:password@host:port.

        Raises:
            ValueError: If any required Redis connection parameter is missing.
        """
        host = self.settings["REDIS_HOST"]
        password = self.settings["REDIS_PASSWORD"]
        port = self.settings["REDIS_PORT"]
        username = self.settings["REDIS_USER"]

        if None in [host, password, port, username]:
            raise ValueError("Missing required Redis configuration values")

        redis_url = f"redis://{username}:{password}@{host}:{port}"
        return redis_url

    def get_redis_connection(self, force_new: bool = False) -> Redis:
        """
        Returns Redis connection. If it already exists, returns current connection.
        If it does not exist, it creates a new Redis connection using a connection pool.

        Args:
            force_new (bool): Force the creation of a new connection if set to True. Defaults to False.

        Returns:
            Redis: A Redis connection object.
        """
        if self.redis_client is not None and not force_new:
            return self.redis_client
        else:
            # Pass username as well so pooled connections authenticate the same
            # way as the broker URL built in get_redis_url (fixes auth failures
            # when REDIS_USER is not the server default).
            self.pool = ConnectionPool(
                host=self.settings["REDIS_HOST"],
                port=self.settings["REDIS_PORT"],
                username=self.settings["REDIS_USER"],
                password=self.settings["REDIS_PASSWORD"],
                max_connections=1,
            )
            self.redis_client = Redis(connection_pool=self.pool)
            atexit.register(self.pool.disconnect)

        return self.redis_client

    def get_env(self, key: str, default: any = None) -> any:
        """
        Retrieve a value from the configuration or .env file, with an optional default if the key is not found.

        Args:
            key (str): The key to look for in the settings.
            default (any): The default value to return if the key is not found. Defaults to None.

        Returns:
            any: The value from the settings if the key exists, otherwise the default value.
        """
        return self.settings.get(key, default)

    def call_function_task(self, func_name: str, args_json: str) -> any:
        """
        Creates Celery task that executes a registered function with provided JSON arguments.

        Args:
            func_name (str): The name of the registered function to execute.
            args_json (str): JSON string representation of the arguments for the function.

        Returns:
            any: Result of the registered function, or None if the function raised
            (the error is logged; automatic retry is currently disabled).

        Raises:
            ValueError: If the function name is not registered. Note: this is caught
                by the handler below and logged rather than propagated.
        """
        try:
            if func_name not in self.registered_functions:
                raise ValueError(f"Function '{func_name}' is not registered.")

            func = self.registered_functions[func_name]
            args = json.loads(args_json)
            result = func(**args)
            # self.update_function_status(self.call_function_task.request.id, "success")

            return result
        except Exception as e:
            # NOTE(review): retry is disabled below, so a failing task resolves
            # with a None result instead of retrying — confirm this is intended.
            self.log(f"Error in call_function_task: {str(e)}", "error")
            # self.call_function_task.retry(exc=e)

    def register_function(self, func: callable) -> callable:
        """
        Decorator to register a function so that it can be invoked as a Celery task.

        Args:
            func (callable): The function to register.

        Returns:
            callable: The original function, now registered as a callable task.
        """
        self.registered_functions[func.__name__] = func
        return func

    def execute_function(self, func_name: str, args: dict) -> Celery.AsyncResult:
        """
        Execute a registered function as a Celery task with provided arguments.

        Args:
            func_name (str): The name of the function to execute.
            args (dict): Arguments to pass to the function (must be JSON-serializable).

        Returns:
            celery.result.AsyncResult: An object representing the asynchronous result of the task.
        """
        args_json = json.dumps(args)
        async_result = self.call_function_task.delay(func_name, args_json)
        return async_result

    def update_function_status(self, task_id: str, status: str) -> None:
        """
        Update the status of a function task as a new Redis key.

        Args:
            task_id (str): The ID of the task.
            status (str): The new status to set.
        """
        redis_client = self.get_redis_connection()
        redis_client.set(f"task_status:{task_id}", status)

    def initialize_dataset(self, **kwargs) -> None:
        """
        Initialize a Hugging Face repository if it doesn't exist. Reads Hugging Face info from config or .env

        Args:
            kwargs: kwargs that can be passed into the HfApi.create_repo function.

        Raises:
            HTTPError: If repo cannot be created due to connection error other than repo not existing
        """
        repo_id = self.settings.get("HF_REPO_ID")
        hf_token = self.settings.get("HF_TOKEN")
        api = HfApi(token=hf_token)

        # creates new repo if desired repo is not found
        try:
            repo_info = api.repo_info(repo_id=repo_id, repo_type="dataset", timeout=30)
        except HTTPError as e:
            if e.response.status_code == 404:
                self.log(
                    f"Repository {repo_id} does not exist. Creating a new repository.",
                    "warn",
                )
                api.create_repo(
                    repo_id=repo_id, token=hf_token, repo_type="dataset", **kwargs
                )
            else:
                raise e

        # Create config.json file
        config = {
            "data_loader_name": "custom",
            "data_loader_kwargs": {
                "path": repo_id,
                "format": "files",
                "fields": ["file_path", "text"],
            },
        }

        # apply config.json to created repo
        # NOTE(review): huggingface_hub.Repository is deprecated in recent hub
        # versions — confirm the pinned huggingface_hub release still ships it.
        with tempfile.TemporaryDirectory() as temp_dir:
            with Repository(
                local_dir=temp_dir,
                clone_from=repo_id,
                repo_type="dataset",
                use_auth_token=hf_token,
            ).commit(commit_message="Add config.json"):
                with open(os.path.join(temp_dir, "config.json"), "w") as f:
                    json.dump(config, f, indent=2)

        self.log(f"Initialized repository {repo_id}.")

    # upload a single file to the Hugging Face repository
    def upload_file(self, file_path: str) -> None:
        """
        Upload a file to a Hugging Face repository.

        Args:
            file_path (str): The path of the file to upload.

        Raises:
            Exception: If an error occurs during the upload process.

        """
        hf_token = self.settings.get("HF_TOKEN")
        repo_id = self.settings.get("HF_REPO_ID")

        api = HfApi(token=hf_token)

        try:
            self.log(f"Uploading {file_path} to Hugging Face repo {repo_id}")
            api.upload_file(
                path_or_fileobj=file_path,
                path_in_repo=os.path.basename(file_path),
                repo_id=repo_id,
                token=hf_token,
                repo_type="dataset",
            )
            self.log(f"Uploaded {file_path} to Hugging Face repo {repo_id}")
        except Exception as e:
            self.log(
                f"Failed to upload {file_path} to Hugging Face repo {repo_id}: {e}",
                "error",
            )

    def upload_directory(self, dir_path: str) -> None:
        """
        Upload a directory to a Hugging Face repository. Can be used to reduce frequency of Hugging Face API
        calls if you are rate limited while using the upload_file function.

        Args:
            dir_path (str): The path of the directory to upload.

        Raises:
            Exception: If an error occurs during the upload process.

        """
        hf_token = self.settings.get("HF_TOKEN")
        repo_id = self.settings.get("HF_REPO_ID")

        try:
            self.log(f"Uploading {dir_path} to Hugging Face repo {repo_id}")

            api = HfApi(token=hf_token)
            api.upload_folder(
                folder_path=dir_path,
                repo_id=repo_id,
                repo_type="dataset",
            )
            self.log(f"Uploaded {dir_path} to Hugging Face repo {repo_id}")
        except Exception as e:
            self.log(
                f"Failed to upload {dir_path} to Hugging Face repo {repo_id}: {e}",
                "error",
            )

    def delete_file(self, repo_id: str, path_in_repo: str) -> None:
        """
        Delete a file from a Hugging Face repository.

        Args:
            repo_id (str): The ID of the repository.
            path_in_repo (str): The path of the file to delete within the repository.

        Raises:
            Exception: If an error occurs during the deletion process.

        """
        hf_token = self.settings.get("HF_TOKEN")
        api = HfApi(token=hf_token)

        try:
            api.delete_file(
                repo_id=repo_id,
                path_in_repo=path_in_repo,
                repo_type="dataset",
                token=hf_token,
            )
            self.log(f"Deleted {path_in_repo} from Hugging Face repo {repo_id}")
        except Exception as e:
            self.log(
                f"Failed to delete {path_in_repo} from Hugging Face repo {repo_id}: {e}",
                "error",
            )

    def file_exists(self, repo_id: str, path_in_repo: str) -> bool:
        """
        Check if a file exists in a Hugging Face repository.

        Args:
            repo_id (str): The ID of the repository.
            path_in_repo (str): The path of the file to check within the repository.

        Returns:
            bool: True if the file exists in the repository, False otherwise
            (including when the listing request itself fails).
        """
        hf_token = self.settings.get("HF_TOKEN")
        api = HfApi(token=hf_token)

        try:
            repo_files = api.list_repo_files(
                repo_id=repo_id, repo_type="dataset", token=hf_token
            )
            return path_in_repo in repo_files
        except Exception as e:
            self.log(
                f"Failed to check if {path_in_repo} exists in Hugging Face repo {repo_id}: {e}",
                "error",
            )
            return False

    def list_files(self, repo_id: str) -> list:
        """
        Get a list of files from a Hugging Face repository.

        Args:
            repo_id (str): The ID of the repository.

        Returns:
            list: A list of file paths in the repository, or an empty list if
            the listing request fails.
        """
        hf_token = self.settings.get("HF_TOKEN")
        api = HfApi(token=hf_token)

        try:
            repo_files = api.list_repo_files(
                repo_id=repo_id, repo_type="dataset", token=hf_token
            )
            return repo_files
        except Exception as e:
            self.log(
                f"Failed to get the list of files from Hugging Face repo {repo_id}: {e}",
                "error",
            )
            return []

    def search_offers(self, max_price: float) -> List[Dict]:
        """
        Search for available offers to rent a node as an instance on the Vast.ai platform.

        Args:
            max_price (float): The maximum price per hour for the instance.

        Returns:
            List[Dict]: A list of dictionaries representing the available offers.

        Raises:
            requests.exceptions.RequestException: If there is an error while making the API request.
        """
        api_key = self.get_env("VAST_API_KEY")
        base_url = "https://console.vast.ai/api/v0/bundles/"
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        }
        url = (
            base_url
            + '?q={"gpu_ram":">=4","rentable":{"eq":true},"dph_total":{"lte":'
            + str(max_price)
            + '},"sort_option":{"0":["dph_total","asc"],"1":["total_flops","asc"]}}'
        )

        # Initialize so the except handler below never hits an unbound name
        # when requests.get itself raises (e.g. connection error).
        response = None
        try:
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            json_response = response.json()
            return json_response["offers"]

        except requests.exceptions.RequestException as e:
            # Compare against None explicitly: Response.__bool__ is False for
            # 4xx/5xx, which previously hid the body of failed responses.
            self.log(
                f"Error: {e}\nResponse: {response.text if response is not None else 'No response'}"
            )
            raise

    def create_instance(
        self, offer_id: str, image: str, module_name: str, env_settings: Dict, command: str
    ) -> Dict:
        """
        Create an instance on the Vast.ai platform. Passes in some useful Celery settings by default.

        Args:
            offer_id (str): The ID of the offer to create the instance from.
            image (str): The image to use for the instance. (example: RaccoonResearch/distributask-test-worker)
            module_name (str): The name of the module to run on the instance, configured to be a docker file (example: distributask.example.worker)
            command (str): Command that initializes celery worker. Has default command with specific settings if not passed in. These settings have
            been found to be beneficial to the stability and simplicity of a Distributask run. 
            env_settings (Dict): Used to pass in environment variables to the Vast.ai instance. This is a dictionary with keys of the 
            environment variable name and values of the desired value of the environment variable.

        Returns:
            Dict: A dictionary representing the created instance.

        Raises:
            ValueError: If the Vast.ai API key is not set in the environment.
            Exception: If there is an error while creating the instance.
        """
        if self.get_env("VAST_API_KEY") is None:
            self.log("VAST_API_KEY is not set in the environment", "error")
            raise ValueError("VAST_API_KEY is not set in the environment")

        if command is None:
            # Single-process worker without heartbeat/prefetch keeps remote
            # workers stable and avoids tasks being prefetched by dying nodes.
            command = f"celery -A {module_name} worker --loglevel=info --concurrency=1 --without-heartbeat --prefetch-multiplier=1"

        if env_settings is None:
            env_settings = self.settings

        json_blob = {
            "client_id": "me",
            "image": image,
            "env": env_settings,
            "disk": 32,  # Set a non-zero value for disk
            "onstart": f"export PATH=$PATH:/ && cd ../ && {command}",
            "runtype": "ssh ssh_proxy",
        }
        url = f"https://console.vast.ai/api/v0/asks/{offer_id}/?api_key={self.get_env('VAST_API_KEY')}"
        headers = {"Authorization": f"Bearer {self.get_env('VAST_API_KEY')}"}
        response = requests.put(url, headers=headers, json=json_blob)

        if response.status_code != 200:
            self.log(f"Failed to create instance: {response.text}", "error")
            raise Exception(f"Failed to create instance: {response.text}")

        return response.json()

    def destroy_instance(self, instance_id: str) -> Dict:
        """
        Destroy an instance on the Vast.ai platform.

        Args:
            instance_id (str): The ID of the instance to destroy.

        Returns:
            requests.Response: The raw HTTP response of the destroy request
            (callers inspect .status_code; note this is not a parsed Dict).
        """
        api_key = self.get_env("VAST_API_KEY")
        headers = {"Authorization": f"Bearer {api_key}"}
        url = (
            f"https://console.vast.ai/api/v0/instances/{instance_id}/?api_key={api_key}"
        )
        response = requests.delete(url, headers=headers)
        return response

    def rent_nodes(
        self,
        max_price: float,
        max_nodes: int,
        image: str,
        module_name: str,
        env_settings: Dict = None,
        command: str = None,
    ) -> List[Dict]:
        """
        Rent nodes as an instance on the Vast.ai platform.

        Args:
            max_price (float): The maximum price per hour for the nodes.
            max_nodes (int): The maximum number of nodes to rent.
            image (str): The image to use for the nodes.
            module_name (str): The name of the module to run on the nodes.
            env_settings (Dict): Environment variables forwarded to each instance.
                Defaults to this instance's settings when None.
            command (str): Worker start command; a stable default is used when None.

        Returns:
            List[Dict]: A list of dictionaries representing the rented nodes. If error is encountered
            trying to rent, it will retry every 5 seconds.
        """
        rented_nodes: List[Dict] = []
        while len(rented_nodes) < max_nodes:
            # Reset each pass so a fully failed search leaves an empty list
            # (previously `offers` could be unbound -> NameError, or stale).
            offers: List[Dict] = []
            search_retries = 10
            while search_retries > 0:
                try:
                    offers = self.search_offers(max_price)
                    break
                except Exception as e:
                    self.log(
                        f"Error searching for offers: {str(e)} - retrying in 5 seconds...",
                        "error",
                    )
                    search_retries -= 1
                    # sleep for 10 seconds before retrying
                    time.sleep(10)
                    continue

            offers = sorted(
                offers, key=lambda offer: offer["dph_total"]
            )  # Sort offers by price, lowest to highest
            for offer in offers:
                time.sleep(5)
                if len(rented_nodes) >= max_nodes:
                    break
                try:
                    instance = self.create_instance(
                        offer["id"], image, module_name, env_settings=env_settings, command=command
                    )
                    rented_nodes.append(
                        {
                            "offer_id": offer["id"],
                            "instance_id": instance["new_contract"],
                        }
                    )
                except Exception as e:
                    self.log(
                        f"Error renting node: {str(e)} - searching for new offers",
                        "error",
                    )
                    break  # Break out of the current offer iteration
            else:
                # If the loop completes without breaking, all offers have been tried
                # (also reached when the search above yielded no offers at all)
                self.log("No more offers available - stopping node rental", "warning")
                break

        # Ensure rented instances are destroyed when the process exits.
        atexit.register(self.terminate_nodes, rented_nodes)
        return rented_nodes

    def get_node_log(self, node: Dict, wait_time: int = 2):
        """
        Get the log of the Vast.ai instance that is passed in. Makes an api call to tell the instance to send the log,
        and another one to actually retrieve the log

        Args:
            node (Dict): the node that corresponds to the Vast.ai instance you want the log from
            wait_time (int): how long to wait in between the two api calls described above

        Returns:
            requests.Response: the HTTP response whose body is the requested log.
            If anything other than a code 200 is received on either call, returns None.
        """
        node_id = node["instance_id"]
        url = f"https://console.vast.ai/api/v0/instances/request_logs/{node_id}/"

        payload = {"tail": "1000"}
        headers = {
            "Accept": "application/json",
            "Authorization": f"Bearer {self.settings['VAST_API_KEY']}",
        }

        response = requests.request(
            "PUT", url, headers=headers, json=payload, timeout=5
        )

        if response.status_code == 200:
            log_url = response.json()["result_url"]
            # Give the instance time to publish its log before fetching it.
            time.sleep(wait_time)
            log_response = requests.get(log_url, timeout=5)
            if log_response.status_code == 200:
                return log_response
            else:
                return None
        else:
            return None

    def terminate_nodes(self, nodes: List[Dict]) -> None:
        """
        Terminate the instances of rented nodes on Vast.ai.

        Args:
            nodes (List[Dict]): A list of dictionaries representing the rented nodes.

        Raises:
            Exception: If error in destroying instances (caught and logged per node).
        """
        print("Terminating nodes...")
        for node in nodes:
            time.sleep(1)
            try:
                response = self.destroy_instance(node["instance_id"])
                if response.status_code != 200:
                    # One best-effort retry after a short backoff.
                    time.sleep(5)
                    self.destroy_instance(node["instance_id"])
            except Exception as e:
                self.log(
                    f"Error terminating node: {node['instance_id']}, {str(e)}", "error"
                )

    def monitor_tasks(
        self, tasks, update_interval=1, show_time_left=True, print_statements=True
    ):
        """
        Monitor the status of the tasks on the Vast.ai nodes.

        Args:
            tasks (List): A list of the tasks to monitor. Should be a list of the results of execute_function.
            update_interval (bool): Number of seconds the status of tasks are updated.
            show_time_left (bool): Show the estimated time left to complete tasks using the tqdm progress bar
            print_statements (bool): Allow printing of status of task queue

        Raises:
            Exception: If error in the process of executing the tasks (caught and logged).
        """

        try:
            # Wait for the tasks to complete
            if print_statements:
                print("Tasks submitted to queue. Starting queue...")
                print("Elapsed time<Estimated time to completion")
            with tqdm(total=len(tasks), unit="task") as pbar:
                while not all(task.ready() for task in tasks):
                    current_tasks = sum([task.ready() for task in tasks])
                    # Advance the bar by however many tasks finished since last poll.
                    pbar.update(current_tasks - pbar.n)
                    time.sleep(update_interval)
        except Exception as e:
            # Log at error level, consistent with the other error paths.
            self.log(f"Error in executing tasks on nodes, {str(e)}", "error")

        if all(task.ready() for task in tasks):
            print("All tasks completed.")

app: Celery = Celery('distributask', broker=redis_url, backend=redis_url) class-attribute instance-attribute

pool: ConnectionPool = None class-attribute instance-attribute

redis_client: Redis = self.get_redis_connection() class-attribute instance-attribute

registered_functions: dict = {} class-attribute instance-attribute

settings = {'HF_REPO_ID': hf_repo_id, 'HF_TOKEN': hf_token, 'VAST_API_KEY': vast_api_key, 'REDIS_HOST': redis_host, 'REDIS_PASSWORD': redis_password, 'REDIS_PORT': redis_port, 'REDIS_USER': redis_username, 'BROKER_POOL_LIMIT': broker_pool_limit} instance-attribute

__del__()

Destructor to clean up resources.

Source code in distributask/distributask.py
124
125
126
127
128
129
130
131
def __del__(self):
    """Release the Redis connection pool, Redis client, and Celery app when garbage-collected."""
    pool = self.pool
    if pool is not None:
        pool.disconnect()
    client = self.redis_client
    if client is not None:
        client.close()
    celery_app = self.app
    if celery_app is not None:
        celery_app.close()

__init__(hf_repo_id=os.getenv('HF_REPO_ID'), hf_token=os.getenv('HF_TOKEN'), vast_api_key=os.getenv('VAST_API_KEY'), redis_host=os.getenv('REDIS_HOST', 'localhost'), redis_password=os.getenv('REDIS_PASSWORD', ''), redis_port=os.getenv('REDIS_PORT', 6379), redis_username=os.getenv('REDIS_USER', 'default'), broker_pool_limit=os.getenv('BROKER_POOL_LIMIT', 1))

Initialize the Distributask object with the provided configuration parameters. Also sets some default settings in Celery and handles cleanup of Celery queue and Redis server on exit.

Parameters:

Name Type Description Default
hf_repo_id str

Hugging Face repository ID.

getenv('HF_REPO_ID')
hf_token str

Hugging Face API token.

getenv('HF_TOKEN')
vast_api_key str

Vast.ai API key.

getenv('VAST_API_KEY')
redis_host str

Redis host. Defaults to "localhost".

getenv('REDIS_HOST', 'localhost')
redis_password str

Redis password. Defaults to an empty string.

getenv('REDIS_PASSWORD', '')
redis_port int

Redis port. Defaults to 6379.

getenv('REDIS_PORT', 6379)
redis_username str

Redis username. Defaults to "default".

getenv('REDIS_USER', 'default')
broker_pool_limit int

Celery broker pool limit. Defaults to 1.

getenv('BROKER_POOL_LIMIT', 1)

Raises:

Type Description
ValueError

If any of the required parameters (hf_repo_id, hf_token, vast_api_key) are not provided.

Source code in distributask/distributask.py
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
def __init__(
    self,
    hf_repo_id=os.getenv("HF_REPO_ID"),
    hf_token=os.getenv("HF_TOKEN"),
    vast_api_key=os.getenv("VAST_API_KEY"),
    redis_host=os.getenv("REDIS_HOST", "localhost"),
    redis_password=os.getenv("REDIS_PASSWORD", ""),
    redis_port=os.getenv("REDIS_PORT", 6379),
    redis_username=os.getenv("REDIS_USER", "default"),
    broker_pool_limit=os.getenv("BROKER_POOL_LIMIT", 1),
) -> None:
    """
    Initialize the Distributask object with the provided configuration parameters. Also sets some
    default settings in Celery and handles cleanup of Celery queue and Redis server on exit.

    Note: the environment-variable defaults in the signature are evaluated once,
    when this module is imported, not on each call.

    Args:
        hf_repo_id (str): Hugging Face repository ID.
        hf_token (str): Hugging Face API token.
        vast_api_key (str): Vast.ai API key.
        redis_host (str): Redis host. Defaults to "localhost".
        redis_password (str): Redis password. Defaults to an empty string.
        redis_port (int): Redis port. Defaults to 6379.
        redis_username (str): Redis username. Defaults to "default".
        broker_pool_limit (int): Celery broker pool limit. Defaults to 1.

    Raises:
        ValueError: If any of the required parameters (hf_repo_id, hf_token, vast_api_key) are not provided.
    """
    # The three credentials below have no usable fallback: fail fast with a clear message.
    if hf_repo_id is None:
        raise ValueError(
            "HF_REPO_ID is not provided to the Distributask constructor"
        )

    if hf_token is None:
        raise ValueError("HF_TOKEN is not provided to the Distributask constructor")

    if vast_api_key is None:
        raise ValueError(
            "VAST_API_KEY is not provided to the Distributask constructor"
        )

    # Remote Vast.ai workers cannot reach a broker bound to localhost.
    if redis_host == "localhost":
        print(
            "WARNING: Using default Redis host 'localhost'. This is not recommended for production use and won't work for distributed rendering."
        )

    # Single source of truth for configuration; read back via get_env()/get_settings().
    self.settings = {
        "HF_REPO_ID": hf_repo_id,
        "HF_TOKEN": hf_token,
        "VAST_API_KEY": vast_api_key,
        "REDIS_HOST": redis_host,
        "REDIS_PASSWORD": redis_password,
        "REDIS_PORT": redis_port,
        "REDIS_USER": redis_username,
        "BROKER_POOL_LIMIT": broker_pool_limit,
    }

    redis_url = self.get_redis_url()
    # start Celery app instance; Redis serves as both the broker and the result backend
    self.app = Celery("distributask", broker=redis_url, backend=redis_url)
    self.app.conf.broker_pool_limit = self.settings["BROKER_POOL_LIMIT"]

    def cleanup_redis():
        """
        Deletes keys in redis related to Celery tasks and closes the Redis connection on exit
        """
        patterns = ["celery-task*", "task_status*"]
        redis_connection = self.get_redis_connection()
        for pattern in patterns:
            for key in redis_connection.scan_iter(match=pattern):
                redis_connection.delete(key)
        print("Redis server cleared")

    def cleanup_celery():
        """
        Clears Celery task queue on exit
        """
        self.app.control.purge()
        print("Celery queue cleared")

    # At exit, close Celery instance, delete all previous task info from queue and Redis, and close Redis
    # NOTE: atexit runs callbacks in LIFO order, so the Celery purge runs first,
    # then the Redis key cleanup, and self.app.close runs last.
    atexit.register(self.app.close)
    atexit.register(cleanup_redis)
    atexit.register(cleanup_celery)

    self.redis_client = self.get_redis_connection()

    # Tasks are acknowledged after they have been executed (Celery acks_late),
    # so a task interrupted mid-run can be redelivered rather than lost.
    self.app.conf.task_acks_late = True
    # Wrap the bound method as a named Celery task with automatic retries.
    self.call_function_task = self.app.task(
        bind=True, name="call_function_task", max_retries=3, default_retry_delay=30
    )(self.call_function_task)

call_function_task(func_name, args_json)

Creates Celery task that executes a registered function with provided JSON arguments.

Parameters:

Name Type Description Default
func_name str

The name of the registered function to execute.

required
args_json str

JSON string representation of the arguments for the function.

required

Returns:

Name Type Description
any any

Celery.app.task object, represents result of the registered function

Raises:

Type Description
ValueError

If the function name is not registered.

Exception

If an error occurs during the execution of the function. The task will retry in this case.

Source code in distributask/distributask.py
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
def call_function_task(self, func_name: str, args_json: str) -> any:
    """
    Celery task body: execute a registered function with JSON-encoded arguments.

    Args:
        func_name (str): The name of the registered function to execute.
        args_json (str): JSON string representation of the keyword arguments for the function.

    Returns:
        any: The return value of the registered function.

    Raises:
        ValueError: If the function name is not registered.
        Exception: If an error occurs during the execution of the function. The task will retry in this case.
    """
    try:
        if func_name not in self.registered_functions:
            raise ValueError(f"Function '{func_name}' is not registered.")

        func = self.registered_functions[func_name]
        args = json.loads(args_json)
        return func(**args)
    except Exception as e:
        self.log(f"Error in call_function_task: {str(e)}", "error")
        # Re-raise so Celery marks the task as failed and its retry policy
        # (max_retries/default_retry_delay) can apply. Previously the error
        # was swallowed and the task silently returned None as a "success".
        raise

create_instance(offer_id, image, module_name, env_settings, command)

Create an instance on the Vast.ai platform. Passes in some useful Celery settings by default.

Parameters:

Name Type Description Default
offer_id str

The ID of the offer to create the instance from.

required
image str

The image to use for the instance. (example: RaccoonResearch/distributask-test-worker)

required
module_name str

The name of the module to run on the instance, configured to be a docker file (example: distributask.example.worker)

required
command str

Command that initializes celery worker. Has default command with specific settings if not passed in. These settings have

required
env_settings Dict

Used to pass in environment variables to the Vast.ai instance. This is a dictionary with keys of the

required

Returns:

Name Type Description
Dict Dict

A dictionary representing the created instance.

Raises:

Type Description
ValueError

If the Vast.ai API key is not set in the environment.

Exception

If there is an error while creating the instance.

Source code in distributask/distributask.py
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
def create_instance(
    self, offer_id: str, image: str, module_name: str, env_settings: Dict, command: str
) -> Dict:
    """
    Create an instance on the Vast.ai platform from an accepted offer.

    Args:
        offer_id (str): The ID of the offer to create the instance from.
        image (str): Docker image for the instance (example: RaccoonResearch/distributask-test-worker).
        module_name (str): Module to run on the instance (example: distributask.example.worker).
        env_settings (Dict): Environment variables for the instance, mapping variable
            names to values. When None, this object's settings are used.
        command (str): Command that starts the Celery worker. When None, a default
            command with settings that favor run stability and simplicity is used.

    Returns:
        Dict: A dictionary representing the created instance.

    Raises:
        ValueError: If the Vast.ai API key is not set in the environment.
        Exception: If the Vast.ai API reports a failure while creating the instance.
    """
    api_key = self.get_env("VAST_API_KEY")
    if api_key is None:
        self.log("VAST_API_KEY is not set in the environment", "error")
        raise ValueError("VAST_API_KEY is not set in the environment")

    if command is None:
        command = f"celery -A {module_name} worker --loglevel=info --concurrency=1 --without-heartbeat --prefetch-multiplier=1"

    env = self.settings if env_settings is None else env_settings

    payload = {
        "client_id": "me",
        "image": image,
        "env": env,
        "disk": 32,  # Set a non-zero value for disk
        "onstart": f"export PATH=$PATH:/ && cd ../ && {command}",
        "runtype": "ssh ssh_proxy",
    }
    url = f"https://console.vast.ai/api/v0/asks/{offer_id}/?api_key={api_key}"
    headers = {"Authorization": f"Bearer {api_key}"}
    response = requests.put(url, headers=headers, json=payload)

    if response.status_code != 200:
        self.log(f"Failed to create instance: {response.text}", "error")
        raise Exception(f"Failed to create instance: {response.text}")

    return response.json()

delete_file(repo_id, path_in_repo)

Delete a file from a Hugging Face repository.

Parameters:

Name Type Description Default
repo_id str

The ID of the repository.

required
path_in_repo str

The path of the file to delete within the repository.

required

Raises:

Type Description
Exception

If an error occurs during the deletion process.

Source code in distributask/distributask.py
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
def delete_file(self, repo_id: str, path_in_repo: str) -> None:
    """
    Remove a single file from a Hugging Face dataset repository.

    Args:
        repo_id (str): The ID of the repository.
        path_in_repo (str): The path of the file to delete within the repository.

    Raises:
        Exception: Errors during deletion are caught and logged rather than propagated.
    """
    token = self.settings.get("HF_TOKEN")
    api = HfApi(token=token)

    try:
        api.delete_file(
            repo_id=repo_id,
            path_in_repo=path_in_repo,
            repo_type="dataset",
            token=token,
        )
    except Exception as e:
        self.log(
            f"Failed to delete {path_in_repo} from Hugging Face repo {repo_id}: {e}",
            "error",
        )
    else:
        self.log(f"Deleted {path_in_repo} from Hugging Face repo {repo_id}")

destroy_instance(instance_id)

Destroy an instance on the Vast.ai platform.

Parameters:

Name Type Description Default
instance_id str

The ID of the instance to destroy.

required

Returns:

Name Type Description
Dict Dict

A dictionary representing the result of the destroy operation.

Source code in distributask/distributask.py
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
def destroy_instance(self, instance_id: str) -> "requests.Response":
    """
    Destroy an instance on the Vast.ai platform.

    Args:
        instance_id (str): The ID of the instance to destroy.

    Returns:
        requests.Response: The raw HTTP response from the Vast.ai delete call.
            Note this is the Response object itself, not a parsed JSON dict —
            the previous annotation/docstring (Dict) did not match the code.
    """
    api_key = self.get_env("VAST_API_KEY")
    headers = {"Authorization": f"Bearer {api_key}"}
    url = (
        f"https://console.vast.ai/api/v0/instances/{instance_id}/?api_key={api_key}"
    )
    response = requests.delete(url, headers=headers)
    return response

execute_function(func_name, args)

Execute a registered function as a Celery task with provided arguments.

Parameters:

Name Type Description Default
func_name str

The name of the function to execute.

required
args dict

Arguments to pass to the function.

required

Returns:

Type Description
AsyncResult

celery.result.AsyncResult: An object representing the asynchronous result of the task.

Source code in distributask/distributask.py
250
251
252
253
254
255
256
257
258
259
260
261
262
263
def execute_function(self, func_name: str, args: dict) -> Celery.AsyncResult:
    """
    Queue a registered function for asynchronous execution as a Celery task.

    Args:
        func_name (str): The name of the function to execute.
        args (dict): Arguments to pass to the function.

    Returns:
        celery.result.AsyncResult: Handle for the queued task's eventual result.
    """
    return self.call_function_task.delay(func_name, json.dumps(args))

file_exists(repo_id, path_in_repo)

Check if a file exists in a Hugging Face repository.

Parameters:

Name Type Description Default
repo_id str

The ID of the repository.

required
path_in_repo str

The path of the file to check within the repository.

required

Returns:

Name Type Description
bool bool

True if the file exists in the repository, False otherwise.

Raises:

Type Description
Exception

If an error occurs while checking the existence of the file.

Source code in distributask/distributask.py
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
def file_exists(self, repo_id: str, path_in_repo: str) -> bool:
    """
    Check whether a file exists in a Hugging Face dataset repository.

    Args:
        repo_id (str): The ID of the repository.
        path_in_repo (str): The path of the file to check within the repository.

    Returns:
        bool: True if the file exists in the repository, False otherwise
            (including when listing the repository fails).
    """
    token = self.settings.get("HF_TOKEN")
    api = HfApi(token=token)

    try:
        files = api.list_repo_files(
            repo_id=repo_id, repo_type="dataset", token=token
        )
    except Exception as e:
        self.log(
            f"Failed to check if {path_in_repo} exists in Hugging Face repo {repo_id}: {e}",
            "error",
        )
        return False
    return path_in_repo in files

get_env(key, default=None)

Retrieve a value from the configuration or .env file, with an optional default if the key is not found.

Parameters:

Name Type Description Default
key str

The key to look for in the settings.

required
default any

The default value to return if the key is not found. Defaults to None.

None

Returns:

Name Type Description
any any

The value from the settings if the key exists, otherwise the default value.

Source code in distributask/distributask.py
194
195
196
197
198
199
200
201
202
203
204
205
def get_env(self, key: str, default: any = None) -> any:
    """
    Look up a configuration value loaded from the config or .env file.

    Args:
        key (str): The key to look for in the settings.
        default (any): Value returned when the key is absent. Defaults to None.

    Returns:
        any: The stored value for the key, or the default when the key is missing.
    """
    settings = self.settings
    if key in settings:
        return settings[key]
    return default

get_node_log(node, wait_time=2)

Get the log of the Vast.ai instance that is passed in. Makes an API call to tell the instance to send the log, and another one to actually retrieve the log. Args: node (Dict): the node that corresponds to the Vast.ai instance you want the log from wait_time (int): how long to wait in between the two API calls described above

Returns:

Name Type Description
str

the log of the instance requested. If anything else other than a code 200 is received, return None

Source code in distributask/distributask.py
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
def get_node_log(self, node: Dict, wait_time: int = 2):
    """
    Fetch the log of a Vast.ai instance.

    Makes one API call asking the instance to publish its log, waits, then a
    second call to download the published log.

    Args:
        node (Dict): Node dict holding the "instance_id" of the Vast.ai instance.
        wait_time (int): Seconds to wait between the two API calls.

    Returns:
        The HTTP response containing the requested log, or None when either
        call returns a status other than 200.
    """
    instance_id = node["instance_id"]
    request_url = f"https://console.vast.ai/api/v0/instances/request_logs/{instance_id}/"
    headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {self.settings['VAST_API_KEY']}",
    }

    response = requests.request(
        "PUT", request_url, headers=headers, json={"tail": "1000"}, timeout=5
    )
    if response.status_code != 200:
        return None

    log_url = response.json()["result_url"]
    time.sleep(wait_time)
    log_response = requests.get(log_url, timeout=5)
    return log_response if log_response.status_code == 200 else None

get_redis_connection(force_new=False)

Returns the Redis connection. If it already exists, returns the current connection. If it does not exist, it creates a new Redis connection using a connection pool.

Parameters:

Name Type Description Default
force_new bool

Force the creation of a new connection if set to True. Defaults to False.

False

Returns:

Name Type Description
Redis Redis

A Redis connection object.

Source code in distributask/distributask.py
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
def get_redis_connection(self, force_new: bool = False) -> Redis:
    """
    Return the cached Redis connection, creating one from a fresh
    single-connection pool when none exists yet (or when force_new is set).

    Args:
        force_new (bool): Force the creation of a new connection if set to True. Defaults to False.

    Returns:
        Redis: A Redis connection object.
    """
    if force_new or self.redis_client is None:
        self.pool = ConnectionPool(
            host=self.settings["REDIS_HOST"],
            port=self.settings["REDIS_PORT"],
            password=self.settings["REDIS_PASSWORD"],
            max_connections=1,
        )
        self.redis_client = Redis(connection_pool=self.pool)
        # Make sure the pool is released when the interpreter exits.
        atexit.register(self.pool.disconnect)
    return self.redis_client

get_redis_url()

Construct a Redis URL from the configuration settings.

Returns:

Name Type Description
str str

A Redis URL string.

Raises:

Type Description
ValueError

If any required Redis connection parameter is missing.

Source code in distributask/distributask.py
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
def get_redis_url(self) -> str:
    """
    Build a Redis URL of the form redis://user:password@host:port from the settings.

    Returns:
        str: A Redis URL string.

    Raises:
        ValueError: If any required Redis connection parameter is missing.
    """
    cfg = self.settings
    parts = (
        cfg["REDIS_HOST"],
        cfg["REDIS_PASSWORD"],
        cfg["REDIS_PORT"],
        cfg["REDIS_USER"],
    )
    if any(part is None for part in parts):
        raise ValueError("Missing required Redis configuration values")

    host, password, port, username = parts
    return f"redis://{username}:{password}@{host}:{port}"

get_settings()

Return settings of distributask instance.

Source code in distributask/distributask.py
144
145
146
147
148
def get_settings(self) -> dict:
    """
    Return the configuration settings of this distributask instance.

    Returns:
        dict: Mapping of configuration keys (HF/Vast/Redis/Celery) to their values.
            (Previous annotation said str, but the settings dict is returned.)
    """
    return self.settings

initialize_dataset(**kwargs)

Initialize a Hugging Face repository if it doesn't exist. Reads Hugging Face info from config or .env

Parameters:

Name Type Description Default
kwargs

kwargs that can be passed into the HfApi.create_repo function.

{}

Raises:

Type Description
HTTPError

If repo cannot be created due to connection error other than repo not existing

Source code in distributask/distributask.py
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
def initialize_dataset(self, **kwargs) -> None:
    """
    Initialize a Hugging Face repository if it doesn't exist. Reads Hugging Face info from config or .env

    Args:
        kwargs: kwargs that can be passed into the HfApi.create_repo function.

    Raises:
        HTTPError: If repo cannot be created due to connection error other than repo not existing
    """
    repo_id = self.settings.get("HF_REPO_ID")
    hf_token = self.settings.get("HF_TOKEN")
    api = HfApi(token=hf_token)

    # creates new repo if desired repo is not found
    try:
        # Probe for the repo; the returned info is unused — only a 404 matters.
        repo_info = api.repo_info(repo_id=repo_id, repo_type="dataset", timeout=30)
    except HTTPError as e:
        if e.response.status_code == 404:
            self.log(
                f"Repository {repo_id} does not exist. Creating a new repository.",
                "warn",
            )
            api.create_repo(
                repo_id=repo_id, token=hf_token, repo_type="dataset", **kwargs
            )
        else:
            # Any non-404 HTTP failure (auth, rate limit, outage) is fatal here.
            raise e

    # Create config.json file
    config = {
        "data_loader_name": "custom",
        "data_loader_kwargs": {
            "path": repo_id,
            "format": "files",
            "fields": ["file_path", "text"],
        },
    }

    # apply config.json to created repo
    # Clone into a temp dir, write config.json, and let the Repository.commit
    # context push it on exit; the temp clone is deleted afterwards.
    with tempfile.TemporaryDirectory() as temp_dir:
        with Repository(
            local_dir=temp_dir,
            clone_from=repo_id,
            repo_type="dataset",
            use_auth_token=hf_token,
        ).commit(commit_message="Add config.json"):
            with open(os.path.join(temp_dir, "config.json"), "w") as f:
                json.dump(config, f, indent=2)

    self.log(f"Initialized repository {repo_id}.")

list_files(repo_id)

Get a list of files from a Hugging Face repository.

Parameters:

Name Type Description Default
repo_id str

The ID of the repository.

required

Returns:

Name Type Description
list list

A list of file paths in the repository.

Raises:

Type Description
Exception

If an error occurs while retrieving the list of files.

Source code in distributask/distributask.py
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
def list_files(self, repo_id: str) -> list:
    """
    List the file paths stored in a Hugging Face dataset repository.

    Args:
        repo_id (str): The ID of the repository.

    Returns:
        list: File paths in the repository; an empty list when the listing fails.
    """
    token = self.settings.get("HF_TOKEN")
    api = HfApi(token=token)

    try:
        return api.list_repo_files(repo_id=repo_id, repo_type="dataset", token=token)
    except Exception as e:
        self.log(
            f"Failed to get the list of files from Hugging Face repo {repo_id}: {e}",
            "error",
        )
        return []

log(message, level='info')

Log a message with the specified level.

Parameters:

Name Type Description Default
message str

The message to log.

required
level str

The logging level. Defaults to "info".

'info'
Source code in distributask/distributask.py
133
134
135
136
137
138
139
140
141
142
def log(self, message: str, level: str = "info") -> None:
    """
    Emit a message through the Celery task logger at the given level.

    Args:
        message (str): The message to log.
        level (str): Logger method name ("info", "error", "warning", ...). Defaults to "info".
    """
    log_method = getattr(get_task_logger(__name__), level)
    log_method(message)

monitor_tasks(tasks, update_interval=1, show_time_left=True, print_statements=True)

Monitor the status of the tasks on the Vast.ai nodes.

Parameters:

Name Type Description Default
tasks List

A list of the tasks to monitor. Should be a list of the results of execute_function.

required
update_interval int

Number of seconds between updates of the task statuses.

1
show_time_left bool

Show the estimated time left to complete tasks using the tqdm progress bar

True
print_statements bool

Allow printing of status of task queue

required

Raises:

Type Description
Exception

If error in the process of executing the tasks

Source code in distributask/distributask.py
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
def monitor_tasks(
    self, tasks, update_interval=1, show_time_left=True, print_statements=True
):
    """
    Monitor the status of the tasks on the Vast.ai nodes.

    Args:
        tasks (List): A list of the tasks to monitor. Should be a list of the results of execute_function.
        update_interval (int): Number of seconds between task status updates.
        show_time_left (bool): Show the estimated time left to complete tasks using the tqdm progress bar.
        print_statements (bool): Allow printing of status of task queue.

    Raises:
        Exception: If error in the process of executing the tasks
    """

    try:
        # Poll until every task reports ready, advancing the progress bar by
        # the number of newly completed tasks each cycle.
        if print_statements:
            print("Tasks submitted to queue. Starting queue...")
            print("Elapsed time<Estimated time to completion")
        with tqdm(total=len(tasks), unit="task") as pbar:
            while not all(task.ready() for task in tasks):
                completed = sum(task.ready() for task in tasks)
                pbar.update(completed - pbar.n)
                time.sleep(update_interval)
            # Final refresh so the bar reflects the true completed count
            # (the loop exits before the last per-cycle update).
            pbar.update(sum(task.ready() for task in tasks) - pbar.n)
    except Exception as e:
        # Log at error level so monitoring failures stand out in worker logs.
        self.log(f"Error in executing tasks on nodes, {str(e)}", "error")

    if all(task.ready() for task in tasks):
        print("All tasks completed.")

register_function(func)

Decorator to register a function so that it can be invoked as a Celery task.

Parameters:

Name Type Description Default
func callable

The function to register.

required

Returns:

Name Type Description
callable callable

The original function, now registered as a callable task.

Source code in distributask/distributask.py
237
238
239
240
241
242
243
244
245
246
247
248
def register_function(self, func: callable) -> callable:
    """
    Decorator that records a function so it can later be invoked as a Celery task.

    Args:
        func (callable): The function to register.

    Returns:
        callable: The same function, unchanged, after registration.
    """
    name = func.__name__
    self.registered_functions[name] = func
    return func

rent_nodes(max_price, max_nodes, image, module_name, env_settings=None, command=None)

Rent nodes as an instance on the Vast.ai platform.

Parameters:

Name Type Description Default
max_price float

The maximum price per hour for the nodes.

required
max_nodes int

The maximum number of nodes to rent.

required
image str

The image to use for the nodes.

required
module_name str

The name of the module to run on the nodes.

required

Returns:

Type Description
List[Dict]

List[Dict]: A list of dictionaries representing the rented nodes. If error is encountered

List[Dict]

trying to rent, it will retry every 5 seconds.

Source code in distributask/distributask.py
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
def rent_nodes(
    self,
    max_price: float,
    max_nodes: int,
    image: str,
    module_name: str,
    env_settings: Dict = None,
    command: str = None,
) -> List[Dict]:
    """
    Rent nodes as instances on the Vast.ai platform.

    Args:
        max_price (float): The maximum price per hour for the nodes.
        max_nodes (int): The maximum number of nodes to rent.
        image (str): The image to use for the nodes.
        module_name (str): The name of the module to run on the nodes.
        env_settings (Dict): Environment variables to pass to each instance;
            forwarded to create_instance. Defaults to None.
        command (str): Worker start command forwarded to create_instance. Defaults to None.

    Returns:
        List[Dict]: Dictionaries describing the rented nodes ("offer_id" and
        "instance_id"). Offer searches retry on error; rental failures move on
        to the next round of offers.
    """
    rented_nodes: List[Dict] = []
    while len(rented_nodes) < max_nodes:
        # Search for offers, retrying a bounded number of times on API errors.
        # Initialize offers so exhausting all retries cannot leave it unbound
        # (previously this raised NameError at the sorted() call below).
        offers: List[Dict] = []
        search_retries = 10
        while search_retries > 0:
            try:
                offers = self.search_offers(max_price)
                break
            except Exception as e:
                self.log(
                    f"Error searching for offers: {str(e)} - retrying in 5 seconds...",
                    "error",
                )
                search_retries -= 1
                # sleep for 10 seconds before retrying
                time.sleep(10)
                continue

        offers = sorted(
            offers, key=lambda offer: offer["dph_total"]
        )  # Sort offers by price, lowest to highest
        for offer in offers:
            time.sleep(5)
            if len(rented_nodes) >= max_nodes:
                break
            try:
                instance = self.create_instance(
                    offer["id"], image, module_name, env_settings=env_settings, command=command
                )
                rented_nodes.append(
                    {
                        "offer_id": offer["id"],
                        "instance_id": instance["new_contract"],
                    }
                )
            except Exception as e:
                self.log(
                    f"Error renting node: {str(e)} - searching for new offers",
                    "error",
                )
                break  # Break out of the current offer iteration
        else:
            # Loop completed without break: every offer was tried (or none were
            # found), so stop renting instead of spinning forever.
            self.log("No more offers available - stopping node rental", "warning")
            break

    # Ensure rented instances are destroyed when the process exits.
    atexit.register(self.terminate_nodes, rented_nodes)
    return rented_nodes

search_offers(max_price)

Search for available offers to rent a node as an instance on the Vast.ai platform.

Parameters:

Name Type Description Default
max_price float

The maximum price per hour for the instance.

required

Returns:

Type Description
List[Dict]

List[Dict]: A list of dictionaries representing the available offers.

Raises:

Type Description
RequestException

If there is an error while making the API request.

Source code in distributask/distributask.py
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
def search_offers(self, max_price: float) -> List[Dict]:
    """
    Search for available offers to rent a node as an instance on the Vast.ai platform.

    Args:
        max_price (float): The maximum price per hour for the instance.

    Returns:
        List[Dict]: A list of dictionaries representing the available offers.

    Raises:
        requests.exceptions.RequestException: If there is an error while making the API request.
    """
    api_key = self.get_env("VAST_API_KEY")
    base_url = "https://console.vast.ai/api/v0/bundles/"
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    }
    # Query: any rentable GPU with >=4GB RAM under max_price, cheapest first.
    url = (
        base_url
        + '?q={"gpu_ram":">=4","rentable":{"eq":true},"dph_total":{"lte":'
        + str(max_price)
        + '},"sort_option":{"0":["dph_total","asc"],"1":["total_flops","asc"]}}'
    )

    # Pre-bind so the except block can reference `response` even when
    # requests.get() raised before assigning it (avoids UnboundLocalError
    # shadowing the real network error).
    response = None
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        json_response = response.json()
        return json_response["offers"]

    except requests.exceptions.RequestException as e:
        # Compare against None explicitly: requests.Response.__bool__ is
        # False for 4xx/5xx statuses, so a plain truthiness check would hide
        # the error body of exactly the failed responses we want to log.
        self.log(
            f"Error: {e}\nResponse: {response.text if response is not None else 'No response'}"
        )
        raise

terminate_nodes(nodes)

Terminate the instances of rented nodes on Vast.ai.

Parameters:

Name Type Description Default
nodes List[Dict]

A list of dictionaries representing the rented nodes.

required

Raises:

Type Description
Exception

If error in destroying instances.

Source code in distributask/distributask.py
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
def terminate_nodes(self, nodes: List[Dict]) -> None:
    """
    Terminate the instances of rented nodes on Vast.ai.

    Args:
        nodes (List[Dict]): A list of dictionaries representing the rented nodes.

    Raises:
        Exception: If error in destroying instances.
    """
    print("Terminating nodes...")
    for node in nodes:
        # Brief pause between API calls so we do not hammer Vast.ai.
        time.sleep(1)
        instance_id = node["instance_id"]
        try:
            first_attempt = self.destroy_instance(instance_id)
            # One retry after a short backoff; a second failure is best-effort
            # and intentionally not re-checked.
            if first_attempt.status_code != 200:
                time.sleep(5)
                self.destroy_instance(instance_id)
        except Exception as e:
            self.log(
                f"Error terminating node: {node['instance_id']}, {str(e)}", "error"
            )

update_function_status(task_id, status)

Update the status of a function task as a new Redis key.

Parameters:

Name Type Description Default
task_id str

The ID of the task.

required
status str

The new status to set.

required
Source code in distributask/distributask.py
265
266
267
268
269
270
271
272
273
274
def update_function_status(self, task_id: str, status: str) -> None:
    """
    Update the status of a function task as a new Redis key.

    Args:
        task_id (str): The ID of the task.
        status (str): The new status to set.
    """
    # Statuses live in their own key namespace: one "task_status:<id>" key per task.
    connection = self.get_redis_connection()
    connection.set(f"task_status:{task_id}", status)

upload_directory(dir_path)

Upload a directory to a Hugging Face repository. Can be used to reduce frequency of Hugging Face API calls if you are rate limited while using the upload_file function.

Parameters:

Name Type Description Default
dir_path str

The path of the directory to upload.

required

Raises:

Type Description
Exception

If an error occurs during the upload process.

Source code in distributask/distributask.py
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
def upload_directory(self, dir_path: str) -> None:
    """
    Upload a directory to a Hugging Face repository. Can be used to reduce frequency of Hugging Face API
    calls if you are rate limited while using the upload_file function.

    Args:
        dir_path (str): The path of the directory to upload.

    Raises:
        Exception: If an error occurs during the upload process.

    """
    hf_token = self.settings.get("HF_TOKEN")
    repo_id = self.settings.get("HF_REPO_ID")

    try:
        self.log(f"Uploading {dir_path} to Hugging Face repo {repo_id}")

        # Single API call for the whole tree, as a "dataset" repo.
        HfApi(token=hf_token).upload_folder(
            folder_path=dir_path,
            repo_id=repo_id,
            repo_type="dataset",
        )
        self.log(f"Uploaded {dir_path} to Hugging Face repo {repo_id}")
    except Exception as e:
        # Best-effort: failures are logged, never raised to the caller.
        self.log(
            f"Failed to upload {dir_path} to Hugging Face repo {repo_id}: {e}",
            "error",
        )

upload_file(file_path)

Upload a file to a Hugging Face repository.

Parameters:

Name Type Description Default
file_path str

The path of the file to upload.

required

Raises:

Type Description
Exception

If an error occurs during the upload process.

Source code in distributask/distributask.py
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
def upload_file(self, file_path: str) -> None:
    """
    Upload a file to a Hugging Face repository.

    Args:
        file_path (str): The path of the file to upload.

    Raises:
        Exception: If an error occurs during the upload process.

    """
    hf_token = self.settings.get("HF_TOKEN")
    repo_id = self.settings.get("HF_REPO_ID")

    try:
        self.log(f"Uploading {file_path} to Hugging Face repo {repo_id}")
        # Build the client inside the try so a failure here is logged like any
        # other upload error, matching upload_directory's best-effort handling
        # (previously a client-construction error propagated uncaught).
        api = HfApi(token=hf_token)
        api.upload_file(
            path_or_fileobj=file_path,
            # Files land flat in the repo root under their basename.
            path_in_repo=os.path.basename(file_path),
            repo_id=repo_id,
            token=hf_token,
            repo_type="dataset",
        )
        self.log(f"Uploaded {file_path} to Hugging Face repo {repo_id}")
    except Exception as e:
        self.log(
            f"Failed to upload {file_path} to Hugging Face repo {repo_id}: {e}",
            "error",
        )