1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859 |
def prefix_dict_keys(prefix, input_dict):
    """Return a new dict whose keys are the original keys prefixed with ``<prefix>_``.

    Values are carried over unchanged; the input dict is not modified.
    """
    renamed = {}
    for original_key, value in input_dict.items():
        renamed[f'{prefix}_{original_key}'] = value
    return renamed
-
def print_system_info():
    """Print versions of Python, scikit-learn, torch (plus CUDA device), and transformers.

    Each library is probed with a guarded import; a missing package is reported
    rather than raised. If torch is absent the function returns early, skipping
    the transformers check (preserves the original control flow).

    Returns:
        None. Output goes to stdout only.
    """
    from platform import python_version
    print(f"Python version is: {python_version()}")

    try:
        import sklearn
        print(f"Scikit-learn version is: {sklearn.__version__}")
    except ImportError:  # was a bare except: — narrow to the import failure
        print("Scikit-learn not found!!!")

    try:
        import torch
        print(f"Torch version is: {torch.__version__}")
        # Fix: `device_count() >= 0` was always true; require at least one
        # device before calling get_device_name(0).
        if torch.cuda.is_available() and torch.cuda.device_count() > 0:
            print(f"Nvidia device is: {torch.cuda.get_device_name(0)}")
        else:
            print("Torch is using CPU")
    except ImportError:
        print("Torch not found!!!")
        return

    try:
        import transformers
        print(f"Transformers version is: {transformers.__version__}")
        try:
            # Older transformers exposed adapters as a submodule; newer versions
            # raise AttributeError here, so catch both failure modes.
            print(f"Adapterhub version is: {transformers.adapters.__version__}")
        except (ImportError, AttributeError):
            print("Adapterhub not found!!!")
    except ImportError:
        print("Transformers not found!!!")
-
def silent_logs():
    """Silence console noise from wandb, transformers, datasets and accelerate."""
    import os

    # NOTE: these env vars are read at import time by the libraries below,
    # so they must be set before the imports happen.
    os.environ["WANDB_SILENT"] = "true"
    os.environ["TRANSFORMERS_NO_ADVISORY_WARNINGS"] = "1"
    os.environ["ACCELERATE_LOG_LEVEL"] = "CRITICAL"

    import transformers
    from transformers.utils import logging as hf_logging
    hf_logging.set_verbosity(transformers.logging.FATAL)

    from datasets.utils.logging import disable_progress_bar, set_verbosity_error
    disable_progress_bar()
    set_verbosity_error()

    import accelerate.utils.other as accelerate_other
    accelerate_other.logger.setLevel(50)  # 50 == logging.CRITICAL
-
def sp_encode(data):
    """JSON-serialize ``data`` and return the result Base32-encoded as bytes."""
    import json
    import base64

    serialized = json.dumps(data).encode()
    return base64.b32encode(serialized)
-
def sp_decode(encoded_data):
    """Decode Base32-wrapped JSON bytes back into the original Python object.

    Inverse of an encoder that does ``b32encode(json.dumps(obj).encode())``.
    """
    import json
    import base64

    raw_json = base64.b32decode(encoded_data).decode()
    return json.loads(raw_json)
|