azure_blob_storage.py

from collections.abc import Generator
from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas

from configs import dify_config
from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage

class AzureBlobStorage(BaseStorage):
    """Implementation for Azure Blob storage."""

    def __init__(self):
        super().__init__()
        self.bucket_name = dify_config.AZURE_BLOB_CONTAINER_NAME
        self.account_url = dify_config.AZURE_BLOB_ACCOUNT_URL
        self.account_name = dify_config.AZURE_BLOB_ACCOUNT_NAME
        self.account_key = dify_config.AZURE_BLOB_ACCOUNT_KEY

    def save(self, filename, data):
        client = self._sync_client()
        blob_container = client.get_container_client(container=self.bucket_name)
        blob_container.upload_blob(filename, data)

    def load_once(self, filename: str) -> bytes:
        client = self._sync_client()
        blob = client.get_container_client(container=self.bucket_name)
        blob = blob.get_blob_client(blob=filename)
        data = blob.download_blob().readall()
        return data

    def load_stream(self, filename: str) -> Generator:
        client = self._sync_client()

        def generate(filename: str = filename) -> Generator:
            blob = client.get_blob_client(container=self.bucket_name, blob=filename)
            blob_data = blob.download_blob()
            yield from blob_data.chunks()

        return generate(filename)

    def download(self, filename, target_filepath):
        client = self._sync_client()
        blob = client.get_blob_client(container=self.bucket_name, blob=filename)
        with open(target_filepath, "wb") as my_blob:
            blob_data = blob.download_blob()
            blob_data.readinto(my_blob)

    def exists(self, filename):
        client = self._sync_client()
        blob = client.get_blob_client(container=self.bucket_name, blob=filename)
        return blob.exists()

    def delete(self, filename):
        client = self._sync_client()
        blob_container = client.get_container_client(container=self.bucket_name)
        blob_container.delete_blob(filename)

    def _sync_client(self):
        cache_key = "azure_blob_sas_token_{}_{}".format(self.account_name, self.account_key)
        cache_result = redis_client.get(cache_key)
        if cache_result is not None:
            sas_token = cache_result.decode("utf-8")
        else:
            sas_token = generate_account_sas(
                account_name=self.account_name,
                account_key=self.account_key,
                resource_types=ResourceTypes(service=True, container=True, object=True),
                permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
                expiry=datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(hours=1),
            )
            # Cache the SAS token for 3000 s (50 min), shorter than its 1-hour expiry,
            # so a token served from the cache still has several minutes of validity left.
            redis_client.set(cache_key, sas_token, ex=3000)
        return BlobServiceClient(account_url=self.account_url, credential=sas_token)
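
A minimal usage sketch, assuming the AZURE_BLOB_* settings are present in dify_config and the redis_client extension has been initialized; the blob name and payload below are illustrative only:

# hypothetical example, not part of the module above
storage = AzureBlobStorage()

storage.save("docs/report.txt", b"hello from Azure Blob")  # upload raw bytes
print(storage.exists("docs/report.txt"))                   # True once the upload succeeds

data = storage.load_once("docs/report.txt")                # read the whole blob into memory
for chunk in storage.load_stream("docs/report.txt"):       # or stream it chunk by chunk
    ...

storage.delete("docs/report.txt")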