utils.py

#!/usr/bin/env python
"""Support utilities used by the Earth Engine command line interface.

This module defines the Command class which is the base class of all
the commands supported by the EE command line tool. It also defines
the classes for configuration and runtime context management.
"""

import collections
import datetime
import json
import os
import re
import tempfile
import threading
import time
import urllib.parse

from google.cloud import storage
from google.oauth2.credentials import Credentials
import httplib2
import six

import ee

HOMEDIR = os.path.expanduser('~')
EE_CONFIG_FILE = 'EE_CONFIG_FILE'
DEFAULT_EE_CONFIG_FILE_RELATIVE = os.path.join(
    '.config',
    'earthengine',
    'credentials',
)
DEFAULT_EE_CONFIG_FILE = os.path.join(
    HOMEDIR, DEFAULT_EE_CONFIG_FILE_RELATIVE)

CONFIG_PARAMS = {
    'url': 'https://earthengine.googleapis.com',
    'account': None,
    'private_key': None,
    'refresh_token': None,
    'cloud_api_key': None,
    'project': None,
    'client_id': ee.oauth.CLIENT_ID,
    'client_secret': ee.oauth.CLIENT_SECRET,
    'scopes': ee.oauth.SCOPES,
}
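
# A minimal sketch of what the JSON credentials file at
# ~/.config/earthengine/credentials might contain. The keys mirror
# CONFIG_PARAMS (CommandLineConfig reads each key with its default as a
# fallback); the values below are purely illustrative:
#
#   {
#     "refresh_token": "<oauth-refresh-token>",
#     "project": "my-cloud-project"
#   }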

TASK_FINISHED_STATES = (ee.batch.Task.State.COMPLETED,
                        ee.batch.Task.State.FAILED,
                        ee.batch.Task.State.CANCELLED)


class CommandLineConfig(object):
  """Holds the configuration parameters used by the EE command line interface.

  This class attempts to load the configuration parameters from a file
  specified as a constructor argument. If not provided, it attempts to load
  the configuration from a file specified via the EE_CONFIG_FILE environment
  variable. If the variable is not set, it looks for a JSON file at the
  path ~/.config/earthengine/credentials. If all fails, it falls back to using
  some predefined defaults for each configuration parameter.

  If --service_account_file is specified, it is used instead.
  """

  def __init__(
      self, config_file=None, service_account_file=None,
      project_override=None):
    if not config_file:
      config_file = os.environ.get(EE_CONFIG_FILE, DEFAULT_EE_CONFIG_FILE)
    self.config_file = config_file
    self.project_override = project_override
    config = {}
    if os.path.exists(config_file):
      with open(config_file) as config_file_json:
        config = json.load(config_file_json)
    for key, default_value in CONFIG_PARAMS.items():
      setattr(self, key, config.get(key, default_value))
    self.service_account_file = service_account_file
    if service_account_file:
      # Load the file to verify that it exists.
      with open(service_account_file) as service_file_json:
        service = json.load(service_file_json)
        for key, value in service.items():
          setattr(self, key, value)

  def _get_credentials(self):
    """Acquires credentials."""
    if self.service_account_file:
      return ee.ServiceAccountCredentials(self.client_email,
                                          self.service_account_file)
    elif self.account and self.private_key:
      return ee.ServiceAccountCredentials(self.account, self.private_key)
    elif self.refresh_token:
      return Credentials(
          None,
          client_id=self.client_id,
          client_secret=self.client_secret,
          refresh_token=self.refresh_token,
          scopes=self.scopes,
          token_uri=ee.oauth.TOKEN_URI)
    else:
      return 'persistent'

  # TODO(user): We now have two ways of accessing GCS. storage.Client is
  # preferred and we should eventually migrate to just use that
  # instead of sending raw HTTP requests.
  def create_gcs_helper(self):
    """Creates a GcsHelper using the same credentials EE authorizes with."""
    project = self._get_project()
    if project is None:
      raise ValueError('A project is required to access Cloud Storage. It '
                       'can be set per-call by passing the --project flag or '
                       'by setting the \'project\' parameter in your Earth '
                       'Engine config file.')
    creds = self._get_credentials()
    if creds == 'persistent':
      creds = ee.data.get_persistent_credentials()
    return GcsHelper(
        storage.Client(project=project, credentials=creds))

  def _get_project(self):
    # If a --project flag is passed into a command, it supersedes the one set
    # by calling the set_project command.
    if self.project_override:
      return self.project_override
    else:
      return self.project

  def ee_init(self):
    """Loads the EE credentials and initializes the EE client."""
    ee.Initialize(
        credentials=self._get_credentials(),
        opt_url=self.url,
        cloud_api_key=self.cloud_api_key,
        project=self._get_project())

  def save(self):
    config = {}
    for key in CONFIG_PARAMS:
      value = getattr(self, key)
      if value is not None:
        config[key] = value
    with open(self.config_file, 'w') as output_file:
      json.dump(config, output_file)
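
# A minimal usage sketch (the project name is hypothetical): load the default
# config file, override the Cloud project, initialize the EE client, and build
# a GCS helper that reuses the same credentials.
def _example_config_usage():  # Hypothetical illustration helper, not used by the CLI.
  config = CommandLineConfig(project_override='my-cloud-project')
  config.ee_init()
  return config.create_gcs_helper()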

class GcsHelper(object):
  """A helper for manipulating files in GCS."""

  def __init__(self, client):
    self.client = client

  @staticmethod
  def _split_gcs_path(path):
    m = re.search('gs://([a-z0-9-_.]*)/(.*)', path, re.IGNORECASE)
    if not m:
      raise ValueError('\'{}\' is not a valid GCS path'.format(path))
    return m.groups()

  @staticmethod
  def _canonicalize_dir_path(path):
    return path.strip().rstrip('/')

  def _get_blobs_under_path(self, path):
    bucket, prefix = GcsHelper._split_gcs_path(
        GcsHelper._canonicalize_dir_path(path))
    return self.client.get_bucket(bucket).list_blobs(prefix=prefix + '/')

  def check_gcs_dir_within_size(self, path, max_bytes):
    blobs = self._get_blobs_under_path(path)
    total_bytes = 0
    for blob in blobs:
      total_bytes += blob.size
      if total_bytes > max_bytes:
        raise ValueError('Size of files in \'{}\' exceeds allowed size: '
                         '{} > {}.'.format(path, total_bytes, max_bytes))
    if total_bytes == 0:
      raise ValueError('No files found at \'{}\'.'.format(path))

  def download_dir_to_temp(self, path):
    """Recursively downloads the contents at a GCS path to a temp directory."""
    canonical_path = GcsHelper._canonicalize_dir_path(path)
    blobs = self._get_blobs_under_path(canonical_path)
    temp_dir = tempfile.mkdtemp()
    _, prefix = GcsHelper._split_gcs_path(canonical_path)
    for blob in blobs:
      stripped_name = blob.name[len(prefix):]
      if stripped_name == '/':
        continue
      output_path = temp_dir + six.ensure_str(stripped_name)
      dir_path = os.path.dirname(output_path)
      if not os.path.exists(dir_path):
        os.makedirs(dir_path)
      if output_path[-1:] != '/':
        blob.download_to_filename(output_path)
    return temp_dir

  def upload_dir_to_bucket(self, source_path, dest_path):
    """Uploads a directory to cloud storage."""
    canonical_path = GcsHelper._canonicalize_dir_path(source_path)
    files = list()
    for dirpath, _, filenames in os.walk(canonical_path):
      files += [os.path.join(dirpath, f) for f in filenames]
    bucket, prefix = GcsHelper._split_gcs_path(
        GcsHelper._canonicalize_dir_path(dest_path))
    bucket_client = self.client.get_bucket(bucket)
    for f in files:
      relative_file = f[len(canonical_path):]
      bucket_client.blob(prefix + relative_file).upload_from_filename(f)
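
# A brief usage sketch (bucket and prefixes are hypothetical): check the size
# of a GCS "directory", mirror it to a local temp directory, then upload the
# local copy back under a new prefix.
def _example_gcs_helper_usage(helper):  # Hypothetical illustration helper.
  helper.check_gcs_dir_within_size('gs://my-bucket/some/dir', 10 * 1024 * 1024)
  local_dir = helper.download_dir_to_temp('gs://my-bucket/some/dir')
  helper.upload_dir_to_bucket(local_dir, 'gs://my-bucket/some/dir-copy')
  return local_dir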

def is_gcs_path(path):
  return six.ensure_str(path.strip()).startswith('gs://')


def query_yes_no(msg):
  print('%s (y/n)' % msg)
  while True:
    confirm = input().lower()
    if confirm == 'y':
      return True
    elif confirm == 'n':
      return False
    else:
      print('Please respond with \'y\' or \'n\'.')


def truncate(string, length):
  if len(string) > length:
    return six.ensure_str(string[:length]) + '..'
  else:
    return string
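
# For illustration, how these small helpers behave on sample inputs.
def _example_string_helpers():  # Hypothetical illustration helper.
  assert is_gcs_path('gs://my-bucket/obj')
  assert truncate('abcdefgh', 4) == 'abcd..'
  assert truncate('abc', 4) == 'abc'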

def wait_for_task(task_id, timeout, log_progress=True):
  """Waits for the specified task to finish, or a timeout to occur."""
  start = time.time()
  elapsed = 0
  last_check = 0
  while True:
    elapsed = time.time() - start
    status = ee.data.getTaskStatus(task_id)[0]
    state = status['state']
    if state in TASK_FINISHED_STATES:
      error_message = status.get('error_message', None)
      print('Task %s ended at state: %s after %.2f seconds'
            % (task_id, state, elapsed))
      if error_message:
        raise ee.ee_exception.EEException('Error: %s' % error_message)
      return
    if log_progress and elapsed - last_check >= 30:
      print('[{:%H:%M:%S}] Current state for task {}: {}'
            .format(datetime.datetime.now(), task_id, state))
      last_check = elapsed
    remaining = timeout - elapsed
    if remaining > 0:
      time.sleep(min(10, remaining))
    else:
      break
  print('Wait for task %s timed out after %.2f seconds' % (task_id, elapsed))


def wait_for_tasks(task_id_list, timeout, log_progress=False):
  """For each task specified in task_id_list, wait for that task or timeout."""
  if len(task_id_list) == 1:
    wait_for_task(task_id_list[0], timeout, log_progress)
    return
  threads = []
  for task_id in task_id_list:
    t = threading.Thread(target=wait_for_task,
                         args=(task_id, timeout, log_progress))
    threads.append(t)
    t.start()
  for thread in threads:
    thread.join()
  status_list = ee.data.getTaskStatus(task_id_list)
  status_counts = collections.defaultdict(int)
  for status in status_list:
    status_counts[status['state']] += 1
  num_incomplete = (len(status_list) - status_counts['COMPLETED']
                    - status_counts['FAILED'] - status_counts['CANCELLED'])
  print('Finished waiting for tasks.\n Status summary:')
  print(' %d tasks completed successfully.' % status_counts['COMPLETED'])
  print(' %d tasks failed.' % status_counts['FAILED'])
  print(' %d tasks cancelled.' % status_counts['CANCELLED'])
  print(' %d tasks are still incomplete (timed-out)' % num_incomplete)
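
# A minimal usage sketch (the task IDs are hypothetical): wait up to ten
# minutes for a batch of tasks, with per-task progress logged roughly every
# 30 seconds by wait_for_task.
def _example_wait_for_tasks():  # Hypothetical illustration helper.
  wait_for_tasks(['TASK_ID_1', 'TASK_ID_2'], timeout=600, log_progress=True)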

def expand_gcs_wildcards(source_files):
  """Implements glob-like '*' wildcard completion for cloud storage objects.

  Args:
    source_files: A list of one or more cloud storage paths of the format
      gs://[bucket]/[path-maybe-with-wildcards]

  Yields:
    cloud storage paths of the above format with '*' wildcards expanded.

  Raises:
    EEException: If badly formatted source_files
      (e.g., missing gs://) are specified
  """
  for source in source_files:
    if '*' not in source:
      yield source
      continue

    # We extract the bucket and prefix from the input path to match
    # the parameters for calling GCS list objects and reduce the number
    # of items returned by that API call.

    # Capture the part of the path after gs:// and before the first /
    bucket_regex = 'gs://([a-z0-9_.-]+)/(.*)'
    bucket_match = re.match(bucket_regex, six.ensure_str(source))
    if bucket_match:
      bucket, rest = bucket_match.group(1, 2)
    else:
      raise ee.ee_exception.EEException(
          'Badly formatted source file or bucket: %s' % source)
    prefix = rest[:rest.find('*')]  # Everything before the first wildcard

    bucket_files = _gcs_ls(bucket, prefix)

    # Regex to match the source path with wildcards expanded
    regex = six.ensure_str(re.escape(source)).replace(r'\*', '[^/]*') + '$'
    for gcs_path in bucket_files:
      if re.match(regex, gcs_path):
        yield gcs_path
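
# A brief usage sketch (bucket and pattern are hypothetical). Because each '*'
# is expanded to '[^/]*', a wildcard matches within a single path segment and
# never crosses '/' boundaries.
def _example_expand_wildcards():  # Hypothetical illustration helper.
  return list(expand_gcs_wildcards(['gs://my-bucket/tiles/*.tif']))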

def _gcs_ls(bucket, prefix=''):
  """Retrieve a list of cloud storage filepaths from the given bucket.

  Args:
    bucket: The cloud storage bucket to be queried
    prefix: Optional, a prefix used to select the objects to return

  Yields:
    Cloud storage filepaths matching the given bucket and prefix

  Raises:
    EEException:
      If there is an error in accessing the specified bucket
  """
  base_url = 'https://storage.googleapis.com/storage/v1/b/%s/o' % bucket
  method = 'GET'
  http = ee.data.authorizeHttp(httplib2.Http(0))
  next_page_token = None
  # Loop to handle paginated responses from GCS;
  # exits once no 'next page token' is returned.
  while True:
    params = {'fields': 'items/name,nextPageToken'}
    if next_page_token:
      params['pageToken'] = next_page_token
    if prefix:
      params['prefix'] = prefix
    payload = urllib.parse.urlencode(params)
    url = base_url + '?' + payload
    try:
      response, content = http.request(url, method=method)
    except httplib2.HttpLib2Error as e:
      raise ee.ee_exception.EEException('Unexpected HTTP error: %s' % str(e))
    if response.status < 100 or response.status >= 300:
      raise ee.ee_exception.EEException(('Error retrieving bucket %s;'
                                         ' Server returned HTTP code: %d' %
                                         (bucket, response.status)))
    json_content = json.loads(content)
    if 'error' in json_content:
      json_error = json_content['error']['message']
      raise ee.ee_exception.EEException('Error retrieving bucket %s: %s' %
                                        (bucket, json_error))
    if 'items' not in json_content:
      raise ee.ee_exception.EEException(
          'Cannot find items list in the response from GCS: %s' % json_content)
    objects = json_content['items']
    object_names = [str(gc_object['name']) for gc_object in objects]
    for name in object_names:
      yield 'gs://%s/%s' % (bucket, name)

    # GCS indicates no more results
    if 'nextPageToken' not in json_content:
      return
    # Load next page, continue at beginning of while True:
    next_page_token = json_content['nextPageToken']
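
# For illustration, the first page request built above looks roughly like the
# following (bucket and prefix are hypothetical; urllib.parse.urlencode
# percent-encodes the parameter values):
#
#   https://storage.googleapis.com/storage/v1/b/my-bucket/o?fields=items%2Fname%2CnextPageToken&prefix=tiles%2F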