commands.py 63 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868
  1. #!/usr/bin/env python
  2. """Commands supported by the Earth Engine command line interface.
  3. Each command is implemented by extending the Command class. Each class
  4. defines the supported positional and optional arguments, as well as
  5. the actions to be taken when the command is executed.
  6. """
  7. # pylint: disable=g-bad-import-order
  8. import argparse
  9. import calendar
  10. from collections import Counter
  11. import datetime
  12. import json
  13. import logging
  14. import os
  15. import re
  16. import urllib.parse
  17. import shutil
  18. import sys
  19. import tempfile
  20. # Prevent TensorFlow from logging anything at the native level.
  21. # pylint: disable=g-import-not-at-top
  22. os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
  23. TENSORFLOW_INSTALLED = False
  24. # pylint: disable=g-import-not-at-top
  25. try:
  26. # Suppress non-error logs while TF initializes
  27. old_level = logging.getLogger().level
  28. logging.getLogger().setLevel(logging.ERROR)
  29. import tensorflow.compat.v1 as tf
  30. from tensorflow.compat.v1.saved_model import utils as saved_model_utils
  31. from tensorflow.compat.v1.saved_model import signature_constants
  32. from tensorflow.compat.v1.saved_model import signature_def_utils
  33. # This triggers a warning about disable_resource_variables
  34. tf.disable_v2_behavior()
  35. # Prevent TensorFlow from logging anything at the python level.
  36. tf.logging.set_verbosity(tf.logging.ERROR)
  37. TENSORFLOW_INSTALLED = True
  38. except ImportError:
  39. pass
  40. finally:
  41. logging.getLogger().setLevel(old_level)
  42. TENSORFLOW_ADDONS_INSTALLED = False
  43. # pylint: disable=g-import-not-at-top
  44. if TENSORFLOW_INSTALLED:
  45. try:
  46. if sys.version_info[0] >= 3:
  47. # This import is enough to register TFA ops though isn't directly used
  48. # (for now).
  49. # pylint: disable=unused-import
  50. import tensorflow_addons as tfa
  51. tfa.register_all(custom_kernels=False)
  52. TENSORFLOW_ADDONS_INSTALLED = True
  53. except ImportError:
  54. pass
  55. except AttributeError:
  56. # This can be thrown by "tfa.register_all()" which means the
  57. # tensorflow_addons version is registering ops the old way, i.e.
  58. # automatically at import time. If this is the case, we've actually
  59. # successfully registered TFA.
  60. TENSORFLOW_ADDONS_INSTALLED = True
  61. # pylint: disable=g-import-not-at-top
  62. import ee
  63. from ee.cli import utils
  64. # Constants used in ACLs.
  65. ALL_USERS = 'allUsers'
  66. ALL_USERS_CAN_READ = 'all_users_can_read'
  67. READERS = 'readers'
  68. WRITERS = 'writers'
  69. # Constants used in setting metadata properties.
  70. TYPE_DATE = 'date'
  71. TYPE_NUMBER = 'number'
  72. TYPE_STRING = 'string'
  73. SYSTEM_TIME_START = 'system:time_start'
  74. SYSTEM_TIME_END = 'system:time_end'
  75. # A regex that parses properties of the form "[(type)]name=value". The
  76. # second, third, and fourth group are type, name, and number, respectively.
  77. PROPERTY_RE = re.compile(r'(\(([^\)]*)\))?([^=]+)=(.*)')
  78. # Translate internal task type identifiers to user-friendly strings that
  79. # are consistent with the language in the API and docs.
  80. TASK_TYPES = {
  81. 'EXPORT_FEATURES': 'Export.table',
  82. 'EXPORT_IMAGE': 'Export.image',
  83. 'EXPORT_TILES': 'Export.map',
  84. 'EXPORT_VIDEO': 'Export.video',
  85. 'INGEST': 'Upload',
  86. 'INGEST_IMAGE': 'Upload',
  87. 'INGEST_TABLE': 'Upload',
  88. }
  89. TF_RECORD_EXTENSIONS = ['.tfrecord', 'tfrecord.gz']
  90. # Maximum size of objects in a SavedModel directory that we're willing to
  91. # download from GCS.
  92. SAVED_MODEL_MAX_SIZE = 400 * 1024 * 1024
  93. # Default path to SavedModel variables.
  94. DEFAULT_VARIABLES_PREFIX = '/variables/variables'
  95. def _add_wait_arg(parser):
  96. parser.add_argument(
  97. '--wait', '-w', nargs='?', default=-1, type=int, const=sys.maxsize,
  98. help=('Wait for the task to finish,'
  99. ' or timeout after the specified number of seconds.'
  100. ' Without this flag, the command just starts an export'
  101. ' task in the background, and returns immediately.'))
  102. def _add_overwrite_arg(parser):
  103. parser.add_argument(
  104. '--force', '-f', action='store_true',
  105. help='Overwrite any existing version of the asset.')
  106. def _upload(args, request, ingestion_function):
  107. if 0 <= args.wait < 10:
  108. raise ee.EEException('Wait time should be at least 10 seconds.')
  109. request_id = ee.data.newTaskId()[0]
  110. task_id = ingestion_function(request_id, request, args.force)['id']
  111. print('Started upload task with ID: %s' % task_id)
  112. if args.wait >= 0:
  113. print('Waiting for the upload task to complete...')
  114. utils.wait_for_task(task_id, args.wait)
  115. # Argument types
  116. def _comma_separated_strings(string):
  117. """Parses an input consisting of comma-separated strings."""
  118. error_msg = 'Argument should be a comma-separated list of strings: {}'
  119. values = string.split(',')
  120. if not values:
  121. raise argparse.ArgumentTypeError(error_msg.format(string))
  122. return values
  123. def _comma_separated_numbers(string):
  124. """Parses an input consisting of comma-separated numbers."""
  125. error_msg = 'Argument should be a comma-separated list of numbers: {}'
  126. values = string.split(',')
  127. if not values:
  128. raise argparse.ArgumentTypeError(error_msg.format(string))
  129. numbervalues = []
  130. for value in values:
  131. try:
  132. numbervalues.append(int(value))
  133. except ValueError:
  134. try:
  135. numbervalues.append(float(value))
  136. except ValueError:
  137. raise argparse.ArgumentTypeError(error_msg.format(string))
  138. return numbervalues
  139. def _comma_separated_pyramiding_policies(string):
  140. """Parses an input consisting of comma-separated pyramiding policies."""
  141. error_msg = ('Argument should be a comma-separated list of: '
  142. '{{"mean", "sample", "min", "max", "mode"}}: {}')
  143. values = string.split(',')
  144. if not values:
  145. raise argparse.ArgumentTypeError(error_msg.format(string))
  146. redvalues = []
  147. for value in values:
  148. value = value.upper()
  149. if value not in {'MEAN', 'SAMPLE', 'MIN', 'MAX', 'MODE', 'MEDIAN'}:
  150. raise argparse.ArgumentTypeError(error_msg.format(string))
  151. redvalues.append(value)
  152. return redvalues
  153. def _decode_number(string):
  154. """Decodes a number from a command line argument."""
  155. try:
  156. return float(string)
  157. except ValueError:
  158. raise argparse.ArgumentTypeError(
  159. 'Invalid value for property of type "number": "%s".' % string)
  160. def _timestamp_ms_for_datetime(datetime_obj):
  161. """Returns time since the epoch in ms for the given UTC datetime object."""
  162. return (
  163. int(calendar.timegm(datetime_obj.timetuple()) * 1000) +
  164. datetime_obj.microsecond / 1000)
  165. def _cloud_timestamp_for_timestamp_ms(timestamp_ms):
  166. """Returns a Cloud-formatted date for the given millisecond timestamp."""
  167. # Desired format is like '2003-09-07T19:30:12.345Z'
  168. return datetime.datetime.utcfromtimestamp(
  169. timestamp_ms / 1000.0).isoformat() + 'Z'
  170. def _parse_millis(millis):
  171. return datetime.datetime.fromtimestamp(millis / 1000)
  172. def _decode_date(string):
  173. """Decodes a date from a command line argument, returning msec since epoch".
  174. Args:
  175. string: See AssetSetCommand class comment for the allowable
  176. date formats.
  177. Returns:
  178. long, ms since epoch, or '' if the input is empty.
  179. Raises:
  180. argparse.ArgumentTypeError: if string does not conform to a legal
  181. date format.
  182. """
  183. if not string:
  184. return ''
  185. try:
  186. return int(string)
  187. except ValueError:
  188. date_formats = ['%Y-%m-%d',
  189. '%Y-%m-%dT%H:%M:%S',
  190. '%Y-%m-%dT%H:%M:%S.%f']
  191. for date_format in date_formats:
  192. try:
  193. dt = datetime.datetime.strptime(string, date_format)
  194. return _timestamp_ms_for_datetime(dt)
  195. except ValueError:
  196. continue
  197. raise argparse.ArgumentTypeError(
  198. 'Invalid value for property of type "date": "%s".' % string)
  199. def _decode_property(string):
  200. """Decodes a general key-value property from a command-line argument.
  201. Args:
  202. string: The string must have the form name=value or (type)name=value, where
  203. type is one of 'number', 'string', or 'date'. The value format for dates
  204. is YYYY-MM-DD[THH:MM:SS[.MS]]. The value 'null' is special: it evaluates
  205. to None unless it is cast to a string of 'null'.
  206. Returns:
  207. a tuple representing the property in the format (name, value)
  208. Raises:
  209. argparse.ArgumentTypeError: if the flag value could not be decoded or if
  210. the type is not recognized
  211. """
  212. m = PROPERTY_RE.match(string)
  213. if not m:
  214. raise argparse.ArgumentTypeError(
  215. 'Invalid property: "%s". Must have the form "name=value" or '
  216. '"(type)name=value".', string)
  217. _, type_str, name, value_str = m.groups()
  218. if value_str == 'null' and type_str != TYPE_STRING:
  219. return (name, None)
  220. if type_str is None:
  221. # Guess numeric types automatically.
  222. try:
  223. value = _decode_number(value_str)
  224. except argparse.ArgumentTypeError:
  225. value = value_str
  226. elif type_str == TYPE_DATE:
  227. value = _decode_date(value_str)
  228. elif type_str == TYPE_NUMBER:
  229. value = _decode_number(value_str)
  230. elif type_str == TYPE_STRING:
  231. value = value_str
  232. else:
  233. raise argparse.ArgumentTypeError(
  234. 'Unrecognized property type name: "%s". Expected one of "string", '
  235. '"number", or "date".' % type_str)
  236. return (name, value)
  237. def _add_property_flags(parser):
  238. """Adds command line flags related to metadata properties to a parser."""
  239. parser.add_argument(
  240. '--property', '-p',
  241. help='A property to set, in the form [(type)]name=value. If no type '
  242. 'is specified the type will be "number" if the value is numeric and '
  243. '"string" otherwise. May be provided multiple times.',
  244. action='append',
  245. type=_decode_property)
  246. parser.add_argument(
  247. '--time_start', '-ts',
  248. help='Sets the start time property to a number or date.',
  249. type=_decode_date)
  250. parser.add_argument(
  251. '--time_end', '-te',
  252. help='Sets the end time property to a number or date.',
  253. type=_decode_date)
  254. def _decode_property_flags(args):
  255. """Decodes metadata properties from args as a name->value dict."""
  256. property_list = list(args.property or [])
  257. names = [name for name, _ in property_list]
  258. duplicates = [name for name, count in Counter(names).items() if count > 1]
  259. if duplicates:
  260. raise ee.EEException('Duplicate property name(s): %s.' % duplicates)
  261. return dict(property_list)
  262. def _decode_timestamp_flags(args):
  263. """Decodes timestamp properties from args as a name->value dict."""
  264. result = {}
  265. if args.time_start is not None:
  266. result[SYSTEM_TIME_START] = args.time_start
  267. if args.time_end is not None:
  268. result[SYSTEM_TIME_END] = args.time_end
  269. return result
  270. def _check_valid_files(filenames):
  271. """Returns true if the given filenames are valid upload file URIs."""
  272. for filename in filenames:
  273. if not filename.startswith('gs://'):
  274. raise ee.EEException('Invalid Cloud Storage URL: ' + filename)
  275. def _pretty_print_json(json_obj):
  276. """Pretty-prints a JSON object to stdandard output."""
  277. print(json.dumps(json_obj, sort_keys=True, indent=2, separators=(',', ': ')))
  278. class Dispatcher(object):
  279. """Dispatches to a set of commands implemented as command classes."""
  280. def __init__(self, parser):
  281. self.command_dict = {}
  282. self.dest = self.name + '_cmd'
  283. subparsers = parser.add_subparsers(title='Commands', dest=self.dest)
  284. subparsers.required = True # Needed for proper missing arg handling in 3.x
  285. for command in self.COMMANDS:
  286. command_help = None
  287. if command.__doc__ and command.__doc__.splitlines():
  288. command_help = command.__doc__.splitlines()[0]
  289. subparser = subparsers.add_parser(
  290. command.name,
  291. description=command.__doc__,
  292. help=command_help)
  293. self.command_dict[command.name] = command(subparser)
  294. def run(self, args, config):
  295. self.command_dict[vars(args)[self.dest]].run(args, config)
  296. class AuthenticateCommand(object):
  297. """Prompts the user to authorize access to Earth Engine via OAuth2.
  298. Note that running this command in the default interactive mode within
  299. JupyterLab with a bash magic command (i.e. "!earthengine authenticate") is
  300. problematic (see https://github.com/ipython/ipython/issues/10499). To avoid
  301. this issue, use the non-interactive mode
  302. (i.e. "!earthengine authenticate --quiet").
  303. """
  304. name = 'authenticate'
  305. def __init__(self, parser):
  306. parser.add_argument(
  307. '--authorization-code',
  308. help='Use this specified authorization code.')
  309. parser.add_argument(
  310. '--quiet',
  311. action='store_true',
  312. help='Do not prompt for input, and run gcloud in no-browser mode.')
  313. parser.add_argument(
  314. '--code-verifier',
  315. help='PKCE verifier to prevent auth code stealing.')
  316. parser.add_argument(
  317. '--auth_mode',
  318. help='One of: notebook - use notebook authenticator; gcloud - use'
  319. ' gcloud; appdefault - read GOOGLE_APPLICATION_CREDENTIALS;'
  320. ' localhost[:PORT] - use local browser')
  321. parser.add_argument(
  322. '--scopes', help='Optional comma-separated list of scopes.')
  323. def run(self, args, unused_config):
  324. """Prompts for an auth code, requests a token and saves it."""
  325. # Filter for arguments relevant for ee.Authenticate()
  326. args_auth = {x: vars(args)[x] for x in (
  327. 'authorization_code', 'quiet', 'code_verifier', 'auth_mode')}
  328. if args.scopes:
  329. args_auth['scopes'] = args.scopes.split(',')
  330. ee.Authenticate(**args_auth)
  331. class SetProjectCommand(object):
  332. """Sets the default user project to be used for all API calls."""
  333. name = 'set_project'
  334. def __init__(self, parser):
  335. parser.add_argument('project', help='project id or number to use.')
  336. def run(self, args, config):
  337. """Saves the project to the config file."""
  338. config_path = config.config_file
  339. with open(config_path) as config_file_json:
  340. config = json.load(config_file_json)
  341. config['project'] = args.project
  342. json.dump(config, open(config_path, 'w'))
  343. print('Successfully saved project id')
  344. class UnSetProjectCommand(object):
  345. """UnSets the default user project to be used for all API calls."""
  346. name = 'unset_project'
  347. def __init__(self, unused_parser):
  348. pass
  349. def run(self, unused_args, config):
  350. """Saves the project to the config file."""
  351. config_path = config.config_file
  352. with open(config_path) as config_file_json:
  353. config = json.load(config_file_json)
  354. if 'project' in config:
  355. del config['project']
  356. json.dump(config, open(config_path, 'w'))
  357. print('Successfully unset project id')
  358. class AclChCommand(object):
  359. """Changes the access control list for an asset.
  360. Each change specifies the email address of a user or group and,
  361. for additions, one of R or W corresponding to the read or write
  362. permissions to be granted, as in "user@domain.com:R". Use the
  363. special name "allUsers" to change whether all users can read the
  364. asset.
  365. """
  366. name = 'ch'
  367. def __init__(self, parser):
  368. parser.add_argument('-u', action='append', metavar='user permission',
  369. help='Add or modify a user\'s permission.')
  370. parser.add_argument('-d', action='append', metavar='remove user',
  371. help='Remove all permissions for a user.')
  372. parser.add_argument('-g', action='append', metavar='group permission',
  373. help='Add or modify a group\'s permission.')
  374. parser.add_argument('-dg', action='append', metavar='remove group',
  375. help='Remove all permissions for a user.')
  376. parser.add_argument('asset_id', help='ID of the asset.')
  377. def run(self, args, config):
  378. """Performs an ACL update."""
  379. config.ee_init()
  380. permissions = self._parse_permissions(args)
  381. acl = ee.data.getAssetAcl(args.asset_id)
  382. self._apply_permissions(acl, permissions)
  383. ee.data.setAssetAcl(args.asset_id, json.dumps(acl))
  384. def _set_permission(self, permissions, grant, prefix):
  385. """Sets the permission for a given user/group."""
  386. parts = grant.rsplit(':', 1)
  387. if len(parts) != 2 or parts[1] not in ['R', 'W']:
  388. raise ee.EEException('Invalid permission "%s".' % grant)
  389. user, role = parts
  390. prefixed_user = user
  391. if not self._is_all_users(user):
  392. prefixed_user = prefix + user
  393. if prefixed_user in permissions:
  394. raise ee.EEException('Multiple permission settings for "%s".' % user)
  395. if self._is_all_users(user) and role == 'W':
  396. raise ee.EEException('Cannot grant write permissions to all users.')
  397. permissions[prefixed_user] = role
  398. def _remove_permission(self, permissions, user, prefix):
  399. """Removes permissions for a given user/group."""
  400. prefixed_user = user
  401. if not self._is_all_users(user):
  402. prefixed_user = prefix + user
  403. if prefixed_user in permissions:
  404. raise ee.EEException('Multiple permission settings for "%s".' % user)
  405. permissions[prefixed_user] = 'D'
  406. def _user_account_type(self, user):
  407. """Returns the appropriate account type for a user email."""
  408. # Here 'user' ends with ':R', ':W', or ':D', so we extract
  409. # just the username.
  410. if user.split(':')[0].endswith('.gserviceaccount.com'):
  411. return 'serviceAccount:'
  412. else:
  413. return 'user:'
  414. def _parse_permissions(self, args):
  415. """Decodes and sanity-checks the permissions in the arguments."""
  416. # A dictionary mapping from user ids to one of 'R', 'W', or 'D'.
  417. permissions = {}
  418. if args.u:
  419. for user in args.u:
  420. self._set_permission(permissions, user, self._user_account_type(user))
  421. if args.d:
  422. for user in args.d:
  423. self._remove_permission(
  424. permissions, user, self._user_account_type(user))
  425. if args.g:
  426. for group in args.g:
  427. self._set_permission(permissions, group, 'group:')
  428. if args.dg:
  429. for group in args.dg:
  430. self._remove_permission(permissions, group, 'group:')
  431. return permissions
  432. def _apply_permissions(self, acl, permissions):
  433. """Applies the given permission edits to the given acl."""
  434. for user, role in permissions.items():
  435. if self._is_all_users(user):
  436. acl[ALL_USERS_CAN_READ] = (role == 'R')
  437. elif role == 'R':
  438. if user not in acl[READERS]:
  439. acl[READERS].append(user)
  440. if user in acl[WRITERS]:
  441. acl[WRITERS].remove(user)
  442. elif role == 'W':
  443. if user in acl[READERS]:
  444. acl[READERS].remove(user)
  445. if user not in acl[WRITERS]:
  446. acl[WRITERS].append(user)
  447. elif role == 'D':
  448. if user in acl[READERS]:
  449. acl[READERS].remove(user)
  450. if user in acl[WRITERS]:
  451. acl[WRITERS].remove(user)
  452. def _is_all_users(self, user):
  453. """Determines if a user name represents the special "all users" entity."""
  454. # We previously used "AllUsers" as the magic string to denote that we wanted
  455. # to apply some permission to everyone. However, Google Cloud convention for
  456. # this concept is "allUsers". Because some people might be using one and
  457. # some the other, we do a case-insentive comparison.
  458. return user.lower() == ALL_USERS.lower()
  459. class AclGetCommand(object):
  460. """Prints the access control list for an asset."""
  461. name = 'get'
  462. def __init__(self, parser):
  463. parser.add_argument('asset_id', help='ID of the asset.')
  464. def run(self, args, config):
  465. config.ee_init()
  466. acl = ee.data.getAssetAcl(args.asset_id)
  467. _pretty_print_json(acl)
  468. class AclSetCommand(object):
  469. """Sets the access control list for an asset.
  470. The ACL may be the name of a canned ACL, or it may be the path to a
  471. file containing the output from "acl get". The recognized canned ACL
  472. names are "private", indicating that no users other than the owner
  473. have access, and "public", indicating that all users have read
  474. access. It is currently not possible to modify the owner ACL using
  475. this tool.
  476. """
  477. name = 'set'
  478. CANNED_ACLS = {
  479. 'private': {
  480. READERS: [],
  481. WRITERS: [],
  482. ALL_USERS_CAN_READ: False,
  483. },
  484. 'public': {
  485. READERS: [],
  486. WRITERS: [],
  487. ALL_USERS_CAN_READ: True,
  488. },
  489. }
  490. def __init__(self, parser):
  491. parser.add_argument('file_or_acl_name',
  492. help='File path or canned ACL name.')
  493. parser.add_argument('asset_id', help='ID of the asset.')
  494. def run(self, args, config):
  495. """Sets asset ACL to a canned ACL or one provided in a JSON file."""
  496. config.ee_init()
  497. if args.file_or_acl_name in list(self.CANNED_ACLS.keys()):
  498. acl = self.CANNED_ACLS[args.file_or_acl_name]
  499. else:
  500. acl = json.load(open(args.file_or_acl_name))
  501. ee.data.setAssetAcl(args.asset_id, json.dumps(acl))
  502. class AclCommand(Dispatcher):
  503. """Prints or updates the access control list of the specified asset."""
  504. name = 'acl'
  505. COMMANDS = [
  506. AclChCommand,
  507. AclGetCommand,
  508. AclSetCommand,
  509. ]
  510. class AssetInfoCommand(object):
  511. """Prints metadata and other information about an Earth Engine asset."""
  512. name = 'info'
  513. def __init__(self, parser):
  514. parser.add_argument('asset_id', help='ID of the asset to print.')
  515. def run(self, args, config):
  516. config.ee_init()
  517. info = ee.data.getInfo(args.asset_id)
  518. if info:
  519. _pretty_print_json(info)
  520. else:
  521. raise ee.EEException(
  522. 'Asset does not exist or is not accessible: %s' % args.asset_id)
  523. class AssetSetCommand(object):
  524. """Sets metadata properties of an Earth Engine asset.
  525. Properties may be of type "string", "number", or "date". Dates must
  526. be specified in the form YYYY-MM-DD[Thh:mm:ss[.ff]] in UTC and are
  527. stored as numbers representing the number of milliseconds since the
  528. Unix epoch (00:00:00 UTC on 1 January 1970).
  529. To delete a property, set it to null without a type:
  530. prop=null.
  531. To set a property to the string value 'null', use the assignment
  532. (string)prop4=null.
  533. """
  534. name = 'set'
  535. def __init__(self, parser):
  536. parser.add_argument('asset_id', help='ID of the asset to update.')
  537. _add_property_flags(parser)
  538. def run(self, args, config):
  539. """Runs the asset update."""
  540. config.ee_init()
  541. properties = _decode_property_flags(args)
  542. if not properties and args.time_start is None and args.time_end is None:
  543. raise ee.EEException('No properties specified.')
  544. update_mask = [
  545. 'properties.' + property_name for property_name in properties
  546. ]
  547. asset = {}
  548. if properties:
  549. asset['properties'] = {
  550. k: v for k, v in properties.items() if v is not None
  551. }
  552. # args.time_start and .time_end could have any of three falsy values, with
  553. # different meanings:
  554. # None: the --time_start flag was not provided at all
  555. # '': the --time_start flag was explicitly set to the empty string
  556. # 0: the --time_start flag was explicitly set to midnight 1 Jan 1970.
  557. # pylint:disable=g-explicit-bool-comparison
  558. if args.time_start is not None:
  559. update_mask.append('start_time')
  560. if args.time_start != '':
  561. asset['start_time'] = _cloud_timestamp_for_timestamp_ms(
  562. args.time_start)
  563. if args.time_end is not None:
  564. update_mask.append('end_time')
  565. if args.time_end != '':
  566. asset['end_time'] = _cloud_timestamp_for_timestamp_ms(args.time_end)
  567. # pylint:enable=g-explicit-bool-comparison
  568. ee.data.updateAsset(args.asset_id, asset, update_mask)
  569. return
  570. class AssetCommand(Dispatcher):
  571. """Prints or updates metadata associated with an Earth Engine asset."""
  572. name = 'asset'
  573. COMMANDS = [
  574. AssetInfoCommand,
  575. AssetSetCommand,
  576. ]
  577. class CopyCommand(object):
  578. """Creates a new Earth Engine asset as a copy of another asset."""
  579. name = 'cp'
  580. def __init__(self, parser):
  581. parser.add_argument(
  582. 'source', help='Full path of the source asset.')
  583. parser.add_argument(
  584. 'destination', help='Full path of the destination asset.')
  585. _add_overwrite_arg(parser)
  586. def run(self, args, config):
  587. """Runs the asset copy."""
  588. config.ee_init()
  589. ee.data.copyAsset(
  590. args.source,
  591. args.destination,
  592. args.force
  593. )
  594. class CreateCommandBase(object):
  595. """Base class for implementing Create subcommands."""
  596. def __init__(self, parser, fragment, asset_type):
  597. parser.add_argument(
  598. 'asset_id', nargs='+',
  599. help='Full path of %s to create.' % fragment)
  600. parser.add_argument(
  601. '--parents', '-p', action='store_true',
  602. help='Make parent folders as needed.')
  603. self.asset_type = asset_type
  604. def run(self, args, config):
  605. config.ee_init()
  606. ee.data.create_assets(args.asset_id, self.asset_type, args.parents)
class CreateCollectionCommand(CreateCommandBase):
  """Creates one or more image collections."""

  name = 'collection'

  def __init__(self, parser):
    # Delegates to the base class with collection-specific help wording
    # and the image-collection asset type.
    super(CreateCollectionCommand, self).__init__(
        parser, 'an image collection', ee.data.ASSET_TYPE_IMAGE_COLL)
class CreateFolderCommand(CreateCommandBase):
  """Creates one or more folders."""

  name = 'folder'

  def __init__(self, parser):
    # Delegates to the base class with folder-specific help wording
    # and the folder asset type.
    super(CreateFolderCommand, self).__init__(
        parser, 'a folder', ee.data.ASSET_TYPE_FOLDER)
class CreateCommand(Dispatcher):
  """Creates assets and folders."""

  # CLI token for this command group: 'earthengine create <subcommand>'.
  name = 'create'

  # Subcommands handled by the Dispatcher base class.
  COMMANDS = [
      CreateCollectionCommand,
      CreateFolderCommand,
  ]
class ListCommand(object):
  """Prints the contents of a folder or collection."""

  name = 'ls'

  def __init__(self, parser):
    parser.add_argument(
        'asset_id', nargs='*',
        help='A folder or image collection to be inspected.')
    parser.add_argument(
        '--long_format',
        '-l',
        action='store_true',
        help='Print output in long format.')
    parser.add_argument(
        '--max_items', '-m', default=-1, type=int,
        help='Maximum number of items to list for each collection.')
    parser.add_argument(
        '--recursive',
        '-r',
        action='store_true',
        help='List folders recursively.')
    parser.add_argument(
        '--filter', '-f', default='', type=str,
        help='Filter string to pass to ee.ImageCollection.filter().')

  def run(self, args, config):
    """Lists each requested asset, or the user's asset roots if none given."""
    config.ee_init()
    if not args.asset_id:
      # No assets were named: list the contents of the user's asset roots.
      roots = ee.data.getAssetRoots()
      self._print_assets(roots, args.max_items, '', args.long_format,
                         args.recursive)
      return
    assets = args.asset_id
    count = 0
    for asset in assets:
      if count > 0:
        # Separate consecutive listings with a blank line.
        print()
      self._list_asset_content(
          asset, args.max_items, len(assets), args.long_format,
          args.recursive, args.filter)
      count += 1

  def _print_assets(self, assets, max_items, indent, long_format, recursive):
    """Prints the listing of given assets.

    Args:
      assets: List of asset description dicts with 'type' and 'id' keys.
      max_items: Per-collection item limit, passed through to recursive calls.
      indent: String prefixed to each printed line.
      long_format: If True, prefix each id with its bracketed asset type.
      recursive: If True, also list the children of folder assets.
    """
    if not assets:
      return
    max_type_length = max([len(asset['type']) for asset in assets])
    if recursive:
      # fallback to max to include the string 'ImageCollection'
      max_type_length = ee.data.MAX_TYPE_LENGTH
    # Type column is padded to the widest type name plus 4 spaces.
    format_str = '%s{:%ds}{:s}' % (indent, max_type_length + 4)
    for asset in assets:
      if long_format:
        # Example output:
        # [Image] user/test/my_img
        # [ImageCollection] user/test/my_coll
        print(format_str.format('['+asset['type']+']', asset['id']))
      else:
        print(asset['id'])
      if recursive and asset['type'] in (ee.data.ASSET_TYPE_FOLDER,
                                         ee.data.ASSET_TYPE_FOLDER_CLOUD):
        list_req = {'id': asset['id']}
        children = ee.data.getList(list_req)
        self._print_assets(children, max_items, indent, long_format, recursive)

  def _list_asset_content(self, asset, max_items, total_assets, long_format,
                          recursive, filter_string):
    """Fetches and prints the children of one asset.

    Args:
      asset: The asset id to list.
      max_items: Max children to request; negative means no limit.
      total_assets: How many assets the whole command is listing; when more
        than one, each listing gets an 'asset:' header and is indented.
      long_format: If True, include asset types in the output.
      recursive: If True, list folder contents recursively.
      filter_string: Optional filter passed through in the list request.
    """
    try:
      list_req = {'id': asset}
      if max_items >= 0:
        list_req['num'] = max_items
      if filter_string:
        list_req['filter'] = filter_string
      children = ee.data.getList(list_req)
      indent = ''
      if total_assets > 1:
        print('%s:' % asset)
        indent = '  '
      self._print_assets(children, max_items, indent, long_format, recursive)
    except ee.EEException as e:
      # Report per-asset failures without aborting the remaining listings.
      print(e)
class SizeCommand(object):
  """Prints the size and names of all items in a given folder or collection."""

  name = 'du'

  def __init__(self, parser):
    parser.add_argument(
        'asset_id',
        nargs='*',
        help='A folder or image collection to be inspected.')
    parser.add_argument(
        '--summarize', '-s', action='store_true',
        help='Display only a total.')

  def run(self, args, config):
    """Runs the du command."""
    config.ee_init()
    # Select all available asset roots if no asset ids are given.
    if not args.asset_id:
      assets = ee.data.getAssetRoots()
    else:
      assets = [ee.data.getInfo(asset) for asset in args.asset_id]
    # If args.summarize is True, list size+name for every leaf child asset,
    # and show totals for non-leaf children.
    # If args.summarize is False, print sizes of all children.
    for index, asset in enumerate(assets):
      if args.asset_id and not asset:
        # getInfo returned a falsy value: the id could not be resolved.
        asset_id = args.asset_id[index]
        print('Asset does not exist or is not accessible: %s' % asset_id)
        continue
      # Both legacy and Cloud-API type constants mark container assets.
      is_parent = asset['type'] in (
          ee.data.ASSET_TYPE_FOLDER,
          ee.data.ASSET_TYPE_IMAGE_COLL,
          ee.data.ASSET_TYPE_FOLDER_CLOUD,
          ee.data.ASSET_TYPE_IMAGE_COLL_CLOUD,
      )
      if not is_parent or args.summarize:
        self._print_size(asset)
      else:
        children = ee.data.getList({'id': asset['id']})
        if not children:
          # A leaf asset
          children = [asset]
        for child in children:
          self._print_size(child)

  def _print_size(self, asset):
    # Right-align the byte count in a 16-character column before the id.
    size = self._get_size(asset)
    print('{:>16d} {}'.format(size, asset['id']))

  def _get_size(self, asset):
    """Returns the size of the given asset in bytes."""
    # Dispatch on the asset type string; legacy names ('Image') and
    # Cloud API names ('IMAGE') are both accepted.
    size_parsers = {
        'Image': self._get_size_asset,
        'Folder': self._get_size_folder,
        'ImageCollection': self._get_size_image_collection,
        'Table': self._get_size_asset,
        'IMAGE': self._get_size_asset,
        'FOLDER': self._get_size_folder,
        'IMAGE_COLLECTION': self._get_size_image_collection,
        'TABLE': self._get_size_asset,
    }
    if asset['type'] not in size_parsers:
      raise ee.EEException(
          'Cannot get size for asset type "%s"' % asset['type'])
    return size_parsers[asset['type']](asset)

  def _get_size_asset(self, asset):
    """Returns the size of a leaf asset (image or table) in bytes."""
    info = ee.data.getInfo(asset['id'])
    # Prefer the Cloud API 'sizeBytes' field; fall back to the legacy
    # 'system:asset_size' property.
    if 'sizeBytes' in info:
      return int(info['sizeBytes'])
    return info['properties']['system:asset_size']

  def _get_size_folder(self, asset):
    """Returns the total size of all of a folder's children, recursively."""
    children = ee.data.getList({'id': asset['id']})
    sizes = [self._get_size(child) for child in children]
    return sum(sizes)

  def _get_size_image_collection(self, asset):
    """Returns the total size of an image collection's images in bytes."""
    images = ee.ImageCollection(asset['id'])
    sizes = images.aggregate_array('system:asset_size')
    return sum(sizes.getInfo())
  777. class MoveCommand(object):
  778. """Moves or renames an Earth Engine asset."""
  779. name = 'mv'
  780. def __init__(self, parser):
  781. parser.add_argument(
  782. 'source', help='Full path of the source asset.')
  783. parser.add_argument(
  784. 'destination', help='Full path of the destination asset.')
  785. def run(self, args, config):
  786. config.ee_init()
  787. ee.data.renameAsset(args.source, args.destination)
  788. class RmCommand(object):
  789. """Deletes the specified assets."""
  790. name = 'rm'
  791. def __init__(self, parser):
  792. parser.add_argument(
  793. 'asset_id', nargs='+', help='Full path of an asset to delete.')
  794. parser.add_argument(
  795. '--recursive', '-r', action='store_true',
  796. help='Recursively delete child assets.')
  797. parser.add_argument(
  798. '--dry_run', action='store_true',
  799. help=('Perform a dry run of the delete operation. Does not '
  800. 'delete any assets.'))
  801. parser.add_argument(
  802. '--verbose', '-v', action='store_true',
  803. help='Print the progress of the operation to the console.')
  804. def run(self, args, config):
  805. config.ee_init()
  806. for asset in args.asset_id:
  807. self._delete_asset(asset, args.recursive, args.verbose, args.dry_run)
  808. def _delete_asset(self, asset_id, recursive, verbose, dry_run):
  809. """Attempts to delete the specified asset or asset collection."""
  810. if recursive:
  811. info = ee.data.getInfo(asset_id)
  812. if info is None:
  813. print('Asset does not exist or is not accessible: %s' % asset_id)
  814. return
  815. if info['type'] in (ee.data.ASSET_TYPE_FOLDER,
  816. ee.data.ASSET_TYPE_IMAGE_COLL,
  817. ee.data.ASSET_TYPE_FOLDER_CLOUD,
  818. ee.data.ASSET_TYPE_IMAGE_COLL_CLOUD):
  819. children = ee.data.getList({'id': asset_id})
  820. for child in children:
  821. self._delete_asset(child['id'], True, verbose, dry_run)
  822. if dry_run:
  823. print('[dry-run] Deleting asset: %s' % asset_id)
  824. else:
  825. if verbose:
  826. print('Deleting asset: %s' % asset_id)
  827. try:
  828. ee.data.deleteAsset(asset_id)
  829. except ee.EEException as e:
  830. print('Failed to delete %s. %s' % (asset_id, e))
  831. class TaskCancelCommand(object):
  832. """Cancels a running task."""
  833. name = 'cancel'
  834. def __init__(self, parser):
  835. parser.add_argument(
  836. 'task_ids', nargs='+',
  837. help='IDs of one or more tasks to cancel,'
  838. ' or `all` to cancel all tasks.')
  839. def run(self, args, config):
  840. config.ee_init()
  841. cancel_all = args.task_ids == ['all']
  842. if cancel_all:
  843. statuses = ee.data.getTaskList()
  844. else:
  845. statuses = ee.data.getTaskStatus(args.task_ids)
  846. for status in statuses:
  847. state = status['state']
  848. task_id = status['id']
  849. if state == 'UNKNOWN':
  850. raise ee.EEException('Unknown task id "%s"' % task_id)
  851. elif state == 'READY' or state == 'RUNNING':
  852. print('Canceling task "%s"' % task_id)
  853. ee.data.cancelTask(task_id)
  854. elif not cancel_all:
  855. print('Task "%s" already in state "%s".' % (status['id'], state))
class TaskInfoCommand(object):
  """Prints information about a task."""

  name = 'info'

  def __init__(self, parser):
    parser.add_argument('task_id', nargs='*', help='ID of a task to get.')

  def run(self, args, config):
    """Prints a status report for each requested task id."""
    config.ee_init()
    for i, status in enumerate(ee.data.getTaskStatus(args.task_id)):
      if i:
        # Blank line between consecutive task reports.
        print()
      print('%s:' % status['id'])
      print('  State: %s' % status['state'])
      if status['state'] == 'UNKNOWN':
        # Nothing more is known about an unrecognized task id.
        continue
      print('  Type: %s' % TASK_TYPES.get(status.get('task_type'), 'Unknown'))
      print('  Description: %s' % status.get('description'))
      print('  Created: %s' % _parse_millis(status['creation_timestamp_ms']))
      # The remaining fields are optional and printed only when present.
      if 'start_timestamp_ms' in status:
        print('  Started: %s' % _parse_millis(status['start_timestamp_ms']))
      if 'update_timestamp_ms' in status:
        print('  Updated: %s' % _parse_millis(status['update_timestamp_ms']))
      if 'error_message' in status:
        print('  Error: %s' % status['error_message'])
      if 'destination_uris' in status:
        print('  Destination URIs: %s' % ', '.join(status['destination_uris']))
class TaskListCommand(object):
  """Lists the tasks submitted recently."""

  name = 'list'

  def __init__(self, parser):
    parser.add_argument(
        '--status', '-s', required=False, nargs='*',
        choices=['READY', 'RUNNING', 'COMPLETED', 'FAILED',
                 'CANCELLED', 'UNKNOWN'],
        help=('List tasks only with a given status'))
    parser.add_argument(
        '--long_format',
        '-l',
        action='store_true',
        help=('Print output in long format. Extra columns are: creation time, '
              'start time, update time, EECU-seconds, output URLs.')
    )

  def run(self, args, config):
    """Lists tasks present for a user, maybe filtering by state."""
    config.ee_init()
    status = args.status
    tasks = ee.data.getTaskList()
    # Pre-truncate all descriptions to size the description column.
    descs = [utils.truncate(task.get('description', ''), 40) for task in tasks]
    desc_length = max((len(word) for word in descs), default=0)
    # Columns: id, type, description, state, error message.
    format_str = '{:25s} {:13s} {:%ds} {:10s} {:s}' % (desc_length + 1)
    for task in tasks:
      if status and task['state'] not in status:
        continue
      truncated_desc = utils.truncate(task.get('description', ''), 40)
      task_type = TASK_TYPES.get(task['task_type'], 'Unknown')
      extra = ''
      if args.long_format:
        # Long-format extras: created/started/updated timestamps,
        # EECU-seconds ('-' when the usage field is absent), and URIs.
        show_date = lambda ms: _parse_millis(ms).strftime('%Y-%m-%d %H:%M:%S')
        eecu = '{:.4f}'.format(
            task['batch_eecu_usage_seconds']
        ) if 'batch_eecu_usage_seconds' in task else '-'
        extra = ' {:20s} {:20s} {:20s} {:11s} {}'.format(
            show_date(task['creation_timestamp_ms']),
            show_date(task['start_timestamp_ms']),
            show_date(task['update_timestamp_ms']),
            eecu,
            ' '.join(task.get('destination_uris', [])))
      print(format_str.format(
          task['id'], task_type, truncated_desc,
          task['state'], task.get('error_message', '---')) + extra)
class TaskWaitCommand(object):
  """Waits for the specified task or tasks to complete."""

  name = 'wait'

  def __init__(self, parser):
    parser.add_argument(
        '--timeout', '-t', default=sys.maxsize, type=int,
        help=('Stop waiting for the task(s) to finish after the specified,'
              ' number of seconds. Without this flag, the command will wait'
              ' indefinitely.'))
    parser.add_argument('--verbose', '-v', action='store_true',
                        help=('Print periodic status messages for each'
                              ' incomplete task.'))
    parser.add_argument('task_ids', nargs='+',
                        help=('Either a list of one or more currently-running'
                              ' task ids to wait on; or \'all\' to wait on all'
                              ' running tasks.'))

  def run(self, args, config):
    """Waits on the given tasks to complete or for a timeout to pass."""
    config.ee_init()
    task_ids = []
    if args.task_ids == ['all']:
      # 'all' expands to every task not yet in a terminal state.
      tasks = ee.data.getTaskList()
      for task in tasks:
        if task['state'] not in utils.TASK_FINISHED_STATES:
          task_ids.append(task['id'])
    else:
      # Validate the explicitly named ids before waiting on them.
      statuses = ee.data.getTaskStatus(args.task_ids)
      for status in statuses:
        state = status['state']
        task_id = status['id']
        if state == 'UNKNOWN':
          raise ee.EEException('Unknown task id "%s"' % task_id)
        else:
          task_ids.append(task_id)
    utils.wait_for_tasks(task_ids, args.timeout, log_progress=args.verbose)
class TaskCommand(Dispatcher):
  """Prints information about or manages long-running tasks."""

  # CLI token for this command group: 'earthengine task <subcommand>'.
  name = 'task'

  # Subcommands handled by the Dispatcher base class.
  COMMANDS = [
      TaskCancelCommand,
      TaskInfoCommand,
      TaskListCommand,
      TaskWaitCommand,
  ]
  969. # TODO(user): in both upload tasks, check if the parent namespace
  970. # exists and is writeable first.
class UploadImageCommand(object):
  """Uploads an image from Cloud Storage to Earth Engine.

  See docs for "asset set" for additional details on how to specify asset
  metadata properties.
  """

  name = 'image'

  def __init__(self, parser):
    _add_wait_arg(parser)
    _add_overwrite_arg(parser)
    parser.add_argument(
        'src_files',
        help=('Cloud Storage URL(s) of the file(s) to upload. '
              'Must have the prefix \'gs://\'.'),
        nargs='*')
    parser.add_argument(
        '--asset_id',
        help='Destination asset ID for the uploaded file.')
    parser.add_argument(
        '--last_band_alpha',
        help='Use the last band as a masking channel for all bands. '
        'Mutually exclusive with nodata_value.',
        action='store_true')
    parser.add_argument(
        '--nodata_value',
        help='Value for missing data. '
        'Mutually exclusive with last_band_alpha.',
        type=_comma_separated_numbers)
    parser.add_argument(
        '--pyramiding_policy',
        help='The pyramid reduction policy to use',
        type=_comma_separated_pyramiding_policies)
    parser.add_argument(
        '--bands',
        help='Comma-separated list of names to use for the image bands.',
        type=_comma_separated_strings)
    parser.add_argument(
        '--crs',
        help='The coordinate reference system, to override the map projection '
        'of the image. May be either a well-known authority code (e.g. '
        'EPSG:4326) or a WKT string.')
    parser.add_argument(
        '--manifest',
        help='Local path to a JSON asset manifest file. No other flags are '
        'used if this flag is set.')
    _add_property_flags(parser)

  def _check_num_bands(self, bands, num_bands, flag_name):
    """Checks the number of bands, creating them if there are none yet.

    Args:
      bands: The current list of band names; may be empty or None.
      num_bands: The band count implied by the flag named flag_name.
      flag_name: The flag name, used only in the error message.

    Returns:
      The validated band list, or generated names 'b1'..'bN' when none given.

    Raises:
      ValueError: If bands was given but its length differs from num_bands.
    """
    if bands:
      if len(bands) != num_bands:
        raise ValueError(
            'Inconsistent number of bands in --{}: expected {} but found {}.'
            .format(flag_name, len(bands), num_bands))
    else:
      bands = ['b%d' % (i + 1) for i in range(num_bands)]
    return bands

  def run(self, args, config):
    """Starts the upload task, and waits for completion if requested."""
    config.ee_init()
    manifest = self.manifest_from_args(args)
    _upload(args, manifest, ee.data.startIngestion)

  def manifest_from_args(self, args):
    """Constructs an upload manifest from the command-line flags."""

    def is_tf_record(path):
      # True when the file extension marks the source as a TFRecord.
      if any(path.lower().endswith(extension)
             for extension in TF_RECORD_EXTENSIONS):
        return True
      return False

    if args.manifest:
      # A manifest file overrides every other flag.
      with open(args.manifest) as fh:
        return json.loads(fh.read())
    if not args.asset_id:
      raise ValueError('Flag --asset_id must be set.')
    _check_valid_files(args.src_files)
    if args.last_band_alpha and args.nodata_value:
      raise ValueError(
          'last_band_alpha and nodata_value are mutually exclusive.')
    properties = _decode_property_flags(args)
    source_files = list(utils.expand_gcs_wildcards(args.src_files))
    if not source_files:
      raise ValueError('At least one file must be specified.')
    bands = args.bands
    # Multi-valued per-band flags imply a band count; validate (or
    # auto-generate) the band list against it.
    if args.pyramiding_policy and len(args.pyramiding_policy) != 1:
      bands = self._check_num_bands(bands, len(args.pyramiding_policy),
                                    'pyramiding_policy')
    if args.nodata_value and len(args.nodata_value) != 1:
      bands = self._check_num_bands(bands, len(args.nodata_value),
                                    'nodata_value')
    args.asset_id = ee.data.convert_asset_id_to_asset_name(args.asset_id)
    # If we are ingesting a tfrecord, we actually treat the inputs as one
    # source and many uris.
    if any(is_tf_record(source) for source in source_files):
      tileset = {
          'id': 'ts',
          'sources': [{'uris': [source for source in source_files]}]
      }
    else:
      tileset = {
          'id': 'ts',
          'sources': [{'uris': [source]} for source in source_files]
      }
    if args.crs:
      tileset['crs'] = args.crs
    manifest = {
        'name': args.asset_id,
        'properties': properties,
        'tilesets': [tileset]
    }
    # time_start/time_end may be None (flag absent), '' (explicitly cleared)
    # or 0 (the epoch), so the comparison against '' must be explicit.
    # pylint:disable=g-explicit-bool-comparison
    if args.time_start is not None and args.time_start != '':
      manifest['start_time'] = _cloud_timestamp_for_timestamp_ms(
          args.time_start)
    if args.time_end is not None and args.time_end != '':
      manifest['end_time'] = _cloud_timestamp_for_timestamp_ms(args.time_end)
    # pylint:enable=g-explicit-bool-comparison
    if bands:
      file_bands = []
      for i, band in enumerate(bands):
        file_bands.append({
            'id': band,
            'tilesetId': tileset['id'],
            'tilesetBandIndex': i
        })
      manifest['bands'] = file_bands
    if args.pyramiding_policy:
      # A single policy applies asset-wide; multiple policies map band by band.
      if len(args.pyramiding_policy) == 1:
        manifest['pyramidingPolicy'] = args.pyramiding_policy[0]
      else:
        for index, policy in enumerate(args.pyramiding_policy):
          file_bands[index]['pyramidingPolicy'] = policy
    if args.nodata_value:
      # Same single-vs-per-band split as the pyramiding policy.
      if len(args.nodata_value) == 1:
        manifest['missingData'] = {'values': [args.nodata_value[0]]}
      else:
        for index, value in enumerate(args.nodata_value):
          file_bands[index]['missingData'] = {'values': [value]}
    if args.last_band_alpha:
      manifest['maskBands'] = {'tilesetId': tileset['id']}
    return manifest
  1109. # TODO(user): update src_files help string when secondary files
  1110. # can be uploaded.
class UploadTableCommand(object):
  """Uploads a table from Cloud Storage to Earth Engine."""

  name = 'table'

  def __init__(self, parser):
    _add_wait_arg(parser)
    _add_overwrite_arg(parser)
    parser.add_argument(
        'src_file',
        help=('Cloud Storage URL of the .csv, .tfrecord, .shp, or '
              '.zip file to upload. Must have the prefix \'gs://\'. For '
              '.shp files, related .dbf, .shx, and .prj files must be '
              'present in the same location.'),
        nargs='*')
    parser.add_argument(
        '--asset_id',
        help='Destination asset ID for the uploaded file.')
    _add_property_flags(parser)
    parser.add_argument(
        '--max_error',
        help='Max allowed error in meters when transforming geometry '
        'between coordinate systems.',
        type=float, nargs='?')
    parser.add_argument(
        '--max_vertices',
        help='Max number of vertices per geometry. If set, geometry will be '
        'subdivided into spatially disjoint pieces each under this limit.',
        type=int, nargs='?')
    parser.add_argument(
        '--max_failed_features',
        help='The maximum number of failed features to allow during ingestion.',
        type=int, nargs='?')
    parser.add_argument(
        '--crs',
        help='The default CRS code or WKT string specifying the coordinate '
        'reference system of any geometry without one. If unspecified, '
        'the default will be EPSG:4326 (https://epsg.io/4326). For '
        'CSV/TFRecord only.')
    parser.add_argument(
        '--geodesic',
        help='The default strategy for interpreting edges in geometries that '
        'do not have one specified. If false, edges are '
        'straight in the projection. If true, edges are curved to follow '
        'the shortest path on the surface of the Earth. When '
        'unspecified, defaults to false if \'crs\' is a projected '
        'coordinate system. For CSV/TFRecord only.',
        action='store_true')
    parser.add_argument(
        '--primary_geometry_column',
        help='The geometry column to use as a row\'s primary geometry when '
        'there is more than one geometry column. If unspecified and more '
        'than one geometry column exists, the first geometry column '
        'is used. For CSV/TFRecord only.')
    parser.add_argument(
        '--x_column',
        help='The name of the numeric x coordinate column for constructing '
        'point geometries. If the y_column is also specified, and both '
        'columns contain numerical values, then a point geometry column '
        'will be constructed with x,y values in the coordinate system '
        'given in \'--crs\'. If unspecified and \'--crs\' does _not_ '
        'specify a projected coordinate system, defaults to "longitude". '
        'If unspecified and \'--crs\' _does_ specify a projected '
        'coordinate system, defaults to "" and no point geometry is '
        'generated. A generated point geometry column will be named '
        '{x_column}_{y_column}_N where N might be appended to '
        'disambiguate the column name. For CSV/TFRecord only.')
    parser.add_argument(
        '--y_column',
        help='The name of the numeric y coordinate column for constructing '
        'point geometries. If the x_column is also specified, and both '
        'columns contain numerical values, then a point geometry column '
        'will be constructed with x,y values in the coordinate system '
        'given in \'--crs\'. If unspecified and \'--crs\' does _not_ '
        'specify a projected coordinate system, defaults to "latitude". '
        'If unspecified and \'--crs\' _does_ specify a projected '
        'coordinate system, defaults to "" and no point geometry is '
        'generated. A generated point geometry column will be named '
        '{x_column}_{y_column}_N where N might be appended to '
        'disambiguate the column name. For CSV/TFRecord only.')
    parser.add_argument(
        '--date_format',
        help='A format used to parse dates. The format pattern must follow '
        'http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html. '
        'If unspecified, dates will be imported as strings. For '
        'CSV/TFRecord only.')
    parser.add_argument(
        '--csv_delimiter',
        help='A single character used as a delimiter between column values '
        'in a row. If unspecified, defaults to \',\'. For CSV only.')
    parser.add_argument(
        '--csv_qualifier',
        help='A character that surrounds column values (a.k.a. '
        '\'quote character\'). If unspecified, defaults to \'"\'. A '
        'column value may include the qualifier as a literal character by '
        'having 2 consecutive qualifier characters. For CSV only.')
    parser.add_argument(
        '--manifest',
        help='Local path to a JSON asset manifest file. No other flags are '
        'used if this flag is set.')

  def run(self, args, config):
    """Starts the upload task, and waits for completion if requested."""
    config.ee_init()
    manifest = self.manifest_from_args(args)
    _upload(args, manifest, ee.data.startTableIngestion)

  def manifest_from_args(self, args):
    """Constructs an upload manifest from the command-line flags."""
    if args.manifest:
      # A manifest file overrides every other flag.
      with open(args.manifest) as fh:
        return json.loads(fh.read())
    if not args.asset_id:
      raise ValueError('Flag --asset_id must be set.')
    _check_valid_files(args.src_file)
    source_files = list(utils.expand_gcs_wildcards(args.src_file))
    if len(source_files) != 1:
      raise ValueError('Exactly one file must be specified.')
    properties = _decode_property_flags(args)
    args.asset_id = ee.data.convert_asset_id_to_asset_name(args.asset_id)
    # Optional source-level settings are added only when their flag was given.
    source = {'uris': source_files}
    if args.max_error:
      source['maxErrorMeters'] = args.max_error
    if args.max_vertices:
      source['maxVertices'] = args.max_vertices
    if args.max_failed_features:
      # Rejected outright rather than silently ignored.
      raise ee.EEException(
          '--max_failed_features is not supported with the Cloud API')
    if args.crs:
      source['crs'] = args.crs
    if args.geodesic:
      source['geodesic'] = args.geodesic
    if args.primary_geometry_column:
      source['primary_geometry_column'] = args.primary_geometry_column
    if args.x_column:
      source['x_column'] = args.x_column
    if args.y_column:
      source['y_column'] = args.y_column
    if args.date_format:
      source['date_format'] = args.date_format
    if args.csv_delimiter:
      source['csv_delimiter'] = args.csv_delimiter
    if args.csv_qualifier:
      source['csv_qualifier'] = args.csv_qualifier
    manifest = {
        'name': args.asset_id,
        'sources': [source],
        'properties': properties
    }
    # time_start/time_end may be None (flag absent), '' (explicitly cleared)
    # or 0 (the epoch), so the comparison against '' must be explicit.
    # pylint:disable=g-explicit-bool-comparison
    if args.time_start is not None and args.time_start != '':
      manifest['start_time'] = _cloud_timestamp_for_timestamp_ms(
          args.time_start)
    if args.time_end is not None and args.time_end != '':
      manifest['end_time'] = _cloud_timestamp_for_timestamp_ms(args.time_end)
    # pylint:enable=g-explicit-bool-comparison
    return manifest
class UploadCommand(Dispatcher):
  """Uploads assets to Earth Engine."""

  # CLI token for this command group: 'earthengine upload <subcommand>'.
  name = 'upload'

  # Subcommands handled by the Dispatcher base class.
  COMMANDS = [
      UploadImageCommand,
      UploadTableCommand,
  ]
  1271. class _UploadManifestBase(object):
  1272. """Uploads an asset to Earth Engine using the given manifest file."""
  1273. def __init__(self, parser):
  1274. _add_wait_arg(parser)
  1275. _add_overwrite_arg(parser)
  1276. parser.add_argument(
  1277. 'manifest',
  1278. help=('Local path to a JSON asset manifest file.'))
  1279. def run(self, args, config, ingestion_function):
  1280. """Starts the upload task, and waits for completion if requested."""
  1281. config.ee_init()
  1282. with open(args.manifest) as fh:
  1283. manifest = json.loads(fh.read())
  1284. _upload(args, manifest, ingestion_function)
class UploadImageManifestCommand(_UploadManifestBase):
  """Uploads an image to Earth Engine using the given manifest file."""

  name = 'upload_manifest'

  def run(self, args, config):
    """Starts the upload task, and waits for completion if requested."""
    # Deprecated alias kept for backward compatibility; warn, then delegate
    # to the shared manifest-upload base class.
    print(
        'This command is deprecated. '
        'Use "earthengine upload image --manifest".'
    )
    super(UploadImageManifestCommand, self).run(
        args, config, ee.data.startIngestion)
class UploadTableManifestCommand(_UploadManifestBase):
  """Uploads a table to Earth Engine using the given manifest file."""

  name = 'upload_table_manifest'

  def run(self, args, config):
    """Warns about deprecation, then starts the table upload."""
    # Deprecated alias kept for backward compatibility; warn, then delegate
    # to the shared manifest-upload base class.
    print(
        'This command is deprecated. '
        'Use "earthengine upload table --manifest".'
    )
    super(UploadTableManifestCommand, self).run(
        args, config, ee.data.startTableIngestion)
  1306. class LicensesCommand(object):
  1307. """Prints the name and license of all third party dependencies."""
  1308. name = 'licenses'
  1309. def __init__(self, unused_parser):
  1310. pass
  1311. def run(self, unused_args, unused_config):
  1312. print('The Earth Engine python client library uess the following opensource'
  1313. ' libraries.\n')
  1314. license_path = os.path.join(os.path.dirname(__file__), 'licenses.txt')
  1315. print(open(license_path).read())
  1316. class PrepareModelCommand(object):
  1317. """Prepares a TensorFlow/Keras SavedModel for inference with Earth Engine.
  1318. This is required only if a model is manually uploaded to Cloud AI Platform
  1319. (https://cloud.google.com/ai-platform/) for predictions.
  1320. """
  1321. name = 'prepare'
  def __init__(self, parser):
    """Registers the 'prepare' command's flags on the given parser."""
    parser.add_argument(
        '--source_dir',
        help='The local or Cloud Storage path to directory containing the '
        'SavedModel.')
    parser.add_argument(
        '--dest_dir',
        help='The name of the directory to be created locally or in Cloud '
        'Storage that will contain the Earth Engine ready SavedModel.')
    parser.add_argument(
        '--input',
        help='A comma-delimited list of input node names that will map to '
        'Earth Engine Feature columns or Image bands for prediction, or a JSON '
        'dictionary specifying a remapping of input node names to names '
        'mapping to Feature columns or Image bands etc... (e.x: '
        '\'{"Conv2D:0":"my_landsat_band"}\'). The names of model inputs will '
        'be stripped of any trailing \'<:prefix>\'.')
    parser.add_argument(
        '--output',
        help='A comma-delimited list of output tensor names that will map to '
        'Earth Engine Feature columns or Image bands for prediction, or a JSON '
        'dictionary specifying a remapping of output node names to names '
        'mapping to Feature columns or Image bands etc... (e.x: '
        '\'{"Sigmoid:0":"my_predicted_class"}\'). The names of model outputs '
        'will be stripped of any trailing \'<:prefix>\'.')
    parser.add_argument(
        '--tag',
        help='An optional tag used to load a specific graph from the '
        'SavedModel. Defaults to \'serve\'.')
    parser.add_argument(
        '--variables',
        help='An optional relative path from within the source directory to '
        'the prefix of the model variables. (e.x: if the model variables are '
        'stored under \'model_dir/variables/x.*\', set '
        '--variables=/variables/x). Defaults to \'/variables/variables\'.')
  1357. @staticmethod
  1358. def _validate_and_extract_nodes(args):
  1359. """Validate command line args and extract in/out node mappings."""
  1360. if not args.source_dir:
  1361. raise ValueError('Flag --source_dir must be set.')
  1362. if not args.dest_dir:
  1363. raise ValueError('Flag --dest_dir must be set.')
  1364. if not args.input:
  1365. raise ValueError('Flag --input must be set.')
  1366. if not args.output:
  1367. raise ValueError('Flag --output must be set.')
  1368. return (PrepareModelCommand._get_nodes(args.input, '--input'),
  1369. PrepareModelCommand._get_nodes(args.output, '--output'))
  1370. @staticmethod
  1371. def _get_nodes(node_spec, source_flag_name):
  1372. """Extract a node mapping from a list or flag-specified JSON."""
  1373. try:
  1374. spec = json.loads(node_spec)
  1375. except ValueError:
  1376. spec = [n.strip() for n in node_spec.split(',')]
  1377. return {item: item for item in spec}
  1378. if not isinstance(spec, dict):
  1379. raise ValueError(
  1380. 'If flag {} is JSON it must specify a dictionary.'.format(
  1381. source_flag_name))
  1382. for k, v in spec.items():
  1383. if ((not isinstance(k, str)) or (not isinstance(v, str))):
  1384. raise ValueError('All key/value pairs of the dictionary specified in '
  1385. '{} must be strings.'.format(source_flag_name))
  1386. return spec
  1387. @staticmethod
  1388. def _encode_op(output_tensor, name):
  1389. return tf.identity(
  1390. tf.map_fn(lambda x: tf.io.encode_base64(tf.serialize_tensor(x)),
  1391. output_tensor, tf.string),
  1392. name=name)
  1393. @staticmethod
  1394. def _decode_op(input_tensor, dtype):
  1395. mapped = tf.map_fn(lambda x: tf.parse_tensor(tf.io.decode_base64(x), dtype),
  1396. input_tensor, dtype)
  1397. return mapped
  1398. @staticmethod
  1399. def _shape_from_proto(shape_proto):
  1400. return [d.size for d in shape_proto.dim]
  1401. @staticmethod
  1402. def _strip_index(edge_name):
  1403. colon_pos = edge_name.rfind(':')
  1404. if colon_pos == -1:
  1405. return edge_name
  1406. else:
  1407. return edge_name[:colon_pos]
  1408. @staticmethod
  1409. def _get_input_tensor_spec(graph_def, input_names_set):
  1410. """Extracts the types of the given node names from the GraphDef."""
  1411. # Get the op names stripped of the input index e.g: "op:0" becomes "op"
  1412. input_names_missing_index = {
  1413. PrepareModelCommand._strip_index(i): i for i in input_names_set
  1414. }
  1415. spec = {}
  1416. for cur_node in graph_def.node:
  1417. if cur_node.name in input_names_missing_index:
  1418. if 'shape' not in cur_node.attr or 'dtype' not in cur_node.attr:
  1419. raise ValueError(
  1420. 'Specified input op is not a valid graph input: \'{}\'.'.format(
  1421. cur_node.name))
  1422. spec[input_names_missing_index[cur_node.name]] = tf.dtypes.DType(
  1423. cur_node.attr['dtype'].type)
  1424. if len(spec) != len(input_names_set):
  1425. raise ValueError(
  1426. 'Specified input ops were missing from graph: {}.'.format(
  1427. list(set(input_names_set).difference(list(spec.keys())))))
  1428. return spec
  1429. @staticmethod
  1430. def _make_rpc_friendly(model_dir, tag, in_map, out_map, vars_path):
  1431. """Wraps a SavedModel in EE RPC-friendly ops and saves a temporary copy."""
  1432. out_dir = tempfile.mkdtemp()
  1433. builder = tf.saved_model.Builder(out_dir)
  1434. # Get a GraphDef from the saved model
  1435. with tf.Session() as sesh:
  1436. meta_graph = tf.saved_model.load(sesh, [tag], model_dir)
  1437. graph_def = meta_graph.graph_def
  1438. # Purge the default graph immediately after: we want to remap parts of the
  1439. # graph when we load it and we don't know what those parts are yet.
  1440. tf.reset_default_graph()
  1441. input_op_keys = list(in_map.keys())
  1442. input_new_keys = list(in_map.values())
  1443. # Get the shape and type of the input tensors
  1444. in_op_types = PrepareModelCommand._get_input_tensor_spec(
  1445. graph_def, input_op_keys)
  1446. # Create new input placeholders to receive RPC TensorProto payloads
  1447. in_op_map = {
  1448. k: tf.placeholder(
  1449. tf.string, shape=[None], name='earthengine_in_{}'.format(i))
  1450. for (i, k) in enumerate(input_new_keys)
  1451. }
  1452. # Glue on decoding ops to remap to the imported graph.
  1453. decoded_op_map = {
  1454. k: PrepareModelCommand._decode_op(in_op_map[in_map[k]], in_op_types[k])
  1455. for k in input_op_keys
  1456. }
  1457. # Okay now we're ready to import the graph again but remapped.
  1458. saver = tf.train.import_meta_graph(
  1459. meta_graph_or_file=meta_graph, input_map=decoded_op_map)
  1460. # Boilerplate to build a signature def for our new graph
  1461. sig_in = {
  1462. PrepareModelCommand._strip_index(k):
  1463. saved_model_utils.build_tensor_info(v) for (k, v) in in_op_map.items()
  1464. }
  1465. sig_out = {}
  1466. for index, (k, v) in enumerate(out_map.items()):
  1467. out_tensor = saved_model_utils.build_tensor_info(
  1468. PrepareModelCommand._encode_op(
  1469. tf.get_default_graph().get_tensor_by_name(k),
  1470. name='earthengine_out_{}'.format(index)))
  1471. sig_out[PrepareModelCommand._strip_index(v)] = out_tensor
  1472. sig_def = signature_def_utils.build_signature_def(
  1473. sig_in, sig_out, signature_constants.PREDICT_METHOD_NAME)
  1474. # Open a new session to load the variables and add them to the builder.
  1475. with tf.Session() as sesh:
  1476. if saver:
  1477. saver.restore(sesh, model_dir + vars_path)
  1478. builder.add_meta_graph_and_variables(
  1479. sesh,
  1480. tags=[tf.saved_model.tag_constants.SERVING],
  1481. signature_def_map={
  1482. signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: sig_def
  1483. },
  1484. saver=saver)
  1485. builder.save()
  1486. return out_dir
  1487. def run(self, args, config):
  1488. """Wraps a SavedModel in EE RPC-friendly ops and saves a copy of it."""
  1489. ModelCommand.check_tensorflow_installed()
  1490. in_spec, out_spec = PrepareModelCommand._validate_and_extract_nodes(args)
  1491. gcs_client = None
  1492. if utils.is_gcs_path(args.source_dir):
  1493. # If the model isn't locally available, we have to make it available...
  1494. gcs_client = config.create_gcs_helper()
  1495. gcs_client.check_gcs_dir_within_size(args.source_dir,
  1496. SAVED_MODEL_MAX_SIZE)
  1497. local_model_dir = gcs_client.download_dir_to_temp(args.source_dir)
  1498. else:
  1499. local_model_dir = args.source_dir
  1500. tag = args.tag if args.tag else tf.saved_model.tag_constants.SERVING
  1501. vars_path = args.variables if args.variables else DEFAULT_VARIABLES_PREFIX
  1502. new_model_dir = PrepareModelCommand._make_rpc_friendly(
  1503. local_model_dir, tag, in_spec, out_spec, vars_path)
  1504. if utils.is_gcs_path(args.dest_dir):
  1505. if not gcs_client:
  1506. gcs_client = config.create_gcs_helper()
  1507. gcs_client.upload_dir_to_bucket(new_model_dir, args.dest_dir)
  1508. else:
  1509. shutil.move(new_model_dir, args.dest_dir)
  1510. print(
  1511. 'Success: model at \'{}\' is ready to be hosted in AI Platform.'.format(
  1512. args.dest_dir))
  1513. class ModelCommand(Dispatcher):
  1514. """TensorFlow model related commands."""
  1515. name = 'model'
  1516. COMMANDS = [PrepareModelCommand]
  1517. @staticmethod
  1518. def check_tensorflow_installed():
  1519. """Checks the status of TensorFlow installations."""
  1520. if not TENSORFLOW_INSTALLED:
  1521. raise ImportError(
  1522. 'By default, TensorFlow is not installed with Earth Engine client '
  1523. 'libraries. To use \'model\' commands, make sure at least TensorFlow '
  1524. '1.14 is installed; you can do this by executing \'pip install '
  1525. 'tensorflow\' in your shell.'
  1526. )
  1527. else:
  1528. if not TENSORFLOW_ADDONS_INSTALLED:
  1529. if sys.version_info[0] < 3:
  1530. print(
  1531. 'Warning: Python 3 required for TensorFlow Addons. Models that '
  1532. 'use non-standard ops may not work.')
  1533. else:
  1534. print(
  1535. 'Warning: TensorFlow Addons not found. Models that use '
  1536. 'non-standard ops may not work.')
# The command and dispatcher classes defined in this module; presumably
# consumed by the CLI entry point to register the top-level subcommands —
# confirm against the caller before reordering.
EXTERNAL_COMMANDS = [
    AuthenticateCommand,
    AclCommand,
    AssetCommand,
    CopyCommand,
    CreateCommand,
    ListCommand,
    LicensesCommand,
    SizeCommand,
    MoveCommand,
    ModelCommand,
    RmCommand,
    SetProjectCommand,
    TaskCommand,
    UnSetProjectCommand,
    UploadCommand,
    UploadImageManifestCommand,
    UploadTableManifestCommand,
]