dbfs.py
import base64
import urllib.parse

import requests
import requests.exceptions
from requests.adapters import HTTPAdapter, Retry

from fsspec import AbstractFileSystem
from fsspec.spec import AbstractBufferedFile


class DatabricksException(Exception):
    """
    Helper class for exceptions raised in this module.
    """

    def __init__(self, error_code, message):
        """Create a new DatabricksException"""
        super().__init__(message)

        self.error_code = error_code
        self.message = message


class DatabricksFileSystem(AbstractFileSystem):
    """
    Get access to the Databricks filesystem implementation over HTTP.
    Can be used inside and outside of a databricks cluster.
    """

    def __init__(self, instance, token, **kwargs):
        """
        Create a new DatabricksFileSystem.

        Parameters
        ----------
        instance: str
            The instance URL of the databricks cluster.
            For example for an Azure databricks cluster, this
            has the form adb-<some-number>.<two digits>.azuredatabricks.net.
        token: str
            Your personal token. Find out more
            here: https://docs.databricks.com/dev-tools/api/latest/authentication.html
        """
        self.instance = instance
        self.token = token

        self.session = requests.Session()
        self.retries = Retry(
            total=10,
            backoff_factor=0.05,
            status_forcelist=[408, 429, 500, 502, 503, 504],
        )

        self.session.mount("https://", HTTPAdapter(max_retries=self.retries))
        self.session.headers.update({"Authorization": f"Bearer {self.token}"})

        super().__init__(**kwargs)
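
    # Minimal usage sketch (the instance URL, token and paths below are
    # placeholders, not real credentials or data):
    #
    #     fs = DatabricksFileSystem(
    #         instance="adb-1234567890123456.7.azuredatabricks.net",
    #         token="<personal-access-token>",
    #     )
    #     fs.ls("/FileStore")
    #     with fs.open("/FileStore/example.txt", "wb") as f:
    #         f.write(b"some data")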

    def ls(self, path, detail=True, **kwargs):
        """
        List the contents of the given path.

        Parameters
        ----------
        path: str
            Absolute path
        detail: bool
            Return not only the list of filenames,
            but also additional information on file sizes
            and types.
        """
        out = self._ls_from_cache(path)
        if not out:
            try:
                r = self._send_to_api(
                    method="get", endpoint="list", json={"path": path}
                )
            except DatabricksException as e:
                if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                    raise FileNotFoundError(e.message) from e

                raise
            files = r["files"]
            out = [
                {
                    "name": o["path"],
                    "type": "directory" if o["is_dir"] else "file",
                    "size": o["file_size"],
                }
                for o in files
            ]
            self.dircache[path] = out

        if detail:
            return out
        return [o["name"] for o in out]
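
    # For illustration, the two return shapes (a hypothetical listing; the
    # paths and sizes are made up):
    #
    #     fs.ls("/FileStore", detail=False)
    #     # -> ["/FileStore/data.csv", "/FileStore/subdir"]
    #     fs.ls("/FileStore", detail=True)
    #     # -> [{"name": "/FileStore/data.csv", "type": "file", "size": 1024},
    #     #     {"name": "/FileStore/subdir", "type": "directory", "size": 0}]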

    def makedirs(self, path, exist_ok=True):
        """
        Create a given absolute path and all of its parents.

        Parameters
        ----------
        path: str
            Absolute path to create
        exist_ok: bool
            If false, checks if the folder
            exists before creating it (and raises an
            Exception if this is the case)
        """
        if not exist_ok:
            try:
                # If the following succeeds, the path is already present
                self._send_to_api(
                    method="get", endpoint="get-status", json={"path": path}
                )
                raise FileExistsError(f"Path {path} already exists")
            except DatabricksException as e:
                if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                    pass

        try:
            self._send_to_api(method="post", endpoint="mkdirs", json={"path": path})
        except DatabricksException as e:
            if e.error_code == "RESOURCE_ALREADY_EXISTS":
                raise FileExistsError(e.message) from e

            raise
        self.invalidate_cache(self._parent(path))

    def mkdir(self, path, create_parents=True, **kwargs):
        """
        Create a given absolute path and all of its parents.

        Parameters
        ----------
        path: str
            Absolute path to create
        create_parents: bool
            Whether to create all parents or not.
            "False" is not implemented so far.
        """
        if not create_parents:
            raise NotImplementedError

        self.mkdirs(path, **kwargs)

    def rm(self, path, recursive=False, **kwargs):
        """
        Remove the file or folder at the given absolute path.

        Parameters
        ----------
        path: str
            Absolute path to remove
        recursive: bool
            Recursively delete all files in a folder.
        """
        try:
            self._send_to_api(
                method="post",
                endpoint="delete",
                json={"path": path, "recursive": recursive},
            )
        except DatabricksException as e:
            if e.error_code == "PARTIAL_DELETE":
                # This is not really an exception, it just means
                # not everything was deleted so far - retry the rest
                self.rm(path=path, recursive=recursive)
            elif e.error_code == "IO_ERROR":
                # Using the same exception as the os module would use here
                raise OSError(e.message) from e
            else:
                raise
        self.invalidate_cache(self._parent(path))

    def mv(
        self, source_path, destination_path, recursive=False, maxdepth=None, **kwargs
    ):
        """
        Move a source to a destination path.

        A note from the original [databricks API manual]
        (https://docs.databricks.com/dev-tools/api/latest/dbfs.html#move):

        When moving a large number of files the API call will time out after
        approximately 60s, potentially resulting in partially moved data.
        Therefore, for operations that move more than 10k files, we strongly
        discourage using the DBFS REST API.

        Parameters
        ----------
        source_path: str
            From where to move (absolute path)
        destination_path: str
            To where to move (absolute path)
        recursive: bool
            Not implemented so far.
        maxdepth:
            Not implemented so far.
        """
        if recursive:
            raise NotImplementedError
        if maxdepth:
            raise NotImplementedError

        try:
            self._send_to_api(
                method="post",
                endpoint="move",
                json={
                    "source_path": source_path,
                    "destination_path": destination_path,
                },
            )
        except DatabricksException as e:
            if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                raise FileNotFoundError(e.message) from e
            elif e.error_code == "RESOURCE_ALREADY_EXISTS":
                raise FileExistsError(e.message) from e

            raise
        self.invalidate_cache(self._parent(source_path))
        self.invalidate_cache(self._parent(destination_path))

    def _open(self, path, mode="rb", block_size="default", **kwargs):
        """
        Override the base class method to make sure a DatabricksFile is created.
        All arguments are copied from the base method.

        Only the default blocksize is allowed.
        """
        return DatabricksFile(self, path, mode=mode, block_size=block_size, **kwargs)

    def _send_to_api(self, method, endpoint, json):
        """
        Send the given json to the DBFS API
        using a get or post request (specified by the argument `method`).

        Parameters
        ----------
        method: str
            Which http method to use for communication; "get" or "post".
        endpoint: str
            Where to send the request to (last part of the API URL)
        json: dict
            Dictionary of information to send
        """
        if method == "post":
            session_call = self.session.post
        elif method == "get":
            session_call = self.session.get
        else:
            raise ValueError(f"Do not understand method {method}")

        url = urllib.parse.urljoin(f"https://{self.instance}/api/2.0/dbfs/", endpoint)

        r = session_call(url, json=json)

        # The DBFS API will return a json, also in case of an exception.
        # We want to preserve this information as well as possible.
        try:
            r.raise_for_status()
        except requests.HTTPError as e:
            # try to extract json error message
            # if that fails, fall back to the original exception
            try:
                exception_json = e.response.json()
            except Exception:
                raise e from None

            raise DatabricksException(**exception_json) from e

        return r.json()
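
    # For example, a call such as the following (a sketch; "/FileStore" is just
    # an illustrative path) issues GET https://<instance>/api/2.0/dbfs/list with
    # the JSON body {"path": "/FileStore"} and returns the decoded JSON response:
    #
    #     self._send_to_api(method="get", endpoint="list", json={"path": "/FileStore"})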

    def _create_handle(self, path, overwrite=True):
        """
        Internal function to create a handle, which can be used to
        write blocks of a file to DBFS.
        A handle has a unique identifier which needs to be passed
        whenever data is written during this transaction.
        The handle is active for 10 minutes - after that a new
        write transaction needs to be created.
        Make sure to close the handle after you are finished.

        Parameters
        ----------
        path: str
            Absolute path for this file.
        overwrite: bool
            If a file already exists at this location, either overwrite
            it or raise an exception.
        """
        try:
            r = self._send_to_api(
                method="post",
                endpoint="create",
                json={"path": path, "overwrite": overwrite},
            )
            return r["handle"]
        except DatabricksException as e:
            if e.error_code == "RESOURCE_ALREADY_EXISTS":
                raise FileExistsError(e.message) from e

            raise

    def _close_handle(self, handle):
        """
        Close a handle, which was opened by :func:`_create_handle`.

        Parameters
        ----------
        handle: str
            Which handle to close.
        """
        try:
            self._send_to_api(method="post", endpoint="close", json={"handle": handle})
        except DatabricksException as e:
            if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                raise FileNotFoundError(e.message) from e

            raise

    def _add_data(self, handle, data):
        """
        Upload data to an already opened file handle
        (opened by :func:`_create_handle`).
        The maximal allowed data size is 1MB after
        conversion to base64.
        Remember to close the handle when you are finished.

        Parameters
        ----------
        handle: str
            Which handle to upload data to.
        data: bytes
            Block of data to add to the handle.
        """
        data = base64.b64encode(data).decode()
        try:
            self._send_to_api(
                method="post",
                endpoint="add-block",
                json={"handle": handle, "data": data},
            )
        except DatabricksException as e:
            if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                raise FileNotFoundError(e.message) from e
            elif e.error_code == "MAX_BLOCK_SIZE_EXCEEDED":
                raise ValueError(e.message) from e

            raise
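
    # The write path uses these three helpers together. A sketch of the handle
    # lifecycle ("/FileStore/out.bin" is an arbitrary example path):
    #
    #     handle = self._create_handle("/FileStore/out.bin", overwrite=True)
    #     self._add_data(handle, b"first block")  # repeated once per <= 1MB block
    #     self._close_handle(handle)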

    def _get_data(self, path, start, end):
        """
        Download a block of data in bytes from the given absolute path,
        covering the range [start, end).
        The maximum number of allowed bytes to read is 1MB.

        Parameters
        ----------
        path: str
            Absolute path to download data from
        start: int
            Start position of the block
        end: int
            End position of the block
        """
        try:
            r = self._send_to_api(
                method="get",
                endpoint="read",
                json={"path": path, "offset": start, "length": end - start},
            )
            return base64.b64decode(r["data"])
        except DatabricksException as e:
            if e.error_code == "RESOURCE_DOES_NOT_EXIST":
                raise FileNotFoundError(e.message) from e
            elif e.error_code in ["INVALID_PARAMETER_VALUE", "MAX_READ_SIZE_EXCEEDED"]:
                raise ValueError(e.message) from e

            raise

    def invalidate_cache(self, path=None):
        if path is None:
            self.dircache.clear()
        else:
            self.dircache.pop(path, None)
        super().invalidate_cache(path)


class DatabricksFile(AbstractBufferedFile):
    """
    Helper class for files referenced in the DatabricksFileSystem.
    """

    DEFAULT_BLOCK_SIZE = 1 * 2**20  # only allowed block size

    def __init__(
        self,
        fs,
        path,
        mode="rb",
        block_size="default",
        autocommit=True,
        cache_type="readahead",
        cache_options=None,
        **kwargs,
    ):
        """
        Create a new instance of the DatabricksFile.

        The blocksize needs to be the default one.
        """
        if block_size is None or block_size == "default":
            block_size = self.DEFAULT_BLOCK_SIZE

        assert block_size == self.DEFAULT_BLOCK_SIZE, (
            f"Only the default block size is allowed, not {block_size}"
        )

        super().__init__(
            fs,
            path,
            mode=mode,
            block_size=block_size,
            autocommit=autocommit,
            cache_type=cache_type,
            cache_options=cache_options or {},
            **kwargs,
        )

    def _initiate_upload(self):
        """Internal function to start a file upload"""
        self.handle = self.fs._create_handle(self.path)

    def _upload_chunk(self, final=False):
        """Internal function to add a chunk of data to a started upload"""
        self.buffer.seek(0)
        data = self.buffer.getvalue()

        data_chunks = [
            data[start:end] for start, end in self._to_sized_blocks(len(data))
        ]

        for data_chunk in data_chunks:
            self.fs._add_data(handle=self.handle, data=data_chunk)

        if final:
            self.fs._close_handle(handle=self.handle)
            return True

    def _fetch_range(self, start, end):
        """Internal function to download a block of data"""
        return_buffer = b""
        length = end - start
        for chunk_start, chunk_end in self._to_sized_blocks(length, start):
            return_buffer += self.fs._get_data(
                path=self.path, start=chunk_start, end=chunk_end
            )

        return return_buffer

    def _to_sized_blocks(self, length, start=0):
        """Helper function to split the range [start, start + length) into block-sized chunks"""
        end = start + length
        for data_chunk in range(start, end, self.blocksize):
            data_start = data_chunk
            data_end = min(end, data_chunk + self.blocksize)
            yield data_start, data_end
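
    # Illustration of the chunking above (assuming the 1 MiB DEFAULT_BLOCK_SIZE):
    # a 2.5 MiB buffer starting at offset 0 is split by _to_sized_blocks into
    # (0, 1048576), (1048576, 2097152) and (2097152, 2621440), so every
    # add-block or read request stays within the 1 MB limit of the DBFS API.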