|
33 | 33 | "\n", |
34 | 34 | "import dotenv\n", |
35 | 35 | "import jsonschema\n", |
36 | | - "from fs.base import FS\n", |
37 | | - "from fs.osfs import OSFS\n", |
| 36 | + "from fsspec.spec import AbstractFileSystem\n", |
| 37 | + "from fsspec.implementations.local import LocalFileSystem\n", |
| 38 | + "from fsspec.implementations.dirfs import DirFileSystem\n", |
38 | 39 | "from jsonschema import ValidationError, validate\n", |
| 40 | + "from io import StringIO\n", |
39 | 41 | "\n", |
40 | 42 | "logger = logging.getLogger(__name__)" |
41 | 43 | ] |
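For orientation, the fsspec types introduced above map onto the removed PyFilesystem2 ones roughly as follows. This is an illustrative sketch only: the scratch directory and the explicit fs=LocalFileSystem() argument are choices made for the example, not code from the notebook (fsspec.spec.AbstractFileSystem takes over the role that fs.base.FS played in the type hints).

import json
import os
import tempfile

from fsspec.implementations.dirfs import DirFileSystem
from fsspec.implementations.local import LocalFileSystem

# Scratch directory for the sketch (placeholder; not a path the notebook uses).
tmpdir = tempfile.mkdtemp()
with open(os.path.join(tmpdir, "config.json"), "w") as f:
    json.dump({}, f)

# fs.osfs.OSFS(tmpdir) roughly corresponds to a DirFileSystem rooted at tmpdir:
# relative paths handed to the driver resolve against that root.
driver = DirFileSystem(tmpdir, fs=LocalFileSystem())
assert driver.exists("config.json")
with driver.open("config.json", "rb") as ifile:  # fsspec opens in binary mode by default
    data = json.load(ifile)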
|
241 | 243 | " # this is a risky value: it gets reused across instances;\n", |
242 | 244 | " # the idea is to maybe set it once and use it multiple times.\n", |
243 | 245 | " # but in testing this smells bad\n", |
244 | | - " DEFAULT_STORAGE_DRIVER: FS = None # defaults to OSFS\n", |
| 246 | + "    DEFAULT_STORAGE_DRIVER: AbstractFileSystem = None # defaults to a DirFileSystem rooted at the CWD\n", |
245 | 247 | " \n", |
246 | 248 | " CONFIG_VALIDATOR_JSON_SCHEMA_ENVVAR_NAME = 'CONFIG_VALIDATOR_JSON_SCHEMA'\n", |
247 | 249 | " \n", |
248 | 250 | " @classmethod\n", |
249 | 251 | " def get_default_storage_driver(cls):\n", |
250 | 252 | " if cls.DEFAULT_STORAGE_DRIVER is None:\n", |
251 | | - " cls.DEFAULT_STORAGE_DRIVER = OSFS(os.getcwd())\n", |
| 253 | + " cls.DEFAULT_STORAGE_DRIVER = DirFileSystem(os.getcwd())\n", |
252 | 254 | " return cls.DEFAULT_STORAGE_DRIVER\n", |
253 | 255 | "\n", |
254 | 256 | " \n", |
255 | 257 | " @classmethod\n", |
256 | 258 | " def _get_maybe_abspath_driver(cls, maybe_abspath: str):\n", |
257 | 259 | " if os.path.isabs(maybe_abspath): # special case\n", |
258 | | - " return OSFS('/')\n", |
| 260 | + " return DirFileSystem('/')\n", |
259 | 261 | " else:\n", |
260 | 262 | " return cls.get_default_storage_driver()\n", |
261 | 263 | " \n", |
262 | 264 | " @classmethod\n", |
263 | | - " def load_json(cls, json_source: Union[str, dict]=None, storage_driver: FS = None) -> dict:\n", |
| 265 | + " def load_json(cls, json_source: Union[str, dict]=None, storage_driver: AbstractFileSystem = None) -> dict:\n", |
264 | 266 | " \"\"\"\n", |
265 | 267 | " Convenience method to return a dictionary from either a file path or an already-loaded dictionary.\n", |
266 | 268 | "\n", |
267 | 269 | " Args:\n", |
268 | 270 | " - `json_source` (Union[str, dict], optional): The JSON source to load.\n", |
269 | 271 | " This can be a file path (str) \n", |
270 | 272 | " or an already loaded dictionary (dict). \n", |
271 | | - " - `storage_driver` (FS, optional): An instance of the storage driver used to load the JSON file. \n", |
272 | | - " If not provided, OSFS from the current working dir is used.\n", |
| 273 | + "            - `storage_driver` (AbstractFileSystem, optional): An instance of the storage driver used to\n", |
| 274 | + "                    load the JSON file. If not provided, a DirFileSystem rooted at the\n", |
| 275 | + "                    current working directory is used.\n", |
273 | 276 | "\n", |
274 | 277 | " Returns:\n", |
275 | 278 | " dict: A dictionary that was loaded from the provided `json_source`.\n", |
|
283 | 286 | " return json.load(ifile)\n", |
284 | 287 | "\n", |
285 | 288 | " @classmethod\n", |
286 | | - " def get_default_json_schema(cls, storage_driver: FS = None) -> dict:\n", |
| 289 | + " def get_default_json_schema(cls, storage_driver: AbstractFileSystem = None) -> dict:\n", |
287 | 290 | " if cls.CONFIG_VALIDATOR_JSON_SCHEMA_ENVVAR_NAME in os.environ:\n", |
288 | 291 | " expected_json_schema_path = \\\n", |
289 | 292 | " os.environ[cls.CONFIG_VALIDATOR_JSON_SCHEMA_ENVVAR_NAME]\n", |
290 | 293 | " return cls.load_json(expected_json_schema_path, storage_driver)\n", |
291 | 294 | " return None\n", |
292 | 295 | "\n", |
293 | | - " def __init__(self, json_schema: Union[str, dict]=None, storage_driver: FS=None):\n", |
| 296 | + " def __init__(self, json_schema: Union[str, dict]=None, storage_driver: AbstractFileSystem=None):\n", |
294 | 297 | " \"\"\"\n", |
295 | 298 | " Initialize the instance with a JSON schema and a storage driver.\n", |
296 | 299 | "\n", |
|
299 | 302 | " If no value is provided, it will fall back to looking for an environment \n", |
300 | 303 | " variable corresponding to the class variable \n", |
301 | 304 | " `CONFIG_VALIDATOR_JSON_SCHEMA_ENVVAR_NAME` to find a JSON schema file.\n", |
302 | | - " - `storage_driver` (FS, optional): The storage driver to use. If no value is provided, \n", |
303 | | - " `self.__class__.DEFAULT_STORAGE_DRIVER` is used.\n", |
| 305 | + " - `storage_driver` (AbstractFileSystem, optional): The storage driver to use. If no value is provided, \n", |
| 306 | + " `self.__class__.DEFAULT_STORAGE_DRIVER` is used.\n", |
304 | 307 | "\n", |
305 | 308 | " Raises:\n", |
306 | 309 | " Exception: An exception is raised if no valid JSON schema is provided or found.\n", |
|
336 | 339 | " def load_dotenv(cls,\n", |
337 | 340 | " json_schema: Union[str, dict]=None,\n", |
338 | 341 | " dotenv_path: str=None,\n", |
339 | | - " storage_driver: FS=None,\n", |
| 342 | + " storage_driver: AbstractFileSystem=None,\n", |
340 | 343 | " override: bool=False,\n", |
341 | 344 | " ):\n", |
342 | 345 | " \"\"\"\n", |
|
349 | 352 | " (such as an environment variable or default schema) is used.\n", |
350 | 353 | " - `dotenv_path` (str, optional): Path to the .env file to load the variables from.\n", |
351 | 354 | " If not provided, loads an empty dict to start.\n", |
352 | | - " - `storage_driver` (FS, optional): The storage driver to use for loading files. If not given,\n", |
353 | | - " \".env\" will be attempted from the current working directory;\n", |
354 | | - " if that does not exist, an empty dict will be used.\n", |
| 355 | + " - `storage_driver` (AbstractFileSystem, optional): The storage driver to use for loading files.\n", |
| 356 | + " If not given, \".env\" will be attempted from the current working\n", |
| 357 | + " directory; if that does not exist, an empty dict will be used.\n", |
355 | 358 | " - `override` (bool, optional): If True, variables from the .env file or schema default override existing\n", |
356 | 359 | " `os.environ` variables.\n", |
357 | 360 | " \"\"\"\n", |
358 | 361 | "\n", |
359 | 362 | " # WARN this sidesteps storage_driver!\n", |
360 | | - " # it will cause breakage if storage_driver != OSFS AND `.env` exists in PWD\n", |
| 363 | + "        # it will cause breakage if storage_driver cannot read local absolute paths AND `.env` exists in PWD\n", |
361 | 364 | " if dotenv_path is None:\n", |
362 | 365 | " maybe_dotenv_path = dotenv.find_dotenv() # '' if not exist; else abspath\n", |
363 | 366 | " if maybe_dotenv_path:\n", |
|
369 | 372 | " if dotenv_path:\n", |
370 | 373 | " dotenv_storage_driver = storage_driver or cls._get_maybe_abspath_driver(dotenv_path)\n", |
371 | 374 | " with dotenv_storage_driver.open(dotenv_path) as ifile:\n", |
372 | | - " config = dotenv.dotenv_values(stream=ifile)\n", |
| 375 | + " config = dotenv.dotenv_values(stream=StringIO(ifile.read().decode('utf-8')))\n", |
373 | 376 | " \n", |
374 | 377 | " if config is None:\n", |
375 | 378 | " dotenv_storage_driver = storage_driver or cls.get_default_storage_driver()\n", |
376 | 379 | " if dotenv_storage_driver.exists('.env'): # unlike dotenv.find_dotenv, stay relative!\n", |
377 | 380 | " with dotenv_storage_driver.open('.env') as ifile:\n", |
378 | | - " config = dotenv.dotenv_values(stream=ifile)\n", |
| 381 | + " config = dotenv.dotenv_values(stream=StringIO(ifile.read().decode('utf-8')))\n", |
379 | 382 | " \n", |
380 | 383 | " if config is None:\n", |
381 | 384 | " config = {}\n", |
|
580 | 583 | "outputs": [], |
581 | 584 | "source": [ |
582 | 585 | "#| hide\n", |
583 | | - "from fs.memoryfs import MemoryFS" |
| 586 | + "from morefs.memory import MemFS" |
584 | 587 | ] |
585 | 588 | }, |
586 | 589 | { |
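morefs' MemFS stands in for the old fs.memoryfs.MemoryFS as an in-memory, fsspec-compatible filesystem, so it can be passed wherever a storage_driver is expected. A small sketch of the API difference the tests below rely on (the directory and file names here are placeholders): PyFilesystem2's makedirs(..., recreate=True) becomes makedirs(..., exist_ok=True).

from morefs.memory import MemFS

memfs = MemFS()
# recreate=True (PyFilesystem2) -> exist_ok=True (fsspec-style makedirs)
memfs.makedirs("scratch/dir", exist_ok=True)
with memfs.open("scratch/dir/example.json", "w") as ofile:
    ofile.write('{"hello": "world"}')
assert memfs.exists("scratch/dir/example.json")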
|
590 | 593 | "outputs": [], |
591 | 594 | "source": [ |
592 | 595 | "#| hide\n", |
593 | | - "# test ability to override the storage driver (memoryfs here)\n", |
| 596 | + "# test ability to override the storage driver (MemFS here)\n", |
594 | 597 | "\n", |
595 | | - "memfs = MemoryFS()\n", |
| 598 | + "memfs = MemFS()\n", |
596 | 599 | "\n", |
597 | | - "memfs.makedirs('extra-long-directory-place', recreate=True)\n", |
598 | | - "with memfs.open('extra-long-directory-place/schema.json', 'w') as ofile:\n", |
| 600 | + "memfs.makedirs('extra-long-directory-place', exist_ok=True)\n", |
| 601 | + "temp_config_validator_json_schema_path = 'extra-long-directory-place/schema.json'\n", |
| 602 | + "with memfs.open(temp_config_validator_json_schema_path, 'w') as ofile:\n", |
599 | 603 | " ofile.write(json.dumps(example_properties_schema))\n", |
600 | | - " os.environ['CONFIG_VALIDATOR_JSON_SCHEMA'] = ofile.name\n", |
| 604 | + " os.environ['CONFIG_VALIDATOR_JSON_SCHEMA'] = temp_config_validator_json_schema_path\n", |
601 | 605 | "\n", |
602 | 606 | "validator = ConfigValidator(storage_driver=memfs)\n", |
603 | 607 | "validated_config = validator.load_config({\n", |
|
615 | 619 | "\n", |
616 | 620 | "# test loading dotenv from an arbitrary file\n", |
617 | 621 | "\n", |
618 | | - "memfs.makedirs('special-bespoke-location', recreate=True)\n", |
| 622 | + "memfs.makedirs('special-bespoke-location', exist_ok=True)\n", |
619 | 623 | "with memfs.open('special-bespoke-location/my-own.env', 'w') as ofile:\n", |
620 | 624 | " ofile.write('\\n'.join([\n", |
621 | 625 | " 'string_value_with_enum=only',\n", |
|
647 | 651 | "#| hide\n", |
648 | 652 | "# test non-os FS with fallback .env path (=$PWD/.env)\n", |
649 | 653 | "\n", |
650 | | - "memfs_fallback = MemoryFS()\n", |
| 654 | + "memfs_fallback = MemFS()\n", |
651 | 655 | "\n", |
652 | 656 | "with memfs_fallback.open('schema.json', 'w') as ofile:\n", |
653 | 657 | " ofile.write(json.dumps(example_properties_schema))\n", |
|
714 | 718 | " }\n", |
715 | 719 | " }))\n", |
716 | 720 | " \n", |
717 | | - "memfs.makedirs('precedence-test', recreate=True)\n", |
| 721 | + "memfs.makedirs('precedence-test', exist_ok=True)\n", |
718 | 722 | "with memfs.open('precedence-test/.env', 'w') as ofile:\n", |
719 | 723 | " ofile.write('\\n'.join([\n", |
720 | 724 | " 'A_VALUE_TO_OVERRIDE=in dotenv',\n", |
|